3072d30e 1/* RTL dead store elimination.
d353bf18 2 Copyright (C) 2005-2015 Free Software Foundation, Inc.
3072d30e 3
4 Contributed by Richard Sandiford <rsandifor@codesourcery.com>
5 and Kenneth Zadeck <zadeck@naturalbridge.com>
6
7This file is part of GCC.
8
9GCC is free software; you can redistribute it and/or modify it under
10the terms of the GNU General Public License as published by the Free
8c4c00c1 11Software Foundation; either version 3, or (at your option) any later
3072d30e 12version.
13
14GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
18
19You should have received a copy of the GNU General Public License
8c4c00c1 20along with GCC; see the file COPYING3. If not see
21<http://www.gnu.org/licenses/>. */
3072d30e 22
23#undef BASELINE
24
25#include "config.h"
26#include "system.h"
27#include "coretypes.h"
9ef16211 28#include "backend.h"
29#include "tree.h"
30#include "gimple.h"
3072d30e 31#include "rtl.h"
9ef16211 32#include "df.h"
b20a8bb4 33#include "alias.h"
b20a8bb4 34#include "fold-const.h"
9ed99284 35#include "stor-layout.h"
10d4de0e 36#include "tm_p.h"
3072d30e 37#include "regs.h"
5a9ecd4a 38#include "regset.h"
3072d30e 39#include "flags.h"
94ea8568 40#include "cfgrtl.h"
3072d30e 41#include "cselib.h"
3072d30e 42#include "tree-pass.h"
43#include "alloc-pool.h"
3072d30e 44#include "insn-config.h"
d53441c8 45#include "expmed.h"
46#include "dojump.h"
47#include "explow.h"
48#include "calls.h"
49#include "emit-rtl.h"
50#include "varasm.h"
51#include "stmt.h"
3072d30e 52#include "expr.h"
53#include "recog.h"
34517c64 54#include "insn-codes.h"
5c9051a4 55#include "optabs.h"
3072d30e 56#include "dbgcnt.h"
9311ed8a 57#include "target.h"
1242bee6 58#include "params.h"
bc61cadb 59#include "internal-fn.h"
073c1fd5 60#include "gimple-ssa.h"
ec1203cd 61#include "rtl-iter.h"
1f91a12d 62#include "cfgcleanup.h"
3072d30e 63
64/* This file contains three techniques for performing Dead Store
48e1416a 65 Elimination (dse).
3072d30e 66
67 * The first technique performs dse locally on any base address. It
68 is based on cselib, which is a local value numbering technique.
69 This technique is local to a basic block but deals with fairly
70 general addresses.
48e1416a 71
3072d30e 72 * The second technique performs dse globally but is restricted to
73 base addresses that are either constant or are relative to the
74 frame_pointer.
75
76 * The third technique (which is only done after register allocation)
77 processes the spill slots. This differs from the second
78 technique because it takes advantage of the fact that spilling is
79 completely free from the effects of aliasing.
80
81 Logically, dse is a backwards dataflow problem. A store can be
82 deleted if it cannot be reached in the backward direction by any
83 use of the value being stored. However, the local technique uses a
84 forwards scan of the basic block because cselib requires that the
85 block be processed in that order.
86
87 The pass is logically broken into 7 steps:
88
89 0) Initialization.
90
91 1) The local algorithm, as well as scanning the insns for the two
92 global algorithms.
93
94 2) Analysis to see if the global algs are necessary. In the case
95 of stores based on a constant address, there must be at least two
96 stores to that address, to make it possible to delete some of the
97 stores. In the case of stores off of the frame or spill related
98 stores, only one store to an address is necessary because those
99 stores die at the end of the function.
100
48e1416a 101 3) Set up the global dataflow equations based on processing the
3072d30e 102 info parsed in the first step.
103
104 4) Solve the dataflow equations.
105
106 5) Delete the insns that the global analysis has indicated are
107 unnecessary.
108
9d75589a 109 6) Delete insns that store the same value as a preceding store
aa140b76 110 where the earlier store couldn't be eliminated.
111
112 7) Cleanup.
3072d30e 113
114 The first step uses cselib and canon_rtx to build the largest expression
115 possible for each address. This pass is a forwards pass through
116 each basic block. From the point of view of the global technique,
117 the first pass could examine a block in either direction. The
bef304b8 118 forwards ordering is to accommodate cselib.
3072d30e 119
2d0fd66d 120 We make a simplifying assumption: addresses fall into four broad
3072d30e 121 categories:
122
123 1) base has rtx_varies_p == false, offset is constant.
124 2) base has rtx_varies_p == false, offset variable.
125 3) base has rtx_varies_p == true, offset constant.
126 4) base has rtx_varies_p == true, offset variable.
127
128 The local passes are able to process all 4 kinds of addresses. The
2d0fd66d 129 global pass only handles 1).
3072d30e 130
131 The global problem is formulated as follows:
132
133 A store, S1, to address A, where A is not relative to the stack
134 frame, can be eliminated if all paths from S1 to the end of the
2d0fd66d 135 function contain another store to A before a read to A.
3072d30e 136
137 If the address A is relative to the stack frame, a store S2 to A
2d0fd66d 138 can be eliminated if there are no paths from S2 that reach the
3072d30e 139 end of the function that read A before another store to A. In
2d0fd66d 140 this case S2 can be deleted if there are paths from S2 to the
3072d30e 141 end of the function that have no reads or writes to A. This
142 second case allows stores to the stack frame to be deleted that
143 would otherwise die when the function returns. This cannot be
144 done if stores_off_frame_dead_at_return is not true. See the
145 documentation of that variable for when it is false.
146
147 The global problem is formulated as a backwards set union
148 dataflow problem where the stores are the gens and reads are the
149 kills. Set union problems are rare and require some special
150 handling given our representation of bitmaps. A straightforward
2d0fd66d 151 implementation requires a lot of bitmaps filled with 1s.
3072d30e 152 These are expensive and cumbersome in our bitmap formulation so
153 care has been taken to avoid large vectors filled with 1s. See
154 the comments in bb_info and in the dataflow confluence functions
48e1416a 155 for details.
3072d30e 156
157 There are two places for further enhancements to this algorithm:
48e1416a 158
3072d30e 159 1) The original dse which was embedded in a pass called flow also
160 did local address forwarding. For example in
161
162 A <- r100
163 ... <- A
164
165 flow would replace the right hand side of the second insn with a
6dfdc153 166 reference to r100. Most of the information is available to add this
3072d30e 167 to this pass. This has not been done because it is a lot of work in
168 the case that either r100 is assigned to between the first and
169 second insn and/or the second insn is a load of part of the value
170 stored by the first insn.
171
172 insn 5 in gcc.c-torture/compile/990203-1.c simple case.
173 insn 15 in gcc.c-torture/execute/20001017-2.c simple case.
174 insn 25 in gcc.c-torture/execute/20001026-1.c simple case.
175 insn 44 in gcc.c-torture/execute/20010910-1.c simple case.
176
177 2) The cleaning up of spill code is quite profitable. It currently
178 depends on reading tea leaves and chicken entrails left by reload.
179 This pass depends on reload creating a singleton alias set for each
180 spill slot and telling the next dse pass which of these alias sets
181 are the singletons. Rather than analyze the addresses of the
182 spills, dse's spill processing just does analysis of the loads and
183 stores that use those alias sets. There are three cases where this
184 falls short:
185
186 a) Reload sometimes creates the slot for one mode of access, and
187 then inserts loads and/or stores for a smaller mode. In this
188 case, the current code just punts on the slot. The proper thing
189 to do is to back out and use one bit vector position for each
190 byte of the entity associated with the slot. This depends on
191 KNOWING that reload always generates the accesses for each of the
192 bytes in some canonical (read: easy to understand several
193 passes after reload happens) way.
194
195 b) Reload sometimes decides that the spill slot it allocated was not
196 large enough for the mode and goes back and allocates more slots
197 with the same mode and alias set. The backout in this case is a
198 little more graceful than (a). In this case the slot is unmarked
199 as being a spill slot and if the final address comes out to be based
48e1416a 200 off the frame pointer, the global algorithm handles this slot.
3072d30e 201
202 c) For any pass that may prespill, there is currently no
203 mechanism to tell the dse pass that the slot being used has the
204 special properties that reload uses. It may be that all that is
bef304b8 205 required is to have those passes make the same calls that reload
3072d30e 206 does, assuming that the alias sets can be manipulated in the same
207 way. */
208
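/* As a purely hypothetical illustration of the second (global) technique,
   not taken from any real dump: given two stores to the same frame slot

       (set (mem/c:SI (plus (reg/f fp) (const_int -4))) (reg 100))
       ... no read of that slot on any path between them ...
       (set (mem/c:SI (plus (reg/f fp) (const_int -4))) (reg 101))

   the first store is dead because every path from it reaches another
   store to the same bytes before any read of them, so step 5 above can
   delete it.  */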
209/* There are limits to the size of constant offsets we model for the
210 global problem. There are certainly test cases that exceed this
211 limit; however, it is unlikely that there are important programs
212 that really have constant offsets this size. */
213#define MAX_OFFSET (64 * 1024)
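
/* For instance (a hypothetical example), a store to
   (mem:SI (plus (reg/f fp) (const_int 100000))) falls outside this
   window, so set_usage_bits below records no position bits for it and
   the global problem simply never considers that store.  */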
214
4fb07d00 215/* Obstack for the DSE dataflow bitmaps. We don't want to put these
216 on the default obstack because these bitmaps can grow quite large
217 (~2GB for the small (!) test case of PR54146) and we'll hold on to
218 all that memory until the end of the compiler run.
219 As a bonus, delete_tree_live_info can destroy all the bitmaps by just
220 releasing the whole obstack. */
221static bitmap_obstack dse_bitmap_obstack;
222
223/* Obstack for other data. As for above: Kinda nice to be able to
224 throw it all away at the end in one big sweep. */
225static struct obstack dse_obstack;
226
227/* Scratch bitmap for cselib's cselib_expand_value_rtx. */
3072d30e 228static bitmap scratch = NULL;
4fb07d00 229
55c5ac9f 230struct insn_info_type;
3072d30e 231
232/* This structure holds information about a candidate store. */
48e1416a 233struct store_info
3072d30e 234{
235
236 /* False means this is a clobber. */
237 bool is_set;
238
aa140b76 239 /* False if a single HOST_WIDE_INT bitmap is used for positions_needed. */
240 bool is_large;
241
3072d30e 242 /* The id of the mem group of the base address. If rtx_varies_p is
243 true, this is -1. Otherwise, it is the index into the group
244 table. */
245 int group_id;
48e1416a 246
3072d30e 247 /* This is the cselib value. */
248 cselib_val *cse_base;
249
250 /* The canonized mem. */
251 rtx mem;
252
82d2c88b 253 /* Canonized MEM address for use by canon_true_dependence. */
3072d30e 254 rtx mem_addr;
255
256 /* If this is non-zero, it is the alias set of a spill location. */
32c2fdea 257 alias_set_type alias_set;
3072d30e 258
259 /* The offset of the first byte and of the byte after the last byte
260 associated with the operation. */
aa140b76 261 HOST_WIDE_INT begin, end;
262
263 union
264 {
265 /* A bitmask as wide as the number of bytes in the word that
266 contains a 1 if the byte may be needed. The store is unused if
267 all of the bits are 0. This is used if IS_LARGE is false. */
268 unsigned HOST_WIDE_INT small_bitmask;
269
270 struct
271 {
272 /* A bitmap with one bit per byte. Cleared bit means the position
273 is needed. Used if IS_LARGE is true. */
843bd2fa 274 bitmap bmap;
3072d30e 275
aa140b76 276 /* Number of set bits (i.e. unneeded bytes) in BMAP. If it is
277 equal to END - BEGIN, the whole store is unused. */
278 int count;
279 } large;
280 } positions_needed;
3072d30e 281
282 /* The next store info for this insn. */
283 struct store_info *next;
284
285 /* The right hand side of the store. This is used if there is a
286 subsequent reload of the mem's address somewhere later in the
287 basic block. */
aa140b76 288 rtx rhs;
289
290 /* If rhs is or holds a constant, this contains that constant,
291 otherwise NULL. */
292 rtx const_rhs;
293
294 /* Set if this store stores the same constant value as the REDUNDANT_REASON
295 insn stored. These aren't eliminated early, because doing that
296 might prevent the earlier larger store from being eliminated. */
55c5ac9f 297 struct insn_info_type *redundant_reason;
3072d30e 298};
299
4e43e20a 300/* Return a bitmask with the first N low bits set. */
301
302static unsigned HOST_WIDE_INT
303lowpart_bitmask (int n)
304{
305 unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT) 0;
306 return mask >> (HOST_BITS_PER_WIDE_INT - n);
307}
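
/* For example (assuming a 64-bit HOST_WIDE_INT): lowpart_bitmask (1) is 0x1,
   lowpart_bitmask (4) is 0xf and lowpart_bitmask (64) is all ones.
   all_positions_needed_p below uses lowpart_bitmask (width) << start to
   build the mask for a byte range of a store.  */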
308
3072d30e 309typedef struct store_info *store_info_t;
55c5ac9f 310static pool_allocator<store_info> cse_store_info_pool ("cse_store_info_pool",
311 100);
312
313static pool_allocator<store_info> rtx_store_info_pool ("rtx_store_info_pool",
314 100);
3072d30e 315
316/* This structure holds information about a load. These are only
317 built for rtx bases. */
55c5ac9f 318struct read_info_type
3072d30e 319{
320 /* The id of the mem group of the base address. */
321 int group_id;
322
323 /* If this is non-zero, it is the alias set of a spill location. */
32c2fdea 324 alias_set_type alias_set;
3072d30e 325
326 /* The offset of the first byte and of the byte after the last byte
327 associated with the operation. If begin == end == 0, the read did not have
328 a constant offset. */
329 int begin, end;
330
331 /* The mem being read. */
332 rtx mem;
333
334 /* The next read_info for this insn. */
55c5ac9f 335 struct read_info_type *next;
336
337 /* Pool allocation new operator. */
338 inline void *operator new (size_t)
339 {
340 return pool.allocate ();
341 }
342
343 /* Delete operator utilizing pool allocation. */
344 inline void operator delete (void *ptr)
345 {
346 pool.remove ((read_info_type *) ptr);
347 }
348
349 /* Memory allocation pool. */
350 static pool_allocator<read_info_type> pool;
3072d30e 351};
55c5ac9f 352typedef struct read_info_type *read_info_t;
3072d30e 353
55c5ac9f 354pool_allocator<read_info_type> read_info_type::pool ("read_info_pool", 100);
3072d30e 355
356/* One of these records is created for each insn. */
357
55c5ac9f 358struct insn_info_type
3072d30e 359{
360 /* Set true if the insn contains a store but the insn itself cannot
361 be deleted. This is set if the insn is a parallel and there is
362 more than one non-dead output or if the insn is in some way
363 volatile. */
364 bool cannot_delete;
365
366 /* This field is only used by the global algorithm. It is set true
367 if the insn contains any read of mem except for a (1). This is
368 also set if the insn is a call or has a clobber mem. If the insn
369 contains a wild read, the use_rec will be null. */
370 bool wild_read;
371
b4a708fb 372 /* This is true only for CALL instructions which could potentially read
373 any non-frame memory location. This field is used by the global
374 algorithm. */
375 bool non_frame_wild_read;
376
17e1318c 377 /* This field is only used for the processing of const functions.
378 These functions cannot read memory, but they can read the stack
16bf64db 379 because that is where they may get their parms. We need to be
380 this conservative because, like the store motion pass, we don't
381 consider CALL_INSN_FUNCTION_USAGE when processing call insns.
382 Moreover, we need to distinguish two cases:
383 1. Before reload (register elimination), the stores related to
384 outgoing arguments are stack pointer based and thus deemed
385 of non-constant base in this pass. This requires special
386 handling but also means that the frame pointer based stores
387 need not be killed upon encountering a const function call.
388 2. After reload, the stores related to outgoing arguments can be
389 either stack pointer or hard frame pointer based. This means
390 that we have no other choice than also killing all the frame
391 pointer based stores upon encountering a const function call.
17853422 392 This field is set after reload for const function calls and before
393 reload for const tail function calls on targets where arg pointer
394 is the frame pointer. Having this set is less severe than a wild
395 read, it just means that all the frame related stores are killed
396 rather than all the stores. */
16bf64db 397 bool frame_read;
398
399 /* This field is only used for the processing of const functions.
400 It is set if the insn may contain a stack pointer based store. */
17e1318c 401 bool stack_pointer_based;
3072d30e 402
403 /* This is true if any of the sets within the store contains a
404 cselib base. Such stores can only be deleted by the local
405 algorithm. */
406 bool contains_cselib_groups;
407
408 /* The insn. */
ebabb7a3 409 rtx_insn *insn;
3072d30e 410
411 /* The list of mem sets or mem clobbers that are contained in this
412 insn. If the insn is deletable, it contains only one mem set.
413 But it could also contain clobbers. Insns that contain more than
414 one mem set are not deletable, but each of those mems is here in
6dfdc153 415 order to provide info to delete other insns. */
3072d30e 416 store_info_t store_rec;
417
418 /* The linked list of mem uses in this insn. Only the reads from
419 rtx bases are listed here. The reads to cselib bases are
420 completely processed during the first scan and so are never
421 created. */
422 read_info_t read_rec;
423
5a9ecd4a 424 /* The live fixed registers. We assume only fixed registers can
425 cause trouble by being clobbered from an expanded pattern;
426 storing only the live fixed registers (rather than all registers)
427 means less memory needs to be allocated / copied for the individual
428 stores. */
429 regset fixed_regs_live;
430
3072d30e 431 /* The prev insn in the basic block. */
55c5ac9f 432 struct insn_info_type * prev_insn;
3072d30e 433
434 /* The linked list of insns that are in consideration for removal in
9d75589a 435 the forwards pass through the basic block. This pointer may be
3072d30e 436 trash as it is not cleared when a wild read occurs. The only
f0b5f617 437 time it is guaranteed to be correct is when the traversal starts
3072d30e 438 at active_local_stores. */
55c5ac9f 439 struct insn_info_type * next_local_store;
440
441 /* Pool allocation new operator. */
442 inline void *operator new (size_t)
443 {
444 return pool.allocate ();
445 }
446
447 /* Delete operator utilizing pool allocation. */
448 inline void operator delete (void *ptr)
449 {
450 pool.remove ((insn_info_type *) ptr);
451 }
452
453 /* Memory allocation pool. */
454 static pool_allocator<insn_info_type> pool;
3072d30e 455};
55c5ac9f 456typedef struct insn_info_type *insn_info_t;
3072d30e 457
55c5ac9f 458pool_allocator<insn_info_type> insn_info_type::pool ("insn_info_pool", 100);
3072d30e 459
460/* The linked list of stores that are under consideration in this
48e1416a 461 basic block. */
3072d30e 462static insn_info_t active_local_stores;
1242bee6 463static int active_local_stores_len;
3072d30e 464
55c5ac9f 465struct dse_bb_info_type
3072d30e 466{
3072d30e 467 /* Pointer to the insn info for the last insn in the block. These
468 are linked so this is how all of the insns are reached. During
469 scanning this is the current insn being scanned. */
470 insn_info_t last_insn;
471
472 /* The info for the global dataflow problem. */
473
474
475 /* This is set if the transfer function should AND in the wild_read
476 bitmap before applying the kill and gen sets. That vector knocks
477 out most of the bits in the bitmap and thus speeds up the
478 operations. */
479 bool apply_wild_read;
480
a1b0a968 481 /* The following 4 bitvectors hold information about which positions
482 of which stores are live or dead. They are indexed by
483 get_bitmap_index. */
484
3072d30e 485 /* The set of store positions that exist in this block before a wild read. */
486 bitmap gen;
48e1416a 487
3072d30e 488 /* The set of load positions that exist in this block above the
489 same position of a store. */
490 bitmap kill;
491
492 /* The set of stores that reach the top of the block without being
493 killed by a read.
494
495 Do not represent the in if it is all ones. Note that this is
496 what the bitvector should logically be initialized to for a set
497 intersection problem. However, like the kill set, this is too
498 expensive. So initially, the in set will only be created for the
499 exit block and any block that contains a wild read. */
500 bitmap in;
501
502 /* The set of stores that reach the bottom of the block from its
503 successors.
504
505 Do not represent the out if it is all ones. Note that this is
506 what the bitvector should logically be initialized to for a set
507 intersection problem. However, like the kill and in set, this is
508 too expensive. So what is done is that the confluence operator
509 just initializes the vector from one of the out sets of the
510 successors of the block. */
511 bitmap out;
a1b0a968 512
513 /* The following bitvector is indexed by the reg number. It
514 contains the set of regs that are live at the current instruction
515 being processed. While it contains info for all of the
5a9ecd4a 516 registers, only the hard registers are actually examined. It is used
517 to ensure that shift and/or add sequences that are inserted do not
9d75589a 518 accidentally clobber live hard regs. */
a1b0a968 519 bitmap regs_live;
55c5ac9f 520
521 /* Pool allocation new operator. */
522 inline void *operator new (size_t)
523 {
524 return pool.allocate ();
525 }
526
527 /* Delete operator utilizing pool allocation. */
528 inline void operator delete (void *ptr)
529 {
530 pool.remove ((dse_bb_info_type *) ptr);
531 }
532
533 /* Memory allocation pool. */
534 static pool_allocator<dse_bb_info_type> pool;
3072d30e 535};
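
/* Roughly (a sketch of how steps 3 and 4 combine these sets, not a
   verbatim copy of the transfer function):

     out(bb) = intersection over successors S of in(S)
     in(bb)  = gen(bb) | (out(bb) & ~kill(bb))

   where a missing in or out bitmap stands for an all-ones vector.  A set
   bit for a store position roughly means that a store to that position
   at this point would be overwritten before being read on every path to
   the end of the function.  */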
536
55c5ac9f 537typedef struct dse_bb_info_type *bb_info_t;
538pool_allocator<dse_bb_info_type> dse_bb_info_type::pool ("bb_info_pool", 100);
3072d30e 539
540/* Table to hold all bb_infos. */
541static bb_info_t *bb_table;
542
543/* There is a group_info for each rtx base that is used to reference
544 memory. There are not many rtx bases because they are
545 very limited in scope. */
546
48e1416a 547struct group_info
3072d30e 548{
549 /* The actual base of the address. */
550 rtx rtx_base;
551
552 /* The sequential id of the base. This allows us to have a
553 canonical ordering of these that is not based on addresses. */
554 int id;
555
0ac758f7 556 /* True if there are any positions that are to be processed
557 globally. */
558 bool process_globally;
559
560 /* True if the base of this group is either the frame_pointer or
561 hard_frame_pointer. */
562 bool frame_related;
563
ec410bf1 564 /* A mem wrapped around the base pointer for the group in order to do
565 read dependency. It must be given BLKmode in order to encompass all
566 the possible offsets from the base. */
3072d30e 567 rtx base_mem;
48e1416a 568
82d2c88b 569 /* Canonized version of base_mem's address. */
570 rtx canon_base_addr;
3072d30e 571
572 /* These two sets of two bitmaps are used to keep track of how many
6dfdc153 573 stores are actually referencing that position from this base. We
3072d30e 574 only do this for rtx bases as this will be used to assign
6dfdc153 575 positions in the bitmaps for the global problem. Bit N is set in
3072d30e 576 store1 on the first store for offset N. Bit N is set in store2
577 for the second store to offset N. This is all we need since we
578 only care about offsets that have two or more stores for them.
579
580 The "_n" suffix is for offsets less than 0 and the "_p" suffix is
581 for 0 and greater offsets.
582
583 There is one special case here, for stores into the stack frame,
584 we will or store1 into store2 before deciding which stores look
585 at globally. This is because stores to the stack frame that have
586 no other reads before the end of the function can also be
587 deleted. */
588 bitmap store1_n, store1_p, store2_n, store2_p;
589
b4a708fb 590 /* These bitmaps keep track of which offsets in this group escape this function.
591 An offset escapes if it corresponds to a named variable whose
592 addressable flag is set. */
593 bitmap escaped_n, escaped_p;
594
6dfdc153 595 /* The positions in this bitmap have the same assignments as the in,
3072d30e 596 out, gen and kill bitmaps. This bitmap is all zeros except for
6dfdc153 597 the positions that are occupied by stores for this group. */
3072d30e 598 bitmap group_kill;
599
3072d30e 600 /* The offset_map is used to map the offsets from this base into
6dfdc153 601 positions in the global bitmaps. It is only created after all of
3072d30e 602 the stores have been scanned and we know which ones we
603 care about. */
48e1416a 604 int *offset_map_n, *offset_map_p;
605 int offset_map_size_n, offset_map_size_p;
55c5ac9f 606
607 /* Pool allocation new operator. */
608 inline void *operator new (size_t)
609 {
610 return pool.allocate ();
611 }
612
613 /* Delete operator utilizing pool allocation. */
614 inline void operator delete (void *ptr)
615 {
616 pool.remove ((group_info *) ptr);
617 }
618
619 /* Memory allocation pool. */
620 static pool_allocator<group_info> pool;
3072d30e 621};
622typedef struct group_info *group_info_t;
c1fdef8e 623typedef const struct group_info *const_group_info_t;
55c5ac9f 624
625pool_allocator<group_info> group_info::pool ("rtx_group_info_pool", 100);
3072d30e 626
3072d30e 627/* Index into the rtx_group_vec. */
628static int rtx_group_next_id;
629
3072d30e 630
f1f41a6c 631static vec<group_info_t> rtx_group_vec;
3072d30e 632
633
634/* This structure holds the set of changes that are being deferred
635 when removing a read operation. See replace_read. */
48e1416a 636struct deferred_change
3072d30e 637{
638
639 /* The mem that is being replaced. */
640 rtx *loc;
641
642 /* The reg it is being replaced with. */
643 rtx reg;
644
645 struct deferred_change *next;
55c5ac9f 646
647 /* Pool allocation new operator. */
648 inline void *operator new (size_t)
649 {
650 return pool.allocate ();
651 }
652
653 /* Delete operator utilizing pool allocation. */
654 inline void operator delete (void *ptr)
655 {
656 pool.remove ((deferred_change *) ptr);
657 }
658
659 /* Memory allocation pool. */
660 static pool_allocator<deferred_change> pool;
3072d30e 661};
662
663typedef struct deferred_change *deferred_change_t;
55c5ac9f 664
665pool_allocator<deferred_change> deferred_change::pool
666 ("deferred_change_pool", 10);
3072d30e 667
668static deferred_change_t deferred_change_list = NULL;
669
3072d30e 670/* The group that holds all of the clear_alias_sets. */
671static group_info_t clear_alias_group;
672
673/* The modes of the clear_alias_sets. */
674static htab_t clear_alias_mode_table;
675
676/* Hash table element to look up the mode for an alias set. */
677struct clear_alias_mode_holder
678{
32c2fdea 679 alias_set_type alias_set;
3754d046 680 machine_mode mode;
3072d30e 681};
682
18d50ae6 683/* This is true except if cfun->stdarg -- i.e. we cannot do
ff3ae375 684 this for vararg functions because they play games with the frame. */
3072d30e 685static bool stores_off_frame_dead_at_return;
686
687/* Counter for stats. */
48e1416a 688static int globally_deleted;
689static int locally_deleted;
690static int spill_deleted;
691
3072d30e 692static bitmap all_blocks;
693
b4a708fb 694/* Locations that are killed by calls in the global phase. */
695static bitmap kill_on_calls;
696
3072d30e 697/* The number of bits used in the global bitmaps. */
698static unsigned int current_position;
3072d30e 699\f
700/*----------------------------------------------------------------------------
701 Zeroth step.
702
48e1416a 703 Initialization.
3072d30e 704----------------------------------------------------------------------------*/
705
3072d30e 706
707/* Find the entry associated with ALIAS_SET. */
708
709static struct clear_alias_mode_holder *
32c2fdea 710clear_alias_set_lookup (alias_set_type alias_set)
3072d30e 711{
712 struct clear_alias_mode_holder tmp_holder;
713 void **slot;
48e1416a 714
3072d30e 715 tmp_holder.alias_set = alias_set;
716 slot = htab_find_slot (clear_alias_mode_table, &tmp_holder, NO_INSERT);
717 gcc_assert (*slot);
48e1416a 718
364c0c59 719 return (struct clear_alias_mode_holder *) *slot;
3072d30e 720}
721
722
723/* Hashtable callbacks for maintaining the "bases" field of
724 store_group_info, given that the addresses are function invariants. */
725
770ff93b 726struct invariant_group_base_hasher : nofree_ptr_hash <group_info>
d1455aa3 727{
9969c043 728 static inline hashval_t hash (const group_info *);
729 static inline bool equal (const group_info *, const group_info *);
d1455aa3 730};
731
732inline bool
9969c043 733invariant_group_base_hasher::equal (const group_info *gi1,
734 const group_info *gi2)
3072d30e 735{
3072d30e 736 return rtx_equal_p (gi1->rtx_base, gi2->rtx_base);
737}
738
d1455aa3 739inline hashval_t
9969c043 740invariant_group_base_hasher::hash (const group_info *gi)
3072d30e 741{
3072d30e 742 int do_not_record;
743 return hash_rtx (gi->rtx_base, Pmode, &do_not_record, NULL, false);
744}
745
d1455aa3 746/* Tables of group_info structures, hashed by base value. */
c1f445d2 747static hash_table<invariant_group_base_hasher> *rtx_group_table;
d1455aa3 748
3072d30e 749
750/* Get the GROUP for BASE. Add a new group if it is not there. */
751
752static group_info_t
753get_group_info (rtx base)
754{
48e1416a 755 struct group_info tmp_gi;
756 group_info_t gi;
d1455aa3 757 group_info **slot;
3072d30e 758
759 if (base)
760 {
761 /* Find the store_base_info structure for BASE, creating a new one
762 if necessary. */
763 tmp_gi.rtx_base = base;
c1f445d2 764 slot = rtx_group_table->find_slot (&tmp_gi, INSERT);
3072d30e 765 gi = (group_info_t) *slot;
766 }
767 else
768 {
769 if (!clear_alias_group)
770 {
55c5ac9f 771 clear_alias_group = gi = new group_info;
3072d30e 772 memset (gi, 0, sizeof (struct group_info));
773 gi->id = rtx_group_next_id++;
4fb07d00 774 gi->store1_n = BITMAP_ALLOC (&dse_bitmap_obstack);
775 gi->store1_p = BITMAP_ALLOC (&dse_bitmap_obstack);
776 gi->store2_n = BITMAP_ALLOC (&dse_bitmap_obstack);
777 gi->store2_p = BITMAP_ALLOC (&dse_bitmap_obstack);
778 gi->escaped_p = BITMAP_ALLOC (&dse_bitmap_obstack);
779 gi->escaped_n = BITMAP_ALLOC (&dse_bitmap_obstack);
780 gi->group_kill = BITMAP_ALLOC (&dse_bitmap_obstack);
3072d30e 781 gi->process_globally = false;
782 gi->offset_map_size_n = 0;
783 gi->offset_map_size_p = 0;
784 gi->offset_map_n = NULL;
785 gi->offset_map_p = NULL;
f1f41a6c 786 rtx_group_vec.safe_push (gi);
3072d30e 787 }
788 return clear_alias_group;
789 }
790
791 if (gi == NULL)
792 {
55c5ac9f 793 *slot = gi = new group_info;
3072d30e 794 gi->rtx_base = base;
795 gi->id = rtx_group_next_id++;
ec410bf1 796 gi->base_mem = gen_rtx_MEM (BLKmode, base);
82d2c88b 797 gi->canon_base_addr = canon_rtx (base);
4fb07d00 798 gi->store1_n = BITMAP_ALLOC (&dse_bitmap_obstack);
799 gi->store1_p = BITMAP_ALLOC (&dse_bitmap_obstack);
800 gi->store2_n = BITMAP_ALLOC (&dse_bitmap_obstack);
801 gi->store2_p = BITMAP_ALLOC (&dse_bitmap_obstack);
802 gi->escaped_p = BITMAP_ALLOC (&dse_bitmap_obstack);
803 gi->escaped_n = BITMAP_ALLOC (&dse_bitmap_obstack);
804 gi->group_kill = BITMAP_ALLOC (&dse_bitmap_obstack);
3072d30e 805 gi->process_globally = false;
48e1416a 806 gi->frame_related =
3072d30e 807 (base == frame_pointer_rtx) || (base == hard_frame_pointer_rtx);
808 gi->offset_map_size_n = 0;
809 gi->offset_map_size_p = 0;
810 gi->offset_map_n = NULL;
811 gi->offset_map_p = NULL;
f1f41a6c 812 rtx_group_vec.safe_push (gi);
3072d30e 813 }
814
815 return gi;
816}
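
/* For example, get_group_info (frame_pointer_rtx) creates (on first use) a
   group with frame_related set; later calls with an rtx_equal_p base return
   the same group_info, so its id is a stable index into rtx_group_vec.  */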
817
818
819/* Initialization of data structures. */
820
821static void
822dse_step0 (void)
823{
824 locally_deleted = 0;
825 globally_deleted = 0;
826 spill_deleted = 0;
827
4fb07d00 828 bitmap_obstack_initialize (&dse_bitmap_obstack);
829 gcc_obstack_init (&dse_obstack);
830
831 scratch = BITMAP_ALLOC (&reg_obstack);
832 kill_on_calls = BITMAP_ALLOC (&dse_bitmap_obstack);
3072d30e 833
3072d30e 834
c1f445d2 835 rtx_group_table = new hash_table<invariant_group_base_hasher> (11);
3072d30e 836
fe672ac0 837 bb_table = XNEWVEC (bb_info_t, last_basic_block_for_fn (cfun));
3072d30e 838 rtx_group_next_id = 0;
839
18d50ae6 840 stores_off_frame_dead_at_return = !cfun->stdarg;
3072d30e 841
842 init_alias_analysis ();
48e1416a 843
e85eaec5 844 clear_alias_group = NULL;
3072d30e 845}
846
847
848\f
849/*----------------------------------------------------------------------------
850 First step.
851
852 Scan all of the insns. Any random ordering of the blocks is fine.
bef304b8 853 Each block is scanned in forward order to accommodate cselib which
3072d30e 854 is used to remove stores with non-constant bases.
855----------------------------------------------------------------------------*/
856
857/* Delete all of the store_info recs from INSN_INFO. */
858
48e1416a 859static void
3072d30e 860free_store_info (insn_info_t insn_info)
861{
862 store_info_t store_info = insn_info->store_rec;
863 while (store_info)
864 {
865 store_info_t next = store_info->next;
aa140b76 866 if (store_info->is_large)
843bd2fa 867 BITMAP_FREE (store_info->positions_needed.large.bmap);
3072d30e 868 if (store_info->cse_base)
55c5ac9f 869 cse_store_info_pool.remove (store_info);
3072d30e 870 else
55c5ac9f 871 rtx_store_info_pool.remove (store_info);
3072d30e 872 store_info = next;
873 }
874
875 insn_info->cannot_delete = true;
876 insn_info->contains_cselib_groups = false;
877 insn_info->store_rec = NULL;
878}
879
5a9ecd4a 880typedef struct
881{
4cd001d5 882 rtx_insn *first, *current;
5a9ecd4a 883 regset fixed_regs_live;
884 bool failure;
885} note_add_store_info;
886
887/* Callback for emit_inc_dec_insn_before via note_stores.
888 Check if a register is clobbered which is live afterwards. */
889
890static void
891note_add_store (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *data)
892{
4cd001d5 893 rtx_insn *insn;
5a9ecd4a 894 note_add_store_info *info = (note_add_store_info *) data;
5a9ecd4a 895
896 if (!REG_P (loc))
897 return;
898
899 /* If this register is referenced by the current or an earlier insn,
900 that's OK. E.g. this applies to the register that is being incremented
901 with this addition. */
902 for (insn = info->first;
903 insn != NEXT_INSN (info->current);
904 insn = NEXT_INSN (insn))
905 if (reg_referenced_p (loc, PATTERN (insn)))
906 return;
907
908 /* If we come here, we have a clobber of a register that's only OK
909 if that register is not live. If we don't have liveness information
910 available, fail now. */
911 if (!info->fixed_regs_live)
912 {
6a298741 913 info->failure = true;
5a9ecd4a 914 return;
915 }
916 /* Now check if this is a live fixed register. */
6a298741 917 unsigned int end_regno = END_REGNO (loc);
918 for (unsigned int regno = REGNO (loc); regno < end_regno; ++regno)
919 if (REGNO_REG_SET_P (info->fixed_regs_live, regno))
920 info->failure = true;
5a9ecd4a 921}
922
1f864115 923/* Callback for for_each_inc_dec that emits an INSN that sets DEST to
924 SRC + SRCOFF before insn ARG. */
3072d30e 925
926static int
1f864115 927emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED,
928 rtx op ATTRIBUTE_UNUSED,
929 rtx dest, rtx src, rtx srcoff, void *arg)
3072d30e 930{
5a9ecd4a 931 insn_info_t insn_info = (insn_info_t) arg;
4cd001d5 932 rtx_insn *insn = insn_info->insn, *new_insn, *cur;
5a9ecd4a 933 note_add_store_info info;
48e1416a 934
1f864115 935 /* We can reuse all operands without copying, because we are about
936 to delete the insn that contained it. */
5a9ecd4a 937 if (srcoff)
a280a5da 938 {
939 start_sequence ();
940 emit_insn (gen_add3_insn (dest, src, srcoff));
941 new_insn = get_insns ();
942 end_sequence ();
943 }
5a9ecd4a 944 else
f9a00e9e 945 new_insn = gen_move_insn (dest, src);
5a9ecd4a 946 info.first = new_insn;
947 info.fixed_regs_live = insn_info->fixed_regs_live;
948 info.failure = false;
949 for (cur = new_insn; cur; cur = NEXT_INSN (cur))
950 {
951 info.current = cur;
952 note_stores (PATTERN (cur), note_add_store, &info);
953 }
3072d30e 954
5a9ecd4a 955 /* If a failure was flagged above, return 1 so that for_each_inc_dec will
956 return it immediately, communicating the failure to its caller. */
957 if (info.failure)
958 return 1;
959
960 emit_insn_before (new_insn, insn);
3072d30e 961
623ad592 962 return 0;
3072d30e 963}
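
/* For instance (hypothetical), when deleting a dead store whose address is
   (post_inc (reg sp)), for_each_inc_dec calls this hook with DEST = SRC =
   (reg sp) and a nonzero SRCOFF, and the increment is preserved by emitting
   the equivalent add before the doomed insn.  */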
964
5a9ecd4a 965/* Before we delete INSN_INFO->INSN, make sure that the auto inc/dec, if it
966 is there, is split into a separate insn.
967 Return true on success (or if there was nothing to do), false on failure. */
3072d30e 968
5a9ecd4a 969static bool
970check_for_inc_dec_1 (insn_info_t insn_info)
3072d30e 971{
ebabb7a3 972 rtx_insn *insn = insn_info->insn;
3072d30e 973 rtx note = find_reg_note (insn, REG_INC, NULL_RTX);
974 if (note)
623ad592 975 return for_each_inc_dec (PATTERN (insn), emit_inc_dec_insn_before,
976 insn_info) == 0;
5a9ecd4a 977 return true;
3072d30e 978}
979
980
5a9ecd4a 981/* Entry point for postreload. If you work on reload_cse, or you need this
982 anywhere else, consider if you can provide register liveness information
983 and add a parameter to this function so that it can be passed down in
984 insn_info.fixed_regs_live. */
985bool
ebabb7a3 986check_for_inc_dec (rtx_insn *insn)
5a9ecd4a 987{
55c5ac9f 988 insn_info_type insn_info;
5a9ecd4a 989 rtx note;
990
991 insn_info.insn = insn;
992 insn_info.fixed_regs_live = NULL;
993 note = find_reg_note (insn, REG_INC, NULL_RTX);
994 if (note)
623ad592 995 return for_each_inc_dec (PATTERN (insn), emit_inc_dec_insn_before,
996 &insn_info) == 0;
5a9ecd4a 997 return true;
998}
999
48e1416a 1000/* Delete the insn and free all of the fields inside INSN_INFO. */
3072d30e 1001
1002static void
1003delete_dead_store_insn (insn_info_t insn_info)
1004{
1005 read_info_t read_info;
1006
1007 if (!dbg_cnt (dse))
1008 return;
1009
5a9ecd4a 1010 if (!check_for_inc_dec_1 (insn_info))
1011 return;
1ca59310 1012 if (dump_file && (dump_flags & TDF_DETAILS))
3072d30e 1013 {
48e1416a 1014 fprintf (dump_file, "Locally deleting insn %d ",
3072d30e 1015 INSN_UID (insn_info->insn));
1016 if (insn_info->store_rec->alias_set)
48e1416a 1017 fprintf (dump_file, "alias set %d\n",
32c2fdea 1018 (int) insn_info->store_rec->alias_set);
3072d30e 1019 else
1020 fprintf (dump_file, "\n");
1021 }
1022
1023 free_store_info (insn_info);
1024 read_info = insn_info->read_rec;
48e1416a 1025
3072d30e 1026 while (read_info)
1027 {
1028 read_info_t next = read_info->next;
55c5ac9f 1029 delete read_info;
3072d30e 1030 read_info = next;
1031 }
1032 insn_info->read_rec = NULL;
1033
1034 delete_insn (insn_info->insn);
1035 locally_deleted++;
1036 insn_info->insn = NULL;
1037
1038 insn_info->wild_read = false;
1039}
1040
f7b5f694 1041/* Return whether DECL, a local variable, can possibly escape the current
1042 function scope. */
1043
1044static bool
1045local_variable_can_escape (tree decl)
1046{
1047 if (TREE_ADDRESSABLE (decl))
1048 return true;
1049
1050 /* If this is a partitioned variable, we need to consider all the variables
1051 in the partition. This is necessary because a store into one of them can
1052 be replaced with a store into another and this may not change the outcome
1053 of the escape analysis. */
1054 if (cfun->gimple_df->decls_to_pointers != NULL)
1055 {
5f8841a5 1056 tree *namep = cfun->gimple_df->decls_to_pointers->get (decl);
f7b5f694 1057 if (namep)
5f8841a5 1058 return TREE_ADDRESSABLE (*namep);
f7b5f694 1059 }
1060
1061 return false;
1062}
1063
1064/* Return whether EXPR can possibly escape the current function scope. */
1065
b4a708fb 1066static bool
1067can_escape (tree expr)
1068{
1069 tree base;
1070 if (!expr)
1071 return true;
1072 base = get_base_address (expr);
1073 if (DECL_P (base)
f7b5f694 1074 && !may_be_aliased (base)
1075 && !(TREE_CODE (base) == VAR_DECL
1076 && !DECL_EXTERNAL (base)
1077 && !TREE_STATIC (base)
1078 && local_variable_can_escape (base)))
b4a708fb 1079 return false;
1080 return true;
1081}
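
/* A small sketch of what this distinction buys (hypothetical code, not a
   real test case):

     void g (void);
     int f (void) { int x; x = 1; g (); x = 2; return x; }

   The address of x is never taken, so the store "x = 1" cannot escape and
   the call to g cannot observe it; the corresponding frame store is
   therefore still removable across the call.  */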
3072d30e 1082
1083/* Set the store* bitmaps offset_map_size* fields in GROUP based on
1084 OFFSET and WIDTH. */
1085
1086static void
b4a708fb 1087set_usage_bits (group_info_t group, HOST_WIDE_INT offset, HOST_WIDE_INT width,
1088 tree expr)
3072d30e 1089{
1090 HOST_WIDE_INT i;
b4a708fb 1091 bool expr_escapes = can_escape (expr);
aa140b76 1092 if (offset > -MAX_OFFSET && offset + width < MAX_OFFSET)
3072d30e 1093 for (i=offset; i<offset+width; i++)
1094 {
1095 bitmap store1;
1096 bitmap store2;
b4a708fb 1097 bitmap escaped;
3072d30e 1098 int ai;
1099 if (i < 0)
1100 {
1101 store1 = group->store1_n;
1102 store2 = group->store2_n;
b4a708fb 1103 escaped = group->escaped_n;
3072d30e 1104 ai = -i;
1105 }
1106 else
1107 {
1108 store1 = group->store1_p;
1109 store2 = group->store2_p;
b4a708fb 1110 escaped = group->escaped_p;
3072d30e 1111 ai = i;
1112 }
48e1416a 1113
6ef9bbe0 1114 if (!bitmap_set_bit (store1, ai))
3072d30e 1115 bitmap_set_bit (store2, ai);
48e1416a 1116 else
3072d30e 1117 {
3072d30e 1118 if (i < 0)
1119 {
1120 if (group->offset_map_size_n < ai)
1121 group->offset_map_size_n = ai;
1122 }
1123 else
1124 {
1125 if (group->offset_map_size_p < ai)
1126 group->offset_map_size_p = ai;
1127 }
1128 }
b4a708fb 1129 if (expr_escapes)
1130 bitmap_set_bit (escaped, ai);
3072d30e 1131 }
1132}
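
/* For example (hypothetical numbers), a 4-byte store at offset 8 from a
   tracked base sets bits 8..11 in store1_p the first time around; if a
   later store covers offset 10 as well, bit 10 also lands in store2_p,
   making that position interesting for the global problem.  If the store's
   MEM_EXPR can escape, the same bits are set in escaped_p.  */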
1133
b4a708fb 1134static void
1135reset_active_stores (void)
1136{
1137 active_local_stores = NULL;
1138 active_local_stores_len = 0;
1139}
3072d30e 1140
b4a708fb 1141/* Free all READ_REC of the LAST_INSN of BB_INFO. */
3072d30e 1142
1143static void
b4a708fb 1144free_read_records (bb_info_t bb_info)
3072d30e 1145{
1146 insn_info_t insn_info = bb_info->last_insn;
1147 read_info_t *ptr = &insn_info->read_rec;
3072d30e 1148 while (*ptr)
1149 {
1150 read_info_t next = (*ptr)->next;
32c2fdea 1151 if ((*ptr)->alias_set == 0)
3072d30e 1152 {
55c5ac9f 1153 delete *ptr;
3072d30e 1154 *ptr = next;
b4a708fb 1155 }
48e1416a 1156 else
b4a708fb 1157 ptr = &(*ptr)->next;
3072d30e 1158 }
b4a708fb 1159}
1160
1161/* Set the BB_INFO so that the last insn is marked as a wild read. */
1162
1163static void
1164add_wild_read (bb_info_t bb_info)
1165{
1166 insn_info_t insn_info = bb_info->last_insn;
3072d30e 1167 insn_info->wild_read = true;
b4a708fb 1168 free_read_records (bb_info);
1169 reset_active_stores ();
3072d30e 1170}
1171
b4a708fb 1172/* Set the BB_INFO so that the last insn is marked as a wild read of
1173 non-frame locations. */
1174
1175static void
1176add_non_frame_wild_read (bb_info_t bb_info)
1177{
1178 insn_info_t insn_info = bb_info->last_insn;
1179 insn_info->non_frame_wild_read = true;
1180 free_read_records (bb_info);
1181 reset_active_stores ();
1182}
3072d30e 1183
17e1318c 1184/* Return true if X is a constant or one of the registers that behave
1185 as a constant over the life of a function. This is equivalent to
1186 !rtx_varies_p for memory addresses. */
3072d30e 1187
1188static bool
1189const_or_frame_p (rtx x)
1190{
0349edce 1191 if (CONSTANT_P (x))
1192 return true;
1193
1194 if (GET_CODE (x) == REG)
3072d30e 1195 {
3072d30e 1196 /* Note that we have to test for the actual rtx used for the frame
1197 and arg pointers and not just the register number in case we have
1198 eliminated the frame and/or arg pointer and are using it
1199 for pseudos. */
1200 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
1201 /* The arg pointer varies if it is not a fixed register. */
1202 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
1203 || x == pic_offset_table_rtx)
1204 return true;
1205 return false;
3072d30e 1206 }
55c5ac9f 1207
0349edce 1208 return false;
3072d30e 1209}
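
/* E.g. (symbol_ref "x"), frame_pointer_rtx and hard_frame_pointer_rtx all
   count as "constant enough" here, while an arbitrary pseudo register does
   not.  */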
1210
48e1416a 1211/* Take all reasonable action to put the address of MEM into the form
1212 that we can do analysis on.
3072d30e 1213
1214 The gold standard is to get the address into the form: address +
1215 OFFSET where address is something that rtx_varies_p considers a
1216 constant. When we can get the address in this form, we can do
1217 global analysis on it. Note that for constant bases, address is
1218 not actually returned, only the group_id. The address can be
1219 obtained from that.
1220
1221 If that fails, we try cselib to get a value we can at least use
48e1416a 1222 locally. If that fails we return false.
1223
3072d30e 1224 The GROUP_ID is set to -1 for cselib bases and the index of the
1225 group for non_varying bases.
1226
1227 FOR_READ is true if this is a mem read and false if not. */
1228
1229static bool
1230canon_address (rtx mem,
32c2fdea 1231 alias_set_type *alias_set_out,
3072d30e 1232 int *group_id,
48e1416a 1233 HOST_WIDE_INT *offset,
3072d30e 1234 cselib_val **base)
1235{
3754d046 1236 machine_mode address_mode = get_address_mode (mem);
3072d30e 1237 rtx mem_address = XEXP (mem, 0);
1238 rtx expanded_address, address;
627540ce 1239 int expanded;
1240
3072d30e 1241 *alias_set_out = 0;
1242
1f864115 1243 cselib_lookup (mem_address, address_mode, 1, GET_MODE (mem));
3072d30e 1244
1ca59310 1245 if (dump_file && (dump_flags & TDF_DETAILS))
3072d30e 1246 {
1247 fprintf (dump_file, " mem: ");
1248 print_inline_rtx (dump_file, mem_address, 0);
1249 fprintf (dump_file, "\n");
1250 }
1251
627540ce 1252 /* First see if just canon_rtx (mem_address) is const or frame,
1253 if not, try cselib_expand_value_rtx and call canon_rtx on that. */
1254 address = NULL_RTX;
1255 for (expanded = 0; expanded < 2; expanded++)
1256 {
1257 if (expanded)
1258 {
1259 /* Use cselib to replace all of the reg references with the full
48e1416a 1260 expression. This will take care of the case where we have
3072d30e 1261
627540ce 1262 r_x = base + offset;
1263 val = *r_x;
48e1416a 1264
1265 by making it into
3072d30e 1266
627540ce 1267 val = *(base + offset); */
3072d30e 1268
627540ce 1269 expanded_address = cselib_expand_value_rtx (mem_address,
1270 scratch, 5);
3072d30e 1271
627540ce 1272 /* If this fails, just go with the address from first
1273 iteration. */
1274 if (!expanded_address)
1275 break;
1276 }
1277 else
1278 expanded_address = mem_address;
3072d30e 1279
627540ce 1280 /* Split the address into canonical BASE + OFFSET terms. */
1281 address = canon_rtx (expanded_address);
3072d30e 1282
627540ce 1283 *offset = 0;
3072d30e 1284
1ca59310 1285 if (dump_file && (dump_flags & TDF_DETAILS))
627540ce 1286 {
1287 if (expanded)
1288 {
1289 fprintf (dump_file, "\n after cselib_expand address: ");
1290 print_inline_rtx (dump_file, expanded_address, 0);
1291 fprintf (dump_file, "\n");
1292 }
3072d30e 1293
627540ce 1294 fprintf (dump_file, "\n after canon_rtx address: ");
1295 print_inline_rtx (dump_file, address, 0);
1296 fprintf (dump_file, "\n");
1297 }
3072d30e 1298
627540ce 1299 if (GET_CODE (address) == CONST)
1300 address = XEXP (address, 0);
3072d30e 1301
627540ce 1302 if (GET_CODE (address) == PLUS
1303 && CONST_INT_P (XEXP (address, 1)))
1304 {
1305 *offset = INTVAL (XEXP (address, 1));
1306 address = XEXP (address, 0);
1307 }
3072d30e 1308
bd1a81f7 1309 if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (mem))
1310 && const_or_frame_p (address))
3072d30e 1311 {
627540ce 1312 group_info_t group = get_group_info (address);
1313
1ca59310 1314 if (dump_file && (dump_flags & TDF_DETAILS))
627540ce 1315 fprintf (dump_file, " gid=%d offset=%d \n",
1316 group->id, (int)*offset);
1317 *base = NULL;
1318 *group_id = group->id;
1319 return true;
3072d30e 1320 }
627540ce 1321 }
1322
1f864115 1323 *base = cselib_lookup (address, address_mode, true, GET_MODE (mem));
627540ce 1324 *group_id = -1;
1325
1326 if (*base == NULL)
1327 {
1ca59310 1328 if (dump_file && (dump_flags & TDF_DETAILS))
627540ce 1329 fprintf (dump_file, " no cselib val - should be a wild read.\n");
1330 return false;
3072d30e 1331 }
1ca59310 1332 if (dump_file && (dump_flags & TDF_DETAILS))
01df1184 1333 fprintf (dump_file, " varying cselib base=%u:%u offset = %d\n",
1334 (*base)->uid, (*base)->hash, (int)*offset);
3072d30e 1335 return true;
1336}
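
/* As a hypothetical illustration: for a mem whose canonical address is
   (plus (reg/f fp) (const_int -16)) this returns true with *GROUP_ID set to
   the id of the fp group and *OFFSET == -16; for an address based on an
   arbitrary pseudo it instead records the cselib value in *BASE and sets
   *GROUP_ID to -1.  */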
1337
1338
1339/* Clear the rhs field from the active_local_stores array. */
1340
1341static void
1342clear_rhs_from_active_local_stores (void)
1343{
1344 insn_info_t ptr = active_local_stores;
1345
1346 while (ptr)
1347 {
1348 store_info_t store_info = ptr->store_rec;
1349 /* Skip the clobbers. */
1350 while (!store_info->is_set)
1351 store_info = store_info->next;
1352
1353 store_info->rhs = NULL;
aa140b76 1354 store_info->const_rhs = NULL;
3072d30e 1355
1356 ptr = ptr->next_local_store;
1357 }
1358}
1359
1360
aa140b76 1361/* Mark the byte at offset POS from the beginning of store S_INFO as unneeded. */
1362
1363static inline void
1364set_position_unneeded (store_info_t s_info, int pos)
1365{
1366 if (__builtin_expect (s_info->is_large, false))
1367 {
6ef9bbe0 1368 if (bitmap_set_bit (s_info->positions_needed.large.bmap, pos))
1369 s_info->positions_needed.large.count++;
aa140b76 1370 }
1371 else
1372 s_info->positions_needed.small_bitmask
1373 &= ~(((unsigned HOST_WIDE_INT) 1) << pos);
1374}
1375
1376/* Mark the whole store S_INFO as unneeded. */
1377
1378static inline void
1379set_all_positions_unneeded (store_info_t s_info)
1380{
1381 if (__builtin_expect (s_info->is_large, false))
1382 {
1383 int pos, end = s_info->end - s_info->begin;
1384 for (pos = 0; pos < end; pos++)
843bd2fa 1385 bitmap_set_bit (s_info->positions_needed.large.bmap, pos);
aa140b76 1386 s_info->positions_needed.large.count = end;
1387 }
1388 else
1389 s_info->positions_needed.small_bitmask = (unsigned HOST_WIDE_INT) 0;
1390}
1391
1392/* Return TRUE if any bytes from S_INFO store are needed. */
1393
1394static inline bool
1395any_positions_needed_p (store_info_t s_info)
1396{
1397 if (__builtin_expect (s_info->is_large, false))
1398 return (s_info->positions_needed.large.count
1399 < s_info->end - s_info->begin);
1400 else
1401 return (s_info->positions_needed.small_bitmask
1402 != (unsigned HOST_WIDE_INT) 0);
1403}
1404
1405/* Return TRUE if all bytes START through START+WIDTH-1 from S_INFO
1406 store are needed. */
1407
1408static inline bool
1409all_positions_needed_p (store_info_t s_info, int start, int width)
1410{
1411 if (__builtin_expect (s_info->is_large, false))
1412 {
1413 int end = start + width;
1414 while (start < end)
843bd2fa 1415 if (bitmap_bit_p (s_info->positions_needed.large.bmap, start++))
aa140b76 1416 return false;
1417 return true;
1418 }
1419 else
1420 {
1421 unsigned HOST_WIDE_INT mask = lowpart_bitmask (width) << start;
1422 return (s_info->positions_needed.small_bitmask & mask) == mask;
1423 }
1424}
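
/* A small worked example (assuming a 64-bit HOST_WIDE_INT, !IS_LARGE case):
   a 4-byte store starts with small_bitmask == 0xf; if a later store covers
   its first two bytes, set_position_unneeded clears bits 0 and 1 leaving
   0xc, so any_positions_needed_p still returns true but
   all_positions_needed_p (s_info, 0, 2) now returns false.  */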
1425
1426
3754d046 1427static rtx get_stored_val (store_info_t, machine_mode, HOST_WIDE_INT,
aa140b76 1428 HOST_WIDE_INT, basic_block, bool);
1429
1430
3072d30e 1431/* BODY is an instruction pattern that belongs to INSN. Return 1 if
1432 there is a candidate store, after adding it to the appropriate
1433 local store group if so. */
1434
1435static int
1436record_store (rtx body, bb_info_t bb_info)
1437{
82d2c88b 1438 rtx mem, rhs, const_rhs, mem_addr;
3072d30e 1439 HOST_WIDE_INT offset = 0;
1440 HOST_WIDE_INT width = 0;
32c2fdea 1441 alias_set_type spill_alias_set;
3072d30e 1442 insn_info_t insn_info = bb_info->last_insn;
1443 store_info_t store_info = NULL;
1444 int group_id;
1445 cselib_val *base = NULL;
aa140b76 1446 insn_info_t ptr, last, redundant_reason;
3072d30e 1447 bool store_is_unused;
1448
1449 if (GET_CODE (body) != SET && GET_CODE (body) != CLOBBER)
1450 return 0;
1451
aa140b76 1452 mem = SET_DEST (body);
1453
3072d30e 1454 /* If this is not used, then this cannot be used to keep the insn
1455 from being deleted. On the other hand, it does provide something
1456 that can be used to prove that another store is dead. */
1457 store_is_unused
aa140b76 1458 = (find_reg_note (insn_info->insn, REG_UNUSED, mem) != NULL);
3072d30e 1459
1460 /* Check whether that value is a suitable memory location. */
3072d30e 1461 if (!MEM_P (mem))
1462 {
1463 /* If the set or clobber is unused, then it does not affect our
1464 ability to get rid of the entire insn. */
1465 if (!store_is_unused)
1466 insn_info->cannot_delete = true;
1467 return 0;
1468 }
1469
1470 /* At this point we know mem is a mem. */
1471 if (GET_MODE (mem) == BLKmode)
1472 {
1473 if (GET_CODE (XEXP (mem, 0)) == SCRATCH)
1474 {
1ca59310 1475 if (dump_file && (dump_flags & TDF_DETAILS))
3072d30e 1476 fprintf (dump_file, " adding wild read for (clobber (mem:BLK (scratch))\n");
1477 add_wild_read (bb_info);
1478 insn_info->cannot_delete = true;
aa140b76 1479 return 0;
3072d30e 1480 }
aa140b76 1481 /* Handle (set (mem:BLK (addr) [... S36 ...]) (const_int 0))
1482 as memset (addr, 0, 36); */
5b2a69fa 1483 else if (!MEM_SIZE_KNOWN_P (mem)
1484 || MEM_SIZE (mem) <= 0
1485 || MEM_SIZE (mem) > MAX_OFFSET
aa140b76 1486 || GET_CODE (body) != SET
aa140b76 1487 || !CONST_INT_P (SET_SRC (body)))
3072d30e 1488 {
aa140b76 1489 if (!store_is_unused)
1490 {
1491 /* If the set or clobber is unused, then it does not affect our
1492 ability to get rid of the entire insn. */
1493 insn_info->cannot_delete = true;
1494 clear_rhs_from_active_local_stores ();
1495 }
1496 return 0;
3072d30e 1497 }
3072d30e 1498 }
1499
1500 /* We can still process a volatile mem; we just cannot delete it. */
1501 if (MEM_VOLATILE_P (mem))
aa140b76 1502 insn_info->cannot_delete = true;
3072d30e 1503
1504 if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
1505 {
1506 clear_rhs_from_active_local_stores ();
1507 return 0;
1508 }
1509
aa140b76 1510 if (GET_MODE (mem) == BLKmode)
5b2a69fa 1511 width = MEM_SIZE (mem);
aa140b76 1512 else
e4209874 1513 width = GET_MODE_SIZE (GET_MODE (mem));
3072d30e 1514
1515 if (spill_alias_set)
1516 {
1517 bitmap store1 = clear_alias_group->store1_p;
1518 bitmap store2 = clear_alias_group->store2_p;
aa140b76 1519
1520 gcc_assert (GET_MODE (mem) != BLKmode);
48e1416a 1521
6ef9bbe0 1522 if (!bitmap_set_bit (store1, spill_alias_set))
3072d30e 1523 bitmap_set_bit (store2, spill_alias_set);
48e1416a 1524
3072d30e 1525 if (clear_alias_group->offset_map_size_p < spill_alias_set)
1526 clear_alias_group->offset_map_size_p = spill_alias_set;
48e1416a 1527
55c5ac9f 1528 store_info = rtx_store_info_pool.allocate ();
3072d30e 1529
1ca59310 1530 if (dump_file && (dump_flags & TDF_DETAILS))
3072d30e 1531 fprintf (dump_file, " processing spill store %d(%s)\n",
32c2fdea 1532 (int) spill_alias_set, GET_MODE_NAME (GET_MODE (mem)));
3072d30e 1533 }
1534 else if (group_id >= 0)
1535 {
1536 /* In the restrictive case where the base is a constant or the
1537 frame pointer we can do global analysis. */
48e1416a 1538
1539 group_info_t group
f1f41a6c 1540 = rtx_group_vec[group_id];
b4a708fb 1541 tree expr = MEM_EXPR (mem);
48e1416a 1542
55c5ac9f 1543 store_info = rtx_store_info_pool.allocate ();
b4a708fb 1544 set_usage_bits (group, offset, width, expr);
3072d30e 1545
1ca59310 1546 if (dump_file && (dump_flags & TDF_DETAILS))
3072d30e 1547 fprintf (dump_file, " processing const base store gid=%d[%d..%d)\n",
1548 group_id, (int)offset, (int)(offset+width));
1549 }
1550 else
1551 {
86e87ef6 1552 if (may_be_sp_based_p (XEXP (mem, 0)))
17e1318c 1553 insn_info->stack_pointer_based = true;
3072d30e 1554 insn_info->contains_cselib_groups = true;
17e1318c 1555
55c5ac9f 1556 store_info = cse_store_info_pool.allocate ();
3072d30e 1557 group_id = -1;
1558
1ca59310 1559 if (dump_file && (dump_flags & TDF_DETAILS))
3072d30e 1560 fprintf (dump_file, " processing cselib store [%d..%d)\n",
1561 (int)offset, (int)(offset+width));
1562 }
1563
aa140b76 1564 const_rhs = rhs = NULL_RTX;
1565 if (GET_CODE (body) == SET
1566 /* No place to keep the value after ra. */
1567 && !reload_completed
1568 && (REG_P (SET_SRC (body))
1569 || GET_CODE (SET_SRC (body)) == SUBREG
1570 || CONSTANT_P (SET_SRC (body)))
1571 && !MEM_VOLATILE_P (mem)
1572 /* Sometimes the store and reload is used for truncation and
1573 rounding. */
1574 && !(FLOAT_MODE_P (GET_MODE (mem)) && (flag_float_store)))
1575 {
1576 rhs = SET_SRC (body);
1577 if (CONSTANT_P (rhs))
1578 const_rhs = rhs;
1579 else if (body == PATTERN (insn_info->insn))
1580 {
1581 rtx tem = find_reg_note (insn_info->insn, REG_EQUAL, NULL_RTX);
1582 if (tem && CONSTANT_P (XEXP (tem, 0)))
1583 const_rhs = XEXP (tem, 0);
1584 }
1585 if (const_rhs == NULL_RTX && REG_P (rhs))
1586 {
1587 rtx tem = cselib_expand_value_rtx (rhs, scratch, 5);
1588
1589 if (tem && CONSTANT_P (tem))
1590 const_rhs = tem;
1591 }
1592 }
1593
3072d30e 1594 /* Check to see if this store causes some other stores to be
1595 dead. */
1596 ptr = active_local_stores;
1597 last = NULL;
aa140b76 1598 redundant_reason = NULL;
82d2c88b 1599 mem = canon_rtx (mem);
1600 /* For alias_set != 0 canon_true_dependence should be never called. */
1601 if (spill_alias_set)
1602 mem_addr = NULL_RTX;
1603 else
1604 {
1605 if (group_id < 0)
1606 mem_addr = base->val_rtx;
1607 else
1608 {
1609 group_info_t group
f1f41a6c 1610 = rtx_group_vec[group_id];
82d2c88b 1611 mem_addr = group->canon_base_addr;
1612 }
90f3e775 1613 /* get_addr can only handle VALUE, not an expr like
 1614 VALUE + OFFSET, so call get_addr on mem_addr before adding
 1615 the offset with plus_constant. */
1616 mem_addr = get_addr (mem_addr);
82d2c88b 1617 if (offset)
87cf5753 1618 mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
82d2c88b 1619 }
3072d30e 1620
1621 while (ptr)
1622 {
1623 insn_info_t next = ptr->next_local_store;
1624 store_info_t s_info = ptr->store_rec;
9ce37fa7 1625 bool del = true;
3072d30e 1626
1627 /* Skip the clobbers. We delete the active insn if this insn
6dfdc153 1628 shadows the set. To have been put on the active list, it
3072d30e 1629 has exactly one set. */
1630 while (!s_info->is_set)
1631 s_info = s_info->next;
1632
1633 if (s_info->alias_set != spill_alias_set)
9ce37fa7 1634 del = false;
3072d30e 1635 else if (s_info->alias_set)
1636 {
48e1416a 1637 struct clear_alias_mode_holder *entry
3072d30e 1638 = clear_alias_set_lookup (s_info->alias_set);
1639 /* Generally, spills cannot be processed if and of the
1640 references to the slot have a different mode. But if
1641 we are in the same block and mode is exactly the same
1642 between this store and one before in the same block,
1643 we can still delete it. */
1644 if ((GET_MODE (mem) == GET_MODE (s_info->mem))
1645 && (GET_MODE (mem) == entry->mode))
1646 {
9ce37fa7 1647 del = true;
aa140b76 1648 set_all_positions_unneeded (s_info);
3072d30e 1649 }
1ca59310 1650 if (dump_file && (dump_flags & TDF_DETAILS))
3072d30e 1651 fprintf (dump_file, " trying spill store in insn=%d alias_set=%d\n",
32c2fdea 1652 INSN_UID (ptr->insn), (int) s_info->alias_set);
3072d30e 1653 }
48e1416a 1654 else if ((s_info->group_id == group_id)
3072d30e 1655 && (s_info->cse_base == base))
1656 {
1657 HOST_WIDE_INT i;
1ca59310 1658 if (dump_file && (dump_flags & TDF_DETAILS))
3072d30e 1659 fprintf (dump_file, " trying store in insn=%d gid=%d[%d..%d)\n",
48e1416a 1660 INSN_UID (ptr->insn), s_info->group_id,
3072d30e 1661 (int)s_info->begin, (int)s_info->end);
aa140b76 1662
1663 /* Even if PTR won't be eliminated as unneeded, if both
1664 PTR and this insn store the same constant value, we might
1665 eliminate this insn instead. */
1666 if (s_info->const_rhs
1667 && const_rhs
1668 && offset >= s_info->begin
1669 && offset + width <= s_info->end
1670 && all_positions_needed_p (s_info, offset - s_info->begin,
1671 width))
1672 {
1673 if (GET_MODE (mem) == BLKmode)
1674 {
1675 if (GET_MODE (s_info->mem) == BLKmode
1676 && s_info->const_rhs == const_rhs)
1677 redundant_reason = ptr;
1678 }
1679 else if (s_info->const_rhs == const0_rtx
1680 && const_rhs == const0_rtx)
1681 redundant_reason = ptr;
1682 else
1683 {
1684 rtx val;
1685 start_sequence ();
1686 val = get_stored_val (s_info, GET_MODE (mem),
1687 offset, offset + width,
1688 BLOCK_FOR_INSN (insn_info->insn),
1689 true);
1690 if (get_insns () != NULL)
1691 val = NULL_RTX;
1692 end_sequence ();
1693 if (val && rtx_equal_p (val, const_rhs))
1694 redundant_reason = ptr;
1695 }
1696 }
1697
1698 for (i = MAX (offset, s_info->begin);
1699 i < offset + width && i < s_info->end;
1700 i++)
1701 set_position_unneeded (s_info, i - s_info->begin);
3072d30e 1702 }
1703 else if (s_info->rhs)
1704 /* Need to see if it is possible for this store to overwrite
1705 the value of store_info. If it is, set the rhs to NULL to
1706 keep it from being used to remove a load. */
1707 {
48e1416a 1708 if (canon_true_dependence (s_info->mem,
3072d30e 1709 GET_MODE (s_info->mem),
1710 s_info->mem_addr,
376a287d 1711 mem, mem_addr))
aa140b76 1712 {
1713 s_info->rhs = NULL;
1714 s_info->const_rhs = NULL;
1715 }
3072d30e 1716 }
82d2c88b 1717
3072d30e 1718 /* An insn can be deleted if every position of every one of
1719 its s_infos is zero. */
c75be2fe 1720 if (any_positions_needed_p (s_info))
9ce37fa7 1721 del = false;
aa140b76 1722
9ce37fa7 1723 if (del)
3072d30e 1724 {
1725 insn_info_t insn_to_delete = ptr;
48e1416a 1726
1242bee6 1727 active_local_stores_len--;
3072d30e 1728 if (last)
1729 last->next_local_store = ptr->next_local_store;
1730 else
1731 active_local_stores = ptr->next_local_store;
48e1416a 1732
c75be2fe 1733 if (!insn_to_delete->cannot_delete)
1734 delete_dead_store_insn (insn_to_delete);
3072d30e 1735 }
1736 else
1737 last = ptr;
48e1416a 1738
3072d30e 1739 ptr = next;
1740 }
48e1416a 1741
3072d30e 1742 /* Finish filling in the store_info. */
1743 store_info->next = insn_info->store_rec;
1744 insn_info->store_rec = store_info;
82d2c88b 1745 store_info->mem = mem;
3072d30e 1746 store_info->alias_set = spill_alias_set;
82d2c88b 1747 store_info->mem_addr = mem_addr;
3072d30e 1748 store_info->cse_base = base;
aa140b76 1749 if (width > HOST_BITS_PER_WIDE_INT)
1750 {
1751 store_info->is_large = true;
1752 store_info->positions_needed.large.count = 0;
4fb07d00 1753 store_info->positions_needed.large.bmap = BITMAP_ALLOC (&dse_bitmap_obstack);
aa140b76 1754 }
1755 else
1756 {
1757 store_info->is_large = false;
1758 store_info->positions_needed.small_bitmask = lowpart_bitmask (width);
1759 }
3072d30e 1760 store_info->group_id = group_id;
1761 store_info->begin = offset;
1762 store_info->end = offset + width;
1763 store_info->is_set = GET_CODE (body) == SET;
aa140b76 1764 store_info->rhs = rhs;
1765 store_info->const_rhs = const_rhs;
1766 store_info->redundant_reason = redundant_reason;
3072d30e 1767
3072d30e 1768 /* If this is a clobber, we return 0. We will only be able to
 1769 delete this insn if its only store is a real set, but we
1770 can use the clobber to delete other stores earlier. */
1771 return store_info->is_set ? 1 : 0;
1772}
1773
1774
1775static void
1776dump_insn_info (const char * start, insn_info_t insn_info)
1777{
48e1416a 1778 fprintf (dump_file, "%s insn=%d %s\n", start,
3072d30e 1779 INSN_UID (insn_info->insn),
1780 insn_info->store_rec ? "has store" : "naked");
1781}
1782
1783
5c9051a4 1784/* If the modes are different and the value's source and target do not
 1785 line up, we need to extract the value from the lower part of the rhs of
 1786 the store, shift it, and then put it into a form that can be shoved
 1787 into the read_insn. This function arranges a right shift by SHIFT
 1788 bits of a value that is at least ACCESS_SIZE bytes wide and returns
 1789 the result as a READ_MODE rtx, or NULL if no acceptable shift could
 1790 be found. */
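
/* For instance, on a 64-bit little-endian target, if a DImode value was
   stored and the upper SImode half of it is later read, the caller passes
   ACCESS_SIZE == 8 and SHIFT == 32; the loop below then looks for the
   narrowest integer mode of at least 8 bytes in which a single logical
   right shift by 32 costs no more than one insn.  */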
1791
1792static rtx
10d4de0e 1793find_shift_sequence (int access_size,
5c9051a4 1794 store_info_t store_info,
3754d046 1795 machine_mode read_mode,
aa140b76 1796 int shift, bool speed, bool require_cst)
5c9051a4 1797{
3754d046 1798 machine_mode store_mode = GET_MODE (store_info->mem);
1799 machine_mode new_mode;
10d4de0e 1800 rtx read_reg = NULL;
5c9051a4 1801
1802 /* Some machines like the x86 have shift insns for each size of
1803 operand. Other machines like the ppc or the ia-64 may only have
1804 shift insns that shift values within 32 or 64 bit registers.
1805 This loop tries to find the smallest shift insn that will right
1806 justify the value we want to read but is available in one insn on
1807 the machine. */
1808
10d4de0e 1809 for (new_mode = smallest_mode_for_size (access_size * BITS_PER_UNIT,
1810 MODE_INT);
1811 GET_MODE_BITSIZE (new_mode) <= BITS_PER_WORD;
1812 new_mode = GET_MODE_WIDER_MODE (new_mode))
5c9051a4 1813 {
4cd001d5 1814 rtx target, new_reg, new_lhs;
1815 rtx_insn *shift_seq, *insn;
171557e8 1816 int cost;
af97461e 1817
4ed4afb9 1818 /* If a constant was stored into memory, try to simplify it here,
1819 otherwise the cost of the shift might preclude this optimization
1820 e.g. at -Os, even when no actual shift will be needed. */
aa140b76 1821 if (store_info->const_rhs)
4ed4afb9 1822 {
1823 unsigned int byte = subreg_lowpart_offset (new_mode, store_mode);
aa140b76 1824 rtx ret = simplify_subreg (new_mode, store_info->const_rhs,
1825 store_mode, byte);
4ed4afb9 1826 if (ret && CONSTANT_P (ret))
1827 {
1828 ret = simplify_const_binary_operation (LSHIFTRT, new_mode,
1829 ret, GEN_INT (shift));
1830 if (ret && CONSTANT_P (ret))
1831 {
1832 byte = subreg_lowpart_offset (read_mode, new_mode);
1833 ret = simplify_subreg (read_mode, ret, new_mode, byte);
1834 if (ret && CONSTANT_P (ret)
7013e87c 1835 && set_src_cost (ret, speed) <= COSTS_N_INSNS (1))
4ed4afb9 1836 return ret;
1837 }
1838 }
1839 }
1840
aa140b76 1841 if (require_cst)
1842 return NULL_RTX;
1843
10d4de0e 1844 /* Try a wider mode if truncating the store mode to NEW_MODE
1845 requires a real instruction. */
1846 if (GET_MODE_BITSIZE (new_mode) < GET_MODE_BITSIZE (store_mode)
396f2130 1847 && !TRULY_NOOP_TRUNCATION_MODES_P (new_mode, store_mode))
af97461e 1848 continue;
1849
10d4de0e 1850 /* Also try a wider mode if the necessary punning is either not
1851 desirable or not possible. */
1852 if (!CONSTANT_P (store_info->rhs)
1853 && !MODES_TIEABLE_P (new_mode, store_mode))
1854 continue;
10d4de0e 1855
af97461e 1856 new_reg = gen_reg_rtx (new_mode);
5c9051a4 1857
1858 start_sequence ();
1859
1860 /* In theory we could also check for an ashr. Ian Taylor knows
1861 of one dsp where the cost of these two was not the same. But
1862 this really is a rare case anyway. */
1863 target = expand_binop (new_mode, lshr_optab, new_reg,
1864 GEN_INT (shift), new_reg, 1, OPTAB_DIRECT);
1865
597d5470 1866 shift_seq = get_insns ();
1867 end_sequence ();
5c9051a4 1868
597d5470 1869 if (target != new_reg || shift_seq == NULL)
1870 continue;
1871
1872 cost = 0;
1873 for (insn = shift_seq; insn != NULL_RTX; insn = NEXT_INSN (insn))
1874 if (INSN_P (insn))
f529eb25 1875 cost += insn_rtx_cost (PATTERN (insn), speed);
597d5470 1876
1877 /* The computation up to here is essentially independent
1878 of the arguments and could be precomputed. It may
1879 not be worth doing so. We could precompute if
1880 worthwhile or at least cache the results. The result
5133fd21 1881 technically depends on both SHIFT and ACCESS_SIZE,
1882 but in practice the answer will depend only on ACCESS_SIZE. */
597d5470 1883
1884 if (cost > COSTS_N_INSNS (1))
1885 continue;
1886
171557e8 1887 new_lhs = extract_low_bits (new_mode, store_mode,
1888 copy_rtx (store_info->rhs));
1889 if (new_lhs == NULL_RTX)
1890 continue;
1891
597d5470 1892 /* We found an acceptable shift. Generate a move to
1893 take the value from the store and put it into the
1894 shift pseudo, then shift it, then generate another
1895 move to put in into the target of the read. */
10d4de0e 1896 emit_move_insn (new_reg, new_lhs);
597d5470 1897 emit_insn (shift_seq);
10d4de0e 1898 read_reg = extract_low_bits (read_mode, new_mode, new_reg);
597d5470 1899 break;
5c9051a4 1900 }
1901
10d4de0e 1902 return read_reg;
5c9051a4 1903}
1904
1905
a1b0a968 1906/* Call back for note_stores to find the hard regs set or clobbered by
1907 insn. Data is a bitmap of the hardregs set so far. */
1908
1909static void
1910look_for_hardregs (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
1911{
1912 bitmap regs_set = (bitmap) data;
1913
1914 if (REG_P (x)
771d4616 1915 && HARD_REGISTER_P (x))
0933f1d9 1916 bitmap_set_range (regs_set, REGNO (x), REG_NREGS (x));
a1b0a968 1917}
1918
aa140b76 1919/* Helper function for replace_read and record_store.
1920 Attempt to return a value stored in STORE_INFO, from READ_BEGIN
1921 to one before READ_END bytes read in READ_MODE. Return NULL
 1922 if not successful. If REQUIRE_CST is true, only return a constant. */
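
/* For example, after (set (mem:SI ...) (const_int 0x12345678)), a later
   HImode read of the low two bytes on a little-endian target needs no
   shift and is simply narrowed, yielding (const_int 0x5678).  */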
1923
1924static rtx
3754d046 1925get_stored_val (store_info_t store_info, machine_mode read_mode,
aa140b76 1926 HOST_WIDE_INT read_begin, HOST_WIDE_INT read_end,
1927 basic_block bb, bool require_cst)
1928{
3754d046 1929 machine_mode store_mode = GET_MODE (store_info->mem);
aa140b76 1930 int shift;
1931 int access_size; /* In bytes. */
1932 rtx read_reg;
1933
1934 /* To get here the read is within the boundaries of the write so
1935 shift will never be negative. Start out with the shift being in
1936 bytes. */
1937 if (store_mode == BLKmode)
1938 shift = 0;
1939 else if (BYTES_BIG_ENDIAN)
1940 shift = store_info->end - read_end;
1941 else
1942 shift = read_begin - store_info->begin;
1943
1944 access_size = shift + GET_MODE_SIZE (read_mode);
1945
1946 /* From now on it is bits. */
1947 shift *= BITS_PER_UNIT;
1948
1949 if (shift)
1950 read_reg = find_shift_sequence (access_size, store_info, read_mode, shift,
1951 optimize_bb_for_speed_p (bb),
1952 require_cst);
1953 else if (store_mode == BLKmode)
1954 {
1955 /* The store is a memset (addr, const_val, const_size). */
1956 gcc_assert (CONST_INT_P (store_info->rhs));
1957 store_mode = int_mode_for_mode (read_mode);
1958 if (store_mode == BLKmode)
1959 read_reg = NULL_RTX;
1960 else if (store_info->rhs == const0_rtx)
1961 read_reg = extract_low_bits (read_mode, store_mode, const0_rtx);
1962 else if (GET_MODE_BITSIZE (store_mode) > HOST_BITS_PER_WIDE_INT
1963 || BITS_PER_UNIT >= HOST_BITS_PER_WIDE_INT)
1964 read_reg = NULL_RTX;
1965 else
1966 {
1967 unsigned HOST_WIDE_INT c
1968 = INTVAL (store_info->rhs)
1969 & (((HOST_WIDE_INT) 1 << BITS_PER_UNIT) - 1);
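	  /* Replicate the low byte of the memset value across the whole
	     word, e.g. 0x12 becomes 0x1212121212121212 when HOST_WIDE_INT
	     is 64 bits wide.  */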
1970 int shift = BITS_PER_UNIT;
1971 while (shift < HOST_BITS_PER_WIDE_INT)
1972 {
1973 c |= (c << shift);
1974 shift <<= 1;
1975 }
f62058c3 1976 read_reg = gen_int_mode (c, store_mode);
aa140b76 1977 read_reg = extract_low_bits (read_mode, store_mode, read_reg);
1978 }
1979 }
1980 else if (store_info->const_rhs
1981 && (require_cst
1982 || GET_MODE_CLASS (read_mode) != GET_MODE_CLASS (store_mode)))
1983 read_reg = extract_low_bits (read_mode, store_mode,
1984 copy_rtx (store_info->const_rhs));
1985 else
1986 read_reg = extract_low_bits (read_mode, store_mode,
1987 copy_rtx (store_info->rhs));
1988 if (require_cst && read_reg && !CONSTANT_P (read_reg))
1989 read_reg = NULL_RTX;
1990 return read_reg;
1991}
a1b0a968 1992
3072d30e 1993/* Take a sequence of:
1994 A <- r1
1995 ...
1996 ... <- A
1997
48e1416a 1998 and change it into
3072d30e 1999 r2 <- r1
2000 A <- r1
2001 ...
2002 ... <- r2
2003
5c9051a4 2004 or
2005
2006 r3 <- extract (r1)
2007 r3 <- r3 >> shift
2008 r2 <- extract (r3)
2009 ... <- r2
2010
2011 or
2012
2013 r2 <- extract (r1)
2014 ... <- r2
2015
2016 Depending on the alignment and the mode of the store and
2017 subsequent load.
2018
2019
2020 The STORE_INFO and STORE_INSN are for the store and READ_INFO
3072d30e 2021 and READ_INSN are for the read. Return true if the replacement
2022 went ok. */
2023
2024static bool
48e1416a 2025replace_read (store_info_t store_info, insn_info_t store_insn,
aa140b76 2026 read_info_t read_info, insn_info_t read_insn, rtx *loc,
2027 bitmap regs_live)
3072d30e 2028{
3754d046 2029 machine_mode store_mode = GET_MODE (store_info->mem);
2030 machine_mode read_mode = GET_MODE (read_info->mem);
4cd001d5 2031 rtx_insn *insns, *this_insn;
2032 rtx read_reg;
aa140b76 2033 basic_block bb;
5c9051a4 2034
3072d30e 2035 if (!dbg_cnt (dse))
2036 return false;
2037
10d4de0e 2038 /* Create a sequence of instructions to set up the read register.
2039 This sequence goes immediately before the store and its result
2040 is read by the load.
2041
2042 We need to keep this in perspective. We are replacing a read
5c9051a4 2043 with a sequence of insns, but the read will almost certainly be
2044 in cache, so it is not going to be an expensive one. Thus, we
2045 are not willing to do a multi insn shift or worse a subroutine
2046 call to get rid of the read. */
1ca59310 2047 if (dump_file && (dump_flags & TDF_DETAILS))
10d4de0e 2048 fprintf (dump_file, "trying to replace %smode load in insn %d"
2049 " from %smode store in insn %d\n",
2050 GET_MODE_NAME (read_mode), INSN_UID (read_insn->insn),
2051 GET_MODE_NAME (store_mode), INSN_UID (store_insn->insn));
2052 start_sequence ();
aa140b76 2053 bb = BLOCK_FOR_INSN (read_insn->insn);
2054 read_reg = get_stored_val (store_info,
2055 read_mode, read_info->begin, read_info->end,
2056 bb, false);
10d4de0e 2057 if (read_reg == NULL_RTX)
5c9051a4 2058 {
10d4de0e 2059 end_sequence ();
1ca59310 2060 if (dump_file && (dump_flags & TDF_DETAILS))
10d4de0e 2061 fprintf (dump_file, " -- could not extract bits of stored value\n");
2062 return false;
5c9051a4 2063 }
10d4de0e 2064 /* Force the value into a new register so that it won't be clobbered
2065 between the store and the load. */
2066 read_reg = copy_to_mode_reg (read_mode, read_reg);
2067 insns = get_insns ();
2068 end_sequence ();
5c9051a4 2069
a1b0a968 2070 if (insns != NULL_RTX)
2071 {
2072 /* Now we have to scan the set of new instructions to see if the
 2073 sequence contains any sets of hardregs that happened to be
2074 live at this point. For instance, this can happen if one of
2075 the insns sets the CC and the CC happened to be live at that
2076 point. This does occasionally happen, see PR 37922. */
4fb07d00 2077 bitmap regs_set = BITMAP_ALLOC (&reg_obstack);
a1b0a968 2078
2079 for (this_insn = insns; this_insn != NULL_RTX; this_insn = NEXT_INSN (this_insn))
2080 note_stores (PATTERN (this_insn), look_for_hardregs, regs_set);
48e1416a 2081
a1b0a968 2082 bitmap_and_into (regs_set, regs_live);
2083 if (!bitmap_empty_p (regs_set))
2084 {
1ca59310 2085 if (dump_file && (dump_flags & TDF_DETAILS))
a1b0a968 2086 {
48e1416a 2087 fprintf (dump_file,
a1b0a968 2088 "abandoning replacement because sequence clobbers live hardregs:");
2089 df_print_regset (dump_file, regs_set);
2090 }
48e1416a 2091
a1b0a968 2092 BITMAP_FREE (regs_set);
2093 return false;
2094 }
2095 BITMAP_FREE (regs_set);
2096 }
2097
5c9051a4 2098 if (validate_change (read_insn->insn, loc, read_reg, 0))
3072d30e 2099 {
55c5ac9f 2100 deferred_change_t change = new deferred_change;
48e1416a 2101
5c9051a4 2102 /* Insert this right before the store insn where it will be safe
2103 from later insns that might change it before the read. */
2104 emit_insn_before (insns, store_insn->insn);
48e1416a 2105
5c9051a4 2106 /* And now for the kludge part: cselib croaks if you just
2107 return at this point. There are two reasons for this:
48e1416a 2108
5c9051a4 2109 1) Cselib has an idea of how many pseudos there are and
2110 that does not include the new ones we just added.
48e1416a 2111
5c9051a4 2112 2) Cselib does not know about the move insn we added
2113 above the store_info, and there is no way to tell it
2114 about it, because it has "moved on".
48e1416a 2115
5c9051a4 2116 Problem (1) is fixable with a certain amount of engineering.
 2117 Problem (2) requires starting the bb from scratch. This
2118 could be expensive.
48e1416a 2119
5c9051a4 2120 So we are just going to have to lie. The move/extraction
2121 insns are not really an issue, cselib did not see them. But
2122 the use of the new pseudo read_insn is a real problem because
2123 cselib has not scanned this insn. The way that we solve this
2124 problem is that we are just going to put the mem back for now
2125 and when we are finished with the block, we undo this. We
2126 keep a table of mems to get rid of. At the end of the basic
2127 block we can put them back. */
48e1416a 2128
5c9051a4 2129 *loc = read_info->mem;
55c5ac9f 2130 change->next = deferred_change_list;
2131 deferred_change_list = change;
2132 change->loc = loc;
2133 change->reg = read_reg;
48e1416a 2134
5c9051a4 2135 /* Get rid of the read_info, from the point of view of the
2136 rest of dse, play like this read never happened. */
2137 read_insn->read_rec = read_info->next;
55c5ac9f 2138 delete read_info;
1ca59310 2139 if (dump_file && (dump_flags & TDF_DETAILS))
10d4de0e 2140 {
2141 fprintf (dump_file, " -- replaced the loaded MEM with ");
2142 print_simple_rtl (dump_file, read_reg);
2143 fprintf (dump_file, "\n");
2144 }
5c9051a4 2145 return true;
3072d30e 2146 }
48e1416a 2147 else
3072d30e 2148 {
1ca59310 2149 if (dump_file && (dump_flags & TDF_DETAILS))
10d4de0e 2150 {
2151 fprintf (dump_file, " -- replacing the loaded MEM with ");
2152 print_simple_rtl (dump_file, read_reg);
2153 fprintf (dump_file, " led to an invalid instruction\n");
2154 }
3072d30e 2155 return false;
2156 }
2157}
2158
ec1203cd 2159/* Check the address of MEM *LOC and kill any appropriate stores that may
2160 be active. */
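
/* For example, if an active store covers frame bytes [8, 16) and this
   insn reads bytes [12, 14), replace_read may rewrite the load to use
   the stored value directly; failing that, the overlapping read simply
   drops the store from the active list so it will not be deleted.  */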
3072d30e 2161
ec1203cd 2162static void
2163check_mem_read_rtx (rtx *loc, bb_info_t bb_info)
3072d30e 2164{
82d2c88b 2165 rtx mem = *loc, mem_addr;
3072d30e 2166 insn_info_t insn_info;
2167 HOST_WIDE_INT offset = 0;
2168 HOST_WIDE_INT width = 0;
32c2fdea 2169 alias_set_type spill_alias_set = 0;
48e1416a 2170 cselib_val *base = NULL;
3072d30e 2171 int group_id;
2172 read_info_t read_info;
2173
3072d30e 2174 insn_info = bb_info->last_insn;
2175
2176 if ((MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2177 || (MEM_VOLATILE_P (mem)))
2178 {
1ca59310 2179 if (dump_file && (dump_flags & TDF_DETAILS))
3072d30e 2180 fprintf (dump_file, " adding wild read, volatile or barrier.\n");
2181 add_wild_read (bb_info);
2182 insn_info->cannot_delete = true;
ec1203cd 2183 return;
3072d30e 2184 }
2185
2186 /* If it is reading readonly mem, then there can be no conflict with
2187 another write. */
2188 if (MEM_READONLY_P (mem))
ec1203cd 2189 return;
3072d30e 2190
2191 if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
2192 {
1ca59310 2193 if (dump_file && (dump_flags & TDF_DETAILS))
3072d30e 2194 fprintf (dump_file, " adding wild read, canon_address failure.\n");
2195 add_wild_read (bb_info);
ec1203cd 2196 return;
3072d30e 2197 }
2198
2199 if (GET_MODE (mem) == BLKmode)
2200 width = -1;
2201 else
2202 width = GET_MODE_SIZE (GET_MODE (mem));
2203
55c5ac9f 2204 read_info = new read_info_type;
3072d30e 2205 read_info->group_id = group_id;
2206 read_info->mem = mem;
2207 read_info->alias_set = spill_alias_set;
2208 read_info->begin = offset;
2209 read_info->end = offset + width;
2210 read_info->next = insn_info->read_rec;
2211 insn_info->read_rec = read_info;
82d2c88b 2212 /* For alias_set != 0 canon_true_dependence should never be called. */
2213 if (spill_alias_set)
2214 mem_addr = NULL_RTX;
2215 else
2216 {
2217 if (group_id < 0)
2218 mem_addr = base->val_rtx;
2219 else
2220 {
2221 group_info_t group
f1f41a6c 2222 = rtx_group_vec[group_id];
82d2c88b 2223 mem_addr = group->canon_base_addr;
2224 }
90f3e775 2225 /* get_addr can only handle VALUE, not an expr like
 2226 VALUE + OFFSET, so call get_addr on mem_addr before adding
 2227 the offset with plus_constant. */
2228 mem_addr = get_addr (mem_addr);
82d2c88b 2229 if (offset)
87cf5753 2230 mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
82d2c88b 2231 }
3072d30e 2232
bef304b8 2233 /* We ignore the clobbers in store_info. This is mildly aggressive,
3072d30e 2234 but there really should not be a clobber followed by a read. */
2235
2236 if (spill_alias_set)
2237 {
2238 insn_info_t i_ptr = active_local_stores;
2239 insn_info_t last = NULL;
2240
1ca59310 2241 if (dump_file && (dump_flags & TDF_DETAILS))
3072d30e 2242 fprintf (dump_file, " processing spill load %d\n",
32c2fdea 2243 (int) spill_alias_set);
3072d30e 2244
2245 while (i_ptr)
2246 {
2247 store_info_t store_info = i_ptr->store_rec;
2248
2249 /* Skip the clobbers. */
2250 while (!store_info->is_set)
2251 store_info = store_info->next;
48e1416a 2252
3072d30e 2253 if (store_info->alias_set == spill_alias_set)
2254 {
1ca59310 2255 if (dump_file && (dump_flags & TDF_DETAILS))
3072d30e 2256 dump_insn_info ("removing from active", i_ptr);
2257
1242bee6 2258 active_local_stores_len--;
3072d30e 2259 if (last)
2260 last->next_local_store = i_ptr->next_local_store;
2261 else
2262 active_local_stores = i_ptr->next_local_store;
2263 }
2264 else
2265 last = i_ptr;
2266 i_ptr = i_ptr->next_local_store;
2267 }
2268 }
2269 else if (group_id >= 0)
2270 {
2271 /* This is the restricted case where the base is a constant or
2272 the frame pointer and offset is a constant. */
2273 insn_info_t i_ptr = active_local_stores;
2274 insn_info_t last = NULL;
48e1416a 2275
1ca59310 2276 if (dump_file && (dump_flags & TDF_DETAILS))
3072d30e 2277 {
2278 if (width == -1)
2279 fprintf (dump_file, " processing const load gid=%d[BLK]\n",
2280 group_id);
2281 else
2282 fprintf (dump_file, " processing const load gid=%d[%d..%d)\n",
2283 group_id, (int)offset, (int)(offset+width));
2284 }
2285
2286 while (i_ptr)
2287 {
2288 bool remove = false;
2289 store_info_t store_info = i_ptr->store_rec;
48e1416a 2290
3072d30e 2291 /* Skip the clobbers. */
2292 while (!store_info->is_set)
2293 store_info = store_info->next;
48e1416a 2294
3072d30e 2295 /* There are three cases here. */
2296 if (store_info->group_id < 0)
2297 /* We have a cselib store followed by a read from a
2298 const base. */
48e1416a 2299 remove
2300 = canon_true_dependence (store_info->mem,
3072d30e 2301 GET_MODE (store_info->mem),
2302 store_info->mem_addr,
376a287d 2303 mem, mem_addr);
48e1416a 2304
3072d30e 2305 else if (group_id == store_info->group_id)
2306 {
2307 /* This is a block mode load. We may get lucky and
2308 canon_true_dependence may save the day. */
2309 if (width == -1)
48e1416a 2310 remove
2311 = canon_true_dependence (store_info->mem,
3072d30e 2312 GET_MODE (store_info->mem),
2313 store_info->mem_addr,
376a287d 2314 mem, mem_addr);
48e1416a 2315
3072d30e 2316 /* If this read is just reading back something that we just
2317 stored, rewrite the read. */
48e1416a 2318 else
3072d30e 2319 {
2320 if (store_info->rhs
aa140b76 2321 && offset >= store_info->begin
2322 && offset + width <= store_info->end
2323 && all_positions_needed_p (store_info,
2324 offset - store_info->begin,
2325 width)
2326 && replace_read (store_info, i_ptr, read_info,
2327 insn_info, loc, bb_info->regs_live))
ec1203cd 2328 return;
aa140b76 2329
3072d30e 2330 /* The bases are the same, just see if the offsets
2331 overlap. */
48e1416a 2332 if ((offset < store_info->end)
3072d30e 2333 && (offset + width > store_info->begin))
2334 remove = true;
2335 }
2336 }
48e1416a 2337
2338 /* else
3072d30e 2339 The else case that is missing here is that the
2340 bases are constant but different. There is nothing
2341 to do here because there is no overlap. */
48e1416a 2342
3072d30e 2343 if (remove)
2344 {
1ca59310 2345 if (dump_file && (dump_flags & TDF_DETAILS))
3072d30e 2346 dump_insn_info ("removing from active", i_ptr);
2347
1242bee6 2348 active_local_stores_len--;
3072d30e 2349 if (last)
2350 last->next_local_store = i_ptr->next_local_store;
2351 else
2352 active_local_stores = i_ptr->next_local_store;
2353 }
2354 else
2355 last = i_ptr;
2356 i_ptr = i_ptr->next_local_store;
2357 }
2358 }
48e1416a 2359 else
3072d30e 2360 {
2361 insn_info_t i_ptr = active_local_stores;
2362 insn_info_t last = NULL;
1ca59310 2363 if (dump_file && (dump_flags & TDF_DETAILS))
3072d30e 2364 {
2365 fprintf (dump_file, " processing cselib load mem:");
2366 print_inline_rtx (dump_file, mem, 0);
2367 fprintf (dump_file, "\n");
2368 }
2369
2370 while (i_ptr)
2371 {
2372 bool remove = false;
2373 store_info_t store_info = i_ptr->store_rec;
48e1416a 2374
1ca59310 2375 if (dump_file && (dump_flags & TDF_DETAILS))
3072d30e 2376 fprintf (dump_file, " processing cselib load against insn %d\n",
2377 INSN_UID (i_ptr->insn));
2378
2379 /* Skip the clobbers. */
2380 while (!store_info->is_set)
2381 store_info = store_info->next;
2382
2383 /* If this read is just reading back something that we just
2384 stored, rewrite the read. */
2385 if (store_info->rhs
2386 && store_info->group_id == -1
2387 && store_info->cse_base == base
2ffe5515 2388 && width != -1
aa140b76 2389 && offset >= store_info->begin
2390 && offset + width <= store_info->end
2391 && all_positions_needed_p (store_info,
2392 offset - store_info->begin, width)
2393 && replace_read (store_info, i_ptr, read_info, insn_info, loc,
2394 bb_info->regs_live))
ec1203cd 2395 return;
3072d30e 2396
2397 if (!store_info->alias_set)
48e1416a 2398 remove = canon_true_dependence (store_info->mem,
3072d30e 2399 GET_MODE (store_info->mem),
2400 store_info->mem_addr,
376a287d 2401 mem, mem_addr);
48e1416a 2402
3072d30e 2403 if (remove)
2404 {
1ca59310 2405 if (dump_file && (dump_flags & TDF_DETAILS))
3072d30e 2406 dump_insn_info ("removing from active", i_ptr);
48e1416a 2407
1242bee6 2408 active_local_stores_len--;
3072d30e 2409 if (last)
2410 last->next_local_store = i_ptr->next_local_store;
2411 else
2412 active_local_stores = i_ptr->next_local_store;
2413 }
2414 else
2415 last = i_ptr;
2416 i_ptr = i_ptr->next_local_store;
2417 }
2418 }
3072d30e 2419}
2420
ec1203cd 2421/* A note_uses callback in which DATA points the INSN_INFO for
3072d30e 2422 as check_mem_read_rtx. Nullify the pointer if i_m_r_m_r returns
2423 true for any part of *LOC. */
2424
2425static void
2426check_mem_read_use (rtx *loc, void *data)
2427{
ec1203cd 2428 subrtx_ptr_iterator::array_type array;
2429 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
2430 {
2431 rtx *loc = *iter;
2432 if (MEM_P (*loc))
2433 check_mem_read_rtx (loc, (bb_info_t) data);
2434 }
3072d30e 2435}
2436
aa140b76 2437
2438/* Get arguments passed to CALL_INSN. Return TRUE if successful.
2439 So far it only handles arguments passed in registers. */
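
/* This is used below to recover the (dest, value, length) arguments of
   a memset call so that the call can be modelled as a BLKmode store.  */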
2440
2441static bool
2442get_call_args (rtx call_insn, tree fn, rtx *args, int nargs)
2443{
39cba157 2444 CUMULATIVE_ARGS args_so_far_v;
2445 cumulative_args_t args_so_far;
aa140b76 2446 tree arg;
2447 int idx;
2448
39cba157 2449 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
2450 args_so_far = pack_cumulative_args (&args_so_far_v);
aa140b76 2451
2452 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
2453 for (idx = 0;
2454 arg != void_list_node && idx < nargs;
2455 arg = TREE_CHAIN (arg), idx++)
2456 {
3754d046 2457 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
f387af4f 2458 rtx reg, link, tmp;
39cba157 2459 reg = targetm.calls.function_arg (args_so_far, mode, NULL_TREE, true);
aa140b76 2460 if (!reg || !REG_P (reg) || GET_MODE (reg) != mode
2461 || GET_MODE_CLASS (mode) != MODE_INT)
2462 return false;
2463
2464 for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
2465 link;
2466 link = XEXP (link, 1))
2467 if (GET_CODE (XEXP (link, 0)) == USE)
2468 {
2469 args[idx] = XEXP (XEXP (link, 0), 0);
2470 if (REG_P (args[idx])
2471 && REGNO (args[idx]) == REGNO (reg)
2472 && (GET_MODE (args[idx]) == mode
2473 || (GET_MODE_CLASS (GET_MODE (args[idx])) == MODE_INT
2474 && (GET_MODE_SIZE (GET_MODE (args[idx]))
2475 <= UNITS_PER_WORD)
2476 && (GET_MODE_SIZE (GET_MODE (args[idx]))
2477 > GET_MODE_SIZE (mode)))))
2478 break;
2479 }
2480 if (!link)
2481 return false;
2482
2483 tmp = cselib_expand_value_rtx (args[idx], scratch, 5);
2484 if (GET_MODE (args[idx]) != mode)
2485 {
2486 if (!tmp || !CONST_INT_P (tmp))
2487 return false;
f62058c3 2488 tmp = gen_int_mode (INTVAL (tmp), mode);
aa140b76 2489 }
2490 if (tmp)
2491 args[idx] = tmp;
2492
39cba157 2493 targetm.calls.function_arg_advance (args_so_far, mode, NULL_TREE, true);
aa140b76 2494 }
2495 if (arg != void_list_node || idx != nargs)
2496 return false;
2497 return true;
2498}
2499
5a9ecd4a 2500/* Return a bitmap of the fixed registers contained in IN. */
2501
2502static bitmap
2503copy_fixed_regs (const_bitmap in)
2504{
2505 bitmap ret;
2506
2507 ret = ALLOC_REG_SET (NULL);
2508 bitmap_and (ret, in, fixed_reg_set_regset);
2509 return ret;
2510}
aa140b76 2511
3072d30e 2512/* Apply record_store to all candidate stores in INSN. Mark INSN
2513 if some part of it is not a candidate store and assigns to a
2514 non-register target. */
2515
2516static void
ebabb7a3 2517scan_insn (bb_info_t bb_info, rtx_insn *insn)
3072d30e 2518{
2519 rtx body;
55c5ac9f 2520 insn_info_type *insn_info = new insn_info_type;
3072d30e 2521 int mems_found = 0;
55c5ac9f 2522 memset (insn_info, 0, sizeof (struct insn_info_type));
3072d30e 2523
1ca59310 2524 if (dump_file && (dump_flags & TDF_DETAILS))
3072d30e 2525 fprintf (dump_file, "\n**scanning insn=%d\n",
2526 INSN_UID (insn));
2527
2528 insn_info->prev_insn = bb_info->last_insn;
2529 insn_info->insn = insn;
2530 bb_info->last_insn = insn_info;
48e1416a 2531
9845d120 2532 if (DEBUG_INSN_P (insn))
2533 {
2534 insn_info->cannot_delete = true;
2535 return;
2536 }
3072d30e 2537
3072d30e 2538 /* Look at all of the uses in the insn. */
2539 note_uses (&PATTERN (insn), check_mem_read_use, bb_info);
2540
2541 if (CALL_P (insn))
2542 {
aa140b76 2543 bool const_call;
2544 tree memset_call = NULL_TREE;
2545
3072d30e 2546 insn_info->cannot_delete = true;
17e1318c 2547
3072d30e 2548 /* Const functions cannot do anything bad, i.e. read memory;
17e1318c 2549 however, they can read their parameters, which may have
aa140b76 2550 been pushed onto the stack.
2551 memset and bzero don't read memory either. */
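      /* For instance, a memset (p, 0, 32) call is re-expressed below as a
	 32-byte BLKmode store of zero at p, so that earlier active stores
	 it overwrites can be proven dead.  */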
2552 const_call = RTL_CONST_CALL_P (insn);
2553 if (!const_call)
2554 {
cf7fb72d 2555 rtx call = get_call_rtx_from (insn);
2556 if (call && GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
aa140b76 2557 {
2558 rtx symbol = XEXP (XEXP (call, 0), 0);
2559 if (SYMBOL_REF_DECL (symbol)
2560 && TREE_CODE (SYMBOL_REF_DECL (symbol)) == FUNCTION_DECL)
2561 {
2562 if ((DECL_BUILT_IN_CLASS (SYMBOL_REF_DECL (symbol))
2563 == BUILT_IN_NORMAL
2564 && (DECL_FUNCTION_CODE (SYMBOL_REF_DECL (symbol))
2565 == BUILT_IN_MEMSET))
2566 || SYMBOL_REF_DECL (symbol) == block_clear_fn)
2567 memset_call = SYMBOL_REF_DECL (symbol);
2568 }
2569 }
2570 }
2571 if (const_call || memset_call)
3072d30e 2572 {
2573 insn_info_t i_ptr = active_local_stores;
2574 insn_info_t last = NULL;
2575
1ca59310 2576 if (dump_file && (dump_flags & TDF_DETAILS))
aa140b76 2577 fprintf (dump_file, "%s call %d\n",
2578 const_call ? "const" : "memset", INSN_UID (insn));
3072d30e 2579
16bf64db 2580 /* See the head comment of the frame_read field. */
17853422 2581 if (reload_completed
2582 /* Tail calls are storing their arguments using
2583 arg pointer. If it is a frame pointer on the target,
2584 even before reload we need to kill frame pointer based
2585 stores. */
2586 || (SIBLING_CALL_P (insn)
2587 && HARD_FRAME_POINTER_IS_ARG_POINTER))
16bf64db 2588 insn_info->frame_read = true;
2589
2590 /* Loop over the active stores and remove those which are
2591 killed by the const function call. */
3072d30e 2592 while (i_ptr)
2593 {
16bf64db 2594 bool remove_store = false;
2595
2596 /* The stack pointer based stores are always killed. */
17e1318c 2597 if (i_ptr->stack_pointer_based)
16bf64db 2598 remove_store = true;
2599
2600 /* If the frame is read, the frame related stores are killed. */
2601 else if (insn_info->frame_read)
2602 {
2603 store_info_t store_info = i_ptr->store_rec;
2604
2605 /* Skip the clobbers. */
2606 while (!store_info->is_set)
2607 store_info = store_info->next;
2608
2609 if (store_info->group_id >= 0
f1f41a6c 2610 && rtx_group_vec[store_info->group_id]->frame_related)
16bf64db 2611 remove_store = true;
2612 }
2613
2614 if (remove_store)
3072d30e 2615 {
1ca59310 2616 if (dump_file && (dump_flags & TDF_DETAILS))
3072d30e 2617 dump_insn_info ("removing from active", i_ptr);
48e1416a 2618
1242bee6 2619 active_local_stores_len--;
3072d30e 2620 if (last)
2621 last->next_local_store = i_ptr->next_local_store;
2622 else
2623 active_local_stores = i_ptr->next_local_store;
2624 }
2625 else
2626 last = i_ptr;
16bf64db 2627
3072d30e 2628 i_ptr = i_ptr->next_local_store;
2629 }
aa140b76 2630
2631 if (memset_call)
2632 {
2633 rtx args[3];
2634 if (get_call_args (insn, memset_call, args, 3)
2635 && CONST_INT_P (args[1])
2636 && CONST_INT_P (args[2])
2637 && INTVAL (args[2]) > 0)
2638 {
2639 rtx mem = gen_rtx_MEM (BLKmode, args[0]);
5b2a69fa 2640 set_mem_size (mem, INTVAL (args[2]));
d1f9b275 2641 body = gen_rtx_SET (mem, args[1]);
aa140b76 2642 mems_found += record_store (body, bb_info);
1ca59310 2643 if (dump_file && (dump_flags & TDF_DETAILS))
aa140b76 2644 fprintf (dump_file, "handling memset as BLKmode store\n");
2645 if (mems_found == 1)
2646 {
1242bee6 2647 if (active_local_stores_len++
2648 >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
2649 {
2650 active_local_stores_len = 1;
2651 active_local_stores = NULL;
2652 }
5a9ecd4a 2653 insn_info->fixed_regs_live
2654 = copy_fixed_regs (bb_info->regs_live);
aa140b76 2655 insn_info->next_local_store = active_local_stores;
2656 active_local_stores = insn_info;
2657 }
2658 }
2659 }
3072d30e 2660 }
17853422 2661 else if (SIBLING_CALL_P (insn) && reload_completed)
2662 /* Arguments for a sibling call that are pushed to memory are passed
2663 using the incoming argument pointer of the current function. After
2664 reload that might be (and likely is) frame pointer based. */
2665 add_wild_read (bb_info);
17e1318c 2666 else
b4a708fb 2667 /* Every other call, including pure functions, may read any memory
2668 that is not relative to the frame. */
2669 add_non_frame_wild_read (bb_info);
17e1318c 2670
3072d30e 2671 return;
2672 }
2673
2674 /* Assuming that there are sets in these insns, we cannot delete
2675 them. */
2676 if ((GET_CODE (PATTERN (insn)) == CLOBBER)
4aafe72f 2677 || volatile_refs_p (PATTERN (insn))
bc0dfc8d 2678 || (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
3072d30e 2679 || (RTX_FRAME_RELATED_P (insn))
2680 || find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX))
2681 insn_info->cannot_delete = true;
48e1416a 2682
3072d30e 2683 body = PATTERN (insn);
2684 if (GET_CODE (body) == PARALLEL)
2685 {
2686 int i;
2687 for (i = 0; i < XVECLEN (body, 0); i++)
2688 mems_found += record_store (XVECEXP (body, 0, i), bb_info);
2689 }
2690 else
2691 mems_found += record_store (body, bb_info);
2692
1ca59310 2693 if (dump_file && (dump_flags & TDF_DETAILS))
48e1416a 2694 fprintf (dump_file, "mems_found = %d, cannot_delete = %s\n",
3072d30e 2695 mems_found, insn_info->cannot_delete ? "true" : "false");
2696
aa140b76 2697 /* If we found exactly one store to memory, add the insn to
 2698 active_local_stores so that it can be locally deleted if found dead,
 2699 or used for replace_read and redundant constant store elimination.
 2700 Otherwise mark it as not deletable. This simplifies the processing later. */
2701 if (mems_found == 1)
3072d30e 2702 {
1242bee6 2703 if (active_local_stores_len++
2704 >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
2705 {
2706 active_local_stores_len = 1;
2707 active_local_stores = NULL;
2708 }
5a9ecd4a 2709 insn_info->fixed_regs_live = copy_fixed_regs (bb_info->regs_live);
3072d30e 2710 insn_info->next_local_store = active_local_stores;
2711 active_local_stores = insn_info;
2712 }
2713 else
2714 insn_info->cannot_delete = true;
2715}
2716
2717
2718/* Remove BASE from the set of active_local_stores. This is a
2719 callback from cselib that is used to get rid of the stores in
2720 active_local_stores. */
2721
2722static void
2723remove_useless_values (cselib_val *base)
2724{
2725 insn_info_t insn_info = active_local_stores;
2726 insn_info_t last = NULL;
2727
2728 while (insn_info)
2729 {
2730 store_info_t store_info = insn_info->store_rec;
9ce37fa7 2731 bool del = false;
3072d30e 2732
2733 /* If ANY of the store_infos match the cselib group that is
2734 being deleted, then the insn can not be deleted. */
2735 while (store_info)
2736 {
48e1416a 2737 if ((store_info->group_id == -1)
3072d30e 2738 && (store_info->cse_base == base))
2739 {
9ce37fa7 2740 del = true;
3072d30e 2741 break;
2742 }
2743 store_info = store_info->next;
2744 }
2745
9ce37fa7 2746 if (del)
3072d30e 2747 {
1242bee6 2748 active_local_stores_len--;
3072d30e 2749 if (last)
2750 last->next_local_store = insn_info->next_local_store;
2751 else
2752 active_local_stores = insn_info->next_local_store;
2753 free_store_info (insn_info);
2754 }
2755 else
2756 last = insn_info;
48e1416a 2757
3072d30e 2758 insn_info = insn_info->next_local_store;
2759 }
2760}
2761
2762
2763/* Do all of step 1. */
2764
2765static void
2766dse_step1 (void)
2767{
2768 basic_block bb;
4fb07d00 2769 bitmap regs_live = BITMAP_ALLOC (&reg_obstack);
48e1416a 2770
35af0188 2771 cselib_init (0);
3072d30e 2772 all_blocks = BITMAP_ALLOC (NULL);
2773 bitmap_set_bit (all_blocks, ENTRY_BLOCK);
2774 bitmap_set_bit (all_blocks, EXIT_BLOCK);
2775
ed7d889a 2776 FOR_ALL_BB_FN (bb, cfun)
3072d30e 2777 {
2778 insn_info_t ptr;
55c5ac9f 2779 bb_info_t bb_info = new dse_bb_info_type;
3072d30e 2780
55c5ac9f 2781 memset (bb_info, 0, sizeof (dse_bb_info_type));
3072d30e 2782 bitmap_set_bit (all_blocks, bb->index);
a1b0a968 2783 bb_info->regs_live = regs_live;
2784
2785 bitmap_copy (regs_live, DF_LR_IN (bb));
2786 df_simulate_initialize_forwards (bb, regs_live);
3072d30e 2787
2788 bb_table[bb->index] = bb_info;
2789 cselib_discard_hook = remove_useless_values;
2790
2791 if (bb->index >= NUM_FIXED_BLOCKS)
2792 {
ebabb7a3 2793 rtx_insn *insn;
3072d30e 2794
3072d30e 2795 active_local_stores = NULL;
1242bee6 2796 active_local_stores_len = 0;
3072d30e 2797 cselib_clear_table ();
48e1416a 2798
3072d30e 2799 /* Scan the insns. */
2800 FOR_BB_INSNS (bb, insn)
2801 {
2802 if (INSN_P (insn))
2803 scan_insn (bb_info, insn);
2804 cselib_process_insn (insn);
a1b0a968 2805 if (INSN_P (insn))
2806 df_simulate_one_insn_forwards (bb, insn, regs_live);
3072d30e 2807 }
48e1416a 2808
3072d30e 2809 /* This is something of a hack, because the global algorithm
2810 is supposed to take care of the case where stores go dead
2811 at the end of the function. However, the global
2812 algorithm must take a more conservative view of block
2813 mode reads than the local alg does. So to get the case
2814 where you have a store to the frame followed by a non
bef304b8 2815 overlapping block mode read, we look at the active local
3072d30e 2816 stores at the end of the function and delete all of the
2817 frame and spill based ones. */
2818 if (stores_off_frame_dead_at_return
2819 && (EDGE_COUNT (bb->succs) == 0
2820 || (single_succ_p (bb)
34154e27 2821 && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
18d50ae6 2822 && ! crtl->calls_eh_return)))
3072d30e 2823 {
2824 insn_info_t i_ptr = active_local_stores;
2825 while (i_ptr)
2826 {
2827 store_info_t store_info = i_ptr->store_rec;
2828
2829 /* Skip the clobbers. */
2830 while (!store_info->is_set)
2831 store_info = store_info->next;
aa140b76 2832 if (store_info->alias_set && !i_ptr->cannot_delete)
3072d30e 2833 delete_dead_store_insn (i_ptr);
48e1416a 2834 else
3072d30e 2835 if (store_info->group_id >= 0)
2836 {
48e1416a 2837 group_info_t group
f1f41a6c 2838 = rtx_group_vec[store_info->group_id];
aa140b76 2839 if (group->frame_related && !i_ptr->cannot_delete)
3072d30e 2840 delete_dead_store_insn (i_ptr);
2841 }
2842
2843 i_ptr = i_ptr->next_local_store;
2844 }
2845 }
2846
2847 /* Get rid of the loads that were discovered in
2848 replace_read. Cselib is finished with this block. */
2849 while (deferred_change_list)
2850 {
2851 deferred_change_t next = deferred_change_list->next;
2852
2853 /* There is no reason to validate this change. That was
2854 done earlier. */
2855 *deferred_change_list->loc = deferred_change_list->reg;
55c5ac9f 2856 delete deferred_change_list;
3072d30e 2857 deferred_change_list = next;
2858 }
2859
2860 /* Get rid of all of the cselib based store_infos in this
2861 block and mark the containing insns as not being
2862 deletable. */
2863 ptr = bb_info->last_insn;
2864 while (ptr)
2865 {
2866 if (ptr->contains_cselib_groups)
aa140b76 2867 {
2868 store_info_t s_info = ptr->store_rec;
2869 while (s_info && !s_info->is_set)
2870 s_info = s_info->next;
2871 if (s_info
2872 && s_info->redundant_reason
2873 && s_info->redundant_reason->insn
2874 && !ptr->cannot_delete)
2875 {
1ca59310 2876 if (dump_file && (dump_flags & TDF_DETAILS))
aa140b76 2877 fprintf (dump_file, "Locally deleting insn %d "
2878 "because insn %d stores the "
2879 "same value and couldn't be "
2880 "eliminated\n",
2881 INSN_UID (ptr->insn),
2882 INSN_UID (s_info->redundant_reason->insn));
2883 delete_dead_store_insn (ptr);
2884 }
aa140b76 2885 free_store_info (ptr);
2886 }
2887 else
2888 {
2889 store_info_t s_info;
2890
2891 /* Free at least positions_needed bitmaps. */
2892 for (s_info = ptr->store_rec; s_info; s_info = s_info->next)
2893 if (s_info->is_large)
2894 {
843bd2fa 2895 BITMAP_FREE (s_info->positions_needed.large.bmap);
aa140b76 2896 s_info->is_large = false;
2897 }
2898 }
3072d30e 2899 ptr = ptr->prev_insn;
2900 }
2901
55c5ac9f 2902 cse_store_info_pool.release ();
3072d30e 2903 }
a1b0a968 2904 bb_info->regs_live = NULL;
3072d30e 2905 }
2906
a1b0a968 2907 BITMAP_FREE (regs_live);
3072d30e 2908 cselib_finish ();
c1f445d2 2909 rtx_group_table->empty ();
3072d30e 2910}
2911
2912\f
2913/*----------------------------------------------------------------------------
2914 Second step.
2915
2916 Assign each byte position in the stores that we are going to
2917 analyze globally to a position in the bitmaps. Returns true if
6dfdc153 2918 there are any bit positions assigned.
3072d30e 2919----------------------------------------------------------------------------*/
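
/* For example, if a group tracks offsets -4..-1 and 8..11 relative to
   its base, each of those eight bytes gets its own bit position; one
   global numbering is shared by all groups.  */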
2920
2921static void
2922dse_step2_init (void)
2923{
2924 unsigned int i;
2925 group_info_t group;
2926
f1f41a6c 2927 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3072d30e 2928 {
2929 /* For all non stack related bases, we only consider a store to
2930 be deletable if there are two or more stores for that
2931 position. This is because it takes one store to make the
2932 other store redundant. However, for the stores that are
2933 stack related, we consider them if there is only one store
2934 for the position. We do this because the stack related
2935 stores can be deleted if their is no read between them and
2936 the end of the function.
48e1416a 2937
3072d30e 2938 To make this work in the current framework, we take the stack
 2939 related bases and add all of the bits from store1 into store2.
 2940 This has the effect of making them eligible even if there is
2941 only one store. */
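      /* For example, a frame slot that is written exactly once still gets
	 bit positions this way, since such a store is dead when nothing
	 reads the slot before the function returns.  */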
2942
2943 if (stores_off_frame_dead_at_return && group->frame_related)
2944 {
2945 bitmap_ior_into (group->store2_n, group->store1_n);
2946 bitmap_ior_into (group->store2_p, group->store1_p);
1ca59310 2947 if (dump_file && (dump_flags & TDF_DETAILS))
48e1416a 2948 fprintf (dump_file, "group %d is frame related ", i);
3072d30e 2949 }
2950
2951 group->offset_map_size_n++;
4fb07d00 2952 group->offset_map_n = XOBNEWVEC (&dse_obstack, int,
2953 group->offset_map_size_n);
3072d30e 2954 group->offset_map_size_p++;
4fb07d00 2955 group->offset_map_p = XOBNEWVEC (&dse_obstack, int,
2956 group->offset_map_size_p);
3072d30e 2957 group->process_globally = false;
1ca59310 2958 if (dump_file && (dump_flags & TDF_DETAILS))
3072d30e 2959 {
48e1416a 2960 fprintf (dump_file, "group %d(%d+%d): ", i,
3072d30e 2961 (int)bitmap_count_bits (group->store2_n),
2962 (int)bitmap_count_bits (group->store2_p));
2963 bitmap_print (dump_file, group->store2_n, "n ", " ");
2964 bitmap_print (dump_file, group->store2_p, "p ", "\n");
2965 }
2966 }
2967}
2968
2969
2970/* Init the offset tables for the normal case. */
2971
2972static bool
2973dse_step2_nospill (void)
2974{
2975 unsigned int i;
2976 group_info_t group;
2977 /* Position 0 is unused because 0 is used in the maps to mean
2978 unused. */
2979 current_position = 1;
f1f41a6c 2980 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3072d30e 2981 {
2982 bitmap_iterator bi;
2983 unsigned int j;
2984
2985 if (group == clear_alias_group)
2986 continue;
2987
9af5ce0c 2988 memset (group->offset_map_n, 0, sizeof (int) * group->offset_map_size_n);
2989 memset (group->offset_map_p, 0, sizeof (int) * group->offset_map_size_p);
3072d30e 2990 bitmap_clear (group->group_kill);
2991
2992 EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
2993 {
2994 bitmap_set_bit (group->group_kill, current_position);
b4a708fb 2995 if (bitmap_bit_p (group->escaped_n, j))
2996 bitmap_set_bit (kill_on_calls, current_position);
3072d30e 2997 group->offset_map_n[j] = current_position++;
2998 group->process_globally = true;
2999 }
3000 EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
3001 {
48e1416a 3002 bitmap_set_bit (group->group_kill, current_position);
b4a708fb 3003 if (bitmap_bit_p (group->escaped_p, j))
3004 bitmap_set_bit (kill_on_calls, current_position);
3072d30e 3005 group->offset_map_p[j] = current_position++;
3006 group->process_globally = true;
3007 }
3008 }
3009 return current_position != 1;
3010}
3011
3012
3072d30e 3013\f
3014/*----------------------------------------------------------------------------
3015 Third step.
48e1416a 3016
3072d30e 3017 Build the bit vectors for the transfer functions.
3018----------------------------------------------------------------------------*/
3019
3020
3072d30e 3021/* Look up the bitmap index for OFFSET in GROUP_INFO. If it is not
3022 there, return 0. */
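
/* Negative offsets are looked up in the offset_map_n table (indexed by
   -OFFSET) and non-negative offsets in offset_map_p.  */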
3023
3024static int
3025get_bitmap_index (group_info_t group_info, HOST_WIDE_INT offset)
3026{
3027 if (offset < 0)
3028 {
3029 HOST_WIDE_INT offset_p = -offset;
3030 if (offset_p >= group_info->offset_map_size_n)
3031 return 0;
3032 return group_info->offset_map_n[offset_p];
3033 }
3034 else
3035 {
3036 if (offset >= group_info->offset_map_size_p)
3037 return 0;
3038 return group_info->offset_map_p[offset];
3039 }
3040}
3041
3042
3043/* Process the STORE_INFOs into the bitmaps into GEN and KILL. KILL
3044 may be NULL. */
3045
48e1416a 3046static void
3072d30e 3047scan_stores_nospill (store_info_t store_info, bitmap gen, bitmap kill)
3048{
3049 while (store_info)
3050 {
3051 HOST_WIDE_INT i;
48e1416a 3052 group_info_t group_info
f1f41a6c 3053 = rtx_group_vec[store_info->group_id];
3072d30e 3054 if (group_info->process_globally)
3055 for (i = store_info->begin; i < store_info->end; i++)
3056 {
3057 int index = get_bitmap_index (group_info, i);
3058 if (index != 0)
3059 {
3060 bitmap_set_bit (gen, index);
3061 if (kill)
3062 bitmap_clear_bit (kill, index);
3063 }
3064 }
3065 store_info = store_info->next;
3066 }
3067}
3068
3069
3070/* Process the STORE_INFOs into the bitmaps into GEN and KILL. KILL
3071 may be NULL. */
3072
48e1416a 3073static void
3072d30e 3074scan_stores_spill (store_info_t store_info, bitmap gen, bitmap kill)
3075{
3076 while (store_info)
3077 {
3078 if (store_info->alias_set)
3079 {
48e1416a 3080 int index = get_bitmap_index (clear_alias_group,
3072d30e 3081 store_info->alias_set);
3082 if (index != 0)
3083 {
3084 bitmap_set_bit (gen, index);
3085 if (kill)
3086 bitmap_clear_bit (kill, index);
3087 }
3088 }
3089 store_info = store_info->next;
3090 }
3091}
3092
3093
3094/* Process the READ_INFOs into the bitmaps into GEN and KILL. KILL
3095 may be NULL. */
3096
3097static void
3098scan_reads_nospill (insn_info_t insn_info, bitmap gen, bitmap kill)
3099{
3100 read_info_t read_info = insn_info->read_rec;
3101 int i;
3102 group_info_t group;
3103
16bf64db 3104 /* If this insn reads the frame, kill all the frame related stores. */
3105 if (insn_info->frame_read)
3106 {
f1f41a6c 3107 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
16bf64db 3108 if (group->process_globally && group->frame_related)
3109 {
3110 if (kill)
3111 bitmap_ior_into (kill, group->group_kill);
48e1416a 3112 bitmap_and_compl_into (gen, group->group_kill);
16bf64db 3113 }
3114 }
b4a708fb 3115 if (insn_info->non_frame_wild_read)
3116 {
3117 /* Kill all non-frame related stores. Kill all stores of variables that
3118 escape. */
3119 if (kill)
3120 bitmap_ior_into (kill, kill_on_calls);
3121 bitmap_and_compl_into (gen, kill_on_calls);
f1f41a6c 3122 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
b4a708fb 3123 if (group->process_globally && !group->frame_related)
3124 {
3125 if (kill)
3126 bitmap_ior_into (kill, group->group_kill);
3127 bitmap_and_compl_into (gen, group->group_kill);
3128 }
3129 }
3072d30e 3130 while (read_info)
3131 {
f1f41a6c 3132 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3072d30e 3133 {
3134 if (group->process_globally)
3135 {
3136 if (i == read_info->group_id)
3137 {
3138 if (read_info->begin > read_info->end)
3139 {
3140 /* Begin > end for block mode reads. */
3141 if (kill)
3142 bitmap_ior_into (kill, group->group_kill);
3143 bitmap_and_compl_into (gen, group->group_kill);
3144 }
3145 else
3146 {
3147 /* The groups are the same, just process the
3148 offsets. */
3149 HOST_WIDE_INT j;
3150 for (j = read_info->begin; j < read_info->end; j++)
3151 {
3152 int index = get_bitmap_index (group, j);
3153 if (index != 0)
3154 {
3155 if (kill)
3156 bitmap_set_bit (kill, index);
3157 bitmap_clear_bit (gen, index);
3158 }
3159 }
3160 }
3161 }
3162 else
3163 {
 3164 /* The groups are different; if the alias sets
3165 conflict, clear the entire group. We only need
3166 to apply this test if the read_info is a cselib
3167 read. Anything with a constant base cannot alias
3168 something else with a different constant
3169 base. */
3170 if ((read_info->group_id < 0)
48e1416a 3171 && canon_true_dependence (group->base_mem,
ec410bf1 3172 GET_MODE (group->base_mem),
82d2c88b 3173 group->canon_base_addr,
376a287d 3174 read_info->mem, NULL_RTX))
3072d30e 3175 {
3176 if (kill)
3177 bitmap_ior_into (kill, group->group_kill);
3178 bitmap_and_compl_into (gen, group->group_kill);
3179 }
3180 }
3181 }
3182 }
48e1416a 3183
3072d30e 3184 read_info = read_info->next;
3185 }
3186}
3187
3188/* Process the READ_INFOs into the bitmaps into GEN and KILL. KILL
3189 may be NULL. */
3190
3191static void
3192scan_reads_spill (read_info_t read_info, bitmap gen, bitmap kill)
3193{
3194 while (read_info)
3195 {
3196 if (read_info->alias_set)
3197 {
48e1416a 3198 int index = get_bitmap_index (clear_alias_group,
3072d30e 3199 read_info->alias_set);
3200 if (index != 0)
3201 {
3202 if (kill)
3203 bitmap_set_bit (kill, index);
3204 bitmap_clear_bit (gen, index);
3205 }
3206 }
48e1416a 3207
3072d30e 3208 read_info = read_info->next;
3209 }
3210}
3211
3212
3213/* Return the insn in BB_INFO before the first wild read or if there
3214 are no wild reads in the block, return the last insn. */
3215
3216static insn_info_t
3217find_insn_before_first_wild_read (bb_info_t bb_info)
3218{
3219 insn_info_t insn_info = bb_info->last_insn;
3220 insn_info_t last_wild_read = NULL;
3221
3222 while (insn_info)
3223 {
3224 if (insn_info->wild_read)
3225 {
3226 last_wild_read = insn_info->prev_insn;
3227 /* Block starts with wild read. */
3228 if (!last_wild_read)
3229 return NULL;
3230 }
3231
3232 insn_info = insn_info->prev_insn;
3233 }
3234
3235 if (last_wild_read)
3236 return last_wild_read;
3237 else
3238 return bb_info->last_insn;
3239}
3240
3241
3242/* Scan the insns in BB_INFO starting at PTR and going to the top of
3243 the block in order to build the gen and kill sets for the block.
3244 We start at ptr which may be the last insn in the block or may be
3245 the first insn with a wild read. In the latter case we are able to
3246 skip the rest of the block because it just does not matter:
3247 anything that happens is hidden by the wild read. */
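
/* A wild read is recorded, for instance, for a volatile access or memory
   barrier, for an address that canon_address cannot handle, and for a
   sibling call after reload; see the calls to add_wild_read above.  */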
3248
3249static void
3250dse_step3_scan (bool for_spills, basic_block bb)
3251{
3252 bb_info_t bb_info = bb_table[bb->index];
3253 insn_info_t insn_info;
3254
3255 if (for_spills)
3256 /* There are no wild reads in the spill case. */
3257 insn_info = bb_info->last_insn;
3258 else
3259 insn_info = find_insn_before_first_wild_read (bb_info);
48e1416a 3260
3072d30e 3261 /* In the spill case or in the no_spill case if there is no wild
3262 read in the block, we will need a kill set. */
3263 if (insn_info == bb_info->last_insn)
3264 {
3265 if (bb_info->kill)
3266 bitmap_clear (bb_info->kill);
3267 else
4fb07d00 3268 bb_info->kill = BITMAP_ALLOC (&dse_bitmap_obstack);
3072d30e 3269 }
48e1416a 3270 else
3072d30e 3271 if (bb_info->kill)
3272 BITMAP_FREE (bb_info->kill);
3273
3274 while (insn_info)
3275 {
3276 /* There may have been code deleted by the dce pass run before
3277 this phase. */
3278 if (insn_info->insn && INSN_P (insn_info->insn))
3279 {
48e1416a 3280 /* Process the read(s) last. */
3072d30e 3281 if (for_spills)
3282 {
3283 scan_stores_spill (insn_info->store_rec, bb_info->gen, bb_info->kill);
3284 scan_reads_spill (insn_info->read_rec, bb_info->gen, bb_info->kill);
3285 }
3286 else
3287 {
3288 scan_stores_nospill (insn_info->store_rec, bb_info->gen, bb_info->kill);
3289 scan_reads_nospill (insn_info, bb_info->gen, bb_info->kill);
3290 }
48e1416a 3291 }
3072d30e 3292
3293 insn_info = insn_info->prev_insn;
3294 }
3295}
3296
3297
3298/* Set the gen set of the exit block, and also any block with no
3299 successors that does not have a wild read. */
3300
3301static void
3302dse_step3_exit_block_scan (bb_info_t bb_info)
3303{
3304 /* The gen set is all 0's for the exit block except for the
3305 frame_pointer_group. */
48e1416a 3306
3072d30e 3307 if (stores_off_frame_dead_at_return)
3308 {
3309 unsigned int i;
3310 group_info_t group;
48e1416a 3311
f1f41a6c 3312 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3072d30e 3313 {
3314 if (group->process_globally && group->frame_related)
3315 bitmap_ior_into (bb_info->gen, group->group_kill);
3316 }
3317 }
3318}
3319
3320
3321/* Find all of the blocks that are not backwards reachable from the
3322 exit block or any block with no successors (BB). These are the
3323 infinite loops or infinite self loops. These blocks will still
3324 have their bits set in UNREACHABLE_BLOCKS. */
3325
3326static void
3327mark_reachable_blocks (sbitmap unreachable_blocks, basic_block bb)
3328{
3329 edge e;
3330 edge_iterator ei;
3331
08b7917c 3332 if (bitmap_bit_p (unreachable_blocks, bb->index))
3072d30e 3333 {
08b7917c 3334 bitmap_clear_bit (unreachable_blocks, bb->index);
3072d30e 3335 FOR_EACH_EDGE (e, ei, bb->preds)
48e1416a 3336 {
3072d30e 3337 mark_reachable_blocks (unreachable_blocks, e->src);
48e1416a 3338 }
3072d30e 3339 }
3340}
3341
3342/* Build the transfer functions for the function. */
3343
3344static void
3345dse_step3 (bool for_spills)
3346{
3347 basic_block bb;
fe672ac0 3348 sbitmap unreachable_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
3072d30e 3349 sbitmap_iterator sbi;
3350 bitmap all_ones = NULL;
3351 unsigned int i;
48e1416a 3352
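  /* Assume every block is unreachable; mark_reachable_blocks clears
     the bit of each block that can reach a block with no successors.  */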
53c5d9d4 3353 bitmap_ones (unreachable_blocks);
3072d30e 3354
ed7d889a 3355 FOR_ALL_BB_FN (bb, cfun)
3072d30e 3356 {
3357 bb_info_t bb_info = bb_table[bb->index];
3358 if (bb_info->gen)
3359 bitmap_clear (bb_info->gen);
3360 else
4fb07d00 3361 bb_info->gen = BITMAP_ALLOC (&dse_bitmap_obstack);
3072d30e 3362
3363 if (bb->index == ENTRY_BLOCK)
3364 ;
3365 else if (bb->index == EXIT_BLOCK)
3366 dse_step3_exit_block_scan (bb_info);
3367 else
3368 dse_step3_scan (for_spills, bb);
3369 if (EDGE_COUNT (bb->succs) == 0)
3370 mark_reachable_blocks (unreachable_blocks, bb);
3371
3372 /* If this is the second time dataflow is run, delete the old
3373 sets. */
3374 if (bb_info->in)
3375 BITMAP_FREE (bb_info->in);
3376 if (bb_info->out)
3377 BITMAP_FREE (bb_info->out);
3378 }
3379
3380 /* For any block in an infinite loop, we must initialize the out set
3381 to all ones. This could be expensive, but almost never occurs in
3382 practice. However, it is common in regression tests. */
0d211963 3383 EXECUTE_IF_SET_IN_BITMAP (unreachable_blocks, 0, i, sbi)
3072d30e 3384 {
3385 if (bitmap_bit_p (all_blocks, i))
3386 {
3387 bb_info_t bb_info = bb_table[i];
3388 if (!all_ones)
3389 {
3390 unsigned int j;
3391 group_info_t group;
3392
4fb07d00 3393 all_ones = BITMAP_ALLOC (&dse_bitmap_obstack);
f1f41a6c 3394 FOR_EACH_VEC_ELT (rtx_group_vec, j, group)
3072d30e 3395 bitmap_ior_into (all_ones, group->group_kill);
3396 }
3397 if (!bb_info->out)
3398 {
4fb07d00 3399 bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
3072d30e 3400 bitmap_copy (bb_info->out, all_ones);
3401 }
3402 }
3403 }
3404
3405 if (all_ones)
3406 BITMAP_FREE (all_ones);
3407 sbitmap_free (unreachable_blocks);
3408}
3409
3410
3411\f
3412/*----------------------------------------------------------------------------
3413 Fourth step.
3414
3415 Solve the bitvector equations.
3416----------------------------------------------------------------------------*/
3417
3418
3419/* Confluence function for blocks with no successors. Create an out
3420 set from the gen set of the exit block. This block logically has
3421 the exit block as a successor. */
3422
3423
3424
3425static void
3426dse_confluence_0 (basic_block bb)
3427{
3428 bb_info_t bb_info = bb_table[bb->index];
3429
3430 if (bb->index == EXIT_BLOCK)
3431 return;
3432
3433 if (!bb_info->out)
3434 {
4fb07d00 3435 bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
3072d30e 3436 bitmap_copy (bb_info->out, bb_table[EXIT_BLOCK]->gen);
3437 }
3438}
3439
3440/* Propagate the information from the in set of the dest of E to the
3441 out set of the src of E. If the various in or out sets are not
3442 there, that means they are all ones. */
3443
a703ca31 3444static bool
3072d30e 3445dse_confluence_n (edge e)
3446{
3447 bb_info_t src_info = bb_table[e->src->index];
3448 bb_info_t dest_info = bb_table[e->dest->index];
3449
3450 if (dest_info->in)
3451 {
3452 if (src_info->out)
3453 bitmap_and_into (src_info->out, dest_info->in);
3454 else
3455 {
4fb07d00 3456 src_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
3072d30e 3457 bitmap_copy (src_info->out, dest_info->in);
3458 }
3459 }
a703ca31 3460 return true;
3072d30e 3461}
3462
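/* A minimal, purely illustrative sketch (not part of the pass): the
   confluence step above amounts to intersecting the in sets of the
   successors, with a missing set standing for "all ones".  On plain
   word-sized masks, with made-up names, it would look like this.  */
#if 0
static unsigned int
dse_out_mask_sketch (const unsigned int *succ_in, int n_succ)
{
  unsigned int out = ~0U;	/* A missing set behaves as all ones.  */
  int i;

  for (i = 0; i < n_succ; i++)
    out &= succ_in[i];		/* OUT (src) = intersection of IN (dest).  */
  return out;
}
#endif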
3463
3464/* Propagate the info from the out to the in set of BB_INDEX's basic
48e1416a 3465 block. There are three cases:
3072d30e 3466
3467 1) The block has no kill set. In this case the kill set is all
3468 ones. It does not matter what the out set of the block is, none of
3469 the info can reach the top. The only thing that reaches the top is
3470 the gen set and we just copy the set.
3471
3472 2) There is a kill set but no out set and bb has successors. In
3473 this case we just return. Eventually an out set will be created and
3474 it is better to wait than to create a set of ones.
3475
3476 3) There is both a kill and out set. We apply the obvious transfer
3477 function.
3478*/
3479
3480static bool
3481dse_transfer_function (int bb_index)
3482{
3483 bb_info_t bb_info = bb_table[bb_index];
3484
3485 if (bb_info->kill)
3486 {
3487 if (bb_info->out)
3488 {
3489 /* Case 3 above. */
3490 if (bb_info->in)
48e1416a 3491 return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
3072d30e 3492 bb_info->out, bb_info->kill);
3493 else
3494 {
4fb07d00 3495 bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
48e1416a 3496 bitmap_ior_and_compl (bb_info->in, bb_info->gen,
3072d30e 3497 bb_info->out, bb_info->kill);
3498 return true;
3499 }
3500 }
3501 else
3502 /* Case 2 above. */
3503 return false;
3504 }
3505 else
3506 {
3507 /* Case 1 above. If there is already an in set, nothing
3508 happens. */
3509 if (bb_info->in)
3510 return false;
3511 else
3512 {
4fb07d00 3513 bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
3072d30e 3514 bitmap_copy (bb_info->in, bb_info->gen);
3515 return true;
3516 }
3517 }
3518}
3519
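/* A minimal, purely illustrative sketch (not part of the pass) of the
   update performed by dse_transfer_function in case 3 above: a
   position is dead at the top of the block if the block generates its
   deadness itself, or if it is dead at the bottom of the block and the
   block does not kill it.  On plain word-sized masks, with made-up
   names:  */
#if 0
static unsigned int
dse_in_mask_sketch (unsigned int gen, unsigned int out, unsigned int kill)
{
  /* Same shape as the bitmap_ior_and_compl call above:
     IN = GEN | (OUT & ~KILL).  */
  return gen | (out & ~kill);
}
#endif
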
3520/* Solve the dataflow equations. */
3521
3522static void
3523dse_step4 (void)
3524{
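  /* Iterate the confluence and transfer functions over the blocks in
     backwards order until the in and out sets reach a fixed point.  */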
48e1416a 3525 df_simple_dataflow (DF_BACKWARD, NULL, dse_confluence_0,
3526 dse_confluence_n, dse_transfer_function,
3527 all_blocks, df_get_postorder (DF_BACKWARD),
3072d30e 3528 df_get_n_blocks (DF_BACKWARD));
1ca59310 3529 if (dump_file && (dump_flags & TDF_DETAILS))
3072d30e 3530 {
3531 basic_block bb;
3532
3533 fprintf (dump_file, "\n\n*** Global dataflow info after analysis.\n");
ed7d889a 3534 FOR_ALL_BB_FN (bb, cfun)
3072d30e 3535 {
3536 bb_info_t bb_info = bb_table[bb->index];
3537
3538 df_print_bb_index (bb, dump_file);
3539 if (bb_info->in)
3540 bitmap_print (dump_file, bb_info->in, " in: ", "\n");
3541 else
3542 fprintf (dump_file, " in: *MISSING*\n");
3543 if (bb_info->gen)
3544 bitmap_print (dump_file, bb_info->gen, " gen: ", "\n");
3545 else
3546 fprintf (dump_file, " gen: *MISSING*\n");
3547 if (bb_info->kill)
3548 bitmap_print (dump_file, bb_info->kill, " kill: ", "\n");
3549 else
3550 fprintf (dump_file, " kill: *MISSING*\n");
3551 if (bb_info->out)
3552 bitmap_print (dump_file, bb_info->out, " out: ", "\n");
3553 else
3554 fprintf (dump_file, " out: *MISSING*\n\n");
3555 }
3556 }
3557}
3558
3559
3560\f
3561/*----------------------------------------------------------------------------
3562 Fifth step.
3563
bef304b8 3564 Delete the stores that can only be deleted using the global information.
3072d30e 3565----------------------------------------------------------------------------*/
3566
3567
3568static void
3569dse_step5_nospill (void)
3570{
3571 basic_block bb;
fc00614f 3572 FOR_EACH_BB_FN (bb, cfun)
3072d30e 3573 {
3574 bb_info_t bb_info = bb_table[bb->index];
3575 insn_info_t insn_info = bb_info->last_insn;
3576 bitmap v = bb_info->out;
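      /* V is the set of positions known to be dead just below the
	 current insn: it starts as the block's OUT set and is updated
	 as the walk moves backwards through the insns.  */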
3577
3578 while (insn_info)
3579 {
3580 bool deleted = false;
3581 if (dump_file && insn_info->insn)
3582 {
3583 fprintf (dump_file, "starting to process insn %d\n",
3584 INSN_UID (insn_info->insn));
3585 bitmap_print (dump_file, v, " v: ", "\n");
3586 }
3587
3588 /* There may have been code deleted by the dce pass run before
3589 this phase. */
48e1416a 3590 if (insn_info->insn
3072d30e 3591 && INSN_P (insn_info->insn)
3592 && (!insn_info->cannot_delete)
3593 && (!bitmap_empty_p (v)))
3594 {
3595 store_info_t store_info = insn_info->store_rec;
3596
3597 /* Try to delete the current insn. */
3598 deleted = true;
48e1416a 3599
3072d30e 3600 /* Skip the clobbers. */
3601 while (!store_info->is_set)
3602 store_info = store_info->next;
3603
3604 if (store_info->alias_set)
3605 deleted = false;
3606 else
3607 {
3608 HOST_WIDE_INT i;
48e1416a 3609 group_info_t group_info
f1f41a6c 3610 = rtx_group_vec[store_info->group_id];
48e1416a 3611
3072d30e 3612 for (i = store_info->begin; i < store_info->end; i++)
3613 {
3614 int index = get_bitmap_index (group_info, i);
48e1416a 3615
1ca59310 3616 if (dump_file && (dump_flags & TDF_DETAILS))
48e1416a 3617 fprintf (dump_file, "i = %d, index = %d\n", (int)i, index);
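		  /* An index of 0 means this byte has no assigned
		     position in the global bitmaps, so it cannot be
		     proven dead.  */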
3072d30e 3618 if (index == 0 || !bitmap_bit_p (v, index))
3619 {
1ca59310 3620 if (dump_file && (dump_flags & TDF_DETAILS))
48e1416a 3621 fprintf (dump_file, "failing at i = %d\n", (int)i);
3072d30e 3622 deleted = false;
3623 break;
3624 }
3625 }
3626 }
3627 if (deleted)
3628 {
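	      /* Before deleting the store, have check_for_inc_dec_1 emit
		 any auto-increment/decrement side effects of the insn as
		 separate insns; if that is not possible, keep the store.  */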
5a9ecd4a 3629 if (dbg_cnt (dse)
3630 && check_for_inc_dec_1 (insn_info))
3072d30e 3631 {
3072d30e 3632 delete_insn (insn_info->insn);
3633 insn_info->insn = NULL;
3634 globally_deleted++;
3635 }
3636 }
3637 }
3638	  /* We do not want to process the local info if the insn was
6dfdc153 3639	     deleted.  For instance, if the insn did a wild read, we
3072d30e 3640	     no longer need to trash the info.  */
48e1416a 3641 if (insn_info->insn
3072d30e 3642 && INSN_P (insn_info->insn)
3643 && (!deleted))
3644 {
3645 scan_stores_nospill (insn_info->store_rec, v, NULL);
3646 if (insn_info->wild_read)
3647 {
1ca59310 3648 if (dump_file && (dump_flags & TDF_DETAILS))
3072d30e 3649 fprintf (dump_file, "wild read\n");
3650 bitmap_clear (v);
3651 }
b4a708fb 3652 else if (insn_info->read_rec
3653 || insn_info->non_frame_wild_read)
3072d30e 3654 {
b4a708fb 3655 if (dump_file && !insn_info->non_frame_wild_read)
3072d30e 3656 fprintf (dump_file, "regular read\n");
1ca59310 3657 else if (dump_file && (dump_flags & TDF_DETAILS))
b4a708fb 3658 fprintf (dump_file, "non-frame wild read\n");
3072d30e 3659 scan_reads_nospill (insn_info, v, NULL);
3660 }
3661 }
48e1416a 3662
3072d30e 3663 insn_info = insn_info->prev_insn;
3664 }
3665 }
3666}
3667
3668
3072d30e 3669\f
3670/*----------------------------------------------------------------------------
3671 Sixth step.
3672
aa140b76 3673   Delete stores that are made redundant by an earlier store of the
3674   same value when that earlier store could not itself be eliminated.
3675----------------------------------------------------------------------------*/
3676
3677static void
3678dse_step6 (void)
3679{
3680 basic_block bb;
3681
ed7d889a 3682 FOR_ALL_BB_FN (bb, cfun)
aa140b76 3683 {
3684 bb_info_t bb_info = bb_table[bb->index];
3685 insn_info_t insn_info = bb_info->last_insn;
3686
3687 while (insn_info)
3688 {
3689 /* There may have been code deleted by the dce pass run before
3690 this phase. */
3691 if (insn_info->insn
3692 && INSN_P (insn_info->insn)
3693 && !insn_info->cannot_delete)
3694 {
3695 store_info_t s_info = insn_info->store_rec;
3696
3697 while (s_info && !s_info->is_set)
3698 s_info = s_info->next;
3699 if (s_info
3700 && s_info->redundant_reason
3701 && s_info->redundant_reason->insn
3702 && INSN_P (s_info->redundant_reason->insn))
3703 {
cccc26f6 3704 rtx_insn *rinsn = s_info->redundant_reason->insn;
1ca59310 3705 if (dump_file && (dump_flags & TDF_DETAILS))
aa140b76 3706 fprintf (dump_file, "Locally deleting insn %d "
3707 "because insn %d stores the "
3708 "same value and couldn't be "
3709 "eliminated\n",
3710 INSN_UID (insn_info->insn),
3711 INSN_UID (rinsn));
3712 delete_dead_store_insn (insn_info);
3713 }
3714 }
3715 insn_info = insn_info->prev_insn;
3716 }
3717 }
3718}
3719\f
3720/*----------------------------------------------------------------------------
3721 Seventh step.
3722
48e1416a 3723 Destroy everything left standing.
3072d30e 3724----------------------------------------------------------------------------*/
3725
48e1416a 3726static void
4fb07d00 3727dse_step7 (void)
3072d30e 3728{
4fb07d00 3729 bitmap_obstack_release (&dse_bitmap_obstack);
3730 obstack_free (&dse_obstack, NULL);
ce299759 3731
3072d30e 3732 end_alias_analysis ();
3733 free (bb_table);
c1f445d2 3734 delete rtx_group_table;
3735 rtx_group_table = NULL;
f1f41a6c 3736 rtx_group_vec.release ();
3072d30e 3737 BITMAP_FREE (all_blocks);
3738 BITMAP_FREE (scratch);
3739
55c5ac9f 3740 rtx_store_info_pool.release ();
3741 read_info_type::pool.release ();
3742 insn_info_type::pool.release ();
3743 dse_bb_info_type::pool.release ();
3744 group_info::pool.release ();
3745 deferred_change::pool.release ();
3072d30e 3746}
3747
3748
3072d30e 3749/* -------------------------------------------------------------------------
3750 DSE
3751 ------------------------------------------------------------------------- */
3752
3753/* Callback for running pass_rtl_dse. */
3754
3755static unsigned int
3756rest_of_handle_dse (void)
3757{
3072d30e 3758 df_set_flags (DF_DEFER_INSN_RESCAN);
3759
a1b0a968 3760 /* Need the notes since we must track live hardregs in the forwards
3761 direction. */
3762 df_note_add_problem ();
3763 df_analyze ();
3764
3072d30e 3765 dse_step0 ();
3766 dse_step1 ();
3767 dse_step2_init ();
3768 if (dse_step2_nospill ())
3769 {
3770 df_set_flags (DF_LR_RUN_DCE);
3771 df_analyze ();
1ca59310 3772 if (dump_file && (dump_flags & TDF_DETAILS))
3072d30e 3773 fprintf (dump_file, "doing global processing\n");
3774 dse_step3 (false);
3775 dse_step4 ();
3776 dse_step5_nospill ();
3777 }
3778
aa140b76 3779 dse_step6 ();
4fb07d00 3780 dse_step7 ();
3072d30e 3781
3782 if (dump_file)
3783 fprintf (dump_file, "dse: local deletions = %d, global deletions = %d, spill deletions = %d\n",
3784 locally_deleted, globally_deleted, spill_deleted);
1f91a12d 3785
3786 /* DSE can eliminate potentially-trapping MEMs.
3787 Remove any EH edges associated with them. */
3788 if ((locally_deleted || globally_deleted)
3789 && cfun->can_throw_non_call_exceptions
3790 && purge_all_dead_edges ())
3791 cleanup_cfg (0);
3792
3072d30e 3793 return 0;
3794}
3795
cbe8bda8 3796namespace {
3797
3798const pass_data pass_data_rtl_dse1 =
3799{
3800 RTL_PASS, /* type */
3801 "dse1", /* name */
3802 OPTGROUP_NONE, /* optinfo_flags */
cbe8bda8 3803 TV_DSE1, /* tv_id */
3804 0, /* properties_required */
3805 0, /* properties_provided */
3806 0, /* properties_destroyed */
3807 0, /* todo_flags_start */
8b88439e 3808 TODO_df_finish, /* todo_flags_finish */
3072d30e 3809};
3810
cbe8bda8 3811class pass_rtl_dse1 : public rtl_opt_pass
3812{
3813public:
9af5ce0c 3814 pass_rtl_dse1 (gcc::context *ctxt)
3815 : rtl_opt_pass (pass_data_rtl_dse1, ctxt)
cbe8bda8 3816 {}
3817
3818 /* opt_pass methods: */
31315c24 3819 virtual bool gate (function *)
3820 {
3821 return optimize > 0 && flag_dse && dbg_cnt (dse1);
3822 }
3823
65b0537f 3824 virtual unsigned int execute (function *) { return rest_of_handle_dse (); }
cbe8bda8 3825
3826}; // class pass_rtl_dse1
3827
3828} // anon namespace
3829
3830rtl_opt_pass *
3831make_pass_rtl_dse1 (gcc::context *ctxt)
3832{
3833 return new pass_rtl_dse1 (ctxt);
3834}
3835
3836namespace {
3837
3838const pass_data pass_data_rtl_dse2 =
3839{
3840 RTL_PASS, /* type */
3841 "dse2", /* name */
3842 OPTGROUP_NONE, /* optinfo_flags */
cbe8bda8 3843 TV_DSE2, /* tv_id */
3844 0, /* properties_required */
3845 0, /* properties_provided */
3846 0, /* properties_destroyed */
3847 0, /* todo_flags_start */
8b88439e 3848 TODO_df_finish, /* todo_flags_finish */
3072d30e 3849};
cbe8bda8 3850
3851class pass_rtl_dse2 : public rtl_opt_pass
3852{
3853public:
9af5ce0c 3854 pass_rtl_dse2 (gcc::context *ctxt)
3855 : rtl_opt_pass (pass_data_rtl_dse2, ctxt)
cbe8bda8 3856 {}
3857
3858 /* opt_pass methods: */
31315c24 3859 virtual bool gate (function *)
3860 {
3861 return optimize > 0 && flag_dse && dbg_cnt (dse2);
3862 }
3863
65b0537f 3864 virtual unsigned int execute (function *) { return rest_of_handle_dse (); }
cbe8bda8 3865
3866}; // class pass_rtl_dse2
3867
3868} // anon namespace
3869
3870rtl_opt_pass *
3871make_pass_rtl_dse2 (gcc::context *ctxt)
3872{
3873 return new pass_rtl_dse2 (ctxt);
3874}