1 /* RTL dead store elimination.
2 Copyright (C) 2005-2015 Free Software Foundation, Inc.
3
4 Contributed by Richard Sandiford <rsandifor@codesourcery.com>
5 and Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #undef BASELINE
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "rtl.h"
31 #include "tree.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "df.h"
35 #include "tm_p.h"
36 #include "gimple-ssa.h"
37 #include "expmed.h"
38 #include "optabs.h"
39 #include "regs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "flags.h"
46 #include "cfgrtl.h"
47 #include "cselib.h"
48 #include "tree-pass.h"
49 #include "dojump.h"
50 #include "explow.h"
51 #include "calls.h"
52 #include "varasm.h"
53 #include "stmt.h"
54 #include "expr.h"
55 #include "dbgcnt.h"
56 #include "params.h"
57 #include "internal-fn.h"
58 #include "rtl-iter.h"
59 #include "cfgcleanup.h"
60
61 /* This file contains three techniques for performing Dead Store
62 Elimination (dse).
63
64 * The first technique performs dse locally on any base address. It
65 is based on the cselib which is a local value numbering technique.
66 This technique is local to a basic block but deals with fairly
67 general addresses.
68
69 * The second technique performs dse globally but is restricted to
70 base addresses that are either constant or are relative to the
71 frame_pointer.
72
73 * The third technique, (which is only done after register allocation)
74 processes the spill slots. This differs from the second
75 technique because it takes advantage of the fact that spilling is
76 completely free from the effects of aliasing.
77
78 Logically, dse is a backwards dataflow problem. A store can be
79 deleted if it cannot be reached in the backward direction by any
80 use of the value being stored. However, the local technique uses a
81 forwards scan of the basic block because cselib requires that the
82 block be processed in that order.
83
84 The pass is logically broken into 7 steps:
85
86 0) Initialization.
87
88 1) The local algorithm, as well as scanning the insns for the two
89 global algorithms.
90
91 2) Analysis to see if the global algs are necessary. In the case
92 of stores based on a constant address, there must be at least two
93 stores to that address, to make it possible to delete some of the
94 stores. In the case of stores off of the frame or spill related
95 stores, only one store to an address is necessary because those
96 stores die at the end of the function.
97
98 3) Set up the global dataflow equations based on processing the
99 info parsed in the first step.
100
101 4) Solve the dataflow equations.
102
103 5) Delete the insns that the global analysis has indicated are
104 unnecessary.
105
106 6) Delete insns that store the same value as a preceding store
107 where the earlier store couldn't be eliminated.
108
109 7) Cleanup.
110
111 This step uses cselib and canon_rtx to build the largest expression
112 possible for each address. This pass is a forwards pass through
113 each basic block. From the point of view of the global technique,
114 the first pass could examine a block in either direction. The
115 forwards ordering is to accommodate cselib.
116
117 We make a simplifying assumption: addresses fall into four broad
118 categories:
119
120 1) base has rtx_varies_p == false, offset is constant.
121 2) base has rtx_varies_p == false, offset variable.
122 3) base has rtx_varies_p == true, offset constant.
123 4) base has rtx_varies_p == true, offset variable.
124
125 The local passes are able to process all 4 kinds of addresses. The
126 global pass only handles 1).
127
128 The global problem is formulated as follows:
129
130 A store, S1, to address A, where A is not relative to the stack
131 frame, can be eliminated if all paths from S1 to the end of the
132 function contain another store to A before a read to A.
133
134 If the address A is relative to the stack frame, a store S2 to A
135 can be eliminated if there are no paths from S2 that reach the
136 end of the function that read A before another store to A. In
137 this case S2 can be deleted if there are paths from S2 to the
138 end of the function that have no reads or writes to A. This
139 second case allows stores to the stack frame to be deleted that
140 would otherwise die when the function returns. This cannot be
141 done if stores_off_frame_dead_at_return is not true. See the
142 documentation of that variable for when it is false.
143
144 The global problem is formulated as a backwards set union
145 dataflow problem where the stores are the gens and reads are the
146 kills. Set union problems are rare and require some special
147 handling given our representation of bitmaps. A straightforward
148 implementation requires a lot of bitmaps filled with 1s.
149 These are expensive and cumbersome in our bitmap formulation so
150 care has been taken to avoid large vectors filled with 1s. See
151 the comments in bb_info and in the dataflow confluence functions
152 for details.
153
154 There are two places for further enhancements to this algorithm:
155
156 1) The original dse which was embedded in a pass called flow also
157 did local address forwarding. For example in
158
159 A <- r100
160 ... <- A
161
162 flow would replace the right hand side of the second insn with a
163 reference to r100. Most of the information is available to add this
164 to this pass. It has not been done because it is a lot of work in
165 the case that either r100 is assigned to between the first and
166 second insn and/or the second insn is a load of part of the value
167 stored by the first insn.
168
169 insn 5 in gcc.c-torture/compile/990203-1.c simple case.
170 insn 15 in gcc.c-torture/execute/20001017-2.c simple case.
171 insn 25 in gcc.c-torture/execute/20001026-1.c simple case.
172 insn 44 in gcc.c-torture/execute/20010910-1.c simple case.
173
174 2) The cleaning up of spill code is quite profitable. It currently
175 depends on reading tea leaves and chicken entrails left by reload.
176 This pass depends on reload creating a singleton alias set for each
177 spill slot and telling the next dse pass which of these alias sets
178 are the singletons. Rather than analyze the addresses of the
179 spills, dse's spill processing just does analysis of the loads and
180 stores that use those alias sets. There are three cases where this
181 falls short:
182
183 a) Reload sometimes creates the slot for one mode of access, and
184 then inserts loads and/or stores for a smaller mode. In this
185 case, the current code just punts on the slot. The proper thing
186 to do is to back out and use one bit vector position for each
187 byte of the entity associated with the slot. This depends on
188 KNOWING that reload always generates the accesses for each of the
189 bytes in some canonical (read that easy to understand several
190 passes after reload happens) way.
191
192 b) Reload sometimes decides that the spill slot it allocated was not
193 large enough for the mode and goes back and allocates more slots
194 with the same mode and alias set. The backout in this case is a
195 little more graceful than (a). In this case the slot is unmarked
196 as being a spill slot and if the final address comes out to be based
197 off the frame pointer, the global algorithm handles this slot.
198
199 c) For any pass that may prespill, there is currently no
200 mechanism to tell the dse pass that the slot being used has the
201 special properties that reload uses. It may be that all that is
202 required is to have those passes make the same calls that reload
203 does, assuming that the alias sets can be manipulated in the same
204 way. */
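/* To make the global formulation above concrete, a minimal sketch in
   illustrative C (the pass of course works on RTL, and G is assumed to
   be a non-frame location):

     G = 1;       <- S1 is dead: every path from here to the end of the
     G = 2;          function stores to G again before any read of G.
     return G;

   For a frame-based address the rule is relaxed: a store to a local
   that is never read again is dead simply because the slot dies at
   return, provided stores_off_frame_dead_at_return is true.  */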
205
206 /* There are limits to the size of constant offsets we model for the
207 global problem. There are certainly test cases that exceed this
208 limit; however, it is unlikely that there are important programs
209 that really have constant offsets this size. */
210 #define MAX_OFFSET (64 * 1024)
211
212 /* Obstack for the DSE dataflow bitmaps. We don't want to put these
213 on the default obstack because these bitmaps can grow quite large
214 (~2GB for the small (!) test case of PR54146) and we'll hold on to
215 all that memory until the end of the compiler run.
216 As a bonus, delete_tree_live_info can destroy all the bitmaps by just
217 releasing the whole obstack. */
218 static bitmap_obstack dse_bitmap_obstack;
219
220 /* Obstack for other data. As for above: Kinda nice to be able to
221 throw it all away at the end in one big sweep. */
222 static struct obstack dse_obstack;
223
224 /* Scratch bitmap for cselib's cselib_expand_value_rtx. */
225 static bitmap scratch = NULL;
226
227 struct insn_info_type;
228
229 /* This structure holds information about a candidate store. */
230 struct store_info
231 {
232
233 /* False means this is a clobber. */
234 bool is_set;
235
236 /* False if a single HOST_WIDE_INT bitmap is used for positions_needed. */
237 bool is_large;
238
239 /* The id of the mem group of the base address. If rtx_varies_p is
240 true, this is -1. Otherwise, it is the index into the group
241 table. */
242 int group_id;
243
244 /* This is the cselib value. */
245 cselib_val *cse_base;
246
247 /* The canonized mem. */
248 rtx mem;
249
250 /* Canonized MEM address for use by canon_true_dependence. */
251 rtx mem_addr;
252
253 /* If this is non-zero, it is the alias set of a spill location. */
254 alias_set_type alias_set;
255
256 /* The offset of the first byte and the offset of the byte just past
257 the last byte associated with the operation. */
258 HOST_WIDE_INT begin, end;
259
260 union
261 {
262 /* A bitmask as wide as the number of bytes in the word; bit I is
263 1 if byte BEGIN + I may still be needed. The store is unused if
264 all of the bits are 0. This is used if IS_LARGE is false. */
265 unsigned HOST_WIDE_INT small_bitmask;
266
267 struct
268 {
269 /* A bitmap with one bit per byte. A cleared bit means the position
270 is still needed. Used if IS_LARGE is true. */
271 bitmap bmap;
272
273 /* Number of set bits (i.e. unneeded bytes) in BMAP. If it is
274 equal to END - BEGIN, the whole store is unused. */
275 int count;
276 } large;
277 } positions_needed;
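/* Note that the two encodings above use opposite polarity: in
   SMALL_BITMASK a set bit means the byte is still needed, while in
   LARGE.BMAP a set bit means the byte is unneeded (see
   set_position_unneeded and any_positions_needed_p below).  */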
278
279 /* The next store info for this insn. */
280 struct store_info *next;
281
282 /* The right hand side of the store. This is used if there is a
283 subsequent reload of the mem's address somewhere later in the
284 basic block. */
285 rtx rhs;
286
287 /* If rhs is or holds a constant, this contains that constant,
288 otherwise NULL. */
289 rtx const_rhs;
290
291 /* Set if this store stores the same constant value as REDUNDANT_REASON
292 insn stored. These aren't eliminated early, because doing that
293 might prevent the earlier larger store from being eliminated. */
294 struct insn_info_type *redundant_reason;
295 };
296
297 /* Return a bitmask with the first N low bits set. */
298
299 static unsigned HOST_WIDE_INT
300 lowpart_bitmask (int n)
301 {
302 unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT) 0;
303 return mask >> (HOST_BITS_PER_WIDE_INT - n);
304 }
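/* For example, lowpart_bitmask (3) == 0x7.  Callers are expected to pass
   1 <= N <= HOST_BITS_PER_WIDE_INT; for N == 0 the shift above would be
   by the full word width, which is undefined in C.  */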
305
306 static object_allocator<store_info> cse_store_info_pool ("cse_store_info_pool");
307
308 static object_allocator<store_info> rtx_store_info_pool ("rtx_store_info_pool");
309
310 /* This structure holds information about a load. These are only
311 built for rtx bases. */
312 struct read_info_type
313 {
314 /* The id of the mem group of the base address. */
315 int group_id;
316
317 /* If this is non-zero, it is the alias set of a spill location. */
318 alias_set_type alias_set;
319
320 /* The offset of the first byte and the offset of the byte just past
321 the last byte associated with the operation. If begin == end == 0,
322 the read did not have a constant offset. */
323 int begin, end;
324
325 /* The mem being read. */
326 rtx mem;
327
328 /* The next read_info for this insn. */
329 struct read_info_type *next;
330 };
331 typedef struct read_info_type *read_info_t;
332
333 static object_allocator<read_info_type> read_info_type_pool ("read_info_pool");
334
335 /* One of these records is created for each insn. */
336
337 struct insn_info_type
338 {
339 /* Set true if the insn contains a store but the insn itself cannot
340 be deleted. This is set if the insn is a parallel and there is
341 more than one non-dead output or if the insn is in some way
342 volatile. */
343 bool cannot_delete;
344
345 /* This field is only used by the global algorithm. It is set true
346 if the insn contains any read of mem except for a (1). This is
347 also set if the insn is a call or has a clobber mem. If the insn
348 contains a wild read, the read_rec will be null. */
349 bool wild_read;
350
351 /* This is true only for CALL instructions which could potentially read
352 any non-frame memory location. This field is used by the global
353 algorithm. */
354 bool non_frame_wild_read;
355
356 /* This field is only used for the processing of const functions.
357 These functions cannot read memory, but they can read the stack
358 because that is where they may get their parms. We need to be
359 this conservative because, like the store motion pass, we don't
360 consider CALL_INSN_FUNCTION_USAGE when processing call insns.
361 Moreover, we need to distinguish two cases:
362 1. Before reload (register elimination), the stores related to
363 outgoing arguments are stack pointer based and thus deemed
364 of non-constant base in this pass. This requires special
365 handling but also means that the frame pointer based stores
366 need not be killed upon encountering a const function call.
367 2. After reload, the stores related to outgoing arguments can be
368 either stack pointer or hard frame pointer based. This means
369 that we have no other choice than also killing all the frame
370 pointer based stores upon encountering a const function call.
371 This field is set after reload for const function calls and before
372 reload for const tail function calls on targets where arg pointer
373 is the frame pointer. Having this set is less severe than a wild
374 read, it just means that all the frame related stores are killed
375 rather than all the stores. */
376 bool frame_read;
377
378 /* This field is only used for the processing of const functions.
379 It is set if the insn may contain a stack pointer based store. */
380 bool stack_pointer_based;
381
382 /* This is true if any of the sets within the store contains a
383 cselib base. Such stores can only be deleted by the local
384 algorithm. */
385 bool contains_cselib_groups;
386
387 /* The insn. */
388 rtx_insn *insn;
389
390 /* The list of mem sets or mem clobbers that are contained in this
391 insn. If the insn is deletable, it contains only one mem set.
392 But it could also contain clobbers. Insns that contain more than
393 one mem set are not deletable, but each of those mems is here in
394 order to provide info to delete other insns. */
395 store_info *store_rec;
396
397 /* The linked list of mem uses in this insn. Only the reads from
398 rtx bases are listed here. The reads to cselib bases are
399 completely processed during the first scan and so are never
400 created. */
401 read_info_t read_rec;
402
403 /* The live fixed registers. We assume only fixed registers can
404 cause trouble by being clobbered from an expanded pattern;
405 storing only the live fixed registers (rather than all registers)
406 means less memory needs to be allocated / copied for the individual
407 stores. */
408 regset fixed_regs_live;
409
410 /* The prev insn in the basic block. */
411 struct insn_info_type * prev_insn;
412
413 /* The linked list of insns that are in consideration for removal in
414 the forwards pass through the basic block. This pointer may be
415 trash as it is not cleared when a wild read occurs. The only
416 time it is guaranteed to be correct is when the traversal starts
417 at active_local_stores. */
418 struct insn_info_type * next_local_store;
419 };
420 typedef struct insn_info_type *insn_info_t;
421
422 static object_allocator<insn_info_type> insn_info_type_pool ("insn_info_pool");
423
424 /* The linked list of stores that are under consideration in this
425 basic block. */
426 static insn_info_t active_local_stores;
427 static int active_local_stores_len;
428
429 struct dse_bb_info_type
430 {
431 /* Pointer to the insn info for the last insn in the block. These
432 are linked so this is how all of the insns are reached. During
433 scanning this is the current insn being scanned. */
434 insn_info_t last_insn;
435
436 /* The info for the global dataflow problem. */
437
438
439 /* This is set if the transfer function should AND in the wild_read
440 bitmap before applying the kill and gen sets. That vector knocks
441 out most of the bits in the bitmap and thus speeds up the
442 operations. */
443 bool apply_wild_read;
444
445 /* The following 4 bitvectors hold information about which positions
446 of which stores are live or dead. They are indexed by
447 get_bitmap_index. */
448
449 /* The set of store positions that exist in this block before a wild read. */
450 bitmap gen;
451
452 /* The set of load positions that exist in this block above the
453 same position of a store. */
454 bitmap kill;
455
456 /* The set of stores that reach the top of the block without being
457 killed by a read.
458
459 Do not represent the in if it is all ones. Note that this is
460 what the bitvector should logically be initialized to for a set
461 intersection problem. However, like the kill set, this is too
462 expensive. So initially, the in set will only be created for the
463 exit block and any block that contains a wild read. */
464 bitmap in;
465
466 /* The set of stores that reach the bottom of the block from its
467 successors.
468
469 Do not represent the out if it is all ones. Note that this is
470 what the bitvector should logically be initialized to for a set
471 intersection problem. However, like the kill and in set, this is
472 too expensive. So what is done is that the confluence operator
473 just initializes the vector from one of the out sets of the
474 successors of the block. */
475 bitmap out;
476
477 /* The following bitvector is indexed by the reg number. It
478 contains the set of regs that are live at the current instruction
479 being processed. While it contains info for all of the
480 registers, only the hard registers are actually examined. It is used
481 to assure that shift and/or add sequences that are inserted do not
482 accidentally clobber live hard regs. */
483 bitmap regs_live;
484 };
485
486 typedef struct dse_bb_info_type *bb_info_t;
487
488 static object_allocator<dse_bb_info_type> dse_bb_info_type_pool
489 ("bb_info_pool");
490
491 /* Table to hold all bb_infos. */
492 static bb_info_t *bb_table;
493
494 /* There is a group_info for each rtx base that is used to reference
495 memory. There are not many rtx bases because they are very
496 limited in scope. */
497
498 struct group_info
499 {
500 /* The actual base of the address. */
501 rtx rtx_base;
502
503 /* The sequential id of the base. This allows us to have a
504 canonical ordering of these that is not based on addresses. */
505 int id;
506
507 /* True if there are any positions that are to be processed
508 globally. */
509 bool process_globally;
510
511 /* True if the base of this group is either the frame_pointer or
512 hard_frame_pointer. */
513 bool frame_related;
514
515 /* A mem wrapped around the base pointer for the group in order to do
516 read dependency. It must be given BLKmode in order to encompass all
517 the possible offsets from the base. */
518 rtx base_mem;
519
520 /* Canonized version of base_mem's address. */
521 rtx canon_base_addr;
522
523 /* These two sets of two bitmaps are used to keep track of how many
524 stores are actually referencing that position from this base. We
525 only do this for rtx bases as this will be used to assign
526 positions in the bitmaps for the global problem. Bit N is set in
527 store1 on the first store for offset N. Bit N is set in store2
528 for the second store to offset N. This is all we need since we
529 only care about offsets that have two or more stores for them.
530
531 The "_n" suffix is for offsets less than 0 and the "_p" suffix is
532 for 0 and greater offsets.
533
534 There is one special case here: for stores into the stack frame,
535 we will OR store1 into store2 before deciding which stores to look
536 at globally. This is because stores to the stack frame that have
537 no other reads before the end of the function can also be
538 deleted. */
539 bitmap store1_n, store1_p, store2_n, store2_p;
540
541 /* These bitmaps keep track of which offsets in this group escape
542 this function. An offset escapes if it corresponds to a named
543 variable whose addressable flag is set. */
544 bitmap escaped_n, escaped_p;
545
546 /* The positions in this bitmap have the same assignments as the in,
547 out, gen and kill bitmaps. This bitmap is all zeros except for
548 the positions that are occupied by stores for this group. */
549 bitmap group_kill;
550
551 /* The offset_map is used to map the offsets from this base into
552 positions in the global bitmaps. It is only created after all of
553 the stores have been scanned and we know which ones we
554 care about. */
555 int *offset_map_n, *offset_map_p;
556 int offset_map_size_n, offset_map_size_p;
557 };
558
559 static object_allocator<group_info> group_info_pool ("rtx_group_info_pool");
560
561 /* Index into the rtx_group_vec. */
562 static int rtx_group_next_id;
563
564
565 static vec<group_info *> rtx_group_vec;
566
567
568 /* This structure holds the set of changes that are being deferred
569 when removing a read operation. See replace_read. */
570 struct deferred_change
571 {
572
573 /* The mem that is being replaced. */
574 rtx *loc;
575
576 /* The reg it is being replaced with. */
577 rtx reg;
578
579 struct deferred_change *next;
580 };
581
582 static object_allocator<deferred_change> deferred_change_pool
583 ("deferred_change_pool");
584
585 static deferred_change *deferred_change_list = NULL;
586
587 /* The group that holds all of the clear_alias_sets. */
588 static group_info *clear_alias_group;
589
590 /* The modes of the clear_alias_sets. */
591 static htab_t clear_alias_mode_table;
592
593 /* Hash table element to look up the mode for an alias set. */
594 struct clear_alias_mode_holder
595 {
596 alias_set_type alias_set;
597 machine_mode mode;
598 };
599
600 /* This is true except if cfun->stdarg -- i.e. we cannot do
601 this for vararg functions because they play games with the frame. */
602 static bool stores_off_frame_dead_at_return;
603
604 /* Counter for stats. */
605 static int globally_deleted;
606 static int locally_deleted;
607 static int spill_deleted;
608
609 static bitmap all_blocks;
610
611 /* Locations that are killed by calls in the global phase. */
612 static bitmap kill_on_calls;
613
614 /* The number of bits used in the global bitmaps. */
615 static unsigned int current_position;
616 \f
617 /*----------------------------------------------------------------------------
618 Zeroth step.
619
620 Initialization.
621 ----------------------------------------------------------------------------*/
622
623
624 /* Find the entry associated with ALIAS_SET. */
625
626 static struct clear_alias_mode_holder *
627 clear_alias_set_lookup (alias_set_type alias_set)
628 {
629 struct clear_alias_mode_holder tmp_holder;
630 void **slot;
631
632 tmp_holder.alias_set = alias_set;
633 slot = htab_find_slot (clear_alias_mode_table, &tmp_holder, NO_INSERT);
634 gcc_assert (*slot);
635
636 return (struct clear_alias_mode_holder *) *slot;
637 }
638
639
640 /* Hashtable callbacks for maintaining the "bases" field of
641 store_group_info, given that the addresses are function invariants. */
642
643 struct invariant_group_base_hasher : nofree_ptr_hash <group_info>
644 {
645 static inline hashval_t hash (const group_info *);
646 static inline bool equal (const group_info *, const group_info *);
647 };
648
649 inline bool
650 invariant_group_base_hasher::equal (const group_info *gi1,
651 const group_info *gi2)
652 {
653 return rtx_equal_p (gi1->rtx_base, gi2->rtx_base);
654 }
655
656 inline hashval_t
657 invariant_group_base_hasher::hash (const group_info *gi)
658 {
659 int do_not_record;
660 return hash_rtx (gi->rtx_base, Pmode, &do_not_record, NULL, false);
661 }
662
663 /* Tables of group_info structures, hashed by base value. */
664 static hash_table<invariant_group_base_hasher> *rtx_group_table;
665
666
667 /* Get the GROUP for BASE. Add a new group if it is not there. */
668
669 static group_info *
670 get_group_info (rtx base)
671 {
672 struct group_info tmp_gi;
673 group_info *gi;
674 group_info **slot;
675
676 if (base)
677 {
678 /* Find the store_base_info structure for BASE, creating a new one
679 if necessary. */
680 tmp_gi.rtx_base = base;
681 slot = rtx_group_table->find_slot (&tmp_gi, INSERT);
682 gi = *slot;
683 }
684 else
685 {
686 if (!clear_alias_group)
687 {
688 clear_alias_group = gi = group_info_pool.allocate ();
689 memset (gi, 0, sizeof (struct group_info));
690 gi->id = rtx_group_next_id++;
691 gi->store1_n = BITMAP_ALLOC (&dse_bitmap_obstack);
692 gi->store1_p = BITMAP_ALLOC (&dse_bitmap_obstack);
693 gi->store2_n = BITMAP_ALLOC (&dse_bitmap_obstack);
694 gi->store2_p = BITMAP_ALLOC (&dse_bitmap_obstack);
695 gi->escaped_p = BITMAP_ALLOC (&dse_bitmap_obstack);
696 gi->escaped_n = BITMAP_ALLOC (&dse_bitmap_obstack);
697 gi->group_kill = BITMAP_ALLOC (&dse_bitmap_obstack);
698 gi->process_globally = false;
699 gi->offset_map_size_n = 0;
700 gi->offset_map_size_p = 0;
701 gi->offset_map_n = NULL;
702 gi->offset_map_p = NULL;
703 rtx_group_vec.safe_push (gi);
704 }
705 return clear_alias_group;
706 }
707
708 if (gi == NULL)
709 {
710 *slot = gi = group_info_pool.allocate ();
711 gi->rtx_base = base;
712 gi->id = rtx_group_next_id++;
713 gi->base_mem = gen_rtx_MEM (BLKmode, base);
714 gi->canon_base_addr = canon_rtx (base);
715 gi->store1_n = BITMAP_ALLOC (&dse_bitmap_obstack);
716 gi->store1_p = BITMAP_ALLOC (&dse_bitmap_obstack);
717 gi->store2_n = BITMAP_ALLOC (&dse_bitmap_obstack);
718 gi->store2_p = BITMAP_ALLOC (&dse_bitmap_obstack);
719 gi->escaped_p = BITMAP_ALLOC (&dse_bitmap_obstack);
720 gi->escaped_n = BITMAP_ALLOC (&dse_bitmap_obstack);
721 gi->group_kill = BITMAP_ALLOC (&dse_bitmap_obstack);
722 gi->process_globally = false;
723 gi->frame_related =
724 (base == frame_pointer_rtx) || (base == hard_frame_pointer_rtx);
725 gi->offset_map_size_n = 0;
726 gi->offset_map_size_p = 0;
727 gi->offset_map_n = NULL;
728 gi->offset_map_p = NULL;
729 rtx_group_vec.safe_push (gi);
730 }
731
732 return gi;
733 }
734
735
736 /* Initialization of data structures. */
737
738 static void
739 dse_step0 (void)
740 {
741 locally_deleted = 0;
742 globally_deleted = 0;
743 spill_deleted = 0;
744
745 bitmap_obstack_initialize (&dse_bitmap_obstack);
746 gcc_obstack_init (&dse_obstack);
747
748 scratch = BITMAP_ALLOC (&reg_obstack);
749 kill_on_calls = BITMAP_ALLOC (&dse_bitmap_obstack);
750
751
752 rtx_group_table = new hash_table<invariant_group_base_hasher> (11);
753
754 bb_table = XNEWVEC (bb_info_t, last_basic_block_for_fn (cfun));
755 rtx_group_next_id = 0;
756
757 stores_off_frame_dead_at_return = !cfun->stdarg;
758
759 init_alias_analysis ();
760
761 clear_alias_group = NULL;
762 }
763
764
765 \f
766 /*----------------------------------------------------------------------------
767 First step.
768
769 Scan all of the insns. Any random ordering of the blocks is fine.
770 Each block is scanned in forward order to accommodate cselib which
771 is used to remove stores with non-constant bases.
772 ----------------------------------------------------------------------------*/
773
774 /* Delete all of the store_info recs from INSN_INFO. */
775
776 static void
777 free_store_info (insn_info_t insn_info)
778 {
779 store_info *cur = insn_info->store_rec;
780 while (cur)
781 {
782 store_info *next = cur->next;
783 if (cur->is_large)
784 BITMAP_FREE (cur->positions_needed.large.bmap);
785 if (cur->cse_base)
786 cse_store_info_pool.remove (cur);
787 else
788 rtx_store_info_pool.remove (cur);
789 cur = next;
790 }
791
792 insn_info->cannot_delete = true;
793 insn_info->contains_cselib_groups = false;
794 insn_info->store_rec = NULL;
795 }
796
797 struct note_add_store_info
798 {
799 rtx_insn *first, *current;
800 regset fixed_regs_live;
801 bool failure;
802 };
803
804 /* Callback for emit_inc_dec_insn_before via note_stores.
805 Check if a register is clobbered which is live afterwards. */
806
807 static void
808 note_add_store (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *data)
809 {
810 rtx_insn *insn;
811 note_add_store_info *info = (note_add_store_info *) data;
812
813 if (!REG_P (loc))
814 return;
815
816 /* If this register is referenced by the current or an earlier insn,
817 that's OK. E.g. this applies to the register that is being incremented
818 with this addition. */
819 for (insn = info->first;
820 insn != NEXT_INSN (info->current);
821 insn = NEXT_INSN (insn))
822 if (reg_referenced_p (loc, PATTERN (insn)))
823 return;
824
825 /* If we come here, we have a clobber of a register that's only OK
826 if that register is not live. If we don't have liveness information
827 available, fail now. */
828 if (!info->fixed_regs_live)
829 {
830 info->failure = true;
831 return;
832 }
833 /* Now check if this is a live fixed register. */
834 unsigned int end_regno = END_REGNO (loc);
835 for (unsigned int regno = REGNO (loc); regno < end_regno; ++regno)
836 if (REGNO_REG_SET_P (info->fixed_regs_live, regno))
837 info->failure = true;
838 }
839
840 /* Callback for for_each_inc_dec that emits an INSN that sets DEST to
841 SRC + SRCOFF before insn ARG. */
842
843 static int
844 emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED,
845 rtx op ATTRIBUTE_UNUSED,
846 rtx dest, rtx src, rtx srcoff, void *arg)
847 {
848 insn_info_t insn_info = (insn_info_t) arg;
849 rtx_insn *insn = insn_info->insn, *new_insn, *cur;
850 note_add_store_info info;
851
852 /* We can reuse all operands without copying, because we are about
853 to delete the insn that contained it. */
854 if (srcoff)
855 {
856 start_sequence ();
857 emit_insn (gen_add3_insn (dest, src, srcoff));
858 new_insn = get_insns ();
859 end_sequence ();
860 }
861 else
862 new_insn = gen_move_insn (dest, src);
863 info.first = new_insn;
864 info.fixed_regs_live = insn_info->fixed_regs_live;
865 info.failure = false;
866 for (cur = new_insn; cur; cur = NEXT_INSN (cur))
867 {
868 info.current = cur;
869 note_stores (PATTERN (cur), note_add_store, &info);
870 }
871
872 /* If a failure was flagged above, return 1 so that for_each_inc_dec will
873 return it immediately, communicating the failure to its caller. */
874 if (info.failure)
875 return 1;
876
877 emit_insn_before (new_insn, insn);
878
879 return 0;
880 }
881
882 /* Before we delete INSN_INFO->INSN, make sure that the auto inc/dec, if it
883 is there, is split into a separate insn.
884 Return true on success (or if there was nothing to do), false on failure. */
885
886 static bool
887 check_for_inc_dec_1 (insn_info_t insn_info)
888 {
889 rtx_insn *insn = insn_info->insn;
890 rtx note = find_reg_note (insn, REG_INC, NULL_RTX);
891 if (note)
892 return for_each_inc_dec (PATTERN (insn), emit_inc_dec_insn_before,
893 insn_info) == 0;
894 return true;
895 }
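/* For example, an insn (set (mem (post_inc (reg sp))) (reg r1)) that
   turns out to be a dead store still has the side effect of bumping sp.
   The helpers above first emit an explicit "sp = sp + size" add in front
   of the insn, so that deleting the store does not lose the increment;
   if that cannot be done safely, the store is kept.  */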
896
897
898 /* Entry point for postreload. If you work on reload_cse, or you need this
899 anywhere else, consider if you can provide register liveness information
900 and add a parameter to this function so that it can be passed down in
901 insn_info.fixed_regs_live. */
902 bool
903 check_for_inc_dec (rtx_insn *insn)
904 {
905 insn_info_type insn_info;
906 rtx note;
907
908 insn_info.insn = insn;
909 insn_info.fixed_regs_live = NULL;
910 note = find_reg_note (insn, REG_INC, NULL_RTX);
911 if (note)
912 return for_each_inc_dec (PATTERN (insn), emit_inc_dec_insn_before,
913 &insn_info) == 0;
914 return true;
915 }
916
917 /* Delete the insn and free all of the fields inside INSN_INFO. */
918
919 static void
920 delete_dead_store_insn (insn_info_t insn_info)
921 {
922 read_info_t read_info;
923
924 if (!dbg_cnt (dse))
925 return;
926
927 if (!check_for_inc_dec_1 (insn_info))
928 return;
929 if (dump_file && (dump_flags & TDF_DETAILS))
930 {
931 fprintf (dump_file, "Locally deleting insn %d ",
932 INSN_UID (insn_info->insn));
933 if (insn_info->store_rec->alias_set)
934 fprintf (dump_file, "alias set %d\n",
935 (int) insn_info->store_rec->alias_set);
936 else
937 fprintf (dump_file, "\n");
938 }
939
940 free_store_info (insn_info);
941 read_info = insn_info->read_rec;
942
943 while (read_info)
944 {
945 read_info_t next = read_info->next;
946 read_info_type_pool.remove (read_info);
947 read_info = next;
948 }
949 insn_info->read_rec = NULL;
950
951 delete_insn (insn_info->insn);
952 locally_deleted++;
953 insn_info->insn = NULL;
954
955 insn_info->wild_read = false;
956 }
957
958 /* Return whether DECL, a local variable, can possibly escape the current
959 function scope. */
960
961 static bool
962 local_variable_can_escape (tree decl)
963 {
964 if (TREE_ADDRESSABLE (decl))
965 return true;
966
967 /* If this is a partitioned variable, we need to consider all the variables
968 in the partition. This is necessary because a store into one of them can
969 be replaced with a store into another and this may not change the outcome
970 of the escape analysis. */
971 if (cfun->gimple_df->decls_to_pointers != NULL)
972 {
973 tree *namep = cfun->gimple_df->decls_to_pointers->get (decl);
974 if (namep)
975 return TREE_ADDRESSABLE (*namep);
976 }
977
978 return false;
979 }
980
981 /* Return whether EXPR can possibly escape the current function scope. */
982
983 static bool
984 can_escape (tree expr)
985 {
986 tree base;
987 if (!expr)
988 return true;
989 base = get_base_address (expr);
990 if (DECL_P (base)
991 && !may_be_aliased (base)
992 && !(TREE_CODE (base) == VAR_DECL
993 && !DECL_EXTERNAL (base)
994 && !TREE_STATIC (base)
995 && local_variable_can_escape (base)))
996 return false;
997 return true;
998 }
999
1000 /* Set the store* bitmaps and offset_map_size* fields in GROUP based on
1001 OFFSET and WIDTH. */
1002
1003 static void
1004 set_usage_bits (group_info *group, HOST_WIDE_INT offset, HOST_WIDE_INT width,
1005 tree expr)
1006 {
1007 HOST_WIDE_INT i;
1008 bool expr_escapes = can_escape (expr);
1009 if (offset > -MAX_OFFSET && offset + width < MAX_OFFSET)
1010 for (i=offset; i<offset+width; i++)
1011 {
1012 bitmap store1;
1013 bitmap store2;
1014 bitmap escaped;
1015 int ai;
1016 if (i < 0)
1017 {
1018 store1 = group->store1_n;
1019 store2 = group->store2_n;
1020 escaped = group->escaped_n;
1021 ai = -i;
1022 }
1023 else
1024 {
1025 store1 = group->store1_p;
1026 store2 = group->store2_p;
1027 escaped = group->escaped_p;
1028 ai = i;
1029 }
1030
1031 if (!bitmap_set_bit (store1, ai))
1032 bitmap_set_bit (store2, ai);
1033 else
1034 {
1035 if (i < 0)
1036 {
1037 if (group->offset_map_size_n < ai)
1038 group->offset_map_size_n = ai;
1039 }
1040 else
1041 {
1042 if (group->offset_map_size_p < ai)
1043 group->offset_map_size_p = ai;
1044 }
1045 }
1046 if (expr_escapes)
1047 bitmap_set_bit (escaped, ai);
1048 }
1049 }
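/* For example, a 4-byte store at OFFSET == -2 marks bytes -2 and -1 in
   the "_n" bitmaps (at indices 2 and 1) and bytes 0 and 1 in the "_p"
   bitmaps (at indices 0 and 1); a second store to any of those bytes
   additionally sets the corresponding bit in store2_n/store2_p.  */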
1050
1051 static void
1052 reset_active_stores (void)
1053 {
1054 active_local_stores = NULL;
1055 active_local_stores_len = 0;
1056 }
1057
1058 /* Free all READ_REC of the LAST_INSN of BB_INFO. */
1059
1060 static void
1061 free_read_records (bb_info_t bb_info)
1062 {
1063 insn_info_t insn_info = bb_info->last_insn;
1064 read_info_t *ptr = &insn_info->read_rec;
1065 while (*ptr)
1066 {
1067 read_info_t next = (*ptr)->next;
1068 if ((*ptr)->alias_set == 0)
1069 {
1070 read_info_type_pool.remove (*ptr);
1071 *ptr = next;
1072 }
1073 else
1074 ptr = &(*ptr)->next;
1075 }
1076 }
1077
1078 /* Set the BB_INFO so that the last insn is marked as a wild read. */
1079
1080 static void
1081 add_wild_read (bb_info_t bb_info)
1082 {
1083 insn_info_t insn_info = bb_info->last_insn;
1084 insn_info->wild_read = true;
1085 free_read_records (bb_info);
1086 reset_active_stores ();
1087 }
1088
1089 /* Set the BB_INFO so that the last insn is marked as a wild read of
1090 non-frame locations. */
1091
1092 static void
1093 add_non_frame_wild_read (bb_info_t bb_info)
1094 {
1095 insn_info_t insn_info = bb_info->last_insn;
1096 insn_info->non_frame_wild_read = true;
1097 free_read_records (bb_info);
1098 reset_active_stores ();
1099 }
1100
1101 /* Return true if X is a constant or one of the registers that behave
1102 as a constant over the life of a function. This is equivalent to
1103 !rtx_varies_p for memory addresses. */
1104
1105 static bool
1106 const_or_frame_p (rtx x)
1107 {
1108 if (CONSTANT_P (x))
1109 return true;
1110
1111 if (GET_CODE (x) == REG)
1112 {
1113 /* Note that we have to test for the actual rtx used for the frame
1114 and arg pointers and not just the register number in case we have
1115 eliminated the frame and/or arg pointer and are using it
1116 for pseudos. */
1117 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
1118 /* The arg pointer varies if it is not a fixed register. */
1119 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
1120 || x == pic_offset_table_rtx)
1121 return true;
1122 return false;
1123 }
1124
1125 return false;
1126 }
1127
1128 /* Take all reasonable action to put the address of MEM into the form
1129 that we can do analysis on.
1130
1131 The gold standard is to get the address into the form: address +
1132 OFFSET where address is something that rtx_varies_p considers a
1133 constant. When we can get the address in this form, we can do
1134 global analysis on it. Note that for constant bases, address is
1135 not actually returned, only the group_id. The address can be
1136 obtained from that.
1137
1138 If that fails, we try cselib to get a value we can at least use
1139 locally. If that fails we return false.
1140
1141 The GROUP_ID is set to -1 for cselib bases and the index of the
1142 group for non_varying bases.
1143
1144 FOR_READ is true if this is a mem read and false if not. */
1145
1146 static bool
1147 canon_address (rtx mem,
1148 alias_set_type *alias_set_out,
1149 int *group_id,
1150 HOST_WIDE_INT *offset,
1151 cselib_val **base)
1152 {
1153 machine_mode address_mode = get_address_mode (mem);
1154 rtx mem_address = XEXP (mem, 0);
1155 rtx expanded_address, address;
1156 int expanded;
1157
1158 *alias_set_out = 0;
1159
1160 cselib_lookup (mem_address, address_mode, 1, GET_MODE (mem));
1161
1162 if (dump_file && (dump_flags & TDF_DETAILS))
1163 {
1164 fprintf (dump_file, " mem: ");
1165 print_inline_rtx (dump_file, mem_address, 0);
1166 fprintf (dump_file, "\n");
1167 }
1168
1169 /* First see if just canon_rtx (mem_address) is const or frame,
1170 if not, try cselib_expand_value_rtx and call canon_rtx on that. */
1171 address = NULL_RTX;
1172 for (expanded = 0; expanded < 2; expanded++)
1173 {
1174 if (expanded)
1175 {
1176 /* Use cselib to replace all of the reg references with the full
1177 expression. This will take care of the case where we have
1178
1179 r_x = base + offset;
1180 val = *r_x;
1181
1182 by making it into
1183
1184 val = *(base + offset); */
1185
1186 expanded_address = cselib_expand_value_rtx (mem_address,
1187 scratch, 5);
1188
1189 /* If this fails, just go with the address from first
1190 iteration. */
1191 if (!expanded_address)
1192 break;
1193 }
1194 else
1195 expanded_address = mem_address;
1196
1197 /* Split the address into canonical BASE + OFFSET terms. */
1198 address = canon_rtx (expanded_address);
1199
1200 *offset = 0;
1201
1202 if (dump_file && (dump_flags & TDF_DETAILS))
1203 {
1204 if (expanded)
1205 {
1206 fprintf (dump_file, "\n after cselib_expand address: ");
1207 print_inline_rtx (dump_file, expanded_address, 0);
1208 fprintf (dump_file, "\n");
1209 }
1210
1211 fprintf (dump_file, "\n after canon_rtx address: ");
1212 print_inline_rtx (dump_file, address, 0);
1213 fprintf (dump_file, "\n");
1214 }
1215
1216 if (GET_CODE (address) == CONST)
1217 address = XEXP (address, 0);
1218
1219 if (GET_CODE (address) == PLUS
1220 && CONST_INT_P (XEXP (address, 1)))
1221 {
1222 *offset = INTVAL (XEXP (address, 1));
1223 address = XEXP (address, 0);
1224 }
1225
1226 if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (mem))
1227 && const_or_frame_p (address))
1228 {
1229 group_info *group = get_group_info (address);
1230
1231 if (dump_file && (dump_flags & TDF_DETAILS))
1232 fprintf (dump_file, " gid=%d offset=%d \n",
1233 group->id, (int)*offset);
1234 *base = NULL;
1235 *group_id = group->id;
1236 return true;
1237 }
1238 }
1239
1240 *base = cselib_lookup (address, address_mode, true, GET_MODE (mem));
1241 *group_id = -1;
1242
1243 if (*base == NULL)
1244 {
1245 if (dump_file && (dump_flags & TDF_DETAILS))
1246 fprintf (dump_file, " no cselib val - should be a wild read.\n");
1247 return false;
1248 }
1249 if (dump_file && (dump_flags & TDF_DETAILS))
1250 fprintf (dump_file, " varying cselib base=%u:%u offset = %d\n",
1251 (*base)->uid, (*base)->hash, (int)*offset);
1252 return true;
1253 }
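/* For example, for a MEM whose address is (plus (reg fp) (const_int 8)),
   the loop above splits off the constant, const_or_frame_p accepts the
   frame pointer, and we return with *GROUP_ID set to the frame group's
   id, *OFFSET == 8 and *BASE == NULL.  An address based on a pseudo that
   cselib cannot expand to a constant or frame form instead comes back
   with *GROUP_ID == -1 and *BASE set to its cselib value.  */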
1254
1255
1256 /* Clear the rhs field from the active_local_stores array. */
1257
1258 static void
1259 clear_rhs_from_active_local_stores (void)
1260 {
1261 insn_info_t ptr = active_local_stores;
1262
1263 while (ptr)
1264 {
1265 store_info *store_info = ptr->store_rec;
1266 /* Skip the clobbers. */
1267 while (!store_info->is_set)
1268 store_info = store_info->next;
1269
1270 store_info->rhs = NULL;
1271 store_info->const_rhs = NULL;
1272
1273 ptr = ptr->next_local_store;
1274 }
1275 }
1276
1277
1278 /* Mark the byte POS bytes from the beginning of store S_INFO as unneeded. */
1279
1280 static inline void
1281 set_position_unneeded (store_info *s_info, int pos)
1282 {
1283 if (__builtin_expect (s_info->is_large, false))
1284 {
1285 if (bitmap_set_bit (s_info->positions_needed.large.bmap, pos))
1286 s_info->positions_needed.large.count++;
1287 }
1288 else
1289 s_info->positions_needed.small_bitmask
1290 &= ~(((unsigned HOST_WIDE_INT) 1) << pos);
1291 }
1292
1293 /* Mark the whole store S_INFO as unneeded. */
1294
1295 static inline void
1296 set_all_positions_unneeded (store_info *s_info)
1297 {
1298 if (__builtin_expect (s_info->is_large, false))
1299 {
1300 int pos, end = s_info->end - s_info->begin;
1301 for (pos = 0; pos < end; pos++)
1302 bitmap_set_bit (s_info->positions_needed.large.bmap, pos);
1303 s_info->positions_needed.large.count = end;
1304 }
1305 else
1306 s_info->positions_needed.small_bitmask = (unsigned HOST_WIDE_INT) 0;
1307 }
1308
1309 /* Return TRUE if any bytes from S_INFO store are needed. */
1310
1311 static inline bool
1312 any_positions_needed_p (store_info *s_info)
1313 {
1314 if (__builtin_expect (s_info->is_large, false))
1315 return (s_info->positions_needed.large.count
1316 < s_info->end - s_info->begin);
1317 else
1318 return (s_info->positions_needed.small_bitmask
1319 != (unsigned HOST_WIDE_INT) 0);
1320 }
1321
1322 /* Return TRUE if all bytes START through START+WIDTH-1 from S_INFO
1323 store are needed. */
1324
1325 static inline bool
1326 all_positions_needed_p (store_info *s_info, int start, int width)
1327 {
1328 if (__builtin_expect (s_info->is_large, false))
1329 {
1330 int end = start + width;
1331 while (start < end)
1332 if (bitmap_bit_p (s_info->positions_needed.large.bmap, start++))
1333 return false;
1334 return true;
1335 }
1336 else
1337 {
1338 unsigned HOST_WIDE_INT mask = lowpart_bitmask (width) << start;
1339 return (s_info->positions_needed.small_bitmask & mask) == mask;
1340 }
1341 }
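/* For example, a 4-byte store (BEGIN == 0, END == 4) using the small
   encoding starts with SMALL_BITMASK == 0xf; all_positions_needed_p
   (s_info, 1, 2) then tests the mask 0x6, and once later stores have
   marked all four bytes unneeded the mask is 0 and any_positions_needed_p
   allows the insn to be deleted.  */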
1342
1343
1344 static rtx get_stored_val (store_info *, machine_mode, HOST_WIDE_INT,
1345 HOST_WIDE_INT, basic_block, bool);
1346
1347
1348 /* BODY is an instruction pattern that belongs to INSN. Return 1 if
1349 there is a candidate store, after adding it to the appropriate
1350 local store group if so. */
1351
1352 static int
1353 record_store (rtx body, bb_info_t bb_info)
1354 {
1355 rtx mem, rhs, const_rhs, mem_addr;
1356 HOST_WIDE_INT offset = 0;
1357 HOST_WIDE_INT width = 0;
1358 alias_set_type spill_alias_set;
1359 insn_info_t insn_info = bb_info->last_insn;
1360 store_info *store_info = NULL;
1361 int group_id;
1362 cselib_val *base = NULL;
1363 insn_info_t ptr, last, redundant_reason;
1364 bool store_is_unused;
1365
1366 if (GET_CODE (body) != SET && GET_CODE (body) != CLOBBER)
1367 return 0;
1368
1369 mem = SET_DEST (body);
1370
1371 /* If this is not used, then this cannot be used to keep the insn
1372 from being deleted. On the other hand, it does provide something
1373 that can be used to prove that another store is dead. */
1374 store_is_unused
1375 = (find_reg_note (insn_info->insn, REG_UNUSED, mem) != NULL);
1376
1377 /* Check whether that value is a suitable memory location. */
1378 if (!MEM_P (mem))
1379 {
1380 /* If the set or clobber is unused, then it does not affect our
1381 ability to get rid of the entire insn. */
1382 if (!store_is_unused)
1383 insn_info->cannot_delete = true;
1384 return 0;
1385 }
1386
1387 /* At this point we know mem is a mem. */
1388 if (GET_MODE (mem) == BLKmode)
1389 {
1390 if (GET_CODE (XEXP (mem, 0)) == SCRATCH)
1391 {
1392 if (dump_file && (dump_flags & TDF_DETAILS))
1393 fprintf (dump_file, " adding wild read for (clobber (mem:BLK (scratch))\n");
1394 add_wild_read (bb_info);
1395 insn_info->cannot_delete = true;
1396 return 0;
1397 }
1398 /* Handle (set (mem:BLK (addr) [... S36 ...]) (const_int 0))
1399 as memset (addr, 0, 36); */
1400 else if (!MEM_SIZE_KNOWN_P (mem)
1401 || MEM_SIZE (mem) <= 0
1402 || MEM_SIZE (mem) > MAX_OFFSET
1403 || GET_CODE (body) != SET
1404 || !CONST_INT_P (SET_SRC (body)))
1405 {
1406 if (!store_is_unused)
1407 {
1408 /* If the set or clobber is unused, then it does not affect our
1409 ability to get rid of the entire insn. */
1410 insn_info->cannot_delete = true;
1411 clear_rhs_from_active_local_stores ();
1412 }
1413 return 0;
1414 }
1415 }
1416
1417 /* We can still process a volatile mem, we just cannot delete it. */
1418 if (MEM_VOLATILE_P (mem))
1419 insn_info->cannot_delete = true;
1420
1421 if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
1422 {
1423 clear_rhs_from_active_local_stores ();
1424 return 0;
1425 }
1426
1427 if (GET_MODE (mem) == BLKmode)
1428 width = MEM_SIZE (mem);
1429 else
1430 width = GET_MODE_SIZE (GET_MODE (mem));
1431
1432 if (spill_alias_set)
1433 {
1434 bitmap store1 = clear_alias_group->store1_p;
1435 bitmap store2 = clear_alias_group->store2_p;
1436
1437 gcc_assert (GET_MODE (mem) != BLKmode);
1438
1439 if (!bitmap_set_bit (store1, spill_alias_set))
1440 bitmap_set_bit (store2, spill_alias_set);
1441
1442 if (clear_alias_group->offset_map_size_p < spill_alias_set)
1443 clear_alias_group->offset_map_size_p = spill_alias_set;
1444
1445 store_info = rtx_store_info_pool.allocate ();
1446
1447 if (dump_file && (dump_flags & TDF_DETAILS))
1448 fprintf (dump_file, " processing spill store %d(%s)\n",
1449 (int) spill_alias_set, GET_MODE_NAME (GET_MODE (mem)));
1450 }
1451 else if (group_id >= 0)
1452 {
1453 /* In the restrictive case where the base is a constant or the
1454 frame pointer we can do global analysis. */
1455
1456 group_info *group
1457 = rtx_group_vec[group_id];
1458 tree expr = MEM_EXPR (mem);
1459
1460 store_info = rtx_store_info_pool.allocate ();
1461 set_usage_bits (group, offset, width, expr);
1462
1463 if (dump_file && (dump_flags & TDF_DETAILS))
1464 fprintf (dump_file, " processing const base store gid=%d[%d..%d)\n",
1465 group_id, (int)offset, (int)(offset+width));
1466 }
1467 else
1468 {
1469 if (may_be_sp_based_p (XEXP (mem, 0)))
1470 insn_info->stack_pointer_based = true;
1471 insn_info->contains_cselib_groups = true;
1472
1473 store_info = cse_store_info_pool.allocate ();
1474 group_id = -1;
1475
1476 if (dump_file && (dump_flags & TDF_DETAILS))
1477 fprintf (dump_file, " processing cselib store [%d..%d)\n",
1478 (int)offset, (int)(offset+width));
1479 }
1480
1481 const_rhs = rhs = NULL_RTX;
1482 if (GET_CODE (body) == SET
1483 /* No place to keep the value after ra. */
1484 && !reload_completed
1485 && (REG_P (SET_SRC (body))
1486 || GET_CODE (SET_SRC (body)) == SUBREG
1487 || CONSTANT_P (SET_SRC (body)))
1488 && !MEM_VOLATILE_P (mem)
1489 /* Sometimes the store and reload are used for truncation and
1490 rounding. */
1491 && !(FLOAT_MODE_P (GET_MODE (mem)) && (flag_float_store)))
1492 {
1493 rhs = SET_SRC (body);
1494 if (CONSTANT_P (rhs))
1495 const_rhs = rhs;
1496 else if (body == PATTERN (insn_info->insn))
1497 {
1498 rtx tem = find_reg_note (insn_info->insn, REG_EQUAL, NULL_RTX);
1499 if (tem && CONSTANT_P (XEXP (tem, 0)))
1500 const_rhs = XEXP (tem, 0);
1501 }
1502 if (const_rhs == NULL_RTX && REG_P (rhs))
1503 {
1504 rtx tem = cselib_expand_value_rtx (rhs, scratch, 5);
1505
1506 if (tem && CONSTANT_P (tem))
1507 const_rhs = tem;
1508 }
1509 }
1510
1511 /* Check to see if this store causes some other stores to be
1512 dead. */
1513 ptr = active_local_stores;
1514 last = NULL;
1515 redundant_reason = NULL;
1516 mem = canon_rtx (mem);
1517 /* For alias_set != 0 canon_true_dependence should never be called. */
1518 if (spill_alias_set)
1519 mem_addr = NULL_RTX;
1520 else
1521 {
1522 if (group_id < 0)
1523 mem_addr = base->val_rtx;
1524 else
1525 {
1526 group_info *group
1527 = rtx_group_vec[group_id];
1528 mem_addr = group->canon_base_addr;
1529 }
1530 /* get_addr can only handle VALUE but cannot handle expr like:
1531 VALUE + OFFSET, so call get_addr to get original addr for
1532 mem_addr before plus_constant. */
1533 mem_addr = get_addr (mem_addr);
1534 if (offset)
1535 mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
1536 }
1537
1538 while (ptr)
1539 {
1540 insn_info_t next = ptr->next_local_store;
1541 struct store_info *s_info = ptr->store_rec;
1542 bool del = true;
1543
1544 /* Skip the clobbers. We delete the active insn if this insn
1545 shadows the set. To have been put on the active list, it
1546 has exactly one set. */
1547 while (!s_info->is_set)
1548 s_info = s_info->next;
1549
1550 if (s_info->alias_set != spill_alias_set)
1551 del = false;
1552 else if (s_info->alias_set)
1553 {
1554 struct clear_alias_mode_holder *entry
1555 = clear_alias_set_lookup (s_info->alias_set);
1556 /* Generally, spills cannot be processed if any of the
1557 references to the slot have a different mode. But if
1558 we are in the same block and mode is exactly the same
1559 between this store and one before in the same block,
1560 we can still delete it. */
1561 if ((GET_MODE (mem) == GET_MODE (s_info->mem))
1562 && (GET_MODE (mem) == entry->mode))
1563 {
1564 del = true;
1565 set_all_positions_unneeded (s_info);
1566 }
1567 if (dump_file && (dump_flags & TDF_DETAILS))
1568 fprintf (dump_file, " trying spill store in insn=%d alias_set=%d\n",
1569 INSN_UID (ptr->insn), (int) s_info->alias_set);
1570 }
1571 else if ((s_info->group_id == group_id)
1572 && (s_info->cse_base == base))
1573 {
1574 HOST_WIDE_INT i;
1575 if (dump_file && (dump_flags & TDF_DETAILS))
1576 fprintf (dump_file, " trying store in insn=%d gid=%d[%d..%d)\n",
1577 INSN_UID (ptr->insn), s_info->group_id,
1578 (int)s_info->begin, (int)s_info->end);
1579
1580 /* Even if PTR won't be eliminated as unneeded, if both
1581 PTR and this insn store the same constant value, we might
1582 eliminate this insn instead. */
1583 if (s_info->const_rhs
1584 && const_rhs
1585 && offset >= s_info->begin
1586 && offset + width <= s_info->end
1587 && all_positions_needed_p (s_info, offset - s_info->begin,
1588 width))
1589 {
1590 if (GET_MODE (mem) == BLKmode)
1591 {
1592 if (GET_MODE (s_info->mem) == BLKmode
1593 && s_info->const_rhs == const_rhs)
1594 redundant_reason = ptr;
1595 }
1596 else if (s_info->const_rhs == const0_rtx
1597 && const_rhs == const0_rtx)
1598 redundant_reason = ptr;
1599 else
1600 {
1601 rtx val;
1602 start_sequence ();
1603 val = get_stored_val (s_info, GET_MODE (mem),
1604 offset, offset + width,
1605 BLOCK_FOR_INSN (insn_info->insn),
1606 true);
1607 if (get_insns () != NULL)
1608 val = NULL_RTX;
1609 end_sequence ();
1610 if (val && rtx_equal_p (val, const_rhs))
1611 redundant_reason = ptr;
1612 }
1613 }
1614
1615 for (i = MAX (offset, s_info->begin);
1616 i < offset + width && i < s_info->end;
1617 i++)
1618 set_position_unneeded (s_info, i - s_info->begin);
1619 }
1620 else if (s_info->rhs)
1621 /* Need to see if it is possible for this store to overwrite
1622 the value of store_info. If it is, set the rhs to NULL to
1623 keep it from being used to remove a load. */
1624 {
1625 if (canon_true_dependence (s_info->mem,
1626 GET_MODE (s_info->mem),
1627 s_info->mem_addr,
1628 mem, mem_addr))
1629 {
1630 s_info->rhs = NULL;
1631 s_info->const_rhs = NULL;
1632 }
1633 }
1634
1635 /* An insn can be deleted if every position of every one of
1636 its s_infos is zero. */
1637 if (any_positions_needed_p (s_info))
1638 del = false;
1639
1640 if (del)
1641 {
1642 insn_info_t insn_to_delete = ptr;
1643
1644 active_local_stores_len--;
1645 if (last)
1646 last->next_local_store = ptr->next_local_store;
1647 else
1648 active_local_stores = ptr->next_local_store;
1649
1650 if (!insn_to_delete->cannot_delete)
1651 delete_dead_store_insn (insn_to_delete);
1652 }
1653 else
1654 last = ptr;
1655
1656 ptr = next;
1657 }
1658
1659 /* Finish filling in the store_info. */
1660 store_info->next = insn_info->store_rec;
1661 insn_info->store_rec = store_info;
1662 store_info->mem = mem;
1663 store_info->alias_set = spill_alias_set;
1664 store_info->mem_addr = mem_addr;
1665 store_info->cse_base = base;
1666 if (width > HOST_BITS_PER_WIDE_INT)
1667 {
1668 store_info->is_large = true;
1669 store_info->positions_needed.large.count = 0;
1670 store_info->positions_needed.large.bmap = BITMAP_ALLOC (&dse_bitmap_obstack);
1671 }
1672 else
1673 {
1674 store_info->is_large = false;
1675 store_info->positions_needed.small_bitmask = lowpart_bitmask (width);
1676 }
1677 store_info->group_id = group_id;
1678 store_info->begin = offset;
1679 store_info->end = offset + width;
1680 store_info->is_set = GET_CODE (body) == SET;
1681 store_info->rhs = rhs;
1682 store_info->const_rhs = const_rhs;
1683 store_info->redundant_reason = redundant_reason;
1684
1685 /* If this is a clobber, we return 0. We will only be able to
1686 delete this insn if there is only one used store, but we
1687 can use the clobber to delete other stores earlier. */
1688 return store_info->is_set ? 1 : 0;
1689 }
1690
1691
1692 static void
1693 dump_insn_info (const char * start, insn_info_t insn_info)
1694 {
1695 fprintf (dump_file, "%s insn=%d %s\n", start,
1696 INSN_UID (insn_info->insn),
1697 insn_info->store_rec ? "has store" : "naked");
1698 }
1699
1700
1701 /* If the modes are different and the value's source and target do not
1702 line up, we need to extract the value from the lower part of the rhs of
1703 the store, shift it, and then put it into a form that can be shoved
1704 into the read_insn. This function generates a right SHIFT of a
1705 value that is at least ACCESS_SIZE bytes wide and extracts READ_MODE from it. The
1706 shift sequence is returned or NULL if we failed to find a
1707 shift. */
1708
1709 static rtx
1710 find_shift_sequence (int access_size,
1711 store_info *store_info,
1712 machine_mode read_mode,
1713 int shift, bool speed, bool require_cst)
1714 {
1715 machine_mode store_mode = GET_MODE (store_info->mem);
1716 machine_mode new_mode;
1717 rtx read_reg = NULL;
1718
1719 /* Some machines like the x86 have shift insns for each size of
1720 operand. Other machines like the ppc or the ia-64 may only have
1721 shift insns that shift values within 32 or 64 bit registers.
1722 This loop tries to find the smallest shift insn that will right
1723 justify the value we want to read but is available in one insn on
1724 the machine. */
1725
1726 for (new_mode = smallest_mode_for_size (access_size * BITS_PER_UNIT,
1727 MODE_INT);
1728 GET_MODE_BITSIZE (new_mode) <= BITS_PER_WORD;
1729 new_mode = GET_MODE_WIDER_MODE (new_mode))
1730 {
1731 rtx target, new_reg, new_lhs;
1732 rtx_insn *shift_seq, *insn;
1733 int cost;
1734
1735 /* If a constant was stored into memory, try to simplify it here,
1736 otherwise the cost of the shift might preclude this optimization
1737 e.g. at -Os, even when no actual shift will be needed. */
1738 if (store_info->const_rhs)
1739 {
1740 unsigned int byte = subreg_lowpart_offset (new_mode, store_mode);
1741 rtx ret = simplify_subreg (new_mode, store_info->const_rhs,
1742 store_mode, byte);
1743 if (ret && CONSTANT_P (ret))
1744 {
1745 ret = simplify_const_binary_operation (LSHIFTRT, new_mode,
1746 ret, GEN_INT (shift));
1747 if (ret && CONSTANT_P (ret))
1748 {
1749 byte = subreg_lowpart_offset (read_mode, new_mode);
1750 ret = simplify_subreg (read_mode, ret, new_mode, byte);
1751 if (ret && CONSTANT_P (ret)
1752 && (set_src_cost (ret, read_mode, speed)
1753 <= COSTS_N_INSNS (1)))
1754 return ret;
1755 }
1756 }
1757 }
1758
1759 if (require_cst)
1760 return NULL_RTX;
1761
1762 /* Try a wider mode if truncating the store mode to NEW_MODE
1763 requires a real instruction. */
1764 if (GET_MODE_BITSIZE (new_mode) < GET_MODE_BITSIZE (store_mode)
1765 && !TRULY_NOOP_TRUNCATION_MODES_P (new_mode, store_mode))
1766 continue;
1767
1768 /* Also try a wider mode if the necessary punning is either not
1769 desirable or not possible. */
1770 if (!CONSTANT_P (store_info->rhs)
1771 && !MODES_TIEABLE_P (new_mode, store_mode))
1772 continue;
1773
1774 new_reg = gen_reg_rtx (new_mode);
1775
1776 start_sequence ();
1777
1778 /* In theory we could also check for an ashr. Ian Taylor knows
1779 of one dsp where the cost of these two was not the same. But
1780 this really is a rare case anyway. */
1781 target = expand_binop (new_mode, lshr_optab, new_reg,
1782 GEN_INT (shift), new_reg, 1, OPTAB_DIRECT);
1783
1784 shift_seq = get_insns ();
1785 end_sequence ();
1786
1787 if (target != new_reg || shift_seq == NULL)
1788 continue;
1789
1790 cost = 0;
1791 for (insn = shift_seq; insn != NULL_RTX; insn = NEXT_INSN (insn))
1792 if (INSN_P (insn))
1793 cost += insn_rtx_cost (PATTERN (insn), speed);
1794
1795 /* The computation up to here is essentially independent
1796 of the arguments and could be precomputed. It may
1797 not be worth doing so. We could precompute if
1798 worthwhile or at least cache the results. The result
1799 technically depends on both SHIFT and ACCESS_SIZE,
1800 but in practice the answer will depend only on ACCESS_SIZE. */
1801
1802 if (cost > COSTS_N_INSNS (1))
1803 continue;
1804
1805 new_lhs = extract_low_bits (new_mode, store_mode,
1806 copy_rtx (store_info->rhs));
1807 if (new_lhs == NULL_RTX)
1808 continue;
1809
1810 /* We found an acceptable shift. Generate a move to
1811 take the value from the store and put it into the
1812 shift pseudo, then shift it, then generate another
1813 move to put it into the target of the read. */
1814 emit_move_insn (new_reg, new_lhs);
1815 emit_insn (shift_seq);
1816 read_reg = extract_low_bits (read_mode, new_mode, new_reg);
1817 break;
1818 }
1819
1820 return read_reg;
1821 }
1822
1823
1824 /* Call back for note_stores to find the hard regs set or clobbered by
1825 insn. Data is a bitmap of the hardregs set so far. */
1826
1827 static void
1828 look_for_hardregs (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
1829 {
1830 bitmap regs_set = (bitmap) data;
1831
1832 if (REG_P (x)
1833 && HARD_REGISTER_P (x))
1834 bitmap_set_range (regs_set, REGNO (x), REG_NREGS (x));
1835 }
1836
1837 /* Helper function for replace_read and record_store.
1838 Attempt to return a value stored in STORE_INFO, from READ_BEGIN
1839 to one before READ_END bytes read in READ_MODE. Return NULL
1840 if not successful.  If REQUIRE_CST is true, only return a constant. */
1841
1842 static rtx
1843 get_stored_val (store_info *store_info, machine_mode read_mode,
1844 HOST_WIDE_INT read_begin, HOST_WIDE_INT read_end,
1845 basic_block bb, bool require_cst)
1846 {
1847 machine_mode store_mode = GET_MODE (store_info->mem);
1848 int shift;
1849 int access_size; /* In bytes. */
1850 rtx read_reg;
1851
1852 /* To get here the read is within the boundaries of the write so
1853 shift will never be negative. Start out with the shift being in
1854 bytes. */
1855 if (store_mode == BLKmode)
1856 shift = 0;
1857 else if (BYTES_BIG_ENDIAN)
1858 shift = store_info->end - read_end;
1859 else
1860 shift = read_begin - store_info->begin;
1861
1862 access_size = shift + GET_MODE_SIZE (read_mode);
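  /* For example (little-endian): a store covering bytes [0, 8) of its base
     followed by a 2-byte read of bytes [4, 6) gives shift == 4 and
     access_size == 6 at this point.  */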
1863
1864 /* From now on it is bits. */
1865 shift *= BITS_PER_UNIT;
1866
1867 if (shift)
1868 read_reg = find_shift_sequence (access_size, store_info, read_mode, shift,
1869 optimize_bb_for_speed_p (bb),
1870 require_cst);
1871 else if (store_mode == BLKmode)
1872 {
1873 /* The store is a memset (addr, const_val, const_size). */
1874 gcc_assert (CONST_INT_P (store_info->rhs));
1875 store_mode = int_mode_for_mode (read_mode);
1876 if (store_mode == BLKmode)
1877 read_reg = NULL_RTX;
1878 else if (store_info->rhs == const0_rtx)
1879 read_reg = extract_low_bits (read_mode, store_mode, const0_rtx);
1880 else if (GET_MODE_BITSIZE (store_mode) > HOST_BITS_PER_WIDE_INT
1881 || BITS_PER_UNIT >= HOST_BITS_PER_WIDE_INT)
1882 read_reg = NULL_RTX;
1883 else
1884 {
1885 unsigned HOST_WIDE_INT c
1886 = INTVAL (store_info->rhs)
1887 & (((HOST_WIDE_INT) 1 << BITS_PER_UNIT) - 1);
1888 int shift = BITS_PER_UNIT;
1889 while (shift < HOST_BITS_PER_WIDE_INT)
1890 {
1891 c |= (c << shift);
1892 shift <<= 1;
1893 }
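  /* C now holds the memset byte replicated across a HOST_WIDE_INT; e.g. a
     fill value of 0xab becomes 0xabab, then 0xabababab, and so on, doubling
     the replicated width on each iteration of the loop above.  */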
1894 read_reg = gen_int_mode (c, store_mode);
1895 read_reg = extract_low_bits (read_mode, store_mode, read_reg);
1896 }
1897 }
1898 else if (store_info->const_rhs
1899 && (require_cst
1900 || GET_MODE_CLASS (read_mode) != GET_MODE_CLASS (store_mode)))
1901 read_reg = extract_low_bits (read_mode, store_mode,
1902 copy_rtx (store_info->const_rhs));
1903 else
1904 read_reg = extract_low_bits (read_mode, store_mode,
1905 copy_rtx (store_info->rhs));
1906 if (require_cst && read_reg && !CONSTANT_P (read_reg))
1907 read_reg = NULL_RTX;
1908 return read_reg;
1909 }
1910
1911 /* Take a sequence of:
1912 A <- r1
1913 ...
1914 ... <- A
1915
1916 and change it into
1917 r2 <- r1
1918 A <- r1
1919 ...
1920 ... <- r2
1921
1922 or
1923
1924 r3 <- extract (r1)
1925 r3 <- r3 >> shift
1926 r2 <- extract (r3)
1927 ... <- r2
1928
1929 or
1930
1931 r2 <- extract (r1)
1932 ... <- r2
1933
1934 Depending on the alignment and the mode of the store and
1935 subsequent load.
1936
1937
1938 The STORE_INFO and STORE_INSN are for the store and READ_INFO
1939 and READ_INSN are for the read. Return true if the replacement
1940 went ok. */
1941
1942 static bool
1943 replace_read (store_info *store_info, insn_info_t store_insn,
1944 read_info_t read_info, insn_info_t read_insn, rtx *loc,
1945 bitmap regs_live)
1946 {
1947 machine_mode store_mode = GET_MODE (store_info->mem);
1948 machine_mode read_mode = GET_MODE (read_info->mem);
1949 rtx_insn *insns, *this_insn;
1950 rtx read_reg;
1951 basic_block bb;
1952
1953 if (!dbg_cnt (dse))
1954 return false;
1955
1956 /* Create a sequence of instructions to set up the read register.
1957 This sequence goes immediately before the store and its result
1958 is read by the load.
1959
1960 We need to keep this in perspective. We are replacing a read
1961 with a sequence of insns, but the read will almost certainly be
1962 in cache, so it is not going to be an expensive one. Thus, we
1963 are not willing to do a multi-insn shift or, worse, a subroutine
1964 call to get rid of the read. */
1965 if (dump_file && (dump_flags & TDF_DETAILS))
1966 fprintf (dump_file, "trying to replace %smode load in insn %d"
1967 " from %smode store in insn %d\n",
1968 GET_MODE_NAME (read_mode), INSN_UID (read_insn->insn),
1969 GET_MODE_NAME (store_mode), INSN_UID (store_insn->insn));
1970 start_sequence ();
1971 bb = BLOCK_FOR_INSN (read_insn->insn);
1972 read_reg = get_stored_val (store_info,
1973 read_mode, read_info->begin, read_info->end,
1974 bb, false);
1975 if (read_reg == NULL_RTX)
1976 {
1977 end_sequence ();
1978 if (dump_file && (dump_flags & TDF_DETAILS))
1979 fprintf (dump_file, " -- could not extract bits of stored value\n");
1980 return false;
1981 }
1982 /* Force the value into a new register so that it won't be clobbered
1983 between the store and the load. */
1984 read_reg = copy_to_mode_reg (read_mode, read_reg);
1985 insns = get_insns ();
1986 end_sequence ();
1987
1988 if (insns != NULL_RTX)
1989 {
1990 /* Now we have to scan the set of new instructions to see if the
1991 sequence contains any sets of hardregs that happened to be
1992 live at this point. For instance, this can happen if one of
1993 the insns sets the CC and the CC happened to be live at that
1994 point. This does occasionally happen, see PR 37922. */
1995 bitmap regs_set = BITMAP_ALLOC (&reg_obstack);
1996
1997 for (this_insn = insns; this_insn != NULL_RTX; this_insn = NEXT_INSN (this_insn))
1998 note_stores (PATTERN (this_insn), look_for_hardregs, regs_set);
1999
2000 bitmap_and_into (regs_set, regs_live);
2001 if (!bitmap_empty_p (regs_set))
2002 {
2003 if (dump_file && (dump_flags & TDF_DETAILS))
2004 {
2005 fprintf (dump_file,
2006 "abandoning replacement because sequence clobbers live hardregs:");
2007 df_print_regset (dump_file, regs_set);
2008 }
2009
2010 BITMAP_FREE (regs_set);
2011 return false;
2012 }
2013 BITMAP_FREE (regs_set);
2014 }
2015
2016 if (validate_change (read_insn->insn, loc, read_reg, 0))
2017 {
2018 deferred_change *change = deferred_change_pool.allocate ();
2019
2020 /* Insert this right before the store insn where it will be safe
2021 from later insns that might change it before the read. */
2022 emit_insn_before (insns, store_insn->insn);
2023
2024 /* And now for the kludge part: cselib croaks if you just
2025 return at this point. There are two reasons for this:
2026
2027 1) Cselib has an idea of how many pseudos there are and
2028 that does not include the new ones we just added.
2029
2030 2) Cselib does not know about the move insn we added
2031 above the store_info, and there is no way to tell it
2032 about it, because it has "moved on".
2033
2034 Problem (1) is fixable with a certain amount of engineering.
2035 Problem (2) requires starting the bb from scratch.  This
2036 could be expensive.
2037
2038 So we are just going to have to lie. The move/extraction
2039 insns are not really an issue, cselib did not see them. But
2040 the use of the new pseudo in read_insn is a real problem because
2041 cselib has not scanned this insn. The way that we solve this
2042 problem is that we are just going to put the mem back for now
2043 and when we are finished with the block, we undo this. We
2044 keep a table of mems to get rid of. At the end of the basic
2045 block we can put them back. */
2046
2047 *loc = read_info->mem;
2048 change->next = deferred_change_list;
2049 deferred_change_list = change;
2050 change->loc = loc;
2051 change->reg = read_reg;
2052
2053 /* Get rid of the read_info, from the point of view of the
2054 rest of dse, play like this read never happened. */
2055 read_insn->read_rec = read_info->next;
2056 read_info_type_pool.remove (read_info);
2057 if (dump_file && (dump_flags & TDF_DETAILS))
2058 {
2059 fprintf (dump_file, " -- replaced the loaded MEM with ");
2060 print_simple_rtl (dump_file, read_reg);
2061 fprintf (dump_file, "\n");
2062 }
2063 return true;
2064 }
2065 else
2066 {
2067 if (dump_file && (dump_flags & TDF_DETAILS))
2068 {
2069 fprintf (dump_file, " -- replacing the loaded MEM with ");
2070 print_simple_rtl (dump_file, read_reg);
2071 fprintf (dump_file, " led to an invalid instruction\n");
2072 }
2073 return false;
2074 }
2075 }
2076
2077 /* Check the address of MEM *LOC and kill any appropriate stores that may
2078 be active. */
2079
2080 static void
2081 check_mem_read_rtx (rtx *loc, bb_info_t bb_info)
2082 {
2083 rtx mem = *loc, mem_addr;
2084 insn_info_t insn_info;
2085 HOST_WIDE_INT offset = 0;
2086 HOST_WIDE_INT width = 0;
2087 alias_set_type spill_alias_set = 0;
2088 cselib_val *base = NULL;
2089 int group_id;
2090 read_info_t read_info;
2091
2092 insn_info = bb_info->last_insn;
2093
2094 if ((MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2095 || (MEM_VOLATILE_P (mem)))
2096 {
2097 if (dump_file && (dump_flags & TDF_DETAILS))
2098 fprintf (dump_file, " adding wild read, volatile or barrier.\n");
2099 add_wild_read (bb_info);
2100 insn_info->cannot_delete = true;
2101 return;
2102 }
2103
2104 /* If it is reading readonly mem, then there can be no conflict with
2105 another write. */
2106 if (MEM_READONLY_P (mem))
2107 return;
2108
2109 if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
2110 {
2111 if (dump_file && (dump_flags & TDF_DETAILS))
2112 fprintf (dump_file, " adding wild read, canon_address failure.\n");
2113 add_wild_read (bb_info);
2114 return;
2115 }
2116
2117 if (GET_MODE (mem) == BLKmode)
2118 width = -1;
2119 else
2120 width = GET_MODE_SIZE (GET_MODE (mem));
2121
2122 read_info = read_info_type_pool.allocate ();
2123 read_info->group_id = group_id;
2124 read_info->mem = mem;
2125 read_info->alias_set = spill_alias_set;
2126 read_info->begin = offset;
2127 read_info->end = offset + width;
2128 read_info->next = insn_info->read_rec;
2129 insn_info->read_rec = read_info;
2130 /* For alias_set != 0 canon_true_dependence should never be called. */
2131 if (spill_alias_set)
2132 mem_addr = NULL_RTX;
2133 else
2134 {
2135 if (group_id < 0)
2136 mem_addr = base->val_rtx;
2137 else
2138 {
2139 group_info *group
2140 = rtx_group_vec[group_id];
2141 mem_addr = group->canon_base_addr;
2142 }
2143 /* get_addr can only handle VALUE, not an expr like
2144 VALUE + OFFSET, so call get_addr on mem_addr to get the
2145 original address before adding the offset with plus_constant. */
2146 mem_addr = get_addr (mem_addr);
2147 if (offset)
2148 mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
2149 }
2150
2151 /* We ignore the clobbers in store_info.  This is mildly aggressive,
2152 but there really should not be a clobber followed by a read. */
2153
2154 if (spill_alias_set)
2155 {
2156 insn_info_t i_ptr = active_local_stores;
2157 insn_info_t last = NULL;
2158
2159 if (dump_file && (dump_flags & TDF_DETAILS))
2160 fprintf (dump_file, " processing spill load %d\n",
2161 (int) spill_alias_set);
2162
2163 while (i_ptr)
2164 {
2165 store_info *store_info = i_ptr->store_rec;
2166
2167 /* Skip the clobbers. */
2168 while (!store_info->is_set)
2169 store_info = store_info->next;
2170
2171 if (store_info->alias_set == spill_alias_set)
2172 {
2173 if (dump_file && (dump_flags & TDF_DETAILS))
2174 dump_insn_info ("removing from active", i_ptr);
2175
2176 active_local_stores_len--;
2177 if (last)
2178 last->next_local_store = i_ptr->next_local_store;
2179 else
2180 active_local_stores = i_ptr->next_local_store;
2181 }
2182 else
2183 last = i_ptr;
2184 i_ptr = i_ptr->next_local_store;
2185 }
2186 }
2187 else if (group_id >= 0)
2188 {
2189 /* This is the restricted case where the base is a constant or
2190 the frame pointer and offset is a constant. */
2191 insn_info_t i_ptr = active_local_stores;
2192 insn_info_t last = NULL;
2193
2194 if (dump_file && (dump_flags & TDF_DETAILS))
2195 {
2196 if (width == -1)
2197 fprintf (dump_file, " processing const load gid=%d[BLK]\n",
2198 group_id);
2199 else
2200 fprintf (dump_file, " processing const load gid=%d[%d..%d)\n",
2201 group_id, (int)offset, (int)(offset+width));
2202 }
2203
2204 while (i_ptr)
2205 {
2206 bool remove = false;
2207 store_info *store_info = i_ptr->store_rec;
2208
2209 /* Skip the clobbers. */
2210 while (!store_info->is_set)
2211 store_info = store_info->next;
2212
2213 /* There are three cases here. */
2214 if (store_info->group_id < 0)
2215 /* We have a cselib store followed by a read from a
2216 const base. */
2217 remove
2218 = canon_true_dependence (store_info->mem,
2219 GET_MODE (store_info->mem),
2220 store_info->mem_addr,
2221 mem, mem_addr);
2222
2223 else if (group_id == store_info->group_id)
2224 {
2225 /* This is a block mode load. We may get lucky and
2226 canon_true_dependence may save the day. */
2227 if (width == -1)
2228 remove
2229 = canon_true_dependence (store_info->mem,
2230 GET_MODE (store_info->mem),
2231 store_info->mem_addr,
2232 mem, mem_addr);
2233
2234 /* If this read is just reading back something that we just
2235 stored, rewrite the read. */
2236 else
2237 {
2238 if (store_info->rhs
2239 && offset >= store_info->begin
2240 && offset + width <= store_info->end
2241 && all_positions_needed_p (store_info,
2242 offset - store_info->begin,
2243 width)
2244 && replace_read (store_info, i_ptr, read_info,
2245 insn_info, loc, bb_info->regs_live))
2246 return;
2247
2248 /* The bases are the same, just see if the offsets
2249 overlap. */
2250 if ((offset < store_info->end)
2251 && (offset + width > store_info->begin))
2252 remove = true;
2253 }
2254 }
2255
2256 /* else
2257 The else case that is missing here is that the
2258 bases are constant but different. There is nothing
2259 to do here because there is no overlap. */
2260
2261 if (remove)
2262 {
2263 if (dump_file && (dump_flags & TDF_DETAILS))
2264 dump_insn_info ("removing from active", i_ptr);
2265
2266 active_local_stores_len--;
2267 if (last)
2268 last->next_local_store = i_ptr->next_local_store;
2269 else
2270 active_local_stores = i_ptr->next_local_store;
2271 }
2272 else
2273 last = i_ptr;
2274 i_ptr = i_ptr->next_local_store;
2275 }
2276 }
2277 else
2278 {
2279 insn_info_t i_ptr = active_local_stores;
2280 insn_info_t last = NULL;
2281 if (dump_file && (dump_flags & TDF_DETAILS))
2282 {
2283 fprintf (dump_file, " processing cselib load mem:");
2284 print_inline_rtx (dump_file, mem, 0);
2285 fprintf (dump_file, "\n");
2286 }
2287
2288 while (i_ptr)
2289 {
2290 bool remove = false;
2291 store_info *store_info = i_ptr->store_rec;
2292
2293 if (dump_file && (dump_flags & TDF_DETAILS))
2294 fprintf (dump_file, " processing cselib load against insn %d\n",
2295 INSN_UID (i_ptr->insn));
2296
2297 /* Skip the clobbers. */
2298 while (!store_info->is_set)
2299 store_info = store_info->next;
2300
2301 /* If this read is just reading back something that we just
2302 stored, rewrite the read. */
2303 if (store_info->rhs
2304 && store_info->group_id == -1
2305 && store_info->cse_base == base
2306 && width != -1
2307 && offset >= store_info->begin
2308 && offset + width <= store_info->end
2309 && all_positions_needed_p (store_info,
2310 offset - store_info->begin, width)
2311 && replace_read (store_info, i_ptr, read_info, insn_info, loc,
2312 bb_info->regs_live))
2313 return;
2314
2315 if (!store_info->alias_set)
2316 remove = canon_true_dependence (store_info->mem,
2317 GET_MODE (store_info->mem),
2318 store_info->mem_addr,
2319 mem, mem_addr);
2320
2321 if (remove)
2322 {
2323 if (dump_file && (dump_flags & TDF_DETAILS))
2324 dump_insn_info ("removing from active", i_ptr);
2325
2326 active_local_stores_len--;
2327 if (last)
2328 last->next_local_store = i_ptr->next_local_store;
2329 else
2330 active_local_stores = i_ptr->next_local_store;
2331 }
2332 else
2333 last = i_ptr;
2334 i_ptr = i_ptr->next_local_store;
2335 }
2336 }
2337 }
2338
2339 /* A note_uses callback in which DATA points to the BB_INFO that is
2340 passed on to check_mem_read_rtx.  Check every MEM found in any
2341 part of *LOC. */
2342
2343 static void
2344 check_mem_read_use (rtx *loc, void *data)
2345 {
2346 subrtx_ptr_iterator::array_type array;
2347 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
2348 {
2349 rtx *loc = *iter;
2350 if (MEM_P (*loc))
2351 check_mem_read_rtx (loc, (bb_info_t) data);
2352 }
2353 }
2354
2355
2356 /* Get arguments passed to CALL_INSN. Return TRUE if successful.
2357 So far it only handles arguments passed in registers. */
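/* In this file the function is only used for memset-like calls: e.g.
   get_call_args (insn, memset_call, args, 3) is expected to fill ARGS
   with the destination address, the fill value and the length, each
   possibly simplified to a CONST_INT by cselib_expand_value_rtx.  */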
2358
2359 static bool
2360 get_call_args (rtx call_insn, tree fn, rtx *args, int nargs)
2361 {
2362 CUMULATIVE_ARGS args_so_far_v;
2363 cumulative_args_t args_so_far;
2364 tree arg;
2365 int idx;
2366
2367 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
2368 args_so_far = pack_cumulative_args (&args_so_far_v);
2369
2370 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
2371 for (idx = 0;
2372 arg != void_list_node && idx < nargs;
2373 arg = TREE_CHAIN (arg), idx++)
2374 {
2375 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
2376 rtx reg, link, tmp;
2377 reg = targetm.calls.function_arg (args_so_far, mode, NULL_TREE, true);
2378 if (!reg || !REG_P (reg) || GET_MODE (reg) != mode
2379 || GET_MODE_CLASS (mode) != MODE_INT)
2380 return false;
2381
2382 for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
2383 link;
2384 link = XEXP (link, 1))
2385 if (GET_CODE (XEXP (link, 0)) == USE)
2386 {
2387 args[idx] = XEXP (XEXP (link, 0), 0);
2388 if (REG_P (args[idx])
2389 && REGNO (args[idx]) == REGNO (reg)
2390 && (GET_MODE (args[idx]) == mode
2391 || (GET_MODE_CLASS (GET_MODE (args[idx])) == MODE_INT
2392 && (GET_MODE_SIZE (GET_MODE (args[idx]))
2393 <= UNITS_PER_WORD)
2394 && (GET_MODE_SIZE (GET_MODE (args[idx]))
2395 > GET_MODE_SIZE (mode)))))
2396 break;
2397 }
2398 if (!link)
2399 return false;
2400
2401 tmp = cselib_expand_value_rtx (args[idx], scratch, 5);
2402 if (GET_MODE (args[idx]) != mode)
2403 {
2404 if (!tmp || !CONST_INT_P (tmp))
2405 return false;
2406 tmp = gen_int_mode (INTVAL (tmp), mode);
2407 }
2408 if (tmp)
2409 args[idx] = tmp;
2410
2411 targetm.calls.function_arg_advance (args_so_far, mode, NULL_TREE, true);
2412 }
2413 if (arg != void_list_node || idx != nargs)
2414 return false;
2415 return true;
2416 }
2417
2418 /* Return a bitmap of the fixed registers contained in IN. */
2419
2420 static bitmap
2421 copy_fixed_regs (const_bitmap in)
2422 {
2423 bitmap ret;
2424
2425 ret = ALLOC_REG_SET (NULL);
2426 bitmap_and (ret, in, fixed_reg_set_regset);
2427 return ret;
2428 }
2429
2430 /* Apply record_store to all candidate stores in INSN. Mark INSN as not deletable
2431 if some part of it is not a candidate store and assigns to a
2432 non-register target. */
2433
2434 static void
2435 scan_insn (bb_info_t bb_info, rtx_insn *insn)
2436 {
2437 rtx body;
2438 insn_info_type *insn_info = insn_info_type_pool.allocate ();
2439 int mems_found = 0;
2440 memset (insn_info, 0, sizeof (struct insn_info_type));
2441
2442 if (dump_file && (dump_flags & TDF_DETAILS))
2443 fprintf (dump_file, "\n**scanning insn=%d\n",
2444 INSN_UID (insn));
2445
2446 insn_info->prev_insn = bb_info->last_insn;
2447 insn_info->insn = insn;
2448 bb_info->last_insn = insn_info;
2449
2450 if (DEBUG_INSN_P (insn))
2451 {
2452 insn_info->cannot_delete = true;
2453 return;
2454 }
2455
2456 /* Look at all of the uses in the insn. */
2457 note_uses (&PATTERN (insn), check_mem_read_use, bb_info);
2458
2459 if (CALL_P (insn))
2460 {
2461 bool const_call;
2462 tree memset_call = NULL_TREE;
2463
2464 insn_info->cannot_delete = true;
2465
2466 /* Const functions cannot do anything bad, i.e. read memory;
2467 however, they can read their parameters, which may have
2468 been pushed onto the stack.
2469 memset and bzero don't read memory either. */
2470 const_call = RTL_CONST_CALL_P (insn);
2471 if (!const_call)
2472 {
2473 rtx call = get_call_rtx_from (insn);
2474 if (call && GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
2475 {
2476 rtx symbol = XEXP (XEXP (call, 0), 0);
2477 if (SYMBOL_REF_DECL (symbol)
2478 && TREE_CODE (SYMBOL_REF_DECL (symbol)) == FUNCTION_DECL)
2479 {
2480 if ((DECL_BUILT_IN_CLASS (SYMBOL_REF_DECL (symbol))
2481 == BUILT_IN_NORMAL
2482 && (DECL_FUNCTION_CODE (SYMBOL_REF_DECL (symbol))
2483 == BUILT_IN_MEMSET))
2484 || SYMBOL_REF_DECL (symbol) == block_clear_fn)
2485 memset_call = SYMBOL_REF_DECL (symbol);
2486 }
2487 }
2488 }
2489 if (const_call || memset_call)
2490 {
2491 insn_info_t i_ptr = active_local_stores;
2492 insn_info_t last = NULL;
2493
2494 if (dump_file && (dump_flags & TDF_DETAILS))
2495 fprintf (dump_file, "%s call %d\n",
2496 const_call ? "const" : "memset", INSN_UID (insn));
2497
2498 /* See the head comment of the frame_read field. */
2499 if (reload_completed
2500 /* Tail calls store their arguments using the
2501 arg pointer.  If that is the frame pointer on the target,
2502 we need to kill frame pointer based stores even
2503 before reload. */
2504 || (SIBLING_CALL_P (insn)
2505 && HARD_FRAME_POINTER_IS_ARG_POINTER))
2506 insn_info->frame_read = true;
2507
2508 /* Loop over the active stores and remove those which are
2509 killed by the const function call. */
2510 while (i_ptr)
2511 {
2512 bool remove_store = false;
2513
2514 /* The stack pointer based stores are always killed. */
2515 if (i_ptr->stack_pointer_based)
2516 remove_store = true;
2517
2518 /* If the frame is read, the frame related stores are killed. */
2519 else if (insn_info->frame_read)
2520 {
2521 store_info *store_info = i_ptr->store_rec;
2522
2523 /* Skip the clobbers. */
2524 while (!store_info->is_set)
2525 store_info = store_info->next;
2526
2527 if (store_info->group_id >= 0
2528 && rtx_group_vec[store_info->group_id]->frame_related)
2529 remove_store = true;
2530 }
2531
2532 if (remove_store)
2533 {
2534 if (dump_file && (dump_flags & TDF_DETAILS))
2535 dump_insn_info ("removing from active", i_ptr);
2536
2537 active_local_stores_len--;
2538 if (last)
2539 last->next_local_store = i_ptr->next_local_store;
2540 else
2541 active_local_stores = i_ptr->next_local_store;
2542 }
2543 else
2544 last = i_ptr;
2545
2546 i_ptr = i_ptr->next_local_store;
2547 }
2548
2549 if (memset_call)
2550 {
2551 rtx args[3];
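              /* For example, a call to memset (p, 0, 32) is modelled below as a
                 BLKmode SET of a 32-byte MEM based at P from (const_int 0), so the
                 local algorithm can treat the call like any other recorded store.  */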
2552 if (get_call_args (insn, memset_call, args, 3)
2553 && CONST_INT_P (args[1])
2554 && CONST_INT_P (args[2])
2555 && INTVAL (args[2]) > 0)
2556 {
2557 rtx mem = gen_rtx_MEM (BLKmode, args[0]);
2558 set_mem_size (mem, INTVAL (args[2]));
2559 body = gen_rtx_SET (mem, args[1]);
2560 mems_found += record_store (body, bb_info);
2561 if (dump_file && (dump_flags & TDF_DETAILS))
2562 fprintf (dump_file, "handling memset as BLKmode store\n");
2563 if (mems_found == 1)
2564 {
2565 if (active_local_stores_len++
2566 >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
2567 {
2568 active_local_stores_len = 1;
2569 active_local_stores = NULL;
2570 }
2571 insn_info->fixed_regs_live
2572 = copy_fixed_regs (bb_info->regs_live);
2573 insn_info->next_local_store = active_local_stores;
2574 active_local_stores = insn_info;
2575 }
2576 }
2577 }
2578 }
2579 else if (SIBLING_CALL_P (insn) && reload_completed)
2580 /* Arguments for a sibling call that are pushed to memory are passed
2581 using the incoming argument pointer of the current function. After
2582 reload that might be (and likely is) frame pointer based. */
2583 add_wild_read (bb_info);
2584 else
2585 /* Every other call, including pure functions, may read any memory
2586 that is not relative to the frame. */
2587 add_non_frame_wild_read (bb_info);
2588
2589 return;
2590 }
2591
2592 /* Assuming that there are sets in these insns, we cannot delete
2593 them. */
2594 if ((GET_CODE (PATTERN (insn)) == CLOBBER)
2595 || volatile_refs_p (PATTERN (insn))
2596 || (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
2597 || (RTX_FRAME_RELATED_P (insn))
2598 || find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX))
2599 insn_info->cannot_delete = true;
2600
2601 body = PATTERN (insn);
2602 if (GET_CODE (body) == PARALLEL)
2603 {
2604 int i;
2605 for (i = 0; i < XVECLEN (body, 0); i++)
2606 mems_found += record_store (XVECEXP (body, 0, i), bb_info);
2607 }
2608 else
2609 mems_found += record_store (body, bb_info);
2610
2611 if (dump_file && (dump_flags & TDF_DETAILS))
2612 fprintf (dump_file, "mems_found = %d, cannot_delete = %s\n",
2613 mems_found, insn_info->cannot_delete ? "true" : "false");
2614
2615 /* If we found exactly one set of a mem, add the insn to active_local_stores so
2616 that it can be locally deleted if found dead, or used for
2617 replace_read and redundant constant store elimination.  Otherwise mark
2618 it as cannot delete.  This simplifies the processing later. */
2619 if (mems_found == 1)
2620 {
2621 if (active_local_stores_len++
2622 >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
2623 {
2624 active_local_stores_len = 1;
2625 active_local_stores = NULL;
2626 }
2627 insn_info->fixed_regs_live = copy_fixed_regs (bb_info->regs_live);
2628 insn_info->next_local_store = active_local_stores;
2629 active_local_stores = insn_info;
2630 }
2631 else
2632 insn_info->cannot_delete = true;
2633 }
2634
2635
2636 /* Remove BASE from the set of active_local_stores. This is a
2637 callback from cselib that is used to get rid of the stores in
2638 active_local_stores. */
2639
2640 static void
2641 remove_useless_values (cselib_val *base)
2642 {
2643 insn_info_t insn_info = active_local_stores;
2644 insn_info_t last = NULL;
2645
2646 while (insn_info)
2647 {
2648 store_info *store_info = insn_info->store_rec;
2649 bool del = false;
2650
2651 /* If ANY of the store_infos match the cselib group that is
2652 being deleted, then the insn can not be deleted. */
2653 while (store_info)
2654 {
2655 if ((store_info->group_id == -1)
2656 && (store_info->cse_base == base))
2657 {
2658 del = true;
2659 break;
2660 }
2661 store_info = store_info->next;
2662 }
2663
2664 if (del)
2665 {
2666 active_local_stores_len--;
2667 if (last)
2668 last->next_local_store = insn_info->next_local_store;
2669 else
2670 active_local_stores = insn_info->next_local_store;
2671 free_store_info (insn_info);
2672 }
2673 else
2674 last = insn_info;
2675
2676 insn_info = insn_info->next_local_store;
2677 }
2678 }
2679
2680
2681 /* Do all of step 1. */
2682
2683 static void
2684 dse_step1 (void)
2685 {
2686 basic_block bb;
2687 bitmap regs_live = BITMAP_ALLOC (&reg_obstack);
2688
2689 cselib_init (0);
2690 all_blocks = BITMAP_ALLOC (NULL);
2691 bitmap_set_bit (all_blocks, ENTRY_BLOCK);
2692 bitmap_set_bit (all_blocks, EXIT_BLOCK);
2693
2694 FOR_ALL_BB_FN (bb, cfun)
2695 {
2696 insn_info_t ptr;
2697 bb_info_t bb_info = dse_bb_info_type_pool.allocate ();
2698
2699 memset (bb_info, 0, sizeof (dse_bb_info_type));
2700 bitmap_set_bit (all_blocks, bb->index);
2701 bb_info->regs_live = regs_live;
2702
2703 bitmap_copy (regs_live, DF_LR_IN (bb));
2704 df_simulate_initialize_forwards (bb, regs_live);
2705
2706 bb_table[bb->index] = bb_info;
2707 cselib_discard_hook = remove_useless_values;
2708
2709 if (bb->index >= NUM_FIXED_BLOCKS)
2710 {
2711 rtx_insn *insn;
2712
2713 active_local_stores = NULL;
2714 active_local_stores_len = 0;
2715 cselib_clear_table ();
2716
2717 /* Scan the insns. */
2718 FOR_BB_INSNS (bb, insn)
2719 {
2720 if (INSN_P (insn))
2721 scan_insn (bb_info, insn);
2722 cselib_process_insn (insn);
2723 if (INSN_P (insn))
2724 df_simulate_one_insn_forwards (bb, insn, regs_live);
2725 }
2726
2727 /* This is something of a hack, because the global algorithm
2728 is supposed to take care of the case where stores go dead
2729 at the end of the function. However, the global
2730 algorithm must take a more conservative view of block
2731 mode reads than the local alg does. So to get the case
2732 where you have a store to the frame followed by a non
2733 overlapping block mode read, we look at the active local
2734 stores at the end of the function and delete all of the
2735 frame and spill based ones. */
2736 if (stores_off_frame_dead_at_return
2737 && (EDGE_COUNT (bb->succs) == 0
2738 || (single_succ_p (bb)
2739 && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
2740 && ! crtl->calls_eh_return)))
2741 {
2742 insn_info_t i_ptr = active_local_stores;
2743 while (i_ptr)
2744 {
2745 store_info *store_info = i_ptr->store_rec;
2746
2747 /* Skip the clobbers. */
2748 while (!store_info->is_set)
2749 store_info = store_info->next;
2750 if (store_info->alias_set && !i_ptr->cannot_delete)
2751 delete_dead_store_insn (i_ptr);
2752 else
2753 if (store_info->group_id >= 0)
2754 {
2755 group_info *group
2756 = rtx_group_vec[store_info->group_id];
2757 if (group->frame_related && !i_ptr->cannot_delete)
2758 delete_dead_store_insn (i_ptr);
2759 }
2760
2761 i_ptr = i_ptr->next_local_store;
2762 }
2763 }
2764
2765 /* Get rid of the loads that were discovered in
2766 replace_read. Cselib is finished with this block. */
2767 while (deferred_change_list)
2768 {
2769 deferred_change *next = deferred_change_list->next;
2770
2771 /* There is no reason to validate this change. That was
2772 done earlier. */
2773 *deferred_change_list->loc = deferred_change_list->reg;
2774 deferred_change_pool.remove (deferred_change_list);
2775 deferred_change_list = next;
2776 }
2777
2778 /* Get rid of all of the cselib based store_infos in this
2779 block and mark the containing insns as not being
2780 deletable. */
2781 ptr = bb_info->last_insn;
2782 while (ptr)
2783 {
2784 if (ptr->contains_cselib_groups)
2785 {
2786 store_info *s_info = ptr->store_rec;
2787 while (s_info && !s_info->is_set)
2788 s_info = s_info->next;
2789 if (s_info
2790 && s_info->redundant_reason
2791 && s_info->redundant_reason->insn
2792 && !ptr->cannot_delete)
2793 {
2794 if (dump_file && (dump_flags & TDF_DETAILS))
2795 fprintf (dump_file, "Locally deleting insn %d "
2796 "because insn %d stores the "
2797 "same value and couldn't be "
2798 "eliminated\n",
2799 INSN_UID (ptr->insn),
2800 INSN_UID (s_info->redundant_reason->insn));
2801 delete_dead_store_insn (ptr);
2802 }
2803 free_store_info (ptr);
2804 }
2805 else
2806 {
2807 store_info *s_info;
2808
2809 /* Free at least positions_needed bitmaps. */
2810 for (s_info = ptr->store_rec; s_info; s_info = s_info->next)
2811 if (s_info->is_large)
2812 {
2813 BITMAP_FREE (s_info->positions_needed.large.bmap);
2814 s_info->is_large = false;
2815 }
2816 }
2817 ptr = ptr->prev_insn;
2818 }
2819
2820 cse_store_info_pool.release ();
2821 }
2822 bb_info->regs_live = NULL;
2823 }
2824
2825 BITMAP_FREE (regs_live);
2826 cselib_finish ();
2827 rtx_group_table->empty ();
2828 }
2829
2830 \f
2831 /*----------------------------------------------------------------------------
2832 Second step.
2833
2834 Assign each byte position in the stores that we are going to
2835 analyze globally to a position in the bitmaps. Returns true if
2836 there are any bit positions assigned.
2837 ----------------------------------------------------------------------------*/
2838
2839 static void
2840 dse_step2_init (void)
2841 {
2842 unsigned int i;
2843 group_info *group;
2844
2845 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
2846 {
2847 /* For all non stack related bases, we only consider a store to
2848 be deletable if there are two or more stores for that
2849 position. This is because it takes one store to make the
2850 other store redundant. However, for the stores that are
2851 stack related, we consider them if there is only one store
2852 for the position. We do this because the stack related
2853 stores can be deleted if there is no read between them and
2854 the end of the function.
2855
2856 To make this work in the current framework, we take the stack
2857 related bases and add all of the bits from store1 into store2.
2858 This has the effect of making them eligible even if there is
2859 only one store. */
2860
2861 if (stores_off_frame_dead_at_return && group->frame_related)
2862 {
2863 bitmap_ior_into (group->store2_n, group->store1_n);
2864 bitmap_ior_into (group->store2_p, group->store1_p);
2865 if (dump_file && (dump_flags & TDF_DETAILS))
2866 fprintf (dump_file, "group %d is frame related ", i);
2867 }
2868
2869 group->offset_map_size_n++;
2870 group->offset_map_n = XOBNEWVEC (&dse_obstack, int,
2871 group->offset_map_size_n);
2872 group->offset_map_size_p++;
2873 group->offset_map_p = XOBNEWVEC (&dse_obstack, int,
2874 group->offset_map_size_p);
2875 group->process_globally = false;
2876 if (dump_file && (dump_flags & TDF_DETAILS))
2877 {
2878 fprintf (dump_file, "group %d(%d+%d): ", i,
2879 (int)bitmap_count_bits (group->store2_n),
2880 (int)bitmap_count_bits (group->store2_p));
2881 bitmap_print (dump_file, group->store2_n, "n ", " ");
2882 bitmap_print (dump_file, group->store2_p, "p ", "\n");
2883 }
2884 }
2885 }
2886
2887
2888 /* Init the offset tables for the normal case. */
2889
2890 static bool
2891 dse_step2_nospill (void)
2892 {
2893 unsigned int i;
2894 group_info *group;
2895 /* Position 0 is never assigned because 0 is used in the maps to mean
2896 unused. */
2897 current_position = 1;
2898 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
2899 {
2900 bitmap_iterator bi;
2901 unsigned int j;
2902
2903 if (group == clear_alias_group)
2904 continue;
2905
2906 memset (group->offset_map_n, 0, sizeof (int) * group->offset_map_size_n);
2907 memset (group->offset_map_p, 0, sizeof (int) * group->offset_map_size_p);
2908 bitmap_clear (group->group_kill);
2909
2910 EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
2911 {
2912 bitmap_set_bit (group->group_kill, current_position);
2913 if (bitmap_bit_p (group->escaped_n, j))
2914 bitmap_set_bit (kill_on_calls, current_position);
2915 group->offset_map_n[j] = current_position++;
2916 group->process_globally = true;
2917 }
2918 EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
2919 {
2920 bitmap_set_bit (group->group_kill, current_position);
2921 if (bitmap_bit_p (group->escaped_p, j))
2922 bitmap_set_bit (kill_on_calls, current_position);
2923 group->offset_map_p[j] = current_position++;
2924 group->process_globally = true;
2925 }
2926 }
2927 return current_position != 1;
2928 }
2929
2930
2931 \f
2932 /*----------------------------------------------------------------------------
2933 Third step.
2934
2935 Build the bit vectors for the transfer functions.
2936 ----------------------------------------------------------------------------*/
2937
2938
2939 /* Look up the bitmap index for OFFSET in GROUP_INFO. If it is not
2940 there, return 0. */
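/* For example, an OFFSET of -4 is looked up as offset_map_n[4] and an
   OFFSET of 6 as offset_map_p[6]; offsets that were never assigned a
   position in dse_step2_nospill, or that fall outside the maps, return 0,
   which means "not tracked by the global algorithm".  */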
2941
2942 static int
2943 get_bitmap_index (group_info *group_info, HOST_WIDE_INT offset)
2944 {
2945 if (offset < 0)
2946 {
2947 HOST_WIDE_INT offset_p = -offset;
2948 if (offset_p >= group_info->offset_map_size_n)
2949 return 0;
2950 return group_info->offset_map_n[offset_p];
2951 }
2952 else
2953 {
2954 if (offset >= group_info->offset_map_size_p)
2955 return 0;
2956 return group_info->offset_map_p[offset];
2957 }
2958 }
2959
2960
2961 /* Process the STORE_INFOs into the bitmaps GEN and KILL. KILL
2962 may be NULL. */
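/* A store generates its byte positions: every tracked position in
   [begin, end) is set in GEN and cleared from KILL; scan_reads_nospill
   below does the reverse for reads.  */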
2963
2964 static void
2965 scan_stores_nospill (store_info *store_info, bitmap gen, bitmap kill)
2966 {
2967 while (store_info)
2968 {
2969 HOST_WIDE_INT i;
2970 group_info *group_info
2971 = rtx_group_vec[store_info->group_id];
2972 if (group_info->process_globally)
2973 for (i = store_info->begin; i < store_info->end; i++)
2974 {
2975 int index = get_bitmap_index (group_info, i);
2976 if (index != 0)
2977 {
2978 bitmap_set_bit (gen, index);
2979 if (kill)
2980 bitmap_clear_bit (kill, index);
2981 }
2982 }
2983 store_info = store_info->next;
2984 }
2985 }
2986
2987
2988 /* Process the STORE_INFOs into the bitmaps GEN and KILL. KILL
2989 may be NULL. */
2990
2991 static void
2992 scan_stores_spill (store_info *store_info, bitmap gen, bitmap kill)
2993 {
2994 while (store_info)
2995 {
2996 if (store_info->alias_set)
2997 {
2998 int index = get_bitmap_index (clear_alias_group,
2999 store_info->alias_set);
3000 if (index != 0)
3001 {
3002 bitmap_set_bit (gen, index);
3003 if (kill)
3004 bitmap_clear_bit (kill, index);
3005 }
3006 }
3007 store_info = store_info->next;
3008 }
3009 }
3010
3011
3012 /* Process the READ_INFOs into the bitmaps GEN and KILL. KILL
3013 may be NULL. */
3014
3015 static void
3016 scan_reads_nospill (insn_info_t insn_info, bitmap gen, bitmap kill)
3017 {
3018 read_info_t read_info = insn_info->read_rec;
3019 int i;
3020 group_info *group;
3021
3022 /* If this insn reads the frame, kill all the frame related stores. */
3023 if (insn_info->frame_read)
3024 {
3025 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3026 if (group->process_globally && group->frame_related)
3027 {
3028 if (kill)
3029 bitmap_ior_into (kill, group->group_kill);
3030 bitmap_and_compl_into (gen, group->group_kill);
3031 }
3032 }
3033 if (insn_info->non_frame_wild_read)
3034 {
3035 /* Kill all non-frame related stores. Kill all stores of variables that
3036 escape. */
3037 if (kill)
3038 bitmap_ior_into (kill, kill_on_calls);
3039 bitmap_and_compl_into (gen, kill_on_calls);
3040 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3041 if (group->process_globally && !group->frame_related)
3042 {
3043 if (kill)
3044 bitmap_ior_into (kill, group->group_kill);
3045 bitmap_and_compl_into (gen, group->group_kill);
3046 }
3047 }
3048 while (read_info)
3049 {
3050 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3051 {
3052 if (group->process_globally)
3053 {
3054 if (i == read_info->group_id)
3055 {
3056 if (read_info->begin > read_info->end)
3057 {
3058 /* Begin > end for block mode reads. */
3059 if (kill)
3060 bitmap_ior_into (kill, group->group_kill);
3061 bitmap_and_compl_into (gen, group->group_kill);
3062 }
3063 else
3064 {
3065 /* The groups are the same, just process the
3066 offsets. */
3067 HOST_WIDE_INT j;
3068 for (j = read_info->begin; j < read_info->end; j++)
3069 {
3070 int index = get_bitmap_index (group, j);
3071 if (index != 0)
3072 {
3073 if (kill)
3074 bitmap_set_bit (kill, index);
3075 bitmap_clear_bit (gen, index);
3076 }
3077 }
3078 }
3079 }
3080 else
3081 {
3082 /* The groups are different, if the alias sets
3083 conflict, clear the entire group. We only need
3084 to apply this test if the read_info is a cselib
3085 read. Anything with a constant base cannot alias
3086 something else with a different constant
3087 base. */
3088 if ((read_info->group_id < 0)
3089 && canon_true_dependence (group->base_mem,
3090 GET_MODE (group->base_mem),
3091 group->canon_base_addr,
3092 read_info->mem, NULL_RTX))
3093 {
3094 if (kill)
3095 bitmap_ior_into (kill, group->group_kill);
3096 bitmap_and_compl_into (gen, group->group_kill);
3097 }
3098 }
3099 }
3100 }
3101
3102 read_info = read_info->next;
3103 }
3104 }
3105
3106 /* Process the READ_INFOs into the bitmaps GEN and KILL. KILL
3107 may be NULL. */
3108
3109 static void
3110 scan_reads_spill (read_info_t read_info, bitmap gen, bitmap kill)
3111 {
3112 while (read_info)
3113 {
3114 if (read_info->alias_set)
3115 {
3116 int index = get_bitmap_index (clear_alias_group,
3117 read_info->alias_set);
3118 if (index != 0)
3119 {
3120 if (kill)
3121 bitmap_set_bit (kill, index);
3122 bitmap_clear_bit (gen, index);
3123 }
3124 }
3125
3126 read_info = read_info->next;
3127 }
3128 }
3129
3130
3131 /* Return the insn in BB_INFO before the first wild read or, if there
3132 are no wild reads in the block, the last insn. */
3133
3134 static insn_info_t
3135 find_insn_before_first_wild_read (bb_info_t bb_info)
3136 {
3137 insn_info_t insn_info = bb_info->last_insn;
3138 insn_info_t last_wild_read = NULL;
3139
3140 while (insn_info)
3141 {
3142 if (insn_info->wild_read)
3143 {
3144 last_wild_read = insn_info->prev_insn;
3145 /* Block starts with wild read. */
3146 if (!last_wild_read)
3147 return NULL;
3148 }
3149
3150 insn_info = insn_info->prev_insn;
3151 }
3152
3153 if (last_wild_read)
3154 return last_wild_read;
3155 else
3156 return bb_info->last_insn;
3157 }
3158
3159
3160 /* Scan the insns in BB_INFO starting at PTR and going to the top of
3161 the block in order to build the gen and kill sets for the block.
3162 We start at ptr which may be the last insn in the block or may be
3163 the first insn with a wild read. In the latter case we are able to
3164 skip the rest of the block because it just does not matter:
3165 anything that happens is hidden by the wild read. */
3166
3167 static void
3168 dse_step3_scan (bool for_spills, basic_block bb)
3169 {
3170 bb_info_t bb_info = bb_table[bb->index];
3171 insn_info_t insn_info;
3172
3173 if (for_spills)
3174 /* There are no wild reads in the spill case. */
3175 insn_info = bb_info->last_insn;
3176 else
3177 insn_info = find_insn_before_first_wild_read (bb_info);
3178
3179 /* In the spill case, or in the no_spill case when there is no wild
3180 read in the block, we will need a kill set. */
3181 if (insn_info == bb_info->last_insn)
3182 {
3183 if (bb_info->kill)
3184 bitmap_clear (bb_info->kill);
3185 else
3186 bb_info->kill = BITMAP_ALLOC (&dse_bitmap_obstack);
3187 }
3188 else
3189 if (bb_info->kill)
3190 BITMAP_FREE (bb_info->kill);
3191
3192 while (insn_info)
3193 {
3194 /* There may have been code deleted by the dce pass run before
3195 this phase. */
3196 if (insn_info->insn && INSN_P (insn_info->insn))
3197 {
3198 /* Process the read(s) last. */
3199 if (for_spills)
3200 {
3201 scan_stores_spill (insn_info->store_rec, bb_info->gen, bb_info->kill);
3202 scan_reads_spill (insn_info->read_rec, bb_info->gen, bb_info->kill);
3203 }
3204 else
3205 {
3206 scan_stores_nospill (insn_info->store_rec, bb_info->gen, bb_info->kill);
3207 scan_reads_nospill (insn_info, bb_info->gen, bb_info->kill);
3208 }
3209 }
3210
3211 insn_info = insn_info->prev_insn;
3212 }
3213 }
3214
3215
3216 /* Set the gen set of the exit block, and also any block with no
3217 successors that does not have a wild read. */
3218
3219 static void
3220 dse_step3_exit_block_scan (bb_info_t bb_info)
3221 {
3222 /* The gen set is all 0's for the exit block except for the
3223 frame_pointer_group. */
3224
3225 if (stores_off_frame_dead_at_return)
3226 {
3227 unsigned int i;
3228 group_info *group;
3229
3230 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3231 {
3232 if (group->process_globally && group->frame_related)
3233 bitmap_ior_into (bb_info->gen, group->group_kill);
3234 }
3235 }
3236 }
3237
3238
3239 /* Find all of the blocks that are not backwards reachable from the
3240 exit block or any block with no successors (BB). These are the
3241 infinite loops or infinite self loops. These blocks will still
3242 have their bits set in UNREACHABLE_BLOCKS. */
3243
3244 static void
3245 mark_reachable_blocks (sbitmap unreachable_blocks, basic_block bb)
3246 {
3247 edge e;
3248 edge_iterator ei;
3249
3250 if (bitmap_bit_p (unreachable_blocks, bb->index))
3251 {
3252 bitmap_clear_bit (unreachable_blocks, bb->index);
3253 FOR_EACH_EDGE (e, ei, bb->preds)
3254 {
3255 mark_reachable_blocks (unreachable_blocks, e->src);
3256 }
3257 }
3258 }
3259
3260 /* Build the transfer functions for the function. */
3261
3262 static void
3263 dse_step3 (bool for_spills)
3264 {
3265 basic_block bb;
3266 sbitmap unreachable_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
3267 sbitmap_iterator sbi;
3268 bitmap all_ones = NULL;
3269 unsigned int i;
3270
3271 bitmap_ones (unreachable_blocks);
3272
3273 FOR_ALL_BB_FN (bb, cfun)
3274 {
3275 bb_info_t bb_info = bb_table[bb->index];
3276 if (bb_info->gen)
3277 bitmap_clear (bb_info->gen);
3278 else
3279 bb_info->gen = BITMAP_ALLOC (&dse_bitmap_obstack);
3280
3281 if (bb->index == ENTRY_BLOCK)
3282 ;
3283 else if (bb->index == EXIT_BLOCK)
3284 dse_step3_exit_block_scan (bb_info);
3285 else
3286 dse_step3_scan (for_spills, bb);
3287 if (EDGE_COUNT (bb->succs) == 0)
3288 mark_reachable_blocks (unreachable_blocks, bb);
3289
3290 /* If this is the second time dataflow is run, delete the old
3291 sets. */
3292 if (bb_info->in)
3293 BITMAP_FREE (bb_info->in);
3294 if (bb_info->out)
3295 BITMAP_FREE (bb_info->out);
3296 }
3297
3298 /* For any block in an infinite loop, we must initialize the out set
3299 to all ones. This could be expensive, but almost never occurs in
3300 practice. However, it is common in regression tests. */
3301 EXECUTE_IF_SET_IN_BITMAP (unreachable_blocks, 0, i, sbi)
3302 {
3303 if (bitmap_bit_p (all_blocks, i))
3304 {
3305 bb_info_t bb_info = bb_table[i];
3306 if (!all_ones)
3307 {
3308 unsigned int j;
3309 group_info *group;
3310
3311 all_ones = BITMAP_ALLOC (&dse_bitmap_obstack);
3312 FOR_EACH_VEC_ELT (rtx_group_vec, j, group)
3313 bitmap_ior_into (all_ones, group->group_kill);
3314 }
3315 if (!bb_info->out)
3316 {
3317 bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
3318 bitmap_copy (bb_info->out, all_ones);
3319 }
3320 }
3321 }
3322
3323 if (all_ones)
3324 BITMAP_FREE (all_ones);
3325 sbitmap_free (unreachable_blocks);
3326 }
3327
3328
3329 \f
3330 /*----------------------------------------------------------------------------
3331 Fourth step.
3332
3333 Solve the bitvector equations.
3334 ----------------------------------------------------------------------------*/
3335
3336
3337 /* Confluence function for blocks with no successors. Create an out
3338 set from the gen set of the exit block. This block logically has
3339 the exit block as a successor. */
3340
3341
3342
3343 static void
3344 dse_confluence_0 (basic_block bb)
3345 {
3346 bb_info_t bb_info = bb_table[bb->index];
3347
3348 if (bb->index == EXIT_BLOCK)
3349 return;
3350
3351 if (!bb_info->out)
3352 {
3353 bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
3354 bitmap_copy (bb_info->out, bb_table[EXIT_BLOCK]->gen);
3355 }
3356 }
3357
3358 /* Propagate the information from the in set of the dest of E to the
3359 out set of the src of E. If the various in or out sets are not
3360 there, that means they are all ones. */
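/* Informally, OUT (src) is the intersection of IN (dest) over all
   successor edges: a byte position is still "dead on exit" from SRC only
   if every successor path overwrites it before reading it.  */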
3361
3362 static bool
3363 dse_confluence_n (edge e)
3364 {
3365 bb_info_t src_info = bb_table[e->src->index];
3366 bb_info_t dest_info = bb_table[e->dest->index];
3367
3368 if (dest_info->in)
3369 {
3370 if (src_info->out)
3371 bitmap_and_into (src_info->out, dest_info->in);
3372 else
3373 {
3374 src_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
3375 bitmap_copy (src_info->out, dest_info->in);
3376 }
3377 }
3378 return true;
3379 }
3380
3381
3382 /* Propagate the info from the out to the in set of BB_INDEX's basic
3383 block. There are three cases:
3384
3385 1) The block has no kill set. In this case the kill set is all
3386 ones. It does not matter what the out set of the block is, none of
3387 the info can reach the top. The only thing that reaches the top is
3388 the gen set and we just copy the set.
3389
3390 2) There is a kill set but no out set and bb has successors. In
3391 this case we just return. Eventually an out set will be created and
3392 it is better to wait than to create a set of ones.
3393
3394 3) There is both a kill and out set. We apply the obvious transfer
3395 function.
3396 */
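/* In case 3 the equation computed below is, informally,
   IN (bb) = GEN (bb) | (OUT (bb) & ~KILL (bb)),
   i.e. a store position reaches the top of the block if the block itself
   generates it, or if it flows in from below and is not killed by a read
   in the block.  */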
3397
3398 static bool
3399 dse_transfer_function (int bb_index)
3400 {
3401 bb_info_t bb_info = bb_table[bb_index];
3402
3403 if (bb_info->kill)
3404 {
3405 if (bb_info->out)
3406 {
3407 /* Case 3 above. */
3408 if (bb_info->in)
3409 return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
3410 bb_info->out, bb_info->kill);
3411 else
3412 {
3413 bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
3414 bitmap_ior_and_compl (bb_info->in, bb_info->gen,
3415 bb_info->out, bb_info->kill);
3416 return true;
3417 }
3418 }
3419 else
3420 /* Case 2 above. */
3421 return false;
3422 }
3423 else
3424 {
3425 /* Case 1 above. If there is already an in set, nothing
3426 happens. */
3427 if (bb_info->in)
3428 return false;
3429 else
3430 {
3431 bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
3432 bitmap_copy (bb_info->in, bb_info->gen);
3433 return true;
3434 }
3435 }
3436 }
3437
3438 /* Solve the dataflow equations. */
3439
3440 static void
3441 dse_step4 (void)
3442 {
3443 df_simple_dataflow (DF_BACKWARD, NULL, dse_confluence_0,
3444 dse_confluence_n, dse_transfer_function,
3445 all_blocks, df_get_postorder (DF_BACKWARD),
3446 df_get_n_blocks (DF_BACKWARD));
3447 if (dump_file && (dump_flags & TDF_DETAILS))
3448 {
3449 basic_block bb;
3450
3451 fprintf (dump_file, "\n\n*** Global dataflow info after analysis.\n");
3452 FOR_ALL_BB_FN (bb, cfun)
3453 {
3454 bb_info_t bb_info = bb_table[bb->index];
3455
3456 df_print_bb_index (bb, dump_file);
3457 if (bb_info->in)
3458 bitmap_print (dump_file, bb_info->in, " in: ", "\n");
3459 else
3460 fprintf (dump_file, " in: *MISSING*\n");
3461 if (bb_info->gen)
3462 bitmap_print (dump_file, bb_info->gen, " gen: ", "\n");
3463 else
3464 fprintf (dump_file, " gen: *MISSING*\n");
3465 if (bb_info->kill)
3466 bitmap_print (dump_file, bb_info->kill, " kill: ", "\n");
3467 else
3468 fprintf (dump_file, " kill: *MISSING*\n");
3469 if (bb_info->out)
3470 bitmap_print (dump_file, bb_info->out, " out: ", "\n");
3471 else
3472 fprintf (dump_file, " out: *MISSING*\n\n");
3473 }
3474 }
3475 }
3476
3477
3478 \f
3479 /*----------------------------------------------------------------------------
3480 Fifth step.
3481
3482 Delete the stores that can only be deleted using the global information.
3483 ----------------------------------------------------------------------------*/
3484
3485
3486 static void
3487 dse_step5_nospill (void)
3488 {
3489 basic_block bb;
3490 FOR_EACH_BB_FN (bb, cfun)
3491 {
3492 bb_info_t bb_info = bb_table[bb->index];
3493 insn_info_t insn_info = bb_info->last_insn;
3494 bitmap v = bb_info->out;
3495
3496 while (insn_info)
3497 {
3498 bool deleted = false;
3499 if (dump_file && insn_info->insn)
3500 {
3501 fprintf (dump_file, "starting to process insn %d\n",
3502 INSN_UID (insn_info->insn));
3503 bitmap_print (dump_file, v, " v: ", "\n");
3504 }
3505
3506 /* There may have been code deleted by the dce pass run before
3507 this phase. */
3508 if (insn_info->insn
3509 && INSN_P (insn_info->insn)
3510 && (!insn_info->cannot_delete)
3511 && (!bitmap_empty_p (v)))
3512 {
3513 store_info *store_info = insn_info->store_rec;
3514
3515 /* Try to delete the current insn. */
3516 deleted = true;
3517
3518 /* Skip the clobbers. */
3519 while (!store_info->is_set)
3520 store_info = store_info->next;
3521
3522 if (store_info->alias_set)
3523 deleted = false;
3524 else
3525 {
3526 HOST_WIDE_INT i;
3527 group_info *group_info
3528 = rtx_group_vec[store_info->group_id];
3529
3530 for (i = store_info->begin; i < store_info->end; i++)
3531 {
3532 int index = get_bitmap_index (group_info, i);
3533
3534 if (dump_file && (dump_flags & TDF_DETAILS))
3535 fprintf (dump_file, "i = %d, index = %d\n", (int)i, index);
3536 if (index == 0 || !bitmap_bit_p (v, index))
3537 {
3538 if (dump_file && (dump_flags & TDF_DETAILS))
3539 fprintf (dump_file, "failing at i = %d\n", (int)i);
3540 deleted = false;
3541 break;
3542 }
3543 }
3544 }
3545 if (deleted)
3546 {
3547 if (dbg_cnt (dse)
3548 && check_for_inc_dec_1 (insn_info))
3549 {
3550 delete_insn (insn_info->insn);
3551 insn_info->insn = NULL;
3552 globally_deleted++;
3553 }
3554 }
3555 }
3556 /* We do not want to process the local info if the insn was
3557 deleted. For instance, if the insn did a wild read, we
3558 no longer need to trash the info. */
3559 if (insn_info->insn
3560 && INSN_P (insn_info->insn)
3561 && (!deleted))
3562 {
3563 scan_stores_nospill (insn_info->store_rec, v, NULL);
3564 if (insn_info->wild_read)
3565 {
3566 if (dump_file && (dump_flags & TDF_DETAILS))
3567 fprintf (dump_file, "wild read\n");
3568 bitmap_clear (v);
3569 }
3570 else if (insn_info->read_rec
3571 || insn_info->non_frame_wild_read)
3572 {
3573 if (dump_file && !insn_info->non_frame_wild_read)
3574 fprintf (dump_file, "regular read\n");
3575 else if (dump_file && (dump_flags & TDF_DETAILS))
3576 fprintf (dump_file, "non-frame wild read\n");
3577 scan_reads_nospill (insn_info, v, NULL);
3578 }
3579 }
3580
3581 insn_info = insn_info->prev_insn;
3582 }
3583 }
3584 }
3585
3586
3587 \f
3588 /*----------------------------------------------------------------------------
3589 Sixth step.
3590
3591 Delete stores made redundant by earlier stores (which store the same
3592 value) that couldn't be eliminated.
3593 ----------------------------------------------------------------------------*/
3594
3595 static void
3596 dse_step6 (void)
3597 {
3598 basic_block bb;
3599
3600 FOR_ALL_BB_FN (bb, cfun)
3601 {
3602 bb_info_t bb_info = bb_table[bb->index];
3603 insn_info_t insn_info = bb_info->last_insn;
3604
3605 while (insn_info)
3606 {
3607 /* There may have been code deleted by the dce pass run before
3608 this phase. */
3609 if (insn_info->insn
3610 && INSN_P (insn_info->insn)
3611 && !insn_info->cannot_delete)
3612 {
3613 store_info *s_info = insn_info->store_rec;
3614
3615 while (s_info && !s_info->is_set)
3616 s_info = s_info->next;
3617 if (s_info
3618 && s_info->redundant_reason
3619 && s_info->redundant_reason->insn
3620 && INSN_P (s_info->redundant_reason->insn))
3621 {
3622 rtx_insn *rinsn = s_info->redundant_reason->insn;
3623 if (dump_file && (dump_flags & TDF_DETAILS))
3624 fprintf (dump_file, "Locally deleting insn %d "
3625 "because insn %d stores the "
3626 "same value and couldn't be "
3627 "eliminated\n",
3628 INSN_UID (insn_info->insn),
3629 INSN_UID (rinsn));
3630 delete_dead_store_insn (insn_info);
3631 }
3632 }
3633 insn_info = insn_info->prev_insn;
3634 }
3635 }
3636 }
3637 \f
3638 /*----------------------------------------------------------------------------
3639 Seventh step.
3640
3641 Destroy everything left standing.
3642 ----------------------------------------------------------------------------*/
3643
3644 static void
3645 dse_step7 (void)
3646 {
3647 bitmap_obstack_release (&dse_bitmap_obstack);
3648 obstack_free (&dse_obstack, NULL);
3649
3650 end_alias_analysis ();
3651 free (bb_table);
3652 delete rtx_group_table;
3653 rtx_group_table = NULL;
3654 rtx_group_vec.release ();
3655 BITMAP_FREE (all_blocks);
3656 BITMAP_FREE (scratch);
3657
3658 rtx_store_info_pool.release ();
3659 read_info_type_pool.release ();
3660 insn_info_type_pool.release ();
3661 dse_bb_info_type_pool.release ();
3662 group_info_pool.release ();
3663 deferred_change_pool.release ();
3664 }
3665
3666
3667 /* -------------------------------------------------------------------------
3668 DSE
3669 ------------------------------------------------------------------------- */
3670
3671 /* Callback for running pass_rtl_dse. */
3672
3673 static unsigned int
3674 rest_of_handle_dse (void)
3675 {
3676 df_set_flags (DF_DEFER_INSN_RESCAN);
3677
3678 /* Need the notes since we must track live hardregs in the forwards
3679 direction. */
3680 df_note_add_problem ();
3681 df_analyze ();
3682
3683 dse_step0 ();
3684 dse_step1 ();
3685 dse_step2_init ();
3686 if (dse_step2_nospill ())
3687 {
3688 df_set_flags (DF_LR_RUN_DCE);
3689 df_analyze ();
3690 if (dump_file && (dump_flags & TDF_DETAILS))
3691 fprintf (dump_file, "doing global processing\n");
3692 dse_step3 (false);
3693 dse_step4 ();
3694 dse_step5_nospill ();
3695 }
3696
3697 dse_step6 ();
3698 dse_step7 ();
3699
3700 if (dump_file)
3701 fprintf (dump_file, "dse: local deletions = %d, global deletions = %d, spill deletions = %d\n",
3702 locally_deleted, globally_deleted, spill_deleted);
3703
3704 /* DSE can eliminate potentially-trapping MEMs.
3705 Remove any EH edges associated with them. */
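/* A hypothetical sketch (only relevant with -fnon-call-exceptions):

       *p = x;            <- may trap, so it carries an EH edge
       *p = y;            <- overwrites it with no intervening read

   If DSE deletes the first store, the EH edge hanging off it becomes
   dead; purge_all_dead_edges removes such edges, and if any were
   removed a cfg cleanup is run.  */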
3706 if ((locally_deleted || globally_deleted)
3707 && cfun->can_throw_non_call_exceptions
3708 && purge_all_dead_edges ())
3709 cleanup_cfg (0);
3710
3711 return 0;
3712 }
3713
3714 namespace {
3715
3716 const pass_data pass_data_rtl_dse1 =
3717 {
3718 RTL_PASS, /* type */
3719 "dse1", /* name */
3720 OPTGROUP_NONE, /* optinfo_flags */
3721 TV_DSE1, /* tv_id */
3722 0, /* properties_required */
3723 0, /* properties_provided */
3724 0, /* properties_destroyed */
3725 0, /* todo_flags_start */
3726 TODO_df_finish, /* todo_flags_finish */
3727 };
3728
3729 class pass_rtl_dse1 : public rtl_opt_pass
3730 {
3731 public:
3732 pass_rtl_dse1 (gcc::context *ctxt)
3733 : rtl_opt_pass (pass_data_rtl_dse1, ctxt)
3734 {}
3735
3736 /* opt_pass methods: */
3737 virtual bool gate (function *)
3738 {
3739 return optimize > 0 && flag_dse && dbg_cnt (dse1);
3740 }
3741
3742 virtual unsigned int execute (function *) { return rest_of_handle_dse (); }
3743
3744 }; // class pass_rtl_dse1
3745
3746 } // anon namespace
3747
3748 rtl_opt_pass *
3749 make_pass_rtl_dse1 (gcc::context *ctxt)
3750 {
3751 return new pass_rtl_dse1 (ctxt);
3752 }
3753
3754 namespace {
3755
3756 const pass_data pass_data_rtl_dse2 =
3757 {
3758 RTL_PASS, /* type */
3759 "dse2", /* name */
3760 OPTGROUP_NONE, /* optinfo_flags */
3761 TV_DSE2, /* tv_id */
3762 0, /* properties_required */
3763 0, /* properties_provided */
3764 0, /* properties_destroyed */
3765 0, /* todo_flags_start */
3766 TODO_df_finish, /* todo_flags_finish */
3767 };
3768
3769 class pass_rtl_dse2 : public rtl_opt_pass
3770 {
3771 public:
3772 pass_rtl_dse2 (gcc::context *ctxt)
3773 : rtl_opt_pass (pass_data_rtl_dse2, ctxt)
3774 {}
3775
3776 /* opt_pass methods: */
3777 virtual bool gate (function *)
3778 {
3779 return optimize > 0 && flag_dse && dbg_cnt (dse2);
3780 }
3781
3782 virtual unsigned int execute (function *) { return rest_of_handle_dse (); }
3783
3784 }; // class pass_rtl_dse2
3785
3786 } // anon namespace
3787
3788 rtl_opt_pass *
3789 make_pass_rtl_dse2 (gcc::context *ctxt)
3790 {
3791 return new pass_rtl_dse2 (ctxt);
3792 }