1 /* RTL dead store elimination.
2 Copyright (C) 2005-2015 Free Software Foundation, Inc.
3
4 Contributed by Richard Sandiford <rsandifor@codesourcery.com>
5 and Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #undef BASELINE
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "hash-table.h"
29 #include "tm.h"
30 #include "rtl.h"
31 #include "hash-set.h"
32 #include "vec.h"
33 #include "input.h"
34 #include "alias.h"
35 #include "symtab.h"
36 #include "inchash.h"
37 #include "tree.h"
38 #include "fold-const.h"
39 #include "stor-layout.h"
40 #include "tm_p.h"
41 #include "regs.h"
42 #include "hard-reg-set.h"
43 #include "regset.h"
44 #include "flags.h"
45 #include "dominance.h"
46 #include "cfg.h"
47 #include "cfgrtl.h"
48 #include "predict.h"
49 #include "basic-block.h"
50 #include "df.h"
51 #include "cselib.h"
52 #include "tree-pass.h"
53 #include "alloc-pool.h"
54 #include "insn-config.h"
55 #include "hashtab.h"
56 #include "function.h"
57 #include "statistics.h"
58 #include "expmed.h"
59 #include "dojump.h"
60 #include "explow.h"
61 #include "calls.h"
62 #include "emit-rtl.h"
63 #include "varasm.h"
64 #include "stmt.h"
65 #include "expr.h"
66 #include "recog.h"
67 #include "insn-codes.h"
68 #include "optabs.h"
69 #include "dbgcnt.h"
70 #include "target.h"
71 #include "params.h"
72 #include "tree-ssa-alias.h"
73 #include "internal-fn.h"
74 #include "gimple-expr.h"
75 #include "is-a.h"
76 #include "gimple.h"
77 #include "gimple-ssa.h"
78 #include "rtl-iter.h"
79 #include "cfgcleanup.h"
80
81 /* This file contains three techniques for performing Dead Store
82 Elimination (dse).
83
84 * The first technique performs dse locally on any base address. It
85 is based on the cselib which is a local value numbering technique.
86 This technique is local to a basic block but deals with fairly
87 general addresses.
88
89 * The second technique performs dse globally but is restricted to
90 base addresses that are either constant or are relative to the
91 frame_pointer.
92
93 * The third technique (which is only done after register allocation)
94 processes the spill slots. This differs from the second
95 technique because it takes advantage of the fact that spilling is
96 completely free from the effects of aliasing.
97
98 Logically, dse is a backwards dataflow problem. A store can be
99 deleted if it cannot be reached in the backward direction by any
100 use of the value being stored. However, the local technique uses a
101 forwards scan of the basic block because cselib requires that the
102 block be processed in that order.
103
104 The pass is logically broken into 7 steps:
105
106 0) Initialization.
107
108 1) The local algorithm, as well as scanning the insns for the two
109 global algorithms.
110
111 2) Analysis to see if the global algs are necessary. In the case
112 of stores based on a constant address, there must be at least two
113 stores to that address, to make it possible to delete some of the
114 stores. In the case of stores off of the frame or spill related
115 stores, only one store to an address is necessary because those
116 stores die at the end of the function.
117
118 3) Set up the global dataflow equations based on processing the
119 info parsed in the first step.
120
121 4) Solve the dataflow equations.
122
123 5) Delete the insns that the global analysis has indicated are
124 unnecessary.
125
126 6) Delete insns that store the same value as a preceding store
127 where the earlier store couldn't be eliminated.
128
129 7) Cleanup.
130
131 The first step uses cselib and canon_rtx to build the largest expression
132 possible for each address. This pass is a forwards pass through
133 each basic block. From the point of view of the global technique,
134 the first pass could examine a block in either direction. The
135 forwards ordering is to accommodate cselib.
136
137 We make a simplifying assumption: addresses fall into four broad
138 categories:
139
140 1) base has rtx_varies_p == false, offset is constant.
141 2) base has rtx_varies_p == false, offset variable.
142 3) base has rtx_varies_p == true, offset constant.
143 4) base has rtx_varies_p == true, offset variable.
144
145 The local passes are able to process all 4 kinds of addresses. The
146 global pass only handles 1).
147
148 The global problem is formulated as follows:
149
150 A store, S1, to address A, where A is not relative to the stack
151 frame, can be eliminated if all paths from S1 to the end of the
152 function contain another store to A before a read to A.
153
154 If the address A is relative to the stack frame, a store S2 to A
155 can be eliminated if there are no paths from S2 that reach the
156 end of the function that read A before another store to A. In
157 this case S2 can be deleted if there are paths from S2 to the
158 end of the function that have no reads or writes to A. This
159 second case allows stores to the stack frame to be deleted that
160 would otherwise die when the function returns. This cannot be
161 done if stores_off_frame_dead_at_return is not true. See the doc
162 for that variable for an explanation of when it is false.
163
164 The global problem is formulated as a backwards set intersection
165 dataflow problem where the stores are the gens and reads are the
166 kills. Set intersection problems are rare and require some special
167 handling given our representation of bitmaps. A straightforward
168 implementation requires a lot of bitmaps filled with 1s.
169 These are expensive and cumbersome in our bitmap formulation so
170 care has been taken to avoid large vectors filled with 1s. See
171 the comments in bb_info and in the dataflow confluence functions
172 for details.
173
174 There are two places for further enhancements to this algorithm:
175
176 1) The original dse which was embedded in a pass called flow also
177 did local address forwarding. For example in
178
179 A <- r100
180 ... <- A
181
182 flow would replace the right hand side of the second insn with a
183 reference to r100. Most of the information is available to add this
184 to this pass. It has not been done because it is a lot of work in
185 the case that either r100 is assigned to between the first and
186 second insn and/or the second insn is a load of part of the value
187 stored by the first insn.
188
189 insn 5 in gcc.c-torture/compile/990203-1.c simple case.
190 insn 15 in gcc.c-torture/execute/20001017-2.c simple case.
191 insn 25 in gcc.c-torture/execute/20001026-1.c simple case.
192 insn 44 in gcc.c-torture/execute/20010910-1.c simple case.
193
194 2) The cleaning up of spill code is quite profitable. It currently
195 depends on reading tea leaves and chicken entrails left by reload.
196 This pass depends on reload creating a singleton alias set for each
197 spill slot and telling the next dse pass which of these alias sets
198 are the singletons. Rather than analyze the addresses of the
199 spills, dse's spill processing just does analysis of the loads and
200 stores that use those alias sets. There are three cases where this
201 falls short:
202
203 a) Reload sometimes creates the slot for one mode of access, and
204 then inserts loads and/or stores for a smaller mode. In this
205 case, the current code just punts on the slot. The proper thing
206 to do is to back out and use one bit vector position for each
207 byte of the entity associated with the slot. This depends on
208 KNOWING that reload always generates the accesses for each of the
209 bytes in some canonical (read: easy to understand several
210 passes after reload happens) way.
211
212 b) Reload sometimes decides that the spill slot it allocated was not
213 large enough for the mode and goes back and allocates more slots
214 with the same mode and alias set. The backout in this case is a
215 little more graceful than (a). In this case the slot is unmarked
216 as being a spill slot and if the final address comes out to be based
217 off the frame pointer, the global algorithm handles this slot.
218
219 c) For any pass that may prespill, there is currently no
220 mechanism to tell the dse pass that the slot being used has the
221 special properties that reload uses. It may be that all that is
222 required is to have those passes make the same calls that reload
223 does, assuming that the alias sets can be manipulated in the same
224 way. */
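/* A minimal worked example of the global formulation above (an
   illustration, not from the original sources).  Consider a function
   whose frame contains a local array that never escapes:

       void f (void)
       {
         int buf[2];
         buf[0] = 1;    /* (A) */
         buf[0] = 2;    /* (B) */
         g ();
       }

   Store (A) is dead by the first rule: every path from (A) to the end
   of the function contains another store to the same address, (B),
   before any read of it.  Store (B) is dead by the frame-relative
   rule: no path from (B) reads buf[0] before the function returns, so
   the store dies with the frame (this requires
   stores_off_frame_dead_at_return, and that buf's address does not
   escape to g).  */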
225
226 /* There are limits to the size of constant offsets we model for the
227 global problem. There are certainly test cases that exceed this
228 limit; however, it is unlikely that there are important programs
229 that really have constant offsets this size. */
230 #define MAX_OFFSET (64 * 1024)
231
232 /* Obstack for the DSE dataflow bitmaps. We don't want to put these
233 on the default obstack because these bitmaps can grow quite large
234 (~2GB for the small (!) test case of PR54146) and we'll hold on to
235 all that memory until the end of the compiler run.
236 As a bonus, the final cleanup step can destroy all the bitmaps by just
237 releasing the whole obstack. */
238 static bitmap_obstack dse_bitmap_obstack;
239
240 /* Obstack for other data. As for above: Kinda nice to be able to
241 throw it all away at the end in one big sweep. */
242 static struct obstack dse_obstack;
243
244 /* Scratch bitmap for cselib's cselib_expand_value_rtx. */
245 static bitmap scratch = NULL;
246
247 struct insn_info_type;
248
249 /* This structure holds information about a candidate store. */
250 struct store_info
251 {
252
253 /* False means this is a clobber. */
254 bool is_set;
255
256 /* False if a single HOST_WIDE_INT bitmap is used for positions_needed. */
257 bool is_large;
258
259 /* The id of the mem group of the base address. If rtx_varies_p is
260 true, this is -1. Otherwise, it is the index into the group
261 table. */
262 int group_id;
263
264 /* This is the cselib value. */
265 cselib_val *cse_base;
266
267 /* This is the canonized mem. */
268 rtx mem;
269
270 /* Canonized MEM address for use by canon_true_dependence. */
271 rtx mem_addr;
272
273 /* If this is non-zero, it is the alias set of a spill location. */
274 alias_set_type alias_set;
275
276 /* The offset of the first byte and the byte after the last byte
277 associated with the operation. */
278 HOST_WIDE_INT begin, end;
279
280 union
281 {
282 /* A bitmask as wide as the number of bytes in the word that
283 contains a 1 if the byte may be needed. The store is unused if
284 all of the bits are 0. This is used if IS_LARGE is false. */
285 unsigned HOST_WIDE_INT small_bitmask;
286
287 struct
288 {
289 /* A bitmap with one bit per byte. Cleared bit means the position
290 is needed. Used if IS_LARGE is true. */
291 bitmap bmap;
292
293 /* Number of set bits (i.e. unneeded bytes) in BMAP. If it is
294 equal to END - BEGIN, the whole store is unused. */
295 int count;
296 } large;
297 } positions_needed;
298
299 /* The next store info for this insn. */
300 struct store_info *next;
301
302 /* The right hand side of the store. This is used if there is a
303 subsequent reload of the mem's address somewhere later in the
304 basic block. */
305 rtx rhs;
306
307 /* If rhs is or holds a constant, this contains that constant,
308 otherwise NULL. */
309 rtx const_rhs;
310
311 /* Set if this store stores the same constant value as REDUNDANT_REASON
312 insn stored. These aren't eliminated early, because doing that
313 might prevent the earlier larger store from being eliminated. */
314 struct insn_info_type *redundant_reason;
315 };
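/* A small worked example of POSITIONS_NEEDED (illustrative only): for
   an 8-byte store with BEGIN == 0 and END == 8, IS_LARGE is false and
   small_bitmask starts out as 0xff, one bit per byte.  If a later
   store overwrites bytes 0..3, set_position_unneeded clears bits 0..3,
   leaving 0xf0; once the mask reaches 0 the whole store is unneeded
   and the insn becomes a candidate for deletion.  */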
316
317 /* Return a bitmask with the first N low bits set. */
318
319 static unsigned HOST_WIDE_INT
320 lowpart_bitmask (int n)
321 {
322 unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT) 0;
323 return mask >> (HOST_BITS_PER_WIDE_INT - n);
324 }
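/* For example, with a 64-bit HOST_WIDE_INT, lowpart_bitmask (3) yields
   0x7 and lowpart_bitmask (8) yields 0xff.  Note that N is assumed to
   satisfy 1 <= N <= HOST_BITS_PER_WIDE_INT; outside that range the
   shift count would be out of bounds.  */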
325
326 typedef struct store_info *store_info_t;
327 static pool_allocator<store_info> cse_store_info_pool ("cse_store_info_pool",
328 100);
329
330 static pool_allocator<store_info> rtx_store_info_pool ("rtx_store_info_pool",
331 100);
332
333 /* This structure holds information about a load. These are only
334 built for rtx bases. */
335 struct read_info_type
336 {
337 /* The id of the mem group of the base address. */
338 int group_id;
339
340 /* If this is non-zero, it is the alias set of a spill location. */
341 alias_set_type alias_set;
342
343 /* The offset of the first byte and the byte after the last byte
344 associated with the operation. If begin == end == 0, the read did
345 not have a constant offset. */
346 int begin, end;
347
348 /* The mem being read. */
349 rtx mem;
350
351 /* The next read_info for this insn. */
352 struct read_info_type *next;
353
354 /* Pool allocation new operator. */
355 inline void *operator new (size_t)
356 {
357 return pool.allocate ();
358 }
359
360 /* Delete operator utilizing pool allocation. */
361 inline void operator delete (void *ptr)
362 {
363 pool.remove ((read_info_type *) ptr);
364 }
365
366 /* Memory allocation pool. */
367 static pool_allocator<read_info_type> pool;
368 };
369 typedef struct read_info_type *read_info_t;
370
371 pool_allocator<read_info_type> read_info_type::pool ("read_info_pool", 100);
372
373 /* One of these records is created for each insn. */
374
375 struct insn_info_type
376 {
377 /* Set true if the insn contains a store but the insn itself cannot
378 be deleted. This is set if the insn is a parallel and there is
379 more than one non-dead output or if the insn is in some way
380 volatile. */
381 bool cannot_delete;
382
383 /* This field is only used by the global algorithm. It is set true
384 if the insn contains any read of mem except from a category (1)
385 address. This is also set if the insn is a call or contains a mem
386 clobber. If the insn contains a wild read, the read_rec will be null. */
387 bool wild_read;
388
389 /* This is true only for CALL instructions which could potentially read
390 any non-frame memory location. This field is used by the global
391 algorithm. */
392 bool non_frame_wild_read;
393
394 /* This field is only used for the processing of const functions.
395 These functions cannot read memory, but they can read the stack
396 because that is where they may get their parms. We need to be
397 this conservative because, like the store motion pass, we don't
398 consider CALL_INSN_FUNCTION_USAGE when processing call insns.
399 Moreover, we need to distinguish two cases:
400 1. Before reload (register elimination), the stores related to
401 outgoing arguments are stack pointer based and thus deemed
402 of non-constant base in this pass. This requires special
403 handling but also means that the frame pointer based stores
404 need not be killed upon encountering a const function call.
405 2. After reload, the stores related to outgoing arguments can be
406 either stack pointer or hard frame pointer based. This means
407 that we have no other choice than also killing all the frame
408 pointer based stores upon encountering a const function call.
409 This field is set after reload for const function calls and before
410 reload for const tail function calls on targets where arg pointer
411 is the frame pointer. Having this set is less severe than a wild
412 read, it just means that all the frame related stores are killed
413 rather than all the stores. */
414 bool frame_read;
415
416 /* This field is only used for the processing of const functions.
417 It is set if the insn may contain a stack pointer based store. */
418 bool stack_pointer_based;
419
420 /* This is true if any of the sets within the store contains a
421 cselib base. Such stores can only be deleted by the local
422 algorithm. */
423 bool contains_cselib_groups;
424
425 /* The insn. */
426 rtx_insn *insn;
427
428 /* The list of mem sets or mem clobbers that are contained in this
429 insn. If the insn is deletable, it contains only one mem set.
430 But it could also contain clobbers. Insns that contain more than
431 one mem set are not deletable, but each of those mems is here in
432 order to provide info to delete other insns. */
433 store_info_t store_rec;
434
435 /* The linked list of mem uses in this insn. Only the reads from
436 rtx bases are listed here. The reads to cselib bases are
437 completely processed during the first scan and so are never
438 created. */
439 read_info_t read_rec;
440
441 /* The live fixed registers. We assume only fixed registers can
442 cause trouble by being clobbered from an expanded pattern;
443 storing only the live fixed registers (rather than all registers)
444 means less memory needs to be allocated / copied for the individual
445 stores. */
446 regset fixed_regs_live;
447
448 /* The prev insn in the basic block. */
449 struct insn_info_type * prev_insn;
450
451 /* The linked list of insns that are in consideration for removal in
452 the forwards pass through the basic block. This pointer may be
453 trash as it is not cleared when a wild read occurs. The only
454 time it is guaranteed to be correct is when the traversal starts
455 at active_local_stores. */
456 struct insn_info_type * next_local_store;
457
458 /* Pool allocation new operator. */
459 inline void *operator new (size_t)
460 {
461 return pool.allocate ();
462 }
463
464 /* Delete operator utilizing pool allocation. */
465 inline void operator delete (void *ptr)
466 {
467 pool.remove ((insn_info_type *) ptr);
468 }
469
470 /* Memory allocation pool. */
471 static pool_allocator<insn_info_type> pool;
472 };
473 typedef struct insn_info_type *insn_info_t;
474
475 pool_allocator<insn_info_type> insn_info_type::pool ("insn_info_pool", 100);
476
477 /* The linked list of stores that are under consideration in this
478 basic block. */
479 static insn_info_t active_local_stores;
480 static int active_local_stores_len;
481
482 struct dse_bb_info_type
483 {
484 /* Pointer to the insn info for the last insn in the block. These
485 are linked so this is how all of the insns are reached. During
486 scanning this is the current insn being scanned. */
487 insn_info_t last_insn;
488
489 /* The info for the global dataflow problem. */
490
491
492 /* This is set if the transfer function should AND in the wild_read
493 bitmap before applying the kill and gen sets. That vector knocks
494 out most of the bits in the bitmap and thus speeds up the
495 operations. */
496 bool apply_wild_read;
497
498 /* The following 4 bitvectors hold information about which positions
499 of which stores are live or dead. They are indexed by
500 get_bitmap_index. */
501
502 /* The set of store positions that exist in this block before a wild read. */
503 bitmap gen;
504
505 /* The set of load positions that exist in this block above the
506 same position of a store. */
507 bitmap kill;
508
509 /* The set of stores that reach the top of the block without being
510 killed by a read.
511
512 Do not represent the in if it is all ones. Note that this is
513 what the bitvector should logically be initialized to for a set
514 intersection problem. However, like the kill set, this is too
515 expensive. So initially, the in set will only be created for the
516 exit block and any block that contains a wild read. */
517 bitmap in;
518
519 /* The set of stores that reach the bottom of the block from its
520 successors.
521
522 Do not represent the out if it is all ones. Note that this is
523 what the bitvector should logically be initialized to for a set
524 intersection problem. However, like the kill and in set, this is
525 too expensive. So what is done is that the confluence operator
526 just initializes the vector from one of the out sets of the
527 successors of the block. */
528 bitmap out;
529
530 /* The following bitvector is indexed by the reg number. It
531 contains the set of regs that are live at the current instruction
532 being processed. While it contains info for all of the
533 registers, only the hard registers are actually examined. It is used
534 to assure that shift and/or add sequences that are inserted do not
535 accidentally clobber live hard regs. */
536 bitmap regs_live;
537
538 /* Pool allocation new operator. */
539 inline void *operator new (size_t)
540 {
541 return pool.allocate ();
542 }
543
544 /* Delete operator utilizing pool allocation. */
545 inline void operator delete (void *ptr)
546 {
547 pool.remove ((dse_bb_info_type *) ptr);
548 }
549
550 /* Memory allocation pool. */
551 static pool_allocator<dse_bb_info_type> pool;
552 };
553
554 typedef struct dse_bb_info_type *bb_info_t;
555 pool_allocator<dse_bb_info_type> dse_bb_info_type::pool ("bb_info_pool", 100);
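/* Conceptually (a sketch; the precise transfer and confluence
   functions appear later in this file), these sets combine as

     IN = GEN | (OUT & ~KILL)

   within a block, and, because a store must be dead along every path,
   the confluence operator intersects a block's OUT with the IN sets
   of its successors.  The logical all-ones initial value of IN/OUT is
   only materialized where unavoidable, as noted in the comments
   above.  */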
556
557 /* Table to hold all bb_infos. */
558 static bb_info_t *bb_table;
559
560 /* There is a group_info for each rtx base that is used to reference
561 memory. There are not many rtx bases because they are
562 very limited in scope. */
563
564 struct group_info
565 {
566 /* The actual base of the address. */
567 rtx rtx_base;
568
569 /* The sequential id of the base. This allows us to have a
570 canonical ordering of these that is not based on addresses. */
571 int id;
572
573 /* True if there are any positions that are to be processed
574 globally. */
575 bool process_globally;
576
577 /* True if the base of this group is either the frame_pointer or
578 hard_frame_pointer. */
579 bool frame_related;
580
581 /* A mem wrapped around the base pointer for the group in order to do
582 read dependency. It must be given BLKmode in order to encompass all
583 the possible offsets from the base. */
584 rtx base_mem;
585
586 /* Canonized version of base_mem's address. */
587 rtx canon_base_addr;
588
589 /* These two sets of two bitmaps are used to keep track of how many
590 stores are actually referencing that position from this base. We
591 only do this for rtx bases as this will be used to assign
592 positions in the bitmaps for the global problem. Bit N is set in
593 store1 on the first store for offset N. Bit N is set in store2
594 for the second store to offset N. This is all we need since we
595 only care about offsets that have two or more stores for them.
596
597 The "_n" suffix is for offsets less than 0 and the "_p" suffix is
598 for 0 and greater offsets.
599
600 There is one special case here, for stores into the stack frame,
601 we will OR store1 into store2 before deciding which stores to look
602 at globally. This is because stores to the stack frame that have
603 no other reads before the end of the function can also be
604 deleted. */
605 bitmap store1_n, store1_p, store2_n, store2_p;
606
607 /* These bitmaps keep track of which offsets in this group escape this function.
608 An offset escapes if it corresponds to a named variable whose
609 addressable flag is set. */
610 bitmap escaped_n, escaped_p;
611
612 /* The positions in this bitmap have the same assignments as the in,
613 out, gen and kill bitmaps. This bitmap is all zeros except for
614 the positions that are occupied by stores for this group. */
615 bitmap group_kill;
616
617 /* The offset_map is used to map the offsets from this base into
618 positions in the global bitmaps. It is only created after all
619 of the stores have been scanned and we know which ones we
620 care about. */
621 int *offset_map_n, *offset_map_p;
622 int offset_map_size_n, offset_map_size_p;
623
624 /* Pool allocation new operator. */
625 inline void *operator new (size_t)
626 {
627 return pool.allocate ();
628 }
629
630 /* Delete operator utilizing pool allocation. */
631 inline void operator delete (void *ptr)
632 {
633 pool.remove ((group_info *) ptr);
634 }
635
636 /* Memory allocation pool. */
637 static pool_allocator<group_info> pool;
638 };
639 typedef struct group_info *group_info_t;
640 typedef const struct group_info *const_group_info_t;
641
642 pool_allocator<group_info> group_info::pool ("rtx_group_info_pool", 100);
643
644 /* Index into the rtx_group_vec. */
645 static int rtx_group_next_id;
646
647
648 static vec<group_info_t> rtx_group_vec;
649
650
651 /* This structure holds the set of changes that are being deferred
652 when removing a read operation. See replace_read. */
653 struct deferred_change
654 {
655
656 /* The mem that is being replaced. */
657 rtx *loc;
658
659 /* The reg it is being replaced with. */
660 rtx reg;
661
662 struct deferred_change *next;
663
664 /* Pool allocation new operator. */
665 inline void *operator new (size_t)
666 {
667 return pool.allocate ();
668 }
669
670 /* Delete operator utilizing pool allocation. */
671 inline void operator delete (void *ptr)
672 {
673 pool.remove ((deferred_change *) ptr);
674 }
675
676 /* Memory allocation pool. */
677 static pool_allocator<deferred_change> pool;
678 };
679
680 typedef struct deferred_change *deferred_change_t;
681
682 pool_allocator<deferred_change> deferred_change::pool
683 ("deferred_change_pool", 10);
684
685 static deferred_change_t deferred_change_list = NULL;
686
687 /* The group that holds all of the clear_alias_sets. */
688 static group_info_t clear_alias_group;
689
690 /* The modes of the clear_alias_sets. */
691 static htab_t clear_alias_mode_table;
692
693 /* Hash table element to look up the mode for an alias set. */
694 struct clear_alias_mode_holder
695 {
696 alias_set_type alias_set;
697 machine_mode mode;
698 };
699
700 /* This is true except if cfun->stdarg -- i.e. we cannot do
701 this for vararg functions because they play games with the frame. */
702 static bool stores_off_frame_dead_at_return;
703
704 /* Counter for stats. */
705 static int globally_deleted;
706 static int locally_deleted;
707 static int spill_deleted;
708
709 static bitmap all_blocks;
710
711 /* Locations that are killed by calls in the global phase. */
712 static bitmap kill_on_calls;
713
714 /* The number of bits used in the global bitmaps. */
715 static unsigned int current_position;
716 \f
717 /*----------------------------------------------------------------------------
718 Zeroth step.
719
720 Initialization.
721 ----------------------------------------------------------------------------*/
722
723
724 /* Find the entry associated with ALIAS_SET. */
725
726 static struct clear_alias_mode_holder *
727 clear_alias_set_lookup (alias_set_type alias_set)
728 {
729 struct clear_alias_mode_holder tmp_holder;
730 void **slot;
731
732 tmp_holder.alias_set = alias_set;
733 slot = htab_find_slot (clear_alias_mode_table, &tmp_holder, NO_INSERT);
734 gcc_assert (*slot);
735
736 return (struct clear_alias_mode_holder *) *slot;
737 }
738
739
740 /* Hashtable callbacks for maintaining the "bases" field of
741 store_group_info, given that the addresses are function invariants. */
742
743 struct invariant_group_base_hasher : typed_noop_remove <group_info>
744 {
745 typedef group_info *value_type;
746 typedef group_info *compare_type;
747 static inline hashval_t hash (const group_info *);
748 static inline bool equal (const group_info *, const group_info *);
749 };
750
751 inline bool
752 invariant_group_base_hasher::equal (const group_info *gi1,
753 const group_info *gi2)
754 {
755 return rtx_equal_p (gi1->rtx_base, gi2->rtx_base);
756 }
757
758 inline hashval_t
759 invariant_group_base_hasher::hash (const group_info *gi)
760 {
761 int do_not_record;
762 return hash_rtx (gi->rtx_base, Pmode, &do_not_record, NULL, false);
763 }
764
765 /* Tables of group_info structures, hashed by base value. */
766 static hash_table<invariant_group_base_hasher> *rtx_group_table;
767
768
769 /* Get the GROUP for BASE. Add a new group if it is not there. */
770
771 static group_info_t
772 get_group_info (rtx base)
773 {
774 struct group_info tmp_gi;
775 group_info_t gi;
776 group_info **slot;
777
778 if (base)
779 {
780 /* Find the store_base_info structure for BASE, creating a new one
781 if necessary. */
782 tmp_gi.rtx_base = base;
783 slot = rtx_group_table->find_slot (&tmp_gi, INSERT);
784 gi = (group_info_t) *slot;
785 }
786 else
787 {
788 if (!clear_alias_group)
789 {
790 clear_alias_group = gi = new group_info;
791 memset (gi, 0, sizeof (struct group_info));
792 gi->id = rtx_group_next_id++;
793 gi->store1_n = BITMAP_ALLOC (&dse_bitmap_obstack);
794 gi->store1_p = BITMAP_ALLOC (&dse_bitmap_obstack);
795 gi->store2_n = BITMAP_ALLOC (&dse_bitmap_obstack);
796 gi->store2_p = BITMAP_ALLOC (&dse_bitmap_obstack);
797 gi->escaped_p = BITMAP_ALLOC (&dse_bitmap_obstack);
798 gi->escaped_n = BITMAP_ALLOC (&dse_bitmap_obstack);
799 gi->group_kill = BITMAP_ALLOC (&dse_bitmap_obstack);
800 gi->process_globally = false;
801 gi->offset_map_size_n = 0;
802 gi->offset_map_size_p = 0;
803 gi->offset_map_n = NULL;
804 gi->offset_map_p = NULL;
805 rtx_group_vec.safe_push (gi);
806 }
807 return clear_alias_group;
808 }
809
810 if (gi == NULL)
811 {
812 *slot = gi = new group_info;
813 gi->rtx_base = base;
814 gi->id = rtx_group_next_id++;
815 gi->base_mem = gen_rtx_MEM (BLKmode, base);
816 gi->canon_base_addr = canon_rtx (base);
817 gi->store1_n = BITMAP_ALLOC (&dse_bitmap_obstack);
818 gi->store1_p = BITMAP_ALLOC (&dse_bitmap_obstack);
819 gi->store2_n = BITMAP_ALLOC (&dse_bitmap_obstack);
820 gi->store2_p = BITMAP_ALLOC (&dse_bitmap_obstack);
821 gi->escaped_p = BITMAP_ALLOC (&dse_bitmap_obstack);
822 gi->escaped_n = BITMAP_ALLOC (&dse_bitmap_obstack);
823 gi->group_kill = BITMAP_ALLOC (&dse_bitmap_obstack);
824 gi->process_globally = false;
825 gi->frame_related =
826 (base == frame_pointer_rtx) || (base == hard_frame_pointer_rtx);
827 gi->offset_map_size_n = 0;
828 gi->offset_map_size_p = 0;
829 gi->offset_map_n = NULL;
830 gi->offset_map_p = NULL;
831 rtx_group_vec.safe_push (gi);
832 }
833
834 return gi;
835 }
836
837
838 /* Initialization of data structures. */
839
840 static void
841 dse_step0 (void)
842 {
843 locally_deleted = 0;
844 globally_deleted = 0;
845 spill_deleted = 0;
846
847 bitmap_obstack_initialize (&dse_bitmap_obstack);
848 gcc_obstack_init (&dse_obstack);
849
850 scratch = BITMAP_ALLOC (&reg_obstack);
851 kill_on_calls = BITMAP_ALLOC (&dse_bitmap_obstack);
852
853
854 rtx_group_table = new hash_table<invariant_group_base_hasher> (11);
855
856 bb_table = XNEWVEC (bb_info_t, last_basic_block_for_fn (cfun));
857 rtx_group_next_id = 0;
858
859 stores_off_frame_dead_at_return = !cfun->stdarg;
860
861 init_alias_analysis ();
862
863 clear_alias_group = NULL;
864 }
865
866
867 \f
868 /*----------------------------------------------------------------------------
869 First step.
870
871 Scan all of the insns. Any random ordering of the blocks is fine.
872 Each block is scanned in forward order to accommodate cselib which
873 is used to remove stores with non-constant bases.
874 ----------------------------------------------------------------------------*/
875
876 /* Delete all of the store_info recs from INSN_INFO. */
877
878 static void
879 free_store_info (insn_info_t insn_info)
880 {
881 store_info_t store_info = insn_info->store_rec;
882 while (store_info)
883 {
884 store_info_t next = store_info->next;
885 if (store_info->is_large)
886 BITMAP_FREE (store_info->positions_needed.large.bmap);
887 if (store_info->cse_base)
888 cse_store_info_pool.remove (store_info);
889 else
890 rtx_store_info_pool.remove (store_info);
891 store_info = next;
892 }
893
894 insn_info->cannot_delete = true;
895 insn_info->contains_cselib_groups = false;
896 insn_info->store_rec = NULL;
897 }
898
899 typedef struct
900 {
901 rtx_insn *first, *current;
902 regset fixed_regs_live;
903 bool failure;
904 } note_add_store_info;
905
906 /* Callback for emit_inc_dec_insn_before via note_stores.
907 Check if a register is clobbered which is live afterwards. */
908
909 static void
910 note_add_store (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *data)
911 {
912 rtx_insn *insn;
913 note_add_store_info *info = (note_add_store_info *) data;
914
915 if (!REG_P (loc))
916 return;
917
918 /* If this register is referenced by the current or an earlier insn,
919 that's OK. E.g. this applies to the register that is being incremented
920 with this addition. */
921 for (insn = info->first;
922 insn != NEXT_INSN (info->current);
923 insn = NEXT_INSN (insn))
924 if (reg_referenced_p (loc, PATTERN (insn)))
925 return;
926
927 /* If we come here, we have a clobber of a register that's only OK
928 if that register is not live. If we don't have liveness information
929 available, fail now. */
930 if (!info->fixed_regs_live)
931 {
932 info->failure = true;
933 return;
934 }
935 /* Now check if this is a live fixed register. */
936 unsigned int end_regno = END_REGNO (loc);
937 for (unsigned int regno = REGNO (loc); regno < end_regno; ++regno)
938 if (REGNO_REG_SET_P (info->fixed_regs_live, regno))
939 info->failure = true;
940 }
941
942 /* Callback for for_each_inc_dec that emits an INSN that sets DEST to
943 SRC + SRCOFF before insn ARG. */
944
945 static int
946 emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED,
947 rtx op ATTRIBUTE_UNUSED,
948 rtx dest, rtx src, rtx srcoff, void *arg)
949 {
950 insn_info_t insn_info = (insn_info_t) arg;
951 rtx_insn *insn = insn_info->insn, *new_insn, *cur;
952 note_add_store_info info;
953
954 /* We can reuse all operands without copying, because we are about
955 to delete the insn that contained them. */
956 if (srcoff)
957 {
958 start_sequence ();
959 emit_insn (gen_add3_insn (dest, src, srcoff));
960 new_insn = get_insns ();
961 end_sequence ();
962 }
963 else
964 new_insn = gen_move_insn (dest, src);
965 info.first = new_insn;
966 info.fixed_regs_live = insn_info->fixed_regs_live;
967 info.failure = false;
968 for (cur = new_insn; cur; cur = NEXT_INSN (cur))
969 {
970 info.current = cur;
971 note_stores (PATTERN (cur), note_add_store, &info);
972 }
973
974 /* If a failure was flagged above, return 1 so that for_each_inc_dec will
975 return it immediately, communicating the failure to its caller. */
976 if (info.failure)
977 return 1;
978
979 emit_insn_before (new_insn, insn);
980
981 return 0;
982 }
983
984 /* Before we delete INSN_INFO->INSN, make sure that the auto inc/dec, if it
985 is there, is split into a separate insn.
986 Return true on success (or if there was nothing to do), false on failure. */
987
988 static bool
989 check_for_inc_dec_1 (insn_info_t insn_info)
990 {
991 rtx_insn *insn = insn_info->insn;
992 rtx note = find_reg_note (insn, REG_INC, NULL_RTX);
993 if (note)
994 return for_each_inc_dec (PATTERN (insn), emit_inc_dec_insn_before,
995 insn_info) == 0;
996 return true;
997 }
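/* For example (illustrative only), if the doomed store is

     (set (mem (post_inc (reg 1))) (reg 2))

   and carries a REG_INC note for (reg 1), the increment is a side
   effect that must survive the store's deletion.  For a 4-byte
   access, emit_inc_dec_insn_before emits a separate

     (set (reg 1) (plus (reg 1) (const_int 4)))

   before the insn, after which the store itself can be deleted.  */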
998
999
1000 /* Entry point for postreload. If you work on reload_cse, or you need this
1001 anywhere else, consider if you can provide register liveness information
1002 and add a parameter to this function so that it can be passed down in
1003 insn_info.fixed_regs_live. */
1004 bool
1005 check_for_inc_dec (rtx_insn *insn)
1006 {
1007 insn_info_type insn_info;
1008 rtx note;
1009
1010 insn_info.insn = insn;
1011 insn_info.fixed_regs_live = NULL;
1012 note = find_reg_note (insn, REG_INC, NULL_RTX);
1013 if (note)
1014 return for_each_inc_dec (PATTERN (insn), emit_inc_dec_insn_before,
1015 &insn_info) == 0;
1016 return true;
1017 }
1018
1019 /* Delete the insn and free all of the fields inside INSN_INFO. */
1020
1021 static void
1022 delete_dead_store_insn (insn_info_t insn_info)
1023 {
1024 read_info_t read_info;
1025
1026 if (!dbg_cnt (dse))
1027 return;
1028
1029 if (!check_for_inc_dec_1 (insn_info))
1030 return;
1031 if (dump_file && (dump_flags & TDF_DETAILS))
1032 {
1033 fprintf (dump_file, "Locally deleting insn %d ",
1034 INSN_UID (insn_info->insn));
1035 if (insn_info->store_rec->alias_set)
1036 fprintf (dump_file, "alias set %d\n",
1037 (int) insn_info->store_rec->alias_set);
1038 else
1039 fprintf (dump_file, "\n");
1040 }
1041
1042 free_store_info (insn_info);
1043 read_info = insn_info->read_rec;
1044
1045 while (read_info)
1046 {
1047 read_info_t next = read_info->next;
1048 delete read_info;
1049 read_info = next;
1050 }
1051 insn_info->read_rec = NULL;
1052
1053 delete_insn (insn_info->insn);
1054 locally_deleted++;
1055 insn_info->insn = NULL;
1056
1057 insn_info->wild_read = false;
1058 }
1059
1060 /* Return whether DECL, a local variable, can possibly escape the current
1061 function scope. */
1062
1063 static bool
1064 local_variable_can_escape (tree decl)
1065 {
1066 if (TREE_ADDRESSABLE (decl))
1067 return true;
1068
1069 /* If this is a partitioned variable, we need to consider all the variables
1070 in the partition. This is necessary because a store into one of them can
1071 be replaced with a store into another and this may not change the outcome
1072 of the escape analysis. */
1073 if (cfun->gimple_df->decls_to_pointers != NULL)
1074 {
1075 tree *namep = cfun->gimple_df->decls_to_pointers->get (decl);
1076 if (namep)
1077 return TREE_ADDRESSABLE (*namep);
1078 }
1079
1080 return false;
1081 }
1082
1083 /* Return whether EXPR can possibly escape the current function scope. */
1084
1085 static bool
1086 can_escape (tree expr)
1087 {
1088 tree base;
1089 if (!expr)
1090 return true;
1091 base = get_base_address (expr);
1092 if (DECL_P (base)
1093 && !may_be_aliased (base)
1094 && !(TREE_CODE (base) == VAR_DECL
1095 && !DECL_EXTERNAL (base)
1096 && !TREE_STATIC (base)
1097 && local_variable_can_escape (base)))
1098 return false;
1099 return true;
1100 }
1101
1102 /* Set the store* bitmaps and offset_map_size* fields in GROUP based on
1103 OFFSET and WIDTH. */
1104
1105 static void
1106 set_usage_bits (group_info_t group, HOST_WIDE_INT offset, HOST_WIDE_INT width,
1107 tree expr)
1108 {
1109 HOST_WIDE_INT i;
1110 bool expr_escapes = can_escape (expr);
1111 if (offset > -MAX_OFFSET && offset + width < MAX_OFFSET)
1112 for (i = offset; i < offset + width; i++)
1113 {
1114 bitmap store1;
1115 bitmap store2;
1116 bitmap escaped;
1117 int ai;
1118 if (i < 0)
1119 {
1120 store1 = group->store1_n;
1121 store2 = group->store2_n;
1122 escaped = group->escaped_n;
1123 ai = -i;
1124 }
1125 else
1126 {
1127 store1 = group->store1_p;
1128 store2 = group->store2_p;
1129 escaped = group->escaped_p;
1130 ai = i;
1131 }
1132
1133 if (!bitmap_set_bit (store1, ai))
1134 bitmap_set_bit (store2, ai);
1135 else
1136 {
1137 if (i < 0)
1138 {
1139 if (group->offset_map_size_n < ai)
1140 group->offset_map_size_n = ai;
1141 }
1142 else
1143 {
1144 if (group->offset_map_size_p < ai)
1145 group->offset_map_size_p = ai;
1146 }
1147 }
1148 if (expr_escapes)
1149 bitmap_set_bit (escaped, ai);
1150 }
1151 }
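/* For instance (illustrative only), a 4-byte store with OFFSET == -2
   touches bytes -2, -1, 0 and 1: set_usage_bits sets bits 2 and 1 in
   store1_n (for bytes -2 and -1) and bits 0 and 1 in store1_p.  A
   second store covering any of those bytes then sets the
   corresponding bit in store2_*, marking the offset as worth
   tracking globally.  */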
1152
1153 static void
1154 reset_active_stores (void)
1155 {
1156 active_local_stores = NULL;
1157 active_local_stores_len = 0;
1158 }
1159
1160 /* Free all READ_REC of the LAST_INSN of BB_INFO. */
1161
1162 static void
1163 free_read_records (bb_info_t bb_info)
1164 {
1165 insn_info_t insn_info = bb_info->last_insn;
1166 read_info_t *ptr = &insn_info->read_rec;
1167 while (*ptr)
1168 {
1169 read_info_t next = (*ptr)->next;
1170 if ((*ptr)->alias_set == 0)
1171 {
1172 delete *ptr;
1173 *ptr = next;
1174 }
1175 else
1176 ptr = &(*ptr)->next;
1177 }
1178 }
1179
1180 /* Set the BB_INFO so that the last insn is marked as a wild read. */
1181
1182 static void
1183 add_wild_read (bb_info_t bb_info)
1184 {
1185 insn_info_t insn_info = bb_info->last_insn;
1186 insn_info->wild_read = true;
1187 free_read_records (bb_info);
1188 reset_active_stores ();
1189 }
1190
1191 /* Set the BB_INFO so that the last insn is marked as a wild read of
1192 non-frame locations. */
1193
1194 static void
1195 add_non_frame_wild_read (bb_info_t bb_info)
1196 {
1197 insn_info_t insn_info = bb_info->last_insn;
1198 insn_info->non_frame_wild_read = true;
1199 free_read_records (bb_info);
1200 reset_active_stores ();
1201 }
1202
1203 /* Return true if X is a constant or one of the registers that behave
1204 as a constant over the life of a function. This is equivalent to
1205 !rtx_varies_p for memory addresses. */
1206
1207 static bool
1208 const_or_frame_p (rtx x)
1209 {
1210 if (CONSTANT_P (x))
1211 return true;
1212
1213 if (GET_CODE (x) == REG)
1214 {
1215 /* Note that we have to test for the actual rtx used for the frame
1216 and arg pointers and not just the register number in case we have
1217 eliminated the frame and/or arg pointer and are using it
1218 for pseudos. */
1219 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
1220 /* The arg pointer varies if it is not a fixed register. */
1221 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
1222 || x == pic_offset_table_rtx)
1223 return true;
1224 return false;
1225 }
1226
1227 return false;
1228 }
1229
1230 /* Take all reasonable action to put the address of MEM into the form
1231 that we can do analysis on.
1232
1233 The gold standard is to get the address into the form: address +
1234 OFFSET where address is something that rtx_varies_p considers a
1235 constant. When we can get the address in this form, we can do
1236 global analysis on it. Note that for constant bases, address is
1237 not actually returned, only the group_id. The address can be
1238 obtained from that.
1239
1240 If that fails, we try cselib to get a value we can at least use
1241 locally. If that fails we return false.
1242
1243 The GROUP_ID is set to -1 for cselib bases and the index of the
1244 group for non_varying bases.
1245
1246 FOR_READ is true if this is a mem read and false if not. */
1247
1248 static bool
1249 canon_address (rtx mem,
1250 alias_set_type *alias_set_out,
1251 int *group_id,
1252 HOST_WIDE_INT *offset,
1253 cselib_val **base)
1254 {
1255 machine_mode address_mode = get_address_mode (mem);
1256 rtx mem_address = XEXP (mem, 0);
1257 rtx expanded_address, address;
1258 int expanded;
1259
1260 *alias_set_out = 0;
1261
1262 cselib_lookup (mem_address, address_mode, 1, GET_MODE (mem));
1263
1264 if (dump_file && (dump_flags & TDF_DETAILS))
1265 {
1266 fprintf (dump_file, " mem: ");
1267 print_inline_rtx (dump_file, mem_address, 0);
1268 fprintf (dump_file, "\n");
1269 }
1270
1271 /* First see if just canon_rtx (mem_address) is const or frame,
1272 if not, try cselib_expand_value_rtx and call canon_rtx on that. */
1273 address = NULL_RTX;
1274 for (expanded = 0; expanded < 2; expanded++)
1275 {
1276 if (expanded)
1277 {
1278 /* Use cselib to replace all of the reg references with the full
1279 expression. This will take care of the case where we have
1280
1281 r_x = base + offset;
1282 val = *r_x;
1283
1284 by making it into
1285
1286 val = *(base + offset); */
1287
1288 expanded_address = cselib_expand_value_rtx (mem_address,
1289 scratch, 5);
1290
1291 /* If this fails, just go with the address from first
1292 iteration. */
1293 if (!expanded_address)
1294 break;
1295 }
1296 else
1297 expanded_address = mem_address;
1298
1299 /* Split the address into canonical BASE + OFFSET terms. */
1300 address = canon_rtx (expanded_address);
1301
1302 *offset = 0;
1303
1304 if (dump_file && (dump_flags & TDF_DETAILS))
1305 {
1306 if (expanded)
1307 {
1308 fprintf (dump_file, "\n after cselib_expand address: ");
1309 print_inline_rtx (dump_file, expanded_address, 0);
1310 fprintf (dump_file, "\n");
1311 }
1312
1313 fprintf (dump_file, "\n after canon_rtx address: ");
1314 print_inline_rtx (dump_file, address, 0);
1315 fprintf (dump_file, "\n");
1316 }
1317
1318 if (GET_CODE (address) == CONST)
1319 address = XEXP (address, 0);
1320
1321 if (GET_CODE (address) == PLUS
1322 && CONST_INT_P (XEXP (address, 1)))
1323 {
1324 *offset = INTVAL (XEXP (address, 1));
1325 address = XEXP (address, 0);
1326 }
1327
1328 if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (mem))
1329 && const_or_frame_p (address))
1330 {
1331 group_info_t group = get_group_info (address);
1332
1333 if (dump_file && (dump_flags & TDF_DETAILS))
1334 fprintf (dump_file, " gid=%d offset=%d \n",
1335 group->id, (int)*offset);
1336 *base = NULL;
1337 *group_id = group->id;
1338 return true;
1339 }
1340 }
1341
1342 *base = cselib_lookup (address, address_mode, true, GET_MODE (mem));
1343 *group_id = -1;
1344
1345 if (*base == NULL)
1346 {
1347 if (dump_file && (dump_flags & TDF_DETAILS))
1348 fprintf (dump_file, " no cselib val - should be a wild read.\n");
1349 return false;
1350 }
1351 if (dump_file && (dump_flags & TDF_DETAILS))
1352 fprintf (dump_file, " varying cselib base=%u:%u offset = %d\n",
1353 (*base)->uid, (*base)->hash, (int)*offset);
1354 return true;
1355 }
1356
1357
1358 /* Clear the rhs field from the active_local_stores array. */
1359
1360 static void
1361 clear_rhs_from_active_local_stores (void)
1362 {
1363 insn_info_t ptr = active_local_stores;
1364
1365 while (ptr)
1366 {
1367 store_info_t store_info = ptr->store_rec;
1368 /* Skip the clobbers. */
1369 while (!store_info->is_set)
1370 store_info = store_info->next;
1371
1372 store_info->rhs = NULL;
1373 store_info->const_rhs = NULL;
1374
1375 ptr = ptr->next_local_store;
1376 }
1377 }
1378
1379
1380 /* Mark the byte at offset POS from the beginning of store S_INFO as unneeded. */
1381
1382 static inline void
1383 set_position_unneeded (store_info_t s_info, int pos)
1384 {
1385 if (__builtin_expect (s_info->is_large, false))
1386 {
1387 if (bitmap_set_bit (s_info->positions_needed.large.bmap, pos))
1388 s_info->positions_needed.large.count++;
1389 }
1390 else
1391 s_info->positions_needed.small_bitmask
1392 &= ~(((unsigned HOST_WIDE_INT) 1) << pos);
1393 }
1394
1395 /* Mark the whole store S_INFO as unneeded. */
1396
1397 static inline void
1398 set_all_positions_unneeded (store_info_t s_info)
1399 {
1400 if (__builtin_expect (s_info->is_large, false))
1401 {
1402 int pos, end = s_info->end - s_info->begin;
1403 for (pos = 0; pos < end; pos++)
1404 bitmap_set_bit (s_info->positions_needed.large.bmap, pos);
1405 s_info->positions_needed.large.count = end;
1406 }
1407 else
1408 s_info->positions_needed.small_bitmask = (unsigned HOST_WIDE_INT) 0;
1409 }
1410
1411 /* Return TRUE if any bytes from S_INFO store are needed. */
1412
1413 static inline bool
1414 any_positions_needed_p (store_info_t s_info)
1415 {
1416 if (__builtin_expect (s_info->is_large, false))
1417 return (s_info->positions_needed.large.count
1418 < s_info->end - s_info->begin);
1419 else
1420 return (s_info->positions_needed.small_bitmask
1421 != (unsigned HOST_WIDE_INT) 0);
1422 }
1423
1424 /* Return TRUE if all bytes START through START+WIDTH-1 from S_INFO
1425 store are needed. */
1426
1427 static inline bool
1428 all_positions_needed_p (store_info_t s_info, int start, int width)
1429 {
1430 if (__builtin_expect (s_info->is_large, false))
1431 {
1432 int end = start + width;
1433 while (start < end)
1434 if (bitmap_bit_p (s_info->positions_needed.large.bmap, start++))
1435 return false;
1436 return true;
1437 }
1438 else
1439 {
1440 unsigned HOST_WIDE_INT mask = lowpart_bitmask (width) << start;
1441 return (s_info->positions_needed.small_bitmask & mask) == mask;
1442 }
1443 }
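/* In the small-bitmask case (illustrative example): with START == 2
   and WIDTH == 3, MASK is lowpart_bitmask (3) << 2 == 0x1c, so the
   function returns true only if bits 2, 3 and 4 of small_bitmask are
   all still set, i.e. all three bytes are still needed.  */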
1444
1445
1446 static rtx get_stored_val (store_info_t, machine_mode, HOST_WIDE_INT,
1447 HOST_WIDE_INT, basic_block, bool);
1448
1449
1450 /* BODY is an instruction pattern that belongs to INSN. Return 1 if
1451 there is a candidate store, after adding it to the appropriate
1452 local store group if so. */
1453
1454 static int
1455 record_store (rtx body, bb_info_t bb_info)
1456 {
1457 rtx mem, rhs, const_rhs, mem_addr;
1458 HOST_WIDE_INT offset = 0;
1459 HOST_WIDE_INT width = 0;
1460 alias_set_type spill_alias_set;
1461 insn_info_t insn_info = bb_info->last_insn;
1462 store_info_t store_info = NULL;
1463 int group_id;
1464 cselib_val *base = NULL;
1465 insn_info_t ptr, last, redundant_reason;
1466 bool store_is_unused;
1467
1468 if (GET_CODE (body) != SET && GET_CODE (body) != CLOBBER)
1469 return 0;
1470
1471 mem = SET_DEST (body);
1472
1473 /* If this is not used, then this cannot be used to keep the insn
1474 from being deleted. On the other hand, it does provide something
1475 that can be used to prove that another store is dead. */
1476 store_is_unused
1477 = (find_reg_note (insn_info->insn, REG_UNUSED, mem) != NULL);
1478
1479 /* Check whether that value is a suitable memory location. */
1480 if (!MEM_P (mem))
1481 {
1482 /* If the set or clobber is unused, then it does not affect our
1483 ability to get rid of the entire insn. */
1484 if (!store_is_unused)
1485 insn_info->cannot_delete = true;
1486 return 0;
1487 }
1488
1489 /* At this point we know mem is a mem. */
1490 if (GET_MODE (mem) == BLKmode)
1491 {
1492 if (GET_CODE (XEXP (mem, 0)) == SCRATCH)
1493 {
1494 if (dump_file && (dump_flags & TDF_DETAILS))
1495 fprintf (dump_file, " adding wild read for (clobber (mem:BLK (scratch)))\n");
1496 add_wild_read (bb_info);
1497 insn_info->cannot_delete = true;
1498 return 0;
1499 }
1500 /* Handle (set (mem:BLK (addr) [... S36 ...]) (const_int 0))
1501 as memset (addr, 0, 36); */
1502 else if (!MEM_SIZE_KNOWN_P (mem)
1503 || MEM_SIZE (mem) <= 0
1504 || MEM_SIZE (mem) > MAX_OFFSET
1505 || GET_CODE (body) != SET
1506 || !CONST_INT_P (SET_SRC (body)))
1507 {
1508 if (!store_is_unused)
1509 {
1510 /* If the set or clobber is unused, then it does not affect our
1511 ability to get rid of the entire insn. */
1512 insn_info->cannot_delete = true;
1513 clear_rhs_from_active_local_stores ();
1514 }
1515 return 0;
1516 }
1517 }
1518
1519 /* We can still process a volatile mem; we just cannot delete it. */
1520 if (MEM_VOLATILE_P (mem))
1521 insn_info->cannot_delete = true;
1522
1523 if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
1524 {
1525 clear_rhs_from_active_local_stores ();
1526 return 0;
1527 }
1528
1529 if (GET_MODE (mem) == BLKmode)
1530 width = MEM_SIZE (mem);
1531 else
1532 width = GET_MODE_SIZE (GET_MODE (mem));
1533
1534 if (spill_alias_set)
1535 {
1536 bitmap store1 = clear_alias_group->store1_p;
1537 bitmap store2 = clear_alias_group->store2_p;
1538
1539 gcc_assert (GET_MODE (mem) != BLKmode);
1540
1541 if (!bitmap_set_bit (store1, spill_alias_set))
1542 bitmap_set_bit (store2, spill_alias_set);
1543
1544 if (clear_alias_group->offset_map_size_p < spill_alias_set)
1545 clear_alias_group->offset_map_size_p = spill_alias_set;
1546
1547 store_info = rtx_store_info_pool.allocate ();
1548
1549 if (dump_file && (dump_flags & TDF_DETAILS))
1550 fprintf (dump_file, " processing spill store %d(%s)\n",
1551 (int) spill_alias_set, GET_MODE_NAME (GET_MODE (mem)));
1552 }
1553 else if (group_id >= 0)
1554 {
1555 /* In the restrictive case where the base is a constant or the
1556 frame pointer we can do global analysis. */
1557
1558 group_info_t group
1559 = rtx_group_vec[group_id];
1560 tree expr = MEM_EXPR (mem);
1561
1562 store_info = rtx_store_info_pool.allocate ();
1563 set_usage_bits (group, offset, width, expr);
1564
1565 if (dump_file && (dump_flags & TDF_DETAILS))
1566 fprintf (dump_file, " processing const base store gid=%d[%d..%d)\n",
1567 group_id, (int)offset, (int)(offset+width));
1568 }
1569 else
1570 {
1571 if (may_be_sp_based_p (XEXP (mem, 0)))
1572 insn_info->stack_pointer_based = true;
1573 insn_info->contains_cselib_groups = true;
1574
1575 store_info = cse_store_info_pool.allocate ();
1576 group_id = -1;
1577
1578 if (dump_file && (dump_flags & TDF_DETAILS))
1579 fprintf (dump_file, " processing cselib store [%d..%d)\n",
1580 (int)offset, (int)(offset+width));
1581 }
1582
1583 const_rhs = rhs = NULL_RTX;
1584 if (GET_CODE (body) == SET
1585 /* No place to keep the value after ra. */
1586 && !reload_completed
1587 && (REG_P (SET_SRC (body))
1588 || GET_CODE (SET_SRC (body)) == SUBREG
1589 || CONSTANT_P (SET_SRC (body)))
1590 && !MEM_VOLATILE_P (mem)
1591 /* Sometimes the store and reload are used for truncation and
1592 rounding. */
1593 && !(FLOAT_MODE_P (GET_MODE (mem)) && (flag_float_store)))
1594 {
1595 rhs = SET_SRC (body);
1596 if (CONSTANT_P (rhs))
1597 const_rhs = rhs;
1598 else if (body == PATTERN (insn_info->insn))
1599 {
1600 rtx tem = find_reg_note (insn_info->insn, REG_EQUAL, NULL_RTX);
1601 if (tem && CONSTANT_P (XEXP (tem, 0)))
1602 const_rhs = XEXP (tem, 0);
1603 }
1604 if (const_rhs == NULL_RTX && REG_P (rhs))
1605 {
1606 rtx tem = cselib_expand_value_rtx (rhs, scratch, 5);
1607
1608 if (tem && CONSTANT_P (tem))
1609 const_rhs = tem;
1610 }
1611 }
1612
1613 /* Check to see if this store causes some other stores to be
1614 dead. */
1615 ptr = active_local_stores;
1616 last = NULL;
1617 redundant_reason = NULL;
1618 mem = canon_rtx (mem);
1619 /* For alias_set != 0, canon_true_dependence should never be called. */
1620 if (spill_alias_set)
1621 mem_addr = NULL_RTX;
1622 else
1623 {
1624 if (group_id < 0)
1625 mem_addr = base->val_rtx;
1626 else
1627 {
1628 group_info_t group
1629 = rtx_group_vec[group_id];
1630 mem_addr = group->canon_base_addr;
1631 }
1632 /* get_addr can only handle a VALUE, not an expression like
1633 VALUE + OFFSET, so call get_addr to get the original address for
1634 mem_addr before calling plus_constant. */
1635 mem_addr = get_addr (mem_addr);
1636 if (offset)
1637 mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
1638 }
1639
1640 while (ptr)
1641 {
1642 insn_info_t next = ptr->next_local_store;
1643 store_info_t s_info = ptr->store_rec;
1644 bool del = true;
1645
1646 /* Skip the clobbers. We delete the active insn if this insn
1647 shadows the set. To have been put on the active list, it
1648 has exactly one set. */
1649 while (!s_info->is_set)
1650 s_info = s_info->next;
1651
1652 if (s_info->alias_set != spill_alias_set)
1653 del = false;
1654 else if (s_info->alias_set)
1655 {
1656 struct clear_alias_mode_holder *entry
1657 = clear_alias_set_lookup (s_info->alias_set);
1658 /* Generally, spills cannot be processed if any of the
1659 references to the slot have a different mode. But if
1660 we are in the same block and mode is exactly the same
1661 between this store and one before in the same block,
1662 we can still delete it. */
1663 if ((GET_MODE (mem) == GET_MODE (s_info->mem))
1664 && (GET_MODE (mem) == entry->mode))
1665 {
1666 del = true;
1667 set_all_positions_unneeded (s_info);
1668 }
1669 if (dump_file && (dump_flags & TDF_DETAILS))
1670 fprintf (dump_file, " trying spill store in insn=%d alias_set=%d\n",
1671 INSN_UID (ptr->insn), (int) s_info->alias_set);
1672 }
1673 else if ((s_info->group_id == group_id)
1674 && (s_info->cse_base == base))
1675 {
1676 HOST_WIDE_INT i;
1677 if (dump_file && (dump_flags & TDF_DETAILS))
1678 fprintf (dump_file, " trying store in insn=%d gid=%d[%d..%d)\n",
1679 INSN_UID (ptr->insn), s_info->group_id,
1680 (int)s_info->begin, (int)s_info->end);
1681
1682 /* Even if PTR won't be eliminated as unneeded, if both
1683 PTR and this insn store the same constant value, we might
1684 eliminate this insn instead. */
1685 if (s_info->const_rhs
1686 && const_rhs
1687 && offset >= s_info->begin
1688 && offset + width <= s_info->end
1689 && all_positions_needed_p (s_info, offset - s_info->begin,
1690 width))
1691 {
1692 if (GET_MODE (mem) == BLKmode)
1693 {
1694 if (GET_MODE (s_info->mem) == BLKmode
1695 && s_info->const_rhs == const_rhs)
1696 redundant_reason = ptr;
1697 }
1698 else if (s_info->const_rhs == const0_rtx
1699 && const_rhs == const0_rtx)
1700 redundant_reason = ptr;
1701 else
1702 {
1703 rtx val;
1704 start_sequence ();
1705 val = get_stored_val (s_info, GET_MODE (mem),
1706 offset, offset + width,
1707 BLOCK_FOR_INSN (insn_info->insn),
1708 true);
1709 if (get_insns () != NULL)
1710 val = NULL_RTX;
1711 end_sequence ();
1712 if (val && rtx_equal_p (val, const_rhs))
1713 redundant_reason = ptr;
1714 }
1715 }
1716
1717 for (i = MAX (offset, s_info->begin);
1718 i < offset + width && i < s_info->end;
1719 i++)
1720 set_position_unneeded (s_info, i - s_info->begin);
1721 }
1722 else if (s_info->rhs)
1723 /* Need to see if it is possible for this store to overwrite
1724 the value of store_info. If it is, set the rhs to NULL to
1725 keep it from being used to remove a load. */
1726 {
1727 if (canon_true_dependence (s_info->mem,
1728 GET_MODE (s_info->mem),
1729 s_info->mem_addr,
1730 mem, mem_addr))
1731 {
1732 s_info->rhs = NULL;
1733 s_info->const_rhs = NULL;
1734 }
1735 }
1736
1737 /* An insn can be deleted if every position of every one of
1738 its s_infos is zero. */
1739 if (any_positions_needed_p (s_info))
1740 del = false;
1741
1742 if (del)
1743 {
1744 insn_info_t insn_to_delete = ptr;
1745
1746 active_local_stores_len--;
1747 if (last)
1748 last->next_local_store = ptr->next_local_store;
1749 else
1750 active_local_stores = ptr->next_local_store;
1751
1752 if (!insn_to_delete->cannot_delete)
1753 delete_dead_store_insn (insn_to_delete);
1754 }
1755 else
1756 last = ptr;
1757
1758 ptr = next;
1759 }
1760
1761 /* Finish filling in the store_info. */
1762 store_info->next = insn_info->store_rec;
1763 insn_info->store_rec = store_info;
1764 store_info->mem = mem;
1765 store_info->alias_set = spill_alias_set;
1766 store_info->mem_addr = mem_addr;
1767 store_info->cse_base = base;
1768 if (width > HOST_BITS_PER_WIDE_INT)
1769 {
1770 store_info->is_large = true;
1771 store_info->positions_needed.large.count = 0;
1772 store_info->positions_needed.large.bmap = BITMAP_ALLOC (&dse_bitmap_obstack);
1773 }
1774 else
1775 {
1776 store_info->is_large = false;
1777 store_info->positions_needed.small_bitmask = lowpart_bitmask (width);
1778 }
1779 store_info->group_id = group_id;
1780 store_info->begin = offset;
1781 store_info->end = offset + width;
1782 store_info->is_set = GET_CODE (body) == SET;
1783 store_info->rhs = rhs;
1784 store_info->const_rhs = const_rhs;
1785 store_info->redundant_reason = redundant_reason;
1786
1787 /* If this is a clobber, we return 0. We will only be able to
1788 delete this insn if it contains exactly one used store, but we
1789 can use the clobber to delete other stores earlier. */
1790 return store_info->is_set ? 1 : 0;
1791 }
1792
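/* To illustrate the positions_needed bookkeeping above (a sketch, not
   a trace of real input): a 4-byte store starts with small_bitmask
   0xf -- one bit per byte position, all needed.  If a later store
   covers bytes [1..3), set_position_unneeded clears the middle bits,
   leaving 0x9, and the insn becomes deletable once no bits remain
   set.  */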
1793
1794 static void
1795 dump_insn_info (const char * start, insn_info_t insn_info)
1796 {
1797 fprintf (dump_file, "%s insn=%d %s\n", start,
1798 INSN_UID (insn_info->insn),
1799 insn_info->store_rec ? "has store" : "naked");
1800 }
1801
1802
1803 /* If the modes are different and the value's source and target do not
1804 line up, we need to extract the value from the lower part of the rhs of
1805 the store, shift it, and then put it into a form that can be shoved
1806 into the read_insn. This function generates a right shift by SHIFT
1807 bits of a value that is at least ACCESS_SIZE bytes wide, so the
1808 result can be read in READ_MODE. The shift sequence is returned,
1809 or NULL if we failed to find a shift. */
1810
1811 static rtx
1812 find_shift_sequence (int access_size,
1813 store_info_t store_info,
1814 machine_mode read_mode,
1815 int shift, bool speed, bool require_cst)
1816 {
1817 machine_mode store_mode = GET_MODE (store_info->mem);
1818 machine_mode new_mode;
1819 rtx read_reg = NULL;
1820
1821 /* Some machines like the x86 have shift insns for each size of
1822 operand. Other machines like the ppc or the ia-64 may only have
1823 shift insns that shift values within 32 or 64 bit registers.
1824 This loop tries to find the smallest mode in which a single shift
1825 insn available on the machine can right-justify the value we want
1826 to read. */
1827
1828 for (new_mode = smallest_mode_for_size (access_size * BITS_PER_UNIT,
1829 MODE_INT);
1830 GET_MODE_BITSIZE (new_mode) <= BITS_PER_WORD;
1831 new_mode = GET_MODE_WIDER_MODE (new_mode))
1832 {
1833 rtx target, new_reg, new_lhs;
1834 rtx_insn *shift_seq, *insn;
1835 int cost;
1836
1837 /* If a constant was stored into memory, try to simplify it here,
1838 otherwise the cost of the shift might preclude this optimization
1839 e.g. at -Os, even when no actual shift will be needed. */
1840 if (store_info->const_rhs)
1841 {
1842 unsigned int byte = subreg_lowpart_offset (new_mode, store_mode);
1843 rtx ret = simplify_subreg (new_mode, store_info->const_rhs,
1844 store_mode, byte);
1845 if (ret && CONSTANT_P (ret))
1846 {
1847 ret = simplify_const_binary_operation (LSHIFTRT, new_mode,
1848 ret, GEN_INT (shift));
1849 if (ret && CONSTANT_P (ret))
1850 {
1851 byte = subreg_lowpart_offset (read_mode, new_mode);
1852 ret = simplify_subreg (read_mode, ret, new_mode, byte);
1853 if (ret && CONSTANT_P (ret)
1854 && set_src_cost (ret, speed) <= COSTS_N_INSNS (1))
1855 return ret;
1856 }
1857 }
1858 }
1859
1860 if (require_cst)
1861 return NULL_RTX;
1862
1863 /* Try a wider mode if truncating the store mode to NEW_MODE
1864 requires a real instruction. */
1865 if (GET_MODE_BITSIZE (new_mode) < GET_MODE_BITSIZE (store_mode)
1866 && !TRULY_NOOP_TRUNCATION_MODES_P (new_mode, store_mode))
1867 continue;
1868
1869 /* Also try a wider mode if the necessary punning is either not
1870 desirable or not possible. */
1871 if (!CONSTANT_P (store_info->rhs)
1872 && !MODES_TIEABLE_P (new_mode, store_mode))
1873 continue;
1874
1875 new_reg = gen_reg_rtx (new_mode);
1876
1877 start_sequence ();
1878
1879 /* In theory we could also check for an ashr. Ian Taylor knows
1880 of one dsp where the cost of these two was not the same. But
1881 this really is a rare case anyway. */
1882 target = expand_binop (new_mode, lshr_optab, new_reg,
1883 GEN_INT (shift), new_reg, 1, OPTAB_DIRECT);
1884
1885 shift_seq = get_insns ();
1886 end_sequence ();
1887
1888 if (target != new_reg || shift_seq == NULL)
1889 continue;
1890
1891 cost = 0;
1892 for (insn = shift_seq; insn != NULL_RTX; insn = NEXT_INSN (insn))
1893 if (INSN_P (insn))
1894 cost += insn_rtx_cost (PATTERN (insn), speed);
1895
1896 /* The computation up to here is essentially independent
1897 of the arguments and could be precomputed. It may
1898 not be worth doing so. We could precompute if
1899 worthwhile or at least cache the results. The result
1900 technically depends on both SHIFT and ACCESS_SIZE,
1901 but in practice the answer will depend only on ACCESS_SIZE. */
1902
1903 if (cost > COSTS_N_INSNS (1))
1904 continue;
1905
1906 new_lhs = extract_low_bits (new_mode, store_mode,
1907 copy_rtx (store_info->rhs));
1908 if (new_lhs == NULL_RTX)
1909 continue;
1910
1911 /* We found an acceptable shift. Generate a move to
1912 take the value from the store and put it into the
1913 shift pseudo, then shift it, then generate another
1914 move to put it into the target of the read. */
1915 emit_move_insn (new_reg, new_lhs);
1916 emit_insn (shift_seq);
1917 read_reg = extract_low_bits (read_mode, new_mode, new_reg);
1918 break;
1919 }
1920
1921 return read_reg;
1922 }
1923
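/* An illustrative sketch of what find_shift_sequence builds (not
   taken from a real target): on a little-endian machine, suppose
   (set (mem:DI addr) r1) is followed by a read of
   (mem:SI (plus addr 4)).  Then SHIFT is 32, ACCESS_SIZE is 8, and
   the sequence built here is roughly

       r2:DI <- r1:DI
       r2:DI <- r2:DI >> 32
       read_reg:SI <- lowpart (r2:DI)

   so the SImode load can be replaced by read_reg.  Which modes are
   tried depends on the shifts the target can do in one insn.  */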
1924
1925 /* Call back for note_stores to find the hard regs set or clobbered by
1926 insn. Data is a bitmap of the hardregs set so far. */
1927
1928 static void
1929 look_for_hardregs (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
1930 {
1931 bitmap regs_set = (bitmap) data;
1932
1933 if (REG_P (x)
1934 && HARD_REGISTER_P (x))
1935 bitmap_set_range (regs_set, REGNO (x), REG_NREGS (x));
1936 }
1937
1938 /* Helper function for replace_read and record_store.
1939 Attempt to return the value stored in STORE_INFO for the bytes from
1940 READ_BEGIN up to (but not including) READ_END, read in READ_MODE.
1941 Return NULL if not successful. If REQUIRE_CST is true, only return a constant. */
1942
1943 static rtx
1944 get_stored_val (store_info_t store_info, machine_mode read_mode,
1945 HOST_WIDE_INT read_begin, HOST_WIDE_INT read_end,
1946 basic_block bb, bool require_cst)
1947 {
1948 machine_mode store_mode = GET_MODE (store_info->mem);
1949 int shift;
1950 int access_size; /* In bytes. */
1951 rtx read_reg;
1952
1953 /* To get here the read is within the boundaries of the write so
1954 shift will never be negative. Start out with the shift being in
1955 bytes. */
1956 if (store_mode == BLKmode)
1957 shift = 0;
1958 else if (BYTES_BIG_ENDIAN)
1959 shift = store_info->end - read_end;
1960 else
1961 shift = read_begin - store_info->begin;
1962
1963 access_size = shift + GET_MODE_SIZE (read_mode);
1964
1965 /* From now on it is bits. */
1966 shift *= BITS_PER_UNIT;
1967
1968 if (shift)
1969 read_reg = find_shift_sequence (access_size, store_info, read_mode, shift,
1970 optimize_bb_for_speed_p (bb),
1971 require_cst);
1972 else if (store_mode == BLKmode)
1973 {
1974 /* The store is a memset (addr, const_val, const_size). */
1975 gcc_assert (CONST_INT_P (store_info->rhs));
1976 store_mode = int_mode_for_mode (read_mode);
1977 if (store_mode == BLKmode)
1978 read_reg = NULL_RTX;
1979 else if (store_info->rhs == const0_rtx)
1980 read_reg = extract_low_bits (read_mode, store_mode, const0_rtx);
1981 else if (GET_MODE_BITSIZE (store_mode) > HOST_BITS_PER_WIDE_INT
1982 || BITS_PER_UNIT >= HOST_BITS_PER_WIDE_INT)
1983 read_reg = NULL_RTX;
1984 else
1985 {
1986 unsigned HOST_WIDE_INT c
1987 = INTVAL (store_info->rhs)
1988 & (((HOST_WIDE_INT) 1 << BITS_PER_UNIT) - 1);
1989 int shift = BITS_PER_UNIT;
1990 while (shift < HOST_BITS_PER_WIDE_INT)
1991 {
1992 c |= (c << shift);
1993 shift <<= 1;
1994 }
1995 read_reg = gen_int_mode (c, store_mode);
1996 read_reg = extract_low_bits (read_mode, store_mode, read_reg);
1997 }
1998 }
1999 else if (store_info->const_rhs
2000 && (require_cst
2001 || GET_MODE_CLASS (read_mode) != GET_MODE_CLASS (store_mode)))
2002 read_reg = extract_low_bits (read_mode, store_mode,
2003 copy_rtx (store_info->const_rhs));
2004 else
2005 read_reg = extract_low_bits (read_mode, store_mode,
2006 copy_rtx (store_info->rhs));
2007 if (require_cst && read_reg && !CONSTANT_P (read_reg))
2008 read_reg = NULL_RTX;
2009 return read_reg;
2010 }
2011
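/* A worked example of the byte replication loop above (illustrative
   only): for a memset with fill value 0x12 and 64-bit HOST_WIDE_INT,
   c is successively 0x12, 0x1212, 0x12121212 and
   0x1212121212121212, i.e. the stored byte replicated across the
   whole of store_mode, from which the low READ_MODE bits are then
   extracted.  */
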
2012 /* Take a sequence of:
2013 A <- r1
2014 ...
2015 ... <- A
2016
2017 and change it into
2018 r2 <- r1
2019 A <- r1
2020 ...
2021 ... <- r2
2022
2023 or
2024
2025 r3 <- extract (r1)
2026 r3 <- r3 >> shift
2027 r2 <- extract (r3)
2028 ... <- r2
2029
2030 or
2031
2032 r2 <- extract (r1)
2033 ... <- r2
2034
2035 depending on the alignment and the mode of the store and
2036 subsequent load.
2037
2038
2039 The STORE_INFO and STORE_INSN are for the store and READ_INFO
2040 and READ_INSN are for the read. Return true if the replacement
2041 went ok. */
2042
2043 static bool
2044 replace_read (store_info_t store_info, insn_info_t store_insn,
2045 read_info_t read_info, insn_info_t read_insn, rtx *loc,
2046 bitmap regs_live)
2047 {
2048 machine_mode store_mode = GET_MODE (store_info->mem);
2049 machine_mode read_mode = GET_MODE (read_info->mem);
2050 rtx_insn *insns, *this_insn;
2051 rtx read_reg;
2052 basic_block bb;
2053
2054 if (!dbg_cnt (dse))
2055 return false;
2056
2057 /* Create a sequence of instructions to set up the read register.
2058 This sequence goes immediately before the store and its result
2059 is read by the load.
2060
2061 We need to keep this in perspective. We are replacing a read
2062 with a sequence of insns, but the read will almost certainly be
2063 in cache, so it is not going to be an expensive one. Thus, we
2064 are not willing to do a multi insn shift or worse a subroutine
2065 call to get rid of the read. */
2066 if (dump_file && (dump_flags & TDF_DETAILS))
2067 fprintf (dump_file, "trying to replace %smode load in insn %d"
2068 " from %smode store in insn %d\n",
2069 GET_MODE_NAME (read_mode), INSN_UID (read_insn->insn),
2070 GET_MODE_NAME (store_mode), INSN_UID (store_insn->insn));
2071 start_sequence ();
2072 bb = BLOCK_FOR_INSN (read_insn->insn);
2073 read_reg = get_stored_val (store_info,
2074 read_mode, read_info->begin, read_info->end,
2075 bb, false);
2076 if (read_reg == NULL_RTX)
2077 {
2078 end_sequence ();
2079 if (dump_file && (dump_flags & TDF_DETAILS))
2080 fprintf (dump_file, " -- could not extract bits of stored value\n");
2081 return false;
2082 }
2083 /* Force the value into a new register so that it won't be clobbered
2084 between the store and the load. */
2085 read_reg = copy_to_mode_reg (read_mode, read_reg);
2086 insns = get_insns ();
2087 end_sequence ();
2088
2089 if (insns != NULL_RTX)
2090 {
2091 /* Now we have to scan the set of new instructions to see if the
2092 sequence sets any of the hardregs that happened to be
2093 live at this point. For instance, this can happen if one of
2094 the insns sets the CC and the CC happened to be live at that
2095 point. This does occasionally happen, see PR 37922. */
2096 bitmap regs_set = BITMAP_ALLOC (&reg_obstack);
2097
2098 for (this_insn = insns; this_insn != NULL_RTX; this_insn = NEXT_INSN (this_insn))
2099 note_stores (PATTERN (this_insn), look_for_hardregs, regs_set);
2100
2101 bitmap_and_into (regs_set, regs_live);
2102 if (!bitmap_empty_p (regs_set))
2103 {
2104 if (dump_file && (dump_flags & TDF_DETAILS))
2105 {
2106 fprintf (dump_file,
2107 "abandoning replacement because sequence clobbers live hardregs:");
2108 df_print_regset (dump_file, regs_set);
2109 }
2110
2111 BITMAP_FREE (regs_set);
2112 return false;
2113 }
2114 BITMAP_FREE (regs_set);
2115 }
2116
2117 if (validate_change (read_insn->insn, loc, read_reg, 0))
2118 {
2119 deferred_change_t change = new deferred_change;
2120
2121 /* Insert this right before the store insn where it will be safe
2122 from later insns that might change it before the read. */
2123 emit_insn_before (insns, store_insn->insn);
2124
2125 /* And now for the kludge part: cselib croaks if you just
2126 return at this point. There are two reasons for this:
2127
2128 1) Cselib has an idea of how many pseudos there are and
2129 that does not include the new ones we just added.
2130
2131 2) Cselib does not know about the move insn we added
2132 above the store_info, and there is no way to tell it
2133 about it, because it has "moved on".
2134
2135 Problem (1) is fixable with a certain amount of engineering.
2136 Problem (2) requires starting the bb from scratch. This
2137 could be expensive.
2138
2139 So we are just going to have to lie. The move/extraction
2140 insns are not really an issue, cselib did not see them. But
2141 the use of the new pseudo in read_insn is a real problem because
2142 cselib has not scanned this insn. The way that we solve this
2143 problem is that we are just going to put the mem back for now
2144 and when we are finished with the block, we undo this. We
2145 keep a table of mems to get rid of. At the end of the basic
2146 block we can put them back. */
2147
2148 *loc = read_info->mem;
2149 change->next = deferred_change_list;
2150 deferred_change_list = change;
2151 change->loc = loc;
2152 change->reg = read_reg;
2153
2154 /* Get rid of the read_info, from the point of view of the
2155 rest of dse, play like this read never happened. */
2156 read_insn->read_rec = read_info->next;
2157 delete read_info;
2158 if (dump_file && (dump_flags & TDF_DETAILS))
2159 {
2160 fprintf (dump_file, " -- replaced the loaded MEM with ");
2161 print_simple_rtl (dump_file, read_reg);
2162 fprintf (dump_file, "\n");
2163 }
2164 return true;
2165 }
2166 else
2167 {
2168 if (dump_file && (dump_flags & TDF_DETAILS))
2169 {
2170 fprintf (dump_file, " -- replacing the loaded MEM with ");
2171 print_simple_rtl (dump_file, read_reg);
2172 fprintf (dump_file, " led to an invalid instruction\n");
2173 }
2174 return false;
2175 }
2176 }
2177
2178 /* Check the address of MEM *LOC and kill any appropriate stores that may
2179 be active. */
2180
2181 static void
2182 check_mem_read_rtx (rtx *loc, bb_info_t bb_info)
2183 {
2184 rtx mem = *loc, mem_addr;
2185 insn_info_t insn_info;
2186 HOST_WIDE_INT offset = 0;
2187 HOST_WIDE_INT width = 0;
2188 alias_set_type spill_alias_set = 0;
2189 cselib_val *base = NULL;
2190 int group_id;
2191 read_info_t read_info;
2192
2193 insn_info = bb_info->last_insn;
2194
2195 if ((MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2196 || (MEM_VOLATILE_P (mem)))
2197 {
2198 if (dump_file && (dump_flags & TDF_DETAILS))
2199 fprintf (dump_file, " adding wild read, volatile or barrier.\n");
2200 add_wild_read (bb_info);
2201 insn_info->cannot_delete = true;
2202 return;
2203 }
2204
2205 /* If it is reading readonly mem, then there can be no conflict with
2206 another write. */
2207 if (MEM_READONLY_P (mem))
2208 return;
2209
2210 if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
2211 {
2212 if (dump_file && (dump_flags & TDF_DETAILS))
2213 fprintf (dump_file, " adding wild read, canon_address failure.\n");
2214 add_wild_read (bb_info);
2215 return;
2216 }
2217
2218 if (GET_MODE (mem) == BLKmode)
2219 width = -1;
2220 else
2221 width = GET_MODE_SIZE (GET_MODE (mem));
2222
2223 read_info = new read_info_type;
2224 read_info->group_id = group_id;
2225 read_info->mem = mem;
2226 read_info->alias_set = spill_alias_set;
2227 read_info->begin = offset;
2228 read_info->end = offset + width;
2229 read_info->next = insn_info->read_rec;
2230 insn_info->read_rec = read_info;
2231 /* For alias_set != 0 canon_true_dependence should never be called. */
2232 if (spill_alias_set)
2233 mem_addr = NULL_RTX;
2234 else
2235 {
2236 if (group_id < 0)
2237 mem_addr = base->val_rtx;
2238 else
2239 {
2240 group_info_t group
2241 = rtx_group_vec[group_id];
2242 mem_addr = group->canon_base_addr;
2243 }
2244 /* get_addr can only handle a VALUE, not an expression like
2245 VALUE + OFFSET, so call get_addr to obtain the original addr for
2246 mem_addr before applying plus_constant. */
2247 mem_addr = get_addr (mem_addr);
2248 if (offset)
2249 mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
2250 }
2251
2252 /* We ignore the clobbers in store_info. This is mildly aggressive,
2253 but there really should not be a clobber followed by a read. */
2254
2255 if (spill_alias_set)
2256 {
2257 insn_info_t i_ptr = active_local_stores;
2258 insn_info_t last = NULL;
2259
2260 if (dump_file && (dump_flags & TDF_DETAILS))
2261 fprintf (dump_file, " processing spill load %d\n",
2262 (int) spill_alias_set);
2263
2264 while (i_ptr)
2265 {
2266 store_info_t store_info = i_ptr->store_rec;
2267
2268 /* Skip the clobbers. */
2269 while (!store_info->is_set)
2270 store_info = store_info->next;
2271
2272 if (store_info->alias_set == spill_alias_set)
2273 {
2274 if (dump_file && (dump_flags & TDF_DETAILS))
2275 dump_insn_info ("removing from active", i_ptr);
2276
2277 active_local_stores_len--;
2278 if (last)
2279 last->next_local_store = i_ptr->next_local_store;
2280 else
2281 active_local_stores = i_ptr->next_local_store;
2282 }
2283 else
2284 last = i_ptr;
2285 i_ptr = i_ptr->next_local_store;
2286 }
2287 }
2288 else if (group_id >= 0)
2289 {
2290 /* This is the restricted case where the base is a constant or
2291 the frame pointer and the offset is a constant. */
2292 insn_info_t i_ptr = active_local_stores;
2293 insn_info_t last = NULL;
2294
2295 if (dump_file && (dump_flags & TDF_DETAILS))
2296 {
2297 if (width == -1)
2298 fprintf (dump_file, " processing const load gid=%d[BLK]\n",
2299 group_id);
2300 else
2301 fprintf (dump_file, " processing const load gid=%d[%d..%d)\n",
2302 group_id, (int)offset, (int)(offset+width));
2303 }
2304
2305 while (i_ptr)
2306 {
2307 bool remove = false;
2308 store_info_t store_info = i_ptr->store_rec;
2309
2310 /* Skip the clobbers. */
2311 while (!store_info->is_set)
2312 store_info = store_info->next;
2313
2314 /* There are three cases here. */
2315 if (store_info->group_id < 0)
2316 /* We have a cselib store followed by a read from a
2317 const base. */
2318 remove
2319 = canon_true_dependence (store_info->mem,
2320 GET_MODE (store_info->mem),
2321 store_info->mem_addr,
2322 mem, mem_addr);
2323
2324 else if (group_id == store_info->group_id)
2325 {
2326 /* This is a block mode load. We may get lucky and
2327 canon_true_dependence may save the day. */
2328 if (width == -1)
2329 remove
2330 = canon_true_dependence (store_info->mem,
2331 GET_MODE (store_info->mem),
2332 store_info->mem_addr,
2333 mem, mem_addr);
2334
2335 /* If this read is just reading back something that we just
2336 stored, rewrite the read. */
2337 else
2338 {
2339 if (store_info->rhs
2340 && offset >= store_info->begin
2341 && offset + width <= store_info->end
2342 && all_positions_needed_p (store_info,
2343 offset - store_info->begin,
2344 width)
2345 && replace_read (store_info, i_ptr, read_info,
2346 insn_info, loc, bb_info->regs_live))
2347 return;
2348
2349 /* The bases are the same, just see if the offsets
2350 overlap. */
2351 if ((offset < store_info->end)
2352 && (offset + width > store_info->begin))
2353 remove = true;
2354 }
2355 }
2356
2357 /* else
2358 The else case that is missing here is that the
2359 bases are constant but different. There is nothing
2360 to do here because there is no overlap. */
2361
2362 if (remove)
2363 {
2364 if (dump_file && (dump_flags & TDF_DETAILS))
2365 dump_insn_info ("removing from active", i_ptr);
2366
2367 active_local_stores_len--;
2368 if (last)
2369 last->next_local_store = i_ptr->next_local_store;
2370 else
2371 active_local_stores = i_ptr->next_local_store;
2372 }
2373 else
2374 last = i_ptr;
2375 i_ptr = i_ptr->next_local_store;
2376 }
2377 }
2378 else
2379 {
2380 insn_info_t i_ptr = active_local_stores;
2381 insn_info_t last = NULL;
2382 if (dump_file && (dump_flags & TDF_DETAILS))
2383 {
2384 fprintf (dump_file, " processing cselib load mem:");
2385 print_inline_rtx (dump_file, mem, 0);
2386 fprintf (dump_file, "\n");
2387 }
2388
2389 while (i_ptr)
2390 {
2391 bool remove = false;
2392 store_info_t store_info = i_ptr->store_rec;
2393
2394 if (dump_file && (dump_flags & TDF_DETAILS))
2395 fprintf (dump_file, " processing cselib load against insn %d\n",
2396 INSN_UID (i_ptr->insn));
2397
2398 /* Skip the clobbers. */
2399 while (!store_info->is_set)
2400 store_info = store_info->next;
2401
2402 /* If this read is just reading back something that we just
2403 stored, rewrite the read. */
2404 if (store_info->rhs
2405 && store_info->group_id == -1
2406 && store_info->cse_base == base
2407 && width != -1
2408 && offset >= store_info->begin
2409 && offset + width <= store_info->end
2410 && all_positions_needed_p (store_info,
2411 offset - store_info->begin, width)
2412 && replace_read (store_info, i_ptr, read_info, insn_info, loc,
2413 bb_info->regs_live))
2414 return;
2415
2416 if (!store_info->alias_set)
2417 remove = canon_true_dependence (store_info->mem,
2418 GET_MODE (store_info->mem),
2419 store_info->mem_addr,
2420 mem, mem_addr);
2421
2422 if (remove)
2423 {
2424 if (dump_file && (dump_flags & TDF_DETAILS))
2425 dump_insn_info ("removing from active", i_ptr);
2426
2427 active_local_stores_len--;
2428 if (last)
2429 last->next_local_store = i_ptr->next_local_store;
2430 else
2431 active_local_stores = i_ptr->next_local_store;
2432 }
2433 else
2434 last = i_ptr;
2435 i_ptr = i_ptr->next_local_store;
2436 }
2437 }
2438 }
2439
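/* A small example of the interval test used above (illustrative
   only): against an active store covering [begin, end) = [8, 16) in
   the same group, a 4-byte read at offset 12 satisfies
   offset < end and offset + width > begin, so it overlaps and the
   store is dropped from the active list; a read at offset 16 does
   not overlap and leaves the store active.  */
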
2440 /* A note_uses callback in which DATA points to the bb_info for the
2441 current basic block. Apply check_mem_read_rtx to every MEM found
2442 within *LOC. */
2443
2444 static void
2445 check_mem_read_use (rtx *loc, void *data)
2446 {
2447 subrtx_ptr_iterator::array_type array;
2448 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
2449 {
2450 rtx *loc = *iter;
2451 if (MEM_P (*loc))
2452 check_mem_read_rtx (loc, (bb_info_t) data);
2453 }
2454 }
2455
2456
2457 /* Get arguments passed to CALL_INSN. Return TRUE if successful.
2458 So far it only handles arguments passed in registers. */
2459
2460 static bool
2461 get_call_args (rtx call_insn, tree fn, rtx *args, int nargs)
2462 {
2463 CUMULATIVE_ARGS args_so_far_v;
2464 cumulative_args_t args_so_far;
2465 tree arg;
2466 int idx;
2467
2468 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
2469 args_so_far = pack_cumulative_args (&args_so_far_v);
2470
2471 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
2472 for (idx = 0;
2473 arg != void_list_node && idx < nargs;
2474 arg = TREE_CHAIN (arg), idx++)
2475 {
2476 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
2477 rtx reg, link, tmp;
2478 reg = targetm.calls.function_arg (args_so_far, mode, NULL_TREE, true);
2479 if (!reg || !REG_P (reg) || GET_MODE (reg) != mode
2480 || GET_MODE_CLASS (mode) != MODE_INT)
2481 return false;
2482
2483 for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
2484 link;
2485 link = XEXP (link, 1))
2486 if (GET_CODE (XEXP (link, 0)) == USE)
2487 {
2488 args[idx] = XEXP (XEXP (link, 0), 0);
2489 if (REG_P (args[idx])
2490 && REGNO (args[idx]) == REGNO (reg)
2491 && (GET_MODE (args[idx]) == mode
2492 || (GET_MODE_CLASS (GET_MODE (args[idx])) == MODE_INT
2493 && (GET_MODE_SIZE (GET_MODE (args[idx]))
2494 <= UNITS_PER_WORD)
2495 && (GET_MODE_SIZE (GET_MODE (args[idx]))
2496 > GET_MODE_SIZE (mode)))))
2497 break;
2498 }
2499 if (!link)
2500 return false;
2501
2502 tmp = cselib_expand_value_rtx (args[idx], scratch, 5);
2503 if (GET_MODE (args[idx]) != mode)
2504 {
2505 if (!tmp || !CONST_INT_P (tmp))
2506 return false;
2507 tmp = gen_int_mode (INTVAL (tmp), mode);
2508 }
2509 if (tmp)
2510 args[idx] = tmp;
2511
2512 targetm.calls.function_arg_advance (args_so_far, mode, NULL_TREE, true);
2513 }
2514 if (arg != void_list_node || idx != nargs)
2515 return false;
2516 return true;
2517 }
2518
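/* For the memset handling below, the expected layout is (a sketch,
   assuming a target that passes all three arguments in integer
   registers):

       args[0] = the destination address
       args[1] = the fill value
       args[2] = the length

   each possibly folded to a constant by cselib_expand_value_rtx.  */
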
2519 /* Return a bitmap of the fixed registers contained in IN. */
2520
2521 static bitmap
2522 copy_fixed_regs (const_bitmap in)
2523 {
2524 bitmap ret;
2525
2526 ret = ALLOC_REG_SET (NULL);
2527 bitmap_and (ret, in, fixed_reg_set_regset);
2528 return ret;
2529 }
2530
2531 /* Apply record_store to all candidate stores in INSN. Mark INSN
2532 as not deletable if some part of it is not a candidate store and
2533 assigns to a non-register target. */
2534
2535 static void
2536 scan_insn (bb_info_t bb_info, rtx_insn *insn)
2537 {
2538 rtx body;
2539 insn_info_type *insn_info = new insn_info_type;
2540 int mems_found = 0;
2541 memset (insn_info, 0, sizeof (struct insn_info_type));
2542
2543 if (dump_file && (dump_flags & TDF_DETAILS))
2544 fprintf (dump_file, "\n**scanning insn=%d\n",
2545 INSN_UID (insn));
2546
2547 insn_info->prev_insn = bb_info->last_insn;
2548 insn_info->insn = insn;
2549 bb_info->last_insn = insn_info;
2550
2551 if (DEBUG_INSN_P (insn))
2552 {
2553 insn_info->cannot_delete = true;
2554 return;
2555 }
2556
2557 /* Look at all of the uses in the insn. */
2558 note_uses (&PATTERN (insn), check_mem_read_use, bb_info);
2559
2560 if (CALL_P (insn))
2561 {
2562 bool const_call;
2563 tree memset_call = NULL_TREE;
2564
2565 insn_info->cannot_delete = true;
2566
2567 /* Const functions cannot do anything bad, i.e. read memory;
2568 however, they can read their parameters, which may have
2569 been pushed onto the stack.
2570 memset and bzero don't read memory either. */
2571 const_call = RTL_CONST_CALL_P (insn);
2572 if (!const_call)
2573 {
2574 rtx call = get_call_rtx_from (insn);
2575 if (call && GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
2576 {
2577 rtx symbol = XEXP (XEXP (call, 0), 0);
2578 if (SYMBOL_REF_DECL (symbol)
2579 && TREE_CODE (SYMBOL_REF_DECL (symbol)) == FUNCTION_DECL)
2580 {
2581 if ((DECL_BUILT_IN_CLASS (SYMBOL_REF_DECL (symbol))
2582 == BUILT_IN_NORMAL
2583 && (DECL_FUNCTION_CODE (SYMBOL_REF_DECL (symbol))
2584 == BUILT_IN_MEMSET))
2585 || SYMBOL_REF_DECL (symbol) == block_clear_fn)
2586 memset_call = SYMBOL_REF_DECL (symbol);
2587 }
2588 }
2589 }
2590 if (const_call || memset_call)
2591 {
2592 insn_info_t i_ptr = active_local_stores;
2593 insn_info_t last = NULL;
2594
2595 if (dump_file && (dump_flags & TDF_DETAILS))
2596 fprintf (dump_file, "%s call %d\n",
2597 const_call ? "const" : "memset", INSN_UID (insn));
2598
2599 /* See the head comment of the frame_read field. */
2600 if (reload_completed
2601 /* Tail calls store their arguments using the
2602 arg pointer. If it is a frame pointer on the target,
2603 we need to kill frame pointer based stores even
2604 before reload. */
2605 || (SIBLING_CALL_P (insn)
2606 && HARD_FRAME_POINTER_IS_ARG_POINTER))
2607 insn_info->frame_read = true;
2608
2609 /* Loop over the active stores and remove those which are
2610 killed by the const function call. */
2611 while (i_ptr)
2612 {
2613 bool remove_store = false;
2614
2615 /* The stack pointer based stores are always killed. */
2616 if (i_ptr->stack_pointer_based)
2617 remove_store = true;
2618
2619 /* If the frame is read, the frame related stores are killed. */
2620 else if (insn_info->frame_read)
2621 {
2622 store_info_t store_info = i_ptr->store_rec;
2623
2624 /* Skip the clobbers. */
2625 while (!store_info->is_set)
2626 store_info = store_info->next;
2627
2628 if (store_info->group_id >= 0
2629 && rtx_group_vec[store_info->group_id]->frame_related)
2630 remove_store = true;
2631 }
2632
2633 if (remove_store)
2634 {
2635 if (dump_file && (dump_flags & TDF_DETAILS))
2636 dump_insn_info ("removing from active", i_ptr);
2637
2638 active_local_stores_len--;
2639 if (last)
2640 last->next_local_store = i_ptr->next_local_store;
2641 else
2642 active_local_stores = i_ptr->next_local_store;
2643 }
2644 else
2645 last = i_ptr;
2646
2647 i_ptr = i_ptr->next_local_store;
2648 }
2649
2650 if (memset_call)
2651 {
2652 rtx args[3];
2653 if (get_call_args (insn, memset_call, args, 3)
2654 && CONST_INT_P (args[1])
2655 && CONST_INT_P (args[2])
2656 && INTVAL (args[2]) > 0)
2657 {
2658 rtx mem = gen_rtx_MEM (BLKmode, args[0]);
2659 set_mem_size (mem, INTVAL (args[2]));
2660 body = gen_rtx_SET (mem, args[1]);
2661 mems_found += record_store (body, bb_info);
2662 if (dump_file && (dump_flags & TDF_DETAILS))
2663 fprintf (dump_file, "handling memset as BLKmode store\n");
2664 if (mems_found == 1)
2665 {
2666 if (active_local_stores_len++
2667 >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
2668 {
2669 active_local_stores_len = 1;
2670 active_local_stores = NULL;
2671 }
2672 insn_info->fixed_regs_live
2673 = copy_fixed_regs (bb_info->regs_live);
2674 insn_info->next_local_store = active_local_stores;
2675 active_local_stores = insn_info;
2676 }
2677 }
2678 }
2679 }
2680 else if (SIBLING_CALL_P (insn) && reload_completed)
2681 /* Arguments for a sibling call that are pushed to memory are passed
2682 using the incoming argument pointer of the current function. After
2683 reload that might be (and likely is) frame pointer based. */
2684 add_wild_read (bb_info);
2685 else
2686 /* Every other call, including pure functions, may read any memory
2687 that is not relative to the frame. */
2688 add_non_frame_wild_read (bb_info);
2689
2690 return;
2691 }
2692
2693 /* Even if there are sets in these insns, we cannot delete
2694 them. */
2695 if ((GET_CODE (PATTERN (insn)) == CLOBBER)
2696 || volatile_refs_p (PATTERN (insn))
2697 || (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
2698 || (RTX_FRAME_RELATED_P (insn))
2699 || find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX))
2700 insn_info->cannot_delete = true;
2701
2702 body = PATTERN (insn);
2703 if (GET_CODE (body) == PARALLEL)
2704 {
2705 int i;
2706 for (i = 0; i < XVECLEN (body, 0); i++)
2707 mems_found += record_store (XVECEXP (body, 0, i), bb_info);
2708 }
2709 else
2710 mems_found += record_store (body, bb_info);
2711
2712 if (dump_file && (dump_flags & TDF_DETAILS))
2713 fprintf (dump_file, "mems_found = %d, cannot_delete = %s\n",
2714 mems_found, insn_info->cannot_delete ? "true" : "false");
2715
2716 /* If we found exactly one set of a mem, add the insn to
2717 active_local_stores so that it can be locally deleted if found dead, or
2718 used for replace_read and redundant constant store elimination. Otherwise
2719 mark it as not deletable. This simplifies the processing later. */
2720 if (mems_found == 1)
2721 {
2722 if (active_local_stores_len++
2723 >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
2724 {
2725 active_local_stores_len = 1;
2726 active_local_stores = NULL;
2727 }
2728 insn_info->fixed_regs_live = copy_fixed_regs (bb_info->regs_live);
2729 insn_info->next_local_store = active_local_stores;
2730 active_local_stores = insn_info;
2731 }
2732 else
2733 insn_info->cannot_delete = true;
2734 }
2735
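/* To illustrate the memset handling in scan_insn above (a sketch): a
   call memset (p, 0x12, 16) is recorded as if it were the single
   store

       (set (mem:BLK p [16 bytes]) (const_int 0x12))

   which record_store and get_stored_val can then treat like any
   other constant store.  */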
2736
2737 /* Remove BASE from the set of active_local_stores. This is a
2738 callback from cselib that is used to get rid of the stores in
2739 active_local_stores. */
2740
2741 static void
2742 remove_useless_values (cselib_val *base)
2743 {
2744 insn_info_t insn_info = active_local_stores;
2745 insn_info_t last = NULL;
2746
2747 while (insn_info)
2748 {
2749 store_info_t store_info = insn_info->store_rec;
2750 bool del = false;
2751
2752 /* If ANY of the store_infos match the cselib group that is
2753 being deleted, then the insn cannot be deleted. */
2754 while (store_info)
2755 {
2756 if ((store_info->group_id == -1)
2757 && (store_info->cse_base == base))
2758 {
2759 del = true;
2760 break;
2761 }
2762 store_info = store_info->next;
2763 }
2764
2765 if (del)
2766 {
2767 active_local_stores_len--;
2768 if (last)
2769 last->next_local_store = insn_info->next_local_store;
2770 else
2771 active_local_stores = insn_info->next_local_store;
2772 free_store_info (insn_info);
2773 }
2774 else
2775 last = insn_info;
2776
2777 insn_info = insn_info->next_local_store;
2778 }
2779 }
2780
2781
2782 /* Do all of step 1. */
2783
2784 static void
2785 dse_step1 (void)
2786 {
2787 basic_block bb;
2788 bitmap regs_live = BITMAP_ALLOC (&reg_obstack);
2789
2790 cselib_init (0);
2791 all_blocks = BITMAP_ALLOC (NULL);
2792 bitmap_set_bit (all_blocks, ENTRY_BLOCK);
2793 bitmap_set_bit (all_blocks, EXIT_BLOCK);
2794
2795 FOR_ALL_BB_FN (bb, cfun)
2796 {
2797 insn_info_t ptr;
2798 bb_info_t bb_info = new dse_bb_info_type;
2799
2800 memset (bb_info, 0, sizeof (dse_bb_info_type));
2801 bitmap_set_bit (all_blocks, bb->index);
2802 bb_info->regs_live = regs_live;
2803
2804 bitmap_copy (regs_live, DF_LR_IN (bb));
2805 df_simulate_initialize_forwards (bb, regs_live);
2806
2807 bb_table[bb->index] = bb_info;
2808 cselib_discard_hook = remove_useless_values;
2809
2810 if (bb->index >= NUM_FIXED_BLOCKS)
2811 {
2812 rtx_insn *insn;
2813
2814 active_local_stores = NULL;
2815 active_local_stores_len = 0;
2816 cselib_clear_table ();
2817
2818 /* Scan the insns. */
2819 FOR_BB_INSNS (bb, insn)
2820 {
2821 if (INSN_P (insn))
2822 scan_insn (bb_info, insn);
2823 cselib_process_insn (insn);
2824 if (INSN_P (insn))
2825 df_simulate_one_insn_forwards (bb, insn, regs_live);
2826 }
2827
2828 /* This is something of a hack, because the global algorithm
2829 is supposed to take care of the case where stores go dead
2830 at the end of the function. However, the global
2831 algorithm must take a more conservative view of block
2832 mode reads than the local algorithm does. So to handle the case
2833 where you have a store to the frame followed by a
2834 non-overlapping block mode read, we look at the active local
2835 stores at the end of the function and delete all of the
2836 frame and spill based ones. */
2837 if (stores_off_frame_dead_at_return
2838 && (EDGE_COUNT (bb->succs) == 0
2839 || (single_succ_p (bb)
2840 && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
2841 && ! crtl->calls_eh_return)))
2842 {
2843 insn_info_t i_ptr = active_local_stores;
2844 while (i_ptr)
2845 {
2846 store_info_t store_info = i_ptr->store_rec;
2847
2848 /* Skip the clobbers. */
2849 while (!store_info->is_set)
2850 store_info = store_info->next;
2851 if (store_info->alias_set && !i_ptr->cannot_delete)
2852 delete_dead_store_insn (i_ptr);
2853 else
2854 if (store_info->group_id >= 0)
2855 {
2856 group_info_t group
2857 = rtx_group_vec[store_info->group_id];
2858 if (group->frame_related && !i_ptr->cannot_delete)
2859 delete_dead_store_insn (i_ptr);
2860 }
2861
2862 i_ptr = i_ptr->next_local_store;
2863 }
2864 }
2865
2866 /* Get rid of the loads that were discovered in
2867 replace_read. Cselib is finished with this block. */
2868 while (deferred_change_list)
2869 {
2870 deferred_change_t next = deferred_change_list->next;
2871
2872 /* There is no reason to validate this change. That was
2873 done earlier. */
2874 *deferred_change_list->loc = deferred_change_list->reg;
2875 delete deferred_change_list;
2876 deferred_change_list = next;
2877 }
2878
2879 /* Get rid of all of the cselib based store_infos in this
2880 block and mark the containing insns as not being
2881 deletable. */
2882 ptr = bb_info->last_insn;
2883 while (ptr)
2884 {
2885 if (ptr->contains_cselib_groups)
2886 {
2887 store_info_t s_info = ptr->store_rec;
2888 while (s_info && !s_info->is_set)
2889 s_info = s_info->next;
2890 if (s_info
2891 && s_info->redundant_reason
2892 && s_info->redundant_reason->insn
2893 && !ptr->cannot_delete)
2894 {
2895 if (dump_file && (dump_flags & TDF_DETAILS))
2896 fprintf (dump_file, "Locally deleting insn %d "
2897 "because insn %d stores the "
2898 "same value and couldn't be "
2899 "eliminated\n",
2900 INSN_UID (ptr->insn),
2901 INSN_UID (s_info->redundant_reason->insn));
2902 delete_dead_store_insn (ptr);
2903 }
2904 free_store_info (ptr);
2905 }
2906 else
2907 {
2908 store_info_t s_info;
2909
2910 /* Free at least positions_needed bitmaps. */
2911 for (s_info = ptr->store_rec; s_info; s_info = s_info->next)
2912 if (s_info->is_large)
2913 {
2914 BITMAP_FREE (s_info->positions_needed.large.bmap);
2915 s_info->is_large = false;
2916 }
2917 }
2918 ptr = ptr->prev_insn;
2919 }
2920
2921 cse_store_info_pool.release ();
2922 }
2923 bb_info->regs_live = NULL;
2924 }
2925
2926 BITMAP_FREE (regs_live);
2927 cselib_finish ();
2928 rtx_group_table->empty ();
2929 }
2930
2931 \f
2932 /*----------------------------------------------------------------------------
2933 Second step.
2934
2935 Assign each byte position in the stores that we are going to
2936 analyze globally to a position in the bitmaps. Returns true if
2937 there are any bit positions assigned.
2938 ----------------------------------------------------------------------------*/
2939
2940 static void
2941 dse_step2_init (void)
2942 {
2943 unsigned int i;
2944 group_info_t group;
2945
2946 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
2947 {
2948 /* For all non stack related bases, we only consider a store to
2949 be deletable if there are two or more stores for that
2950 position. This is because it takes one store to make the
2951 other store redundant. However, for the stores that are
2952 stack related, we consider them if there is only one store
2953 for the position. We do this because the stack related
2954 stores can be deleted if there is no read between them and
2955 the end of the function.
2956
2957 To make this work in the current framework, for the stack
2958 related bases we add all of the bits from store1 into store2.
2959 This has the effect of making them eligible even if there is
2960 only one store. */
2961
2962 if (stores_off_frame_dead_at_return && group->frame_related)
2963 {
2964 bitmap_ior_into (group->store2_n, group->store1_n);
2965 bitmap_ior_into (group->store2_p, group->store1_p);
2966 if (dump_file && (dump_flags & TDF_DETAILS))
2967 fprintf (dump_file, "group %d is frame related ", i);
2968 }
2969
2970 group->offset_map_size_n++;
2971 group->offset_map_n = XOBNEWVEC (&dse_obstack, int,
2972 group->offset_map_size_n);
2973 group->offset_map_size_p++;
2974 group->offset_map_p = XOBNEWVEC (&dse_obstack, int,
2975 group->offset_map_size_p);
2976 group->process_globally = false;
2977 if (dump_file && (dump_flags & TDF_DETAILS))
2978 {
2979 fprintf (dump_file, "group %d(%d+%d): ", i,
2980 (int)bitmap_count_bits (group->store2_n),
2981 (int)bitmap_count_bits (group->store2_p));
2982 bitmap_print (dump_file, group->store2_n, "n ", " ");
2983 bitmap_print (dump_file, group->store2_p, "p ", "\n");
2984 }
2985 }
2986 }
2987
2988
2989 /* Init the offset tables for the normal case. */
2990
2991 static bool
2992 dse_step2_nospill (void)
2993 {
2994 unsigned int i;
2995 group_info_t group;
2996 /* Position 0 is unused because 0 is used in the maps to mean
2997 unused. */
2998 current_position = 1;
2999 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3000 {
3001 bitmap_iterator bi;
3002 unsigned int j;
3003
3004 if (group == clear_alias_group)
3005 continue;
3006
3007 memset (group->offset_map_n, 0, sizeof (int) * group->offset_map_size_n);
3008 memset (group->offset_map_p, 0, sizeof (int) * group->offset_map_size_p);
3009 bitmap_clear (group->group_kill);
3010
3011 EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
3012 {
3013 bitmap_set_bit (group->group_kill, current_position);
3014 if (bitmap_bit_p (group->escaped_n, j))
3015 bitmap_set_bit (kill_on_calls, current_position);
3016 group->offset_map_n[j] = current_position++;
3017 group->process_globally = true;
3018 }
3019 EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
3020 {
3021 bitmap_set_bit (group->group_kill, current_position);
3022 if (bitmap_bit_p (group->escaped_p, j))
3023 bitmap_set_bit (kill_on_calls, current_position);
3024 group->offset_map_p[j] = current_position++;
3025 group->process_globally = true;
3026 }
3027 }
3028 return current_position != 1;
3029 }
3030
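/* A small example of the numbering above (illustrative only): if
   group 0 has store2_p bits {4, 5, 6, 7} and group 1 has {0, 1},
   the offsets are assigned positions 1..4 and 5..6 respectively.
   Position 0 is reserved to mean "unused", and group_kill collects
   each group's positions for use in the kill sets.  */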
3031
3032 \f
3033 /*----------------------------------------------------------------------------
3034 Third step.
3035
3036 Build the bit vectors for the transfer functions.
3037 ----------------------------------------------------------------------------*/
3038
3039
3040 /* Look up the bitmap index for OFFSET in GROUP_INFO. If it is not
3041 there, return 0. */
3042
3043 static int
3044 get_bitmap_index (group_info_t group_info, HOST_WIDE_INT offset)
3045 {
3046 if (offset < 0)
3047 {
3048 HOST_WIDE_INT offset_p = -offset;
3049 if (offset_p >= group_info->offset_map_size_n)
3050 return 0;
3051 return group_info->offset_map_n[offset_p];
3052 }
3053 else
3054 {
3055 if (offset >= group_info->offset_map_size_p)
3056 return 0;
3057 return group_info->offset_map_p[offset];
3058 }
3059 }
3060
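/* For example (illustrative only): get_bitmap_index (group, -4)
   looks up group->offset_map_n[4], while get_bitmap_index (group, 4)
   looks up group->offset_map_p[4]; both return 0 when the offset was
   never assigned a position in step 2.  */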
3061
3062 /* Process the STORE_INFOs into the bitmaps into GEN and KILL. KILL
3063 may be NULL. */
3064
3065 static void
3066 scan_stores_nospill (store_info_t store_info, bitmap gen, bitmap kill)
3067 {
3068 while (store_info)
3069 {
3070 HOST_WIDE_INT i;
3071 group_info_t group_info
3072 = rtx_group_vec[store_info->group_id];
3073 if (group_info->process_globally)
3074 for (i = store_info->begin; i < store_info->end; i++)
3075 {
3076 int index = get_bitmap_index (group_info, i);
3077 if (index != 0)
3078 {
3079 bitmap_set_bit (gen, index);
3080 if (kill)
3081 bitmap_clear_bit (kill, index);
3082 }
3083 }
3084 store_info = store_info->next;
3085 }
3086 }
3087
3088
3089 /* Process the STORE_INFOs into the bitmaps into GEN and KILL. KILL
3090 may be NULL. */
3091
3092 static void
3093 scan_stores_spill (store_info_t store_info, bitmap gen, bitmap kill)
3094 {
3095 while (store_info)
3096 {
3097 if (store_info->alias_set)
3098 {
3099 int index = get_bitmap_index (clear_alias_group,
3100 store_info->alias_set);
3101 if (index != 0)
3102 {
3103 bitmap_set_bit (gen, index);
3104 if (kill)
3105 bitmap_clear_bit (kill, index);
3106 }
3107 }
3108 store_info = store_info->next;
3109 }
3110 }
3111
3112
3113 /* Process the READ_INFOs into the bitmaps into GEN and KILL. KILL
3114 may be NULL. */
3115
3116 static void
3117 scan_reads_nospill (insn_info_t insn_info, bitmap gen, bitmap kill)
3118 {
3119 read_info_t read_info = insn_info->read_rec;
3120 int i;
3121 group_info_t group;
3122
3123 /* If this insn reads the frame, kill all the frame related stores. */
3124 if (insn_info->frame_read)
3125 {
3126 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3127 if (group->process_globally && group->frame_related)
3128 {
3129 if (kill)
3130 bitmap_ior_into (kill, group->group_kill);
3131 bitmap_and_compl_into (gen, group->group_kill);
3132 }
3133 }
3134 if (insn_info->non_frame_wild_read)
3135 {
3136 /* Kill all non-frame related stores. Kill all stores of variables that
3137 escape. */
3138 if (kill)
3139 bitmap_ior_into (kill, kill_on_calls);
3140 bitmap_and_compl_into (gen, kill_on_calls);
3141 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3142 if (group->process_globally && !group->frame_related)
3143 {
3144 if (kill)
3145 bitmap_ior_into (kill, group->group_kill);
3146 bitmap_and_compl_into (gen, group->group_kill);
3147 }
3148 }
3149 while (read_info)
3150 {
3151 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3152 {
3153 if (group->process_globally)
3154 {
3155 if (i == read_info->group_id)
3156 {
3157 if (read_info->begin > read_info->end)
3158 {
3159 /* Begin > end for block mode reads. */
3160 if (kill)
3161 bitmap_ior_into (kill, group->group_kill);
3162 bitmap_and_compl_into (gen, group->group_kill);
3163 }
3164 else
3165 {
3166 /* The groups are the same, just process the
3167 offsets. */
3168 HOST_WIDE_INT j;
3169 for (j = read_info->begin; j < read_info->end; j++)
3170 {
3171 int index = get_bitmap_index (group, j);
3172 if (index != 0)
3173 {
3174 if (kill)
3175 bitmap_set_bit (kill, index);
3176 bitmap_clear_bit (gen, index);
3177 }
3178 }
3179 }
3180 }
3181 else
3182 {
3183 /* The groups are different; if the alias sets
3184 conflict, clear the entire group. We only need
3185 to apply this test if the read_info is a cselib
3186 read. Anything with a constant base cannot alias
3187 something else with a different constant
3188 base. */
3189 if ((read_info->group_id < 0)
3190 && canon_true_dependence (group->base_mem,
3191 GET_MODE (group->base_mem),
3192 group->canon_base_addr,
3193 read_info->mem, NULL_RTX))
3194 {
3195 if (kill)
3196 bitmap_ior_into (kill, group->group_kill);
3197 bitmap_and_compl_into (gen, group->group_kill);
3198 }
3199 }
3200 }
3201 }
3202
3203 read_info = read_info->next;
3204 }
3205 }
3206
3207 /* Process the READ_INFOs into the bitmaps into GEN and KILL. KILL
3208 may be NULL. */
3209
3210 static void
3211 scan_reads_spill (read_info_t read_info, bitmap gen, bitmap kill)
3212 {
3213 while (read_info)
3214 {
3215 if (read_info->alias_set)
3216 {
3217 int index = get_bitmap_index (clear_alias_group,
3218 read_info->alias_set);
3219 if (index != 0)
3220 {
3221 if (kill)
3222 bitmap_set_bit (kill, index);
3223 bitmap_clear_bit (gen, index);
3224 }
3225 }
3226
3227 read_info = read_info->next;
3228 }
3229 }
3230
3231
3232 /* Return the insn in BB_INFO before the first wild read or if there
3233 are no wild reads in the block, return the last insn. */
3234
3235 static insn_info_t
3236 find_insn_before_first_wild_read (bb_info_t bb_info)
3237 {
3238 insn_info_t insn_info = bb_info->last_insn;
3239 insn_info_t last_wild_read = NULL;
3240
3241 while (insn_info)
3242 {
3243 if (insn_info->wild_read)
3244 {
3245 last_wild_read = insn_info->prev_insn;
3246 /* Block starts with wild read. */
3247 if (!last_wild_read)
3248 return NULL;
3249 }
3250
3251 insn_info = insn_info->prev_insn;
3252 }
3253
3254 if (last_wild_read)
3255 return last_wild_read;
3256 else
3257 return bb_info->last_insn;
3258 }
3259
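/* For instance (illustrative only): in a block whose insns are
   I1 I2 I3 I4 with a wild read in I3, the backward walk above
   returns I2, so dse_step3_scan processes only I2 and I1; with no
   wild read it returns I4, the last insn.  */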
3260
3261 /* Scan the insns in BB_INFO starting at PTR and going to the top of
3262 the block in order to build the gen and kill sets for the block.
3263 We start at ptr which may be the last insn in the block or may be
3264 the first insn with a wild read. In the latter case we are able to
3265 skip the rest of the block because it just does not matter:
3266 anything that happens is hidden by the wild read. */
3267
3268 static void
3269 dse_step3_scan (bool for_spills, basic_block bb)
3270 {
3271 bb_info_t bb_info = bb_table[bb->index];
3272 insn_info_t insn_info;
3273
3274 if (for_spills)
3275 /* There are no wild reads in the spill case. */
3276 insn_info = bb_info->last_insn;
3277 else
3278 insn_info = find_insn_before_first_wild_read (bb_info);
3279
3280 /* In the spill case, or in the no_spill case when there is no wild
3281 read in the block, we will need a kill set. */
3282 if (insn_info == bb_info->last_insn)
3283 {
3284 if (bb_info->kill)
3285 bitmap_clear (bb_info->kill);
3286 else
3287 bb_info->kill = BITMAP_ALLOC (&dse_bitmap_obstack);
3288 }
3289 else
3290 if (bb_info->kill)
3291 BITMAP_FREE (bb_info->kill);
3292
3293 while (insn_info)
3294 {
3295 /* There may have been code deleted by the dce pass run before
3296 this phase. */
3297 if (insn_info->insn && INSN_P (insn_info->insn))
3298 {
3299 /* Process the read(s) last. */
3300 if (for_spills)
3301 {
3302 scan_stores_spill (insn_info->store_rec, bb_info->gen, bb_info->kill);
3303 scan_reads_spill (insn_info->read_rec, bb_info->gen, bb_info->kill);
3304 }
3305 else
3306 {
3307 scan_stores_nospill (insn_info->store_rec, bb_info->gen, bb_info->kill);
3308 scan_reads_nospill (insn_info, bb_info->gen, bb_info->kill);
3309 }
3310 }
3311
3312 insn_info = insn_info->prev_insn;
3313 }
3314 }
3315
3316
3317 /* Set the gen set of the exit block, and also any block with no
3318 successors that does not have a wild read. */
3319
3320 static void
3321 dse_step3_exit_block_scan (bb_info_t bb_info)
3322 {
3323 /* The gen set is all 0's for the exit block except for the
3324 frame_pointer_group. */
3325
3326 if (stores_off_frame_dead_at_return)
3327 {
3328 unsigned int i;
3329 group_info_t group;
3330
3331 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3332 {
3333 if (group->process_globally && group->frame_related)
3334 bitmap_ior_into (bb_info->gen, group->group_kill);
3335 }
3336 }
3337 }
3338
3339
3340 /* Find all of the blocks that are not backwards reachable from the
3341 exit block or any block with no successors (BB). These are the
3342 infinite loops or infinite self loops. These blocks will still
3343 have their bits set in UNREACHABLE_BLOCKS. */
3344
3345 static void
3346 mark_reachable_blocks (sbitmap unreachable_blocks, basic_block bb)
3347 {
3348 edge e;
3349 edge_iterator ei;
3350
3351 if (bitmap_bit_p (unreachable_blocks, bb->index))
3352 {
3353 bitmap_clear_bit (unreachable_blocks, bb->index);
3354 FOR_EACH_EDGE (e, ei, bb->preds)
3355 {
3356 mark_reachable_blocks (unreachable_blocks, e->src);
3357 }
3358 }
3359 }
3360
3361 /* Build the transfer functions for the function. */
3362
3363 static void
3364 dse_step3 (bool for_spills)
3365 {
3366 basic_block bb;
3367 sbitmap unreachable_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
3368 sbitmap_iterator sbi;
3369 bitmap all_ones = NULL;
3370 unsigned int i;
3371
3372 bitmap_ones (unreachable_blocks);
3373
3374 FOR_ALL_BB_FN (bb, cfun)
3375 {
3376 bb_info_t bb_info = bb_table[bb->index];
3377 if (bb_info->gen)
3378 bitmap_clear (bb_info->gen);
3379 else
3380 bb_info->gen = BITMAP_ALLOC (&dse_bitmap_obstack);
3381
3382 if (bb->index == ENTRY_BLOCK)
3383 ;
3384 else if (bb->index == EXIT_BLOCK)
3385 dse_step3_exit_block_scan (bb_info);
3386 else
3387 dse_step3_scan (for_spills, bb);
3388 if (EDGE_COUNT (bb->succs) == 0)
3389 mark_reachable_blocks (unreachable_blocks, bb);
3390
3391 /* If this is the second time dataflow is run, delete the old
3392 sets. */
3393 if (bb_info->in)
3394 BITMAP_FREE (bb_info->in);
3395 if (bb_info->out)
3396 BITMAP_FREE (bb_info->out);
3397 }
3398
3399 /* For any block in an infinite loop, we must initialize the out set
3400 to all ones. This could be expensive, but almost never occurs in
3401 practice. However, it is common in regression tests. */
3402 EXECUTE_IF_SET_IN_BITMAP (unreachable_blocks, 0, i, sbi)
3403 {
3404 if (bitmap_bit_p (all_blocks, i))
3405 {
3406 bb_info_t bb_info = bb_table[i];
3407 if (!all_ones)
3408 {
3409 unsigned int j;
3410 group_info_t group;
3411
3412 all_ones = BITMAP_ALLOC (&dse_bitmap_obstack);
3413 FOR_EACH_VEC_ELT (rtx_group_vec, j, group)
3414 bitmap_ior_into (all_ones, group->group_kill);
3415 }
3416 if (!bb_info->out)
3417 {
3418 bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
3419 bitmap_copy (bb_info->out, all_ones);
3420 }
3421 }
3422 }
3423
3424 if (all_ones)
3425 BITMAP_FREE (all_ones);
3426 sbitmap_free (unreachable_blocks);
3427 }
3428
3429
3430 \f
3431 /*----------------------------------------------------------------------------
3432 Fourth step.
3433
3434 Solve the bitvector equations.
3435 ----------------------------------------------------------------------------*/
3436
3437
3438 /* Confluence function for blocks with no successors. Create an out
3439 set from the gen set of the exit block. This block logically has
3440 the exit block as a successor. */
3441
3442
3443
3444 static void
3445 dse_confluence_0 (basic_block bb)
3446 {
3447 bb_info_t bb_info = bb_table[bb->index];
3448
3449 if (bb->index == EXIT_BLOCK)
3450 return;
3451
3452 if (!bb_info->out)
3453 {
3454 bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
3455 bitmap_copy (bb_info->out, bb_table[EXIT_BLOCK]->gen);
3456 }
3457 }
3458
3459 /* Propagate the information from the in set of the dest of E to the
3460 out set of the src of E. If the various in or out sets are not
3461 there, that means they are all ones. */
3462
3463 static bool
3464 dse_confluence_n (edge e)
3465 {
3466 bb_info_t src_info = bb_table[e->src->index];
3467 bb_info_t dest_info = bb_table[e->dest->index];
3468
3469 if (dest_info->in)
3470 {
3471 if (src_info->out)
3472 bitmap_and_into (src_info->out, dest_info->in);
3473 else
3474 {
3475 src_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
3476 bitmap_copy (src_info->out, dest_info->in);
3477 }
3478 }
3479 return true;
3480 }
3481
3482
3483 /* Propagate the info from the out to the in set of BB_INDEX's basic
3484 block. There are three cases:
3485
3486 1) The block has no kill set. In this case the kill set is all
3487 ones. It does not matter what the out set of the block is, none of
3488 the info can reach the top. The only thing that reaches the top is
3489 the gen set and we just copy the set.
3490
3491 2) There is a kill set but no out set and bb has successors. In
3492 this case we just return. Eventually an out set will be created and
3493 it is better to wait than to create a set of ones.
3494
3495 3) There is both a kill and out set. We apply the obvious transfer
3496 function.
3497 */
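
/* In set notation, case 3 is the usual backward transfer function

       IN = GEN U (OUT - KILL)

   computed below with bitmap_ior_and_compl.  */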
3498
3499 static bool
3500 dse_transfer_function (int bb_index)
3501 {
3502 bb_info_t bb_info = bb_table[bb_index];
3503
3504 if (bb_info->kill)
3505 {
3506 if (bb_info->out)
3507 {
3508 /* Case 3 above. */
3509 if (bb_info->in)
3510 return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
3511 bb_info->out, bb_info->kill);
3512 else
3513 {
3514 bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
3515 bitmap_ior_and_compl (bb_info->in, bb_info->gen,
3516 bb_info->out, bb_info->kill);
3517 return true;
3518 }
3519 }
3520 else
3521 /* Case 2 above. */
3522 return false;
3523 }
3524 else
3525 {
3526 /* Case 1 above. If there is already an in set, nothing
3527 happens. */
3528 if (bb_info->in)
3529 return false;
3530 else
3531 {
3532 bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
3533 bitmap_copy (bb_info->in, bb_info->gen);
3534 return true;
3535 }
3536 }
3537 }
3538
3539 /* Solve the dataflow equations. */
3540
3541 static void
3542 dse_step4 (void)
3543 {
3544 df_simple_dataflow (DF_BACKWARD, NULL, dse_confluence_0,
3545 dse_confluence_n, dse_transfer_function,
3546 all_blocks, df_get_postorder (DF_BACKWARD),
3547 df_get_n_blocks (DF_BACKWARD));
3548 if (dump_file && (dump_flags & TDF_DETAILS))
3549 {
3550 basic_block bb;
3551
3552 fprintf (dump_file, "\n\n*** Global dataflow info after analysis.\n");
3553 FOR_ALL_BB_FN (bb, cfun)
3554 {
3555 bb_info_t bb_info = bb_table[bb->index];
3556
3557 df_print_bb_index (bb, dump_file);
3558 if (bb_info->in)
3559 bitmap_print (dump_file, bb_info->in, " in: ", "\n");
3560 else
3561 fprintf (dump_file, " in: *MISSING*\n");
3562 if (bb_info->gen)
3563 bitmap_print (dump_file, bb_info->gen, " gen: ", "\n");
3564 else
3565 fprintf (dump_file, " gen: *MISSING*\n");
3566 if (bb_info->kill)
3567 bitmap_print (dump_file, bb_info->kill, " kill: ", "\n");
3568 else
3569 fprintf (dump_file, " kill: *MISSING*\n");
3570 if (bb_info->out)
3571 bitmap_print (dump_file, bb_info->out, " out: ", "\n");
3572 else
3573 fprintf (dump_file, " out: *MISSING*\n\n");
3574 }
3575 }
3576 }
3577
3578
3579 \f
3580 /*----------------------------------------------------------------------------
3581 Fifth step.
3582
3583 Delete the stores that can only be deleted using the global information.
3584 ----------------------------------------------------------------------------*/
3585
3586
3587 static void
3588 dse_step5_nospill (void)
3589 {
3590 basic_block bb;
3591 FOR_EACH_BB_FN (bb, cfun)
3592 {
3593 bb_info_t bb_info = bb_table[bb->index];
3594 insn_info_t insn_info = bb_info->last_insn;
3595 bitmap v = bb_info->out;
3596
3597 while (insn_info)
3598 {
3599 bool deleted = false;
3600 if (dump_file && insn_info->insn)
3601 {
3602 fprintf (dump_file, "starting to process insn %d\n",
3603 INSN_UID (insn_info->insn));
3604 bitmap_print (dump_file, v, " v: ", "\n");
3605 }
3606
3607 /* There may have been code deleted by the dce pass run before
3608 this phase. */
3609 if (insn_info->insn
3610 && INSN_P (insn_info->insn)
3611 && (!insn_info->cannot_delete)
3612 && (!bitmap_empty_p (v)))
3613 {
3614 store_info_t store_info = insn_info->store_rec;
3615
3616 /* Try to delete the current insn. */
3617 deleted = true;
3618
3619 /* Skip the clobbers. */
3620 while (!store_info->is_set)
3621 store_info = store_info->next;
3622
3623 if (store_info->alias_set)
3624 deleted = false;
3625 else
3626 {
3627 HOST_WIDE_INT i;
3628 group_info_t group_info
3629 = rtx_group_vec[store_info->group_id];
3630
3631 for (i = store_info->begin; i < store_info->end; i++)
3632 {
3633 int index = get_bitmap_index (group_info, i);
3634
3635 if (dump_file && (dump_flags & TDF_DETAILS))
3636 fprintf (dump_file, "i = %d, index = %d\n", (int)i, index);
3637 if (index == 0 || !bitmap_bit_p (v, index))
3638 {
3639 if (dump_file && (dump_flags & TDF_DETAILS))
3640 fprintf (dump_file, "failing at i = %d\n", (int)i);
3641 deleted = false;
3642 break;
3643 }
3644 }
3645 }
3646 if (deleted)
3647 {
3648 if (dbg_cnt (dse)
3649 && check_for_inc_dec_1 (insn_info))
3650 {
3651 delete_insn (insn_info->insn);
3652 insn_info->insn = NULL;
3653 globally_deleted++;
3654 }
3655 }
3656 }
3657 /* We do not want to process the local info if the insn was
3658 deleted. For instance, if the insn did a wild read, we
3659 no longer need to trash the info. */
3660 if (insn_info->insn
3661 && INSN_P (insn_info->insn)
3662 && (!deleted))
3663 {
3664 scan_stores_nospill (insn_info->store_rec, v, NULL);
3665 if (insn_info->wild_read)
3666 {
3667 if (dump_file && (dump_flags & TDF_DETAILS))
3668 fprintf (dump_file, "wild read\n");
3669 bitmap_clear (v);
3670 }
3671 else if (insn_info->read_rec
3672 || insn_info->non_frame_wild_read)
3673 {
3674 if (dump_file && (dump_flags & TDF_DETAILS))
3675 fprintf (dump_file,
3676 insn_info->non_frame_wild_read
3677 ? "non-frame wild read\n" : "regular read\n");
3678 scan_reads_nospill (insn_info, v, NULL);
3679 }
3680 }
3681
3682 insn_info = insn_info->prev_insn;
3683 }
3684 }
3685 }
3686
3687
3688 \f
3689 /*----------------------------------------------------------------------------
3690 Sixth step.
3691
3692 Delete stores made redundant by an earlier store of the same value,
3693 when that earlier store could not itself be eliminated.
3694 ----------------------------------------------------------------------------*/
3695
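/* A sketch of the pattern this step targets:

       *p = x;       <- could not be eliminated: *p may be read later
       ...           <- reads of *p are fine; no intervening kill
       *p = x;       <- a no-op: stores the value already present

   During the step 1 scan the later store's record was given a
   redundant_reason pointing at the earlier insn; here we simply delete
   the later store when that earlier insn survived.  */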
3696 static void
3697 dse_step6 (void)
3698 {
3699 basic_block bb;
3700
3701 FOR_ALL_BB_FN (bb, cfun)
3702 {
3703 bb_info_t bb_info = bb_table[bb->index];
3704 insn_info_t insn_info = bb_info->last_insn;
3705
3706 while (insn_info)
3707 {
3708 /* There may have been code deleted by the dce pass run before
3709 this phase. */
3710 if (insn_info->insn
3711 && INSN_P (insn_info->insn)
3712 && !insn_info->cannot_delete)
3713 {
3714 store_info_t s_info = insn_info->store_rec;
3715
3716 while (s_info && !s_info->is_set)
3717 s_info = s_info->next;
3718 if (s_info
3719 && s_info->redundant_reason
3720 && s_info->redundant_reason->insn
3721 && INSN_P (s_info->redundant_reason->insn))
3722 {
3723 rtx_insn *rinsn = s_info->redundant_reason->insn;
3724 if (dump_file && (dump_flags & TDF_DETAILS))
3725 fprintf (dump_file, "Locally deleting insn %d "
3726 "because insn %d stores the "
3727 "same value and couldn't be "
3728 "eliminated\n",
3729 INSN_UID (insn_info->insn),
3730 INSN_UID (rinsn));
3731 delete_dead_store_insn (insn_info);
3732 }
3733 }
3734 insn_info = insn_info->prev_insn;
3735 }
3736 }
3737 }
3738 \f
3739 /*----------------------------------------------------------------------------
3740 Seventh step.
3741
3742 Destroy everything left standing.
3743 ----------------------------------------------------------------------------*/
3744
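/* Release the bitmap obstacks, the per-block and per-group tables,
   the allocation pools, and the alias-analysis state used by the
   earlier steps.  */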
3745 static void
3746 dse_step7 (void)
3747 {
3748 bitmap_obstack_release (&dse_bitmap_obstack);
3749 obstack_free (&dse_obstack, NULL);
3750
3751 end_alias_analysis ();
3752 free (bb_table);
3753 delete rtx_group_table;
3754 rtx_group_table = NULL;
3755 rtx_group_vec.release ();
3756 BITMAP_FREE (all_blocks);
3757 BITMAP_FREE (scratch);
3758
3759 rtx_store_info_pool.release ();
3760 read_info_type::pool.release ();
3761 insn_info_type::pool.release ();
3762 dse_bb_info_type::pool.release ();
3763 group_info::pool.release ();
3764 deferred_change::pool.release ();
3765 }
3766
3767
3768 /* -------------------------------------------------------------------------
3769 DSE
3770 ------------------------------------------------------------------------- */
3771
3772 /* Callback shared by pass_rtl_dse1 and pass_rtl_dse2. */
3773
3774 static unsigned int
3775 rest_of_handle_dse (void)
3776 {
3777 df_set_flags (DF_DEFER_INSN_RESCAN);
3778
3779 /* Need the REG_DEAD / REG_UNUSED notes since we must track live
3780 hard registers in the forwards direction. */
3781 df_note_add_problem ();
3782 df_analyze ();
3783
3784 dse_step0 ();
3785 dse_step1 ();
3786 dse_step2_init ();
3787 if (dse_step2_nospill ())
3788 {
3789 df_set_flags (DF_LR_RUN_DCE);
3790 df_analyze ();
3791 if (dump_file && (dump_flags & TDF_DETAILS))
3792 fprintf (dump_file, "doing global processing\n");
3793 dse_step3 (false);
3794 dse_step4 ();
3795 dse_step5_nospill ();
3796 }
3797
3798 dse_step6 ();
3799 dse_step7 ();
3800
3801 if (dump_file)
3802 fprintf (dump_file, "dse: local deletions = %d, global deletions = %d, spill deletions = %d\n",
3803 locally_deleted, globally_deleted, spill_deleted);
3804
3805 /* DSE can eliminate potentially-trapping MEMs.
3806 Remove any EH edges associated with them. */
3807 if ((locally_deleted || globally_deleted)
3808 && cfun->can_throw_non_call_exceptions
3809 && purge_all_dead_edges ())
3810 cleanup_cfg (0);
3811
3812 return 0;
3813 }
3814
3815 namespace {
3816
3817 const pass_data pass_data_rtl_dse1 =
3818 {
3819 RTL_PASS, /* type */
3820 "dse1", /* name */
3821 OPTGROUP_NONE, /* optinfo_flags */
3822 TV_DSE1, /* tv_id */
3823 0, /* properties_required */
3824 0, /* properties_provided */
3825 0, /* properties_destroyed */
3826 0, /* todo_flags_start */
3827 TODO_df_finish, /* todo_flags_finish */
3828 };
3829
3830 class pass_rtl_dse1 : public rtl_opt_pass
3831 {
3832 public:
3833 pass_rtl_dse1 (gcc::context *ctxt)
3834 : rtl_opt_pass (pass_data_rtl_dse1, ctxt)
3835 {}
3836
3837 /* opt_pass methods: */
3838 virtual bool gate (function *)
3839 {
3840 return optimize > 0 && flag_dse && dbg_cnt (dse1);
3841 }
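
/* Besides this whole-pass gate (counter "dse1"; the second pass uses
   "dse2"), individual store deletions are guarded by the "dse" counter,
   so a suspect deletion can be bisected from the command line, e.g.
   -fdbg-cnt=dse:20 permits only the first 20 guarded deletions.  The
   counters are defined in dbgcnt.def.  */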
3842
3843 virtual unsigned int execute (function *) { return rest_of_handle_dse (); }
3844
3845 }; // class pass_rtl_dse1
3846
3847 } // anon namespace
3848
3849 rtl_opt_pass *
3850 make_pass_rtl_dse1 (gcc::context *ctxt)
3851 {
3852 return new pass_rtl_dse1 (ctxt);
3853 }
3854
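/* The second DSE pass is structurally identical; only the name, the
   timevar and the debug counter differ, and it runs later in the
   pipeline, after register allocation (see passes.def).  */
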
3855 namespace {
3856
3857 const pass_data pass_data_rtl_dse2 =
3858 {
3859 RTL_PASS, /* type */
3860 "dse2", /* name */
3861 OPTGROUP_NONE, /* optinfo_flags */
3862 TV_DSE2, /* tv_id */
3863 0, /* properties_required */
3864 0, /* properties_provided */
3865 0, /* properties_destroyed */
3866 0, /* todo_flags_start */
3867 TODO_df_finish, /* todo_flags_finish */
3868 };
3869
3870 class pass_rtl_dse2 : public rtl_opt_pass
3871 {
3872 public:
3873 pass_rtl_dse2 (gcc::context *ctxt)
3874 : rtl_opt_pass (pass_data_rtl_dse2, ctxt)
3875 {}
3876
3877 /* opt_pass methods: */
3878 virtual bool gate (function *)
3879 {
3880 return optimize > 0 && flag_dse && dbg_cnt (dse2);
3881 }
3882
3883 virtual unsigned int execute (function *) { return rest_of_handle_dse (); }
3884
3885 }; // class pass_rtl_dse2
3886
3887 } // anon namespace
3888
3889 rtl_opt_pass *
3890 make_pass_rtl_dse2 (gcc::context *ctxt)
3891 {
3892 return new pass_rtl_dse2 (ctxt);
3893 }