gcc/postreload-gcse.c
1 /* Post reload partially redundant load elimination
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "diagnostic-core.h"
25
26 #include "hash-table.h"
27 #include "rtl.h"
28 #include "hash-set.h"
29 #include "vec.h"
30 #include "input.h"
31 #include "alias.h"
32 #include "symtab.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "tm_p.h"
36 #include "regs.h"
37 #include "hard-reg-set.h"
38 #include "flags.h"
39 #include "insn-config.h"
40 #include "recog.h"
41 #include "predict.h"
42 #include "function.h"
43 #include "dominance.h"
44 #include "cfg.h"
45 #include "cfgrtl.h"
46 #include "basic-block.h"
47 #include "profile.h"
48 #include "hashtab.h"
49 #include "statistics.h"
50 #include "expmed.h"
51 #include "dojump.h"
52 #include "explow.h"
53 #include "calls.h"
54 #include "emit-rtl.h"
55 #include "varasm.h"
56 #include "stmt.h"
57 #include "expr.h"
58 #include "except.h"
59 #include "intl.h"
60 #include "obstack.h"
61 #include "params.h"
62 #include "target.h"
63 #include "tree-pass.h"
64 #include "dbgcnt.h"
65 #include "df.h"
66 #include "gcse-common.h"
67
68 /* The following code implements gcse after reload. The purpose of this
69 pass is to clean up redundant loads generated by reload and other
70 optimizations that come after gcse. It searches for simple inter-block
71 redundancies and tries to eliminate them by adding moves and loads
72 in cold places.
73
74 Perform partially redundant load elimination: try to eliminate redundant
75 loads created by the reload pass. We look for fully or partially
76 redundant loads fed by one or more loads/stores in predecessor BBs,
77 and try adding loads to make them fully redundant. We also check if
78 it's worth adding loads to be able to delete the redundant load.
79
80 Algorithm:
81 1. Build available expressions hash table:
82 For each load/store instruction, if the loaded/stored memory didn't
83 change until the end of the basic block add this memory expression to
84 the hash table.
85 2. Perform Redundancy elimination:
86 For each load instruction do the following:
87 perform partial redundancy elimination, check if it's worth adding
88 loads to make the load fully redundant. If so add loads and
89 register copies and delete the load.
90 3. Delete instructions made redundant in step 2.
91
92 Future enhancement:
93 If the loaded register is used/defined between load and some store,
94 look for some other free register between load and all its stores,
95 and replace the load with a copy from this register to the loaded
96 register.
97 */
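/* Illustrative sketch, not part of the pass (variable names, registers and
   the CFG shape below are hypothetical): at the source level the
   transformation looks roughly like this, assuming the profitability
   checks pass.

       Before:                            After:
         if (cond)                          if (cond)
           *p = x;   // store, x in rX       { *p = x; r1 = rX; }  // copy on edge
         else                               else
           f ();     // may clobber *p        { f (); r1 = *p; }   // load on edge
         r1 = *p;    // partially           // original load deleted
                     // redundant load
         use (r1);                          use (r1);

   The load of *p in the join block is available from the "then" arm (the
   store leaves the value in a register) but not from the "else" arm.
   Inserting a copy on the "then" edge and a load on the "else" edge makes
   the original load fully redundant, so it can be deleted.  */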
98 \f
99
100 /* Keep statistics of this pass. */
101 static struct
102 {
103 int moves_inserted;
104 int copies_inserted;
105 int insns_deleted;
106 } stats;
107
108 /* We need to keep a hash table of expressions. The table entries are of
109 type 'struct expr', and for each expression there is a singly linked
110 list of occurrences. */
111
112 /* Expression elements in the hash table. */
113 struct expr
114 {
115 /* The expression (SET_SRC for expressions, PATTERN for assignments). */
116 rtx expr;
117
118 /* The cached hash value for this entry. */
119 hashval_t hash;
120
121 /* Index in the transparent bitmaps. */
122 unsigned int bitmap_index;
123
124 /* List of available occurrences in basic blocks in the function. */
125 struct occr *avail_occr;
126 };
127
128 /* Hashtable helpers. */
129
130 struct expr_hasher : typed_noop_remove <expr>
131 {
132 typedef expr *value_type;
133 typedef expr *compare_type;
134 static inline hashval_t hash (const expr *);
135 static inline bool equal (const expr *, const expr *);
136 };
137
138
139 /* Hash expression X.
140 DO_NOT_RECORD_P is a boolean indicating if a volatile operand is found
141 or if the expression contains something we don't want to insert in the
142 table. */
143
144 static hashval_t
145 hash_expr (rtx x, int *do_not_record_p)
146 {
147 *do_not_record_p = 0;
148 return hash_rtx (x, GET_MODE (x), do_not_record_p,
149 NULL, /*have_reg_qty=*/false);
150 }
151
152 /* Callback for hashtab.
153 Return the hash value for expression EXP. We don't actually hash
154 here, we just return the cached hash value. */
155
156 inline hashval_t
157 expr_hasher::hash (const expr *exp)
158 {
159 return exp->hash;
160 }
161
162 /* Callback for hashtab.
163 Return nonzero if exp1 is equivalent to exp2. */
164
165 inline bool
166 expr_hasher::equal (const expr *exp1, const expr *exp2)
167 {
168 int equiv_p = exp_equiv_p (exp1->expr, exp2->expr, 0, true);
169
170 gcc_assert (!equiv_p || exp1->hash == exp2->hash);
171 return equiv_p;
172 }
173
174 /* The table itself. */
175 static hash_table<expr_hasher> *expr_table;
176 \f
177
178 static struct obstack expr_obstack;
179
180 /* Occurrence of an expression.
181 There is at most one occurrence per basic block. If a pattern appears
182 more than once, the last appearance is used. */
183
184 struct occr
185 {
186 /* Next occurrence of this expression. */
187 struct occr *next;
188 /* The insn that computes the expression. */
189 rtx_insn *insn;
190 /* Nonzero if this [anticipatable] occurrence has been deleted. */
191 char deleted_p;
192 };
193
194 static struct obstack occr_obstack;
195
196 /* The following structure holds the information about the occurrences of
197 the redundant instructions. */
198 struct unoccr
199 {
200 struct unoccr *next;
201 edge pred;
202 rtx_insn *insn;
203 };
204
205 static struct obstack unoccr_obstack;
206
207 /* Array indexed by hard register number. Each element is the CUID of the
208 insn that last set that register, counted since the start of the current
209 basic block.
210
211 This array is used during the building of the hash table (step 1) to
212 determine if a reg is killed before the end of a basic block.
213
214 It is also used when eliminating partial redundancies (step 2) to see
215 if a reg was modified since the start of a basic block. */
216 static int *reg_avail_info;
217
218 /* A list of insns that may modify memory within the current basic block. */
219 struct modifies_mem
220 {
221 rtx_insn *insn;
222 struct modifies_mem *next;
223 };
224 static struct modifies_mem *modifies_mem_list;
225
226 /* The modifies_mem structs also go on an obstack, but this obstack is
227 freed each time after completing the analysis or transformations on
228 a basic block. So we allocate a dummy modifies_mem_obstack_bottom
229 object on the obstack to keep track of the bottom of the obstack. */
230 static struct obstack modifies_mem_obstack;
231 static struct modifies_mem *modifies_mem_obstack_bottom;
232
233 /* Mapping of insn UIDs to CUIDs.
234 CUIDs are like UIDs except they increase monotonically in each basic
235 block, have no gaps, and only apply to real insns. */
236 static int *uid_cuid;
237 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
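/* For illustration (hypothetical UIDs, assuming this is the first block
   scanned): a NOTE with UID 12 followed by real insns with UIDs 15 and 23
   gets uid_cuid[12] = 1, uid_cuid[15] = 1 and uid_cuid[23] = 2 -- the NOTE
   inherits the CUID of the next real insn, and comparing CUIDs of real
   insns gives their order within the block regardless of how UIDs were
   assigned.  */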
238
239 /* Bitmap of blocks which have memory stores. */
240 static bitmap modify_mem_list_set;
241
242 /* Bitmap of blocks which have calls. */
243 static bitmap blocks_with_calls;
244
245 /* Vector indexed by block # with a list of all the insns that
246 modify memory within the block. */
247 static vec<rtx_insn *> *modify_mem_list;
248
249 /* Vector indexed by block # with a canonicalized list of insns
250 that modify memory in the block. */
251 static vec<modify_pair> *canon_modify_mem_list;
252
253 /* Vector of simple bitmaps indexed by block number. Each component sbitmap
254 indicates which expressions are transparent through the block. */
255 static sbitmap *transp;
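/* For example (hypothetical indices): if the expression with bitmap_index 3
   is (mem (reg fp)) and block 7 contains no store that may alias it, no
   call, and no insn that sets the address register, then bit 3 of transp[7]
   stays set: a value of that memory available on entry to block 7 is still
   available on exit from it.  */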
256 \f
257
258 /* Helpers for memory allocation/freeing. */
259 static void alloc_mem (void);
260 static void free_mem (void);
261
262 /* Support for hash table construction and transformations. */
263 static bool oprs_unchanged_p (rtx, rtx_insn *, bool);
264 static void record_last_reg_set_info (rtx_insn *, rtx);
265 static void record_last_reg_set_info_regno (rtx_insn *, int);
266 static void record_last_mem_set_info (rtx_insn *);
267 static void record_last_set_info (rtx, const_rtx, void *);
268 static void record_opr_changes (rtx_insn *);
269
270 static void find_mem_conflicts (rtx, const_rtx, void *);
271 static int load_killed_in_block_p (int, rtx, bool);
272 static void reset_opr_set_tables (void);
273
274 /* Hash table support. */
275 static hashval_t hash_expr (rtx, int *);
276 static void insert_expr_in_table (rtx, rtx_insn *);
277 static struct expr *lookup_expr_in_table (rtx);
278 static void dump_hash_table (FILE *);
279
280 /* Helpers for eliminate_partially_redundant_load. */
281 static bool reg_killed_on_edge (rtx, edge);
282 static bool reg_used_on_edge (rtx, edge);
283
284 static rtx get_avail_load_store_reg (rtx_insn *);
285
286 static bool bb_has_well_behaved_predecessors (basic_block);
287 static struct occr* get_bb_avail_insn (basic_block, struct occr *, int);
288 static void hash_scan_set (rtx_insn *);
289 static void compute_hash_table (void);
290
291 /* The workhorses of this pass. */
292 static void eliminate_partially_redundant_load (basic_block,
293 rtx_insn *,
294 struct expr *);
295 static void eliminate_partially_redundant_loads (void);
296 \f
297
298 /* Allocate memory for the CUID mapping array and register/memory
299 tracking tables. */
300
301 static void
302 alloc_mem (void)
303 {
304 int i;
305 basic_block bb;
306 rtx_insn *insn;
307
308 /* Find the largest UID and create a mapping from UIDs to CUIDs. */
309 uid_cuid = XCNEWVEC (int, get_max_uid () + 1);
310 i = 1;
311 FOR_EACH_BB_FN (bb, cfun)
312 FOR_BB_INSNS (bb, insn)
313 {
314 if (INSN_P (insn))
315 uid_cuid[INSN_UID (insn)] = i++;
316 else
317 uid_cuid[INSN_UID (insn)] = i;
318 }
319
320 /* Allocate the available expressions hash table. We don't want to
321 make the hash table too small, but unnecessarily making it too large
322 also doesn't help. The i/4 is a gcse.c relic, and seems like a
323 reasonable choice. */
324 expr_table = new hash_table<expr_hasher> (MAX (i / 4, 13));
325
326 /* We allocate everything on obstacks because we often can roll back
327 the whole obstack to some point. Freeing obstacks is very fast. */
328 gcc_obstack_init (&expr_obstack);
329 gcc_obstack_init (&occr_obstack);
330 gcc_obstack_init (&unoccr_obstack);
331 gcc_obstack_init (&modifies_mem_obstack);
332
333 /* Working array used to track the last set for each register
334 in the current block. */
335 reg_avail_info = (int *) xmalloc (FIRST_PSEUDO_REGISTER * sizeof (int));
336
337 /* Put a dummy modifies_mem object on the modifies_mem_obstack, so we
338 can roll it back in reset_opr_set_tables. */
339 modifies_mem_obstack_bottom =
340 (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
341 sizeof (struct modifies_mem));
342
343 blocks_with_calls = BITMAP_ALLOC (NULL);
344 modify_mem_list_set = BITMAP_ALLOC (NULL);
345
346 modify_mem_list = (vec_rtx_heap *) xcalloc (last_basic_block_for_fn (cfun),
347 sizeof (vec_rtx_heap));
348 canon_modify_mem_list
349 = (vec_modify_pair_heap *) xcalloc (last_basic_block_for_fn (cfun),
350 sizeof (vec_modify_pair_heap));
351 }
352
353 /* Free memory allocated by alloc_mem. */
354
355 static void
356 free_mem (void)
357 {
358 free (uid_cuid);
359
360 delete expr_table;
361 expr_table = NULL;
362
363 obstack_free (&expr_obstack, NULL);
364 obstack_free (&occr_obstack, NULL);
365 obstack_free (&unoccr_obstack, NULL);
366 obstack_free (&modifies_mem_obstack, NULL);
367
368 unsigned i;
369 bitmap_iterator bi;
370 EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
371 {
372 modify_mem_list[i].release ();
373 canon_modify_mem_list[i].release ();
374 }
375
376 BITMAP_FREE (blocks_with_calls);
377 BITMAP_FREE (modify_mem_list_set);
378 free (reg_avail_info);
379 }
380 \f
381
382 /* Insert expression X, which occurs in INSN, into the expression hash table.
383 If it is already present, record it as the last occurrence in INSN's
384 basic block. */
385
386 static void
387 insert_expr_in_table (rtx x, rtx_insn *insn)
388 {
389 int do_not_record_p;
390 hashval_t hash;
391 struct expr *cur_expr, **slot;
392 struct occr *avail_occr, *last_occr = NULL;
393
394 hash = hash_expr (x, &do_not_record_p);
395
396 /* Do not insert expression in the table if it contains volatile operands,
397 or if hash_expr determines the expression is something we don't want
398 to or can't handle. */
399 if (do_not_record_p)
400 return;
401
402 /* We anticipate that redundant expressions are rare, so for convenience
403 allocate a new hash table element here already and set its fields.
404 If we don't do this, we need a hack with a static struct expr. Anyway,
405 obstack_free is really fast and one more obstack_alloc doesn't hurt if
406 we're going to see more expressions later on. */
407 cur_expr = (struct expr *) obstack_alloc (&expr_obstack,
408 sizeof (struct expr));
409 cur_expr->expr = x;
410 cur_expr->hash = hash;
411 cur_expr->avail_occr = NULL;
412
413 slot = expr_table->find_slot_with_hash (cur_expr, hash, INSERT);
414
415 if (! (*slot))
416 {
417 /* The expression isn't found, so insert it. */
418 *slot = cur_expr;
419
420 /* Anytime we add an entry to the table, record the index
421 of the new entry. The bitmap index starts counting
422 at zero. */
423 cur_expr->bitmap_index = expr_table->elements () - 1;
424 }
425 else
426 {
427 /* The expression is already in the table, so roll back the
428 obstack and use the existing table entry. */
429 obstack_free (&expr_obstack, cur_expr);
430 cur_expr = *slot;
431 }
432
433 /* Search for another occurrence in the same basic block. */
434 avail_occr = cur_expr->avail_occr;
435 while (avail_occr
436 && BLOCK_FOR_INSN (avail_occr->insn) != BLOCK_FOR_INSN (insn))
437 {
438 /* If an occurrence isn't found, save a pointer to the end of
439 the list. */
440 last_occr = avail_occr;
441 avail_occr = avail_occr->next;
442 }
443
444 if (avail_occr)
445 /* Found another instance of the expression in the same basic block.
446 Prefer this occurrence to the currently recorded one. We want
447 the last one in the block and the block is scanned from start
448 to end. */
449 avail_occr->insn = insn;
450 else
451 {
452 /* First occurrence of this expression in this basic block. */
453 avail_occr = (struct occr *) obstack_alloc (&occr_obstack,
454 sizeof (struct occr));
455
456 /* First occurrence of this expression in any block? */
457 if (cur_expr->avail_occr == NULL)
458 cur_expr->avail_occr = avail_occr;
459 else
460 last_occr->next = avail_occr;
461
462 avail_occr->insn = insn;
463 avail_occr->next = NULL;
464 avail_occr->deleted_p = 0;
465 }
466 }
467 \f
468
469 /* Lookup pattern PAT in the expression hash table.
470 The result is a pointer to the table entry, or NULL if not found. */
471
472 static struct expr *
473 lookup_expr_in_table (rtx pat)
474 {
475 int do_not_record_p;
476 struct expr **slot, *tmp_expr;
477 hashval_t hash = hash_expr (pat, &do_not_record_p);
478
479 if (do_not_record_p)
480 return NULL;
481
482 tmp_expr = (struct expr *) obstack_alloc (&expr_obstack,
483 sizeof (struct expr));
484 tmp_expr->expr = pat;
485 tmp_expr->hash = hash;
486 tmp_expr->avail_occr = NULL;
487
488 slot = expr_table->find_slot_with_hash (tmp_expr, hash, INSERT);
489 obstack_free (&expr_obstack, tmp_expr);
490
491 if (!slot)
492 return NULL;
493 else
494 return (*slot);
495 }
496 \f
497
498 /* Dump all expressions and occurrences that are currently in the
499 expression hash table to FILE. */
500
501 /* This helper is called via htab_traverse. */
502 int
503 dump_expr_hash_table_entry (expr **slot, FILE *file)
504 {
505 struct expr *exprs = *slot;
506 struct occr *occr;
507
508 fprintf (file, "expr: ");
509 print_rtl (file, exprs->expr);
510 fprintf (file,"\nhashcode: %u\n", exprs->hash);
511 fprintf (file,"list of occurrences:\n");
512 occr = exprs->avail_occr;
513 while (occr)
514 {
515 rtx_insn *insn = occr->insn;
516 print_rtl_single (file, insn);
517 fprintf (file, "\n");
518 occr = occr->next;
519 }
520 fprintf (file, "\n");
521 return 1;
522 }
523
524 static void
525 dump_hash_table (FILE *file)
526 {
527 fprintf (file, "\n\nexpression hash table\n");
528 fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
529 (long) expr_table->size (),
530 (long) expr_table->elements (),
531 expr_table->collisions ());
532 if (expr_table->elements () > 0)
533 {
534 fprintf (file, "\n\ntable entries:\n");
535 expr_table->traverse <FILE *, dump_expr_hash_table_entry> (file);
536 }
537 fprintf (file, "\n");
538 }
539 \f
540 /* Return true if register X is recorded as being set by an instruction
541 whose CUID is greater than the one given. */
542
543 static bool
544 reg_changed_after_insn_p (rtx x, int cuid)
545 {
546 unsigned int regno, end_regno;
547
548 regno = REGNO (x);
549 end_regno = END_REGNO (x);
550 do
551 if (reg_avail_info[regno] > cuid)
552 return true;
553 while (++regno < end_regno);
554 return false;
555 }
556
557 /* Return nonzero if the operands of expression X are unchanged
558 1) from the start of INSN's basic block up to but not including INSN
559 if AFTER_INSN is false, or
560 2) from INSN to the end of INSN's basic block if AFTER_INSN is true. */
561
562 static bool
563 oprs_unchanged_p (rtx x, rtx_insn *insn, bool after_insn)
564 {
565 int i, j;
566 enum rtx_code code;
567 const char *fmt;
568
569 if (x == 0)
570 return 1;
571
572 code = GET_CODE (x);
573 switch (code)
574 {
575 case REG:
576 /* We are called after register allocation. */
577 gcc_assert (REGNO (x) < FIRST_PSEUDO_REGISTER);
578 if (after_insn)
579 return !reg_changed_after_insn_p (x, INSN_CUID (insn) - 1);
580 else
581 return !reg_changed_after_insn_p (x, 0);
582
583 case MEM:
584 if (load_killed_in_block_p (INSN_CUID (insn), x, after_insn))
585 return 0;
586 else
587 return oprs_unchanged_p (XEXP (x, 0), insn, after_insn);
588
589 case PC:
590 case CC0: /*FIXME*/
591 case CONST:
592 CASE_CONST_ANY:
593 case SYMBOL_REF:
594 case LABEL_REF:
595 case ADDR_VEC:
596 case ADDR_DIFF_VEC:
597 return 1;
598
599 case PRE_DEC:
600 case PRE_INC:
601 case POST_DEC:
602 case POST_INC:
603 case PRE_MODIFY:
604 case POST_MODIFY:
605 if (after_insn)
606 return 0;
607 break;
608
609 default:
610 break;
611 }
612
613 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
614 {
615 if (fmt[i] == 'e')
616 {
617 if (! oprs_unchanged_p (XEXP (x, i), insn, after_insn))
618 return 0;
619 }
620 else if (fmt[i] == 'E')
621 for (j = 0; j < XVECLEN (x, i); j++)
622 if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, after_insn))
623 return 0;
624 }
625
626 return 1;
627 }
628 \f
629
630 /* Used for communication between find_mem_conflicts and
631 load_killed_in_block_p. Nonzero if find_mem_conflicts finds a
632 conflict between two memory references.
633 This is a bit of a hack to work around the limitations of note_stores. */
634 static int mems_conflict_p;
635
636 /* DEST is the output of an instruction. If it is a memory reference, and
637 possibly conflicts with the load found in DATA, then set mems_conflict_p
638 to a nonzero value. */
639
640 static void
641 find_mem_conflicts (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
642 void *data)
643 {
644 rtx mem_op = (rtx) data;
645
646 while (GET_CODE (dest) == SUBREG
647 || GET_CODE (dest) == ZERO_EXTRACT
648 || GET_CODE (dest) == STRICT_LOW_PART)
649 dest = XEXP (dest, 0);
650
651 /* If DEST is not a MEM, then it will not conflict with the load. Note
652 that function calls are assumed to clobber memory, but are handled
653 elsewhere. */
654 if (! MEM_P (dest))
655 return;
656
657 if (true_dependence (dest, GET_MODE (dest), mem_op))
658 mems_conflict_p = 1;
659 }
660 \f
661
662 /* Return nonzero if the expression in X (a memory reference) is killed
663 in the current basic block before (if AFTER_INSN is false) or after
664 (if AFTER_INSN is true) the insn with the CUID in UID_LIMIT.
665
666 This function assumes that the modifies_mem table is flushed when
667 the hash table construction or redundancy elimination phases start
668 processing a new basic block. */
669
670 static int
671 load_killed_in_block_p (int uid_limit, rtx x, bool after_insn)
672 {
673 struct modifies_mem *list_entry = modifies_mem_list;
674
675 while (list_entry)
676 {
677 rtx_insn *setter = list_entry->insn;
678
679 /* Ignore entries in the list that do not apply. */
680 if ((after_insn
681 && INSN_CUID (setter) < uid_limit)
682 || (! after_insn
683 && INSN_CUID (setter) > uid_limit))
684 {
685 list_entry = list_entry->next;
686 continue;
687 }
688
689 /* If SETTER is a call everything is clobbered. Note that calls
690 to pure functions are never put on the list, so we need not
691 worry about them. */
692 if (CALL_P (setter))
693 return 1;
694
695 /* SETTER must be an insn of some kind that sets memory. Call
696 note_stores to examine each hunk of memory that is modified.
697 It will set mems_conflict_p to nonzero if there may be a
698 conflict between X and SETTER. */
699 mems_conflict_p = 0;
700 note_stores (PATTERN (setter), find_mem_conflicts, x);
701 if (mems_conflict_p)
702 return 1;
703
704 list_entry = list_entry->next;
705 }
706 return 0;
707 }
708 \f
709
710 /* Record last-set information for REG in INSN. */
711
712 static inline void
713 record_last_reg_set_info (rtx_insn *insn, rtx reg)
714 {
715 unsigned int regno, end_regno;
716
717 regno = REGNO (reg);
718 end_regno = END_REGNO (reg);
719 do
720 reg_avail_info[regno] = INSN_CUID (insn);
721 while (++regno < end_regno);
722 }
723
724 static inline void
725 record_last_reg_set_info_regno (rtx_insn *insn, int regno)
726 {
727 reg_avail_info[regno] = INSN_CUID (insn);
728 }
729
730
731 /* Record memory modification information for INSN. We do not actually care
732 about the memory location(s) that are set, or even how they are set (consider
733 a CALL_INSN). We merely need to record which insns modify memory. */
734
735 static void
736 record_last_mem_set_info (rtx_insn *insn)
737 {
738 struct modifies_mem *list_entry;
739
740 list_entry = (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
741 sizeof (struct modifies_mem));
742 list_entry->insn = insn;
743 list_entry->next = modifies_mem_list;
744 modifies_mem_list = list_entry;
745
746 record_last_mem_set_info_common (insn, modify_mem_list,
747 canon_modify_mem_list,
748 modify_mem_list_set,
749 blocks_with_calls);
750 }
751
752 /* Called from compute_hash_table via note_stores to handle one
753 SET or CLOBBER in an insn. DATA is really the instruction in which
754 the SET is taking place. */
755
756 static void
757 record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
758 {
759 rtx_insn *last_set_insn = (rtx_insn *) data;
760
761 if (GET_CODE (dest) == SUBREG)
762 dest = SUBREG_REG (dest);
763
764 if (REG_P (dest))
765 record_last_reg_set_info (last_set_insn, dest);
766 else if (MEM_P (dest))
767 {
768 /* Ignore pushes, they don't clobber memory. They may still
769 clobber the stack pointer though. Some targets do argument
770 pushes without adding REG_INC notes. See e.g. PR25196,
771 where a pushsi2 on i386 doesn't have REG_INC notes. Note
772 such changes here too. */
773 if (! push_operand (dest, GET_MODE (dest)))
774 record_last_mem_set_info (last_set_insn);
775 else
776 record_last_reg_set_info_regno (last_set_insn, STACK_POINTER_REGNUM);
777 }
778 }
779
780
781 /* Reset tables used to keep track of what's still available since the
782 start of the block. */
783
784 static void
785 reset_opr_set_tables (void)
786 {
787 memset (reg_avail_info, 0, FIRST_PSEUDO_REGISTER * sizeof (int));
788 obstack_free (&modifies_mem_obstack, modifies_mem_obstack_bottom);
789 modifies_mem_list = NULL;
790 }
791 \f
792
793 /* Record things set by INSN.
794 This data is used by oprs_unchanged_p. */
795
796 static void
797 record_opr_changes (rtx_insn *insn)
798 {
799 rtx note;
800
801 /* Find all stores and record them. */
802 note_stores (PATTERN (insn), record_last_set_info, insn);
803
804 /* Also record autoincremented REGs for this insn as changed. */
805 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
806 if (REG_NOTE_KIND (note) == REG_INC)
807 record_last_reg_set_info (insn, XEXP (note, 0));
808
809 /* Finally, if this is a call, record all call clobbers. */
810 if (CALL_P (insn))
811 {
812 unsigned int regno;
813 rtx link, x;
814 hard_reg_set_iterator hrsi;
815 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, regno, hrsi)
816 record_last_reg_set_info_regno (insn, regno);
817
818 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
819 if (GET_CODE (XEXP (link, 0)) == CLOBBER)
820 {
821 x = XEXP (XEXP (link, 0), 0);
822 if (REG_P (x))
823 {
824 gcc_assert (HARD_REGISTER_P (x));
825 record_last_reg_set_info (insn, x);
826 }
827 }
828
829 if (! RTL_CONST_OR_PURE_CALL_P (insn))
830 record_last_mem_set_info (insn);
831 }
832 }
833 \f
834
835 /* Scan the pattern of INSN and add an entry to the hash TABLE.
836 After reload we are interested in loads/stores only. */
837
838 static void
839 hash_scan_set (rtx_insn *insn)
840 {
841 rtx pat = PATTERN (insn);
842 rtx src = SET_SRC (pat);
843 rtx dest = SET_DEST (pat);
844
845 /* We are only interested in loads and stores. */
846 if (! MEM_P (src) && ! MEM_P (dest))
847 return;
848
849 /* Don't mess with jumps and nops. */
850 if (JUMP_P (insn) || set_noop_p (pat))
851 return;
852
853 if (REG_P (dest))
854 {
855 if (/* Don't CSE something if we can't do a reg/reg copy. */
856 can_copy_p (GET_MODE (dest))
857 /* Is SET_SRC something we want to gcse? */
858 && general_operand (src, GET_MODE (src))
859 #ifdef STACK_REGS
860 /* Never consider insns touching the register stack. It may
861 create situations that reg-stack cannot handle (e.g. a stack
862 register live across an abnormal edge). */
863 && (REGNO (dest) < FIRST_STACK_REG || REGNO (dest) > LAST_STACK_REG)
864 #endif
865 /* An expression is not available if its operands are
866 subsequently modified, including this insn. */
867 && oprs_unchanged_p (src, insn, true))
868 {
869 insert_expr_in_table (src, insn);
870 }
871 }
872 else if (REG_P (src))
873 {
874 /* A store: record the stored-to memory expression if it remains available. */
875 if (/* Don't CSE something if we can't do a reg/reg copy. */
876 can_copy_p (GET_MODE (src))
877 /* Is SET_DEST something we want to gcse? */
878 && general_operand (dest, GET_MODE (dest))
879 #ifdef STACK_REGS
880 /* As above for STACK_REGS. */
881 && (REGNO (src) < FIRST_STACK_REG || REGNO (src) > LAST_STACK_REG)
882 #endif
883 && ! (flag_float_store && FLOAT_MODE_P (GET_MODE (dest)))
884 /* Check if the memory expression is killed after insn. */
885 && ! load_killed_in_block_p (INSN_CUID (insn) + 1, dest, true)
886 && oprs_unchanged_p (XEXP (dest, 0), insn, true))
887 {
888 insert_expr_in_table (dest, insn);
889 }
890 }
891 }
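/* Illustration (hypothetical RTL): a load such as
     (set (reg:SI 0) (mem:SI (reg:SI 6)))
   records the MEM source in the table when the memory and its address are
   unchanged through the rest of the block; a store such as
     (set (mem:SI (reg:SI 6)) (reg:SI 0))
   records the MEM destination, since the source register then carries the
   memory's value to the end of the block.  */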
892 \f
893
894 /* Create hash table of memory expressions available at end of basic
895 blocks. Basically you should think of this hash table as the
896 representation of AVAIL_OUT. This is the set of expressions that
897 is generated in a basic block and not killed before the end of the
898 same basic block. Notice that this is really a local computation. */
899
900 static void
901 compute_hash_table (void)
902 {
903 basic_block bb;
904
905 FOR_EACH_BB_FN (bb, cfun)
906 {
907 rtx_insn *insn;
908
909 /* First pass over the instructions records information used to
910 determine when registers and memory are last set.
911 Since we compute a "local" AVAIL_OUT, reset the tables that
912 help us keep track of what has been modified since the start
913 of the block. */
914 reset_opr_set_tables ();
915 FOR_BB_INSNS (bb, insn)
916 {
917 if (INSN_P (insn))
918 record_opr_changes (insn);
919 }
920
921 /* The next pass actually builds the hash table. */
922 FOR_BB_INSNS (bb, insn)
923 if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SET)
924 hash_scan_set (insn);
925 }
926 }
927 \f
928
929 /* Check if register REG is killed in any insn waiting to be inserted on
930 edge E. This function is required to check that our data flow analysis
931 is still valid prior to commit_edge_insertions. */
932
933 static bool
934 reg_killed_on_edge (rtx reg, edge e)
935 {
936 rtx_insn *insn;
937
938 for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
939 if (INSN_P (insn) && reg_set_p (reg, insn))
940 return true;
941
942 return false;
943 }
944
945 /* Similar to above - check if register REG is used in any insn waiting
946 to be inserted on edge E.
947 Assumes no such insn can be a CALL_INSN; if that ever changes, call
948 reg_used_between_p with PREV(insn), NEXT(insn) instead of reg_overlap_mentioned_p. */
949
950 static bool
951 reg_used_on_edge (rtx reg, edge e)
952 {
953 rtx_insn *insn;
954
955 for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
956 if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
957 return true;
958
959 return false;
960 }
961 \f
962 /* Return the loaded/stored register of a load/store instruction. */
963
964 static rtx
965 get_avail_load_store_reg (rtx_insn *insn)
966 {
967 if (REG_P (SET_DEST (PATTERN (insn))))
968 /* A load. */
969 return SET_DEST (PATTERN (insn));
970 else
971 {
972 /* A store. */
973 gcc_assert (REG_P (SET_SRC (PATTERN (insn))));
974 return SET_SRC (PATTERN (insn));
975 }
976 }
977
978 /* Return nonzero if the predecessors of BB are "well behaved". */
979
980 static bool
981 bb_has_well_behaved_predecessors (basic_block bb)
982 {
983 edge pred;
984 edge_iterator ei;
985
986 if (EDGE_COUNT (bb->preds) == 0)
987 return false;
988
989 FOR_EACH_EDGE (pred, ei, bb->preds)
990 {
991 if ((pred->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (pred))
992 return false;
993
994 if ((pred->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
995 return false;
996
997 if (tablejump_p (BB_END (pred->src), NULL, NULL))
998 return false;
999 }
1000 return true;
1001 }
1002
1003
1004 /* Search for an occurrence of the expression in BB. */
1005
1006 static struct occr*
1007 get_bb_avail_insn (basic_block bb, struct occr *orig_occr, int bitmap_index)
1008 {
1009 struct occr *occr = orig_occr;
1010
1011 for (; occr != NULL; occr = occr->next)
1012 if (BLOCK_FOR_INSN (occr->insn) == bb)
1013 return occr;
1014
1015 /* If we could not find an occurrence in BB, see if BB
1016 has a single predecessor with an occurrence that is
1017 transparent through BB. */
1018 if (single_pred_p (bb)
1019 && bitmap_bit_p (transp[bb->index], bitmap_index)
1020 && (occr = get_bb_avail_insn (single_pred (bb), orig_occr, bitmap_index)))
1021 {
1022 rtx avail_reg = get_avail_load_store_reg (occr->insn);
1023 if (!reg_set_between_p (avail_reg,
1024 PREV_INSN (BB_HEAD (bb)),
1025 NEXT_INSN (BB_END (bb)))
1026 && !reg_killed_on_edge (avail_reg, single_pred_edge (bb)))
1027 return occr;
1028 }
1029
1030 return NULL;
1031 }
1032
1033
1034 /* This helper is called via htab_traverse. */
1035 int
1036 compute_expr_transp (expr **slot, FILE *dump_file ATTRIBUTE_UNUSED)
1037 {
1038 struct expr *expr = *slot;
1039
1040 compute_transp (expr->expr, expr->bitmap_index, transp,
1041 blocks_with_calls, modify_mem_list_set,
1042 canon_modify_mem_list);
1043 return 1;
1044 }
1045
1046 /* This handles the case where several stores feed a partially redundant
1047 load. It checks if the redundancy elimination is possible and if it's
1048 worth it.
1049
1050 Redundancy elimination is possible if,
1051 1) None of the operands of an insn have been modified since the start
1052 of the current basic block.
1053 2) In any predecessor of the current basic block, the same expression
1054 is generated.
1055
1056 See the function body for the heuristics that determine if eliminating
1057 a redundancy is also worth doing, assuming it is possible. */
1058
1059 static void
1060 eliminate_partially_redundant_load (basic_block bb, rtx_insn *insn,
1061 struct expr *expr)
1062 {
1063 edge pred;
1064 rtx_insn *avail_insn = NULL;
1065 rtx avail_reg;
1066 rtx dest, pat;
1067 struct occr *a_occr;
1068 struct unoccr *occr, *avail_occrs = NULL;
1069 struct unoccr *unoccr, *unavail_occrs = NULL, *rollback_unoccr = NULL;
1070 int npred_ok = 0;
1071 gcov_type ok_count = 0; /* Redundant load execution count. */
1072 gcov_type critical_count = 0; /* Execution count of critical edges. */
1073 edge_iterator ei;
1074 bool critical_edge_split = false;
1075
1076 /* The execution count of the loads to be added to make the
1077 load fully redundant. */
1078 gcov_type not_ok_count = 0;
1079 basic_block pred_bb;
1080
1081 pat = PATTERN (insn);
1082 dest = SET_DEST (pat);
1083
1084 /* Check that the loaded register is not used, set, or killed from the
1085 beginning of the block. */
1086 if (reg_changed_after_insn_p (dest, 0)
1087 || reg_used_between_p (dest, PREV_INSN (BB_HEAD (bb)), insn))
1088 return;
1089
1090 /* Check potential for replacing load with copy for predecessors. */
1091 FOR_EACH_EDGE (pred, ei, bb->preds)
1092 {
1093 rtx_insn *next_pred_bb_end;
1094
1095 avail_insn = NULL;
1096 avail_reg = NULL_RTX;
1097 pred_bb = pred->src;
1098 for (a_occr = get_bb_avail_insn (pred_bb,
1099 expr->avail_occr,
1100 expr->bitmap_index);
1101 a_occr;
1102 a_occr = get_bb_avail_insn (pred_bb,
1103 a_occr->next,
1104 expr->bitmap_index))
1105 {
1106 /* Check if the loaded register is not used. */
1107 avail_insn = a_occr->insn;
1108 avail_reg = get_avail_load_store_reg (avail_insn);
1109 gcc_assert (avail_reg);
1110
1111 /* Make sure we can generate a move from register avail_reg to
1112 dest. */
1113 rtx_insn *move = gen_move_insn (copy_rtx (dest),
1114 copy_rtx (avail_reg));
1115 extract_insn (move);
1116 if (! constrain_operands (1, get_preferred_alternatives (insn,
1117 pred_bb))
1118 || reg_killed_on_edge (avail_reg, pred)
1119 || reg_used_on_edge (dest, pred))
1120 {
1121 avail_insn = NULL;
1122 continue;
1123 }
1124 next_pred_bb_end = NEXT_INSN (BB_END (BLOCK_FOR_INSN (avail_insn)));
1125 if (!reg_set_between_p (avail_reg, avail_insn, next_pred_bb_end))
1126 /* AVAIL_INSN remains non-null. */
1127 break;
1128 else
1129 avail_insn = NULL;
1130 }
1131
1132 if (EDGE_CRITICAL_P (pred))
1133 critical_count += pred->count;
1134
1135 if (avail_insn != NULL_RTX)
1136 {
1137 npred_ok++;
1138 ok_count += pred->count;
1139 if (! set_noop_p (PATTERN (gen_move_insn (copy_rtx (dest),
1140 copy_rtx (avail_reg)))))
1141 {
1142 /* Check if there is going to be a split. */
1143 if (EDGE_CRITICAL_P (pred))
1144 critical_edge_split = true;
1145 }
1146 else /* It's a dead move, no need to generate it. */
1147 continue;
1148 occr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
1149 sizeof (struct unoccr));
1150 occr->insn = avail_insn;
1151 occr->pred = pred;
1152 occr->next = avail_occrs;
1153 avail_occrs = occr;
1154 if (! rollback_unoccr)
1155 rollback_unoccr = occr;
1156 }
1157 else
1158 {
1159 /* Adding a load on a critical edge will cause a split. */
1160 if (EDGE_CRITICAL_P (pred))
1161 critical_edge_split = true;
1162 not_ok_count += pred->count;
1163 unoccr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
1164 sizeof (struct unoccr));
1165 unoccr->insn = NULL;
1166 unoccr->pred = pred;
1167 unoccr->next = unavail_occrs;
1168 unavail_occrs = unoccr;
1169 if (! rollback_unoccr)
1170 rollback_unoccr = unoccr;
1171 }
1172 }
1173
1174 if (/* No load can be replaced by copy. */
1175 npred_ok == 0
1176 /* Prevent exploding the code. */
1177 || (optimize_bb_for_size_p (bb) && npred_ok > 1)
1178 /* If we don't have profile information we cannot tell if splitting
1179 a critical edge is profitable or not so don't do it. */
1180 || ((! profile_info || ! flag_branch_probabilities
1181 || targetm.cannot_modify_jumps_p ())
1182 && critical_edge_split))
1183 goto cleanup;
1184
1185 /* Check if it's worth applying the partial redundancy elimination. */
1186 if (ok_count < GCSE_AFTER_RELOAD_PARTIAL_FRACTION * not_ok_count)
1187 goto cleanup;
1188 if (ok_count < GCSE_AFTER_RELOAD_CRITICAL_FRACTION * critical_count)
1189 goto cleanup;
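/* Illustration with made-up counts: if the edges where a register copy
   suffices account for an execution count of 600 (ok_count) and the edges
   where a load would have to be inserted account for 300 (not_ok_count),
   then with a PARTIAL_FRACTION parameter of e.g. 3 the first test finds
   600 < 3 * 300 and gives up; the second test similarly limits how much
   flow may cross critical edges that would need splitting.  */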
1190
1191 /* Generate moves to the loaded register from where
1192 the memory is available. */
1193 for (occr = avail_occrs; occr; occr = occr->next)
1194 {
1195 avail_insn = occr->insn;
1196 pred = occr->pred;
1197 /* Set avail_reg to be the register having the value of the
1198 memory. */
1199 avail_reg = get_avail_load_store_reg (avail_insn);
1200 gcc_assert (avail_reg);
1201
1202 insert_insn_on_edge (gen_move_insn (copy_rtx (dest),
1203 copy_rtx (avail_reg)),
1204 pred);
1205 stats.moves_inserted++;
1206
1207 if (dump_file)
1208 fprintf (dump_file,
1209 "generating move from %d to %d on edge from %d to %d\n",
1210 REGNO (avail_reg),
1211 REGNO (dest),
1212 pred->src->index,
1213 pred->dest->index);
1214 }
1215
1216 /* Regenerate loads where the memory is unavailable. */
1217 for (unoccr = unavail_occrs; unoccr; unoccr = unoccr->next)
1218 {
1219 pred = unoccr->pred;
1220 insert_insn_on_edge (copy_insn (PATTERN (insn)), pred);
1221 stats.copies_inserted++;
1222
1223 if (dump_file)
1224 {
1225 fprintf (dump_file,
1226 "generating on edge from %d to %d a copy of load: ",
1227 pred->src->index,
1228 pred->dest->index);
1229 print_rtl (dump_file, PATTERN (insn));
1230 fprintf (dump_file, "\n");
1231 }
1232 }
1233
1234 /* Delete the insn if it is not available in this block and mark it
1235 for deletion if it is available. If insn is available it may help
1236 discover additional redundancies, so mark it for later deletion. */
1237 for (a_occr = get_bb_avail_insn (bb, expr->avail_occr, expr->bitmap_index);
1238 a_occr && (a_occr->insn != insn);
1239 a_occr = get_bb_avail_insn (bb, a_occr->next, expr->bitmap_index))
1240 ;
1241
1242 if (!a_occr)
1243 {
1244 stats.insns_deleted++;
1245
1246 if (dump_file)
1247 {
1248 fprintf (dump_file, "deleting insn:\n");
1249 print_rtl_single (dump_file, insn);
1250 fprintf (dump_file, "\n");
1251 }
1252 delete_insn (insn);
1253 }
1254 else
1255 a_occr->deleted_p = 1;
1256
1257 cleanup:
1258 if (rollback_unoccr)
1259 obstack_free (&unoccr_obstack, rollback_unoccr);
1260 }
1261
1262 /* Perform the redundancy elimination as described above. */
1263
1264 static void
1265 eliminate_partially_redundant_loads (void)
1266 {
1267 rtx_insn *insn;
1268 basic_block bb;
1269
1270 /* Note we start at block 1. */
1271
1272 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
1273 return;
1274
1275 FOR_BB_BETWEEN (bb,
1276 ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->next_bb,
1277 EXIT_BLOCK_PTR_FOR_FN (cfun),
1278 next_bb)
1279 {
1280 /* Don't try anything on basic blocks with strange predecessors. */
1281 if (! bb_has_well_behaved_predecessors (bb))
1282 continue;
1283
1284 /* Do not try anything on cold basic blocks. */
1285 if (optimize_bb_for_size_p (bb))
1286 continue;
1287
1288 /* Reset the table of things changed since the start of the current
1289 basic block. */
1290 reset_opr_set_tables ();
1291
1292 /* Look at all insns in the current basic block and see if there are
1293 any loads in it that we can record. */
1294 FOR_BB_INSNS (bb, insn)
1295 {
1296 /* Is it a load - of the form (set (reg) (mem))? */
1297 if (NONJUMP_INSN_P (insn)
1298 && GET_CODE (PATTERN (insn)) == SET
1299 && REG_P (SET_DEST (PATTERN (insn)))
1300 && MEM_P (SET_SRC (PATTERN (insn))))
1301 {
1302 rtx pat = PATTERN (insn);
1303 rtx src = SET_SRC (pat);
1304 struct expr *expr;
1305
1306 if (!MEM_VOLATILE_P (src)
1307 && GET_MODE (src) != BLKmode
1308 && general_operand (src, GET_MODE (src))
1309 /* Are the operands unchanged since the start of the
1310 block? */
1311 && oprs_unchanged_p (src, insn, false)
1312 && !(cfun->can_throw_non_call_exceptions && may_trap_p (src))
1313 && !side_effects_p (src)
1314 /* Is the expression recorded? */
1315 && (expr = lookup_expr_in_table (src)) != NULL)
1316 {
1317 /* We now have a load (insn) and an available memory at
1318 its BB start (expr). Try to remove the load if it is
1319 redundant. */
1320 eliminate_partially_redundant_load (bb, insn, expr);
1321 }
1322 }
1323
1324 /* Keep track of everything modified by this insn, so that we
1325 know what has been modified since the start of the current
1326 basic block. */
1327 if (INSN_P (insn))
1328 record_opr_changes (insn);
1329 }
1330 }
1331
1332 commit_edge_insertions ();
1333 }
1334
1335 /* Go over the expression hash table and delete insns that were
1336 marked for later deletion. */
1337
1338 /* This helper is called via htab_traverse. */
1339 int
1340 delete_redundant_insns_1 (expr **slot, void *data ATTRIBUTE_UNUSED)
1341 {
1342 struct expr *exprs = *slot;
1343 struct occr *occr;
1344
1345 for (occr = exprs->avail_occr; occr != NULL; occr = occr->next)
1346 {
1347 if (occr->deleted_p && dbg_cnt (gcse2_delete))
1348 {
1349 delete_insn (occr->insn);
1350 stats.insns_deleted++;
1351
1352 if (dump_file)
1353 {
1354 fprintf (dump_file, "deleting insn:\n");
1355 print_rtl_single (dump_file, occr->insn);
1356 fprintf (dump_file, "\n");
1357 }
1358 }
1359 }
1360
1361 return 1;
1362 }
1363
1364 static void
1365 delete_redundant_insns (void)
1366 {
1367 expr_table->traverse <void *, delete_redundant_insns_1> (NULL);
1368 if (dump_file)
1369 fprintf (dump_file, "\n");
1370 }
1371
1372 /* Main entry point of GCSE after reload: clean up redundant loads
1373 introduced by spilling. */
1374
1375 static void
1376 gcse_after_reload_main (rtx f ATTRIBUTE_UNUSED)
1377 {
1378
1379 memset (&stats, 0, sizeof (stats));
1380
1381 /* Allocate memory for this pass.
1382 Also computes and initializes the insns' CUIDs. */
1383 alloc_mem ();
1384
1385 /* We need alias analysis. */
1386 init_alias_analysis ();
1387
1388 compute_hash_table ();
1389
1390 if (dump_file)
1391 dump_hash_table (dump_file);
1392
1393 if (expr_table->elements () > 0)
1394 {
1395 /* Knowing which MEMs are transparent through a block can significantly
1396 increase the number of redundant loads found. So compute transparency
1397 information for each memory expression in the hash table. */
1398 df_analyze ();
1399 /* This cannot be part of the normal allocation routine because
1400 we have to know the number of elements in the hash table. */
1401 transp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
1402 expr_table->elements ());
1403 bitmap_vector_ones (transp, last_basic_block_for_fn (cfun));
1404 expr_table->traverse <FILE *, compute_expr_transp> (dump_file);
1405 eliminate_partially_redundant_loads ();
1406 delete_redundant_insns ();
1407 sbitmap_vector_free (transp);
1408
1409 if (dump_file)
1410 {
1411 fprintf (dump_file, "GCSE AFTER RELOAD stats:\n");
1412 fprintf (dump_file, "copies inserted: %d\n", stats.copies_inserted);
1413 fprintf (dump_file, "moves inserted: %d\n", stats.moves_inserted);
1414 fprintf (dump_file, "insns deleted: %d\n", stats.insns_deleted);
1415 fprintf (dump_file, "\n\n");
1416 }
1417
1418 statistics_counter_event (cfun, "copies inserted",
1419 stats.copies_inserted);
1420 statistics_counter_event (cfun, "moves inserted",
1421 stats.moves_inserted);
1422 statistics_counter_event (cfun, "insns deleted",
1423 stats.insns_deleted);
1424 }
1425
1426 /* We are finished with alias analysis. */
1427 end_alias_analysis ();
1428
1429 free_mem ();
1430 }
1431
1432 \f
1433
1434 static unsigned int
1435 rest_of_handle_gcse2 (void)
1436 {
1437 gcse_after_reload_main (get_insns ());
1438 rebuild_jump_labels (get_insns ());
1439 return 0;
1440 }
1441
1442 namespace {
1443
1444 const pass_data pass_data_gcse2 =
1445 {
1446 RTL_PASS, /* type */
1447 "gcse2", /* name */
1448 OPTGROUP_NONE, /* optinfo_flags */
1449 TV_GCSE_AFTER_RELOAD, /* tv_id */
1450 0, /* properties_required */
1451 0, /* properties_provided */
1452 0, /* properties_destroyed */
1453 0, /* todo_flags_start */
1454 0, /* todo_flags_finish */
1455 };
1456
1457 class pass_gcse2 : public rtl_opt_pass
1458 {
1459 public:
1460 pass_gcse2 (gcc::context *ctxt)
1461 : rtl_opt_pass (pass_data_gcse2, ctxt)
1462 {}
1463
1464 /* opt_pass methods: */
1465 virtual bool gate (function *fun)
1466 {
1467 return (optimize > 0 && flag_gcse_after_reload
1468 && optimize_function_for_speed_p (fun));
1469 }
1470
1471 virtual unsigned int execute (function *) { return rest_of_handle_gcse2 (); }
1472
1473 }; // class pass_gcse2
1474
1475 } // anon namespace
1476
1477 rtl_opt_pass *
1478 make_pass_gcse2 (gcc::context *ctxt)
1479 {
1480 return new pass_gcse2 (ctxt);
1481 }