78d140c9 1/* Post reload partially redundant load elimination
fbd26352 2 Copyright (C) 2004-2019 Free Software Foundation, Inc.
78d140c9 3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
78d140c9 9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
78d140c9 19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
9ef16211 23#include "backend.h"
7c29e30e 24#include "target.h"
9ef16211 25#include "rtl.h"
7c29e30e 26#include "tree.h"
27#include "predict.h"
9ef16211 28#include "df.h"
ad7b10a2 29#include "memmodel.h"
7c29e30e 30#include "tm_p.h"
7c29e30e 31#include "insn-config.h"
7c29e30e 32#include "emit-rtl.h"
33#include "recog.h"
78d140c9 34
94ea8568 35#include "cfgrtl.h"
886c1262 36#include "profile.h"
78d140c9 37#include "expr.h"
78d140c9 38#include "params.h"
77fce4cd 39#include "tree-pass.h"
3072d30e 40#include "dbgcnt.h"
d2ac64b1 41#include "gcse-common.h"
78d140c9 42
43/* The following code implements gcse after reload; the purpose of this
 44 pass is to clean up redundant loads generated by reload and other
45 optimizations that come after gcse. It searches for simple inter-block
46 redundancies and tries to eliminate them by adding moves and loads
47 in cold places.
48
 49 Perform partially redundant load elimination; try to eliminate redundant
 50 loads created by the reload pass. We look for fully or partially
 51 redundant loads fed by one or more loads/stores in predecessor BBs,
52 and try adding loads to make them fully redundant. We also check if
53 it's worth adding loads to be able to delete the redundant load.
54
55 Algorithm:
56 1. Build available expressions hash table:
57 For each load/store instruction, if the loaded/stored memory didn't
58 change until the end of the basic block add this memory expression to
59 the hash table.
60 2. Perform Redundancy elimination:
61 For each load instruction do the following:
62 perform partial redundancy elimination, check if it's worth adding
63 loads to make the load fully redundant. If so add loads and
64 register copies and delete the load.
65 3. Delete instructions made redundant in step 2.
66
67 Future enhancement:
 68 If the loaded register is used/defined between the load and some store,
 69 look for some other free register between the load and all its stores,
70 and replace the load with a copy from this register to the loaded
71 register.
72*/
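/* For illustration, a simplified sketch of the transformation described
   above (hypothetical pseudo-RTL; the register numbers are invented):

       bb1: r2 <- [addr]       bb2: (no access to [addr])
               \                 /
                v               v
               bb3: r1 <- [addr]         <- partially redundant load

   becomes

       bb1: r2 <- [addr]       bb2: ...
               |                 |
        (edge: r1 <- r2)   (edge: r1 <- [addr])
               v                 v
               bb3:                      <- redundant load deleted

   i.e. a register-to-register move is inserted on the edge where the value
   is already available, the load is re-emitted on the edge where it is not,
   and the now fully redundant load in bb3 is deleted.  */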
73\f
74
75/* Keep statistics of this pass. */
76static struct
77{
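  /* Register-to-register moves inserted on edges where the loaded value
     was already available in a register.  */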
78 int moves_inserted;
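  /* Copies of the redundant load inserted on edges where its value was
     not available.  */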
79 int copies_inserted;
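  /* Redundant load insns deleted.  */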
80 int insns_deleted;
81} stats;
82
83/* We need to keep a hash table of expressions. The table entries are of
 84 type 'struct expr', and for each expression there is a singly linked
91275768 85 list of occurrences. */
78d140c9 86
78d140c9 87/* Expression elements in the hash table. */
88struct expr
89{
90 /* The expression (SET_SRC for expressions, PATTERN for assignments). */
91 rtx expr;
92
 93 /* The hash value for this entry. */
94 hashval_t hash;
95
d2ac64b1 96 /* Index in the transparent bitmaps. */
97 unsigned int bitmap_index;
98
78d140c9 99 /* List of available occurrences in basic blocks in the function. */
100 struct occr *avail_occr;
101};
102
d9dd21a8 103/* Hashtable helpers. */
104
770ff93b 105struct expr_hasher : nofree_ptr_hash <expr>
d9dd21a8 106{
9969c043 107 static inline hashval_t hash (const expr *);
108 static inline bool equal (const expr *, const expr *);
d9dd21a8 109};
110
111
112/* Hash expression X.
113 DO_NOT_RECORD_P is a boolean indicating if a volatile operand is found
114 or if the expression contains something we don't want to insert in the
115 table. */
116
117static hashval_t
118hash_expr (rtx x, int *do_not_record_p)
119{
120 *do_not_record_p = 0;
121 return hash_rtx (x, GET_MODE (x), do_not_record_p,
122 NULL, /*have_reg_qty=*/false);
123}
124
125/* Callback for hashtab.
126 Return the hash value for expression EXP. We don't actually hash
127 here, we just return the cached hash value. */
128
129inline hashval_t
9969c043 130expr_hasher::hash (const expr *exp)
d9dd21a8 131{
132 return exp->hash;
133}
134
135/* Callback for hashtab.
136 Return nonzero if exp1 is equivalent to exp2. */
137
138inline bool
9969c043 139expr_hasher::equal (const expr *exp1, const expr *exp2)
d9dd21a8 140{
141 int equiv_p = exp_equiv_p (exp1->expr, exp2->expr, 0, true);
142
143 gcc_assert (!equiv_p || exp1->hash == exp2->hash);
144 return equiv_p;
145}
146
147/* The table itself. */
c1f445d2 148static hash_table<expr_hasher> *expr_table;
d9dd21a8 149\f
150
78d140c9 151static struct obstack expr_obstack;
152
153/* Occurrence of an expression.
91275768 154 There is at most one occurrence per basic block. If a pattern appears
78d140c9 155 more than once, the last appearance is used. */
156
157struct occr
158{
159 /* Next occurrence of this expression. */
160 struct occr *next;
161 /* The insn that computes the expression. */
cdace3a0 162 rtx_insn *insn;
78d140c9 163 /* Nonzero if this [anticipatable] occurrence has been deleted. */
164 char deleted_p;
165};
166
167static struct obstack occr_obstack;
168
169/* The following structure holds information about the occurrences of
170 the redundant instructions. */
171struct unoccr
172{
173 struct unoccr *next;
174 edge pred;
cdace3a0 175 rtx_insn *insn;
78d140c9 176};
177
178static struct obstack unoccr_obstack;
179
180/* Array where each element is the CUID of the insn that last set the hard
 181 register numbered by that element's index, since the start of the current
d447762f 182 basic block.
183
184 This array is used during the building of the hash table (step 1) to
185 determine if a reg is killed before the end of a basic block.
186
187 It is also used when eliminating partial redundancies (step 2) to see
188 if a reg was modified since the start of a basic block. */
78d140c9 189static int *reg_avail_info;
190
191/* A list of insns that may modify memory within the current basic block. */
192struct modifies_mem
193{
cdace3a0 194 rtx_insn *insn;
78d140c9 195 struct modifies_mem *next;
196};
197static struct modifies_mem *modifies_mem_list;
198
199/* The modifies_mem structs also go on an obstack, only this obstack is
200 freed each time after completing the analysis or transformations on
201 a basic block. So we allocate a dummy modifies_mem_obstack_bottom
202 object on the obstack to keep track of the bottom of the obstack. */
203static struct obstack modifies_mem_obstack;
204static struct modifies_mem *modifies_mem_obstack_bottom;
205
206/* Mapping of insn UIDs to CUIDs.
207 CUIDs are like UIDs except they increase monotonically in each basic
208 block, have no gaps, and only apply to real insns. */
209static int *uid_cuid;
210#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
d2ac64b1 211
212/* Bitmap of blocks which have memory stores. */
213static bitmap modify_mem_list_set;
214
215/* Bitmap of blocks which have calls. */
216static bitmap blocks_with_calls;
217
218/* Vector indexed by block # with a list of all the insns that
219 modify memory within the block. */
220static vec<rtx_insn *> *modify_mem_list;
221
222/* Vector indexed by block # with a canonicalized list of insns
223 that modify memory in the block. */
224static vec<modify_pair> *canon_modify_mem_list;
225
226/* Vector of simple bitmaps indexed by block number. Each component sbitmap
227 indicates which expressions are transparent through the block. */
228static sbitmap *transp;
78d140c9 229\f
230
231/* Helpers for memory allocation/freeing. */
232static void alloc_mem (void);
233static void free_mem (void);
234
235/* Support for hash table construction and transformations. */
cdace3a0 236static bool oprs_unchanged_p (rtx, rtx_insn *, bool);
237static void record_last_reg_set_info (rtx_insn *, rtx);
238static void record_last_reg_set_info_regno (rtx_insn *, int);
239static void record_last_mem_set_info (rtx_insn *);
81a410b1 240static void record_last_set_info (rtx, const_rtx, void *);
cdace3a0 241static void record_opr_changes (rtx_insn *);
78d140c9 242
81a410b1 243static void find_mem_conflicts (rtx, const_rtx, void *);
78d140c9 244static int load_killed_in_block_p (int, rtx, bool);
245static void reset_opr_set_tables (void);
246
247/* Hash table support. */
248static hashval_t hash_expr (rtx, int *);
cdace3a0 249static void insert_expr_in_table (rtx, rtx_insn *);
78d140c9 250static struct expr *lookup_expr_in_table (rtx);
78d140c9 251static void dump_hash_table (FILE *);
252
253/* Helpers for eliminate_partially_redundant_load. */
254static bool reg_killed_on_edge (rtx, edge);
255static bool reg_used_on_edge (rtx, edge);
256
cdace3a0 257static rtx get_avail_load_store_reg (rtx_insn *);
78d140c9 258
259static bool bb_has_well_behaved_predecessors (basic_block);
d2ac64b1 260static struct occr* get_bb_avail_insn (basic_block, struct occr *, int);
cdace3a0 261static void hash_scan_set (rtx_insn *);
78d140c9 262static void compute_hash_table (void);
263
264/* The workhorses of this pass. */
265static void eliminate_partially_redundant_load (basic_block,
cdace3a0 266 rtx_insn *,
78d140c9 267 struct expr *);
268static void eliminate_partially_redundant_loads (void);
269\f
270
271/* Allocate memory for the CUID mapping array and register/memory
272 tracking tables. */
273
274static void
275alloc_mem (void)
276{
277 int i;
278 basic_block bb;
cdace3a0 279 rtx_insn *insn;
78d140c9 280
281 /* Find the largest UID and create a mapping from UIDs to CUIDs. */
4c36ffe6 282 uid_cuid = XCNEWVEC (int, get_max_uid () + 1);
1539153f 283 i = 1;
fc00614f 284 FOR_EACH_BB_FN (bb, cfun)
78d140c9 285 FOR_BB_INSNS (bb, insn)
286 {
287 if (INSN_P (insn))
288 uid_cuid[INSN_UID (insn)] = i++;
289 else
290 uid_cuid[INSN_UID (insn)] = i;
291 }
292
293 /* Allocate the available expressions hash table. We don't want to
294 make the hash table too small, but unnecessarily making it too large
295 also doesn't help. The i/4 is a gcse.c relic, and seems like a
296 reasonable choice. */
c1f445d2 297 expr_table = new hash_table<expr_hasher> (MAX (i / 4, 13));
78d140c9 298
299 /* We allocate everything on obstacks because we often can roll back
300 the whole obstack to some point. Freeing obstacks is very fast. */
301 gcc_obstack_init (&expr_obstack);
302 gcc_obstack_init (&occr_obstack);
303 gcc_obstack_init (&unoccr_obstack);
304 gcc_obstack_init (&modifies_mem_obstack);
305
306 /* Working array used to track the last set for each register
307 in the current block. */
308 reg_avail_info = (int *) xmalloc (FIRST_PSEUDO_REGISTER * sizeof (int));
309
310 /* Put a dummy modifies_mem object on the modifies_mem_obstack, so we
311 can roll it back in reset_opr_set_tables. */
312 modifies_mem_obstack_bottom =
313 (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
314 sizeof (struct modifies_mem));
d2ac64b1 315
316 blocks_with_calls = BITMAP_ALLOC (NULL);
317 modify_mem_list_set = BITMAP_ALLOC (NULL);
318
319 modify_mem_list = (vec_rtx_heap *) xcalloc (last_basic_block_for_fn (cfun),
320 sizeof (vec_rtx_heap));
321 canon_modify_mem_list
322 = (vec_modify_pair_heap *) xcalloc (last_basic_block_for_fn (cfun),
323 sizeof (vec_modify_pair_heap));
78d140c9 324}
325
326/* Free memory allocated by alloc_mem. */
327
328static void
329free_mem (void)
330{
331 free (uid_cuid);
332
c1f445d2 333 delete expr_table;
334 expr_table = NULL;
78d140c9 335
336 obstack_free (&expr_obstack, NULL);
337 obstack_free (&occr_obstack, NULL);
338 obstack_free (&unoccr_obstack, NULL);
339 obstack_free (&modifies_mem_obstack, NULL);
340
d2ac64b1 341 unsigned i;
342 bitmap_iterator bi;
343 EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
344 {
345 modify_mem_list[i].release ();
346 canon_modify_mem_list[i].release ();
347 }
348
349 BITMAP_FREE (blocks_with_calls);
350 BITMAP_FREE (modify_mem_list_set);
78d140c9 351 free (reg_avail_info);
c45ca67d 352 free (modify_mem_list);
353 free (canon_modify_mem_list);
78d140c9 354}
355\f
356
78d140c9 357/* Insert expression X, which occurs in INSN, in the expression hash table.
358 If it is already present, record it as the last occurrence in INSN's
359 basic block. */
360
361static void
cdace3a0 362insert_expr_in_table (rtx x, rtx_insn *insn)
78d140c9 363{
364 int do_not_record_p;
365 hashval_t hash;
366 struct expr *cur_expr, **slot;
367 struct occr *avail_occr, *last_occr = NULL;
368
369 hash = hash_expr (x, &do_not_record_p);
370
371 /* Do not insert expression in the table if it contains volatile operands,
372 or if hash_expr determines the expression is something we don't want
373 to or can't handle. */
374 if (do_not_record_p)
375 return;
376
377 /* We anticipate that redundant expressions are rare, so for convenience
378 allocate a new hash table element here already and set its fields.
379 If we don't do this, we need a hack with a static struct expr. Anyway,
380 obstack_free is really fast and one more obstack_alloc doesn't hurt if
381 we're going to see more expressions later on. */
382 cur_expr = (struct expr *) obstack_alloc (&expr_obstack,
383 sizeof (struct expr));
384 cur_expr->expr = x;
385 cur_expr->hash = hash;
386 cur_expr->avail_occr = NULL;
387
c1f445d2 388 slot = expr_table->find_slot_with_hash (cur_expr, hash, INSERT);
48e1416a 389
78d140c9 390 if (! (*slot))
d2ac64b1 391 {
392 /* The expression isn't found, so insert it. */
393 *slot = cur_expr;
394
395 /* Anytime we add an entry to the table, record the index
396 of the new entry. The bitmap index starts counting
397 at zero. */
398 cur_expr->bitmap_index = expr_table->elements () - 1;
399 }
78d140c9 400 else
401 {
402 /* The expression is already in the table, so roll back the
403 obstack and use the existing table entry. */
404 obstack_free (&expr_obstack, cur_expr);
405 cur_expr = *slot;
406 }
407
408 /* Search for another occurrence in the same basic block. */
409 avail_occr = cur_expr->avail_occr;
90bd219d 410 while (avail_occr
411 && BLOCK_FOR_INSN (avail_occr->insn) != BLOCK_FOR_INSN (insn))
78d140c9 412 {
413 /* If an occurrence isn't found, save a pointer to the end of
414 the list. */
415 last_occr = avail_occr;
416 avail_occr = avail_occr->next;
417 }
418
419 if (avail_occr)
420 /* Found another instance of the expression in the same basic block.
421 Prefer this occurrence to the currently recorded one. We want
422 the last one in the block and the block is scanned from start
423 to end. */
424 avail_occr->insn = insn;
425 else
426 {
427 /* First occurrence of this expression in this basic block. */
428 avail_occr = (struct occr *) obstack_alloc (&occr_obstack,
429 sizeof (struct occr));
430
431 /* First occurrence of this expression in any block? */
432 if (cur_expr->avail_occr == NULL)
433 cur_expr->avail_occr = avail_occr;
434 else
435 last_occr->next = avail_occr;
436
437 avail_occr->insn = insn;
438 avail_occr->next = NULL;
439 avail_occr->deleted_p = 0;
440 }
441}
442\f
443
444/* Look up pattern PAT in the expression hash table.
445 The result is a pointer to the table entry, or NULL if not found. */
446
447static struct expr *
448lookup_expr_in_table (rtx pat)
449{
450 int do_not_record_p;
451 struct expr **slot, *tmp_expr;
452 hashval_t hash = hash_expr (pat, &do_not_record_p);
453
454 if (do_not_record_p)
455 return NULL;
456
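  /* Build a scratch table entry on the obstack just to perform the lookup;
     it is freed again right after the hash table query.  */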
457 tmp_expr = (struct expr *) obstack_alloc (&expr_obstack,
458 sizeof (struct expr));
459 tmp_expr->expr = pat;
460 tmp_expr->hash = hash;
461 tmp_expr->avail_occr = NULL;
462
c1f445d2 463 slot = expr_table->find_slot_with_hash (tmp_expr, hash, INSERT);
78d140c9 464 obstack_free (&expr_obstack, tmp_expr);
465
466 if (!slot)
467 return NULL;
468 else
469 return (*slot);
470}
471\f
472
91275768 473/* Dump all expressions and occurrences that are currently in the
78d140c9 474 expression hash table to FILE. */
475
476/* This helper is called via htab_traverse. */
d9dd21a8 477int
478dump_expr_hash_table_entry (expr **slot, FILE *file)
78d140c9 479{
d9dd21a8 480 struct expr *exprs = *slot;
78d140c9 481 struct occr *occr;
482
483 fprintf (file, "expr: ");
d9dd21a8 484 print_rtl (file, exprs->expr);
485 fprintf (file,"\nhashcode: %u\n", exprs->hash);
4133d091 486 fprintf (file,"list of occurrences:\n");
d9dd21a8 487 occr = exprs->avail_occr;
78d140c9 488 while (occr)
489 {
cdace3a0 490 rtx_insn *insn = occr->insn;
78d140c9 491 print_rtl_single (file, insn);
492 fprintf (file, "\n");
493 occr = occr->next;
494 }
495 fprintf (file, "\n");
496 return 1;
497}
498
499static void
500dump_hash_table (FILE *file)
501{
502 fprintf (file, "\n\nexpression hash table\n");
503 fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
c1f445d2 504 (long) expr_table->size (),
505 (long) expr_table->elements (),
506 expr_table->collisions ());
9a78b979 507 if (!expr_table->is_empty ())
78d140c9 508 {
509 fprintf (file, "\n\ntable entries:\n");
c1f445d2 510 expr_table->traverse <FILE *, dump_expr_hash_table_entry> (file);
78d140c9 511 }
512 fprintf (file, "\n");
513}
514\f
20128b13 515/* Return true if register X is recorded as being set by an instruction
516 whose CUID is greater than the one given. */
517
518static bool
519reg_changed_after_insn_p (rtx x, int cuid)
520{
521 unsigned int regno, end_regno;
522
523 regno = REGNO (x);
788bed51 524 end_regno = END_REGNO (x);
20128b13 525 do
526 if (reg_avail_info[regno] > cuid)
527 return true;
528 while (++regno < end_regno);
529 return false;
530}
78d140c9 531
d447762f 532/* Return nonzero if the operands of expression X are unchanged
533 1) from the start of INSN's basic block up to but not including INSN
534 if AFTER_INSN is false, or
535 2) from INSN to the end of INSN's basic block if AFTER_INSN is true. */
78d140c9 536
537static bool
cdace3a0 538oprs_unchanged_p (rtx x, rtx_insn *insn, bool after_insn)
78d140c9 539{
540 int i, j;
541 enum rtx_code code;
542 const char *fmt;
543
544 if (x == 0)
545 return 1;
546
547 code = GET_CODE (x);
548 switch (code)
549 {
550 case REG:
78d140c9 551 /* We are called after register allocation. */
876760f6 552 gcc_assert (REGNO (x) < FIRST_PSEUDO_REGISTER);
78d140c9 553 if (after_insn)
20128b13 554 return !reg_changed_after_insn_p (x, INSN_CUID (insn) - 1);
78d140c9 555 else
20128b13 556 return !reg_changed_after_insn_p (x, 0);
78d140c9 557
558 case MEM:
559 if (load_killed_in_block_p (INSN_CUID (insn), x, after_insn))
560 return 0;
561 else
562 return oprs_unchanged_p (XEXP (x, 0), insn, after_insn);
563
564 case PC:
565 case CC0: /*FIXME*/
566 case CONST:
0349edce 567 CASE_CONST_ANY:
78d140c9 568 case SYMBOL_REF:
569 case LABEL_REF:
570 case ADDR_VEC:
571 case ADDR_DIFF_VEC:
572 return 1;
573
574 case PRE_DEC:
575 case PRE_INC:
576 case POST_DEC:
577 case POST_INC:
578 case PRE_MODIFY:
579 case POST_MODIFY:
580 if (after_insn)
581 return 0;
582 break;
583
584 default:
585 break;
586 }
587
588 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
589 {
590 if (fmt[i] == 'e')
591 {
592 if (! oprs_unchanged_p (XEXP (x, i), insn, after_insn))
593 return 0;
594 }
595 else if (fmt[i] == 'E')
596 for (j = 0; j < XVECLEN (x, i); j++)
597 if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, after_insn))
598 return 0;
599 }
600
601 return 1;
602}
603\f
604
605/* Used for communication between find_mem_conflicts and
606 load_killed_in_block_p. Nonzero if find_mem_conflicts finds a
607 conflict between two memory references.
608 This is a bit of a hack to work around the limitations of note_stores. */
609static int mems_conflict_p;
610
611/* DEST is the output of an instruction. If it is a memory reference, and
612 possibly conflicts with the load found in DATA, then set mems_conflict_p
613 to a nonzero value. */
614
615static void
81a410b1 616find_mem_conflicts (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
78d140c9 617 void *data)
618{
619 rtx mem_op = (rtx) data;
620
621 while (GET_CODE (dest) == SUBREG
622 || GET_CODE (dest) == ZERO_EXTRACT
78d140c9 623 || GET_CODE (dest) == STRICT_LOW_PART)
624 dest = XEXP (dest, 0);
625
626 /* If DEST is not a MEM, then it will not conflict with the load. Note
627 that function calls are assumed to clobber memory, but are handled
628 elsewhere. */
629 if (! MEM_P (dest))
630 return;
631
376a287d 632 if (true_dependence (dest, GET_MODE (dest), mem_op))
78d140c9 633 mems_conflict_p = 1;
634}
635\f
636
637/* Return nonzero if the expression in X (a memory reference) is killed
d447762f 638 in the current basic block before (if AFTER_INSN is false) or after
639 (if AFTER_INSN is true) the insn with the CUID in UID_LIMIT.
640
641 This function assumes that the modifies_mem table is flushed when
642 the hash table construction or redundancy elimination phases start
643 processing a new basic block. */
78d140c9 644
645static int
646load_killed_in_block_p (int uid_limit, rtx x, bool after_insn)
647{
648 struct modifies_mem *list_entry = modifies_mem_list;
649
650 while (list_entry)
651 {
cdace3a0 652 rtx_insn *setter = list_entry->insn;
78d140c9 653
654 /* Ignore entries in the list that do not apply. */
655 if ((after_insn
656 && INSN_CUID (setter) < uid_limit)
657 || (! after_insn
658 && INSN_CUID (setter) > uid_limit))
659 {
660 list_entry = list_entry->next;
661 continue;
662 }
663
664 /* If SETTER is a call everything is clobbered. Note that calls
665 to pure functions are never put on the list, so we need not
666 worry about them. */
667 if (CALL_P (setter))
668 return 1;
669
670 /* SETTER must be an insn of some kind that sets memory. Call
671 note_stores to examine each hunk of memory that is modified.
672 It will set mems_conflict_p to nonzero if there may be a
673 conflict between X and SETTER. */
674 mems_conflict_p = 0;
675 note_stores (PATTERN (setter), find_mem_conflicts, x);
676 if (mems_conflict_p)
677 return 1;
678
679 list_entry = list_entry->next;
680 }
681 return 0;
682}
683\f
684
685/* Record register last-set information for REG in INSN. */
686
d447762f 687static inline void
cdace3a0 688record_last_reg_set_info (rtx_insn *insn, rtx reg)
ce53880e 689{
690 unsigned int regno, end_regno;
691
692 regno = REGNO (reg);
788bed51 693 end_regno = END_REGNO (reg);
ce53880e 694 do
695 reg_avail_info[regno] = INSN_CUID (insn);
696 while (++regno < end_regno);
697}
698
699static inline void
cdace3a0 700record_last_reg_set_info_regno (rtx_insn *insn, int regno)
78d140c9 701{
702 reg_avail_info[regno] = INSN_CUID (insn);
703}
704
705
706/* Record memory modification information for INSN. We do not actually care
707 about the memory location(s) that are set, or even how they are set (consider
708 a CALL_INSN). We merely need to record which insns modify memory. */
709
710static void
cdace3a0 711record_last_mem_set_info (rtx_insn *insn)
78d140c9 712{
713 struct modifies_mem *list_entry;
714
715 list_entry = (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
716 sizeof (struct modifies_mem));
717 list_entry->insn = insn;
718 list_entry->next = modifies_mem_list;
719 modifies_mem_list = list_entry;
d2ac64b1 720
721 record_last_mem_set_info_common (insn, modify_mem_list,
722 canon_modify_mem_list,
723 modify_mem_list_set,
724 blocks_with_calls);
78d140c9 725}
726
727/* Called from compute_hash_table via note_stores to handle one
728 SET or CLOBBER in an insn. DATA is really the instruction in which
729 the SET is taking place. */
730
731static void
81a410b1 732record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
78d140c9 733{
cdace3a0 734 rtx_insn *last_set_insn = (rtx_insn *) data;
78d140c9 735
736 if (GET_CODE (dest) == SUBREG)
737 dest = SUBREG_REG (dest);
738
739 if (REG_P (dest))
ce53880e 740 record_last_reg_set_info (last_set_insn, dest);
5630a23e 741 else if (MEM_P (dest))
742 {
 743 /* Ignore pushes; they don't clobber memory. They may still
744 clobber the stack pointer though. Some targets do argument
745 pushes without adding REG_INC notes. See e.g. PR25196,
746 where a pushsi2 on i386 doesn't have REG_INC notes. Note
747 such changes here too. */
748 if (! push_operand (dest, GET_MODE (dest)))
749 record_last_mem_set_info (last_set_insn);
750 else
ce53880e 751 record_last_reg_set_info_regno (last_set_insn, STACK_POINTER_REGNUM);
5630a23e 752 }
78d140c9 753}
d447762f 754
78d140c9 755
756/* Reset tables used to keep track of what's still available since the
757 start of the block. */
758
759static void
760reset_opr_set_tables (void)
761{
762 memset (reg_avail_info, 0, FIRST_PSEUDO_REGISTER * sizeof (int));
763 obstack_free (&modifies_mem_obstack, modifies_mem_obstack_bottom);
764 modifies_mem_list = NULL;
765}
d447762f 766\f
78d140c9 767
768/* Record things set by INSN.
769 This data is used by oprs_unchanged_p. */
770
771static void
cdace3a0 772record_opr_changes (rtx_insn *insn)
78d140c9 773{
d447762f 774 rtx note;
78d140c9 775
d447762f 776 /* Find all stores and record them. */
777 note_stores (PATTERN (insn), record_last_set_info, insn);
78d140c9 778
d447762f 779 /* Also record autoincremented REGs for this insn as changed. */
780 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
781 if (REG_NOTE_KIND (note) == REG_INC)
ce53880e 782 record_last_reg_set_info (insn, XEXP (note, 0));
78d140c9 783
d447762f 784 /* Finally, if this is a call, record all call clobbers. */
785 if (CALL_P (insn))
786 {
ce53880e 787 unsigned int regno;
20128b13 788 rtx link, x;
24ec6636 789 hard_reg_set_iterator hrsi;
790 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, regno, hrsi)
791 record_last_reg_set_info_regno (insn, regno);
78d140c9 792
20128b13 793 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
70bdfe23 794 {
795 gcc_assert (GET_CODE (XEXP (link, 0)) != CLOBBER_HIGH);
796 if (GET_CODE (XEXP (link, 0)) == CLOBBER)
797 {
798 x = XEXP (XEXP (link, 0), 0);
799 if (REG_P (x))
800 {
801 gcc_assert (HARD_REGISTER_P (x));
802 record_last_reg_set_info (insn, x);
803 }
804 }
805 }
20128b13 806
9c2a0c05 807 if (! RTL_CONST_OR_PURE_CALL_P (insn))
d447762f 808 record_last_mem_set_info (insn);
809 }
78d140c9 810}
811\f
812
813/* Scan the pattern of INSN and add an entry to the hash TABLE.
814 After reload we are interested in loads/stores only. */
815
816static void
cdace3a0 817hash_scan_set (rtx_insn *insn)
78d140c9 818{
819 rtx pat = PATTERN (insn);
820 rtx src = SET_SRC (pat);
821 rtx dest = SET_DEST (pat);
822
823 /* We are only interested in loads and stores. */
824 if (! MEM_P (src) && ! MEM_P (dest))
825 return;
826
827 /* Don't mess with jumps and nops. */
828 if (JUMP_P (insn) || set_noop_p (pat))
829 return;
830
78d140c9 831 if (REG_P (dest))
832 {
d447762f 833 if (/* Don't CSE something if we can't do a reg/reg copy. */
78d140c9 834 can_copy_p (GET_MODE (dest))
835 /* Is SET_SRC something we want to gcse? */
836 && general_operand (src, GET_MODE (src))
868e8f12 837#ifdef STACK_REGS
838 /* Never consider insns touching the register stack. It may
839 create situations that reg-stack cannot handle (e.g. a stack
840 register live across an abnormal edge). */
841 && (REGNO (dest) < FIRST_STACK_REG || REGNO (dest) > LAST_STACK_REG)
842#endif
78d140c9 843 /* An expression is not available if its operands are
844 subsequently modified, including this insn. */
845 && oprs_unchanged_p (src, insn, true))
846 {
847 insert_expr_in_table (src, insn);
848 }
849 }
850 else if (REG_P (src))
851 {
 852 /* This is a store of a register to memory; record the memory expression. */
d447762f 853 if (/* Don't CSE something if we can't do a reg/reg copy. */
78d140c9 854 can_copy_p (GET_MODE (src))
855 /* Is SET_DEST something we want to gcse? */
856 && general_operand (dest, GET_MODE (dest))
868e8f12 857#ifdef STACK_REGS
858 /* As above for STACK_REGS. */
859 && (REGNO (src) < FIRST_STACK_REG || REGNO (src) > LAST_STACK_REG)
860#endif
78d140c9 861 && ! (flag_float_store && FLOAT_MODE_P (GET_MODE (dest)))
862 /* Check if the memory expression is killed after insn. */
863 && ! load_killed_in_block_p (INSN_CUID (insn) + 1, dest, true)
864 && oprs_unchanged_p (XEXP (dest, 0), insn, true))
865 {
866 insert_expr_in_table (dest, insn);
867 }
868 }
869}
870\f
d447762f 871
78d140c9 872/* Create hash table of memory expressions available at end of basic
d447762f 873 blocks. Basically you should think of this hash table as the
874 representation of AVAIL_OUT. This is the set of expressions that
875 is generated in a basic block and not killed before the end of the
876 same basic block. Notice that this is really a local computation. */
78d140c9 877
878static void
879compute_hash_table (void)
880{
881 basic_block bb;
882
fc00614f 883 FOR_EACH_BB_FN (bb, cfun)
78d140c9 884 {
cdace3a0 885 rtx_insn *insn;
78d140c9 886
887 /* First pass over the instructions records information used to
d447762f 888 determine when registers and memory are last set.
889 Since we compute a "local" AVAIL_OUT, reset the tables that
890 help us keep track of what has been modified since the start
891 of the block. */
892 reset_opr_set_tables ();
78d140c9 893 FOR_BB_INSNS (bb, insn)
894 {
d447762f 895 if (INSN_P (insn))
896 record_opr_changes (insn);
897 }
78d140c9 898
d447762f 899 /* The next pass actually builds the hash table. */
78d140c9 900 FOR_BB_INSNS (bb, insn)
901 if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SET)
902 hash_scan_set (insn);
903 }
904}
905\f
906
907/* Check if register REG is killed in any insn waiting to be inserted on
908 edge E. This function is required to check that our data flow analysis
909 is still valid prior to commit_edge_insertions. */
910
911static bool
912reg_killed_on_edge (rtx reg, edge e)
913{
ae5e6486 914 rtx_insn *insn;
78d140c9 915
916 for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
917 if (INSN_P (insn) && reg_set_p (reg, insn))
918 return true;
919
920 return false;
921}
922
923/* Similar to above - check if register REG is used in any insn waiting
924 to be inserted on edge E.
925 Assumes no such insn can be a CALL_INSN; if so call reg_used_between_p
926 with PREV(insn),NEXT(insn) instead of calling reg_overlap_mentioned_p. */
927
928static bool
929reg_used_on_edge (rtx reg, edge e)
930{
ae5e6486 931 rtx_insn *insn;
78d140c9 932
933 for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
934 if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
935 return true;
936
937 return false;
938}
939\f
78d140c9 940/* Return the loaded/stored register of a load/store instruction. */
941
942static rtx
cdace3a0 943get_avail_load_store_reg (rtx_insn *insn)
78d140c9 944{
876760f6 945 if (REG_P (SET_DEST (PATTERN (insn))))
946 /* A load. */
9af5ce0c 947 return SET_DEST (PATTERN (insn));
876760f6 948 else
949 {
950 /* A store. */
951 gcc_assert (REG_P (SET_SRC (PATTERN (insn))));
952 return SET_SRC (PATTERN (insn));
953 }
78d140c9 954}
955
956/* Return nonzero if the predecessors of BB are "well behaved". */
957
958static bool
959bb_has_well_behaved_predecessors (basic_block bb)
960{
961 edge pred;
cd665a06 962 edge_iterator ei;
78d140c9 963
4c43a998 964 if (EDGE_COUNT (bb->preds) == 0)
78d140c9 965 return false;
966
cd665a06 967 FOR_EACH_EDGE (pred, ei, bb->preds)
78d140c9 968 {
98023bfd 969 /* commit_one_edge_insertion refuses to insert on abnormal edges even if
970 the source has only one successor so EDGE_CRITICAL_P is too weak. */
971 if ((pred->flags & EDGE_ABNORMAL) && !single_pred_p (pred->dest))
78d140c9 972 return false;
973
4c43a998 974 if ((pred->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
975 return false;
976
91f71fa3 977 if (tablejump_p (BB_END (pred->src), NULL, NULL))
78d140c9 978 return false;
979 }
980 return true;
981}
982
983
 984/* Search for an occurrence of the expression in BB. */
985
986static struct occr*
d2ac64b1 987get_bb_avail_insn (basic_block bb, struct occr *orig_occr, int bitmap_index)
78d140c9 988{
d2ac64b1 989 struct occr *occr = orig_occr;
990
78d140c9 991 for (; occr != NULL; occr = occr->next)
992 if (BLOCK_FOR_INSN (occr->insn) == bb)
993 return occr;
d2ac64b1 994
995 /* If we could not find an occurrence in BB, see if BB
996 has a single predecessor with an occurrence that is
997 transparent through BB. */
998 if (single_pred_p (bb)
999 && bitmap_bit_p (transp[bb->index], bitmap_index)
1000 && (occr = get_bb_avail_insn (single_pred (bb), orig_occr, bitmap_index)))
1001 {
1002 rtx avail_reg = get_avail_load_store_reg (occr->insn);
1003 if (!reg_set_between_p (avail_reg,
1004 PREV_INSN (BB_HEAD (bb)),
1005 NEXT_INSN (BB_END (bb)))
1006 && !reg_killed_on_edge (avail_reg, single_pred_edge (bb)))
1007 return occr;
1008 }
1009
78d140c9 1010 return NULL;
1011}
1012
1013
d2ac64b1 1014/* This helper is called via htab_traverse. */
1015int
1016compute_expr_transp (expr **slot, FILE *dump_file ATTRIBUTE_UNUSED)
1017{
1018 struct expr *expr = *slot;
1019
1020 compute_transp (expr->expr, expr->bitmap_index, transp,
1021 blocks_with_calls, modify_mem_list_set,
1022 canon_modify_mem_list);
1023 return 1;
1024}
1025
78d140c9 1026/* This handles the case where several stores feed a partially redundant
1027 load. It checks if the redundancy elimination is possible and if it's
d447762f 1028 worth it.
1029
1030 Redundancy elimination is possible if,
1031 1) None of the operands of an insn have been modified since the start
1032 of the current basic block.
1033 2) In any predecessor of the current basic block, the same expression
1034 is generated.
1035
1036 See the function body for the heuristics that determine if eliminating
1037 a redundancy is also worth doing, assuming it is possible. */
78d140c9 1038
1039static void
cdace3a0 1040eliminate_partially_redundant_load (basic_block bb, rtx_insn *insn,
78d140c9 1041 struct expr *expr)
1042{
1043 edge pred;
cdace3a0 1044 rtx_insn *avail_insn = NULL;
78d140c9 1045 rtx avail_reg;
1046 rtx dest, pat;
1047 struct occr *a_occr;
1048 struct unoccr *occr, *avail_occrs = NULL;
1049 struct unoccr *unoccr, *unavail_occrs = NULL, *rollback_unoccr = NULL;
1050 int npred_ok = 0;
db9cef39 1051 profile_count ok_count = profile_count::zero ();
1052 /* Redundant load execution count. */
1053 profile_count critical_count = profile_count::zero ();
1054 /* Execution count of critical edges. */
cd665a06 1055 edge_iterator ei;
b0596095 1056 bool critical_edge_split = false;
78d140c9 1057
1058 /* The execution count of the loads to be added to make the
1059 load fully redundant. */
db9cef39 1060 profile_count not_ok_count = profile_count::zero ();
78d140c9 1061 basic_block pred_bb;
1062
1063 pat = PATTERN (insn);
1064 dest = SET_DEST (pat);
1065
1066 /* Check that the loaded register is not used, set, or killed from the
1067 beginning of the block. */
20128b13 1068 if (reg_changed_after_insn_p (dest, 0)
1069 || reg_used_between_p (dest, PREV_INSN (BB_HEAD (bb)), insn))
78d140c9 1070 return;
1071
1072 /* Check potential for replacing load with copy for predecessors. */
cd665a06 1073 FOR_EACH_EDGE (pred, ei, bb->preds)
78d140c9 1074 {
cdace3a0 1075 rtx_insn *next_pred_bb_end;
78d140c9 1076
cdace3a0 1077 avail_insn = NULL;
b0596095 1078 avail_reg = NULL_RTX;
78d140c9 1079 pred_bb = pred->src;
d2ac64b1 1080 for (a_occr = get_bb_avail_insn (pred_bb,
1081 expr->avail_occr,
1082 expr->bitmap_index);
1083 a_occr;
1084 a_occr = get_bb_avail_insn (pred_bb,
1085 a_occr->next,
1086 expr->bitmap_index))
78d140c9 1087 {
1088 /* Check if the loaded register is not used. */
1089 avail_insn = a_occr->insn;
876760f6 1090 avail_reg = get_avail_load_store_reg (avail_insn);
1091 gcc_assert (avail_reg);
48e1416a 1092
78d140c9 1093 /* Make sure we can generate a move from register avail_reg to
1094 dest. */
f9a00e9e 1095 rtx_insn *move = gen_move_insn (copy_rtx (dest),
1096 copy_rtx (avail_reg));
e2f730a9 1097 extract_insn (move);
1098 if (! constrain_operands (1, get_preferred_alternatives (insn,
1099 pred_bb))
78d140c9 1100 || reg_killed_on_edge (avail_reg, pred)
1101 || reg_used_on_edge (dest, pred))
1102 {
1103 avail_insn = NULL;
1104 continue;
1105 }
d2ac64b1 1106 next_pred_bb_end = NEXT_INSN (BB_END (BLOCK_FOR_INSN (avail_insn)));
20128b13 1107 if (!reg_set_between_p (avail_reg, avail_insn, next_pred_bb_end))
78d140c9 1108 /* AVAIL_INSN remains non-null. */
1109 break;
1110 else
1111 avail_insn = NULL;
1112 }
1113
ea5d3981 1114 if (EDGE_CRITICAL_P (pred) && pred->count ().initialized_p ())
1115 critical_count += pred->count ();
78d140c9 1116
1117 if (avail_insn != NULL_RTX)
1118 {
1119 npred_ok++;
ea5d3981 1120 if (pred->count ().initialized_p ())
1121 ok_count = ok_count + pred->count ();
b0596095 1122 if (! set_noop_p (PATTERN (gen_move_insn (copy_rtx (dest),
1123 copy_rtx (avail_reg)))))
1124 {
1125 /* Check if there is going to be a split. */
1126 if (EDGE_CRITICAL_P (pred))
1127 critical_edge_split = true;
1128 }
 1129 else /* It's a dead move; no need to generate it. */
1130 continue;
78d140c9 1131 occr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
beab9d47 1132 sizeof (struct unoccr));
78d140c9 1133 occr->insn = avail_insn;
1134 occr->pred = pred;
1135 occr->next = avail_occrs;
1136 avail_occrs = occr;
1137 if (! rollback_unoccr)
1138 rollback_unoccr = occr;
1139 }
1140 else
1141 {
7063afc3 1142 /* Adding a load on a critical edge will cause a split. */
b0596095 1143 if (EDGE_CRITICAL_P (pred))
1144 critical_edge_split = true;
ea5d3981 1145 if (pred->count ().initialized_p ())
1146 not_ok_count = not_ok_count + pred->count ();
78d140c9 1147 unoccr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
1148 sizeof (struct unoccr));
cdace3a0 1149 unoccr->insn = NULL;
78d140c9 1150 unoccr->pred = pred;
1151 unoccr->next = unavail_occrs;
1152 unavail_occrs = unoccr;
1153 if (! rollback_unoccr)
1154 rollback_unoccr = unoccr;
1155 }
1156 }
1157
1158 if (/* No load can be replaced by copy. */
1159 npred_ok == 0
48e1416a 1160 /* Prevent exploding the code. */
0bfd8d5c 1161 || (optimize_bb_for_size_p (bb) && npred_ok > 1)
48e1416a 1162 /* If we don't have profile information we cannot tell if splitting
b0596095 1163 a critical edge is profitable or not so don't do it. */
56621355 1164 || ((!profile_info || profile_status_for_fn (cfun) != PROFILE_READ
b0596095 1165 || targetm.cannot_modify_jumps_p ())
1166 && critical_edge_split))
78d140c9 1167 goto cleanup;
1168
1169 /* Check if it's worth applying the partial redundancy elimination. */
db9cef39 1170 if (ok_count.to_gcov_type ()
1171 < GCSE_AFTER_RELOAD_PARTIAL_FRACTION * not_ok_count.to_gcov_type ())
78d140c9 1172 goto cleanup;
fc018a4c 1173
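  /* Splitting critical edges is only worthwhile if the edges that can be
     fixed up with a copy are executed often enough: require OK_COUNT to be
     at least GCSE_AFTER_RELOAD_CRITICAL_FRACTION times the execution count
     of the critical edges, saturating the product instead of letting it
     overflow where a checked multiply is available.  */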
1174 gcov_type threshold;
1175#if (GCC_VERSION >= 5000)
1176 if (__builtin_mul_overflow (GCSE_AFTER_RELOAD_CRITICAL_FRACTION,
1177 critical_count.to_gcov_type (), &threshold))
1178 threshold = profile_count::max_count;
1179#else
1180 threshold
1181 = GCSE_AFTER_RELOAD_CRITICAL_FRACTION * critical_count.to_gcov_type ();
1182#endif
1183
1184 if (ok_count.to_gcov_type () < threshold)
78d140c9 1185 goto cleanup;
1186
1187 /* Generate moves to the loaded register from where
1188 the memory is available. */
1189 for (occr = avail_occrs; occr; occr = occr->next)
1190 {
1191 avail_insn = occr->insn;
1192 pred = occr->pred;
1193 /* Set avail_reg to be the register having the value of the
1194 memory. */
1195 avail_reg = get_avail_load_store_reg (avail_insn);
876760f6 1196 gcc_assert (avail_reg);
78d140c9 1197
1198 insert_insn_on_edge (gen_move_insn (copy_rtx (dest),
1199 copy_rtx (avail_reg)),
1200 pred);
1201 stats.moves_inserted++;
1202
1203 if (dump_file)
1204 fprintf (dump_file,
1205 "generating move from %d to %d on edge from %d to %d\n",
1206 REGNO (avail_reg),
1207 REGNO (dest),
1208 pred->src->index,
1209 pred->dest->index);
1210 }
1211
1212 /* Regenerate loads where the memory is unavailable. */
1213 for (unoccr = unavail_occrs; unoccr; unoccr = unoccr->next)
1214 {
1215 pred = unoccr->pred;
1216 insert_insn_on_edge (copy_insn (PATTERN (insn)), pred);
1217 stats.copies_inserted++;
1218
1219 if (dump_file)
1220 {
1221 fprintf (dump_file,
1222 "generating on edge from %d to %d a copy of load: ",
1223 pred->src->index,
1224 pred->dest->index);
1225 print_rtl (dump_file, PATTERN (insn));
1226 fprintf (dump_file, "\n");
1227 }
1228 }
1229
1230 /* Delete the insn if it is not available in this block and mark it
1231 for deletion if it is available. If insn is available it may help
1232 discover additional redundancies, so mark it for later deletion. */
d2ac64b1 1233 for (a_occr = get_bb_avail_insn (bb, expr->avail_occr, expr->bitmap_index);
78d140c9 1234 a_occr && (a_occr->insn != insn);
d2ac64b1 1235 a_occr = get_bb_avail_insn (bb, a_occr->next, expr->bitmap_index))
3c802a1e 1236 ;
78d140c9 1237
1238 if (!a_occr)
b0596095 1239 {
1240 stats.insns_deleted++;
1241
1242 if (dump_file)
1243 {
1244 fprintf (dump_file, "deleting insn:\n");
1245 print_rtl_single (dump_file, insn);
1246 fprintf (dump_file, "\n");
1247 }
1248 delete_insn (insn);
1249 }
78d140c9 1250 else
1251 a_occr->deleted_p = 1;
1252
1253cleanup:
1254 if (rollback_unoccr)
1255 obstack_free (&unoccr_obstack, rollback_unoccr);
1256}
1257
1258/* Perform the redundancy elimination as described above. */
1259
1260static void
1261eliminate_partially_redundant_loads (void)
1262{
cdace3a0 1263 rtx_insn *insn;
78d140c9 1264 basic_block bb;
1265
1266 /* Note we start at block 1. */
1267
34154e27 1268 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
78d140c9 1269 return;
1270
1271 FOR_BB_BETWEEN (bb,
34154e27 1272 ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->next_bb,
1273 EXIT_BLOCK_PTR_FOR_FN (cfun),
78d140c9 1274 next_bb)
1275 {
d447762f 1276 /* Don't try anything on basic blocks with strange predecessors. */
78d140c9 1277 if (! bb_has_well_behaved_predecessors (bb))
1278 continue;
1279
d447762f 1280 /* Do not try anything on cold basic blocks. */
f29b326e 1281 if (optimize_bb_for_size_p (bb))
78d140c9 1282 continue;
1283
d447762f 1284 /* Reset the table of things changed since the start of the current
1285 basic block. */
78d140c9 1286 reset_opr_set_tables ();
1287
d447762f 1288 /* Look at all insns in the current basic block and see if there are
1289 any loads in it that we can record. */
78d140c9 1290 FOR_BB_INSNS (bb, insn)
1291 {
1292 /* Is it a load - of the form (set (reg) (mem))? */
1293 if (NONJUMP_INSN_P (insn)
1294 && GET_CODE (PATTERN (insn)) == SET
1295 && REG_P (SET_DEST (PATTERN (insn)))
1296 && MEM_P (SET_SRC (PATTERN (insn))))
1297 {
1298 rtx pat = PATTERN (insn);
1299 rtx src = SET_SRC (pat);
1300 struct expr *expr;
1301
1302 if (!MEM_VOLATILE_P (src)
1303 && GET_MODE (src) != BLKmode
1304 && general_operand (src, GET_MODE (src))
1305 /* Are the operands unchanged since the start of the
1306 block? */
1307 && oprs_unchanged_p (src, insn, false)
cbeb677e 1308 && !(cfun->can_throw_non_call_exceptions && may_trap_p (src))
78d140c9 1309 && !side_effects_p (src)
1310 /* Is the expression recorded? */
1311 && (expr = lookup_expr_in_table (src)) != NULL)
1312 {
1313 /* We now have a load (insn) and an available memory at
 1314 its BB start (expr). Try to remove the load if it is
1315 redundant. */
1316 eliminate_partially_redundant_load (bb, insn, expr);
1317 }
1318 }
1319
d447762f 1320 /* Keep track of everything modified by this insn, so that we
1321 know what has been modified since the start of the current
1322 basic block. */
78d140c9 1323 if (INSN_P (insn))
d447762f 1324 record_opr_changes (insn);
78d140c9 1325 }
1326 }
1327
1328 commit_edge_insertions ();
1329}
1330
1331/* Go over the expression hash table and delete insns that were
1332 marked for later deletion. */
1333
1334/* This helper is called via htab_traverse. */
d9dd21a8 1335int
1336delete_redundant_insns_1 (expr **slot, void *data ATTRIBUTE_UNUSED)
78d140c9 1337{
d9dd21a8 1338 struct expr *exprs = *slot;
78d140c9 1339 struct occr *occr;
1340
d9dd21a8 1341 for (occr = exprs->avail_occr; occr != NULL; occr = occr->next)
78d140c9 1342 {
3072d30e 1343 if (occr->deleted_p && dbg_cnt (gcse2_delete))
78d140c9 1344 {
1345 delete_insn (occr->insn);
1346 stats.insns_deleted++;
1347
1348 if (dump_file)
1349 {
1350 fprintf (dump_file, "deleting insn:\n");
1351 print_rtl_single (dump_file, occr->insn);
1352 fprintf (dump_file, "\n");
1353 }
1354 }
1355 }
1356
1357 return 1;
1358}
1359
1360static void
1361delete_redundant_insns (void)
1362{
c1f445d2 1363 expr_table->traverse <void *, delete_redundant_insns_1> (NULL);
78d140c9 1364 if (dump_file)
1365 fprintf (dump_file, "\n");
1366}
1367
1368/* Main entry point of GCSE after reload: clean up some redundant loads
1369 due to spilling. */
1370
7f80955e 1371static void
78d140c9 1372gcse_after_reload_main (rtx f ATTRIBUTE_UNUSED)
1373{
5ee7391d 1374
78d140c9 1375 memset (&stats, 0, sizeof (stats));
1376
f0b5f617 1377 /* Allocate memory for this pass.
78d140c9 1378 This also computes and initializes the insns' CUIDs. */
1379 alloc_mem ();
1380
1381 /* We need alias analysis. */
1382 init_alias_analysis ();
1383
1384 compute_hash_table ();
1385
1386 if (dump_file)
1387 dump_hash_table (dump_file);
1388
9a78b979 1389 if (!expr_table->is_empty ())
78d140c9 1390 {
d2ac64b1 1391 /* Knowing which MEMs are transparent through a block can significantly
1392 increase the number of redundant loads found. So compute transparency
1393 information for each memory expression in the hash table. */
1394 df_analyze ();
f4d3c071 1395 /* This cannot be part of the normal allocation routine because
d2ac64b1 1396 we have to know the number of elements in the hash table. */
1397 transp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
1398 expr_table->elements ());
1399 bitmap_vector_ones (transp, last_basic_block_for_fn (cfun));
1400 expr_table->traverse <FILE *, compute_expr_transp> (dump_file);
78d140c9 1401 eliminate_partially_redundant_loads ();
1402 delete_redundant_insns ();
d2ac64b1 1403 sbitmap_vector_free (transp);
78d140c9 1404
1405 if (dump_file)
1406 {
1407 fprintf (dump_file, "GCSE AFTER RELOAD stats:\n");
1408 fprintf (dump_file, "copies inserted: %d\n", stats.copies_inserted);
1409 fprintf (dump_file, "moves inserted: %d\n", stats.moves_inserted);
1410 fprintf (dump_file, "insns deleted: %d\n", stats.insns_deleted);
1411 fprintf (dump_file, "\n\n");
1412 }
30c4e60d 1413
1414 statistics_counter_event (cfun, "copies inserted",
1415 stats.copies_inserted);
1416 statistics_counter_event (cfun, "moves inserted",
1417 stats.moves_inserted);
1418 statistics_counter_event (cfun, "insns deleted",
1419 stats.insns_deleted);
78d140c9 1420 }
48e1416a 1421
78d140c9 1422 /* We are finished with alias. */
1423 end_alias_analysis ();
1424
1425 free_mem ();
1426}
1427
77fce4cd 1428\f
77fce4cd 1429
2a1990e9 1430static unsigned int
77fce4cd 1431rest_of_handle_gcse2 (void)
1432{
1433 gcse_after_reload_main (get_insns ());
1434 rebuild_jump_labels (get_insns ());
2a1990e9 1435 return 0;
77fce4cd 1436}
1437
cbe8bda8 1438namespace {
1439
1440const pass_data pass_data_gcse2 =
77fce4cd 1441{
cbe8bda8 1442 RTL_PASS, /* type */
1443 "gcse2", /* name */
1444 OPTGROUP_NONE, /* optinfo_flags */
cbe8bda8 1445 TV_GCSE_AFTER_RELOAD, /* tv_id */
1446 0, /* properties_required */
1447 0, /* properties_provided */
1448 0, /* properties_destroyed */
1449 0, /* todo_flags_start */
8b88439e 1450 0, /* todo_flags_finish */
77fce4cd 1451};
cbe8bda8 1452
1453class pass_gcse2 : public rtl_opt_pass
1454{
1455public:
9af5ce0c 1456 pass_gcse2 (gcc::context *ctxt)
1457 : rtl_opt_pass (pass_data_gcse2, ctxt)
cbe8bda8 1458 {}
1459
1460 /* opt_pass methods: */
31315c24 1461 virtual bool gate (function *fun)
1462 {
1463 return (optimize > 0 && flag_gcse_after_reload
1464 && optimize_function_for_speed_p (fun));
1465 }
1466
65b0537f 1467 virtual unsigned int execute (function *) { return rest_of_handle_gcse2 (); }
cbe8bda8 1468
1469}; // class pass_gcse2
1470
1471} // anon namespace
1472
1473rtl_opt_pass *
1474make_pass_gcse2 (gcc::context *ctxt)
1475{
1476 return new pass_gcse2 (ctxt);
1477}