/* Post reload partially redundant load elimination
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "intl.h"
#include "obstack.h"
#include "hashtab.h"
#include "params.h"
#include "target.h"
#include "timevar.h"
#include "tree-pass.h"
#include "dbgcnt.h"

/* The following code implements gcse after reload.  The purpose of this
   pass is to clean up redundant loads generated by reload and other
   optimizations that come after gcse.  It searches for simple inter-block
   redundancies and tries to eliminate them by adding moves and loads
   in cold places.

   Perform partially redundant load elimination, trying to eliminate
   redundant loads created by the reload pass.  We look for full or
   partial redundant loads fed by one or more loads/stores in predecessor
   basic blocks, and try adding loads to make them fully redundant.
   We also check whether it's worth adding loads to be able to delete
   the redundant load.

   Algorithm:
   1. Build an available-expressions hash table:
      For each load/store instruction, if the loaded/stored memory didn't
      change until the end of the basic block, add this memory expression
      to the hash table.
   2. Perform redundancy elimination:
      For each load instruction, perform partial redundancy elimination:
      check whether it's worth adding loads to make the load fully
      redundant.  If so, add the loads and register copies and delete
      the load.
   3. Delete the instructions made redundant in step 2.

   Future enhancement:
   If the loaded register is used/defined between the load and some store,
   look for some other free register between the load and all its stores,
   and replace the load with a copy from this register to the loaded
   register.
*/
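
/* As an illustrative sketch (simplified RTL, register numbers chosen
   arbitrarily), consider a block BB3 with two predecessors:

     BB1: (set (reg:SI 3) (mem:SI (reg:SI 6)))  - memory available at end
     BB2: - no access to (mem:SI (reg:SI 6))
     BB3: (set (reg:SI 4) (mem:SI (reg:SI 6)))  - partially redundant

   The load in BB3 is redundant on the path through BB1 but not on the
   path through BB2.  If the heuristics below say it is worthwhile, we
   insert a copy (set (reg:SI 4) (reg:SI 3)) on the BB1->BB3 edge, a
   copy of the load itself on the BB2->BB3 edge, and then delete the
   load in BB3.  */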
\f

/* Keep statistics of this pass.  */
static struct
{
  int moves_inserted;
  int copies_inserted;
  int insns_deleted;
} stats;

/* We need to keep a hash table of expressions.  The table entries are of
   type 'struct expr', and for each expression there is a singly linked
   list of occurrences.  */

/* The table itself.  */
static htab_t expr_table;

/* Expression elements in the hash table.  */
struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;

  /* The hash value for this entry.  */
  hashval_t hash;

  /* List of available occurrences in basic blocks in the function.  */
  struct occr *avail_occr;
};

static struct obstack expr_obstack;

/* Occurrence of an expression.
   There is at most one occurrence per basic block.  If a pattern appears
   more than once, the last appearance is used.  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
};

static struct obstack occr_obstack;

/* The following structure holds the information about the occurrences of
   the redundant instructions.  */
struct unoccr
{
  struct unoccr *next;
  edge pred;
  rtx insn;
};

static struct obstack unoccr_obstack;

/* Array where each element is the CUID of the insn that last set the hard
   register with the number of the element, since the start of the current
   basic block.

   This array is used during the building of the hash table (step 1) to
   determine if a reg is killed before the end of a basic block.

   It is also used when eliminating partial redundancies (step 2) to see
   if a reg was modified since the start of a basic block.  */
static int *reg_avail_info;

/* A list of insns that may modify memory within the current basic block.  */
struct modifies_mem
{
  rtx insn;
  struct modifies_mem *next;
};
static struct modifies_mem *modifies_mem_list;

/* The modifies_mem structs also go on an obstack, only this obstack is
   freed each time after completing the analysis or transformations on
   a basic block.  So we allocate a dummy modifies_mem_obstack_bottom
   object on the obstack to keep track of the bottom of the obstack.  */
static struct obstack modifies_mem_obstack;
static struct modifies_mem *modifies_mem_obstack_bottom;

/* Mapping of insn UIDs to CUIDs.
   CUIDs are like UIDs except they increase monotonically in each basic
   block, have no gaps, and only apply to real insns.  */
static int *uid_cuid;
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
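
/* For example, in a block whose insns have UIDs 40 (a real insn), 42 (a
   note) and 47 (a real insn), the CUIDs would be 1, 2 and 2: a real insn
   gets the next CUID, while anything else shares the CUID of the next
   real insn.  (The UIDs here are made up; see alloc_mem below.)  */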
\f

/* Helpers for memory allocation/freeing.  */
static void alloc_mem (void);
static void free_mem (void);

/* Support for hash table construction and transformations.  */
static bool oprs_unchanged_p (rtx, rtx, bool);
static void record_last_reg_set_info (rtx, rtx);
static void record_last_reg_set_info_regno (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, const_rtx, void *);
static void record_opr_changes (rtx);

static void find_mem_conflicts (rtx, const_rtx, void *);
static int load_killed_in_block_p (int, rtx, bool);
static void reset_opr_set_tables (void);

/* Hash table support.  */
static hashval_t hash_expr (rtx, int *);
static hashval_t hash_expr_for_htab (const void *);
static int expr_equiv_p (const void *, const void *);
static void insert_expr_in_table (rtx, rtx);
static struct expr *lookup_expr_in_table (rtx);
static int dump_hash_table_entry (void **, void *);
static void dump_hash_table (FILE *);

/* Helpers for eliminate_partially_redundant_load.  */
static bool reg_killed_on_edge (rtx, edge);
static bool reg_used_on_edge (rtx, edge);

static rtx get_avail_load_store_reg (rtx);

static bool bb_has_well_behaved_predecessors (basic_block);
static struct occr *get_bb_avail_insn (basic_block, struct occr *);
static void hash_scan_set (rtx);
static void compute_hash_table (void);

/* The workhorses of this pass.  */
static void eliminate_partially_redundant_load (basic_block,
                                                rtx,
                                                struct expr *);
static void eliminate_partially_redundant_loads (void);
\f

/* Allocate memory for the CUID mapping array and register/memory
   tracking tables.  */

static void
alloc_mem (void)
{
  int i;
  basic_block bb;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.  */
  uid_cuid = XCNEWVEC (int, get_max_uid () + 1);
  i = 1;
  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      {
        if (INSN_P (insn))
          uid_cuid[INSN_UID (insn)] = i++;
        else
          uid_cuid[INSN_UID (insn)] = i;
      }

  /* Allocate the available expressions hash table.  We don't want to
     make the hash table too small, but unnecessarily making it too large
     also doesn't help.  The i/4 is a gcse.c relic, and seems like a
     reasonable choice.  */
  expr_table = htab_create (MAX (i / 4, 13),
                            hash_expr_for_htab, expr_equiv_p, NULL);

  /* We allocate everything on obstacks because we often can roll back
     the whole obstack to some point.  Freeing obstacks is very fast.  */
  gcc_obstack_init (&expr_obstack);
  gcc_obstack_init (&occr_obstack);
  gcc_obstack_init (&unoccr_obstack);
  gcc_obstack_init (&modifies_mem_obstack);

  /* Working array used to track the last set for each register
     in the current block.  */
  reg_avail_info = (int *) xmalloc (FIRST_PSEUDO_REGISTER * sizeof (int));

  /* Put a dummy modifies_mem object on the modifies_mem_obstack, so we
     can roll it back in reset_opr_set_tables.  */
  modifies_mem_obstack_bottom =
    (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
                                           sizeof (struct modifies_mem));
}

/* Free memory allocated by alloc_mem.  */

static void
free_mem (void)
{
  free (uid_cuid);

  htab_delete (expr_table);

  obstack_free (&expr_obstack, NULL);
  obstack_free (&occr_obstack, NULL);
  obstack_free (&unoccr_obstack, NULL);
  obstack_free (&modifies_mem_obstack, NULL);

  free (reg_avail_info);
}
\f

/* Hash expression X.
   DO_NOT_RECORD_P is set nonzero if a volatile operand is found or if
   the expression contains something we don't want to insert in the
   table.  */

static hashval_t
hash_expr (rtx x, int *do_not_record_p)
{
  *do_not_record_p = 0;
  return hash_rtx (x, GET_MODE (x), do_not_record_p,
                   NULL, /*have_reg_qty=*/false);
}

/* Callback for hashtab.
   Return the hash value for expression EXP.  We don't actually hash
   here, we just return the cached hash value.  */

static hashval_t
hash_expr_for_htab (const void *expp)
{
  const struct expr *const exp = (const struct expr *) expp;
  return exp->hash;
}

/* Callback for hashtab.
   Return nonzero if EXP1 is equivalent to EXP2.  */

static int
expr_equiv_p (const void *exp1p, const void *exp2p)
{
  const struct expr *const exp1 = (const struct expr *) exp1p;
  const struct expr *const exp2 = (const struct expr *) exp2p;
  int equiv_p = exp_equiv_p (exp1->expr, exp2->expr, 0, true);

  gcc_assert (!equiv_p || exp1->hash == exp2->hash);
  return equiv_p;
}
\f

/* Insert expression X, which occurs in INSN, into the hash table.
   If it is already present, record INSN as its last occurrence in INSN's
   basic block.  */

static void
insert_expr_in_table (rtx x, rtx insn)
{
  int do_not_record_p;
  hashval_t hash;
  struct expr *cur_expr, **slot;
  struct occr *avail_occr, *last_occr = NULL;

  hash = hash_expr (x, &do_not_record_p);

  /* Do not insert expression in the table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  /* We anticipate that redundant expressions are rare, so for convenience
     allocate a new hash table element here already and set its fields.
     If we don't do this, we need a hack with a static struct expr.  Anyway,
     obstack_free is really fast and one more obstack_alloc doesn't hurt if
     we're going to see more expressions later on.  */
  cur_expr = (struct expr *) obstack_alloc (&expr_obstack,
                                            sizeof (struct expr));
  cur_expr->expr = x;
  cur_expr->hash = hash;
  cur_expr->avail_occr = NULL;

  slot = (struct expr **) htab_find_slot_with_hash (expr_table, cur_expr,
                                                    hash, INSERT);

  if (! (*slot))
    /* The expression isn't found, so insert it.  */
    *slot = cur_expr;
  else
    {
      /* The expression is already in the table, so roll back the
         obstack and use the existing table entry.  */
      obstack_free (&expr_obstack, cur_expr);
      cur_expr = *slot;
    }

  /* Search for another occurrence in the same basic block.  */
  avail_occr = cur_expr->avail_occr;
  while (avail_occr
         && BLOCK_FOR_INSN (avail_occr->insn) != BLOCK_FOR_INSN (insn))
    {
      /* If an occurrence isn't found, save a pointer to the end of
         the list.  */
      last_occr = avail_occr;
      avail_occr = avail_occr->next;
    }

  if (avail_occr)
    /* Found another instance of the expression in the same basic block.
       Prefer this occurrence to the currently recorded one.  We want
       the last one in the block and the block is scanned from start
       to end.  */
    avail_occr->insn = insn;
  else
    {
      /* First occurrence of this expression in this basic block.  */
      avail_occr = (struct occr *) obstack_alloc (&occr_obstack,
                                                  sizeof (struct occr));

      /* First occurrence of this expression in any block?  */
      if (cur_expr->avail_occr == NULL)
        cur_expr->avail_occr = avail_occr;
      else
        last_occr->next = avail_occr;

      avail_occr->insn = insn;
      avail_occr->next = NULL;
      avail_occr->deleted_p = 0;
    }
}
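
/* For example, if the same load of (mem:SI (reg:SI 6)) is recorded three
   times in one basic block, only the last occurrence survives in the
   list: the pass only cares about availability at the end of the block,
   and the last occurrence is the one that reaches it.  (The expression
   here is only an illustration.)  */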
\f

/* Look up pattern PAT in the expression hash table.
   The result is a pointer to the table entry, or NULL if not found.  */

static struct expr *
lookup_expr_in_table (rtx pat)
{
  int do_not_record_p;
  struct expr **slot, *tmp_expr;
  hashval_t hash = hash_expr (pat, &do_not_record_p);

  if (do_not_record_p)
    return NULL;

  tmp_expr = (struct expr *) obstack_alloc (&expr_obstack,
                                            sizeof (struct expr));
  tmp_expr->expr = pat;
  tmp_expr->hash = hash;
  tmp_expr->avail_occr = NULL;

  slot = (struct expr **) htab_find_slot_with_hash (expr_table, tmp_expr,
                                                    hash, INSERT);
  obstack_free (&expr_obstack, tmp_expr);

  if (!slot)
    return NULL;
  else
    return (*slot);
}
\f

/* Dump all expressions and occurrences that are currently in the
   expression hash table to FILE.  */

/* This helper is called via htab_traverse.  */
static int
dump_hash_table_entry (void **slot, void *filep)
{
  struct expr *expr = (struct expr *) *slot;
  FILE *file = (FILE *) filep;
  struct occr *occr;

  fprintf (file, "expr: ");
  print_rtl (file, expr->expr);
  fprintf (file, "\nhashcode: %u\n", expr->hash);
  fprintf (file, "list of occurrences:\n");
  occr = expr->avail_occr;
  while (occr)
    {
      rtx insn = occr->insn;
      print_rtl_single (file, insn);
      fprintf (file, "\n");
      occr = occr->next;
    }
  fprintf (file, "\n");
  return 1;
}

static void
dump_hash_table (FILE *file)
{
  fprintf (file, "\n\nexpression hash table\n");
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
           (long) htab_size (expr_table),
           (long) htab_elements (expr_table),
           htab_collisions (expr_table));
  if (htab_elements (expr_table) > 0)
    {
      fprintf (file, "\n\ntable entries:\n");
      htab_traverse (expr_table, dump_hash_table_entry, file);
    }
  fprintf (file, "\n");
}
\f
/* Return true if register X is recorded as being set by an instruction
   whose CUID is greater than the one given.  */

static bool
reg_changed_after_insn_p (rtx x, int cuid)
{
  unsigned int regno, end_regno;

  regno = REGNO (x);
  end_regno = END_HARD_REGNO (x);
  do
    if (reg_avail_info[regno] > cuid)
      return true;
  while (++regno < end_regno);
  return false;
}

/* Return nonzero if the operands of expression X are unchanged
   1) from the start of INSN's basic block up to but not including INSN
      if AFTER_INSN is false, or
   2) from INSN to the end of INSN's basic block if AFTER_INSN is true.  */

static bool
oprs_unchanged_p (rtx x, rtx insn, bool after_insn)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      /* We are called after register allocation.  */
      gcc_assert (REGNO (x) < FIRST_PSEUDO_REGISTER);
      if (after_insn)
        return !reg_changed_after_insn_p (x, INSN_CUID (insn) - 1);
      else
        return !reg_changed_after_insn_p (x, 0);

    case MEM:
      if (load_killed_in_block_p (INSN_CUID (insn), x, after_insn))
        return 0;
      else
        return oprs_unchanged_p (XEXP (x, 0), insn, after_insn);

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      if (after_insn)
        return 0;
      break;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (! oprs_unchanged_p (XEXP (x, i), insn, after_insn))
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, after_insn))
            return 0;
    }

  return 1;
}
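
/* For example, given a block containing (CUIDs in brackets):

     [1] (set (reg:SI 1) (mem:SI (reg:SI 2)))
     [2] (set (reg:SI 2) (reg:SI 3))

   then for the MEM in insn [1], AFTER_INSN true yields false, because
   insn [2] modifies the address register later in the block (this mode
   is used while building the hash table, after record_opr_changes has
   seen the whole block).  AFTER_INSN false yields true, since nothing
   sets reg 2 before insn [1] (this mode is used during elimination,
   where record_opr_changes has only seen the insns already visited).
   Register numbers are arbitrary.  */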
\f

/* Used for communication between find_mem_conflicts and
   load_killed_in_block_p.  Nonzero if find_mem_conflicts finds a
   conflict between two memory references.
   This is a bit of a hack to work around the limitations of note_stores.  */
static int mems_conflict_p;

/* DEST is the output of an instruction.  If it is a memory reference and
   possibly conflicts with the load found in DATA, then set mems_conflict_p
   to a nonzero value.  */

static void
find_mem_conflicts (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
                    void *data)
{
  rtx mem_op = (rtx) data;

  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (! MEM_P (dest))
    return;

  if (true_dependence (dest, GET_MODE (dest), mem_op))
    mems_conflict_p = 1;
}
\f

/* Return nonzero if the expression in X (a memory reference) is killed
   in the current basic block before (if AFTER_INSN is false) or after
   (if AFTER_INSN is true) the insn with the CUID in UID_LIMIT.

   This function assumes that the modifies_mem table is flushed when
   the hash table construction or redundancy elimination phases start
   processing a new basic block.  */

static int
load_killed_in_block_p (int uid_limit, rtx x, bool after_insn)
{
  struct modifies_mem *list_entry = modifies_mem_list;

  while (list_entry)
    {
      rtx setter = list_entry->insn;

      /* Ignore entries in the list that do not apply.  */
      if ((after_insn
           && INSN_CUID (setter) < uid_limit)
          || (! after_insn
              && INSN_CUID (setter) > uid_limit))
        {
          list_entry = list_entry->next;
          continue;
        }

      /* If SETTER is a call everything is clobbered.  Note that calls
         to pure functions are never put on the list, so we need not
         worry about them.  */
      if (CALL_P (setter))
        return 1;

      /* SETTER must be an insn of some kind that sets memory.  Call
         note_stores to examine each hunk of memory that is modified.
         It will set mems_conflict_p to nonzero if there may be a
         conflict between X and SETTER.  */
      mems_conflict_p = 0;
      note_stores (PATTERN (setter), find_mem_conflicts, x);
      if (mems_conflict_p)
        return 1;

      list_entry = list_entry->next;
    }
  return 0;
}
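
/* For instance, when called for a load at CUID 5 with AFTER_INSN false,
   entries with CUID > 5 are skipped: a conflicting store later in the
   block cannot kill the load on the path from the block start to the
   load.  With AFTER_INSN true the filter is reversed.  (The CUID value
   is chosen only for illustration.)  */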
\f

/* Record the CUID of INSN as the last set of each hard register
   covered by REG.  */

static inline void
record_last_reg_set_info (rtx insn, rtx reg)
{
  unsigned int regno, end_regno;

  regno = REGNO (reg);
  end_regno = END_HARD_REGNO (reg);
  do
    reg_avail_info[regno] = INSN_CUID (insn);
  while (++regno < end_regno);
}

static inline void
record_last_reg_set_info_regno (rtx insn, int regno)
{
  reg_avail_info[regno] = INSN_CUID (insn);
}


/* Record memory modification information for INSN.  We do not actually
   care about the memory location(s) that are set, or even how they are
   set (consider a CALL_INSN).  We merely need to record which insns
   modify memory.  */

static void
record_last_mem_set_info (rtx insn)
{
  struct modifies_mem *list_entry;

  list_entry = (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
                                                      sizeof (struct modifies_mem));
  list_entry->insn = insn;
  list_entry->next = modifies_mem_list;
  modifies_mem_list = list_entry;
}

/* Called from compute_hash_table via note_stores to handle one
   SET or CLOBBER in an insn.  DATA is really the instruction in which
   the SET is taking place.  */

static void
record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx last_set_insn = (rtx) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (REG_P (dest))
    record_last_reg_set_info (last_set_insn, dest);
  else if (MEM_P (dest))
    {
      /* Ignore pushes, they don't clobber memory.  They may still
         clobber the stack pointer though.  Some targets do argument
         pushes without adding REG_INC notes.  See e.g. PR25196,
         where a pushsi2 on i386 doesn't have REG_INC notes.  Note
         such changes here too.  */
      if (! push_operand (dest, GET_MODE (dest)))
        record_last_mem_set_info (last_set_insn);
      else
        record_last_reg_set_info_regno (last_set_insn, STACK_POINTER_REGNUM);
    }
}


/* Reset tables used to keep track of what's still available since the
   start of the block.  */

static void
reset_opr_set_tables (void)
{
  memset (reg_avail_info, 0, FIRST_PSEUDO_REGISTER * sizeof (int));
  obstack_free (&modifies_mem_obstack, modifies_mem_obstack_bottom);
  modifies_mem_list = NULL;
}
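
/* The rollback above is an instance of a general obstack idiom:

     void *bottom = obstack_alloc (&ob, size);   - dummy marker
     ... allocate per-block objects on OB ...
     obstack_free (&ob, bottom);                 - frees the marker and
                                                   everything after it

   obstack_free with a non-null second argument releases that object and
   all later allocations in one cheap operation, which is why the
   modifies_mem nodes live on an obstack instead of being freed one at
   a time.  */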
\f

/* Record things set by INSN.
   This data is used by oprs_unchanged_p.  */

static void
record_opr_changes (rtx insn)
{
  rtx note;

  /* Find all stores and record them.  */
  note_stores (PATTERN (insn), record_last_set_info, insn);

  /* Also record autoincremented REGs for this insn as changed.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_INC)
      record_last_reg_set_info (insn, XEXP (note, 0));

  /* Finally, if this is a call, record all call clobbers.  */
  if (CALL_P (insn))
    {
      unsigned int regno;
      rtx link, x;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
          record_last_reg_set_info_regno (insn, regno);

      for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == CLOBBER)
          {
            x = XEXP (XEXP (link, 0), 0);
            if (REG_P (x))
              {
                gcc_assert (HARD_REGISTER_P (x));
                record_last_reg_set_info (insn, x);
              }
          }

      if (! RTL_CONST_OR_PURE_CALL_P (insn))
        record_last_mem_set_info (insn);
    }
}
\f

/* Scan the pattern of INSN and add an entry to the hash table.
   After reload we are interested in loads/stores only.  */

static void
hash_scan_set (rtx insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);

  /* We are only interested in loads and stores.  */
  if (! MEM_P (src) && ! MEM_P (dest))
    return;

  /* Don't mess with jumps and nops.  */
  if (JUMP_P (insn) || set_noop_p (pat))
    return;

  if (REG_P (dest))
    {
      if (/* Don't CSE something if we can't do a reg/reg copy.  */
          can_copy_p (GET_MODE (dest))
          /* Is SET_SRC something we want to gcse?  */
          && general_operand (src, GET_MODE (src))
#ifdef STACK_REGS
          /* Never consider insns touching the register stack.  It may
             create situations that reg-stack cannot handle (e.g. a stack
             register live across an abnormal edge).  */
          && (REGNO (dest) < FIRST_STACK_REG || REGNO (dest) > LAST_STACK_REG)
#endif
          /* An expression is not available if its operands are
             subsequently modified, including this insn.  */
          && oprs_unchanged_p (src, insn, true))
        {
          insert_expr_in_table (src, insn);
        }
    }
  else if (REG_P (src))
    {
      /* A store of a register to memory: record the memory expression.  */
      if (/* Don't CSE something if we can't do a reg/reg copy.  */
          can_copy_p (GET_MODE (src))
          /* Is SET_DEST something we want to gcse?  */
          && general_operand (dest, GET_MODE (dest))
#ifdef STACK_REGS
          /* As above for STACK_REGS.  */
          && (REGNO (src) < FIRST_STACK_REG || REGNO (src) > LAST_STACK_REG)
#endif
          && ! (flag_float_store && FLOAT_MODE_P (GET_MODE (dest)))
          /* Check whether the memory expression is killed after INSN.  */
          && ! load_killed_in_block_p (INSN_CUID (insn) + 1, dest, true)
          && oprs_unchanged_p (XEXP (dest, 0), insn, true))
        {
          insert_expr_in_table (dest, insn);
        }
    }
}
\f

/* Create hash table of memory expressions available at end of basic
   blocks.  Basically you should think of this hash table as the
   representation of AVAIL_OUT.  This is the set of expressions that
   is generated in a basic block and not killed before the end of the
   same basic block.  Notice that this is really a local computation.  */

static void
compute_hash_table (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      rtx insn;

      /* First pass over the instructions records information used to
         determine when registers and memory are last set.
         Since we compute a "local" AVAIL_OUT, reset the tables that
         help us keep track of what has been modified since the start
         of the block.  */
      reset_opr_set_tables ();
      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            record_opr_changes (insn);
        }

      /* The next pass actually builds the hash table.  */
      FOR_BB_INSNS (bb, insn)
        if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SET)
          hash_scan_set (insn);
    }
}
\f

/* Check if register REG is killed in any insn waiting to be inserted on
   edge E.  This function is required to check that our data flow analysis
   is still valid prior to commit_edge_insertions.  */

static bool
reg_killed_on_edge (rtx reg, edge e)
{
  rtx insn;

  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return true;

  return false;
}

/* Similar to above - check if register REG is used in any insn waiting
   to be inserted on edge E.
   This assumes no such insn can be a CALL_INSN; if one could be, call
   reg_used_between_p with PREV (insn), NEXT (insn) instead of
   reg_overlap_mentioned_p.  */

static bool
reg_used_on_edge (rtx reg, edge e)
{
  rtx insn;

  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
      return true;

  return false;
}
\f
/* Return the loaded/stored register of a load/store instruction.  */

static rtx
get_avail_load_store_reg (rtx insn)
{
  if (REG_P (SET_DEST (PATTERN (insn))))
    /* A load.  */
    return SET_DEST (PATTERN (insn));
  else
    {
      /* A store.  */
      gcc_assert (REG_P (SET_SRC (PATTERN (insn))));
      return SET_SRC (PATTERN (insn));
    }
}
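
/* E.g. for a load (set (reg:SI 1) (mem:SI (reg:SI 2))) this returns
   (reg:SI 1), and for a store (set (mem:SI (reg:SI 2)) (reg:SI 1)) it
   also returns (reg:SI 1): in both cases the register that holds the
   value of the memory location.  Register numbers are arbitrary.  */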

/* Return nonzero if the predecessors of BB are "well behaved".  */

static bool
bb_has_well_behaved_predecessors (basic_block bb)
{
  edge pred;
  edge_iterator ei;

  if (EDGE_COUNT (bb->preds) == 0)
    return false;

  FOR_EACH_EDGE (pred, ei, bb->preds)
    {
      if ((pred->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (pred))
        return false;

      if ((pred->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
        return false;

      if (JUMP_TABLE_DATA_P (BB_END (pred->src)))
        return false;
    }
  return true;
}


/* Search the occurrence list OCCR for an occurrence in BB.  */

static struct occr *
get_bb_avail_insn (basic_block bb, struct occr *occr)
{
  for (; occr != NULL; occr = occr->next)
    if (BLOCK_FOR_INSN (occr->insn) == bb)
      return occr;
  return NULL;
}


/* This handles the case where several stores feed a partially redundant
   load.  It checks if the redundancy elimination is possible and if it's
   worth it.

   Redundancy elimination is possible if:
   1) None of the operands of an insn have been modified since the start
      of the current basic block.
   2) In any predecessor of the current basic block, the same expression
      is generated.

   See the function body for the heuristics that determine if eliminating
   a redundancy is also worth doing, assuming it is possible.  */

static void
eliminate_partially_redundant_load (basic_block bb, rtx insn,
                                    struct expr *expr)
{
  edge pred;
  rtx avail_insn = NULL_RTX;
  rtx avail_reg;
  rtx dest, pat;
  struct occr *a_occr;
  struct unoccr *occr, *avail_occrs = NULL;
  struct unoccr *unoccr, *unavail_occrs = NULL, *rollback_unoccr = NULL;
  int npred_ok = 0;
  gcov_type ok_count = 0; /* Redundant load execution count.  */
  gcov_type critical_count = 0; /* Execution count of critical edges.  */
  edge_iterator ei;
  bool critical_edge_split = false;

  /* The execution count of the loads to be added to make the
     load fully redundant.  */
  gcov_type not_ok_count = 0;
  basic_block pred_bb;

  pat = PATTERN (insn);
  dest = SET_DEST (pat);

  /* Check that the loaded register is not used, set, or killed from the
     beginning of the block.  */
  if (reg_changed_after_insn_p (dest, 0)
      || reg_used_between_p (dest, PREV_INSN (BB_HEAD (bb)), insn))
    return;

  /* Check potential for replacing load with copy for predecessors.  */
  FOR_EACH_EDGE (pred, ei, bb->preds)
    {
      rtx next_pred_bb_end;

      avail_insn = NULL_RTX;
      avail_reg = NULL_RTX;
      pred_bb = pred->src;
      next_pred_bb_end = NEXT_INSN (BB_END (pred_bb));
      for (a_occr = get_bb_avail_insn (pred_bb, expr->avail_occr); a_occr;
           a_occr = get_bb_avail_insn (pred_bb, a_occr->next))
        {
          /* Check if the loaded register is not used.  */
          avail_insn = a_occr->insn;
          avail_reg = get_avail_load_store_reg (avail_insn);
          gcc_assert (avail_reg);

          /* Make sure we can generate a move from register avail_reg to
             dest.  */
          extract_insn (gen_move_insn (copy_rtx (dest),
                                       copy_rtx (avail_reg)));
          if (! constrain_operands (1)
              || reg_killed_on_edge (avail_reg, pred)
              || reg_used_on_edge (dest, pred))
            {
              avail_insn = NULL;
              continue;
            }
          if (!reg_set_between_p (avail_reg, avail_insn, next_pred_bb_end))
            /* AVAIL_INSN remains non-null.  */
            break;
          else
            avail_insn = NULL;
        }

      if (EDGE_CRITICAL_P (pred))
        critical_count += pred->count;

      if (avail_insn != NULL_RTX)
        {
          npred_ok++;
          ok_count += pred->count;
          if (! set_noop_p (PATTERN (gen_move_insn (copy_rtx (dest),
                                                    copy_rtx (avail_reg)))))
            {
              /* Check if there is going to be a split.  */
              if (EDGE_CRITICAL_P (pred))
                critical_edge_split = true;
            }
          else /* It's a dead move; no need to generate it.  */
            continue;
          occr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
                                                  sizeof (struct unoccr));
          occr->insn = avail_insn;
          occr->pred = pred;
          occr->next = avail_occrs;
          avail_occrs = occr;
          if (! rollback_unoccr)
            rollback_unoccr = occr;
        }
      else
        {
          /* Adding a load on a critical edge will cause a split.  */
          if (EDGE_CRITICAL_P (pred))
            critical_edge_split = true;
          not_ok_count += pred->count;
          unoccr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
                                                    sizeof (struct unoccr));
          unoccr->insn = NULL_RTX;
          unoccr->pred = pred;
          unoccr->next = unavail_occrs;
          unavail_occrs = unoccr;
          if (! rollback_unoccr)
            rollback_unoccr = unoccr;
        }
    }

  if (/* No load can be replaced by copy.  */
      npred_ok == 0
      /* Prevent exploding the code.  */
      || (optimize_bb_for_size_p (bb) && npred_ok > 1)
      /* If we don't have profile information we cannot tell if splitting
         a critical edge is profitable or not so don't do it.  */
      || ((! profile_info || ! flag_branch_probabilities
           || targetm.cannot_modify_jumps_p ())
          && critical_edge_split))
    goto cleanup;

  /* Check if it's worth applying the partial redundancy elimination.  */
  if (ok_count < GCSE_AFTER_RELOAD_PARTIAL_FRACTION * not_ok_count)
    goto cleanup;
  if (ok_count < GCSE_AFTER_RELOAD_CRITICAL_FRACTION * critical_count)
    goto cleanup;

  /* Generate moves to the loaded register from where
     the memory is available.  */
  for (occr = avail_occrs; occr; occr = occr->next)
    {
      avail_insn = occr->insn;
      pred = occr->pred;
      /* Set avail_reg to be the register having the value of the
         memory.  */
      avail_reg = get_avail_load_store_reg (avail_insn);
      gcc_assert (avail_reg);

      insert_insn_on_edge (gen_move_insn (copy_rtx (dest),
                                          copy_rtx (avail_reg)),
                           pred);
      stats.moves_inserted++;

      if (dump_file)
        fprintf (dump_file,
                 "generating move from %d to %d on edge from %d to %d\n",
                 REGNO (avail_reg),
                 REGNO (dest),
                 pred->src->index,
                 pred->dest->index);
    }

  /* Regenerate loads where the memory is unavailable.  */
  for (unoccr = unavail_occrs; unoccr; unoccr = unoccr->next)
    {
      pred = unoccr->pred;
      insert_insn_on_edge (copy_insn (PATTERN (insn)), pred);
      stats.copies_inserted++;

      if (dump_file)
        {
          fprintf (dump_file,
                   "generating on edge from %d to %d a copy of load: ",
                   pred->src->index,
                   pred->dest->index);
          print_rtl (dump_file, PATTERN (insn));
          fprintf (dump_file, "\n");
        }
    }

  /* Delete the insn if it is not available in this block, otherwise mark
     it for later deletion: an available insn may help discover additional
     redundancies first.  */
  for (a_occr = get_bb_avail_insn (bb, expr->avail_occr);
       a_occr && (a_occr->insn != insn);
       a_occr = get_bb_avail_insn (bb, a_occr->next))
    ;

  if (!a_occr)
    {
      stats.insns_deleted++;

      if (dump_file)
        {
          fprintf (dump_file, "deleting insn:\n");
          print_rtl_single (dump_file, insn);
          fprintf (dump_file, "\n");
        }
      delete_insn (insn);
    }
  else
    a_occr->deleted_p = 1;

cleanup:
  if (rollback_unoccr)
    obstack_free (&unoccr_obstack, rollback_unoccr);
}

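/* To see how the profitability checks above play out, suppose the load
   is available in predecessors with a combined profile count of 900
   (ok_count) and unavailable in predecessors with a count of 100
   (not_ok_count), with no critical edges.  Assuming the default
   --param gcse-after-reload-partial-fraction=3, the test 900 < 3 * 100
   fails, so the transformation proceeds: moves are inserted on the
   available edges, a copy of the load on the unavailable one, and the
   original load is deleted.  (The counts and parameter value are for
   illustration only.)  */
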
/* Perform the redundancy elimination as described above.  */

static void
eliminate_partially_redundant_loads (void)
{
  rtx insn;
  basic_block bb;

  /* Note we start at block 1.  */

  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    return;

  FOR_BB_BETWEEN (bb,
                  ENTRY_BLOCK_PTR->next_bb->next_bb,
                  EXIT_BLOCK_PTR,
                  next_bb)
    {
      /* Don't try anything on basic blocks with strange predecessors.  */
      if (! bb_has_well_behaved_predecessors (bb))
        continue;

      /* Do not try anything on cold basic blocks.  */
      if (optimize_bb_for_size_p (bb))
        continue;

      /* Reset the table of things changed since the start of the current
         basic block.  */
      reset_opr_set_tables ();

      /* Look at all insns in the current basic block and see if there are
         any loads in it that we can record.  */
      FOR_BB_INSNS (bb, insn)
        {
          /* Is it a load - of the form (set (reg) (mem))?  */
          if (NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == SET
              && REG_P (SET_DEST (PATTERN (insn)))
              && MEM_P (SET_SRC (PATTERN (insn))))
            {
              rtx pat = PATTERN (insn);
              rtx src = SET_SRC (pat);
              struct expr *expr;

              if (!MEM_VOLATILE_P (src)
                  && GET_MODE (src) != BLKmode
                  && general_operand (src, GET_MODE (src))
                  /* Are the operands unchanged since the start of the
                     block?  */
                  && oprs_unchanged_p (src, insn, false)
                  && !(cfun->can_throw_non_call_exceptions && may_trap_p (src))
                  && !side_effects_p (src)
                  /* Is the expression recorded?  */
                  && (expr = lookup_expr_in_table (src)) != NULL)
                {
                  /* We now have a load (insn) and an available memory at
                     its BB start (expr).  Try to remove the load if it
                     is redundant.  */
                  eliminate_partially_redundant_load (bb, insn, expr);
                }
            }

          /* Keep track of everything modified by this insn, so that we
             know what has been modified since the start of the current
             basic block.  */
          if (INSN_P (insn))
            record_opr_changes (insn);
        }
    }

  commit_edge_insertions ();
}

/* Go over the expression hash table and delete insns that were
   marked for later deletion.  */

/* This helper is called via htab_traverse.  */
static int
delete_redundant_insns_1 (void **slot, void *data ATTRIBUTE_UNUSED)
{
  struct expr *expr = (struct expr *) *slot;
  struct occr *occr;

  for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
    {
      if (occr->deleted_p && dbg_cnt (gcse2_delete))
        {
          delete_insn (occr->insn);
          stats.insns_deleted++;

          if (dump_file)
            {
              fprintf (dump_file, "deleting insn:\n");
              print_rtl_single (dump_file, occr->insn);
              fprintf (dump_file, "\n");
            }
        }
    }

  return 1;
}

static void
delete_redundant_insns (void)
{
  htab_traverse (expr_table, delete_redundant_insns_1, NULL);
  if (dump_file)
    fprintf (dump_file, "\n");
}

/* Main entry point of the GCSE after reload - clean up some redundant
   loads due to spilling.  */

static void
gcse_after_reload_main (rtx f ATTRIBUTE_UNUSED)
{
  memset (&stats, 0, sizeof (stats));

  /* Allocate memory for this pass.
     Also computes and initializes the insns' CUIDs.  */
  alloc_mem ();

  /* We need alias analysis.  */
  init_alias_analysis ();

  compute_hash_table ();

  if (dump_file)
    dump_hash_table (dump_file);

  if (htab_elements (expr_table) > 0)
    {
      eliminate_partially_redundant_loads ();
      delete_redundant_insns ();

      if (dump_file)
        {
          fprintf (dump_file, "GCSE AFTER RELOAD stats:\n");
          fprintf (dump_file, "copies inserted: %d\n", stats.copies_inserted);
          fprintf (dump_file, "moves inserted: %d\n", stats.moves_inserted);
          fprintf (dump_file, "insns deleted: %d\n", stats.insns_deleted);
          fprintf (dump_file, "\n\n");
        }

      statistics_counter_event (cfun, "copies inserted",
                                stats.copies_inserted);
      statistics_counter_event (cfun, "moves inserted",
                                stats.moves_inserted);
      statistics_counter_event (cfun, "insns deleted",
                                stats.insns_deleted);
    }

  /* We are finished with alias analysis.  */
  end_alias_analysis ();

  free_mem ();
}

\f
static bool
gate_handle_gcse2 (void)
{
  return (optimize > 0 && flag_gcse_after_reload
          && optimize_function_for_speed_p (cfun));
}


static unsigned int
rest_of_handle_gcse2 (void)
{
  gcse_after_reload_main (get_insns ());
  rebuild_jump_labels (get_insns ());
  return 0;
}

struct rtl_opt_pass pass_gcse2 =
{
 {
  RTL_PASS,
  "gcse2",                              /* name */
  gate_handle_gcse2,                    /* gate */
  rest_of_handle_gcse2,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_GCSE_AFTER_RELOAD,                 /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_rtl_sharing
  | TODO_verify_flow | TODO_ggc_collect /* todo_flags_finish */
 }
};