/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass
*/

/* References searched while implementing this.

   Compilers: Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/

#include "config.h"
#include "system.h"
#include "toplev.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "ggc.h"
#include "params.h"

#include "obstack.h"
#define obstack_chunk_alloc gmalloc
#define obstack_chunk_free free

/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */
#define FOLLOW_BACK_EDGES 1

/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.
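
   For example (an illustrative candidate, written in standard RTL syntax),
   an insn whose pattern is

     (set (reg:SI 105) (plus:SI (reg:SI 100) (const_int 4)))

   computes an expression worth tracking, while a plain register copy or a
   load of a constant is rejected by want_to_gcse_p and left to the
   copy/constant propagation passes described above.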

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found that doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.
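
   As an illustrative example (written at the source level rather than as
   RTL), consider

     if (cond)
       x = a + b;
     y = a + b;

   The second computation of `a + b' is partially redundant.  PRE inserts a
   copy of the expression on the path where COND is false, makes both
   computations also set a new pseudo, and rewrites the second statement to
   use that pseudo, so `a + b' is evaluated only once along every path.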

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is created by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline, so that one can measure what
   speedup, if any, can be achieved; maybe later, when things settle, things
   can be rearranged.

   Help stamp out big monolithic functions!  */

/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

/* Non-zero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy_p[(int) NUM_MACHINE_MODES];

/* Non-zero if can_copy_p has been initialized.  */
static int can_copy_init_p;

struct reg_use {rtx reg_rtx; };

/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Non-zero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Non-zero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};

/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

/* Total size of the expression hash table, in elements.  */
static unsigned int expr_hash_table_size;

/* The table itself.
   This is an array of `expr_hash_table_size' elements.  */
static struct expr **expr_hash_table;

/* Total size of the copy propagation hash table, in elements.  */
static unsigned int set_hash_table_size;

/* The table itself.
   This is an array of `set_hash_table_size' elements.  */
static struct expr **set_hash_table;

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) \
  (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
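
/* As a worked example of the mapping (hypothetical UIDs): if the insn
   stream contains a NOTE with UID 3 followed by real insns with UIDs 5
   and 9, alloc_gcse_mem below sets uid_cuid[5] = 0 and uid_cuid[9] = 1,
   and cuid_insn[0] and cuid_insn[1] map back to those insns.  A non-insn
   such as the NOTE receives the cuid of the next real insn, so
   uid_cuid[3] = 0 as well.  */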

/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Maximum number of cse-able expressions found.  */
static int n_exprs;

/* Maximum number of assignments for copy propagation found.  */
static int n_sets;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
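
/* As an example of the layout (hypothetical pseudo and insns): if pseudo
   120 is set by insns I1 and then I7, reg_set_table[120] points to the
   entry for I7, whose `next' field points to the entry for I1, since
   record_one_set below links each new reg_set onto the head of the
   list.  */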

/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by anything except themselves
   (i.e., loads and stores to a single location).
   We can then allow movement of these MEM refs with a little special
   allowance.  (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be movable towards the exit block.  */

struct ls_expr
{
  struct expr * expr;           /* Gcse expression reference for LM.  */
  rtx pattern;                  /* Pattern of this mem.  */
  rtx loads;                    /* INSN list of loads seen.  */
  rtx stores;                   /* INSN list of stores seen.  */
  struct ls_expr * next;        /* Next in the list.  */
  int invalid;                  /* Invalid for some reason.  */
  int index;                    /* If it maps to a bitmap index.  */
  int hash_index;               /* Index when in a hash table.  */
  rtx reaching_reg;             /* Register to use when re-writing.  */
};

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static sbitmap reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number, of lists of insns which modify
   memory within the block.  */
static rtx * modify_mem_list;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;

/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays, i.e.
   rd_kill[block_num][cuid_num]
   ae_kill[block_num][expr_num]  */

/* For reaching defs */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* for available exprs */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;

/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  int current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};

static void compute_can_copy PARAMS ((void));
static char *gmalloc PARAMS ((unsigned int));
static char *grealloc PARAMS ((char *, unsigned int));
static char *gcse_alloc PARAMS ((unsigned long));
static void alloc_gcse_mem PARAMS ((rtx));
static void free_gcse_mem PARAMS ((void));
static void alloc_reg_set_mem PARAMS ((int));
static void free_reg_set_mem PARAMS ((void));
static int get_bitmap_width PARAMS ((int, int, int));
static void record_one_set PARAMS ((int, rtx));
static void record_set_info PARAMS ((rtx, rtx, void *));
static void compute_sets PARAMS ((rtx));
static void hash_scan_insn PARAMS ((rtx, int, int));
static void hash_scan_set PARAMS ((rtx, rtx, int));
static void hash_scan_clobber PARAMS ((rtx, rtx));
static void hash_scan_call PARAMS ((rtx, rtx));
static int want_to_gcse_p PARAMS ((rtx));
static int oprs_unchanged_p PARAMS ((rtx, rtx, int));
static int oprs_anticipatable_p PARAMS ((rtx, rtx));
static int oprs_available_p PARAMS ((rtx, rtx));
static void insert_expr_in_table PARAMS ((rtx, enum machine_mode, rtx,
                                          int, int));
static void insert_set_in_table PARAMS ((rtx, rtx));
static unsigned int hash_expr PARAMS ((rtx, enum machine_mode, int *, int));
static unsigned int hash_expr_1 PARAMS ((rtx, enum machine_mode, int *));
static unsigned int hash_string_1 PARAMS ((const char *));
static unsigned int hash_set PARAMS ((int, int));
static int expr_equiv_p PARAMS ((rtx, rtx));
static void record_last_reg_set_info PARAMS ((rtx, int));
static void record_last_mem_set_info PARAMS ((rtx));
static void record_last_set_info PARAMS ((rtx, rtx, void *));
static void compute_hash_table PARAMS ((int));
static void alloc_set_hash_table PARAMS ((int));
static void free_set_hash_table PARAMS ((void));
static void compute_set_hash_table PARAMS ((void));
static void alloc_expr_hash_table PARAMS ((unsigned int));
static void free_expr_hash_table PARAMS ((void));
static void compute_expr_hash_table PARAMS ((void));
static void dump_hash_table PARAMS ((FILE *, const char *, struct expr **,
                                     int, int));
static struct expr *lookup_expr PARAMS ((rtx));
static struct expr *lookup_set PARAMS ((unsigned int, rtx));
static struct expr *next_set PARAMS ((unsigned int, struct expr *));
static void reset_opr_set_tables PARAMS ((void));
static int oprs_not_set_p PARAMS ((rtx, rtx));
static void mark_call PARAMS ((rtx));
static void mark_set PARAMS ((rtx, rtx));
static void mark_clobber PARAMS ((rtx, rtx));
static void mark_oprs_set PARAMS ((rtx));
static void alloc_cprop_mem PARAMS ((int, int));
static void free_cprop_mem PARAMS ((void));
static void compute_transp PARAMS ((rtx, int, sbitmap *, int));
static void compute_transpout PARAMS ((void));
static void compute_local_properties PARAMS ((sbitmap *, sbitmap *, sbitmap *,
                                              int));
static void compute_cprop_data PARAMS ((void));
static void find_used_regs PARAMS ((rtx *, void *));
static int try_replace_reg PARAMS ((rtx, rtx, rtx));
static struct expr *find_avail_set PARAMS ((int, rtx));
static int cprop_jump PARAMS ((basic_block, rtx, rtx, rtx));
#ifdef HAVE_cc0
static int cprop_cc0_jump PARAMS ((basic_block, rtx, struct reg_use *, rtx));
#endif
static void mems_conflict_for_gcse_p PARAMS ((rtx, rtx, void *));
static int load_killed_in_block_p PARAMS ((basic_block, int, rtx, int));
static void canon_list_insert PARAMS ((rtx, rtx, void *));
static int cprop_insn PARAMS ((basic_block, rtx, int));
static int cprop PARAMS ((int));
static int one_cprop_pass PARAMS ((int, int));
static void alloc_pre_mem PARAMS ((int, int));
static void free_pre_mem PARAMS ((void));
static void compute_pre_data PARAMS ((void));
static int pre_expr_reaches_here_p PARAMS ((basic_block, struct expr *,
                                            basic_block));
static void insert_insn_end_bb PARAMS ((struct expr *, basic_block, int));
static void pre_insert_copy_insn PARAMS ((struct expr *, rtx));
static void pre_insert_copies PARAMS ((void));
static int pre_delete PARAMS ((void));
static int pre_gcse PARAMS ((void));
static int one_pre_gcse_pass PARAMS ((int));
static void add_label_notes PARAMS ((rtx, rtx));
static void alloc_code_hoist_mem PARAMS ((int, int));
static void free_code_hoist_mem PARAMS ((void));
static void compute_code_hoist_vbeinout PARAMS ((void));
static void compute_code_hoist_data PARAMS ((void));
static int hoist_expr_reaches_here_p PARAMS ((basic_block, int, basic_block,
                                              char *));
static void hoist_code PARAMS ((void));
static int one_code_hoisting_pass PARAMS ((void));
static void alloc_rd_mem PARAMS ((int, int));
static void free_rd_mem PARAMS ((void));
static void handle_rd_kill_set PARAMS ((rtx, int, basic_block));
static void compute_kill_rd PARAMS ((void));
static void compute_rd PARAMS ((void));
static void alloc_avail_expr_mem PARAMS ((int, int));
static void free_avail_expr_mem PARAMS ((void));
static void compute_ae_gen PARAMS ((void));
static int expr_killed_p PARAMS ((rtx, basic_block));
static void compute_ae_kill PARAMS ((sbitmap *, sbitmap *));
static int expr_reaches_here_p PARAMS ((struct occr *, struct expr *,
                                        basic_block, int));
static rtx computing_insn PARAMS ((struct expr *, rtx));
static int def_reaches_here_p PARAMS ((rtx, rtx));
static int can_disregard_other_sets PARAMS ((struct reg_set **, rtx, int));
static int handle_avail_expr PARAMS ((rtx, struct expr *));
static int classic_gcse PARAMS ((void));
static int one_classic_gcse_pass PARAMS ((int));
static void invalidate_nonnull_info PARAMS ((rtx, rtx, void *));
static void delete_null_pointer_checks_1 PARAMS ((varray_type *,
                                                  unsigned int *,
                                                  sbitmap *, sbitmap *,
                                                  struct null_pointer_info *));
static rtx process_insert_insn PARAMS ((struct expr *));
static int pre_edge_insert PARAMS ((struct edge_list *, struct expr **));
static int expr_reaches_here_p_work PARAMS ((struct occr *, struct expr *,
                                             basic_block, int, char *));
static int pre_expr_reaches_here_p_work PARAMS ((basic_block, struct expr *,
                                                 basic_block, char *));
static struct ls_expr * ldst_entry PARAMS ((rtx));
static void free_ldst_entry PARAMS ((struct ls_expr *));
static void free_ldst_mems PARAMS ((void));
static void print_ldst_list PARAMS ((FILE *));
static struct ls_expr * find_rtx_in_ldst PARAMS ((rtx));
static int enumerate_ldsts PARAMS ((void));
static inline struct ls_expr * first_ls_expr PARAMS ((void));
static inline struct ls_expr * next_ls_expr PARAMS ((struct ls_expr *));
static int simple_mem PARAMS ((rtx));
static void invalidate_any_buried_refs PARAMS ((rtx));
static void compute_ld_motion_mems PARAMS ((void));
static void trim_ld_motion_mems PARAMS ((void));
static void update_ld_motion_stores PARAMS ((struct expr *));
static void reg_set_info PARAMS ((rtx, rtx, void *));
static int store_ops_ok PARAMS ((rtx, basic_block));
static void find_moveable_store PARAMS ((rtx));
static int compute_store_table PARAMS ((void));
static int load_kills_store PARAMS ((rtx, rtx));
static int find_loads PARAMS ((rtx, rtx));
static int store_killed_in_insn PARAMS ((rtx, rtx));
static int store_killed_after PARAMS ((rtx, rtx, basic_block));
static int store_killed_before PARAMS ((rtx, rtx, basic_block));
static void build_store_vectors PARAMS ((void));
static void insert_insn_start_bb PARAMS ((rtx, basic_block));
static int insert_store PARAMS ((struct ls_expr *, edge));
static void replace_store_insn PARAMS ((rtx, rtx, basic_block));
static void delete_store PARAMS ((struct ls_expr *, basic_block));
static void free_store_memory PARAMS ((void));
static void store_motion PARAMS ((void));

/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (f, file)
     rtx f;
     FILE *file;
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;
  /* Insertion of instructions on edges can create new basic blocks; we
     need the original basic block count so that we can properly deallocate
     arrays sized on the number of basic blocks originally in the cfg.  */
  int orig_bb_count;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  orig_bb_count = n_basic_blocks;

  /* Return if there's nothing to do.  */
  if (n_basic_blocks <= 1)
    return 0;

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple of switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    {
      if (warn_disabled_optimization)
        warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
                 n_basic_blocks, n_edges / n_basic_blocks);
      return 0;
    }

  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_gcse_regno)
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      if (warn_disabled_optimization)
        warning ("GCSE disabled: %d basic blocks and %d registers",
                 n_basic_blocks, max_gcse_regno);

      return 0;
    }

  /* See what modes support reg/reg copy operations.  */
  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = 1;
    }

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias analysis.  */
  init_alias_analysis ();

  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
        fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
         and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
         during this pass.  */
      changed = one_cprop_pass (pass + 1, 0);

      if (optimize_size)
        changed |= one_classic_gcse_pass (pass + 1);
      else
        {
          changed |= one_pre_gcse_pass (pass + 1);

          /* We may have just created new basic blocks.  Release and
             recompute various things which are sized on the number of
             basic blocks.  */
          if (changed)
            {
              int i;

              for (i = 0; i < orig_bb_count; i++)
                {
                  if (modify_mem_list[i])
                    free_INSN_LIST_list (modify_mem_list + i);
                  if (canon_modify_mem_list[i])
                    free_INSN_LIST_list (canon_modify_mem_list + i);
                }
              modify_mem_list
                = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx *));
              canon_modify_mem_list
                = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx *));
              memset ((char *) modify_mem_list, 0,
                      n_basic_blocks * sizeof (rtx *));
              memset ((char *) canon_modify_mem_list, 0,
                      n_basic_blocks * sizeof (rtx *));
              orig_bb_count = n_basic_blocks;
            }

          free_reg_set_mem ();
          alloc_reg_set_mem (max_reg_num ());
          compute_sets (f);
          run_jump_opt_after_gcse = 1;
        }

      if (max_pass_bytes < bytes_used)
        max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We cannot
         re-use the existing allocated memory because the tables will not
         have info for the insns or registers created by partial
         redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
         for code size -- it rarely makes programs faster, and can make
         them bigger if we did partial redundancy elimination (when optimizing
         for space, we use a classic gcse algorithm instead of partial
         redundancy algorithms).  */
      if (optimize_size)
        {
          max_gcse_regno = max_reg_num ();
          alloc_gcse_mem (f);
          changed |= one_code_hoisting_pass ();
          free_gcse_mem ();

          if (max_pass_bytes < bytes_used)
            max_pass_bytes = bytes_used;
        }

      if (file)
        {
          fprintf (file, "\n");
          fflush (file);
        }

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);

  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
               current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
               pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias analysis.  */
  end_alias_analysis ();

  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  if (!optimize_size && flag_gcse_sm)
    store_motion ();

  return run_jump_opt_after_gcse;
}

/* Misc. utilities.  */

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy ()
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy_p, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
        can_copy_p[i] = 0;
#else
        reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
        insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
        if (recog (PATTERN (insn), insn, NULL) >= 0)
          can_copy_p[i] = 1;
#endif
      }
    else
      can_copy_p[i] = 1;

  end_sequence ();
}

/* Cover function to xmalloc to record bytes allocated.  */

static char *
gmalloc (size)
     unsigned int size;
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static char *
grealloc (ptr, size)
     char *ptr;
     unsigned int size;
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.
   We don't need to record the bytes allocated here since
   obstack_chunk_alloc is set to gmalloc.  */

static char *
gcse_alloc (size)
     unsigned long size;
{
  return (char *) obstack_alloc (&gcse_obstack, size);
}

/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (f)
     rtx f;
{
  int i, n;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  n = (max_uid + 1) * sizeof (int);
  uid_cuid = (int *) gmalloc (n);
  memset ((char *) uid_cuid, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        uid_cuid[INSN_UID (insn)] = i++;
      else
        uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  n = (max_cuid + 1) * sizeof (rtx);
  cuid_insn = (rtx *) gmalloc (n);
  memset ((char *) cuid_insn, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = (sbitmap) sbitmap_alloc (max_gcse_regno);

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks,
                                                       max_gcse_regno);

  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx *));
  canon_modify_mem_list = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx *));
  memset ((char *) modify_mem_list, 0, n_basic_blocks * sizeof (rtx *));
  memset ((char *) canon_modify_mem_list, 0, n_basic_blocks * sizeof (rtx *));
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem ()
{
  free (uid_cuid);
  free (cuid_insn);

  free (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);

  /* Free (and re-cache for later reuse) any INSN_LIST nodes we have
     allocated.  */
  {
    int i;

    for (i = 0; i < n_basic_blocks; i++)
      {
        if (modify_mem_list[i])
          free_INSN_LIST_list (modify_mem_list + i);
        if (canon_modify_mem_list[i])
          free_INSN_LIST_list (canon_modify_mem_list + i);
      }

    free (modify_mem_list);
    free (canon_modify_mem_list);
    modify_mem_list = 0;
    canon_modify_mem_list = 0;
  }
}

/* Many of the global optimization algorithms work by solving dataflow
   equations for various expressions.  Initially, some local value is
   computed for each expression in each block.  Then, the values across the
   various blocks are combined (by following flow graph edges) to arrive at
   global values.  Conceptually, each set of equations is independent.  We
   may therefore solve all the equations in parallel, solve them one at a
   time, or pick any intermediate approach.

   When you're going to need N two-dimensional bitmaps, each X (say, the
   number of blocks) by Y (say, the number of expressions), call this
   function.  It's not important what X and Y represent; only that Y
   correspond to the things that can be done in parallel.  This function will
   return an appropriate chunking factor C; you should solve C sets of
   equations in parallel.  By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less space.  */

static int
get_bitmap_width (n, x, y)
     int n;
     int x;
     int y;
{
  /* It's not really worth figuring out *exactly* how much memory will
     be used by a particular choice.  The important thing is to get
     something approximately right.  */
  size_t max_bitmap_memory = 10 * 1024 * 1024;

  /* The number of bytes we'd use for a single column of minimum
     width.  */
  size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);

  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;

  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
                             / column_size);
}
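
/* As a usage sketch (the variable names here are illustrative, not an
   actual caller in this file): a pass needing two bitmaps per expression
   over all blocks might chunk its dataflow solves as

     int width = get_bitmap_width (2, n_basic_blocks, n_exprs);
     int base;

     for (base = 0; base < n_exprs; base += width)
       ... solve the equations for expressions [base, base + width) ...
*/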

/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   SETP controls which hash table to look at.  If zero, this routine looks at
   the expr hash table; if nonzero this routine looks at the set hash table.
   Additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (transp, comp, antloc, setp)
     sbitmap *transp;
     sbitmap *comp;
     sbitmap *antloc;
     int setp;
{
  unsigned int i, hash_table_size;
  struct expr **hash_table;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (setp)
        sbitmap_vector_zero (transp, n_basic_blocks);
      else
        sbitmap_vector_ones (transp, n_basic_blocks);
    }

  if (comp)
    sbitmap_vector_zero (comp, n_basic_blocks);
  if (antloc)
    sbitmap_vector_zero (antloc, n_basic_blocks);

  /* We use the same code for cprop, pre and hoisting.  For cprop
     we care about the set hash table, for pre and hoisting we
     care about the expr hash table.  */
  hash_table_size = setp ? set_hash_table_size : expr_hash_table_size;
  hash_table = setp ? set_hash_table : expr_hash_table;

  for (i = 0; i < hash_table_size; i++)
    {
      struct expr *expr;

      for (expr = hash_table[i]; expr != NULL; expr = expr->next_same_hash)
        {
          int indx = expr->bitmap_index;
          struct occr *occr;

          /* The expression is transparent in this block if it is not killed.
             We start by assuming all are transparent [none are killed], and
             then reset the bits for those that are.  */
          if (transp)
            compute_transp (expr->expr, indx, transp, setp);

          /* The occurrences recorded in antic_occr are exactly those that
             we want to set to non-zero in ANTLOC.  */
          if (antloc)
            for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->deleted_p = 0;
              }

          /* The occurrences recorded in avail_occr are exactly those that
             we want to set to non-zero in COMP.  */
          if (comp)
            for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->copied_p = 0;
              }

          /* While we're scanning the table, this is a good place to
             initialize this.  */
          expr->reaching_reg = 0;
        }
    }
}

/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (n_regs)
     int n_regs;
{
  unsigned int n;

  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  n = reg_set_table_size * sizeof (struct reg_set *);
  reg_set_table = (struct reg_set **) gmalloc (n);
  memset ((char *) reg_set_table, 0, n);

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem ()
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}

/* Record REGNO in the reg_set table.  */

static void
record_one_set (regno, insn)
     int regno;
     rtx insn;
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table
        = (struct reg_set **) grealloc ((char *) reg_set_table,
                                        new_size * sizeof (struct reg_set *));
      memset ((char *) (reg_set_table + reg_set_table_size), 0,
              (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
                                                   sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}

/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (dest, setter, data)
     rtx dest, setter ATTRIBUTE_UNUSED;
     void *data;
{
  rtx record_set_insn = (rtx) data;

  if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (f)
     rtx f;
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), record_set_info, insn);
}

/* Hash table support.  */

/* For each register, the cuid of the first/last insn in the block
   that set it, or -1 if not set.  */
#define NEVER_SET -1

struct reg_avail_info
{
  int last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static int current_bb;

/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static int
want_to_gcse_p (x)
     rtx x;
{
  static rtx test_insn = 0;
  int num_clobbers = 0;
  int icode;

  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CALL:
      return 0;

    default:
      break;
    }

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
        = make_insn_raw (gen_rtx_SET (VOIDmode,
                                      gen_rtx_REG (word_mode,
                                                   FIRST_PSEUDO_REGISTER * 2),
                                      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
      ggc_add_rtx_root (&test_insn, 1);
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
          && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}
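
/* For example, a source such as (plus:SI (reg:SI 100) (const_int 4))
   passes the filter above, while a bare REG, SUBREG, CONST_INT,
   CONST_DOUBLE or CALL is rejected immediately by the switch at the
   top of want_to_gcse_p.  */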

/* Return non-zero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (x, insn, avail_p)
     rtx x, insn;
     int avail_p;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
        struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

        if (info->last_bb != current_bb)
          return 1;
        if (avail_p)
          return info->last_set < INSN_CUID (insn);
        else
          return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (BASIC_BLOCK (current_bb), INSN_CUID (insn),
                                  x, avail_p))
        return 0;
      else
        return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call needed at this
             level, change it into iteration.  This function is called enough
             to be worth it.  */
          if (i == 0)
            return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

          else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
            return 0;
    }

  return 1;
}

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  Nonzero if mems_conflict_for_gcse_p finds a
   conflict between two memory references.  */
static int gcse_mems_conflict_p;

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  The memory reference of a load instruction;
   mems_conflict_for_gcse_p checks whether a memory store conflicts with
   this load.  */
static rtx gcse_mem_operand;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in gcse_mem_operand, then set
   gcse_mems_conflict_p to a nonzero value.  */

static void
mems_conflict_for_gcse_p (dest, setter, data)
     rtx dest, setter ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == SIGN_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (GET_CODE (dest) != MEM)
    return;

  /* If we are setting a MEM in our list of specially recognized MEMs,
     don't mark as killed this time.  */

  if (dest == gcse_mem_operand && pre_ldst_mems != NULL)
    {
      if (!find_rtx_in_ldst (dest))
        gcse_mems_conflict_p = 1;
      return;
    }

  if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
                       rtx_addr_varies_p))
    gcse_mems_conflict_p = 1;
}
1496
1497/* Return nonzero if the expression in X (a memory reference) is killed
1498 in block BB before or after the insn with the CUID in UID_LIMIT.
1499 AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
1500 before UID_LIMIT.
1501
1502 To check the entire block, set UID_LIMIT to max_uid + 1 and
1503 AVAIL_P to 0. */
1504
1505static int
1506load_killed_in_block_p (bb, uid_limit, x, avail_p)
e2d2ed72 1507 basic_block bb;
a13d4ebf
AM
1508 int uid_limit;
1509 rtx x;
1510 int avail_p;
1511{
e2d2ed72 1512 rtx list_entry = modify_mem_list[bb->index];
a13d4ebf
AM
1513 while (list_entry)
1514 {
1515 rtx setter;
1516 /* Ignore entries in the list that do not apply. */
1517 if ((avail_p
1518 && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
1519 || (! avail_p
1520 && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
1521 {
1522 list_entry = XEXP (list_entry, 1);
1523 continue;
1524 }
1525
1526 setter = XEXP (list_entry, 0);
1527
1528 /* If SETTER is a call everything is clobbered. Note that calls
1529 to pure functions are never put on the list, so we need not
1530 worry about them. */
1531 if (GET_CODE (setter) == CALL_INSN)
1532 return 1;
1533
1534 /* SETTER must be an INSN of some kind that sets memory. Call
1535 note_stores to examine each hunk of memory that is modified.
1536
1537 The note_stores interface is pretty limited, so we have to
1538 communicate via global variables. Yuk. */
1539 gcse_mem_operand = x;
1540 gcse_mems_conflict_p = 0;
1541 note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
1542 if (gcse_mems_conflict_p)
1543 return 1;
1544 list_entry = XEXP (list_entry, 1);
1545 }
1546 return 0;
1547}
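
/* Illustrative example (the CUIDs are invented, not from this file):
   in a block containing

     CUID 10:  r1 = MEM (r2)
     CUID 11:  MEM (r3) = r4

   load_killed_in_block_p (bb, 10, MEM (r2), 1) scans stores with CUIDs
   greater than 10, so the store at CUID 11 kills the load whenever
   true_dependence decides MEM (r3) may alias MEM (r2).  With AVAIL_P
   zero, only stores at or before CUID 10 would be considered.  */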

/* Return non-zero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (x, insn)
     rtx x, insn;
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return non-zero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (x, insn)
     rtx x, insn;
{
  return oprs_unchanged_p (x, insn, 1);
}
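
/* A small example of the difference (register numbers invented):
   in a block

     insn A:  r1 = r2 + r3
     insn B:  r2 = ...

   the operands of (plus r2 r3) are unchanged from the block start up
   to insn A, so the expression is anticipatable there; but r2 changes
   after insn A, so the expression is not available at the block end.  */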

/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.

   ??? One might want to merge this with canon_hash.  Later.  */

static unsigned int
hash_expr (x, mode, do_not_record_p, hash_table_size)
     rtx x;
     enum machine_mode mode;
     int *do_not_record_p;
     int hash_table_size;
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_expr_1 (x, mode, do_not_record_p);
  return hash % hash_table_size;
}

/* Hash a string.  Just add its bytes up.  */

static inline unsigned
hash_string_1 (ps)
     const char *ps;
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}
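
/* For instance, hash_string_1 ("ab") is 'a' + 'b' == 97 + 98 == 195.
   Being a plain byte sum, it makes anagrams collide ("ab" and "ba"
   hash identically); equality is always re-checked separately, e.g.
   by the strcmp calls in expr_equiv_p below.  */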

/* Subroutine of hash_expr to do the actual work.  */

static unsigned int
hash_expr_1 (x, mode, do_not_record_p)
     rtx x;
     enum machine_mode mode;
     int *do_not_record_p;
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  const char *fmt;

  /* Used to turn recursion into iteration.  We can't rely on GCC's
     tail-recursion elimination since we need to keep accumulating values
     in HASH.  */

  if (x == 0)
    return hash;

 repeat:
  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      hash += ((unsigned int) REG << 7) + REGNO (x);
      return hash;

    case CONST_INT:
      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
               + (unsigned int) INTVAL (x));
      return hash;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
         the integers representing the constant.  */
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
        for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
          hash += (unsigned int) XWINT (x, i);
      else
        hash += ((unsigned int) CONST_DOUBLE_LOW (x)
                 + (unsigned int) CONST_DOUBLE_HIGH (x));
      return hash;

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
         differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
               + CODE_LABEL_NUMBER (XEXP (x, 0)));
      return hash;

    case SYMBOL_REF:
      {
        /* Don't hash on the symbol's address to avoid bootstrap differences.
           Different hash values may cause expressions to be recorded in
           different orders and thus different registers to be used in the
           final assembler.  This also avoids differences in the dump files
           between various stages.  */
        unsigned int h = 0;
        const unsigned char *p = (const unsigned char *) XSTR (x, 0);

        while (*p)
          h += (h << 7) + *p++; /* ??? revisit */

        hash += ((unsigned int) SYMBOL_REF << 7) + h;
        return hash;
      }

    case MEM:
      if (MEM_VOLATILE_P (x))
        {
          *do_not_record_p = 1;
          return 0;
        }

      hash += (unsigned int) MEM;
      hash += MEM_ALIAS_SET (x);
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      *do_not_record_p = 1;
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        {
          *do_not_record_p = 1;
          return 0;
        }
      else
        {
          /* We don't want to take the filename and line into account.  */
          hash += (unsigned) code + (unsigned) GET_MODE (x)
            + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
            + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
            + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);

          if (ASM_OPERANDS_INPUT_LENGTH (x))
            {
              for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
                {
                  hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
                                        GET_MODE (ASM_OPERANDS_INPUT (x, i)),
                                        do_not_record_p)
                           + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
                                            (x, i)));
                }

              hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
              x = ASM_OPERANDS_INPUT (x, 0);
              mode = GET_MODE (x);
              goto repeat;
            }
          return hash;
        }

    default:
      break;
    }

  hash += (unsigned) code + (unsigned) GET_MODE (x);
  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = XEXP (x, i);
              goto repeat;
            }

          hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
          if (*do_not_record_p)
            return 0;
        }

      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          {
            hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
            if (*do_not_record_p)
              return 0;
          }

      else if (fmt[i] == 's')
        hash += hash_string_1 (XSTR (x, i));
      else if (fmt[i] == 'i')
        hash += (unsigned int) XINT (x, i);
      else
        abort ();
    }

  return hash;
}

/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.  This simplifies the PRE copy
   propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (regno, hash_table_size)
     int regno;
     int hash_table_size;
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}
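
/* Example: with the minimum table size of 11 buckets, sets of pseudos
   70 and 81 both map to bucket 4 (70 % 11 == 81 % 11 == 4).
   lookup_set resolves such collisions by walking the chain and
   comparing the REGNO of each entry's SET_DEST.  */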

/* Return non-zero if X is equivalent to Y.
   ??? Borrowed from cse.c.  Might want to remerge with cse.c.  Later.  */

static int
expr_equiv_p (x, y)
     rtx x, y;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == y)
    return 1;

  if (x == 0 || y == 0)
    return x == y;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
      return x == y;

    case CONST_INT:
      return INTVAL (x) == INTVAL (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      return REGNO (x) == REGNO (y);

    case MEM:
      /* Can't merge two expressions in different alias sets, since we can
         decide that the expression is transparent in a block when it isn't,
         due to it being set with the different alias set.  */
      if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
        return 0;
      break;

      /* For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
               && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
              || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
                  && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));

    case ASM_OPERANDS:
      /* We don't use the generic code below because we want to
         disregard filename and line numbers.  */

      /* A volatile asm isn't equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
        return 0;

      if (GET_MODE (x) != GET_MODE (y)
          || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
          || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
                     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
          || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
          || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
        return 0;

      if (ASM_OPERANDS_INPUT_LENGTH (x))
        {
          for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
            if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
                                ASM_OPERANDS_INPUT (y, i))
                || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
                           ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
              return 0;
        }

      return 1;

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'e':
          if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
            return 0;
          break;

        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;
          for (j = 0; j < XVECLEN (x, i); j++)
            if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
              return 0;
          break;

        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case '0':
          break;

        default:
          abort ();
        }
    }

  return 1;
}
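
/* Thanks to the commutative case above, (plus:SI (reg 70) (reg 71))
   and (plus:SI (reg 71) (reg 70)) compare as equivalent (register
   numbers invented for illustration), whereas non-commutative codes
   such as MINUS fall through to the element-by-element walk and
   compare their operands strictly in order.  */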

/* Insert expression X in INSN in the hash table.
   If it is already present, record it as the last occurrence in INSN's
   basic block.

   MODE is the mode of the value X is being stored into.
   It is only used if X is a CONST_INT.

   ANTIC_P is non-zero if X is an anticipatable expression.
   AVAIL_P is non-zero if X is an available expression.  */

static void
insert_expr_in_table (x, mode, insn, antic_p, avail_p)
     rtx x;
     enum machine_mode mode;
     rtx insn;
     int antic_p, avail_p;
{
  int found, do_not_record_p;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *antic_occr, *avail_occr;
  struct occr *last_occr = NULL;

  hash = hash_expr (x, mode, &do_not_record_p, expr_hash_table_size);

  /* Do not insert expression in table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  cur_expr = expr_hash_table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
         the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (expr_hash_table[hash] == NULL)
        /* This is the first pattern that hashed to this index.  */
        expr_hash_table[hash] = cur_expr;
      else
        /* Add EXPR to end of this hash chain.  */
        last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.  */
      cur_expr->expr = x;
      cur_expr->bitmap_index = n_exprs++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence(s).  */
  if (antic_p)
    {
      antic_occr = cur_expr->antic_occr;

      /* Search for another occurrence in the same basic block.  */
      while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
        {
          /* If an occurrence isn't found, save a pointer to the end of
             the list.  */
          last_occr = antic_occr;
          antic_occr = antic_occr->next;
        }

      if (antic_occr)
        /* Found another instance of the expression in the same basic block.
           Prefer the currently recorded one.  We want the first one in the
           block and the block is scanned from start to end.  */
        ; /* nothing to do */
      else
        {
          /* First occurrence of this expression in this basic block.  */
          antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
          bytes_used += sizeof (struct occr);
          /* First occurrence of this expression in any block?  */
          if (cur_expr->antic_occr == NULL)
            cur_expr->antic_occr = antic_occr;
          else
            last_occr->next = antic_occr;

          antic_occr->insn = insn;
          antic_occr->next = NULL;
        }
    }

  if (avail_p)
    {
      avail_occr = cur_expr->avail_occr;

      /* Search for another occurrence in the same basic block.  */
      while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
        {
          /* If an occurrence isn't found, save a pointer to the end of
             the list.  */
          last_occr = avail_occr;
          avail_occr = avail_occr->next;
        }

      if (avail_occr)
        /* Found another instance of the expression in the same basic block.
           Prefer this occurrence to the currently recorded one.  We want
           the last one in the block and the block is scanned from start
           to end.  */
        avail_occr->insn = insn;
      else
        {
          /* First occurrence of this expression in this basic block.  */
          avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
          bytes_used += sizeof (struct occr);

          /* First occurrence of this expression in any block?  */
          if (cur_expr->avail_occr == NULL)
            cur_expr->avail_occr = avail_occr;
          else
            last_occr->next = avail_occr;

          avail_occr->insn = insn;
          avail_occr->next = NULL;
        }
    }
}
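
/* Net effect of the occurrence bookkeeping above (insn numbers
   invented): if one block computes r1 + r2 at insns 4 and 9, the
   recorded anticipatable occurrence stays insn 4 -- the first in the
   block -- while the available occurrence is overwritten to insn 9,
   the last in the block.  */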

/* Insert pattern X in INSN in the hash table.
   X is a SET of a reg to either another reg or a constant.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_set_in_table (x, insn)
     rtx x;
     rtx insn;
{
  int found;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *cur_occr, *last_occr = NULL;

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != REG)
    abort ();

  hash = hash_set (REGNO (SET_DEST (x)), set_hash_table_size);

  cur_expr = set_hash_table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
         the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (set_hash_table[hash] == NULL)
        /* This is the first pattern that hashed to this index.  */
        set_hash_table[hash] = cur_expr;
      else
        /* Add EXPR to end of this hash chain.  */
        last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.
         We must copy X because it can be modified when copy propagation is
         performed on its operands.  */
      cur_expr->expr = copy_rtx (x);
      cur_expr->bitmap_index = n_sets++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence.  */
  cur_occr = cur_expr->avail_occr;

  /* Search for another occurrence in the same basic block.  */
  while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
    {
      /* If an occurrence isn't found, save a pointer to the end of
         the list.  */
      last_occr = cur_occr;
      cur_occr = cur_occr->next;
    }

  if (cur_occr)
    /* Found another instance of the expression in the same basic block.
       Prefer this occurrence to the currently recorded one.  We want the
       last one in the block and the block is scanned from start to end.  */
    cur_occr->insn = insn;
  else
    {
      /* First occurrence of this expression in this basic block.  */
      cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
      bytes_used += sizeof (struct occr);

      /* First occurrence of this expression in any block?  */
      if (cur_expr->avail_occr == NULL)
        cur_expr->avail_occr = cur_occr;
      else
        last_occr->next = cur_occr;

      cur_occr->insn = insn;
      cur_occr->next = NULL;
    }
}

/* Scan pattern PAT of INSN and add an entry to the hash table.  If SET_P is
   non-zero, this is for the assignment hash table, otherwise it is for the
   expression hash table.  */

static void
hash_scan_set (pat, insn, set_p)
     rtx pat, insn;
     int set_p;
{
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);
  rtx note;

  if (GET_CODE (src) == CALL)
    hash_scan_call (src, insn);

  else if (GET_CODE (dest) == REG)
    {
      unsigned int regno = REGNO (dest);
      rtx tmp;

      /* If this is a single set and we are doing constant propagation,
         see if a REG_NOTE shows this equivalent to a constant.  */
      if (set_p && (note = find_reg_equal_equiv_note (insn)) != 0
          && CONSTANT_P (XEXP (note, 0)))
        src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);

      /* Only record sets of pseudo-regs in the hash table.  */
      if (! set_p
          && regno >= FIRST_PSEUDO_REGISTER
          /* Don't GCSE something if we can't do a reg/reg copy.  */
          && can_copy_p [GET_MODE (dest)]
          /* Is SET_SRC something we want to gcse?  */
          && want_to_gcse_p (src)
          /* Don't CSE a nop.  */
          && ! set_noop_p (pat)
          /* Don't GCSE if it has attached REG_EQUIV note.
             At this point only function parameters should have
             REG_EQUIV notes, and if the argument slot is used somewhere
             explicitly, it means the address of the parameter has been
             taken, so we should not extend the lifetime of the pseudo.  */
          && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
              || GET_CODE (XEXP (note, 0)) != MEM))
        {
          /* An expression is not anticipatable if its operands are
             modified before this insn or if this is not the only SET in
             this insn.  */
          int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
          /* An expression is not available if its operands are
             subsequently modified, including this insn.  It's also not
             available if this is a branch, because we can't insert
             a set after the branch.  */
          int avail_p = (oprs_available_p (src, insn)
                         && ! JUMP_P (insn));

          insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p);
        }

      /* Record sets for constant/copy propagation.  */
      else if (set_p
               && regno >= FIRST_PSEUDO_REGISTER
               && ((GET_CODE (src) == REG
                    && REGNO (src) >= FIRST_PSEUDO_REGISTER
                    && can_copy_p [GET_MODE (dest)]
                    && REGNO (src) != regno)
                   || GET_CODE (src) == CONST_INT
                   || GET_CODE (src) == SYMBOL_REF
                   || GET_CODE (src) == CONST_DOUBLE)
               /* A copy is not available if its src or dest is subsequently
                  modified.  Here we want to search from INSN+1 on, but
                  oprs_available_p searches from INSN on.  */
               && (insn == BLOCK_END (BLOCK_NUM (insn))
                   || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
                       && oprs_available_p (pat, tmp))))
        insert_set_in_table (pat, insn);
    }
}
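
/* To make the routing above concrete (pseudo numbers invented): with
   SET_P zero, (set (reg 100) (plus (reg 101) (reg 102))) is a
   candidate for the expression hash table, while with SET_P non-zero
   only reg/reg or reg/constant moves such as (set (reg 100) (reg 101))
   or (set (reg 100) (const_int 7)) can enter the assignment table.  */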

static void
hash_scan_clobber (x, insn)
     rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
{
  /* Currently nothing to do.  */
}

static void
hash_scan_call (x, insn)
     rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
{
  /* Currently nothing to do.  */
}

/* Process INSN and add hash table entries as appropriate.

   Only available expressions that set a single pseudo-reg are recorded.

   Single sets in a PARALLEL could be handled, but it's an extra complication
   that isn't dealt with right now.  The trick is handling the CLOBBERs that
   are also in the PARALLEL.  Later.

   If SET_P is non-zero, this is for the assignment hash table,
   otherwise it is for the expression hash table.
   If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should
   not record any expressions.  */

static void
hash_scan_insn (insn, set_p, in_libcall_block)
     rtx insn;
     int set_p;
     int in_libcall_block;
{
  rtx pat = PATTERN (insn);
  int i;

  if (in_libcall_block)
    return;

  /* Pick out the sets of INSN and for other forms of instructions record
     what's been modified.  */

  if (GET_CODE (pat) == SET)
    hash_scan_set (pat, insn, set_p);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx x = XVECEXP (pat, 0, i);

        if (GET_CODE (x) == SET)
          hash_scan_set (x, insn, set_p);
        else if (GET_CODE (x) == CLOBBER)
          hash_scan_clobber (x, insn);
        else if (GET_CODE (x) == CALL)
          hash_scan_call (x, insn);
      }

  else if (GET_CODE (pat) == CLOBBER)
    hash_scan_clobber (pat, insn);
  else if (GET_CODE (pat) == CALL)
    hash_scan_call (pat, insn);
}

static void
dump_hash_table (file, name, table, table_size, total_size)
     FILE *file;
     const char *name;
     struct expr **table;
     int table_size, total_size;
{
  int i;
  /* Flattened out table, so it's printed in proper order.  */
  struct expr **flat_table;
  unsigned int *hash_val;
  struct expr *expr;

  flat_table
    = (struct expr **) xcalloc (total_size, sizeof (struct expr *));
  hash_val = (unsigned int *) xmalloc (total_size * sizeof (unsigned int));

  for (i = 0; i < table_size; i++)
    for (expr = table[i]; expr != NULL; expr = expr->next_same_hash)
      {
        flat_table[expr->bitmap_index] = expr;
        hash_val[expr->bitmap_index] = i;
      }

  fprintf (file, "%s hash table (%d buckets, %d entries)\n",
           name, table_size, total_size);

  for (i = 0; i < total_size; i++)
    if (flat_table[i] != 0)
      {
        expr = flat_table[i];
        fprintf (file, "Index %d (hash value %d)\n  ",
                 expr->bitmap_index, hash_val[i]);
        print_rtl (file, expr->expr);
        fprintf (file, "\n");
      }

  fprintf (file, "\n");

  free (flat_table);
  free (hash_val);
}

/* Record register first/last/block set information for REGNO in INSN.

   first_set records the first place in the block where the register
   is set and is used to compute "anticipatability".

   last_set records the last place in the block where the register
   is set and is used to compute "availability".

   last_bb records the block for which first_set and last_set are
   valid, as a quick test to invalidate them.

   reg_set_in_block records whether the register is set in the block
   and is used to compute "transparency".  */

static void
record_last_reg_set_info (insn, regno)
     rtx insn;
     int regno;
{
  struct reg_avail_info *info = &reg_avail_info[regno];
  int cuid = INSN_CUID (insn);

  info->last_set = cuid;
  if (info->last_bb != current_bb)
    {
      info->last_bb = current_bb;
      info->first_set = cuid;
      SET_BIT (reg_set_in_block[current_bb], regno);
    }
}
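
/* Worked example (CUIDs invented): if pseudo 70 is set at CUIDs 12
   and 30 of the current block, the second call leaves first_set == 12
   and last_set == 30; these are exactly the bounds oprs_unchanged_p
   compares against INSN_CUID for the anticipatable and available
   tests.  */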

/* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
   Note we store a pair of elements in the list, so they have to be
   taken off pairwise.  */

static void
canon_list_insert (dest, unused1, v_insn)
     rtx dest ATTRIBUTE_UNUSED;
     rtx unused1 ATTRIBUTE_UNUSED;
     void * v_insn;
{
  rtx dest_addr, insn;

  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == SIGN_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with a load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */

  if (GET_CODE (dest) != MEM)
    return;

  dest_addr = get_addr (XEXP (dest, 0));
  dest_addr = canon_rtx (dest_addr);
  insn = (rtx) v_insn;

  canon_modify_mem_list[BLOCK_NUM (insn)] =
    alloc_INSN_LIST (dest_addr, canon_modify_mem_list[BLOCK_NUM (insn)]);
  canon_modify_mem_list[BLOCK_NUM (insn)] =
    alloc_INSN_LIST (dest, canon_modify_mem_list[BLOCK_NUM (insn)]);
}
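
/* After the two pushes above, the per-block list looks like

     (dest1 . (dest_addr1 . (dest2 . (dest_addr2 . ...))))

   i.e. each MEM is immediately followed by its canonicalized address,
   which is why consumers must take the entries off pairwise.  */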

/* Record memory modification information for INSN.  We do not actually care
   about the memory location(s) that are set, or even how they are set
   (consider a CALL_INSN).  We merely need to record which insns modify
   memory.  */

static void
record_last_mem_set_info (insn)
     rtx insn;
{
  /* load_killed_in_block_p will handle the case of calls clobbering
     everything.  */
  modify_mem_list[BLOCK_NUM (insn)] =
    alloc_INSN_LIST (insn, modify_mem_list[BLOCK_NUM (insn)]);

  if (GET_CODE (insn) == CALL_INSN)
    {
      /* Note that traversals of this list (other than for free-ing)
         will break after encountering a CALL_INSN.  So, there's no
         need to insert a pair of items, as canon_list_insert does.  */
      canon_modify_mem_list[BLOCK_NUM (insn)] =
        alloc_INSN_LIST (insn, canon_modify_mem_list[BLOCK_NUM (insn)]);
    }
  else
    note_stores (PATTERN (insn), canon_list_insert, (void *) insn);
}

/* Called from compute_hash_table via note_stores to handle one
   SET or CLOBBER in an insn.  DATA is really the instruction in which
   the SET is taking place.  */

static void
record_last_set_info (dest, setter, data)
     rtx dest, setter ATTRIBUTE_UNUSED;
     void *data;
{
  rtx last_set_insn = (rtx) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    record_last_reg_set_info (last_set_insn, REGNO (dest));
  else if (GET_CODE (dest) == MEM
           /* Ignore pushes, they clobber nothing.  */
           && ! push_operand (dest, GET_MODE (dest)))
    record_last_mem_set_info (last_set_insn);
}

/* Top level function to create an expression or assignment hash table.

   Expression entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform GCSE on,
   - none of the operands are subsequently modified in the block

   Assignment entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform const/copy propagation on,
   - none of the operands or target are subsequently modified in the block

   Currently src must be a pseudo-reg or a const_int.

   SET_P is non-zero for computing the assignment hash table.  */

static void
compute_hash_table (set_p)
     int set_p;
{
  unsigned int i;

  /* While we compute the hash table we also compute a bit array of which
     registers are set in which blocks.
     ??? This isn't needed during const/copy propagation, but it's cheap to
     compute.  Later.  */
  sbitmap_vector_zero (reg_set_in_block, n_basic_blocks);

  /* Re-cache (free) any INSN_LIST nodes we have allocated.  */
  {
    int i;
    for (i = 0; i < n_basic_blocks; i++)
      {
        if (modify_mem_list[i])
          free_INSN_LIST_list (modify_mem_list + i);
        if (canon_modify_mem_list[i])
          free_INSN_LIST_list (canon_modify_mem_list + i);
      }
  }

  /* Some working arrays used to track first and last set in each block.  */
  reg_avail_info = (struct reg_avail_info *)
    gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));

  for (i = 0; i < max_gcse_regno; ++i)
    reg_avail_info[i].last_bb = NEVER_SET;

  for (current_bb = 0; current_bb < n_basic_blocks; current_bb++)
    {
      rtx insn;
      unsigned int regno;
      int in_libcall_block;

      /* First pass over the instructions records information used to
         determine when registers and memory are first and last set.
         ??? hard-reg reg_set_in_block computation
         could be moved to compute_sets since they currently don't change.  */

      for (insn = BLOCK_HEAD (current_bb);
           insn && insn != NEXT_INSN (BLOCK_END (current_bb));
           insn = NEXT_INSN (insn))
        {
          if (! INSN_P (insn))
            continue;

          if (GET_CODE (insn) == CALL_INSN)
            {
              bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP
              if (NON_SAVING_SETJMP
                  && find_reg_note (insn, REG_SETJMP, NULL_RTX))
                clobbers_all = true;
#endif

              for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
                if (clobbers_all
                    || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
                  record_last_reg_set_info (insn, regno);

              mark_call (insn);
            }

          note_stores (PATTERN (insn), record_last_set_info, insn);
        }

      /* The next pass builds the hash table.  */

      for (insn = BLOCK_HEAD (current_bb), in_libcall_block = 0;
           insn && insn != NEXT_INSN (BLOCK_END (current_bb));
           insn = NEXT_INSN (insn))
        if (INSN_P (insn))
          {
            if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
              in_libcall_block = 1;
            else if (set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
              in_libcall_block = 0;
            hash_scan_insn (insn, set_p, in_libcall_block);
            if (!set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
              in_libcall_block = 0;
          }
    }

  free (reg_avail_info);
  reg_avail_info = NULL;
}

/* Allocate space for the set hash table.
   N_INSNS is the number of instructions in the function.
   It is used to determine the number of buckets to use.  */

static void
alloc_set_hash_table (n_insns)
     int n_insns;
{
  int n;

  set_hash_table_size = n_insns / 4;
  if (set_hash_table_size < 11)
    set_hash_table_size = 11;

  /* Attempt to maintain efficient use of hash table.
     Making it an odd number is simplest for now.
     ??? Later take some measurements.  */
  set_hash_table_size |= 1;
  n = set_hash_table_size * sizeof (struct expr *);
  set_hash_table = (struct expr **) gmalloc (n);
}
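
/* Sizing example: a 1000-insn function gets 1000 / 4 == 250 buckets,
   forced odd to 251; anything under 44 insns bottoms out at the
   11-bucket minimum.  (alloc_expr_hash_table below uses n_insns / 2
   with the same odd adjustment.)  */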

/* Free things allocated by alloc_set_hash_table.  */

static void
free_set_hash_table ()
{
  free (set_hash_table);
}

/* Compute the hash table for doing copy/const propagation.  */

static void
compute_set_hash_table ()
{
  /* Initialize count of number of entries in hash table.  */
  n_sets = 0;
  memset ((char *) set_hash_table, 0,
          set_hash_table_size * sizeof (struct expr *));

  compute_hash_table (1);
}

/* Allocate space for the expression hash table.
   N_INSNS is the number of instructions in the function.
   It is used to determine the number of buckets to use.  */

static void
alloc_expr_hash_table (n_insns)
     unsigned int n_insns;
{
  int n;

  expr_hash_table_size = n_insns / 2;
  /* Make sure the amount is usable.  */
  if (expr_hash_table_size < 11)
    expr_hash_table_size = 11;

  /* Attempt to maintain efficient use of hash table.
     Making it an odd number is simplest for now.
     ??? Later take some measurements.  */
  expr_hash_table_size |= 1;
  n = expr_hash_table_size * sizeof (struct expr *);
  expr_hash_table = (struct expr **) gmalloc (n);
}

/* Free things allocated by alloc_expr_hash_table.  */

static void
free_expr_hash_table ()
{
  free (expr_hash_table);
}

/* Compute the hash table for doing GCSE.  */

static void
compute_expr_hash_table ()
{
  /* Initialize count of number of entries in hash table.  */
  n_exprs = 0;
  memset ((char *) expr_hash_table, 0,
          expr_hash_table_size * sizeof (struct expr *));

  compute_hash_table (0);
}

/* Expression tracking support.  */

/* Lookup pattern PAT in the expression table.
   The result is a pointer to the table entry, or NULL if not found.  */

static struct expr *
lookup_expr (pat)
     rtx pat;
{
  int do_not_record_p;
  unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
                                 expr_hash_table_size);
  struct expr *expr;

  if (do_not_record_p)
    return NULL;

  expr = expr_hash_table[hash];

  while (expr && ! expr_equiv_p (expr->expr, pat))
    expr = expr->next_same_hash;

  return expr;
}

/* Lookup REGNO in the set table.  If PAT is non-NULL look for the entry that
   matches it, otherwise return the first entry for REGNO.  The result is a
   pointer to the table entry, or NULL if not found.  */

static struct expr *
lookup_set (regno, pat)
     unsigned int regno;
     rtx pat;
{
  unsigned int hash = hash_set (regno, set_hash_table_size);
  struct expr *expr;

  expr = set_hash_table[hash];

  if (pat)
    {
      while (expr && ! expr_equiv_p (expr->expr, pat))
        expr = expr->next_same_hash;
    }
  else
    {
      while (expr && REGNO (SET_DEST (expr->expr)) != regno)
        expr = expr->next_same_hash;
    }

  return expr;
}

/* Return the next entry for REGNO in list EXPR.  */

static struct expr *
next_set (regno, expr)
     unsigned int regno;
     struct expr *expr;
{
  do
    expr = expr->next_same_hash;
  while (expr && REGNO (SET_DEST (expr->expr)) != regno);

  return expr;
}
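
/* Sketch of the typical iteration over every recorded set of a
   register using the two routines above:

     for (set = lookup_set (regno, NULL_RTX);
          set != NULL;
          set = next_set (regno, set))
       ...

   lookup_set with a null PAT finds the first chain entry for REGNO,
   and next_set skips entries that merely share the hash bucket.  */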

/* Reset tables used to keep track of what's still available [since the
   start of the block].  */

static void
reset_opr_set_tables ()
{
  /* Maintain a bitmap of which regs have been set since beginning of
     the block.  */
  sbitmap_zero (reg_set_bitmap);

  /* Also keep a record of the last instruction to modify memory.
     For now this is very trivial, we only record whether any memory
     location has been modified.  */
  {
    int i;

    /* Re-cache (free) any INSN_LIST nodes we have allocated.  */
    for (i = 0; i < n_basic_blocks; i++)
      {
        if (modify_mem_list[i])
          free_INSN_LIST_list (modify_mem_list + i);
        if (canon_modify_mem_list[i])
          free_INSN_LIST_list (canon_modify_mem_list + i);
      }
  }
}

/* Return non-zero if the operands of X are not set before INSN in
   INSN's basic block.  */

static int
oprs_not_set_p (x, insn)
     rtx x, insn;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    case MEM:
      if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
                                  INSN_CUID (insn), x, 0))
        return 0;
      else
        return oprs_not_set_p (XEXP (x, 0), insn);

    case REG:
      return ! TEST_BIT (reg_set_bitmap, REGNO (x));

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            return oprs_not_set_p (XEXP (x, i), insn);

          if (! oprs_not_set_p (XEXP (x, i), insn))
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
            return 0;
    }

  return 1;
}

/* Mark things set by a CALL.  */

static void
mark_call (insn)
     rtx insn;
{
  if (! CONST_OR_PURE_CALL_P (insn))
    record_last_mem_set_info (insn);
}

/* Mark things set by a SET.  */

static void
mark_set (pat, insn)
     rtx pat, insn;
{
  rtx dest = SET_DEST (pat);

  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == SIGN_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (GET_CODE (dest) == REG)
    SET_BIT (reg_set_bitmap, REGNO (dest));
  else if (GET_CODE (dest) == MEM)
    record_last_mem_set_info (insn);

  if (GET_CODE (SET_SRC (pat)) == CALL)
    mark_call (insn);
}

/* Record things set by a CLOBBER.  */

static void
mark_clobber (pat, insn)
     rtx pat, insn;
{
  rtx clob = XEXP (pat, 0);

  while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
    clob = XEXP (clob, 0);

  if (GET_CODE (clob) == REG)
    SET_BIT (reg_set_bitmap, REGNO (clob));
  else
    record_last_mem_set_info (insn);
}

/* Record things set by INSN.
   This data is used by oprs_not_set_p.  */

static void
mark_oprs_set (insn)
     rtx insn;
{
  rtx pat = PATTERN (insn);
  int i;

  if (GET_CODE (pat) == SET)
    mark_set (pat, insn);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx x = XVECEXP (pat, 0, i);

        if (GET_CODE (x) == SET)
          mark_set (x, insn);
        else if (GET_CODE (x) == CLOBBER)
          mark_clobber (x, insn);
        else if (GET_CODE (x) == CALL)
          mark_call (insn);
      }

  else if (GET_CODE (pat) == CLOBBER)
    mark_clobber (pat, insn);
  else if (GET_CODE (pat) == CALL)
    mark_call (insn);
}

/* Classic GCSE reaching definition support.  */

/* Allocate reaching def variables.  */

static void
alloc_rd_mem (n_blocks, n_insns)
     int n_blocks, n_insns;
{
  rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_kill, n_basic_blocks);

  rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_gen, n_basic_blocks);

  reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (reaching_defs, n_basic_blocks);

  rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_out, n_basic_blocks);
}

/* Free reaching def variables.  */

static void
free_rd_mem ()
{
  sbitmap_vector_free (rd_kill);
  sbitmap_vector_free (rd_gen);
  sbitmap_vector_free (reaching_defs);
  sbitmap_vector_free (rd_out);
}

/* Add INSN to the kills of BB.  REGNO, set in BB, is killed by INSN.  */

static void
handle_rd_kill_set (insn, regno, bb)
     rtx insn;
     int regno;
     basic_block bb;
{
  struct reg_set *this_reg;

  for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg->next)
    if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
      SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
}

/* Compute the set of kills for reaching definitions.  */

static void
compute_kill_rd ()
{
  int bb, cuid;
  unsigned int regno;
  int i;

  /* For each block
       For each set bit in `gen' of the block (i.e. each insn which
           generates a definition in the block)
         Call the reg set by the insn corresponding to that bit regx
         Look at the linked list starting at reg_set_table[regx]
         For each setting of regx in the linked list, which is not in
             this block
           Set the bit in `kill' corresponding to that insn.  */
  for (bb = 0; bb < n_basic_blocks; bb++)
    for (cuid = 0; cuid < max_cuid; cuid++)
      if (TEST_BIT (rd_gen[bb], cuid))
        {
          rtx insn = CUID_INSN (cuid);
          rtx pat = PATTERN (insn);

          if (GET_CODE (insn) == CALL_INSN)
            {
              for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
                if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
                  handle_rd_kill_set (insn, regno, BASIC_BLOCK (bb));
            }

          if (GET_CODE (pat) == PARALLEL)
            {
              for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
                {
                  enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));

                  if ((code == SET || code == CLOBBER)
                      && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
                    handle_rd_kill_set (insn,
                                        REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
                                        BASIC_BLOCK (bb));
                }
            }
          else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
            /* Each setting of this register outside of this block
               must be marked in the set of kills in this block.  */
            handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), BASIC_BLOCK (bb));
        }
}

/* Compute the reaching definitions as in
   Compilers Principles, Techniques, and Tools.  Aho, Sethi, Ullman,
   Chapter 10.  It is the same algorithm as used for computing available
   expressions but applied to the gens and kills of reaching definitions.  */

static void
compute_rd ()
{
  int bb, changed, passes;

  for (bb = 0; bb < n_basic_blocks; bb++)
    sbitmap_copy (rd_out[bb] /*dst*/, rd_gen[bb] /*src*/);

  passes = 0;
  changed = 1;
  while (changed)
    {
      changed = 0;
      for (bb = 0; bb < n_basic_blocks; bb++)
        {
          sbitmap_union_of_preds (reaching_defs[bb], rd_out, bb);
          changed |= sbitmap_union_of_diff (rd_out[bb], rd_gen[bb],
                                            reaching_defs[bb], rd_kill[bb]);
        }
      passes++;
    }

  if (gcse_file)
    fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
}
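
/* The loop above is the standard iterative dataflow solution; in set
   notation it computes, until a fixed point is reached:

     reaching_defs[bb] = UNION over preds p of rd_out[p]
     rd_out[bb] = rd_gen[bb] UNION (reaching_defs[bb] - rd_kill[bb])

   which is what sbitmap_union_of_preds and sbitmap_union_of_diff
   implement bitwise.  */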

/* Classic GCSE available expression support.  */

/* Allocate memory for available expression computation.  */

static void
alloc_avail_expr_mem (n_blocks, n_exprs)
     int n_blocks, n_exprs;
{
  ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_kill, n_basic_blocks);

  ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_gen, n_basic_blocks);

  ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_in, n_basic_blocks);

  ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_out, n_basic_blocks);
}

static void
free_avail_expr_mem ()
{
  sbitmap_vector_free (ae_kill);
  sbitmap_vector_free (ae_gen);
  sbitmap_vector_free (ae_in);
  sbitmap_vector_free (ae_out);
}

/* Compute the set of available expressions generated in each basic block.  */

static void
compute_ae_gen ()
{
  unsigned int i;
  struct expr *expr;
  struct occr *occr;

  /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
     This is all we have to do because an expression is not recorded if it
     is not available, and the only expressions we want to work with are the
     ones that are recorded.  */
  for (i = 0; i < expr_hash_table_size; i++)
    for (expr = expr_hash_table[i]; expr != 0; expr = expr->next_same_hash)
      for (occr = expr->avail_occr; occr != 0; occr = occr->next)
        SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
}

/* Return non-zero if expression X is killed in BB.  */

static int
expr_killed_p (x, bb)
     rtx x;
     basic_block bb;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));

    case MEM:
      if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
        return 1;
      else
        return expr_killed_p (XEXP (x, 0), bb);

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            return expr_killed_p (XEXP (x, i), bb);
          else if (expr_killed_p (XEXP (x, i), bb))
            return 1;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (expr_killed_p (XVECEXP (x, i, j), bb))
            return 1;
    }

  return 0;
}

/* Compute the set of available expressions killed in each basic block.  */

static void
compute_ae_kill (ae_gen, ae_kill)
     sbitmap *ae_gen, *ae_kill;
{
  int bb;
  unsigned int i;
  struct expr *expr;

  for (bb = 0; bb < n_basic_blocks; bb++)
    for (i = 0; i < expr_hash_table_size; i++)
      for (expr = expr_hash_table[i]; expr; expr = expr->next_same_hash)
        {
          /* Skip EXPR if generated in this block.  */
          if (TEST_BIT (ae_gen[bb], expr->bitmap_index))
            continue;

          if (expr_killed_p (expr->expr, BASIC_BLOCK (bb)))
            SET_BIT (ae_kill[bb], expr->bitmap_index);
        }
}

/* Actually perform the Classic GCSE optimizations.  */

/* Return non-zero if occurrence OCCR of expression EXPR reaches block BB.

   CHECK_SELF_LOOP is non-zero if we should consider a block reaching itself
   as a positive reach.  We want to do this when there are two computations
   of the expression in the block.

   VISITED is a pointer to a working buffer for tracking which BB's have
   been visited.  It is NULL for the top-level call.

   We treat reaching expressions that go through blocks containing the same
   reaching expression as "not reaching".  E.g. if EXPR is generated in blocks
   2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
   2 as not reaching.  The intent is to improve the probability of finding
   only one reaching expression and to reduce register lifetimes by picking
   the closest such expression.  */

static int
expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited)
     struct occr *occr;
     struct expr *expr;
     basic_block bb;
     int check_self_loop;
     char *visited;
{
  edge pred;

  for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
    {
      basic_block pred_bb = pred->src;

      if (visited[pred_bb->index])
        /* This predecessor has already been visited.  Nothing to do.  */
        ;
      else if (pred_bb == bb)
        {
          /* BB loops on itself.  */
          if (check_self_loop
              && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
              && BLOCK_NUM (occr->insn) == pred_bb->index)
            return 1;

          visited[pred_bb->index] = 1;
        }

      /* Ignore this predecessor if it kills the expression.  */
      else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
        visited[pred_bb->index] = 1;

      /* Does this predecessor generate this expression?  */
      else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
        {
          /* Is this the occurrence we're looking for?
             Note that there's only one generating occurrence per block
             so we just need to check the block number.  */
          if (BLOCK_NUM (occr->insn) == pred_bb->index)
            return 1;

          visited[pred_bb->index] = 1;
        }

      /* Neither gen nor kill.  */
      else
        {
          visited[pred_bb->index] = 1;
          if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
                                        visited))
            return 1;
        }
    }

  /* All paths have been checked.  */
  return 0;
}

/* This wrapper for expr_reaches_here_p_work () is to ensure that any
   memory allocated for that function is returned.  */

static int
expr_reaches_here_p (occr, expr, bb, check_self_loop)
     struct occr *occr;
     struct expr *expr;
     basic_block bb;
     int check_self_loop;
{
  int rval;
  char *visited = (char *) xcalloc (n_basic_blocks, 1);

  rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);

  free (visited);
  return rval;
}

/* Return the instruction that computes EXPR that reaches INSN's basic block.
   If there is more than one such instruction, return NULL.

   Called only by handle_avail_expr.  */

static rtx
computing_insn (expr, insn)
     struct expr *expr;
     rtx insn;
{
  basic_block bb = BLOCK_FOR_INSN (insn);

  if (expr->avail_occr->next == NULL)
    {
      if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
        /* The available expression is actually itself
           (i.e. a loop in the flow graph) so do nothing.  */
        return NULL;

      /* (FIXME) Case that we found a pattern that was created by
         a substitution that took place.  */
      return expr->avail_occr->insn;
    }
  else
    {
      /* Pattern is computed more than once.
         Search backwards from this insn to see how many of these
         computations actually reach this insn.  */
      struct occr *occr;
      rtx insn_computes_expr = NULL;
      int can_reach = 0;

      for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
        {
          if (BLOCK_FOR_INSN (occr->insn) == bb)
            {
              /* The expression is generated in this block.
                 The only time we care about this is when the expression
                 is generated later in the block [and thus there's a loop].
                 We let the normal cse pass handle the other cases.  */
              if (INSN_CUID (insn) < INSN_CUID (occr->insn)
                  && expr_reaches_here_p (occr, expr, bb, 1))
                {
                  can_reach++;
                  if (can_reach > 1)
                    return NULL;

                  insn_computes_expr = occr->insn;
                }
            }
          else if (expr_reaches_here_p (occr, expr, bb, 0))
            {
              can_reach++;
              if (can_reach > 1)
                return NULL;

              insn_computes_expr = occr->insn;
            }
        }

      if (insn_computes_expr == NULL)
        abort ();

      return insn_computes_expr;
    }
}
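
/* Illustrative sketch (not from the original sources): if EXPR has two
   available occurrences, one in block 2 and one in block 3, and INSN sits
   in block 4 with edges 2->4 and 3->4, then both occurrences reach INSN,
   CAN_REACH exceeds 1, and computing_insn returns NULL; only when exactly
   one occurrence reaches INSN's block is that occurrence's insn
   returned.  */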

/* Return non-zero if the definition in DEF_INSN can reach INSN.
   Only called by can_disregard_other_sets.  */

static int
def_reaches_here_p (insn, def_insn)
     rtx insn, def_insn;
{
  rtx reg;

  if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
    return 1;

  if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
    {
      if (INSN_CUID (def_insn) < INSN_CUID (insn))
        {
          if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
            return 1;
          else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
            reg = XEXP (PATTERN (def_insn), 0);
          else if (GET_CODE (PATTERN (def_insn)) == SET)
            reg = SET_DEST (PATTERN (def_insn));
          else
            abort ();

          return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
        }
      else
        return 0;
    }

  return 0;
}

/* Return non-zero if *ADDR_THIS_REG can only have one value at INSN.  The
   value returned is the number of definitions that reach INSN.  Returning a
   value of zero means that [maybe] more than one definition reaches INSN and
   the caller can't perform whatever optimization it is trying.  I.e. it is
   always safe to return zero.  */

static int
can_disregard_other_sets (addr_this_reg, insn, for_combine)
     struct reg_set **addr_this_reg;
     rtx insn;
     int for_combine;
{
  int number_of_reaching_defs = 0;
  struct reg_set *this_reg;

  for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
    if (def_reaches_here_p (insn, this_reg->insn))
      {
        number_of_reaching_defs++;
        /* Ignore parallels for now.  */
        if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
          return 0;

        if (!for_combine
            && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
                || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
                                  SET_SRC (PATTERN (insn)))))
          /* A setting of the reg to a different value reaches INSN.  */
          return 0;

        if (number_of_reaching_defs > 1)
          {
            /* If the value the register is being set to here is equal to
               the value it was previously set to, and that setting reaches
               the insn we are trying to do the substitution on, then we
               are OK.  */
            if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
              return 0;
            else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
                                    SET_SRC (PATTERN (insn))))
              return 0;
          }

        *addr_this_reg = this_reg;
      }

  return number_of_reaching_defs;
}
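
/* Illustrative sketch (not from the original sources): if INSN is
        (set (reg 8) (plus (reg 2) (reg 3)))
   and reg 5 has two reaching definitions, both of the form
        (set (reg 5) (plus (reg 2) (reg 3)))
   then every reaching set has a source rtx_equal_p to INSN's source, so
   the function returns 2 and the caller may still substitute reg 5.  A
   PARALLEL, a CLOBBER, or a definition with a different source forces the
   conservative return value of 0.  */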

/* Expression computed by insn is available and the substitution is legal,
   so try to perform the substitution.

   The result is non-zero if any changes were made.  */

static int
handle_avail_expr (insn, expr)
     rtx insn;
     struct expr *expr;
{
  rtx pat, insn_computes_expr, expr_set;
  rtx to;
  struct reg_set *this_reg;
  int found_setting, use_src;
  int changed = 0;

  /* We only handle the case where one computation of the expression
     reaches this instruction.  */
  insn_computes_expr = computing_insn (expr, insn);
  if (insn_computes_expr == NULL)
    return 0;
  expr_set = single_set (insn_computes_expr);
  if (!expr_set)
    abort ();

  found_setting = 0;
  use_src = 0;

  /* At this point we know only one computation of EXPR outside of this
     block reaches this insn.  Now try to find a register that the
     expression is computed into.  */
  if (GET_CODE (SET_SRC (expr_set)) == REG)
    {
      /* This is the case when the available expression that reaches
         here has already been handled as an available expression.  */
      unsigned int regnum_for_replacing
        = REGNO (SET_SRC (expr_set));

      /* If the register was created by GCSE we can't use `reg_set_table',
         however we know it's set only once.  */
      if (regnum_for_replacing >= max_gcse_regno
          /* If the register the expression is computed into is set only once,
             or only one set reaches this insn, we can use it.  */
          || (((this_reg = reg_set_table[regnum_for_replacing]),
               this_reg->next == NULL)
              || can_disregard_other_sets (&this_reg, insn, 0)))
        {
          use_src = 1;
          found_setting = 1;
        }
    }

  if (!found_setting)
    {
      unsigned int regnum_for_replacing
        = REGNO (SET_DEST (expr_set));

      /* This shouldn't happen.  */
      if (regnum_for_replacing >= max_gcse_regno)
        abort ();

      this_reg = reg_set_table[regnum_for_replacing];

      /* If the register the expression is computed into is set only once,
         or only one set reaches this insn, use it.  */
      if (this_reg->next == NULL
          || can_disregard_other_sets (&this_reg, insn, 0))
        found_setting = 1;
    }

  if (found_setting)
    {
      pat = PATTERN (insn);
      if (use_src)
        to = SET_SRC (expr_set);
      else
        to = SET_DEST (expr_set);
      changed = validate_change (insn, &SET_SRC (pat), to, 0);

      /* We should be able to ignore the return code from validate_change but
         to play it safe we check.  */
      if (changed)
        {
          gcse_subst_count++;
          if (gcse_file != NULL)
            {
              fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
                       INSN_UID (insn));
              fprintf (gcse_file, " reg %d %s insn %d\n",
                       REGNO (to), use_src ? "from" : "set in",
                       INSN_UID (insn_computes_expr));
            }
        }
    }

  /* The register that the expr is computed into is set more than once.  */
  else if (1 /*expensive_op(this_pattrn->op) && do_expensive_gcse)*/)
    {
      /* Insert an insn after insnx that copies the reg set in insnx
         into a new pseudo register; call this new register REGN.
         From insnb until the end of the basic block, or until REGB is
         set, replace all uses of REGB with REGN.  */
      rtx new_insn;

      to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));

      /* Generate the new insn.  */
      /* ??? If the change fails, we return 0, even though we created
         an insn.  I think this is ok.  */
      new_insn
        = emit_insn_after (gen_rtx_SET (VOIDmode, to,
                                        SET_DEST (expr_set)),
                           insn_computes_expr);

      /* Keep register set table up to date.  */
      record_one_set (REGNO (to), new_insn);

      gcse_create_count++;
      if (gcse_file != NULL)
        {
          fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
                   INSN_UID (NEXT_INSN (insn_computes_expr)),
                   REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
          fprintf (gcse_file, ", computed in insn %d,\n",
                   INSN_UID (insn_computes_expr));
          fprintf (gcse_file, " into newly allocated reg %d\n",
                   REGNO (to));
        }

      pat = PATTERN (insn);

      /* Do register replacement for INSN.  */
      changed = validate_change (insn, &SET_SRC (pat),
                                 SET_DEST (PATTERN
                                           (NEXT_INSN (insn_computes_expr))),
                                 0);

      /* We should be able to ignore the return code from validate_change but
         to play it safe we check.  */
      if (changed)
        {
          gcse_subst_count++;
          if (gcse_file != NULL)
            {
              fprintf (gcse_file,
                       "GCSE: Replacing the source in insn %d with reg %d ",
                       INSN_UID (insn),
                       REGNO (SET_DEST (PATTERN (NEXT_INSN
                                                 (insn_computes_expr)))));
              fprintf (gcse_file, "set in insn %d\n",
                       INSN_UID (insn_computes_expr));
            }
        }
    }

  return changed;
}
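
/* Illustrative sketch (not from the original sources): if insn 10 computes
        (set (reg 7) (plus (reg 3) (reg 4)))
   and that computation reaches insn 20, which recomputes the same PLUS
   into reg 9, the substitution above turns insn 20 into
        (set (reg 9) (reg 7))
   i.e. a plain register copy, leaving later passes (cprop, cse2, flow)
   to propagate or delete it.  */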

/* Perform classic GCSE.  This is called by one_classic_gcse_pass after all
   the dataflow analysis has been done.

   The result is non-zero if a change was made.  */

static int
classic_gcse ()
{
  int bb, changed;
  rtx insn;

  /* Note we start at block 1.  */

  changed = 0;
  for (bb = 1; bb < n_basic_blocks; bb++)
    {
      /* Reset tables used to keep track of what's still valid [since the
         start of the block].  */
      reset_opr_set_tables ();

      for (insn = BLOCK_HEAD (bb);
           insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
           insn = NEXT_INSN (insn))
        {
          /* Is insn of form (set (pseudo-reg) ...)?  */
          if (GET_CODE (insn) == INSN
              && GET_CODE (PATTERN (insn)) == SET
              && GET_CODE (SET_DEST (PATTERN (insn))) == REG
              && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
            {
              rtx pat = PATTERN (insn);
              rtx src = SET_SRC (pat);
              struct expr *expr;

              if (want_to_gcse_p (src)
                  /* Is the expression recorded?  */
                  && ((expr = lookup_expr (src)) != NULL)
                  /* Is the expression available [at the start of the
                     block]?  */
                  && TEST_BIT (ae_in[bb], expr->bitmap_index)
                  /* Are the operands unchanged since the start of the
                     block?  */
                  && oprs_not_set_p (src, insn))
                changed |= handle_avail_expr (insn, expr);
            }

          /* Keep track of everything modified by this insn.  */
          /* ??? Need to be careful w.r.t. mods done to INSN.  */
          if (INSN_P (insn))
            mark_oprs_set (insn);
        }
    }

  return changed;
}

/* Top level routine to perform one classic GCSE pass.

   Return non-zero if a change was made.  */

static int
one_classic_gcse_pass (pass)
     int pass;
{
  int changed = 0;

  gcse_subst_count = 0;
  gcse_create_count = 0;

  alloc_expr_hash_table (max_cuid);
  alloc_rd_mem (n_basic_blocks, max_cuid);
  compute_expr_hash_table ();
  if (gcse_file)
    dump_hash_table (gcse_file, "Expression", expr_hash_table,
                     expr_hash_table_size, n_exprs);

  if (n_exprs > 0)
    {
      compute_kill_rd ();
      compute_rd ();
      alloc_avail_expr_mem (n_basic_blocks, n_exprs);
      compute_ae_gen ();
      compute_ae_kill (ae_gen, ae_kill);
      compute_available (ae_gen, ae_kill, ae_out, ae_in);
      changed = classic_gcse ();
      free_avail_expr_mem ();
    }

  free_rd_mem ();
  free_expr_hash_table ();

  if (gcse_file)
    {
      fprintf (gcse_file, "\n");
      fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
               current_function_name, pass, bytes_used, gcse_subst_count);
      fprintf (gcse_file, "%d insns created\n", gcse_create_count);
    }

  return changed;
}
\f
/* Compute copy/constant propagation working variables.  */

/* Local properties of assignments.  */
static sbitmap *cprop_pavloc;
static sbitmap *cprop_absaltered;

/* Global properties of assignments (computed from the local properties).  */
static sbitmap *cprop_avin;
static sbitmap *cprop_avout;

/* Allocate vars used for copy/const propagation.  N_BLOCKS is the number of
   basic blocks.  N_SETS is the number of sets.  */

static void
alloc_cprop_mem (n_blocks, n_sets)
     int n_blocks, n_sets;
{
  cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);

  cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
}

/* Free vars used by copy/const propagation.  */

static void
free_cprop_mem ()
{
  sbitmap_vector_free (cprop_pavloc);
  sbitmap_vector_free (cprop_absaltered);
  sbitmap_vector_free (cprop_avin);
  sbitmap_vector_free (cprop_avout);
}

/* For each block, compute whether X is transparent.  X is either an
   expression or an assignment [though we don't care which, for this context
   an assignment is treated as an expression].  For each block where an
   element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
   bit in BMAP.  */

static void
compute_transp (x, indx, bmap, set_p)
     rtx x;
     int indx;
     sbitmap *bmap;
     int set_p;
{
  int bb, i, j;
  enum rtx_code code;
  reg_set *r;
  const char *fmt;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:

  if (x == 0)
    return;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      if (set_p)
        {
          if (REGNO (x) < FIRST_PSEUDO_REGISTER)
            {
              for (bb = 0; bb < n_basic_blocks; bb++)
                if (TEST_BIT (reg_set_in_block[bb], REGNO (x)))
                  SET_BIT (bmap[bb], indx);
            }
          else
            {
              for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
                SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
            }
        }
      else
        {
          if (REGNO (x) < FIRST_PSEUDO_REGISTER)
            {
              for (bb = 0; bb < n_basic_blocks; bb++)
                if (TEST_BIT (reg_set_in_block[bb], REGNO (x)))
                  RESET_BIT (bmap[bb], indx);
            }
          else
            {
              for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
                RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
            }
        }

      return;

    case MEM:
      for (bb = 0; bb < n_basic_blocks; bb++)
        {
          rtx list_entry = canon_modify_mem_list[bb];

          while (list_entry)
            {
              rtx dest, dest_addr;

              if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
                {
                  if (set_p)
                    SET_BIT (bmap[bb], indx);
                  else
                    RESET_BIT (bmap[bb], indx);
                  break;
                }
              /* LIST_ENTRY must be an INSN of some kind that sets memory.
                 Examine each hunk of memory that is modified.  */

              dest = XEXP (list_entry, 0);
              list_entry = XEXP (list_entry, 1);
              dest_addr = XEXP (list_entry, 0);

              if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
                                         x, rtx_addr_varies_p))
                {
                  if (set_p)
                    SET_BIT (bmap[bb], indx);
                  else
                    RESET_BIT (bmap[bb], indx);
                  break;
                }
              list_entry = XEXP (list_entry, 1);
            }
        }

      x = XEXP (x, 0);
      goto repeat;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = XEXP (x, i);
              goto repeat;
            }

          compute_transp (XEXP (x, i), indx, bmap, set_p);
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
    }
}
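
/* Illustrative sketch (not from the original sources): for the expression
        (plus (reg 101) (mem (reg 102)))
   the REG case visits every block recorded in reg_set_table as setting
   reg 101 or reg 102, and the MEM case visits every block whose
   canon_modify_mem_list holds a call or a store that canon_true_dependence
   says may alias the load; in each such block the INDX bit in BMAP is set
   or reset according to SET_P, recording the expression as not transparent
   there.  */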

/* Top level routine to do the dataflow analysis needed by copy/const
   propagation.  */

static void
compute_cprop_data ()
{
  compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, 1);
  compute_available (cprop_pavloc, cprop_absaltered,
                     cprop_avout, cprop_avin);
}
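
/* A sketch of the dataflow being solved here, assuming the usual
   available-expressions formulation (the equations are not spelled out
   in the sources):

        AVOUT[b] = PAVLOC[b] | (AVIN[b] & ~ABSALTERED[b])
        AVIN[b]  = intersection of AVOUT[p] over all predecessors p

   iterated to a fixed point by compute_available, with AVIN of the
   entry block empty.  */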
\f
/* Copy/constant propagation.  */

/* Maximum number of register uses in an insn that we handle.  */
#define MAX_USES 8

/* Table of uses found in an insn.
   Allocated statically to avoid alloc/free complexity and overhead.  */
static struct reg_use reg_use_table[MAX_USES];

/* Index into `reg_use_table' while building it.  */
static int reg_use_count;

/* Set up a list of register numbers used in INSN.  The found uses are stored
   in `reg_use_table'.  `reg_use_count' is initialized to zero before entry,
   and contains the number of uses in the table upon exit.

   ??? If a register appears multiple times we will record it multiple times.
   This doesn't hurt anything but it will slow things down.  */

static void
find_used_regs (xptr, data)
     rtx *xptr;
     void *data ATTRIBUTE_UNUSED;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  rtx x = *xptr;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);
  if (REG_P (x))
    {
      if (reg_use_count == MAX_USES)
        return;

      reg_use_table[reg_use_count].reg_rtx = x;
      reg_use_count++;
    }

  /* Recursively scan the operands of this expression.  */

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }

          find_used_regs (&XEXP (x, i), data);
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          find_used_regs (&XVECEXP (x, i, j), data);
    }
}
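
/* Illustrative sketch (not from the original sources): scanning
        (set (reg 5) (plus (reg 3) (reg 3)))
   via note_uses visits only the PLUS (the SET_DEST is not a use), so
   reg_use_table receives reg 3 twice and reg_use_count ends up 2,
   matching the ??? note above about duplicate entries.  */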

/* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
   Returns non-zero if successful.  */

static int
try_replace_reg (from, to, insn)
     rtx from, to, insn;
{
  rtx note = find_reg_equal_equiv_note (insn);
  rtx src = 0;
  int success = 0;
  rtx set = single_set (insn);

  success = validate_replace_src (from, to, insn);

  /* If above failed and this is a single set, try to simplify the source of
     the set given our substitution.  We could perhaps try this for multiple
     SETs, but it probably won't buy us anything.  */
  if (!success && set != 0)
    {
      src = simplify_replace_rtx (SET_SRC (set), from, to);

      if (!rtx_equal_p (src, SET_SRC (set))
          && validate_change (insn, &SET_SRC (set), src, 0))
        success = 1;
    }

  /* If we've failed to do replacement, have a single SET, and don't already
     have a note, add a REG_EQUAL note to not lose information.  */
  if (!success && note == 0 && set != 0)
    note = REG_NOTES (insn)
      = gen_rtx_EXPR_LIST (REG_EQUAL, src, REG_NOTES (insn));

  /* If there is already a NOTE, update the expression in it with our
     replacement.  */
  else if (note != 0)
    XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);

  /* REG_EQUAL may get simplified into a register.
     We don't allow that.  Remove that note.  This code ought
     not to happen, because previous code ought to synthesize a
     reg-reg move, but be on the safe side.  */
  if (note && REG_P (XEXP (note, 0)))
    remove_note (insn, note);

  return success;
}
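
/* Illustrative sketch (not from the original sources): substituting
   (const_int 4) for (reg 5) in
        (set (reg 6) (plus (reg 5) (const_int 1)))
   first tries the literal replacement; if the target rejects
   (plus (const_int 4) (const_int 1)), simplify_replace_rtx folds the
   source to (const_int 5) and validate_change is retried with the folded
   form.  If even that fails, a REG_EQUAL note preserves the known
   value.  */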

/* Find a set of REGNOs that are available on entry to INSN's block.  Returns
   NULL if no such set is found.  */

static struct expr *
find_avail_set (regno, insn)
     int regno;
     rtx insn;
{
  /* SET1 contains the last set found that can be returned to the caller for
     use in a substitution.  */
  struct expr *set1 = 0;

  /* Loops are not possible here.  To get a loop we would need two sets
     available at the start of the block containing INSN.  I.e. we would
     need two sets like this available at the start of the block:

       (set (reg X) (reg Y))
       (set (reg Y) (reg X))

     This cannot happen since the set of (reg Y) would have killed the
     set of (reg X) making it unavailable at the start of this block.  */
  while (1)
    {
      rtx src;
      struct expr *set = lookup_set (regno, NULL_RTX);

      /* Find a set that is available at the start of the block
         which contains INSN.  */
      while (set)
        {
          if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
            break;
          set = next_set (regno, set);
        }

      /* If no available set was found we've reached the end of the
         (possibly empty) copy chain.  */
      if (set == 0)
        break;

      if (GET_CODE (set->expr) != SET)
        abort ();

      src = SET_SRC (set->expr);

      /* We know the set is available.
         Now check that SRC is ANTLOC (i.e. none of the source operands
         have changed since the start of the block).

         If the source operand changed, we may still use it for the next
         iteration of this loop, but we may not use it for substitutions.  */

      if (CONSTANT_P (src) || oprs_not_set_p (src, insn))
        set1 = set;

      /* If the source of the set is anything except a register, then
         we have reached the end of the copy chain.  */
      if (GET_CODE (src) != REG)
        break;

      /* Follow the copy chain, i.e. start another iteration of the loop
         and see if we have an available copy into SRC.  */
      regno = REGNO (src);
    }

  /* SET1 holds the last set that was available and anticipatable at
     INSN.  */
  return set1;
}
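
/* Illustrative sketch (not from the original sources): given the chain
        (set (reg 4) (const_int 7))
        (set (reg 5) (reg 4))
   both available at the start of INSN's block, a query for regno 5 first
   finds the copy from reg 4, then follows the chain to reg 4 and finds
   the constant set; the constant set is returned, so the caller
   propagates 7 rather than the intermediate copy.  */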

/* Subroutine of cprop_insn that tries to propagate constants into
   JUMP_INSNS.  INSN must be a conditional jump.  FROM is what we will try to
   replace, SRC is the constant we will try to substitute for it.  Returns
   nonzero if a change was made.  We know INSN has just a SET.  */

static int
cprop_jump (bb, insn, from, src)
     rtx insn;
     rtx from;
     rtx src;
     basic_block bb;
{
  rtx set = PATTERN (insn);
  rtx new = simplify_replace_rtx (SET_SRC (set), from, src);

  /* If no simplification can be made, then try the next
     register.  */
  if (rtx_equal_p (new, SET_SRC (set)))
    return 0;

  /* If this is now a no-op leave it that way, but update LABEL_NUSES if
     necessary.  */
  if (new == pc_rtx)
    {
      SET_SRC (set) = new;

      if (JUMP_LABEL (insn) != 0)
        --LABEL_NUSES (JUMP_LABEL (insn));
    }

  /* Otherwise, this must be a valid instruction.  */
  else if (! validate_change (insn, &SET_SRC (set), new, 0))
    return 0;

  /* If this has turned into an unconditional jump,
     then put a barrier after it so that the unreachable
     code will be deleted.  */
  if (GET_CODE (SET_SRC (set)) == LABEL_REF)
    emit_barrier_after (insn);

  run_jump_opt_after_gcse = 1;

  const_prop_count++;
  if (gcse_file != NULL)
    {
      fprintf (gcse_file,
               "CONST-PROP: Replacing reg %d in insn %d with constant ",
               REGNO (from), INSN_UID (insn));
      print_rtl (gcse_file, src);
      fprintf (gcse_file, "\n");
    }
  purge_dead_edges (bb);

  return 1;
}
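
/* Illustrative sketch (not from the original sources): with reg 5 known
   to be 0, the conditional jump
        (set (pc) (if_then_else (eq (reg 5) (const_int 0))
                                (label_ref L1) (pc)))
   simplifies to (set (pc) (label_ref L1)), an unconditional jump, so a
   barrier is emitted after it and the dead fall-through edge is purged;
   had the condition folded false, the SET_SRC would become pc_rtx and
   the label's use count would be decremented instead.  */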

#ifdef HAVE_cc0

/* Subroutine of cprop_insn that tries to propagate constants into JUMP_INSNS
   for machines that have CC0.  INSN is a single set that stores into CC0;
   the insn following it is a conditional jump.  REG_USED is the use we will
   try to replace, SRC is the constant we will try to substitute for it.
   Returns nonzero if a change was made.  */

static int
cprop_cc0_jump (bb, insn, reg_used, src)
     basic_block bb;
     rtx insn;
     struct reg_use *reg_used;
     rtx src;
{
  /* First substitute in the SET_SRC of INSN, then substitute that for
     CC0 in JUMP.  */
  rtx jump = NEXT_INSN (insn);
  rtx new_src = simplify_replace_rtx (SET_SRC (PATTERN (insn)),
                                      reg_used->reg_rtx, src);

  if (! cprop_jump (bb, jump, cc0_rtx, new_src))
    return 0;

  /* If we succeeded, delete the cc0 setter.  */
  delete_insn (insn);

  return 1;
}
#endif

/* Perform constant and copy propagation on INSN.
   The result is non-zero if a change was made.  */

static int
cprop_insn (bb, insn, alter_jumps)
     basic_block bb;
     rtx insn;
     int alter_jumps;
{
  struct reg_use *reg_used;
  int changed = 0;
  rtx note;

  if (!INSN_P (insn))
    return 0;

  reg_use_count = 0;
  note_uses (&PATTERN (insn), find_used_regs, NULL);

  note = find_reg_equal_equiv_note (insn);

  /* We may win even when propagating constants into notes.  */
  if (note)
    find_used_regs (&XEXP (note, 0), NULL);

  for (reg_used = &reg_use_table[0]; reg_use_count > 0;
       reg_used++, reg_use_count--)
    {
      unsigned int regno = REGNO (reg_used->reg_rtx);
      rtx pat, src;
      struct expr *set;

      /* Ignore registers created by GCSE.
         We do this because ...  */
      if (regno >= max_gcse_regno)
        continue;

      /* If the register has already been set in this block, there's
         nothing we can do.  */
      if (! oprs_not_set_p (reg_used->reg_rtx, insn))
        continue;

      /* Find an assignment that sets reg_used and is available
         at the start of the block.  */
      set = find_avail_set (regno, insn);
      if (! set)
        continue;

      pat = set->expr;
      /* ??? We might be able to handle PARALLELs.  Later.  */
      if (GET_CODE (pat) != SET)
        abort ();

      src = SET_SRC (pat);

      /* Constant propagation.  */
      if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE
          || GET_CODE (src) == SYMBOL_REF)
        {
          /* Handle normal insns first.  */
          if (GET_CODE (insn) == INSN
              && try_replace_reg (reg_used->reg_rtx, src, insn))
            {
              changed = 1;
              const_prop_count++;
              if (gcse_file != NULL)
                {
                  fprintf (gcse_file, "CONST-PROP: Replacing reg %d in ",
                           regno);
                  fprintf (gcse_file, "insn %d with constant ",
                           INSN_UID (insn));
                  print_rtl (gcse_file, src);
                  fprintf (gcse_file, "\n");
                }

              /* The original insn setting reg_used may or may not now be
                 deletable.  We leave the deletion to flow.  */
            }

          /* Try to propagate a CONST_INT into a conditional jump.
             We're pretty specific about what we will handle in this
             code, we can extend this as necessary over time.

             Right now the insn in question must look like
             (set (pc) (if_then_else ...))  */
          else if (alter_jumps
                   && GET_CODE (insn) == JUMP_INSN
                   && condjump_p (insn)
                   && ! simplejump_p (insn))
            changed |= cprop_jump (bb, insn, reg_used->reg_rtx, src);

#ifdef HAVE_cc0
          /* Similar code for machines that use a pair of CC0 setter and
             conditional jump insn.  */
          else if (alter_jumps
                   && GET_CODE (PATTERN (insn)) == SET
                   && SET_DEST (PATTERN (insn)) == cc0_rtx
                   && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
                   && condjump_p (NEXT_INSN (insn))
                   && ! simplejump_p (NEXT_INSN (insn))
                   && cprop_cc0_jump (bb, insn, reg_used, src))
            {
              changed = 1;
              break;
            }
#endif
        }
      else if (GET_CODE (src) == REG
               && REGNO (src) >= FIRST_PSEUDO_REGISTER
               && REGNO (src) != regno)
        {
          if (try_replace_reg (reg_used->reg_rtx, src, insn))
            {
              changed = 1;
              copy_prop_count++;
              if (gcse_file != NULL)
                {
                  fprintf (gcse_file, "COPY-PROP: Replacing reg %d in insn %d",
                           regno, INSN_UID (insn));
                  fprintf (gcse_file, " with reg %d\n", REGNO (src));
                }

              /* The original insn setting reg_used may or may not now be
                 deletable.  We leave the deletion to flow.  */
              /* FIXME: If it turns out that the insn isn't deletable,
                 then we may have unnecessarily extended register lifetimes
                 and made things worse.  */
            }
        }
    }

  return changed;
}

/* Forward propagate copies.  This includes copies and constants.  Return
   non-zero if a change was made.  */

static int
cprop (alter_jumps)
     int alter_jumps;
{
  int bb, changed;
  rtx insn;

  /* Note we start at block 1.  */

  changed = 0;
  for (bb = 1; bb < n_basic_blocks; bb++)
    {
      /* Reset tables used to keep track of what's still valid [since the
         start of the block].  */
      reset_opr_set_tables ();

      for (insn = BLOCK_HEAD (bb);
           insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
           insn = NEXT_INSN (insn))
        if (INSN_P (insn))
          {
            changed |= cprop_insn (BASIC_BLOCK (bb), insn, alter_jumps);

            /* Keep track of everything modified by this insn.  */
            /* ??? Need to be careful w.r.t. mods done to INSN.  Don't
               call mark_oprs_set if we turned the insn into a NOTE.  */
            if (GET_CODE (insn) != NOTE)
              mark_oprs_set (insn);
          }
    }

  if (gcse_file != NULL)
    fprintf (gcse_file, "\n");

  return changed;
}

/* Perform one copy/constant propagation pass.
   F is the first insn in the function.
   PASS is the pass count.  */

static int
one_cprop_pass (pass, alter_jumps)
     int pass;
     int alter_jumps;
{
  int changed = 0;

  const_prop_count = 0;
  copy_prop_count = 0;

  alloc_set_hash_table (max_cuid);
  compute_set_hash_table ();
  if (gcse_file)
    dump_hash_table (gcse_file, "SET", set_hash_table, set_hash_table_size,
                     n_sets);
  if (n_sets > 0)
    {
      alloc_cprop_mem (n_basic_blocks, n_sets);
      compute_cprop_data ();
      changed = cprop (alter_jumps);
      free_cprop_mem ();
    }

  free_set_hash_table ();

  if (gcse_file)
    {
      fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
               current_function_name, pass, bytes_used);
      fprintf (gcse_file, "%d const props, %d copy props\n\n",
               const_prop_count, copy_prop_count);
    }

  return changed;
}
\f
/* Compute PRE+LCM working variables.  */

/* Local properties of expressions.  */
/* Nonzero for expressions that are transparent in the block.  */
static sbitmap *transp;

/* Nonzero for expressions that are transparent at the end of the block.
   This is only zero for expressions killed by abnormal critical edges
   created by calls.  */
static sbitmap *transpout;

/* Nonzero for expressions that are computed (available) in the block.  */
static sbitmap *comp;

/* Nonzero for expressions that are locally anticipatable in the block.  */
static sbitmap *antloc;

/* Nonzero for expressions where this block is an optimal computation
   point.  */
static sbitmap *pre_optimal;

/* Nonzero for expressions which are redundant in a particular block.  */
static sbitmap *pre_redundant;

/* Nonzero for expressions which should be inserted on a specific edge.  */
static sbitmap *pre_insert_map;

/* Nonzero for expressions which should be deleted in a specific block.  */
static sbitmap *pre_delete_map;

/* Contains the edge_list returned by pre_edge_lcm.  */
static struct edge_list *edge_list;

/* Redundant insns.  */
static sbitmap pre_redundant_insns;

/* Allocate vars used for PRE analysis.  */

static void
alloc_pre_mem (n_blocks, n_exprs)
     int n_blocks, n_exprs;
{
  transp = sbitmap_vector_alloc (n_blocks, n_exprs);
  comp = sbitmap_vector_alloc (n_blocks, n_exprs);
  antloc = sbitmap_vector_alloc (n_blocks, n_exprs);

  pre_optimal = NULL;
  pre_redundant = NULL;
  pre_insert_map = NULL;
  pre_delete_map = NULL;
  ae_in = NULL;
  ae_out = NULL;
  ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);

  /* pre_insert and pre_delete are allocated later.  */
}

/* Free vars used for PRE analysis.  */

static void
free_pre_mem ()
{
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);

  /* ANTLOC and AE_KILL are freed just after pre_lcm finishes.  */

  if (pre_optimal)
    sbitmap_vector_free (pre_optimal);
  if (pre_redundant)
    sbitmap_vector_free (pre_redundant);
  if (pre_insert_map)
    sbitmap_vector_free (pre_insert_map);
  if (pre_delete_map)
    sbitmap_vector_free (pre_delete_map);
  if (ae_in)
    sbitmap_vector_free (ae_in);
  if (ae_out)
    sbitmap_vector_free (ae_out);

  transp = comp = NULL;
  pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
  ae_in = ae_out = NULL;
}

/* Top level routine to do the dataflow analysis needed by PRE.  */

static void
compute_pre_data ()
{
  sbitmap trapping_expr;
  int i;
  unsigned int ui;

  compute_local_properties (transp, comp, antloc, 0);
  sbitmap_vector_zero (ae_kill, n_basic_blocks);

  /* Collect expressions which might trap.  */
  trapping_expr = sbitmap_alloc (n_exprs);
  sbitmap_zero (trapping_expr);
  for (ui = 0; ui < expr_hash_table_size; ui++)
    {
      struct expr *e;
      for (e = expr_hash_table[ui]; e != NULL; e = e->next_same_hash)
        if (may_trap_p (e->expr))
          SET_BIT (trapping_expr, e->bitmap_index);
    }

  /* Compute ae_kill for each basic block using:

     ~(TRANSP | COMP)

     This is significantly faster than compute_ae_kill.  */

  for (i = 0; i < n_basic_blocks; i++)
    {
      edge e;

      /* If the current block is the destination of an abnormal edge, we
         kill all trapping expressions because we won't be able to properly
         place the instruction on the edge.  So make them neither
         anticipatable nor transparent.  This is fairly conservative.  */
      for (e = BASIC_BLOCK (i)->pred; e ; e = e->pred_next)
        if (e->flags & EDGE_ABNORMAL)
          {
            sbitmap_difference (antloc[i], antloc[i], trapping_expr);
            sbitmap_difference (transp[i], transp[i], trapping_expr);
            break;
          }

      sbitmap_a_or_b (ae_kill[i], transp[i], comp[i]);
      sbitmap_not (ae_kill[i], ae_kill[i]);
    }

  edge_list = pre_edge_lcm (gcse_file, n_exprs, transp, comp, antloc,
                            ae_kill, &pre_insert_map, &pre_delete_map);
  sbitmap_vector_free (antloc);
  antloc = NULL;
  sbitmap_vector_free (ae_kill);
  ae_kill = NULL;
  free (trapping_expr);
}
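
/* A sketch of why ~(TRANSP | COMP) works as a kill set (the reasoning is
   not spelled out in the sources): an expression is killed at the end of
   a block exactly when some operand is modified there (not TRANSP) and
   the block does not make the expression available again afterwards
   (not COMP), so

        AE_KILL[b] = ~(TRANSP[b] | COMP[b])

   which needs only two bitmap operations per block instead of the
   per-insn scan that compute_ae_kill performs.  */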
\f
/* PRE utilities */

/* Return non-zero if an occurrence of expression EXPR in OCCR_BB would reach
   block BB.

   VISITED is a pointer to a working buffer for tracking which BB's have
   been visited.  It is NULL for the top-level call.

   We treat reaching expressions that go through blocks containing the same
   reaching expression as "not reaching".  E.g. if EXPR is generated in blocks
   2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
   2 as not reaching.  The intent is to improve the probability of finding
   only one reaching expression and to reduce register lifetimes by picking
   the closest such expression.  */

static int
pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited)
     basic_block occr_bb;
     struct expr *expr;
     basic_block bb;
     char *visited;
{
  edge pred;

  for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
    {
      basic_block pred_bb = pred->src;

      if (pred->src == ENTRY_BLOCK_PTR
          /* Has this predecessor already been visited?  */
          || visited[pred_bb->index])
        ;/* Nothing to do.  */

      /* Does this predecessor generate this expression?  */
      else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
        {
          /* Is this the occurrence we're looking for?
             Note that there's only one generating occurrence per block
             so we just need to check the block number.  */
          if (occr_bb == pred_bb)
            return 1;

          visited[pred_bb->index] = 1;
        }
      /* Ignore this predecessor if it kills the expression.  */
      else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
        visited[pred_bb->index] = 1;

      /* Neither gen nor kill.  */
      else
        {
          visited[pred_bb->index] = 1;
          if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
            return 1;
        }
    }

  /* All paths have been checked.  */
  return 0;
}

/* The wrapper for pre_expr_reaches_here_work that ensures that any
   memory allocated for that function is returned.  */

static int
pre_expr_reaches_here_p (occr_bb, expr, bb)
     basic_block occr_bb;
     struct expr *expr;
     basic_block bb;
{
  int rval;
  char *visited = (char *) xcalloc (n_basic_blocks, 1);

  rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);

  free (visited);
  return rval;
}
\f

/* Given an expr, generate RTL which we can insert at the end of a BB,
   or on an edge.  Set the block number of any insns generated to
   the value of BB.  */

static rtx
process_insert_insn (expr)
     struct expr *expr;
{
  rtx reg = expr->reaching_reg;
  rtx exp = copy_rtx (expr->expr);
  rtx pat;

  start_sequence ();

  /* If the expression is something that's an operand, like a constant,
     just copy it to a register.  */
  if (general_operand (exp, GET_MODE (reg)))
    emit_move_insn (reg, exp);

  /* Otherwise, make a new insn to compute this expression and make sure the
     insn will be recognized (this also adds any needed CLOBBERs).  Copy the
     expression to make sure we don't have any sharing issues.  */
  else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
    abort ();

  pat = gen_sequence ();
  end_sequence ();

  return pat;
}
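
/* Illustrative sketch (not from the original sources): for an expression
   like (plus (reg 3) (const_int 4)) with reaching_reg (reg 80), the
   sequence built above is a single insn
        (set (reg 80) (plus (reg 3) (const_int 4)))
   plus whatever CLOBBERs recognition adds; the caller can then place the
   resulting pattern at the end of a block or commit it on an edge.  */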

/* Add EXPR to the end of basic block BB.

   This is used by both PRE and code hoisting.

   For PRE, we want to verify that the expr is either transparent
   or locally anticipatable in the target block.  This check makes
   no sense for code hoisting.  */

static void
insert_insn_end_bb (expr, bb, pre)
     struct expr *expr;
     basic_block bb;
     int pre;
{
  rtx insn = bb->end;
  rtx new_insn;
  rtx reg = expr->reaching_reg;
  int regno = REGNO (reg);
  rtx pat;
  int i;

  pat = process_insert_insn (expr);

  /* If the last insn is a jump, insert EXPR in front [taking care to
     handle cc0, etc. properly].  */

  if (GET_CODE (insn) == JUMP_INSN)
    {
#ifdef HAVE_cc0
      rtx note;
#endif

      /* If this is a jump table, then we can't insert stuff here.  Since
         we know the previous real insn must be the tablejump, we insert
         the new instruction just before the tablejump.  */
      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
          || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
        insn = prev_real_insn (insn);

#ifdef HAVE_cc0
      /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
         if cc0 isn't set.  */
      note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
      if (note)
        insn = XEXP (note, 0);
      else
        {
          rtx maybe_cc0_setter = prev_nonnote_insn (insn);
          if (maybe_cc0_setter
              && INSN_P (maybe_cc0_setter)
              && sets_cc0_p (PATTERN (maybe_cc0_setter)))
            insn = maybe_cc0_setter;
        }
#endif
      /* FIXME: What if something in cc0/jump uses value set in new insn?  */
      new_insn = emit_insn_before (pat, insn);
    }

  /* Likewise if the last insn is a call, as will happen in the presence
     of exception handling.  */
  else if (GET_CODE (insn) == CALL_INSN)
    {
      /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
         we search backward and place the instructions before the first
         parameter is loaded.  Do this for everyone for consistency and a
         presumption that we'll get better code elsewhere as well.

         It should always be the case that we can put these instructions
         anywhere in the basic block when performing PRE optimizations.
         Check this.  */

      if (pre
          && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
          && !TEST_BIT (transp[bb->index], expr->bitmap_index))
        abort ();

      /* Since different machines initialize their parameter registers
         in different orders, assume nothing.  Collect the set of all
         parameter registers.  */
      insn = find_first_parameter_load (insn, bb->head);

      /* If we found all the parameter loads, then we want to insert
         before the first parameter load.

         If we did not find all the parameter loads, then we might have
         stopped on the head of the block, which could be a CODE_LABEL.
         If we inserted before the CODE_LABEL, then we would be putting
         the insn in the wrong basic block.  In that case, put the insn
         after the CODE_LABEL.  Also, respect NOTE_INSN_BASIC_BLOCK.  */
      while (GET_CODE (insn) == CODE_LABEL
             || NOTE_INSN_BASIC_BLOCK_P (insn))
        insn = NEXT_INSN (insn);

      new_insn = emit_insn_before (pat, insn);
    }
  else
    new_insn = emit_insn_after (pat, insn);

  /* Keep block number table up to date.
     Note, PAT could be a multiple insn sequence, we have to make
     sure that each insn in the sequence is handled.  */
  if (GET_CODE (pat) == SEQUENCE)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx insn = XVECEXP (pat, 0, i);
          if (INSN_P (insn))
            add_label_notes (PATTERN (insn), new_insn);

          note_stores (PATTERN (insn), record_set_info, insn);
        }
    }
  else
    {
      add_label_notes (pat, new_insn);

      /* Keep register set table up to date.  */
      record_one_set (regno, new_insn);
    }

  gcse_create_count++;

  if (gcse_file)
    {
      fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
               bb->index, INSN_UID (new_insn));
      fprintf (gcse_file, "copying expression %d to reg %d\n",
               expr->bitmap_index, regno);
    }
}

/* Insert partially redundant expressions on edges in the CFG to make
   the expressions fully redundant.  */

static int
pre_edge_insert (edge_list, index_map)
     struct edge_list *edge_list;
     struct expr **index_map;
{
  int e, i, j, num_edges, set_size, did_insert = 0;
  sbitmap *inserted;

  /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
     if it reaches any of the deleted expressions.  */

  set_size = pre_insert_map[0]->size;
  num_edges = NUM_EDGES (edge_list);
  inserted = sbitmap_vector_alloc (num_edges, n_exprs);
  sbitmap_vector_zero (inserted, num_edges);

  for (e = 0; e < num_edges; e++)
    {
      int indx;
      basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);

      for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
        {
          SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];

          for (j = indx; insert && j < n_exprs; j++, insert >>= 1)
            if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
              {
                struct expr *expr = index_map[j];
                struct occr *occr;

                /* Now look at each deleted occurrence of this expression.  */
                for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
                  {
                    if (! occr->deleted_p)
                      continue;

                    /* Insert this expression on this edge if it would
                       reach the deleted occurrence in BB.  */
                    if (!TEST_BIT (inserted[e], j))
                      {
                        rtx insn;
                        edge eg = INDEX_EDGE (edge_list, e);

                        /* We can't insert anything on an abnormal and
                           critical edge, so we insert the insn at the end of
                           the previous block.  There are several alternatives
                           detailed in Morgan's book, p. 277 (sec. 10.5), for
                           handling this situation.  This one is easiest for
                           now.  */

                        if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
                          insert_insn_end_bb (index_map[j], bb, 0);
                        else
                          {
                            insn = process_insert_insn (index_map[j]);
                            insert_insn_on_edge (insn, eg);
                          }

                        if (gcse_file)
                          {
                            fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
                                     bb->index,
                                     INDEX_EDGE_SUCC_BB (edge_list, e)->index);
                            fprintf (gcse_file, "copy expression %d\n",
                                     expr->bitmap_index);
                          }

                        update_ld_motion_stores (expr);
                        SET_BIT (inserted[e], j);
                        did_insert = 1;
                        gcse_create_count++;
                      }
                  }
              }
        }
    }

  sbitmap_vector_free (inserted);
  return did_insert;
}

/* Copy the result of INSN to REG.  INDX is the expression number.  */

static void
pre_insert_copy_insn (expr, insn)
     struct expr *expr;
     rtx insn;
{
  rtx reg = expr->reaching_reg;
  int regno = REGNO (reg);
  int indx = expr->bitmap_index;
  rtx set = single_set (insn);
  rtx new_insn;

  if (!set)
    abort ();

  new_insn = emit_insn_after (gen_move_insn (reg, SET_DEST (set)), insn);

  /* Keep register set table up to date.  */
  record_one_set (regno, new_insn);

  gcse_create_count++;

  if (gcse_file)
    fprintf (gcse_file,
             "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
             BLOCK_NUM (insn), INSN_UID (new_insn), indx,
             INSN_UID (insn), regno);
  update_ld_motion_stores (expr);
}

/* Copy available expressions that reach the redundant expression
   to `reaching_reg'.  */

static void
pre_insert_copies ()
{
  unsigned int i;
  struct expr *expr;
  struct occr *occr;
  struct occr *avail;

  /* For each available expression in the table, copy the result to
     `reaching_reg' if the expression reaches a deleted one.

     ??? The current algorithm is rather brute force.
     Need to do some profiling.  */

  for (i = 0; i < expr_hash_table_size; i++)
    for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
      {
        /* If the basic block isn't reachable, PPOUT will be TRUE.  However,
           we don't want to insert a copy here because the expression may not
           really be redundant.  So only insert an insn if the expression was
           deleted.  This test also avoids further processing if the
           expression wasn't deleted anywhere.  */
        if (expr->reaching_reg == NULL)
          continue;

        for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
          {
            if (! occr->deleted_p)
              continue;

            for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
              {
                rtx insn = avail->insn;

                /* No need to handle this one if handled already.  */
                if (avail->copied_p)
                  continue;

                /* Don't handle this one if it's a redundant one.  */
                if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
                  continue;

                /* Or if the expression doesn't reach the deleted one.  */
                if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
                                               expr,
                                               BLOCK_FOR_INSN (occr->insn)))
                  continue;

                /* Copy the result of avail to reaching_reg.  */
                pre_insert_copy_insn (expr, insn);
                avail->copied_p = 1;
              }
          }
      }
}

/* Delete redundant computations.
   Deletion is done by changing the insn to copy the `reaching_reg' of
   the expression into the result of the SET.  It is left to later passes
   (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.

   Returns non-zero if a change is made.  */

static int
pre_delete ()
{
  unsigned int i;
  int changed;
  struct expr *expr;
  struct occr *occr;

  changed = 0;
  for (i = 0; i < expr_hash_table_size; i++)
    for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
      {
        int indx = expr->bitmap_index;

        /* We only need to search antic_occr since we require
           ANTLOC != 0.  */

        for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
          {
            rtx insn = occr->insn;
            rtx set;
            basic_block bb = BLOCK_FOR_INSN (insn);

            if (TEST_BIT (pre_delete_map[bb->index], indx))
              {
                set = single_set (insn);
                if (! set)
                  abort ();

                /* Create a pseudo-reg to store the result of reaching
                   expressions into.  Get the mode for the new pseudo from
                   the mode of the original destination pseudo.  */
                if (expr->reaching_reg == NULL)
                  expr->reaching_reg
                    = gen_reg_rtx (GET_MODE (SET_DEST (set)));

                /* In theory this should never fail since we're creating
                   a reg->reg copy.

                   However, on the x86 some of the movXX patterns actually
                   contain clobbers of scratch regs.  This may cause the
                   insn created by validate_change to not match any pattern
                   and thus cause validate_change to fail.  */
                if (validate_change (insn, &SET_SRC (set),
                                     expr->reaching_reg, 0))
                  {
                    occr->deleted_p = 1;
                    SET_BIT (pre_redundant_insns, INSN_CUID (insn));
                    changed = 1;
                    gcse_subst_count++;
                  }

                if (gcse_file)
                  {
                    fprintf (gcse_file,
                             "PRE: redundant insn %d (expression %d) in ",
                             INSN_UID (insn), indx);
                    fprintf (gcse_file, "bb %d, reaching reg is %d\n",
                             bb->index, REGNO (expr->reaching_reg));
                  }
              }
          }
      }

  return changed;
}
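
/* Illustrative sketch (not from the original sources): a partially
   redundant computation
        insn 30: (set (reg 6) (mult (reg 3) (reg 4)))
   marked in pre_delete_map is rewritten in place to
        insn 30: (set (reg 6) (reg 80))
   where reg 80 is the freshly allocated reaching_reg; pre_edge_insert
   and pre_insert_copies then ensure every path computes reg 80 first,
   making the "deleted" occurrence a plain copy for later passes.  */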

/* Perform GCSE optimizations using PRE.
   This is called by one_pre_gcse_pass after all the dataflow analysis
   has been done.

   This is based on the original Morel-Renvoise paper, Fred Chow's thesis,
   and lazy code motion from Knoop, Ruthing and Steffen as described in
   Advanced Compiler Design and Implementation.

   ??? A new pseudo reg is created to hold the reaching expression.  The nice
   thing about the classical approach is that it would try to use an existing
   reg.  If the register can't be adequately optimized [i.e. we introduce
   reload problems], one could add a pass here to propagate the new register
   through the block.

   ??? We don't handle single sets in PARALLELs because we're [currently] not
   able to copy the rest of the parallel when we insert copies to create full
   redundancies from partial redundancies.  However, there's no reason why we
   can't handle PARALLELs in the cases where there are no partial
   redundancies.  */

static int
pre_gcse ()
{
  unsigned int i;
  int did_insert, changed;
  struct expr **index_map;
  struct expr *expr;

  /* Compute a mapping from expression number (`bitmap_index') to
     hash table entry.  */

  index_map = (struct expr **) xcalloc (n_exprs, sizeof (struct expr *));
  for (i = 0; i < expr_hash_table_size; i++)
    for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
      index_map[expr->bitmap_index] = expr;

  /* Reset bitmap used to track which insns are redundant.  */
  pre_redundant_insns = sbitmap_alloc (max_cuid);
  sbitmap_zero (pre_redundant_insns);

  /* Delete the redundant insns first so that
     - we know what register to use for the new insns and for the other
       ones with reaching expressions
     - we know which insns are redundant when we go to create copies  */

  changed = pre_delete ();

  did_insert = pre_edge_insert (edge_list, index_map);

  /* In other places with reaching expressions, copy the expression to the
     specially allocated pseudo-reg that reaches the redundant expr.  */
  pre_insert_copies ();
  if (did_insert)
    {
      commit_edge_insertions ();
      changed = 1;
    }

  free (index_map);
  free (pre_redundant_insns);
  return changed;
}

/* Top level routine to perform one PRE GCSE pass.

   Return non-zero if a change was made.  */

static int
one_pre_gcse_pass (pass)
     int pass;
{
  int changed = 0;

  gcse_subst_count = 0;
  gcse_create_count = 0;

  alloc_expr_hash_table (max_cuid);
  add_noreturn_fake_exit_edges ();
  if (flag_gcse_lm)
    compute_ld_motion_mems ();

  compute_expr_hash_table ();
  trim_ld_motion_mems ();
  if (gcse_file)
    dump_hash_table (gcse_file, "Expression", expr_hash_table,
		     expr_hash_table_size, n_exprs);

  if (n_exprs > 0)
    {
      alloc_pre_mem (n_basic_blocks, n_exprs);
      compute_pre_data ();
      changed |= pre_gcse ();
      free_edge_list (edge_list);
      free_pre_mem ();
    }

  free_ldst_mems ();
  remove_fake_edges ();
  free_expr_hash_table ();

  if (gcse_file)
    {
      fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
	       current_function_name, pass, bytes_used);
      fprintf (gcse_file, "%d substs, %d insns created\n",
	       gcse_subst_count, gcse_create_count);
    }

  return changed;
}

/* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
   If notes are added to an insn which references a CODE_LABEL, the
   LABEL_NUSES count is incremented.  We have to add REG_LABEL notes,
   because the following loop optimization pass requires them.  */

/* ??? This is very similar to the loop.c add_label_notes function.  We
   could probably share code here.  */

/* ??? If there was a jump optimization pass after gcse and before loop,
   then we would not need to do this here, because jump would add the
   necessary REG_LABEL notes.  */

static void
add_label_notes (x, insn)
     rtx x;
     rtx insn;
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  const char *fmt;

  if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
    {
      /* This code used to ignore labels that referred to dispatch tables to
	 avoid flow generating (slightly) worse code.

	 We no longer ignore such label references (see LABEL_REF handling in
	 mark_jump_label for additional information).  */

      REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
					    REG_NOTES (insn));
      if (LABEL_P (XEXP (x, 0)))
	LABEL_NUSES (XEXP (x, 0))++;
      return;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	add_label_notes (XEXP (x, i), insn);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  add_label_notes (XVECEXP (x, i, j), insn);
    }
}

/* Compute transparent outgoing information for each block.

   An expression is transparent to an edge unless it is killed by
   the edge itself.  This can only happen with abnormal control flow,
   when the edge is traversed through a call.  This happens with
   non-local labels and exceptions.

   This would not be necessary if we split the edge.  While this is
   normally impossible for abnormal critical edges, with some effort
   it should be possible with exception handling, since we still have
   control over which handler should be invoked.  But due to increased
   EH table sizes, this may not be worthwhile.  */

static void
compute_transpout ()
{
  int bb;
  unsigned int i;
  struct expr *expr;

  sbitmap_vector_ones (transpout, n_basic_blocks);

  for (bb = 0; bb < n_basic_blocks; ++bb)
    {
      /* Note that flow inserted a nop at the end of basic blocks that
	 end in call instructions for reasons other than abnormal
	 control flow.  */
      if (GET_CODE (BLOCK_END (bb)) != CALL_INSN)
	continue;

      for (i = 0; i < expr_hash_table_size; i++)
	for (expr = expr_hash_table[i]; expr ; expr = expr->next_same_hash)
	  if (GET_CODE (expr->expr) == MEM)
	    {
	      if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
		  && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
		continue;

	      /* ??? Optimally, we would use interprocedural alias
		 analysis to determine if this mem is actually killed
		 by this call.  */
	      RESET_BIT (transpout[bb], expr->bitmap_index);
	    }
    }
}
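
/* For illustration (a sketch, not from the original sources): if a block
   ends in a CALL_INSN, every MEM expression in the hash table, e.g.
   (mem:SI (symbol_ref "x")), has its TRANSPOUT bit cleared, because the
   call may reach a non-local label or exception handler that clobbers
   that memory.  Only constant pool references, which no call can modify,
   keep their bits.  */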

/* Removal of useless null pointer checks.  */

/* Called via note_stores.  X is set by SETTER.  If X is a register we must
   invalidate nonnull_local and set nonnull_killed.  DATA is really a
   `null_pointer_info *'.

   We ignore hard registers.  */

static void
invalidate_nonnull_info (x, setter, data)
     rtx x;
     rtx setter ATTRIBUTE_UNUSED;
     void *data;
{
  unsigned int regno;
  struct null_pointer_info *npi = (struct null_pointer_info *) data;

  while (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* Ignore anything that is not a register or is a hard register.  */
  if (GET_CODE (x) != REG
      || REGNO (x) < npi->min_reg
      || REGNO (x) >= npi->max_reg)
    return;

  regno = REGNO (x) - npi->min_reg;

  RESET_BIT (npi->nonnull_local[npi->current_block], regno);
  SET_BIT (npi->nonnull_killed[npi->current_block], regno);
}

/* Do null-pointer check elimination for the registers indicated in
   NPI.  NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
   they are not our responsibility to free.  */

static void
delete_null_pointer_checks_1 (delete_list, block_reg, nonnull_avin,
			      nonnull_avout, npi)
     varray_type *delete_list;
     unsigned int *block_reg;
     sbitmap *nonnull_avin;
     sbitmap *nonnull_avout;
     struct null_pointer_info *npi;
{
  int bb;
  int current_block;
  sbitmap *nonnull_local = npi->nonnull_local;
  sbitmap *nonnull_killed = npi->nonnull_killed;

  /* Compute local properties, nonnull and killed.  A register will have
     the nonnull property if at the end of the current block its value is
     known to be nonnull.  The killed property indicates that somewhere in
     the block any information we had about the register is killed.

     Note that a register can have both properties in a single block.  That
     indicates that it's killed, then later in the block a new value is
     computed.  */
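
  /* An illustrative sketch: in a block containing

	 x = *p;		p becomes nonnull locally
	 p = q;			kills what we knew about p
	 y = *p;		p is nonnull again at block end

     both the nonnull and killed bits end up set for P.  */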
  sbitmap_vector_zero (nonnull_local, n_basic_blocks);
  sbitmap_vector_zero (nonnull_killed, n_basic_blocks);

  for (current_block = 0; current_block < n_basic_blocks; current_block++)
    {
      rtx insn, stop_insn;

      /* Set the current block for invalidate_nonnull_info.  */
      npi->current_block = current_block;

      /* Scan each insn in the basic block looking for memory references and
	 register sets.  */
      stop_insn = NEXT_INSN (BLOCK_END (current_block));
      for (insn = BLOCK_HEAD (current_block);
	   insn != stop_insn;
	   insn = NEXT_INSN (insn))
	{
	  rtx set;
	  rtx reg;

	  /* Ignore anything that is not a normal insn.  */
	  if (! INSN_P (insn))
	    continue;

	  /* Basically ignore anything that is not a simple SET.  We do have
	     to make sure to invalidate nonnull_local and set nonnull_killed
	     for such insns though.  */
	  set = single_set (insn);
	  if (!set)
	    {
	      note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
	      continue;
	    }

	  /* See if we've got a usable memory load.  We handle it first
	     in case it uses its address register as a dest (which kills
	     the nonnull property).  */
	  if (GET_CODE (SET_SRC (set)) == MEM
	      && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
	      && REGNO (reg) >= npi->min_reg
	      && REGNO (reg) < npi->max_reg)
	    SET_BIT (nonnull_local[current_block],
		     REGNO (reg) - npi->min_reg);

	  /* Now invalidate stuff clobbered by this insn.  */
	  note_stores (PATTERN (insn), invalidate_nonnull_info, npi);

	  /* And handle stores; we do these last since any sets in INSN can
	     not kill the nonnull property if it is derived from a MEM
	     appearing in a SET_DEST.  */
	  if (GET_CODE (SET_DEST (set)) == MEM
	      && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
	      && REGNO (reg) >= npi->min_reg
	      && REGNO (reg) < npi->max_reg)
	    SET_BIT (nonnull_local[current_block],
		     REGNO (reg) - npi->min_reg);
	}
    }

  /* Now compute global properties based on the local properties.  This
     is a classic global availability algorithm.  */
  compute_available (nonnull_local, nonnull_killed,
		     nonnull_avout, nonnull_avin);
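
  /* (A sketch of the standard availability equations solved here:

	 AVIN(b)  = intersection over all predecessors P of AVOUT(P),
		    empty for the entry block
	 AVOUT(b) = LOCAL(b) | (AVIN(b) & ~KILLED(b))

     so a register is known nonnull on entry to a block only if it is
     known nonnull on exit from every predecessor.)  */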

  /* Now look at each bb and see if it ends with a compare of a value
     against zero.  */
  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      rtx last_insn = BLOCK_END (bb);
      rtx condition, earliest;
      int compare_and_branch;

      /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
	 since BLOCK_REG[BB] is zero if this block did not end with a
	 comparison against zero, this condition works.  */
      if (block_reg[bb] < npi->min_reg
	  || block_reg[bb] >= npi->max_reg)
	continue;

      /* LAST_INSN is a conditional jump.  Get its condition.  */
      condition = get_condition (last_insn, &earliest);

      /* If we can't determine the condition then skip.  */
      if (! condition)
	continue;

      /* Is the register known to have a nonzero value?  */
      if (!TEST_BIT (nonnull_avout[bb], block_reg[bb] - npi->min_reg))
	continue;

      /* Try to compute whether the compare/branch at the loop end is one or
	 two instructions.  */
      if (earliest == last_insn)
	compare_and_branch = 1;
      else if (earliest == prev_nonnote_insn (last_insn))
	compare_and_branch = 2;
      else
	continue;

      /* We know the register in this comparison is nonnull at exit from
	 this block.  We can optimize this comparison.  */
      if (GET_CODE (condition) == NE)
	{
	  rtx new_jump;

	  new_jump = emit_jump_insn_before (gen_jump (JUMP_LABEL (last_insn)),
					    last_insn);
	  JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
	  LABEL_NUSES (JUMP_LABEL (new_jump))++;
	  emit_barrier_after (new_jump);
	}

      if (!*delete_list)
	VARRAY_RTX_INIT (*delete_list, 10, "delete_list");

      VARRAY_PUSH_RTX (*delete_list, last_insn);
      if (compare_and_branch == 2)
	VARRAY_PUSH_RTX (*delete_list, earliest);

      /* Don't check this block again.  (Note that BLOCK_END is
	 invalid here; we deleted the last instruction in the
	 block.)  */
      block_reg[bb] = 0;
    }
}

/* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
   at compile time.

   This is conceptually similar to global constant/copy propagation and
   classic global CSE (it even uses the same dataflow equations as cprop).

   If a register is used as a memory address with the form (mem (reg)), then
   we know that REG can not be zero at that point in the program.  Any
   instruction which sets REG "kills" this property.

   So, if every path leading to a conditional branch has an available memory
   reference of that form, then we know the register can not have the value
   zero at the conditional branch.

   So we merely need to compute the local properties and propagate that data
   around the cfg, then optimize where possible.

   We run this pass two times.  Once before CSE, then again after CSE.  This
   has proven to be the most profitable approach.  It is rare for new
   optimization opportunities of this nature to appear after the first CSE
   pass.

   This could probably be integrated with global cprop with a little work.  */
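
/* An illustrative example (not from the original sources):

     x = *p;		   the (mem (reg p)) makes p known nonnull
     ...
     if (p == 0)	   can never be taken; the check is deleted
       return;

   Every path to the test contains the dereference.  For an NE comparison
   the conditional branch is instead replaced by an unconditional jump.  */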

void
delete_null_pointer_checks (f)
     rtx f ATTRIBUTE_UNUSED;
{
  sbitmap *nonnull_avin, *nonnull_avout;
  unsigned int *block_reg;
  varray_type delete_list = NULL;
  int bb;
  int reg;
  int regs_per_pass;
  int max_reg;
  unsigned int i;
  struct null_pointer_info npi;

  /* If we have only a single block, then there's nothing to do.  */
  if (n_basic_blocks <= 1)
    return;

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    return;

  /* We need four bitmaps, each with a bit for each register in each
     basic block.  */
  max_reg = max_reg_num ();
  regs_per_pass = get_bitmap_width (4, n_basic_blocks, max_reg);

  /* Allocate bitmaps to hold local and global properties.  */
  npi.nonnull_local = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
  npi.nonnull_killed = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
  nonnull_avin = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
  nonnull_avout = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);

  /* Go through the basic blocks, seeing whether or not each block
     ends with a conditional branch whose condition is a comparison
     against zero.  Record the register compared in BLOCK_REG.  */
  block_reg = (unsigned int *) xcalloc (n_basic_blocks, sizeof (int));
  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      rtx last_insn = BLOCK_END (bb);
      rtx condition, earliest, reg;

      /* We only want conditional branches.  */
      if (GET_CODE (last_insn) != JUMP_INSN
	  || !any_condjump_p (last_insn)
	  || !onlyjump_p (last_insn))
	continue;

      /* LAST_INSN is a conditional jump.  Get its condition.  */
      condition = get_condition (last_insn, &earliest);

      /* If we were unable to get the condition, or it is not an equality
	 comparison against zero then there's nothing we can do.  */
      if (!condition
	  || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
	  || GET_CODE (XEXP (condition, 1)) != CONST_INT
	  || (XEXP (condition, 1)
	      != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
	continue;

      /* We must be checking a register against zero.  */
      reg = XEXP (condition, 0);
      if (GET_CODE (reg) != REG)
	continue;

      block_reg[bb] = REGNO (reg);
    }

  /* Go through the algorithm for each block of registers.  */
  for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
    {
      npi.min_reg = reg;
      npi.max_reg = MIN (reg + regs_per_pass, max_reg);
      delete_null_pointer_checks_1 (&delete_list, block_reg, nonnull_avin,
				    nonnull_avout, &npi);
    }

  /* Now delete the instructions all at once.  This breaks the CFG.  */
  if (delete_list)
    {
      for (i = 0; i < VARRAY_ACTIVE_SIZE (delete_list); i++)
	delete_related_insns (VARRAY_RTX (delete_list, i));
      VARRAY_FREE (delete_list);
    }

  /* Free the table of registers compared at the end of every block.  */
  free (block_reg);

  /* Free bitmaps.  */
  sbitmap_vector_free (npi.nonnull_local);
  sbitmap_vector_free (npi.nonnull_killed);
  sbitmap_vector_free (nonnull_avin);
  sbitmap_vector_free (nonnull_avout);
}

/* Code Hoisting variables and subroutines.  */

/* Very busy expressions.  */
static sbitmap *hoist_vbein;
static sbitmap *hoist_vbeout;

/* Hoistable expressions.  */
static sbitmap *hoist_exprs;

/* Dominator bitmaps.  */
static sbitmap *dominators;

/* ??? We could compute post dominators and run this algorithm in
   reverse to perform tail merging; doing so would probably be
   more effective than the tail merging code in jump.c.

   It's unclear if tail merging could be run in parallel with
   code hoisting.  It would be nice.  */

/* Allocate vars used for code hoisting analysis.  */

static void
alloc_code_hoist_mem (n_blocks, n_exprs)
     int n_blocks, n_exprs;
{
  antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
  transp = sbitmap_vector_alloc (n_blocks, n_exprs);
  comp = sbitmap_vector_alloc (n_blocks, n_exprs);

  hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
  hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
  hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
  transpout = sbitmap_vector_alloc (n_blocks, n_exprs);

  dominators = sbitmap_vector_alloc (n_blocks, n_blocks);
}

/* Free vars used for code hoisting analysis.  */

static void
free_code_hoist_mem ()
{
  sbitmap_vector_free (antloc);
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);

  sbitmap_vector_free (hoist_vbein);
  sbitmap_vector_free (hoist_vbeout);
  sbitmap_vector_free (hoist_exprs);
  sbitmap_vector_free (transpout);

  sbitmap_vector_free (dominators);
}

/* Compute the very busy expressions at entry/exit from each block.

   An expression is very busy if all paths from a given point
   compute the expression.  */

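/* (A sketch of the equations the loop below solves, with ANTLOC and
   TRANSP the usual local properties:

     VBEIN(b)  = ANTLOC(b) | (VBEOUT(b) & TRANSP(b))
     VBEOUT(b) = intersection of VBEIN over all successors of b,
		 with the last block's VBEOUT left empty

   iterated to a fixed point, scanning the blocks in reverse order.)  */
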
static void
compute_code_hoist_vbeinout ()
{
  int bb, changed, passes;

  sbitmap_vector_zero (hoist_vbeout, n_basic_blocks);
  sbitmap_vector_zero (hoist_vbein, n_basic_blocks);

  passes = 0;
  changed = 1;

  while (changed)
    {
      changed = 0;

      /* We scan the blocks in the reverse order to speed up
	 the convergence.  */
      for (bb = n_basic_blocks - 1; bb >= 0; bb--)
	{
	  changed |= sbitmap_a_or_b_and_c (hoist_vbein[bb], antloc[bb],
					   hoist_vbeout[bb], transp[bb]);
	  if (bb != n_basic_blocks - 1)
	    sbitmap_intersection_of_succs (hoist_vbeout[bb], hoist_vbein, bb);
	}

      passes++;
    }

  if (gcse_file)
    fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
}

/* Top level routine to do the dataflow analysis needed by code hoisting.  */

static void
compute_code_hoist_data ()
{
  compute_local_properties (transp, comp, antloc, 0);
  compute_transpout ();
  compute_code_hoist_vbeinout ();
  calculate_dominance_info (NULL, dominators, CDI_DOMINATORS);
  if (gcse_file)
    fprintf (gcse_file, "\n");
}

/* Determine if the expression identified by EXPR_INDEX would
   reach BB unimpaired if it was placed at the end of EXPR_BB.

   It's unclear exactly what Muchnick meant by "unimpaired".  It seems
   to me that the expression must either be computed or transparent in
   *every* block in the path(s) from EXPR_BB to BB.  Any other definition
   would allow the expression to be hoisted out of loops, even if
   the expression wasn't a loop invariant.

   Contrast this to reachability for PRE where an expression is
   considered reachable if *any* path reaches instead of *all*
   paths.  */

static int
hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited)
     basic_block expr_bb;
     int expr_index;
     basic_block bb;
     char *visited;
{
  edge pred;
  int visited_allocated_locally = 0;

  if (visited == NULL)
    {
      visited_allocated_locally = 1;
      visited = xcalloc (n_basic_blocks, 1);
    }

  for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
    {
      basic_block pred_bb = pred->src;

      if (pred->src == ENTRY_BLOCK_PTR)
	break;
      else if (visited[pred_bb->index])
	continue;

      /* Does this predecessor generate this expression?  */
      else if (TEST_BIT (comp[pred_bb->index], expr_index))
	break;
      else if (! TEST_BIT (transp[pred_bb->index], expr_index))
	break;

      /* Not killed.  */
      else
	{
	  visited[pred_bb->index] = 1;
	  if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
					   pred_bb, visited))
	    break;
	}
    }
  if (visited_allocated_locally)
    free (visited);

  return (pred == NULL);
}

/* Actually perform code hoisting.  */

static void
hoist_code ()
{
  int bb, dominated;
  unsigned int i;
  struct expr **index_map;
  struct expr *expr;

  sbitmap_vector_zero (hoist_exprs, n_basic_blocks);

  /* Compute a mapping from expression number (`bitmap_index') to
     hash table entry.  */

  index_map = (struct expr **) xcalloc (n_exprs, sizeof (struct expr *));
  for (i = 0; i < expr_hash_table_size; i++)
    for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
      index_map[expr->bitmap_index] = expr;

  /* Walk over each basic block looking for potentially hoistable
     expressions; nothing gets hoisted from the entry block.  */
  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      int found = 0;
      int insn_inserted_p;

      /* Examine each expression that is very busy at the exit of this
	 block.  These are the potentially hoistable expressions.  */
      for (i = 0; i < hoist_vbeout[bb]->n_bits; i++)
	{
	  int hoistable = 0;

	  if (TEST_BIT (hoist_vbeout[bb], i) && TEST_BIT (transpout[bb], i))
	    {
	      /* We've found a potentially hoistable expression, now
		 we look at every block BB dominates to see if it
		 computes the expression.  */
	      for (dominated = 0; dominated < n_basic_blocks; dominated++)
		{
		  /* Ignore self dominance.  */
		  if (bb == dominated
		      || ! TEST_BIT (dominators[dominated], bb))
		    continue;

		  /* We've found a dominated block, now see if it computes
		     the busy expression and whether or not moving that
		     expression to the "beginning" of that block is safe.  */
		  if (!TEST_BIT (antloc[dominated], i))
		    continue;

		  /* Note if the expression would reach the dominated block
		     unimpaired if it was placed at the end of BB.

		     Keep track of how many times this expression is hoistable
		     from a dominated block into BB.  */
		  if (hoist_expr_reaches_here_p (BASIC_BLOCK (bb), i,
						 BASIC_BLOCK (dominated), NULL))
		    hoistable++;
		}

	      /* If we found more than one hoistable occurrence of this
		 expression, then note it in the bitmap of expressions to
		 hoist.  It makes no sense to hoist things which are computed
		 in only one BB, and doing so tends to pessimize register
		 allocation.  One could increase this value to try harder
		 to avoid any possible code expansion due to register
		 allocation issues; however experiments have shown that
		 the vast majority of hoistable expressions are only movable
		 from two successors, so raising this threshold is likely
		 to nullify any benefit we get from code hoisting.  */
	      if (hoistable > 1)
		{
		  SET_BIT (hoist_exprs[bb], i);
		  found = 1;
		}
	    }
	}

      /* If we found nothing to hoist, then quit now.  */
      if (! found)
	continue;

      /* Loop over all the hoistable expressions.  */
      for (i = 0; i < hoist_exprs[bb]->n_bits; i++)
	{
	  /* We want to insert the expression into BB only once, so
	     note when we've inserted it.  */
	  insn_inserted_p = 0;

	  /* These tests should be the same as the tests above.  */
	  if (TEST_BIT (hoist_vbeout[bb], i))
	    {
	      /* We've found a potentially hoistable expression, now
		 we look at every block BB dominates to see if it
		 computes the expression.  */
	      for (dominated = 0; dominated < n_basic_blocks; dominated++)
		{
		  /* Ignore self dominance.  */
		  if (bb == dominated
		      || ! TEST_BIT (dominators[dominated], bb))
		    continue;

		  /* We've found a dominated block, now see if it computes
		     the busy expression and whether or not moving that
		     expression to the "beginning" of that block is safe.  */
		  if (!TEST_BIT (antloc[dominated], i))
		    continue;

		  /* The expression is computed in the dominated block and
		     it would be safe to compute it at the start of the
		     dominated block.  Now we have to determine if the
		     expression would reach the dominated block if it was
		     placed at the end of BB.  */
		  if (hoist_expr_reaches_here_p (BASIC_BLOCK (bb), i,
						 BASIC_BLOCK (dominated), NULL))
		    {
		      struct expr *expr = index_map[i];
		      struct occr *occr = expr->antic_occr;
		      rtx insn;
		      rtx set;

		      /* Find the right occurrence of this expression.  */
		      while (occr && BLOCK_NUM (occr->insn) != dominated)
			occr = occr->next;

		      /* Should never happen.  */
		      if (!occr)
			abort ();

		      insn = occr->insn;

		      set = single_set (insn);
		      if (! set)
			abort ();

		      /* Create a pseudo-reg to store the result of reaching
			 expressions into.  Get the mode for the new pseudo
			 from the mode of the original destination pseudo.  */
		      if (expr->reaching_reg == NULL)
			expr->reaching_reg
			  = gen_reg_rtx (GET_MODE (SET_DEST (set)));

		      /* In theory this should never fail since we're creating
			 a reg->reg copy.

			 However, on the x86 some of the movXX patterns
			 actually contain clobbers of scratch regs.  This may
			 cause the insn created by validate_change to not
			 match any pattern and thus cause validate_change to
			 fail.  */
		      if (validate_change (insn, &SET_SRC (set),
					   expr->reaching_reg, 0))
			{
			  occr->deleted_p = 1;
			  if (!insn_inserted_p)
			    {
			      insert_insn_end_bb (index_map[i],
						  BASIC_BLOCK (bb), 0);
			      insn_inserted_p = 1;
			    }
			}
		    }
		}
	    }
	}
    }

  free (index_map);
}

/* Top level routine to perform one code hoisting (aka unification) pass.

   Return non-zero if a change was made.  */

static int
one_code_hoisting_pass ()
{
  int changed = 0;

  alloc_expr_hash_table (max_cuid);
  compute_expr_hash_table ();
  if (gcse_file)
    dump_hash_table (gcse_file, "Code Hoisting Expressions", expr_hash_table,
		     expr_hash_table_size, n_exprs);

  if (n_exprs > 0)
    {
      alloc_code_hoist_mem (n_basic_blocks, n_exprs);
      compute_code_hoist_data ();
      hoist_code ();
      free_code_hoist_mem ();
    }

  free_expr_hash_table ();

  return changed;
}

/* Here we provide the things required to do store motion towards
   the exit.  In order for this to be effective, gcse also needed to
   be taught how to move a load when it is killed only by a store to
   itself.

	    int i;
	    float a[10];

	    void foo(float scale)
	    {
	      for (i=0; i<10; i++)
		a[i] *= scale;
	    }

   'i' is both loaded and stored to in the loop.  Normally, gcse cannot move
   the load out since it's live around the loop, and stored at the bottom
   of the loop.

   The 'Load Motion' referred to and implemented in this file is
   an enhancement to gcse which, when using edge based lcm, recognizes
   this situation and allows gcse to move the load out of the loop.

   Once gcse has hoisted the load, store motion can then push this
   load towards the exit, and we end up with no loads or stores of 'i'
   in the loop.  */

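/* (An illustrative sketch, not compiler output: once load motion has
   hoisted the load of 'i' into a reaching register REG and store motion
   has sunk the store, the loop uses only REG:

	    reg = i;
	    for (reg = 0; reg < 10; reg++)
	      a[reg] *= scale;
	    i = reg;

   leaving one load before the loop and one store after it.)  */
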
/* This will search the ldst list for a matching expression.  If it
   doesn't find one, we create one and initialize it.  */

static struct ls_expr *
ldst_entry (x)
     rtx x;
{
  struct ls_expr * ptr;

  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    if (expr_equiv_p (ptr->pattern, x))
      break;

  if (!ptr)
    {
      ptr = (struct ls_expr *) xmalloc (sizeof (struct ls_expr));

      ptr->next = pre_ldst_mems;
      ptr->expr = NULL;
      ptr->pattern = x;
      ptr->loads = NULL_RTX;
      ptr->stores = NULL_RTX;
      ptr->reaching_reg = NULL_RTX;
      ptr->invalid = 0;
      ptr->index = 0;
      ptr->hash_index = 0;
      pre_ldst_mems = ptr;
    }

  return ptr;
}

/* Free up an individual ldst entry.  */

static void
free_ldst_entry (ptr)
     struct ls_expr * ptr;
{
  free_INSN_LIST_list (& ptr->loads);
  free_INSN_LIST_list (& ptr->stores);

  free (ptr);
}

/* Free up all memory associated with the ldst list.  */

static void
free_ldst_mems ()
{
  while (pre_ldst_mems)
    {
      struct ls_expr * tmp = pre_ldst_mems;

      pre_ldst_mems = pre_ldst_mems->next;

      free_ldst_entry (tmp);
    }

  pre_ldst_mems = NULL;
}

/* Dump debugging info about the ldst list.  */

static void
print_ldst_list (file)
     FILE * file;
{
  struct ls_expr * ptr;

  fprintf (file, "LDST list: \n");

  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      fprintf (file, "  Pattern (%3d): ", ptr->index);

      print_rtl (file, ptr->pattern);

      fprintf (file, "\n	 Loads : ");

      if (ptr->loads)
	print_rtl (file, ptr->loads);
      else
	fprintf (file, "(nil)");

      fprintf (file, "\n	Stores : ");

      if (ptr->stores)
	print_rtl (file, ptr->stores);
      else
	fprintf (file, "(nil)");

      fprintf (file, "\n\n");
    }

  fprintf (file, "\n");
}

/* Return the entry in the ldst list matching X, or NULL if X is not
   in the list or its entry has been invalidated.  */

static struct ls_expr *
find_rtx_in_ldst (x)
     rtx x;
{
  struct ls_expr * ptr;

  for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
    if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
      return ptr;

  return NULL;
}

/* Assign each element of the list of mems a monotonically increasing value.  */

static int
enumerate_ldsts ()
{
  struct ls_expr * ptr;
  int n = 0;

  for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
    ptr->index = n++;

  return n;
}

/* Return first item in the list.  */

static inline struct ls_expr *
first_ls_expr ()
{
  return pre_ldst_mems;
}

/* Return the next item in the list after the specified one.  */

static inline struct ls_expr *
next_ls_expr (ptr)
     struct ls_expr * ptr;
{
  return ptr->next;
}

/* Load Motion for loads which only kill themselves.  */

/* Return true if X is a simple MEM operation, with no registers or
   side effects.  These are the types of loads we consider for the
   ld_motion list, otherwise we let the usual aliasing take care of it.  */

static int
simple_mem (x)
     rtx x;
{
  if (GET_CODE (x) != MEM)
    return 0;

  if (MEM_VOLATILE_P (x))
    return 0;

  if (GET_MODE (x) == BLKmode)
    return 0;

  if (!rtx_varies_p (XEXP (x, 0), 0))
    return 1;

  return 0;
}
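
/* (Illustrative: a fixed-address load such as (mem:SI (symbol_ref "i"))
   is "simple"; a load through a register, e.g. (mem:SI (reg 120)), has a
   varying address and is left to the usual aliasing machinery.)  */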

/* Make sure there isn't a buried reference in this pattern anywhere.
   If there is, invalidate the entry for it since we're not capable
   of fixing it up just yet.  We have to be sure we know about ALL
   loads since the aliasing code will allow all entries in the
   ld_motion list to not-alias themselves.  If we miss a load, we will
   get the wrong value since gcse might common it and we won't know to
   fix it up.  */

static void
invalidate_any_buried_refs (x)
     rtx x;
{
  const char * fmt;
  int i, j;
  struct ls_expr * ptr;

  /* Invalidate it in the list.  */
  if (GET_CODE (x) == MEM && simple_mem (x))
    {
      ptr = ldst_entry (x);
      ptr->invalid = 1;
    }

  /* Recursively process the insn.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	invalidate_any_buried_refs (XEXP (x, i));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  invalidate_any_buried_refs (XVECEXP (x, i, j));
    }
}

/* Find all the 'simple' MEMs which are used in LOADs and STORES.  Simple
   being defined as MEM loads and stores to symbols, with no
   side effects and no registers in the expression.  If there are any
   uses/defs which don't match these criteria, the entry is invalidated
   and trimmed out later.  */

static void
compute_ld_motion_mems ()
{
  struct ls_expr * ptr;
  int bb;
  rtx insn;

  pre_ldst_mems = NULL;

  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      for (insn = BLOCK_HEAD (bb);
	   insn && insn != NEXT_INSN (BLOCK_END (bb));
	   insn = NEXT_INSN (insn))
	{
	  if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	    {
	      if (GET_CODE (PATTERN (insn)) == SET)
		{
		  rtx src = SET_SRC (PATTERN (insn));
		  rtx dest = SET_DEST (PATTERN (insn));

		  /* Check for a simple LOAD...  */
		  if (GET_CODE (src) == MEM && simple_mem (src))
		    {
		      ptr = ldst_entry (src);
		      if (GET_CODE (dest) == REG)
			ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
		      else
			ptr->invalid = 1;
		    }
		  else
		    {
		      /* Make sure there isn't a buried load somewhere.  */
		      invalidate_any_buried_refs (src);
		    }

		  /* Check for stores.  Don't worry about aliased ones; they
		     will block any movement we might do later.  We only care
		     about this exact pattern since those are the only
		     circumstances in which we will ignore the aliasing info.  */
		  if (GET_CODE (dest) == MEM && simple_mem (dest))
		    {
		      ptr = ldst_entry (dest);

		      if (GET_CODE (src) != MEM
			  && GET_CODE (src) != ASM_OPERANDS)
			ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
		      else
			ptr->invalid = 1;
		    }
		}
	      else
		invalidate_any_buried_refs (PATTERN (insn));
	    }
	}
    }
}

/* Remove any references that have been either invalidated or are not in the
   expression list for pre gcse.  */

static void
trim_ld_motion_mems ()
{
  struct ls_expr * last = NULL;
  struct ls_expr * ptr = first_ls_expr ();

  while (ptr != NULL)
    {
      int del = ptr->invalid;
      struct expr * expr = NULL;

      /* Delete if entry has been made invalid.  */
      if (!del)
	{
	  unsigned int i;

	  del = 1;
	  /* Delete if we cannot find this mem in the expression list.  */
	  for (i = 0; i < expr_hash_table_size && del; i++)
	    {
	      for (expr = expr_hash_table[i];
		   expr != NULL;
		   expr = expr->next_same_hash)
		if (expr_equiv_p (expr->expr, ptr->pattern))
		  {
		    del = 0;
		    break;
		  }
	    }
	}

      if (del)
	{
	  if (last != NULL)
	    {
	      last->next = ptr->next;
	      free_ldst_entry (ptr);
	      ptr = last->next;
	    }
	  else
	    {
	      pre_ldst_mems = pre_ldst_mems->next;
	      free_ldst_entry (ptr);
	      ptr = pre_ldst_mems;
	    }
	}
      else
	{
	  /* Set the expression field if we are keeping it.  */
	  last = ptr;
	  ptr->expr = expr;
	  ptr = ptr->next;
	}
    }

  /* Show the world what we've found.  */
  if (gcse_file && pre_ldst_mems != NULL)
    print_ldst_list (gcse_file);
}

/* This routine will take an expression which we are replacing with
   a reaching register, and update any stores that are needed if
   that expression is in the ld_motion list.  Stores are updated by
   copying their SRC to the reaching register, and then storing
   the reaching register into the store location.  This keeps the
   correct value in the reaching register for the loads.  */

static void
update_ld_motion_stores (expr)
     struct expr * expr;
{
  struct ls_expr * mem_ptr;

  if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
    {
      /* We can try to find just the REACHED stores, but it shouldn't
	 matter to set the reaching reg everywhere...  some might be
	 dead and should be eliminated later.  */

      /* We replace  SET mem = expr  with
	   SET reg = expr
	   SET mem = reg , where reg is the
	   reaching reg used in the load.  */
      rtx list = mem_ptr->stores;

      for ( ; list != NULL_RTX; list = XEXP (list, 1))
	{
	  rtx insn = XEXP (list, 0);
	  rtx pat = PATTERN (insn);
	  rtx src = SET_SRC (pat);
	  rtx reg = expr->reaching_reg;
	  rtx copy, new;

	  /* If we've already copied it, continue.  */
	  if (expr->reaching_reg == src)
	    continue;

	  if (gcse_file)
	    {
	      fprintf (gcse_file, "PRE: store updated with reaching reg ");
	      print_rtl (gcse_file, expr->reaching_reg);
	      fprintf (gcse_file, ":\n ");
	      print_inline_rtx (gcse_file, insn, 8);
	      fprintf (gcse_file, "\n");
	    }

	  copy = gen_move_insn (reg, SET_SRC (pat));
	  new = emit_insn_before (copy, insn);
	  record_one_set (REGNO (reg), new);
	  SET_SRC (pat) = reg;

	  /* Un-recognize this pattern since it's probably different now.  */
	  INSN_CODE (insn) = -1;
	  gcse_create_count++;
	}
    }
}

/* Store motion code.  */

/* This is used to communicate the target bitvector we want to use in the
   reg_set_info routine when called via the note_stores mechanism.  */
static sbitmap * regvec;

/* Used in computing the reverse edge graph bit vectors.  */
static sbitmap * st_antloc;

/* Global holding the number of store expressions we are dealing with.  */
static int num_stores;

/* Checks to see if we need to mark a register set.  Called from
   note_stores.  */

static void
reg_set_info (dest, setter, data)
     rtx dest, setter ATTRIBUTE_UNUSED;
     void * data ATTRIBUTE_UNUSED;
{
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    SET_BIT (*regvec, REGNO (dest));
}

/* Return non-zero if the register operands of expression X are OK,
   i.e. not clobbered anywhere in basic block BB.  */

static int
store_ops_ok (x, bb)
     rtx x;
     basic_block bb;
{
  int i;
  enum rtx_code code;
  const char * fmt;

  /* Repeat is used to turn tail-recursion into iteration.  */
 repeat:

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      /* If a reg has changed after us in this
	 block, the operand has been killed.  */
      return ! TEST_BIT (reg_set_in_block[bb->index], REGNO (x));

    case MEM:
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);

  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx tem = XEXP (x, i);

	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = tem;
	      goto repeat;
	    }

	  if (! store_ops_ok (tem, bb))
	    return 0;
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      if (! store_ops_ok (XVECEXP (x, i, j), bb))
		return 0;
	    }
	}
    }

  return 1;
}

/* Determine whether INSN is a MEM store pattern that we will consider
   moving.  If it is, record it in the ldst list.  */

static void
find_moveable_store (insn)
     rtx insn;
{
  struct ls_expr * ptr;
  rtx dest = PATTERN (insn);

  if (GET_CODE (dest) != SET
      || GET_CODE (SET_SRC (dest)) == ASM_OPERANDS)
    return;

  dest = SET_DEST (dest);

  if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
      || GET_MODE (dest) == BLKmode)
    return;

  if (GET_CODE (XEXP (dest, 0)) != SYMBOL_REF)
    return;

  if (rtx_varies_p (XEXP (dest, 0), 0))
    return;

  ptr = ldst_entry (dest);
  ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
}
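
/* (Illustrative: an insn such as (set (mem:SI (symbol_ref "a")) (reg 120))
   passes all of the tests above and is recorded; volatile or BLKmode
   stores and stores through a register address are rejected.)  */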

/* Build the ldst list of store expressions for the function, and record,
   for each basic block, which registers it sets.  Returns the number of
   store expressions found.  */

static int
compute_store_table ()
{
  int bb, ret;
  unsigned regno;
  rtx insn, pat;

  max_gcse_regno = max_reg_num ();

  reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks,
						       max_gcse_regno);
  sbitmap_vector_zero (reg_set_in_block, n_basic_blocks);
  pre_ldst_mems = 0;

  /* Find all the stores we care about.  */
  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      regvec = & (reg_set_in_block[bb]);
      for (insn = BLOCK_END (bb);
	   insn && insn != PREV_INSN (BLOCK_HEAD (bb));
	   insn = PREV_INSN (insn))
	{
	  /* Ignore anything that is not a normal insn.  */
	  if (! INSN_P (insn))
	    continue;

	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP
	      if (NON_SAVING_SETJMP
		  && find_reg_note (insn, REG_SETJMP, NULL_RTX))
		clobbers_all = true;
#endif

	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		if (clobbers_all
		    || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
		  SET_BIT (reg_set_in_block[bb], regno);
	    }

	  pat = PATTERN (insn);
	  note_stores (pat, reg_set_info, NULL);

	  /* Now that we've marked regs, look for stores.  */
	  if (GET_CODE (pat) == SET)
	    find_moveable_store (insn);
	}
    }

  ret = enumerate_ldsts ();

  if (gcse_file)
    {
      fprintf (gcse_file, "Store Motion Expressions.\n");
      print_ldst_list (gcse_file);
    }

  return ret;
}

/* Check to see if the load X is aliased with STORE_PATTERN.  */

static int
load_kills_store (x, store_pattern)
     rtx x, store_pattern;
{
  if (true_dependence (x, GET_MODE (x), store_pattern, rtx_addr_varies_p))
    return 1;
  return 0;
}

/* Go through the entire insn X, looking for any loads which might alias
   STORE_PATTERN.  Return 1 if found.  */

static int
find_loads (x, store_pattern)
     rtx x, store_pattern;
{
  const char * fmt;
  int i, j;
  int ret = 0;

  if (!x)
    return 0;

  if (GET_CODE (x) == SET)
    x = SET_SRC (x);

  if (GET_CODE (x) == MEM)
    {
      if (load_kills_store (x, store_pattern))
	return 1;
    }

  /* Recursively process the insn.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
    {
      if (fmt[i] == 'e')
	ret |= find_loads (XEXP (x, i), store_pattern);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  ret |= find_loads (XVECEXP (x, i, j), store_pattern);
    }
  return ret;
}

/* Check if INSN kills the store pattern X (is aliased with it).
   Return 1 if it does.  */

static int
store_killed_in_insn (x, insn)
     rtx x, insn;
{
  if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
    return 0;

  if (GET_CODE (insn) == CALL_INSN)
    {
      if (CONST_OR_PURE_CALL_P (insn))
	return 0;
      else
	return 1;
    }

  if (GET_CODE (PATTERN (insn)) == SET)
    {
      rtx pat = PATTERN (insn);
      /* Check for memory stores to aliased objects.  */
      if (GET_CODE (SET_DEST (pat)) == MEM && !expr_equiv_p (SET_DEST (pat), x))
	/* Pretend it's a load and check for aliasing.  */
	if (find_loads (SET_DEST (pat), x))
	  return 1;
      return find_loads (SET_SRC (pat), x);
    }
  else
    return find_loads (PATTERN (insn), x);
}

/* Returns 1 if the expression X is loaded or clobbered on or after INSN
   within basic block BB.  */

static int
store_killed_after (x, insn, bb)
     rtx x, insn;
     basic_block bb;
{
  rtx last = bb->end;

  if (insn == last)
    return 0;

  /* Check if the register operands of the store are OK in this block.
     Note that if registers are changed ANYWHERE in the block, we'll
     decide we can't move it, regardless of whether it changed above
     or below the store.  This could be improved by checking the register
     operands while looking for aliasing in each insn.  */
  if (!store_ops_ok (XEXP (x, 0), bb))
    return 1;

  for ( ; insn && insn != NEXT_INSN (last); insn = NEXT_INSN (insn))
    if (store_killed_in_insn (x, insn))
      return 1;

  return 0;
}

/* Returns 1 if the expression X is loaded or clobbered on or before INSN
   within basic block BB.  */

static int
store_killed_before (x, insn, bb)
     rtx x, insn;
     basic_block bb;
{
  rtx first = bb->head;

  if (insn == first)
    return store_killed_in_insn (x, insn);

  /* Check if the register operands of the store are OK in this block.
     Note that if registers are changed ANYWHERE in the block, we'll
     decide we can't move it, regardless of whether it changed above
     or below the store.  This could be improved by checking the register
     operands while looking for aliasing in each insn.  */
  if (!store_ops_ok (XEXP (x, 0), bb))
    return 1;

  for ( ; insn && insn != PREV_INSN (first); insn = PREV_INSN (insn))
    if (store_killed_in_insn (x, insn))
      return 1;

  return 0;
}

#define ANTIC_STORE_LIST(x)	((x)->loads)
#define AVAIL_STORE_LIST(x)	((x)->stores)
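
/* (A gloss on the macros above: for store motion the LOADS and STORES
   fields of an ls_expr are reused.  ANTIC_STORE_LIST collects the store
   insns that are anticipatable, i.e. not killed earlier in their block,
   and AVAIL_STORE_LIST those that are available, i.e. not killed later
   in their block, as computed by build_store_vectors below.)  */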

/* Given the table of available store insns at the end of blocks,
   determine which ones are not killed by aliasing, and generate
   the appropriate vectors for gen and killed.  */

static void
build_store_vectors ()
{
  basic_block bb;
  int b;
  rtx insn, st;
  struct ls_expr * ptr;

  /* Build the gen_vector.  This is any store in the table which is not killed
     by aliasing later in its block.  */
  ae_gen = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, num_stores);
  sbitmap_vector_zero (ae_gen, n_basic_blocks);

  st_antloc = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, num_stores);
  sbitmap_vector_zero (st_antloc, n_basic_blocks);

  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      /* Put all the stores into either the antic list, or the avail list,
	 or both.  */
      rtx store_list = ptr->stores;
      ptr->stores = NULL_RTX;

      for (st = store_list; st != NULL; st = XEXP (st, 1))
	{
	  insn = XEXP (st, 0);
	  bb = BLOCK_FOR_INSN (insn);

	  if (!store_killed_after (ptr->pattern, insn, bb))
	    {
	      /* If we've already seen an available expression in this block,
		 we can delete the one we saw already (it occurs earlier in
		 the block) and replace it with this one.  We'll copy the
		 old SRC expression to an unused register in case there
		 are any side effects.  */
	      if (TEST_BIT (ae_gen[bb->index], ptr->index))
		{
		  /* Find previous store.  */
		  rtx st;
		  for (st = AVAIL_STORE_LIST (ptr); st ; st = XEXP (st, 1))
		    if (BLOCK_FOR_INSN (XEXP (st, 0)) == bb)
		      break;
		  if (st)
		    {
		      rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
		      if (gcse_file)
			fprintf (gcse_file, "Removing redundant store:\n");
		      replace_store_insn (r, XEXP (st, 0), bb);
		      XEXP (st, 0) = insn;
		      continue;
		    }
		}
	      SET_BIT (ae_gen[bb->index], ptr->index);
	      AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn,
							AVAIL_STORE_LIST (ptr));
	    }

	  if (!store_killed_before (ptr->pattern, insn, bb))
	    {
	      SET_BIT (st_antloc[BLOCK_NUM (insn)], ptr->index);
	      ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (insn,
							ANTIC_STORE_LIST (ptr));
	    }
	}

      /* Free the original list of store insns.  */
      free_INSN_LIST_list (&store_list);
    }

  ae_kill = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, num_stores);
  sbitmap_vector_zero (ae_kill, n_basic_blocks);

  transp = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, num_stores);
  sbitmap_vector_zero (transp, n_basic_blocks);

  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    for (b = 0; b < n_basic_blocks; b++)
      {
	if (store_killed_after (ptr->pattern, BLOCK_HEAD (b), BASIC_BLOCK (b)))
	  {
	    /* The anticipatable expression is not killed if it's gen'd.

	       We leave this check out for now.  If we have a code sequence
	       in a block which looks like:
		 ST MEMa = x
		 L  y = MEMa
		 ST MEMa = z
	       we should flag this as having an ANTIC expression, NOT
	       transparent, NOT killed, and AVAIL.
	       Unfortunately, since we haven't re-written all loads to
	       use the reaching reg, we'll end up doing an incorrect
	       load in the middle here if we push the store down.  It happens
	       in gcc.c-torture/execute/960311-1.c with -O3.
	       If we always kill it in this case, we'll sometimes do
	       unnecessary work, but it shouldn't actually hurt anything.
	       if (!TEST_BIT (ae_gen[b], ptr->index)).  */
	    SET_BIT (ae_kill[b], ptr->index);
	  }
	else
	  SET_BIT (transp[b], ptr->index);
      }

  /* Any block with no exits calls some non-returning function, so
     we'd better mark the store killed here, or we might not store to
     it at all.  If we knew it was abort, we wouldn't have to store,
     but we don't know that for sure.  */
  if (gcse_file)
    {
      fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
      print_ldst_list (gcse_file);
      dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, n_basic_blocks);
      dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, n_basic_blocks);
      dump_sbitmap_vector (gcse_file, "Transpt", "", transp, n_basic_blocks);
      dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, n_basic_blocks);
    }
}

/* Insert an instruction at the beginning of a basic block, and update
   the BLOCK_HEAD if needed.  */

static void
insert_insn_start_bb (insn, bb)
     rtx insn;
     basic_block bb;
{
  /* Insert at start of successor block.  */
  rtx prev = PREV_INSN (bb->head);
  rtx before = bb->head;
  while (before != 0)
    {
      if (GET_CODE (before) != CODE_LABEL
	  && (GET_CODE (before) != NOTE
	      || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
	break;
      prev = before;
      if (prev == bb->end)
	break;
      before = NEXT_INSN (before);
    }

  insn = emit_insn_after (insn, prev);

  if (gcse_file)
    {
      fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
	       bb->index);
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }
}

/* This routine will insert a store on an edge.  EXPR is the ldst entry for
   the memory reference, and E is the edge to insert it on.  Returns non-zero
   if an edge insertion was performed.  */

static int
insert_store (expr, e)
     struct ls_expr * expr;
     edge e;
{
  rtx reg, insn;
  basic_block bb;
  edge tmp;

  /* We did all the deletions before this insert, so if we didn't delete a
     store, then we haven't set the reaching reg yet either.  */
  if (expr->reaching_reg == NULL_RTX)
    return 0;

  reg = expr->reaching_reg;
  insn = gen_move_insn (expr->pattern, reg);

  /* If we are inserting this expression on ALL predecessor edges of a BB,
     insert it at the start of the BB, and reset the insert bits on the other
     edges so we don't try to insert it on the other edges.  */
  bb = e->dest;
  for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
    {
      int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
      if (index == EDGE_INDEX_NO_EDGE)
        abort ();
      if (! TEST_BIT (pre_insert_map[index], expr->index))
        break;
    }

  /* If TMP is NULL, we found an insertion on every edge, blank the
     insertion vector for these edges, and insert at the start of the BB.  */
  if (!tmp && bb != EXIT_BLOCK_PTR)
    {
      for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
        {
          int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
          RESET_BIT (pre_insert_map[index], expr->index);
        }
      insert_insn_start_bb (insn, bb);
      return 0;
    }

  /* We can't insert on this edge, so we'll insert at the head of the
     successor's block.  See Morgan, sec 10.5.  */
  if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
    {
      insert_insn_start_bb (insn, bb);
      return 0;
    }

  insert_insn_on_edge (insn, e);

  if (gcse_file)
    {
      fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
               e->src->index, e->dest->index);
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }

  return 1;
}
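
/* A sketch of the all-predecessors case above: when LCM requests the
   store on every incoming edge of a block B,

       e1 \   / e2
           \ /
            B

   one copy emitted at the head of B is equivalent to splitting both
   edges, so the per-edge insert bits are cleared first to avoid
   duplicate insertions.  */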

/* This routine will replace a store with a SET to a specified register.  */

static void
replace_store_insn (reg, del, bb)
     rtx reg, del;
     basic_block bb;
{
  rtx insn;

  insn = gen_move_insn (reg, SET_SRC (PATTERN (del)));
  insn = emit_insn_after (insn, del);

  if (gcse_file)
    {
      fprintf (gcse_file,
               "STORE_MOTION delete insn in BB %d:\n ", bb->index);
      print_inline_rtx (gcse_file, del, 6);
      fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }

  delete_insn (del);
}
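
/* By way of example (the register numbers are made up), a deleted store

       (set (mem:SI (reg/f:SI 100)) (reg:SI 101))

   is followed by the replacement

       (set (reg:SI 102) (reg:SI 101))

   where (reg:SI 102) is the expression's reaching_reg; insert_store
   later re-creates the store from that register wherever LCM chose to
   place it.  */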

/* Delete a store, but copy the value that would have been stored into
   the reaching_reg for later storing.  */

static void
delete_store (expr, bb)
     struct ls_expr * expr;
     basic_block bb;
{
  rtx reg, i, del;

  if (expr->reaching_reg == NULL_RTX)
    expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));

  /* If there is more than one store, the earlier ones will be dead,
     but it doesn't hurt to replace them here.  */
  reg = expr->reaching_reg;

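  /* AVAIL_STORE_LIST is a chain of (store insn . rest) pairs walked
     with XEXP below; the availability computation has already removed
     redundant entries, leaving at most one store per basic block.  */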
  for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
    {
      del = XEXP (i, 0);
      if (BLOCK_FOR_INSN (del) == bb)
        {
          /* We know there is only one since we deleted redundant
             ones during the available computation.  */
          replace_store_insn (reg, del, bb);
          break;
        }
    }
}

/* Free memory used by store motion.  */

static void
free_store_memory ()
{
  free_ldst_mems ();

  if (ae_gen)
    sbitmap_vector_free (ae_gen);
  if (ae_kill)
    sbitmap_vector_free (ae_kill);
  if (transp)
    sbitmap_vector_free (transp);
  if (st_antloc)
    sbitmap_vector_free (st_antloc);
  if (pre_insert_map)
    sbitmap_vector_free (pre_insert_map);
  if (pre_delete_map)
    sbitmap_vector_free (pre_delete_map);
  if (reg_set_in_block)
    sbitmap_vector_free (reg_set_in_block);

  ae_gen = ae_kill = transp = st_antloc = NULL;
  pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
}

/* Perform store motion.  Much like gcse, except we move expressions the
   other way by looking at the flowgraph in reverse.  */

static void
store_motion ()
{
  int x;
  struct ls_expr * ptr;
  int update_flow = 0;

  if (gcse_file)
    {
      fprintf (gcse_file, "before store motion\n");
      print_rtl (gcse_file, get_insns ());
    }

  init_alias_analysis ();

  /* Find all the stores that are live to the end of their block.  */
  num_stores = compute_store_table ();
  if (num_stores == 0)
    {
      sbitmap_vector_free (reg_set_in_block);
      end_alias_analysis ();
      return;
    }

  /* Now compute what's actually available to move.  */
  add_noreturn_fake_exit_edges ();
  build_store_vectors ();

  edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
                                st_antloc, ae_kill, &pre_insert_map,
                                &pre_delete_map);
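
  /* PRE_DELETE_MAP is indexed by basic block and PRE_INSERT_MAP by edge
     (through EDGE_LIST); each row is a bitmap over the store
     expressions, which is how the loops below consume them.  */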

  /* Now we want to insert the new stores which are going to be needed.  */
  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      for (x = 0; x < n_basic_blocks; x++)
        if (TEST_BIT (pre_delete_map[x], ptr->index))
          delete_store (ptr, BASIC_BLOCK (x));

      for (x = 0; x < NUM_EDGES (edge_list); x++)
        if (TEST_BIT (pre_insert_map[x], ptr->index))
          update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
    }

  if (update_flow)
    commit_edge_insertions ();

  free_store_memory ();
  free_edge_list (edge_list);
  remove_fake_edges ();
  end_alias_analysis ();
}
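
/* A sketch of the overall effect on a made-up fragment: given

       while (p)
         {
           *q = x;
           x = f (x);
           p = p->next;
         }

   and assuming nothing in the loop reads or clobbers *q, each store in
   the body is rewritten by delete_store into a set of reaching_reg, and
   insert_store places a single store from that register on the loop
   exit edge, so memory is written once instead of once per iteration.  */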