/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - dead store elimination
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass
*/

/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/

#include "config.h"
#include "system.h"
#include "toplev.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"

#include "obstack.h"
#define obstack_chunk_alloc gmalloc
#define obstack_chunk_free free

/* Maximum number of passes to perform.  */
#define MAX_PASSES 1

/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */
#define FOLLOW_BACK_EDGES 1

/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.
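
   For instance (an illustrative fragment; the register numbers are made
   up), a typical candidate is

     (set (reg:SI 105) (plus:SI (reg:SI 103) (reg:SI 104)))

   whereas a bare register copy (set (reg:SI 105) (reg:SI 103)) or a load
   of a constant is rejected by want_to_gcse_p, since there is no
   computation worth sharing.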

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found that doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The macro MAX_PASSES
   can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.

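   As a sketch of steps 3-5 (block layout and register numbers are
   hypothetical):

       bb1: r5 = r1 + r2        bb2: (r1 + r2 not computed)
               \                  /
            bb3: r6 = r1 + r2      <- partially redundant

   Roughly, PRE rewrites the computation in bb3 as a copy r6 = r7 from a
   fresh pseudo r7 (the "reaching_reg"), inserts r7 = r1 + r2 on the edge
   from bb2, and adds the copy r7 = r5 after the existing computation in
   bb1, making the occurrence in bb3 fully redundant.
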
   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem to the same degree because it computes the
   reaching defs of each register in each block and thus can try to use an
   existing register.

   **********************

   A fair bit of simplicity is created by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle things can
   be rearranged.

   Help stamp out big monolithic functions!  */
\f
/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

/* Non-zero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy_p[(int) NUM_MACHINE_MODES];

/* Non-zero if can_copy_p has been initialized.  */
static int can_copy_init_p;

struct reg_use {rtx reg_rtx; };

/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Non-zero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Non-zero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};

/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

/* Total size of the expression hash table, in elements.  */
static unsigned int expr_hash_table_size;

/* The table itself.
   This is an array of `expr_hash_table_size' elements.  */
static struct expr **expr_hash_table;

/* Total size of the copy propagation hash table, in elements.  */
static int set_hash_table_size;

/* The table itself.
   This is an array of `set_hash_table_size' elements.  */
static struct expr **set_hash_table;

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])

/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Maximum number of cse-able expressions found.  */
static int n_exprs;

/* Maximum number of assignments for copy propagation found.  */
static int n_sets;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static sbitmap reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* For each block, non-zero if memory is set in that block.
   This is computed during hash table computation and is used by
   expr_killed_p and compute_transp.
   ??? Handling of memory is very simple, we don't make any attempt
   to optimize things (later).
   ??? This can be computed by compute_sets since the information
   doesn't change.  */
static char *mem_set_in_block;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;
\f
/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays.  i.e.
   rd_kill[block_num][cuid_num]
   ae_kill[block_num][expr_num]  */

/* For reaching defs */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* for available exprs */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;

/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  int current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};
\f
static void compute_can_copy PARAMS ((void));
static char *gmalloc PARAMS ((unsigned int));
static char *grealloc PARAMS ((char *, unsigned int));
static char *gcse_alloc PARAMS ((unsigned long));
static void alloc_gcse_mem PARAMS ((rtx));
static void free_gcse_mem PARAMS ((void));
static void alloc_reg_set_mem PARAMS ((int));
static void free_reg_set_mem PARAMS ((void));
static int get_bitmap_width PARAMS ((int, int, int));
static void record_one_set PARAMS ((int, rtx));
static void record_set_info PARAMS ((rtx, rtx, void *));
static void compute_sets PARAMS ((rtx));
static void hash_scan_insn PARAMS ((rtx, int, int));
static void hash_scan_set PARAMS ((rtx, rtx, int));
static void hash_scan_clobber PARAMS ((rtx, rtx));
static void hash_scan_call PARAMS ((rtx, rtx));
static int want_to_gcse_p PARAMS ((rtx));
static int oprs_unchanged_p PARAMS ((rtx, rtx, int));
static int oprs_anticipatable_p PARAMS ((rtx, rtx));
static int oprs_available_p PARAMS ((rtx, rtx));
static void insert_expr_in_table PARAMS ((rtx, enum machine_mode, rtx,
					  int, int));
static void insert_set_in_table PARAMS ((rtx, rtx));
static unsigned int hash_expr PARAMS ((rtx, enum machine_mode, int *, int));
static unsigned int hash_expr_1 PARAMS ((rtx, enum machine_mode, int *));
static unsigned int hash_string_1 PARAMS ((const char *));
static unsigned int hash_set PARAMS ((int, int));
static int expr_equiv_p PARAMS ((rtx, rtx));
static void record_last_reg_set_info PARAMS ((rtx, int));
static void record_last_mem_set_info PARAMS ((rtx));
static void record_last_set_info PARAMS ((rtx, rtx, void *));
static void compute_hash_table PARAMS ((int));
static void alloc_set_hash_table PARAMS ((int));
static void free_set_hash_table PARAMS ((void));
static void compute_set_hash_table PARAMS ((void));
static void alloc_expr_hash_table PARAMS ((unsigned int));
static void free_expr_hash_table PARAMS ((void));
static void compute_expr_hash_table PARAMS ((void));
static void dump_hash_table PARAMS ((FILE *, const char *, struct expr **,
				     int, int));
static struct expr *lookup_expr PARAMS ((rtx));
static struct expr *lookup_set PARAMS ((unsigned int, rtx));
static struct expr *next_set PARAMS ((unsigned int, struct expr *));
static void reset_opr_set_tables PARAMS ((void));
static int oprs_not_set_p PARAMS ((rtx, rtx));
static void mark_call PARAMS ((rtx));
static void mark_set PARAMS ((rtx, rtx));
static void mark_clobber PARAMS ((rtx, rtx));
static void mark_oprs_set PARAMS ((rtx));
static void alloc_cprop_mem PARAMS ((int, int));
static void free_cprop_mem PARAMS ((void));
static void compute_transp PARAMS ((rtx, int, sbitmap *, int));
static void compute_transpout PARAMS ((void));
static void compute_local_properties PARAMS ((sbitmap *, sbitmap *, sbitmap *,
					      int));
static void compute_cprop_data PARAMS ((void));
static void find_used_regs PARAMS ((rtx));
static int try_replace_reg PARAMS ((rtx, rtx, rtx));
static struct expr *find_avail_set PARAMS ((int, rtx));
static int cprop_jump PARAMS ((rtx, rtx, struct reg_use *, rtx));
#ifdef HAVE_cc0
static int cprop_cc0_jump PARAMS ((rtx, struct reg_use *, rtx));
#endif
static int cprop_insn PARAMS ((rtx, int));
static int cprop PARAMS ((int));
static int one_cprop_pass PARAMS ((int, int));
static void alloc_pre_mem PARAMS ((int, int));
static void free_pre_mem PARAMS ((void));
static void compute_pre_data PARAMS ((void));
static int pre_expr_reaches_here_p PARAMS ((int, struct expr *, int));
static void insert_insn_end_bb PARAMS ((struct expr *, int, int));
static void pre_insert_copy_insn PARAMS ((struct expr *, rtx));
static void pre_insert_copies PARAMS ((void));
static int pre_delete PARAMS ((void));
static int pre_gcse PARAMS ((void));
static int one_pre_gcse_pass PARAMS ((int));
static void add_label_notes PARAMS ((rtx, rtx));
static void alloc_code_hoist_mem PARAMS ((int, int));
static void free_code_hoist_mem PARAMS ((void));
static void compute_code_hoist_vbeinout PARAMS ((void));
static void compute_code_hoist_data PARAMS ((void));
static int hoist_expr_reaches_here_p PARAMS ((int, int, int, char *));
static void hoist_code PARAMS ((void));
static int one_code_hoisting_pass PARAMS ((void));
static void alloc_rd_mem PARAMS ((int, int));
static void free_rd_mem PARAMS ((void));
static void handle_rd_kill_set PARAMS ((rtx, int, int));
static void compute_kill_rd PARAMS ((void));
static void compute_rd PARAMS ((void));
static void alloc_avail_expr_mem PARAMS ((int, int));
static void free_avail_expr_mem PARAMS ((void));
static void compute_ae_gen PARAMS ((void));
static int expr_killed_p PARAMS ((rtx, int));
static void compute_ae_kill PARAMS ((sbitmap *, sbitmap *));
static int expr_reaches_here_p PARAMS ((struct occr *, struct expr *,
					int, int));
static rtx computing_insn PARAMS ((struct expr *, rtx));
static int def_reaches_here_p PARAMS ((rtx, rtx));
static int can_disregard_other_sets PARAMS ((struct reg_set **, rtx, int));
static int handle_avail_expr PARAMS ((rtx, struct expr *));
static int classic_gcse PARAMS ((void));
static int one_classic_gcse_pass PARAMS ((int));
static void invalidate_nonnull_info PARAMS ((rtx, rtx, void *));
static void delete_null_pointer_checks_1 PARAMS ((unsigned int *, sbitmap *,
						  sbitmap *,
						  struct null_pointer_info *));
static rtx process_insert_insn PARAMS ((struct expr *));
static int pre_edge_insert PARAMS ((struct edge_list *, struct expr **));
static int expr_reaches_here_p_work PARAMS ((struct occr *, struct expr *,
					     int, int, char *));
static int pre_expr_reaches_here_p_work PARAMS ((int, struct expr *,
						 int, char *));
\f
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (f, file)
     rtx f;
     FILE *file;
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do.  */
  if (n_basic_blocks <= 1)
    return 0;

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    {
      if (warn_disabled_optimization)
	warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
		 n_basic_blocks, n_edges / n_basic_blocks);
      return 0;
    }

  /* See what modes support reg/reg copy operations.  */
  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = 1;
    }

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_PASSES)
    {
      changed = 0;
      if (file)
	fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
	 and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
	 during this pass.  */
      changed = one_cprop_pass (pass + 1, 0);

      if (optimize_size)
	changed |= one_classic_gcse_pass (pass + 1);
      else
	{
	  changed |= one_pre_gcse_pass (pass + 1);
	  free_reg_set_mem ();
	  alloc_reg_set_mem (max_reg_num ());
	  compute_sets (f);
	  run_jump_opt_after_gcse = 1;
	}

      if (max_pass_bytes < bytes_used)
	max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
	 not re-use the existing allocated memory because the tables
	 will not have info for the insns or registers created by
	 partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
	 for code size -- it rarely makes programs faster, and can make
	 them bigger if we did partial redundancy elimination (when optimizing
	 for space, we use a classic gcse algorithm instead of partial
	 redundancy algorithms).  */
      if (optimize_size)
	{
	  max_gcse_regno = max_reg_num ();
	  alloc_gcse_mem (f);
	  changed |= one_code_hoisting_pass ();
	  free_gcse_mem ();

	  if (max_pass_bytes < bytes_used)
	    max_pass_bytes = bytes_used;
	}

      if (file)
	{
	  fprintf (file, "\n");
	  fflush (file);
	}

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
	       current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
	       pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL_PTR);
  free_reg_set_mem ();
  return run_jump_opt_after_gcse;
}
\f
/* Misc. utilities.  */

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy ()
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif

  bzero (can_copy_p, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
	can_copy_p[i] = 0;
#else
	reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
	insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
	if (recog (PATTERN (insn), insn, NULL_PTR) >= 0)
	  can_copy_p[i] = 1;
#endif
      }
    else
      can_copy_p[i] = 1;

  end_sequence ();
}
\f
/* Cover function to xmalloc to record bytes allocated.  */

static char *
gmalloc (size)
     unsigned int size;
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static char *
grealloc (ptr, size)
     char *ptr;
     unsigned int size;
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.
   We don't need to record the bytes allocated here since
   obstack_chunk_alloc is set to gmalloc.  */

static char *
gcse_alloc (size)
     unsigned long size;
{
  return (char *) obstack_alloc (&gcse_obstack, size);
}

/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (f)
     rtx f;
{
  int i, n;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  n = (max_uid + 1) * sizeof (int);
  uid_cuid = (int *) gmalloc (n);
  bzero ((char *) uid_cuid, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	uid_cuid[INSN_UID (insn)] = i++;
      else
	uid_cuid[INSN_UID (insn)] = i;
    }
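
  /* A worked illustration (the UIDs here are hypothetical): given the
     stream NOTE (uid 1), INSN (uid 3), NOTE (uid 5), INSN (uid 8), the
     loop above yields uid_cuid[1] = 0, uid_cuid[3] = 0, uid_cuid[5] = 1,
     uid_cuid[8] = 1: each real insn gets a fresh cuid, and a note maps
     to the cuid of the next real insn.  */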

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  n = (max_cuid + 1) * sizeof (rtx);
  cuid_insn = (rtx *) gmalloc (n);
  bzero ((char *) cuid_insn, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = (sbitmap) sbitmap_alloc (max_gcse_regno);

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks,
						       max_gcse_regno);
  mem_set_in_block = (char *) gmalloc (n_basic_blocks);
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem ()
{
  free (uid_cuid);
  free (cuid_insn);

  free (reg_set_bitmap);

  free (reg_set_in_block);
  free (mem_set_in_block);
}

/* Many of the global optimization algorithms work by solving dataflow
   equations for various expressions.  Initially, some local value is
   computed for each expression in each block.  Then, the values across the
   various blocks are combined (by following flow graph edges) to arrive at
   global values.  Conceptually, each set of equations is independent.  We
   may therefore solve all the equations in parallel, solve them one at a
   time, or pick any intermediate approach.

   When you're going to need N two-dimensional bitmaps, each X (say, the
   number of blocks) by Y (say, the number of expressions), call this
   function.  It's not important what X and Y represent; only that Y
   correspond to the things that can be done in parallel.  This function will
   return an appropriate chunking factor C; you should solve C sets of
   equations in parallel.  By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less space.  */

static int
get_bitmap_width (n, x, y)
     int n;
     int x;
     int y;
{
  /* It's not really worth figuring out *exactly* how much memory will
     be used by a particular choice.  The important thing is to get
     something approximately right.  */
  size_t max_bitmap_memory = 10 * 1024 * 1024;

  /* The number of bytes we'd use for a single column of minimum
     width.  */
  size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);

  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;

  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
			     / column_size);
}
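
/* A worked example of the chunking above (all numbers hypothetical, and
   assuming 8-byte sbitmap elements of 64 bits each): for n == 2 bitmap
   sets over x == 5000 blocks and y == 100000 expressions, column_size is
   2 * 5000 * 8 = 80000 bytes, so solving everything in parallel would
   need about 80000 * (100000 / 64) bytes, well over the 10MB budget.
   The function instead returns 64 * ceil (10MB / 80000), i.e. roughly
   8448 expressions per chunk.  */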
\f
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   SETP controls which hash table to look at.  If zero, this routine looks at
   the expr hash table; if nonzero this routine looks at the set hash table.
   Additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */
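
/* For example (a hypothetical two-insn block):

       r1 = r2 + r3
       r2 = ...

   the expression (r2 + r3) is locally anticipatable (it is computed first
   and its operands are unmodified before it), it is not locally available
   (r2 is clobbered afterwards), and it is not transparent in this block
   (an operand, r2, is set here).  */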

static void
compute_local_properties (transp, comp, antloc, setp)
     sbitmap *transp;
     sbitmap *comp;
     sbitmap *antloc;
     int setp;
{
  unsigned int i, hash_table_size;
  struct expr **hash_table;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (setp)
	sbitmap_vector_zero (transp, n_basic_blocks);
      else
	sbitmap_vector_ones (transp, n_basic_blocks);
    }

  if (comp)
    sbitmap_vector_zero (comp, n_basic_blocks);
  if (antloc)
    sbitmap_vector_zero (antloc, n_basic_blocks);

  /* We use the same code for cprop, pre and hoisting.  For cprop
     we care about the set hash table, for pre and hoisting we
     care about the expr hash table.  */
  hash_table_size = setp ? set_hash_table_size : expr_hash_table_size;
  hash_table = setp ? set_hash_table : expr_hash_table;

  for (i = 0; i < hash_table_size; i++)
    {
      struct expr *expr;

      for (expr = hash_table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  int indx = expr->bitmap_index;
	  struct occr *occr;

	  /* The expression is transparent in this block if it is not killed.
	     We start by assuming all are transparent [none are killed], and
	     then reset the bits for those that are.  */
	  if (transp)
	    compute_transp (expr->expr, indx, transp, setp);

	  /* The occurrences recorded in antic_occr are exactly those that
	     we want to set to non-zero in ANTLOC.  */
	  if (antloc)
	    for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->deleted_p = 0;
	      }

	  /* The occurrences recorded in avail_occr are exactly those that
	     we want to set to non-zero in COMP.  */
	  if (comp)
	    for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->copied_p = 0;
	      }

	  /* While we're scanning the table, this is a good place to
	     initialize this.  */
	  expr->reaching_reg = 0;
	}
    }
}
\f
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (n_regs)
     int n_regs;
{
  unsigned int n;

  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  n = reg_set_table_size * sizeof (struct reg_set *);
  reg_set_table = (struct reg_set **) gmalloc (n);
  bzero ((char *) reg_set_table, n);

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem ()
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL_PTR);
}

/* Record REGNO in the reg_set table.  */

static void
record_one_set (regno, insn)
     int regno;
     rtx insn;
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table
	= (struct reg_set **) grealloc ((char *) reg_set_table,
					new_size * sizeof (struct reg_set *));
      bzero ((char *) (reg_set_table + reg_set_table_size),
	     (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
						   sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}
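
/* Since record_one_set prepends, reg_set_table[REGNO] lists the settings
   most recent first.  For instance (hypothetical insns), if pseudo 100 is
   set in insn 7 and later in insn 42, the chain is
   reg_set_table[100] -> {insn 42} -> {insn 7} -> NULL.  */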

/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (dest, setter, data)
     rtx dest, setter ATTRIBUTE_UNUSED;
     void *data;
{
  rtx record_set_insn = (rtx) data;

  if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (f)
     rtx f;
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), record_set_info, insn);
}
\f
/* Hash table support.  */

/* For each register, the cuid of the first/last insn in the block to set it,
   or -1 if not set.  */
#define NEVER_SET -1
static int *reg_first_set;
static int *reg_last_set;

/* While computing "first/last set" info, this is the CUID of first/last insn
   to set memory or -1 if not set.  `mem_last_set' is also used when
   performing GCSE to record whether memory has been set since the beginning
   of the block.

   Note that handling of memory is very simple, we don't make any attempt
   to optimize things (later).  */
static int mem_first_set;
static int mem_last_set;

/* Perform a quick check whether X, the source of a set, is something
   we want to consider for GCSE.  */

static int
want_to_gcse_p (x)
     rtx x;
{
  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CALL:
      return 0;

    default:
      break;
    }

  return 1;
}
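
/* Illustration (hypothetical RTL): want_to_gcse_p returns 1 for a source
   such as (plus:SI (reg:SI 103) (reg:SI 104)), but 0 for a bare register,
   subreg, constant or call, since GCSE-ing those would either save
   nothing or cannot be handled here.  */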

/* Return non-zero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (x, insn, avail_p)
     rtx x, insn;
     int avail_p;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      if (avail_p)
	return (reg_last_set[REGNO (x)] == NEVER_SET
		|| reg_last_set[REGNO (x)] < INSN_CUID (insn));
      else
	return (reg_first_set[REGNO (x)] == NEVER_SET
		|| reg_first_set[REGNO (x)] >= INSN_CUID (insn));

    case MEM:
      if (avail_p && mem_last_set != NEVER_SET
	  && mem_last_set >= INSN_CUID (insn))
	return 0;
      else if (! avail_p && mem_first_set != NEVER_SET
	       && mem_first_set < INSN_CUID (insn))
	return 0;
      else
	return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call needed at this
	     level, change it into iteration.  This function is called enough
	     to be worth it.  */
	  if (i == 0)
	    return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

	  else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
	    return 0;
    }

  return 1;
}
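
/* A worked illustration (the cuids are hypothetical): suppose reg 5 is
   set exactly once in a block, at cuid 10.  For an insn at cuid 12,
   oprs_unchanged_p (reg 5, insn, 0) is 0 since the set happens before
   the insn, while oprs_unchanged_p (reg 5, insn, 1) is 1 since no set
   follows the insn.  */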

/* Return non-zero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (x, insn)
     rtx x, insn;
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return non-zero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (x, insn)
     rtx x, insn;
{
  return oprs_unchanged_p (x, insn, 1);
}

/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.

   ??? One might want to merge this with canon_hash.  Later.  */

static unsigned int
hash_expr (x, mode, do_not_record_p, hash_table_size)
     rtx x;
     enum machine_mode mode;
     int *do_not_record_p;
     int hash_table_size;
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_expr_1 (x, mode, do_not_record_p);
  return hash % hash_table_size;
}

/* Hash a string.  Just add its bytes up.  */
static inline unsigned
hash_string_1 (ps)
     const char *ps;
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}
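
/* A worked example: hash_string_1 ("abc") sums the byte values
   97 + 98 + 99 and returns 294; both the empty string and a null
   pointer hash to 0.  */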

/* Subroutine of hash_expr to do the actual work.  */

static unsigned int
hash_expr_1 (x, mode, do_not_record_p)
     rtx x;
     enum machine_mode mode;
     int *do_not_record_p;
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  const char *fmt;

  /* Used to turn recursion into iteration.  We can't rely on GCC's
     tail-recursion elimination since we need to keep accumulating values
     in HASH.  */

  if (x == 0)
    return hash;

 repeat:
  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      hash += ((unsigned int) REG << 7) + REGNO (x);
      return hash;

    case CONST_INT:
      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
	       + (unsigned int) INTVAL (x));
      return hash;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
	for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
	  hash += (unsigned int) XWINT (x, i);
      else
	hash += ((unsigned int) CONST_DOUBLE_LOW (x)
		 + (unsigned int) CONST_DOUBLE_HIGH (x));
      return hash;

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
	 differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
	       + CODE_LABEL_NUMBER (XEXP (x, 0)));
      return hash;

    case SYMBOL_REF:
      {
	/* Don't hash on the symbol's address to avoid bootstrap differences.
	   Different hash values may cause expressions to be recorded in
	   different orders and thus different registers to be used in the
	   final assembler.  This also avoids differences in the dump files
	   between various stages.  */
	unsigned int h = 0;
	const unsigned char *p = (const unsigned char *) XSTR (x, 0);

	while (*p)
	  h += (h << 7) + *p++; /* ??? revisit */

	hash += ((unsigned int) SYMBOL_REF << 7) + h;
	return hash;
      }

    case MEM:
      if (MEM_VOLATILE_P (x))
	{
	  *do_not_record_p = 1;
	  return 0;
	}

      hash += (unsigned int) MEM;
      hash += MEM_ALIAS_SET (x);
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      *do_not_record_p = 1;
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	{
	  *do_not_record_p = 1;
	  return 0;
	}
      else
	{
	  /* We don't want to take the filename and line into account.  */
	  hash += (unsigned) code + (unsigned) GET_MODE (x)
	    + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
	    + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);

	  if (ASM_OPERANDS_INPUT_LENGTH (x))
	    {
	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
		{
		  hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
					GET_MODE (ASM_OPERANDS_INPUT (x, i)),
					do_not_record_p)
			   + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
					    (x, i)));
		}

	      hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
	      x = ASM_OPERANDS_INPUT (x, 0);
	      mode = GET_MODE (x);
	      goto repeat;
	    }
	  return hash;
	}

    default:
      break;
    }

  hash += (unsigned) code + (unsigned) GET_MODE (x);
  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }

	  hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
	  if (*do_not_record_p)
	    return 0;
	}

      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  {
	    hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
	    if (*do_not_record_p)
	      return 0;
	  }

      else if (fmt[i] == 's')
	hash += hash_string_1 (XSTR (x, i));
      else if (fmt[i] == 'i')
	hash += (unsigned int) XINT (x, i);
      else
	abort ();
    }

  return hash;
}

/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.  This simplifies the PRE copy
   propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (regno, hash_table_size)
     int regno;
     int hash_table_size;
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}

/* Return non-zero if X is equivalent to Y.
   ??? Borrowed from cse.c.  Might want to remerge with cse.c.  Later.  */

static int
expr_equiv_p (x, y)
     rtx x, y;
{
  register int i, j;
  register enum rtx_code code;
  register const char *fmt;

  if (x == y)
    return 1;

  if (x == 0 || y == 0)
    return x == y;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
      return x == y;

    case CONST_INT:
      return INTVAL (x) == INTVAL (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      return REGNO (x) == REGNO (y);

    case MEM:
      /* Can't merge two expressions in different alias sets, since we can
	 decide that the expression is transparent in a block when it isn't,
	 due to it being set with a different alias set.  */
      if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
	return 0;
      break;

    /* For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
	       && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
	      || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
		  && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));

    case ASM_OPERANDS:
      /* We don't use the generic code below because we want to
	 disregard filename and line numbers.  */

      /* A volatile asm isn't equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      if (GET_MODE (x) != GET_MODE (y)
	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
	return 0;

      if (ASM_OPERANDS_INPUT_LENGTH (x))
	{
	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	    if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
				ASM_OPERANDS_INPUT (y, i))
		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
	      return 0;
	}

      return 1;

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fails to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case '0':
	  break;

	default:
	  abort ();
	}
    }

  return 1;
}
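
/* As an illustration (register numbers made up): because commutative
   codes check both operand orders,

       (plus:SI (reg:SI 100) (const_int 4))
   and (plus:SI (const_int 4) (reg:SI 100))

   compare as equivalent, whereas (mult:SI (reg:SI 100) (reg:SI 101))
   and (mult:HI (reg:HI 100) (reg:HI 101)) do not, since their modes
   differ.  */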

/* Insert expression X in INSN in the hash table.
   If it is already present, record it as the last occurrence in INSN's
   basic block.

   MODE is the mode of the value X is being stored into.
   It is only used if X is a CONST_INT.

   ANTIC_P is non-zero if X is an anticipatable expression.
   AVAIL_P is non-zero if X is an available expression.  */

static void
insert_expr_in_table (x, mode, insn, antic_p, avail_p)
     rtx x;
     enum machine_mode mode;
     rtx insn;
     int antic_p, avail_p;
{
  int found, do_not_record_p;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *antic_occr, *avail_occr;
  struct occr *last_occr = NULL;

  hash = hash_expr (x, mode, &do_not_record_p, expr_hash_table_size);

  /* Do not insert expression in table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  cur_expr = expr_hash_table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
	 the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (expr_hash_table[hash] == NULL)
	/* This is the first pattern that hashed to this index.  */
	expr_hash_table[hash] = cur_expr;
      else
	/* Add EXPR to end of this hash chain.  */
	last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.  */
      cur_expr->expr = x;
      cur_expr->bitmap_index = n_exprs++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence(s).  */
  if (antic_p)
    {
      antic_occr = cur_expr->antic_occr;

      /* Search for another occurrence in the same basic block.  */
      while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
	{
	  /* If an occurrence isn't found, save a pointer to the end of
	     the list.  */
	  last_occr = antic_occr;
	  antic_occr = antic_occr->next;
	}

      if (antic_occr)
	/* Found another instance of the expression in the same basic block.
	   Prefer the currently recorded one.  We want the first one in the
	   block and the block is scanned from start to end.  */
	; /* nothing to do */
      else
	{
	  /* First occurrence of this expression in this basic block.  */
	  antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
	  bytes_used += sizeof (struct occr);

	  /* First occurrence of this expression in any block?  */
	  if (cur_expr->antic_occr == NULL)
	    cur_expr->antic_occr = antic_occr;
	  else
	    last_occr->next = antic_occr;

	  antic_occr->insn = insn;
	  antic_occr->next = NULL;
	}
    }

  if (avail_p)
    {
      avail_occr = cur_expr->avail_occr;

      /* Search for another occurrence in the same basic block.  */
      while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
	{
	  /* If an occurrence isn't found, save a pointer to the end of
	     the list.  */
	  last_occr = avail_occr;
	  avail_occr = avail_occr->next;
	}

      if (avail_occr)
	/* Found another instance of the expression in the same basic block.
	   Prefer this occurrence to the currently recorded one.  We want
	   the last one in the block and the block is scanned from start
	   to end.  */
	avail_occr->insn = insn;
      else
	{
	  /* First occurrence of this expression in this basic block.  */
	  avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
	  bytes_used += sizeof (struct occr);

	  /* First occurrence of this expression in any block?  */
	  if (cur_expr->avail_occr == NULL)
	    cur_expr->avail_occr = avail_occr;
	  else
	    last_occr->next = avail_occr;

	  avail_occr->insn = insn;
	  avail_occr->next = NULL;
	}
    }
}
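
/* To illustrate the occurrence bookkeeping above (insn names made up):
   if a block computes reg 100 + 4 in insns I1 and I7, the antic_occr
   list keeps I1, since anticipatability at block entry is decided by
   the first computation, while the avail_occr list keeps I7, since it
   is the last computation that makes the expression available at block
   exit.  */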

/* Insert pattern X in INSN in the hash table.
   X is a SET of a reg to either another reg or a constant.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_set_in_table (x, insn)
     rtx x;
     rtx insn;
{
  int found;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *cur_occr, *last_occr = NULL;

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != REG)
    abort ();

  hash = hash_set (REGNO (SET_DEST (x)), set_hash_table_size);

  cur_expr = set_hash_table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
	 the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (set_hash_table[hash] == NULL)
	/* This is the first pattern that hashed to this index.  */
	set_hash_table[hash] = cur_expr;
      else
	/* Add EXPR to end of this hash chain.  */
	last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.
	 We must copy X because it can be modified when copy propagation is
	 performed on its operands.  */
      /* ??? Should this go in a different obstack?  */
      cur_expr->expr = copy_rtx (x);
      cur_expr->bitmap_index = n_sets++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence.  */
  cur_occr = cur_expr->avail_occr;

  /* Search for another occurrence in the same basic block.  */
  while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
    {
      /* If an occurrence isn't found, save a pointer to the end of
	 the list.  */
      last_occr = cur_occr;
      cur_occr = cur_occr->next;
    }

  if (cur_occr)
    /* Found another instance of the expression in the same basic block.
       Prefer this occurrence to the currently recorded one.  We want the
       last one in the block and the block is scanned from start to end.  */
    cur_occr->insn = insn;
  else
    {
      /* First occurrence of this expression in this basic block.  */
      cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
      bytes_used += sizeof (struct occr);

      /* First occurrence of this expression in any block?  */
      if (cur_expr->avail_occr == NULL)
	cur_expr->avail_occr = cur_occr;
      else
	last_occr->next = cur_occr;

      cur_occr->insn = insn;
      cur_occr->next = NULL;
    }
}

/* Scan pattern PAT of INSN and add an entry to the hash table.  If SET_P is
   non-zero, this is for the assignment hash table, otherwise it is for the
   expression hash table.  */

static void
hash_scan_set (pat, insn, set_p)
     rtx pat, insn;
     int set_p;
{
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);

  if (GET_CODE (src) == CALL)
    hash_scan_call (src, insn);

  if (GET_CODE (dest) == REG)
    {
      int regno = REGNO (dest);
      rtx tmp;

      /* Only record sets of pseudo-regs in the hash table.  */
      if (! set_p
	  && regno >= FIRST_PSEUDO_REGISTER
	  /* Don't GCSE something if we can't do a reg/reg copy.  */
	  && can_copy_p [GET_MODE (dest)]
	  /* Is SET_SRC something we want to gcse?  */
	  && want_to_gcse_p (src))
	{
	  /* An expression is not anticipatable if its operands are
	     modified before this insn.  */
	  int antic_p = oprs_anticipatable_p (src, insn);

	  /* An expression is not available if its operands are
	     subsequently modified, including this insn.  */
	  int avail_p = oprs_available_p (src, insn);

	  insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p);
	}

      /* Record sets for constant/copy propagation.  */
      else if (set_p
	       && regno >= FIRST_PSEUDO_REGISTER
	       && ((GET_CODE (src) == REG
		    && REGNO (src) >= FIRST_PSEUDO_REGISTER
		    && can_copy_p [GET_MODE (dest)])
		   || GET_CODE (src) == CONST_INT
		   || GET_CODE (src) == SYMBOL_REF
		   || GET_CODE (src) == CONST_DOUBLE)
	       /* A copy is not available if its src or dest is subsequently
		  modified.  Here we want to search from INSN+1 on, but
		  oprs_available_p searches from INSN on.  */
	       && (insn == BLOCK_END (BLOCK_NUM (insn))
		   || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
		       && oprs_available_p (pat, tmp))))
	insert_set_in_table (pat, insn);
    }
}
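
/* By way of example (register numbers made up): scanned with SET_P
   zero, (set (reg 101) (plus:SI (reg 100) (const_int 4))) records the
   PLUS in the expression hash table; scanned with SET_P non-zero,
   (set (reg 102) (reg 101)) or (set (reg 103) (const_int 7)) records
   the whole SET in the assignment hash table for copy/constant
   propagation.  */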

static void
hash_scan_clobber (x, insn)
     rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
{
  /* Currently nothing to do.  */
}

static void
hash_scan_call (x, insn)
     rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
{
  /* Currently nothing to do.  */
}

/* Process INSN and add hash table entries as appropriate.

   Only available expressions that set a single pseudo-reg are recorded.

   Single sets in a PARALLEL could be handled, but it's an extra complication
   that isn't dealt with right now.  The trick is handling the CLOBBERs that
   are also in the PARALLEL.  Later.

   If SET_P is non-zero, this is for the assignment hash table,
   otherwise it is for the expression hash table.
   If IN_LIBCALL_BLOCK is non-zero, we are in a libcall block, and should
   not record any expressions.  */

static void
hash_scan_insn (insn, set_p, in_libcall_block)
     rtx insn;
     int set_p;
     int in_libcall_block;
{
  rtx pat = PATTERN (insn);
  int i;

  /* Pick out the sets of INSN and for other forms of instructions record
     what's been modified.  */

  if (GET_CODE (pat) == SET && ! in_libcall_block)
    {
      /* Ignore obvious no-ops.  */
      if (SET_SRC (pat) != SET_DEST (pat))
	hash_scan_set (pat, insn, set_p);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx x = XVECEXP (pat, 0, i);

	if (GET_CODE (x) == SET)
	  {
	    if (GET_CODE (SET_SRC (x)) == CALL)
	      hash_scan_call (SET_SRC (x), insn);
	  }
	else if (GET_CODE (x) == CLOBBER)
	  hash_scan_clobber (x, insn);
	else if (GET_CODE (x) == CALL)
	  hash_scan_call (x, insn);
      }

  else if (GET_CODE (pat) == CLOBBER)
    hash_scan_clobber (pat, insn);
  else if (GET_CODE (pat) == CALL)
    hash_scan_call (pat, insn);
}

static void
dump_hash_table (file, name, table, table_size, total_size)
     FILE *file;
     const char *name;
     struct expr **table;
     int table_size, total_size;
{
  int i;
  /* Flattened out table, so it's printed in proper order.  */
  struct expr **flat_table;
  unsigned int *hash_val;
  struct expr *expr;

  flat_table
    = (struct expr **) xcalloc (total_size, sizeof (struct expr *));
  hash_val = (unsigned int *) xmalloc (total_size * sizeof (unsigned int));

  for (i = 0; i < table_size; i++)
    for (expr = table[i]; expr != NULL; expr = expr->next_same_hash)
      {
	flat_table[expr->bitmap_index] = expr;
	hash_val[expr->bitmap_index] = i;
      }

  fprintf (file, "%s hash table (%d buckets, %d entries)\n",
	   name, table_size, total_size);

  for (i = 0; i < total_size; i++)
    if (flat_table[i] != 0)
      {
	expr = flat_table[i];
	fprintf (file, "Index %d (hash value %d)\n  ",
		 expr->bitmap_index, hash_val[i]);
	print_rtl (file, expr->expr);
	fprintf (file, "\n");
      }

  fprintf (file, "\n");

  free (flat_table);
  free (hash_val);
}

/* Record register first/last/block set information for REGNO in INSN.

   reg_first_set records the first place in the block where the register
   is set and is used to compute "anticipatability".

   reg_last_set records the last place in the block where the register
   is set and is used to compute "availability".

   reg_set_in_block records whether the register is set in the block
   and is used to compute "transparency".  */

static void
record_last_reg_set_info (insn, regno)
     rtx insn;
     int regno;
{
  if (reg_first_set[regno] == NEVER_SET)
    reg_first_set[regno] = INSN_CUID (insn);

  reg_last_set[regno] = INSN_CUID (insn);
  SET_BIT (reg_set_in_block[BLOCK_NUM (insn)], regno);
}

/* Record memory first/last/block set information for INSN.  */

static void
record_last_mem_set_info (insn)
     rtx insn;
{
  if (mem_first_set == NEVER_SET)
    mem_first_set = INSN_CUID (insn);

  mem_last_set = INSN_CUID (insn);
  mem_set_in_block[BLOCK_NUM (insn)] = 1;
}

/* Called from compute_hash_table via note_stores to handle one
   SET or CLOBBER in an insn.  DATA is really the instruction in which
   the SET is taking place.  */

static void
record_last_set_info (dest, setter, data)
     rtx dest, setter ATTRIBUTE_UNUSED;
     void *data;
{
  rtx last_set_insn = (rtx) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    record_last_reg_set_info (last_set_insn, REGNO (dest));
  else if (GET_CODE (dest) == MEM
	   /* Ignore pushes, they clobber nothing.  */
	   && ! push_operand (dest, GET_MODE (dest)))
    record_last_mem_set_info (last_set_insn);
}

/* Top level function to create an expression or assignment hash table.

   Expression entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform GCSE on,
   - none of the operands are subsequently modified in the block

   Assignment entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform const/copy propagation on,
   - none of the operands or target are subsequently modified in the block

   Currently src must be a pseudo-reg or a const_int.

   SET_P is non-zero for computing the assignment hash table.  */

static void
compute_hash_table (set_p)
     int set_p;
{
  int bb;

  /* While we compute the hash table we also compute a bit array of which
     registers are set in which blocks.
     We also compute which blocks set memory, in the absence of aliasing
     support [which is TODO].
     ??? This isn't needed during const/copy propagation, but it's cheap to
     compute.  Later.  */
  sbitmap_vector_zero (reg_set_in_block, n_basic_blocks);
  bzero ((char *) mem_set_in_block, n_basic_blocks);

  /* Some working arrays used to track first and last set in each block.  */
  /* ??? One could use alloca here, but at some size a threshold is crossed
     beyond which one should use malloc.  Are we at that threshold here?  */
  reg_first_set = (int *) gmalloc (max_gcse_regno * sizeof (int));
  reg_last_set = (int *) gmalloc (max_gcse_regno * sizeof (int));

  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      rtx insn;
      unsigned int regno;
      int in_libcall_block;
      unsigned int i;

      /* First pass over the instructions records information used to
	 determine when registers and memory are first and last set.
	 ??? The mem_set_in_block and hard-reg reg_set_in_block computation
	 could be moved to compute_sets since they currently don't change.  */

      for (i = 0; i < max_gcse_regno; i++)
	reg_first_set[i] = reg_last_set[i] = NEVER_SET;

      mem_first_set = NEVER_SET;
      mem_last_set = NEVER_SET;

      for (insn = BLOCK_HEAD (bb);
	   insn && insn != NEXT_INSN (BLOCK_END (bb));
	   insn = NEXT_INSN (insn))
	{
#ifdef NON_SAVING_SETJMP
	  if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
	      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
	    {
	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		record_last_reg_set_info (insn, regno);
	      continue;
	    }
#endif

	  if (! INSN_P (insn))
	    continue;

	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		if ((call_used_regs[regno]
		     && regno != STACK_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
		     && regno != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
		     && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
#if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
		     && ! (regno == PIC_OFFSET_TABLE_REGNUM && flag_pic)
#endif

		     && regno != FRAME_POINTER_REGNUM)
		    || global_regs[regno])
		  record_last_reg_set_info (insn, regno);

	      if (! CONST_CALL_P (insn))
		record_last_mem_set_info (insn);
	    }

	  note_stores (PATTERN (insn), record_last_set_info, insn);
	}

      /* The next pass builds the hash table.  */

      for (insn = BLOCK_HEAD (bb), in_libcall_block = 0;
	   insn && insn != NEXT_INSN (BLOCK_END (bb));
	   insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  {
	    if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	      in_libcall_block = 1;
	    else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
	      in_libcall_block = 0;
	    hash_scan_insn (insn, set_p, in_libcall_block);
	  }
    }

  free (reg_first_set);
  free (reg_last_set);

  /* Catch bugs early.  */
  reg_first_set = reg_last_set = 0;
}
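
/* A sketch of the interplay of the two passes above (insn and register
   numbers made up): for a block

       I1: (set (reg 100) (mem ...))
       I2: (set (reg 101) (plus (reg 100) (const_int 4)))
       I3: (set (reg 100) (const_int 0))

   the first pass records reg_first_set[100] = CUID (I1) and
   reg_last_set[100] = CUID (I3).  The second pass then finds the PLUS
   in I2 neither anticipatable (reg 100 is set at I1, before I2) nor
   available (reg 100 is set again at I3, after I2), so the expression
   is entered in the table with both flags zero.  */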

/* Allocate space for the set hash table.
   N_INSNS is the number of instructions in the function.
   It is used to determine the number of buckets to use.  */

static void
alloc_set_hash_table (n_insns)
     int n_insns;
{
  int n;

  set_hash_table_size = n_insns / 4;
  if (set_hash_table_size < 11)
    set_hash_table_size = 11;

  /* Attempt to maintain efficient use of hash table.
     Making it an odd number is simplest for now.
     ??? Later take some measurements.  */
  set_hash_table_size |= 1;
  n = set_hash_table_size * sizeof (struct expr *);
  set_hash_table = (struct expr **) gmalloc (n);
}
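
/* Worked example of the sizing rule: a function of 1000 insns gives
   1000 / 4 = 250 buckets, and OR-ing in the low bit bumps that to 251,
   an odd count that spreads hash values more evenly across buckets; a
   tiny function of 20 insns is clamped up to the 11-bucket minimum.  */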

/* Free things allocated by alloc_set_hash_table.  */

static void
free_set_hash_table ()
{
  free (set_hash_table);
}

/* Compute the hash table for doing copy/const propagation.  */

static void
compute_set_hash_table ()
{
  /* Initialize count of number of entries in hash table.  */
  n_sets = 0;
  bzero ((char *) set_hash_table,
	 set_hash_table_size * sizeof (struct expr *));

  compute_hash_table (1);
}

/* Allocate space for the expression hash table.
   N_INSNS is the number of instructions in the function.
   It is used to determine the number of buckets to use.  */

static void
alloc_expr_hash_table (n_insns)
     unsigned int n_insns;
{
  int n;

  expr_hash_table_size = n_insns / 2;
  /* Make sure the amount is usable.  */
  if (expr_hash_table_size < 11)
    expr_hash_table_size = 11;

  /* Attempt to maintain efficient use of hash table.
     Making it an odd number is simplest for now.
     ??? Later take some measurements.  */
  expr_hash_table_size |= 1;
  n = expr_hash_table_size * sizeof (struct expr *);
  expr_hash_table = (struct expr **) gmalloc (n);
}

/* Free things allocated by alloc_expr_hash_table.  */

static void
free_expr_hash_table ()
{
  free (expr_hash_table);
}

/* Compute the hash table for doing GCSE.  */

static void
compute_expr_hash_table ()
{
  /* Initialize count of number of entries in hash table.  */
  n_exprs = 0;
  bzero ((char *) expr_hash_table,
	 expr_hash_table_size * sizeof (struct expr *));

  compute_hash_table (0);
}
\f
/* Expression tracking support.  */

/* Lookup pattern PAT in the expression table.
   The result is a pointer to the table entry, or NULL if not found.  */

static struct expr *
lookup_expr (pat)
     rtx pat;
{
  int do_not_record_p;
  unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
				 expr_hash_table_size);
  struct expr *expr;

  if (do_not_record_p)
    return NULL;

  expr = expr_hash_table[hash];

  while (expr && ! expr_equiv_p (expr->expr, pat))
    expr = expr->next_same_hash;

  return expr;
}

/* Lookup REGNO in the set table.  If PAT is non-NULL look for the entry that
   matches it, otherwise return the first entry for REGNO.  The result is a
   pointer to the table entry, or NULL if not found.  */

static struct expr *
lookup_set (regno, pat)
     unsigned int regno;
     rtx pat;
{
  unsigned int hash = hash_set (regno, set_hash_table_size);
  struct expr *expr;

  expr = set_hash_table[hash];

  if (pat)
    {
      while (expr && ! expr_equiv_p (expr->expr, pat))
	expr = expr->next_same_hash;
    }
  else
    {
      while (expr && REGNO (SET_DEST (expr->expr)) != regno)
	expr = expr->next_same_hash;
    }

  return expr;
}

/* Return the next entry for REGNO in list EXPR.  */

static struct expr *
next_set (regno, expr)
     unsigned int regno;
     struct expr *expr;
{
  do
    expr = expr->next_same_hash;
  while (expr && REGNO (SET_DEST (expr->expr)) != regno);

  return expr;
}

/* Reset tables used to keep track of what's still available [since the
   start of the block].  */

static void
reset_opr_set_tables ()
{
  /* Maintain a bitmap of which regs have been set since beginning of
     the block.  */
  sbitmap_zero (reg_set_bitmap);

  /* Also keep a record of the last instruction to modify memory.
     For now this is very trivial, we only record whether any memory
     location has been modified.  */
  mem_last_set = 0;
}

/* Return non-zero if the operands of X are not set before INSN in
   INSN's basic block.  */

static int
oprs_not_set_p (x, insn)
     rtx x, insn;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    case MEM:
      if (mem_last_set != 0)
	return 0;
      else
	return oprs_not_set_p (XEXP (x, 0), insn);

    case REG:
      return ! TEST_BIT (reg_set_bitmap, REGNO (x));

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    return oprs_not_set_p (XEXP (x, i), insn);

	  if (! oprs_not_set_p (XEXP (x, i), insn))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
	    return 0;
    }

  return 1;
}

/* Mark things set by a CALL.  */

static void
mark_call (insn)
     rtx insn;
{
  mem_last_set = INSN_CUID (insn);
}

/* Mark things set by a SET.  */

static void
mark_set (pat, insn)
     rtx pat, insn;
{
  rtx dest = SET_DEST (pat);

  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == SIGN_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (GET_CODE (dest) == REG)
    SET_BIT (reg_set_bitmap, REGNO (dest));
  else if (GET_CODE (dest) == MEM)
    mem_last_set = INSN_CUID (insn);

  if (GET_CODE (SET_SRC (pat)) == CALL)
    mark_call (insn);
}

/* Record things set by a CLOBBER.  */

static void
mark_clobber (pat, insn)
     rtx pat, insn;
{
  rtx clob = XEXP (pat, 0);

  while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
    clob = XEXP (clob, 0);

  if (GET_CODE (clob) == REG)
    SET_BIT (reg_set_bitmap, REGNO (clob));
  else
    mem_last_set = INSN_CUID (insn);
}

/* Record things set by INSN.
   This data is used by oprs_not_set_p.  */

static void
mark_oprs_set (insn)
     rtx insn;
{
  rtx pat = PATTERN (insn);
  int i;

  if (GET_CODE (pat) == SET)
    mark_set (pat, insn);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx x = XVECEXP (pat, 0, i);

	if (GET_CODE (x) == SET)
	  mark_set (x, insn);
	else if (GET_CODE (x) == CLOBBER)
	  mark_clobber (x, insn);
	else if (GET_CODE (x) == CALL)
	  mark_call (insn);
      }

  else if (GET_CODE (pat) == CLOBBER)
    mark_clobber (pat, insn);
  else if (GET_CODE (pat) == CALL)
    mark_call (insn);
}
\f
/* Classic GCSE reaching definition support.  */

/* Allocate reaching def variables.  */

static void
alloc_rd_mem (n_blocks, n_insns)
     int n_blocks, n_insns;
{
  rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_kill, n_basic_blocks);

  rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_gen, n_basic_blocks);

  reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (reaching_defs, n_basic_blocks);

  rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_out, n_basic_blocks);
}

/* Free reaching def variables.  */

static void
free_rd_mem ()
{
  free (rd_kill);
  free (rd_gen);
  free (reaching_defs);
  free (rd_out);
}

/* Add INSN to the kills of BB.  REGNO, set in BB, is killed by INSN.  */

static void
handle_rd_kill_set (insn, regno, bb)
     rtx insn;
     int regno, bb;
{
  struct reg_set *this_reg;

  for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg->next)
    if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
      SET_BIT (rd_kill[bb], INSN_CUID (this_reg->insn));
}

/* Compute the set of kills for reaching definitions.  */

static void
compute_kill_rd ()
{
  int bb, cuid;
  int regno, i;

  /* For each block
       For each set bit in `gen' of the block (i.e. each insn which
	   generates a definition in the block)
	 Call the reg set by the insn corresponding to that bit regx
	 Look at the linked list starting at reg_set_table[regx]
	 For each setting of regx in the linked list, which is not in
	     this block
	   Set the bit in `kill' corresponding to that insn.  */
  for (bb = 0; bb < n_basic_blocks; bb++)
    for (cuid = 0; cuid < max_cuid; cuid++)
      if (TEST_BIT (rd_gen[bb], cuid))
	{
	  rtx insn = CUID_INSN (cuid);
	  rtx pat = PATTERN (insn);

	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		{
		  if ((call_used_regs[regno]
		       && regno != STACK_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
		       && regno != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
		       && ! (regno == ARG_POINTER_REGNUM
			     && fixed_regs[regno])
#endif
#if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
		       && ! (regno == PIC_OFFSET_TABLE_REGNUM && flag_pic)
#endif
		       && regno != FRAME_POINTER_REGNUM)
		      || global_regs[regno])
		    handle_rd_kill_set (insn, regno, bb);
		}
	    }

	  if (GET_CODE (pat) == PARALLEL)
	    {
	      for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
		{
		  enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));

		  if ((code == SET || code == CLOBBER)
		      && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
		    handle_rd_kill_set (insn,
					REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
					bb);
		}
	    }
	  else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
	    /* Each setting of this register outside of this block
	       must be marked in the set of kills in this block.  */
	    handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
	}
}

/* Compute the reaching definitions as in
   Compilers Principles, Techniques, and Tools.  Aho, Sethi, Ullman,
   Chapter 10.  It is the same algorithm as used for computing available
   expressions but applied to the gens and kills of reaching definitions.  */

static void
compute_rd ()
{
  int bb, changed, passes;

  for (bb = 0; bb < n_basic_blocks; bb++)
    sbitmap_copy (rd_out[bb] /*dst*/, rd_gen[bb] /*src*/);

  passes = 0;
  changed = 1;
  while (changed)
    {
      changed = 0;
      for (bb = 0; bb < n_basic_blocks; bb++)
	{
	  sbitmap_union_of_preds (reaching_defs[bb], rd_out, bb);
	  changed |= sbitmap_union_of_diff (rd_out[bb], rd_gen[bb],
					    reaching_defs[bb], rd_kill[bb]);
	}
      passes++;
    }

  if (gcse_file)
    fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
}
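
/* The loop above iterates the standard forward dataflow equations:

       in[bb]  = union over all predecessors p of out[p]
       out[bb] = gen[bb] | (in[bb] & ~kill[bb])

   sbitmap_union_of_preds computes in[bb] (held in reaching_defs) and
   sbitmap_union_of_diff computes out[bb], reporting whether it changed.
   Iteration stops on the first pass where no out[] set changes, i.e. at
   the least fixed point.  */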
\f
/* Classic GCSE available expression support.  */

/* Allocate memory for available expression computation.  */

static void
alloc_avail_expr_mem (n_blocks, n_exprs)
     int n_blocks, n_exprs;
{
  ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_kill, n_basic_blocks);

  ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_gen, n_basic_blocks);

  ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_in, n_basic_blocks);

  ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_out, n_basic_blocks);
}

static void
free_avail_expr_mem ()
{
  free (ae_kill);
  free (ae_gen);
  free (ae_in);
  free (ae_out);
}

/* Compute the set of available expressions generated in each basic block.  */

static void
compute_ae_gen ()
{
  unsigned int i;
  struct expr *expr;
  struct occr *occr;

  /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
     This is all we have to do because an expression is not recorded if it
     is not available, and the only expressions we want to work with are the
     ones that are recorded.  */
  for (i = 0; i < expr_hash_table_size; i++)
    for (expr = expr_hash_table[i]; expr != 0; expr = expr->next_same_hash)
      for (occr = expr->avail_occr; occr != 0; occr = occr->next)
	SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
}

/* Return non-zero if expression X is killed in BB.  */

static int
expr_killed_p (x, bb)
     rtx x;
     int bb;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      return TEST_BIT (reg_set_in_block[bb], REGNO (x));

    case MEM:
      if (mem_set_in_block[bb])
	return 1;
      else
	return expr_killed_p (XEXP (x, 0), bb);

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    return expr_killed_p (XEXP (x, i), bb);
	  else if (expr_killed_p (XEXP (x, i), bb))
	    return 1;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (expr_killed_p (XVECEXP (x, i, j), bb))
	    return 1;
    }

  return 0;
}

/* Compute the set of available expressions killed in each basic block.  */

static void
compute_ae_kill (ae_gen, ae_kill)
     sbitmap *ae_gen, *ae_kill;
{
  int bb;
  unsigned int i;
  struct expr *expr;

  for (bb = 0; bb < n_basic_blocks; bb++)
    for (i = 0; i < expr_hash_table_size; i++)
      for (expr = expr_hash_table[i]; expr; expr = expr->next_same_hash)
	{
	  /* Skip EXPR if generated in this block.  */
	  if (TEST_BIT (ae_gen[bb], expr->bitmap_index))
	    continue;

	  if (expr_killed_p (expr->expr, bb))
	    SET_BIT (ae_kill[bb], expr->bitmap_index);
	}
}
7506f491
DE
2837\f
2838/* Actually perform the Classic GCSE optimizations. */
2839
2840/* Return non-zero if occurrence OCCR of expression EXPR reaches block BB.
2841
2842 CHECK_SELF_LOOP is non-zero if we should consider a block reaching itself
2843 as a positive reach. We want to do this when there are two computations
2844 of the expression in the block.
2845
2846 VISITED is a pointer to a working buffer for tracking which BB's have
2847 been visited. It is NULL for the top-level call.
2848
2849 We treat reaching expressions that go through blocks containing the same
2850 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
2851 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
2852 2 as not reaching. The intent is to improve the probability of finding
2853 only one reaching expression and to reduce register lifetimes by picking
2854 the closest such expression. */
2855
2856static int
283a2545 2857expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited)
7506f491
DE
2858 struct occr *occr;
2859 struct expr *expr;
2860 int bb;
2861 int check_self_loop;
2862 char *visited;
2863{
36349f8b 2864 edge pred;
7506f491 2865
36349f8b 2866 for (pred = BASIC_BLOCK(bb)->pred; pred != NULL; pred = pred->pred_next)
7506f491 2867 {
36349f8b 2868 int pred_bb = pred->src->index;
7506f491
DE
2869
2870 if (visited[pred_bb])
c4c81601 2871 /* This predecessor has already been visited. Nothing to do. */
7506f491 2872 ;
7506f491 2873 else if (pred_bb == bb)
ac7c5af5 2874 {
7506f491
DE
2875 /* BB loops on itself. */
2876 if (check_self_loop
2877 && TEST_BIT (ae_gen[pred_bb], expr->bitmap_index)
2878 && BLOCK_NUM (occr->insn) == pred_bb)
2879 return 1;
c4c81601 2880
7506f491 2881 visited[pred_bb] = 1;
ac7c5af5 2882 }
c4c81601 2883
7506f491
DE
2884 /* Ignore this predecessor if it kills the expression. */
2885 else if (TEST_BIT (ae_kill[pred_bb], expr->bitmap_index))
2886 visited[pred_bb] = 1;
c4c81601 2887
7506f491
DE
2888 /* Does this predecessor generate this expression? */
2889 else if (TEST_BIT (ae_gen[pred_bb], expr->bitmap_index))
2890 {
2891 /* Is this the occurrence we're looking for?
2892 Note that there's only one generating occurrence per block
2893 so we just need to check the block number. */
2894 if (BLOCK_NUM (occr->insn) == pred_bb)
2895 return 1;
c4c81601 2896
7506f491
DE
2897 visited[pred_bb] = 1;
2898 }
c4c81601 2899
7506f491
DE
2900 /* Neither gen nor kill. */
2901 else
ac7c5af5 2902 {
7506f491 2903 visited[pred_bb] = 1;
283a2545
RL
2904 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
2905 visited))
c4c81601 2906
7506f491 2907 return 1;
ac7c5af5 2908 }
7506f491
DE
2909 }
2910
2911 /* All paths have been checked. */
2912 return 0;
2913}
2914
283a2545
RL
2915/* This wrapper for expr_reaches_here_p_work() is to ensure that any
2916 memory allocated for that function is returned. */
2917
2918static int
2919expr_reaches_here_p (occr, expr, bb, check_self_loop)
2920 struct occr *occr;
2921 struct expr *expr;
2922 int bb;
2923 int check_self_loop;
2924{
2925 int rval;
c4c81601 2926 char *visited = (char *) xcalloc (n_basic_blocks, 1);
283a2545 2927
c4c81601 2928 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
283a2545
RL
2929
2930 free (visited);
c4c81601 2931 return rval;
283a2545
RL
2932}

/* Return the instruction that computes EXPR that reaches INSN's basic block.
   If there is more than one such instruction, return NULL.

   Called only by handle_avail_expr.  */

static rtx
computing_insn (expr, insn)
     struct expr *expr;
     rtx insn;
{
  int bb = BLOCK_NUM (insn);

  if (expr->avail_occr->next == NULL)
    {
      if (BLOCK_NUM (expr->avail_occr->insn) == bb)
	/* The available expression is actually itself
	   (i.e. a loop in the flow graph) so do nothing.  */
	return NULL;

      /* (FIXME) Case that we found a pattern that was created by
	 a substitution that took place.  */
      return expr->avail_occr->insn;
    }
  else
    {
      /* Pattern is computed more than once.
	 Search backwards from this insn to see how many of these
	 computations actually reach this insn.  */
      struct occr *occr;
      rtx insn_computes_expr = NULL;
      int can_reach = 0;

      for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
	{
	  if (BLOCK_NUM (occr->insn) == bb)
	    {
	      /* The expression is generated in this block.
		 The only time we care about this is when the expression
		 is generated later in the block [and thus there's a loop].
		 We let the normal cse pass handle the other cases.  */
	      if (INSN_CUID (insn) < INSN_CUID (occr->insn)
		  && expr_reaches_here_p (occr, expr, bb, 1))
		{
		  can_reach++;
		  if (can_reach > 1)
		    return NULL;

		  insn_computes_expr = occr->insn;
		}
	    }
	  else if (expr_reaches_here_p (occr, expr, bb, 0))
	    {
	      can_reach++;
	      if (can_reach > 1)
		return NULL;

	      insn_computes_expr = occr->insn;
	    }
	}

      if (insn_computes_expr == NULL)
	abort ();

      return insn_computes_expr;
    }
}

/* Return non-zero if the definition in DEF_INSN can reach INSN.
   Only called by can_disregard_other_sets.  */

static int
def_reaches_here_p (insn, def_insn)
     rtx insn, def_insn;
{
  rtx reg;

  if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
    return 1;

  if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
    {
      if (INSN_CUID (def_insn) < INSN_CUID (insn))
	{
	  if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
	    return 1;
	  else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
	    reg = XEXP (PATTERN (def_insn), 0);
	  else if (GET_CODE (PATTERN (def_insn)) == SET)
	    reg = SET_DEST (PATTERN (def_insn));
	  else
	    abort ();

	  return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
	}
      else
	return 0;
    }

  return 0;
}

/* Return non-zero if *ADDR_THIS_REG can only have one value at INSN.  The
   value returned is the number of definitions that reach INSN.  Returning a
   value of zero means that [maybe] more than one definition reaches INSN and
   the caller can't perform whatever optimization it is trying.  i.e. it is
   always safe to return zero.  */

static int
can_disregard_other_sets (addr_this_reg, insn, for_combine)
     struct reg_set **addr_this_reg;
     rtx insn;
     int for_combine;
{
  int number_of_reaching_defs = 0;
  struct reg_set *this_reg;

  for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
    if (def_reaches_here_p (insn, this_reg->insn))
      {
	number_of_reaching_defs++;
	/* Ignore parallels for now.  */
	if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
	  return 0;

	if (!for_combine
	    && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
		|| ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
				  SET_SRC (PATTERN (insn)))))
	  /* A setting of the reg to a different value reaches INSN.  */
	  return 0;

	if (number_of_reaching_defs > 1)
	  {
	    /* If in this setting the value the register is being set to is
	       equal to the previous value the register was set to and this
	       setting reaches the insn we are trying to do the substitution
	       on then we are ok.  */
	    if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
	      return 0;
	    else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
				    SET_SRC (PATTERN (insn))))
	      return 0;
	  }

	*addr_this_reg = this_reg;
      }

  return number_of_reaching_defs;
}

/* Expression computed by insn is available and the substitution is legal,
   so try to perform the substitution.

   The result is non-zero if any changes were made.  */

static int
handle_avail_expr (insn, expr)
     rtx insn;
     struct expr *expr;
{
  rtx pat, insn_computes_expr;
  rtx to;
  struct reg_set *this_reg;
  int found_setting, use_src;
  int changed = 0;

  /* We only handle the case where one computation of the expression
     reaches this instruction.  */
  insn_computes_expr = computing_insn (expr, insn);
  if (insn_computes_expr == NULL)
    return 0;

  found_setting = 0;
  use_src = 0;

  /* At this point we know only one computation of EXPR outside of this
     block reaches this insn.  Now try to find a register that the
     expression is computed into.  */
  if (GET_CODE (SET_SRC (PATTERN (insn_computes_expr))) == REG)
    {
      /* This is the case when the available expression that reaches
	 here has already been handled as an available expression.  */
      unsigned int regnum_for_replacing
	= REGNO (SET_SRC (PATTERN (insn_computes_expr)));

      /* If the register was created by GCSE we can't use `reg_set_table',
	 however we know it's set only once.  */
      if (regnum_for_replacing >= max_gcse_regno
	  /* If the register the expression is computed into is set only once,
	     or only one set reaches this insn, we can use it.  */
	  || (((this_reg = reg_set_table[regnum_for_replacing]),
	       this_reg->next == NULL)
	      || can_disregard_other_sets (&this_reg, insn, 0)))
	{
	  use_src = 1;
	  found_setting = 1;
	}
    }

  if (!found_setting)
    {
      unsigned int regnum_for_replacing
	= REGNO (SET_DEST (PATTERN (insn_computes_expr)));

      /* This shouldn't happen.  */
      if (regnum_for_replacing >= max_gcse_regno)
	abort ();

      this_reg = reg_set_table[regnum_for_replacing];

      /* If the register the expression is computed into is set only once,
	 or only one set reaches this insn, use it.  */
      if (this_reg->next == NULL
	  || can_disregard_other_sets (&this_reg, insn, 0))
	found_setting = 1;
    }

  if (found_setting)
    {
      pat = PATTERN (insn);
      if (use_src)
	to = SET_SRC (PATTERN (insn_computes_expr));
      else
	to = SET_DEST (PATTERN (insn_computes_expr));
      changed = validate_change (insn, &SET_SRC (pat), to, 0);

      /* We should be able to ignore the return code from validate_change but
	 to play it safe we check.  */
      if (changed)
	{
	  gcse_subst_count++;
	  if (gcse_file != NULL)
	    {
	      fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
		       INSN_UID (insn));
	      fprintf (gcse_file, " reg %d %s insn %d\n",
		       REGNO (to), use_src ? "from" : "set in",
		       INSN_UID (insn_computes_expr));
	    }
	}
    }

  /* The register that the expr is computed into is set more than once.  */
  else if (1 /*expensive_op(this_pattern->op) && do_expensive_gcse)*/)
    {
      /* Insert an insn after INSN_COMPUTES_EXPR that copies the register
	 set there into a new pseudo register; call this new register REGN.
	 From that point until the end of the basic block, or until the old
	 register is set again, replace all uses of the old register with
	 REGN.  */
      rtx new_insn;

      to = gen_reg_rtx (GET_MODE (SET_DEST (PATTERN (insn_computes_expr))));

      /* Generate the new insn.  */
      /* ??? If the change fails, we return 0, even though we created
	 an insn.  I think this is ok.  */
      new_insn
	= emit_insn_after (gen_rtx_SET (VOIDmode, to,
					SET_DEST (PATTERN
						  (insn_computes_expr))),
			   insn_computes_expr);

      /* Keep block number table up to date.  */
      set_block_num (new_insn, BLOCK_NUM (insn_computes_expr));

      /* Keep register set table up to date.  */
      record_one_set (REGNO (to), new_insn);

      gcse_create_count++;
      if (gcse_file != NULL)
	{
	  fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
		   INSN_UID (NEXT_INSN (insn_computes_expr)),
		   REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
	  fprintf (gcse_file, ", computed in insn %d,\n",
		   INSN_UID (insn_computes_expr));
	  fprintf (gcse_file, " into newly allocated reg %d\n",
		   REGNO (to));
	}

      pat = PATTERN (insn);

      /* Do register replacement for INSN.  */
      changed = validate_change (insn, &SET_SRC (pat),
				 SET_DEST (PATTERN
					   (NEXT_INSN (insn_computes_expr))),
				 0);

      /* We should be able to ignore the return code from validate_change but
	 to play it safe we check.  */
      if (changed)
	{
	  gcse_subst_count++;
	  if (gcse_file != NULL)
	    {
	      fprintf (gcse_file,
		       "GCSE: Replacing the source in insn %d with reg %d ",
		       INSN_UID (insn),
		       REGNO (SET_DEST (PATTERN (NEXT_INSN
						 (insn_computes_expr)))));
	      fprintf (gcse_file, "set in insn %d\n",
		       INSN_UID (insn_computes_expr));
	    }
	}
    }

  return changed;
}
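
/* A before/after sketch of the substitution performed above (insn and
   register numbers made up).  Given

       block 2:  I1: (set (reg 105) (plus (reg 100) (reg 101)))
       block 4:  I9: (set (reg 108) (plus (reg 100) (reg 101)))

   with I1 the only computation reaching I9, the found_setting path
   rewrites I9 to (set (reg 108) (reg 105)).  If reg 105 is set more
   than once, the fallback path instead emits a fresh copy
   (set (reg NEW) (reg 105)) right after I1 and rewrites I9 to use
   reg NEW.  */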

/* Perform classic GCSE.  This is called by one_classic_gcse_pass after all
   the dataflow analysis has been done.

   The result is non-zero if a change was made.  */

static int
classic_gcse ()
{
  int bb, changed;
  rtx insn;

  /* Note we start at block 1.  */

  changed = 0;
  for (bb = 1; bb < n_basic_blocks; bb++)
    {
      /* Reset tables used to keep track of what's still valid [since the
	 start of the block].  */
      reset_opr_set_tables ();

      for (insn = BLOCK_HEAD (bb);
	   insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
	   insn = NEXT_INSN (insn))
	{
	  /* Is insn of form (set (pseudo-reg) ...)?  */
	  if (GET_CODE (insn) == INSN
	      && GET_CODE (PATTERN (insn)) == SET
	      && GET_CODE (SET_DEST (PATTERN (insn))) == REG
	      && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
	    {
	      rtx pat = PATTERN (insn);
	      rtx src = SET_SRC (pat);
	      struct expr *expr;

	      if (want_to_gcse_p (src)
		  /* Is the expression recorded?  */
		  && ((expr = lookup_expr (src)) != NULL)
		  /* Is the expression available [at the start of the
		     block]?  */
		  && TEST_BIT (ae_in[bb], expr->bitmap_index)
		  /* Are the operands unchanged since the start of the
		     block?  */
		  && oprs_not_set_p (src, insn))
		changed |= handle_avail_expr (insn, expr);
	    }

	  /* Keep track of everything modified by this insn.  */
	  /* ??? Need to be careful w.r.t. mods done to INSN.  */
	  if (INSN_P (insn))
	    mark_oprs_set (insn);
	}
    }

  return changed;
}

/* Top level routine to perform one classic GCSE pass.

   Return non-zero if a change was made.  */

static int
one_classic_gcse_pass (pass)
     int pass;
{
  int changed = 0;

  gcse_subst_count = 0;
  gcse_create_count = 0;

  alloc_expr_hash_table (max_cuid);
  alloc_rd_mem (n_basic_blocks, max_cuid);
  compute_expr_hash_table ();
  if (gcse_file)
    dump_hash_table (gcse_file, "Expression", expr_hash_table,
		     expr_hash_table_size, n_exprs);

  if (n_exprs > 0)
    {
      compute_kill_rd ();
      compute_rd ();
      alloc_avail_expr_mem (n_basic_blocks, n_exprs);
      compute_ae_gen ();
      compute_ae_kill (ae_gen, ae_kill);
      compute_available (ae_gen, ae_kill, ae_out, ae_in);
      changed = classic_gcse ();
      free_avail_expr_mem ();
    }

  free_rd_mem ();
  free_expr_hash_table ();

  if (gcse_file)
    {
      fprintf (gcse_file, "\n");
      fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
	       current_function_name, pass, bytes_used, gcse_subst_count);
      fprintf (gcse_file, "%d insns created\n", gcse_create_count);
    }

  return changed;
}
\f
/* Compute copy/constant propagation working variables.  */

/* Local properties of assignments.  */
static sbitmap *cprop_pavloc;
static sbitmap *cprop_absaltered;

/* Global properties of assignments (computed from the local properties).  */
static sbitmap *cprop_avin;
static sbitmap *cprop_avout;

/* Allocate vars used for copy/const propagation.  N_BLOCKS is the number of
   basic blocks.  N_SETS is the number of sets.  */

static void
alloc_cprop_mem (n_blocks, n_sets)
     int n_blocks, n_sets;
{
  cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);

  cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
}

/* Free vars used by copy/const propagation.  */

static void
free_cprop_mem ()
{
  free (cprop_pavloc);
  free (cprop_absaltered);
  free (cprop_avin);
  free (cprop_avout);
}

/* For each block, compute whether X is transparent.  X is either an
   expression or an assignment [though we don't care which, for this context
   an assignment is treated as an expression].  For each block where an
   element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
   bit in BMAP.  */

static void
compute_transp (x, indx, bmap, set_p)
     rtx x;
     int indx;
     sbitmap *bmap;
     int set_p;
{
  int bb, i, j;
  enum rtx_code code;
  reg_set *r;
  const char *fmt;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:

  if (x == 0)
    return;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      if (set_p)
	{
	  if (REGNO (x) < FIRST_PSEUDO_REGISTER)
	    {
	      for (bb = 0; bb < n_basic_blocks; bb++)
		if (TEST_BIT (reg_set_in_block[bb], REGNO (x)))
		  SET_BIT (bmap[bb], indx);
	    }
	  else
	    {
	      for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
		SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
	    }
	}
      else
	{
	  if (REGNO (x) < FIRST_PSEUDO_REGISTER)
	    {
	      for (bb = 0; bb < n_basic_blocks; bb++)
		if (TEST_BIT (reg_set_in_block[bb], REGNO (x)))
		  RESET_BIT (bmap[bb], indx);
	    }
	  else
	    {
	      for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
		RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
	    }
	}

      return;

    case MEM:
      if (set_p)
	{
	  for (bb = 0; bb < n_basic_blocks; bb++)
	    if (mem_set_in_block[bb])
	      SET_BIT (bmap[bb], indx);
	}
      else
	{
	  for (bb = 0; bb < n_basic_blocks; bb++)
	    if (mem_set_in_block[bb])
	      RESET_BIT (bmap[bb], indx);
	}

      x = XEXP (x, 0);
      goto repeat;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }

	  compute_transp (XEXP (x, i), indx, bmap, set_p);
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
    }
}

/* Top level routine to do the dataflow analysis needed by copy/const
   propagation.  */

static void
compute_cprop_data ()
{
  compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, 1);
  compute_available (cprop_pavloc, cprop_absaltered,
		     cprop_avout, cprop_avin);
}
\f
/* Copy/constant propagation.  */

/* Maximum number of register uses in an insn that we handle.  */
#define MAX_USES 8

/* Table of uses found in an insn.
   Allocated statically to avoid alloc/free complexity and overhead.  */
static struct reg_use reg_use_table[MAX_USES];

/* Index into `reg_use_table' while building it.  */
static int reg_use_count;

/* Set up a list of register numbers used in INSN.  The found uses are stored
   in `reg_use_table'.  `reg_use_count' is initialized to zero before entry,
   and contains the number of uses in the table upon exit.

   ??? If a register appears multiple times we will record it multiple times.
   This doesn't hurt anything but it will slow things down.  */
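
/* For illustration (hypothetical, not from the original source): scanning

     (set (reg 3) (plus (reg 1) (reg 2)))

   leaves reg_use_count == 2, with reg_use_table recording (reg 1) and
   (reg 2) in whatever order the rtx walk visits them.  The SET_DEST
   (reg 3) is deliberately skipped unless it is a MEM, in which case the
   registers used in the address are recorded as uses.  */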

static void
find_used_regs (x)
     rtx x;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:

  if (x == 0)
    return;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      if (reg_use_count == MAX_USES)
	return;

      reg_use_table[reg_use_count].reg_rtx = x;
      reg_use_count++;
      return;

    case MEM:
      x = XEXP (x, 0);
      goto repeat;

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT: /*FIXME*/
      return;

    case SET:
      if (GET_CODE (SET_DEST (x)) == MEM)
	find_used_regs (SET_DEST (x));
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, 0);
	      goto repeat;
	    }

	  find_used_regs (XEXP (x, i));
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  find_used_regs (XVECEXP (x, i, j));
    }
}

/* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
   Returns non-zero if successful.  */

static int
try_replace_reg (from, to, insn)
     rtx from, to, insn;
{
  rtx note;
  rtx src;
  int success;
  rtx set;

  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

  if (!note)
    note = find_reg_note (insn, REG_EQUIV, NULL_RTX);

  /* If this fails we could try to simplify the result of the
     replacement and attempt to recognize the simplified insn.

     But we need a general simplify_rtx that doesn't have pass
     specific state variables.  I'm not aware of one at the moment.  */

  success = validate_replace_src (from, to, insn);
  set = single_set (insn);

  /* We've failed to do the replacement.  Try to add a REG_EQUAL note so
     as not to lose information.  */
  if (!success && !note)
    {
      if (!set)
	return 0;

      note = REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
						   copy_rtx (SET_SRC (set)),
						   REG_NOTES (insn));
    }

  /* Always do the replacement in REG_EQUAL and REG_EQUIV notes.  Also
     try to simplify them.  */
  if (note)
    {
      rtx simplified;

      if (!validate_replace_rtx_subexp (from, to, insn, &XEXP (note, 0)))
	abort ();

      src = XEXP (note, 0);

      /* Try to simplify the resulting note.  */
      simplified = simplify_rtx (src);
      if (simplified)
	{
	  src = simplified;
	  XEXP (note, 0) = src;
	}

      /* REG_EQUAL may get simplified into a register.  We don't allow
	 that, so remove the note.  This case ought not to happen, because
	 earlier code ought to have synthesized a reg-reg move, but be on
	 the safe side.  */
      else if (REG_P (src))
	remove_note (insn, note);
    }

  return success;
}

/* Find a set of REGNOs that are available on entry to INSN's block.  Returns
   NULL if no such set is found.  */

static struct expr *
find_avail_set (regno, insn)
     int regno;
     rtx insn;
{
  /* SET1 contains the last set found that can be returned to the caller for
     use in a substitution.  */
  struct expr *set1 = 0;

  /* Loops are not possible here.  To get a loop we would need two sets
     available at the start of the block containing INSN.  I.e. we would
     need two sets like this available at the start of the block:

       (set (reg X) (reg Y))
       (set (reg Y) (reg X))

     This can not happen since the set of (reg Y) would have killed the
     set of (reg X), making it unavailable at the start of this block.  */
  while (1)
    {
      rtx src;
      struct expr *set = lookup_set (regno, NULL_RTX);

      /* Find a set that is available at the start of the block
	 which contains INSN.  */
      while (set)
	{
	  if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
	    break;
	  set = next_set (regno, set);
	}

      /* If no available set was found we've reached the end of the
	 (possibly empty) copy chain.  */
      if (set == 0)
	break;

      if (GET_CODE (set->expr) != SET)
	abort ();

      src = SET_SRC (set->expr);

      /* We know the set is available.
	 Now check that SRC is ANTLOC (i.e. none of the source operands
	 have changed since the start of the block).

	 If the source operand changed, we may still use it for the next
	 iteration of this loop, but we may not use it for substitutions.  */

      if (CONSTANT_P (src) || oprs_not_set_p (src, insn))
	set1 = set;

      /* If the source of the set is anything except a register, then
	 we have reached the end of the copy chain.  */
      if (GET_CODE (src) != REG)
	break;

      /* Follow the copy chain, i.e. start another iteration of the loop
	 and see if we have an available copy into SRC.  */
      regno = REGNO (src);
    }

  /* SET1 holds the last set that was available and anticipatable at
     INSN.  */
  return set1;
}

/* Subroutine of cprop_insn that tries to propagate constants into
   JUMP_INSNS.  INSN must be a conditional jump; COPY is a copy of it
   that we can use for substitutions.
   REG_USED is the use we will try to replace, SRC is the constant we
   will try to substitute for it.
   Returns nonzero if a change was made.  */
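
/* A worked example (hypothetical, for illustration only): if INSN is

     (set (pc) (if_then_else (eq (reg 5) (const_int 0))
			     (label_ref 23) (pc)))

   and we substitute (const_int 0) for (reg 5), the condition folds to
   true, simplify_ternary_operation returns (label_ref 23), and the insn
   becomes an unconditional jump to label 23.  Had the condition folded
   to false, SET_SRC would become pc_rtx and the insn a nop jump.  */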

static int
cprop_jump (insn, copy, reg_used, src)
     rtx insn, copy;
     struct reg_use *reg_used;
     rtx src;
{
  rtx set = PATTERN (copy);
  rtx temp;

  /* Replace the register with the appropriate constant.  */
  replace_rtx (SET_SRC (set), reg_used->reg_rtx, src);

  temp = simplify_ternary_operation (GET_CODE (SET_SRC (set)),
				     GET_MODE (SET_SRC (set)),
				     GET_MODE (XEXP (SET_SRC (set), 0)),
				     XEXP (SET_SRC (set), 0),
				     XEXP (SET_SRC (set), 1),
				     XEXP (SET_SRC (set), 2));

  /* If no simplification can be made, then try the next
     register.  */
  if (temp == 0)
    return 0;

  SET_SRC (set) = temp;

  /* That may have changed the structure of TEMP, so
     force it to be rerecognized if it has not turned
     into a nop or unconditional jump.  */

  INSN_CODE (copy) = -1;
  if ((SET_DEST (set) == pc_rtx
       && (SET_SRC (set) == pc_rtx
	   || GET_CODE (SET_SRC (set)) == LABEL_REF))
      || recog (PATTERN (copy), copy, NULL) >= 0)
    {
      /* This has either become an unconditional jump
	 or a nop-jump.  We'd like to delete nop jumps
	 here, but doing so confuses gcse.  So we just
	 make the replacement and let later passes
	 sort things out.  */
      PATTERN (insn) = set;
      INSN_CODE (insn) = -1;

      /* One less use of the label this insn used to jump to
	 if we turned this into a NOP jump.  */
      if (SET_SRC (set) == pc_rtx && JUMP_LABEL (insn) != 0)
	--LABEL_NUSES (JUMP_LABEL (insn));

      /* If this has turned into an unconditional jump,
	 then put a barrier after it so that the unreachable
	 code will be deleted.  */
      if (GET_CODE (SET_SRC (set)) == LABEL_REF)
	emit_barrier_after (insn);

      run_jump_opt_after_gcse = 1;

      const_prop_count++;
      if (gcse_file != NULL)
	{
	  fprintf (gcse_file,
		   "CONST-PROP: Replacing reg %d in insn %d with constant ",
		   REGNO (reg_used->reg_rtx), INSN_UID (insn));
	  print_rtl (gcse_file, src);
	  fprintf (gcse_file, "\n");
	}

      return 1;
    }

  return 0;
}

#ifdef HAVE_cc0

/* Subroutine of cprop_insn that tries to propagate constants into JUMP_INSNS
   for machines that have CC0.  INSN is a single set that stores into CC0;
   the insn following it is a conditional jump.  REG_USED is the use we will
   try to replace, SRC is the constant we will try to substitute for it.
   Returns nonzero if a change was made.  */

static int
cprop_cc0_jump (insn, reg_used, src)
     rtx insn;
     struct reg_use *reg_used;
     rtx src;
{
  rtx jump = NEXT_INSN (insn);
  rtx copy = copy_rtx (jump);
  rtx set = PATTERN (copy);

  /* We need to copy the source of the cc0 setter, as cprop_jump is going to
     substitute into it.  */
  replace_rtx (SET_SRC (set), cc0_rtx, copy_rtx (SET_SRC (PATTERN (insn))));
  if (! cprop_jump (jump, copy, reg_used, src))
    return 0;

  /* If we succeeded, delete the cc0 setter.  */
  PUT_CODE (insn, NOTE);
  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
  NOTE_SOURCE_FILE (insn) = 0;
  return 1;
}
#endif

/* Perform constant and copy propagation on INSN.
   The result is non-zero if a change was made.  */

static int
cprop_insn (insn, alter_jumps)
     rtx insn;
     int alter_jumps;
{
  struct reg_use *reg_used;
  int changed = 0;
  rtx note;

  /* Only propagate into SETs.  Note that a conditional jump is a
     SET with pc_rtx as the destination.  */
  if ((GET_CODE (insn) != INSN
       && GET_CODE (insn) != JUMP_INSN)
      || GET_CODE (PATTERN (insn)) != SET)
    return 0;

  reg_use_count = 0;
  find_used_regs (PATTERN (insn));

  note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
  if (!note)
    note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

  /* We may win even when propagating constants into notes.  */
  if (note)
    find_used_regs (XEXP (note, 0));

  for (reg_used = &reg_use_table[0]; reg_use_count > 0;
       reg_used++, reg_use_count--)
    {
      unsigned int regno = REGNO (reg_used->reg_rtx);
      rtx pat, src;
      struct expr *set;

      /* Ignore registers created by GCSE.
	 We do this because ...  */
      if (regno >= max_gcse_regno)
	continue;

      /* If the register has already been set in this block, there's
	 nothing we can do.  */
      if (! oprs_not_set_p (reg_used->reg_rtx, insn))
	continue;

      /* Find an assignment that sets reg_used and is available
	 at the start of the block.  */
      set = find_avail_set (regno, insn);
      if (! set)
	continue;

      pat = set->expr;
      /* ??? We might be able to handle PARALLELs.  Later.  */
      if (GET_CODE (pat) != SET)
	abort ();

      src = SET_SRC (pat);

      /* Constant propagation.  */
      if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE
	  || GET_CODE (src) == SYMBOL_REF)
	{
	  /* Handle normal insns first.  */
	  if (GET_CODE (insn) == INSN
	      && try_replace_reg (reg_used->reg_rtx, src, insn))
	    {
	      changed = 1;
	      const_prop_count++;
	      if (gcse_file != NULL)
		{
		  fprintf (gcse_file, "CONST-PROP: Replacing reg %d in ",
			   regno);
		  fprintf (gcse_file, "insn %d with constant ",
			   INSN_UID (insn));
		  print_rtl (gcse_file, src);
		  fprintf (gcse_file, "\n");
		}

	      /* The original insn setting reg_used may or may not now be
		 deletable.  We leave the deletion to flow.  */
	    }

	  /* Try to propagate a CONST_INT into a conditional jump.
	     We're pretty specific about what we will handle in this
	     code, we can extend this as necessary over time.

	     Right now the insn in question must look like
	       (set (pc) (if_then_else ...))  */
	  else if (alter_jumps
		   && GET_CODE (insn) == JUMP_INSN
		   && condjump_p (insn)
		   && ! simplejump_p (insn))
	    changed |= cprop_jump (insn, copy_rtx (insn), reg_used, src);
#ifdef HAVE_cc0
	  /* Similar code for machines that use a pair of CC0 setter and
	     conditional jump insn.  */
	  else if (alter_jumps
		   && GET_CODE (PATTERN (insn)) == SET
		   && SET_DEST (PATTERN (insn)) == cc0_rtx
		   && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
		   && condjump_p (NEXT_INSN (insn))
		   && ! simplejump_p (NEXT_INSN (insn)))
	    changed |= cprop_cc0_jump (insn, reg_used, src);
#endif
	}
      else if (GET_CODE (src) == REG
	       && REGNO (src) >= FIRST_PSEUDO_REGISTER
	       && REGNO (src) != regno)
	{
	  if (try_replace_reg (reg_used->reg_rtx, src, insn))
	    {
	      changed = 1;
	      copy_prop_count++;
	      if (gcse_file != NULL)
		{
		  fprintf (gcse_file, "COPY-PROP: Replacing reg %d in insn %d",
			   regno, INSN_UID (insn));
		  fprintf (gcse_file, " with reg %d\n", REGNO (src));
		}

	      /* The original insn setting reg_used may or may not now be
		 deletable.  We leave the deletion to flow.  */
	      /* FIXME: If it turns out that the insn isn't deletable,
		 then we may have unnecessarily extended register lifetimes
		 and made things worse.  */
	    }
	}
    }

  return changed;
}

/* Forward propagate copies.  This includes copies and constants.  Return
   non-zero if a change was made.  */

static int
cprop (alter_jumps)
     int alter_jumps;
{
  int bb, changed;
  rtx insn;

  /* Note we start at block 1.  */

  changed = 0;
  for (bb = 1; bb < n_basic_blocks; bb++)
    {
      /* Reset tables used to keep track of what's still valid [since the
	 start of the block].  */
      reset_opr_set_tables ();

      for (insn = BLOCK_HEAD (bb);
	   insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
	   insn = NEXT_INSN (insn))
	{
	  if (INSN_P (insn))
	    {
	      changed |= cprop_insn (insn, alter_jumps);

	      /* Keep track of everything modified by this insn.  */
	      /* ??? Need to be careful w.r.t. mods done to INSN.  Don't
		 call mark_oprs_set if we turned the insn into a NOTE.  */
	      if (GET_CODE (insn) != NOTE)
		mark_oprs_set (insn);
	    }
	}
    }

  if (gcse_file != NULL)
    fprintf (gcse_file, "\n");

  return changed;
}

/* Perform one copy/constant propagation pass.
   PASS is the pass count.  ALTER_JUMPS is non-zero if we may propagate
   constants into conditional jumps.  */

static int
one_cprop_pass (pass, alter_jumps)
     int pass;
     int alter_jumps;
{
  int changed = 0;

  const_prop_count = 0;
  copy_prop_count = 0;

  alloc_set_hash_table (max_cuid);
  compute_set_hash_table ();
  if (gcse_file)
    dump_hash_table (gcse_file, "SET", set_hash_table, set_hash_table_size,
		     n_sets);
  if (n_sets > 0)
    {
      alloc_cprop_mem (n_basic_blocks, n_sets);
      compute_cprop_data ();
      changed = cprop (alter_jumps);
      free_cprop_mem ();
    }

  free_set_hash_table ();

  if (gcse_file)
    {
      fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
	       current_function_name, pass, bytes_used);
      fprintf (gcse_file, "%d const props, %d copy props\n\n",
	       const_prop_count, copy_prop_count);
    }

  return changed;
}
\f
/* Compute PRE+LCM working variables.  */

/* Local properties of expressions.  */
/* Nonzero for expressions that are transparent in the block.  */
static sbitmap *transp;

/* Nonzero for expressions that are transparent at the end of the block.
   This is only zero for expressions killed by an abnormal critical edge
   created by a call.  */
static sbitmap *transpout;

/* Nonzero for expressions that are computed (available) in the block.  */
static sbitmap *comp;

/* Nonzero for expressions that are locally anticipatable in the block.  */
static sbitmap *antloc;

/* Nonzero for expressions where this block is an optimal computation
   point.  */
static sbitmap *pre_optimal;

/* Nonzero for expressions which are redundant in a particular block.  */
static sbitmap *pre_redundant;

/* Nonzero for expressions which should be inserted on a specific edge.  */
static sbitmap *pre_insert_map;

/* Nonzero for expressions which should be deleted in a specific block.  */
static sbitmap *pre_delete_map;

/* Contains the edge_list returned by pre_edge_lcm.  */
static struct edge_list *edge_list;

/* Redundant insns.  */
static sbitmap pre_redundant_insns;

/* Allocate vars used for PRE analysis.  */

static void
alloc_pre_mem (n_blocks, n_exprs)
     int n_blocks, n_exprs;
{
  transp = sbitmap_vector_alloc (n_blocks, n_exprs);
  comp = sbitmap_vector_alloc (n_blocks, n_exprs);
  antloc = sbitmap_vector_alloc (n_blocks, n_exprs);

  pre_optimal = NULL;
  pre_redundant = NULL;
  pre_insert_map = NULL;
  pre_delete_map = NULL;
  ae_in = NULL;
  ae_out = NULL;
  ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);

  /* pre_insert and pre_delete are allocated later.  */
}

/* Free vars used for PRE analysis.  */

static void
free_pre_mem ()
{
  free (transp);
  free (comp);

  /* ANTLOC and AE_KILL are freed just after pre_lcm finishes.  */

  if (pre_optimal)
    free (pre_optimal);
  if (pre_redundant)
    free (pre_redundant);
  if (pre_insert_map)
    free (pre_insert_map);
  if (pre_delete_map)
    free (pre_delete_map);

  if (ae_in)
    free (ae_in);
  if (ae_out)
    free (ae_out);

  transp = comp = NULL;
  pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
  ae_in = ae_out = NULL;
}

/* Top level routine to do the dataflow analysis needed by PRE.  */

static void
compute_pre_data ()
{
  int i;

  compute_local_properties (transp, comp, antloc, 0);
  sbitmap_vector_zero (ae_kill, n_basic_blocks);

  /* Compute ae_kill for each basic block using:

       ~(TRANSP | COMP)

     This is significantly faster than compute_ae_kill.  */

  for (i = 0; i < n_basic_blocks; i++)
    {
      sbitmap_a_or_b (ae_kill[i], transp[i], comp[i]);
      sbitmap_not (ae_kill[i], ae_kill[i]);
    }

  edge_list = pre_edge_lcm (gcse_file, n_exprs, transp, comp, antloc,
			    ae_kill, &pre_insert_map, &pre_delete_map);
  free (antloc);
  antloc = NULL;
  free (ae_kill);
  ae_kill = NULL;
}
\f
/* PRE utilities */

/* Return non-zero if an occurrence of expression EXPR in OCCR_BB would reach
   block BB.

   VISITED is a pointer to a working buffer for tracking which BB's have
   been visited.  It is allocated (and freed) for the top-level call by
   the pre_expr_reaches_here_p wrapper.

   We treat reaching expressions that go through blocks containing the same
   reaching expression as "not reaching".  E.g. if EXPR is generated in blocks
   2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
   2 as not reaching.  The intent is to improve the probability of finding
   only one reaching expression and to reduce register lifetimes by picking
   the closest such expression.  */

static int
pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited)
     int occr_bb;
     struct expr *expr;
     int bb;
     char *visited;
{
  edge pred;

  for (pred = BASIC_BLOCK (bb)->pred; pred != NULL; pred = pred->pred_next)
    {
      int pred_bb = pred->src->index;

      if (pred->src == ENTRY_BLOCK_PTR
	  /* Has this predecessor already been visited?  */
	  || visited[pred_bb])
	; /* Nothing to do.  */

      /* Does this predecessor generate this expression?  */
      else if (TEST_BIT (comp[pred_bb], expr->bitmap_index))
	{
	  /* Is this the occurrence we're looking for?
	     Note that there's only one generating occurrence per block
	     so we just need to check the block number.  */
	  if (occr_bb == pred_bb)
	    return 1;

	  visited[pred_bb] = 1;
	}
      /* Ignore this predecessor if it kills the expression.  */
      else if (! TEST_BIT (transp[pred_bb], expr->bitmap_index))
	visited[pred_bb] = 1;

      /* Neither gen nor kill.  */
      else
	{
	  visited[pred_bb] = 1;
	  if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
	    return 1;
	}
    }

  /* All paths have been checked.  */
  return 0;
}

/* The wrapper for pre_expr_reaches_here_p_work that ensures that any
   memory allocated for that function is returned.  */

static int
pre_expr_reaches_here_p (occr_bb, expr, bb)
     int occr_bb;
     struct expr *expr;
     int bb;
{
  int rval;
  char *visited = (char *) xcalloc (n_basic_blocks, 1);

  rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);

  free (visited);
  return rval;
}
\f

/* Given an expr, generate RTL which we can insert at the end of a basic
   block, or on an edge.  The caller is expected to set the block number
   on the insns it inserts.  */

static rtx
process_insert_insn (expr)
     struct expr *expr;
{
  rtx reg = expr->reaching_reg;
  rtx pat, copied_expr;
  rtx first_new_insn;

  start_sequence ();
  copied_expr = copy_rtx (expr->expr);
  emit_move_insn (reg, copied_expr);
  first_new_insn = get_insns ();
  pat = gen_sequence ();
  end_sequence ();

  return pat;
}

/* Add EXPR to the end of basic block BB.

   This is used by both PRE and code hoisting.

   For PRE, we want to verify that the expr is either transparent
   or locally anticipatable in the target block.  This check makes
   no sense for code hoisting.  */

static void
insert_insn_end_bb (expr, bb, pre)
     struct expr *expr;
     int bb;
     int pre;
{
  rtx insn = BLOCK_END (bb);
  rtx new_insn;
  rtx reg = expr->reaching_reg;
  int regno = REGNO (reg);
  rtx pat;
  int i;

  pat = process_insert_insn (expr);

  /* If the last insn is a jump, insert EXPR in front [taking care to
     handle cc0, etc. properly].  */

  if (GET_CODE (insn) == JUMP_INSN)
    {
#ifdef HAVE_cc0
      rtx note;
#endif

      /* If this is a jump table, then we can't insert stuff here.  Since
	 we know the previous real insn must be the tablejump, we insert
	 the new instruction just before the tablejump.  */
      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	insn = prev_real_insn (insn);

#ifdef HAVE_cc0
      /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
	 if cc0 isn't set.  */
      note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
      if (note)
	insn = XEXP (note, 0);
      else
	{
	  rtx maybe_cc0_setter = prev_nonnote_insn (insn);
	  if (maybe_cc0_setter
	      && INSN_P (maybe_cc0_setter)
	      && sets_cc0_p (PATTERN (maybe_cc0_setter)))
	    insn = maybe_cc0_setter;
	}
#endif
      /* FIXME: What if something in cc0/jump uses value set in new insn?  */
      new_insn = emit_block_insn_before (pat, insn, BASIC_BLOCK (bb));
    }

  /* Likewise if the last insn is a call, as will happen in the presence
     of exception handling.  */
  else if (GET_CODE (insn) == CALL_INSN)
    {
      HARD_REG_SET parm_regs;
      int nparm_regs;
      rtx p;

      /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
	 we search backward and place the instructions before the first
	 parameter is loaded.  Do this for everyone for consistency and a
	 presumption that we'll get better code elsewhere as well.

	 It should always be the case that we can put these instructions
	 anywhere in the basic block when performing PRE optimizations.
	 Check this.  */

      if (pre
	  && !TEST_BIT (antloc[bb], expr->bitmap_index)
	  && !TEST_BIT (transp[bb], expr->bitmap_index))
	abort ();

      /* Since different machines initialize their parameter registers
	 in different orders, assume nothing.  Collect the set of all
	 parameter registers.  */
      CLEAR_HARD_REG_SET (parm_regs);
      nparm_regs = 0;
      for (p = CALL_INSN_FUNCTION_USAGE (insn); p ; p = XEXP (p, 1))
	if (GET_CODE (XEXP (p, 0)) == USE
	    && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
	  {
	    if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
	      abort ();

	    SET_HARD_REG_BIT (parm_regs, REGNO (XEXP (XEXP (p, 0), 0)));
	    nparm_regs++;
	  }

      /* Search backward for the first set of a register in this set.  */
      while (nparm_regs && BLOCK_HEAD (bb) != insn)
	{
	  insn = PREV_INSN (insn);
	  p = single_set (insn);
	  if (p && GET_CODE (SET_DEST (p)) == REG
	      && REGNO (SET_DEST (p)) < FIRST_PSEUDO_REGISTER
	      && TEST_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p))))
	    {
	      CLEAR_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p)));
	      nparm_regs--;
	    }
	}

      /* If we found all the parameter loads, then we want to insert
	 before the first parameter load.

	 If we did not find all the parameter loads, then we might have
	 stopped on the head of the block, which could be a CODE_LABEL.
	 If we inserted before the CODE_LABEL, then we would be putting
	 the insn in the wrong basic block.  In that case, put the insn
	 after the CODE_LABEL.  Also, respect NOTE_INSN_BASIC_BLOCK.  */
      while (GET_CODE (insn) == CODE_LABEL
	     || NOTE_INSN_BASIC_BLOCK_P (insn))
	insn = NEXT_INSN (insn);

      new_insn = emit_block_insn_before (pat, insn, BASIC_BLOCK (bb));
    }
  else
    {
      new_insn = emit_insn_after (pat, insn);
      BLOCK_END (bb) = new_insn;
    }

  /* Keep block number table up to date.
     Note, PAT could be a multiple insn sequence, we have to make
     sure that each insn in the sequence is handled.  */
  if (GET_CODE (pat) == SEQUENCE)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx insn = XVECEXP (pat, 0, i);

	  set_block_num (insn, bb);
	  if (INSN_P (insn))
	    add_label_notes (PATTERN (insn), new_insn);

	  note_stores (PATTERN (insn), record_set_info, insn);
	}
    }
  else
    {
      add_label_notes (SET_SRC (pat), new_insn);
      set_block_num (new_insn, bb);

      /* Keep register set table up to date.  */
      record_one_set (regno, new_insn);
    }

  gcse_create_count++;

  if (gcse_file)
    {
      fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
	       bb, INSN_UID (new_insn));
      fprintf (gcse_file, "copying expression %d to reg %d\n",
	       expr->bitmap_index, regno);
    }
}

/* Insert partially redundant expressions on edges in the CFG to make
   the expressions fully redundant.  */

static int
pre_edge_insert (edge_list, index_map)
     struct edge_list *edge_list;
     struct expr **index_map;
{
  int e, i, j, num_edges, set_size, did_insert = 0;
  sbitmap *inserted;

  /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
     if it reaches any of the deleted expressions.  */

  set_size = pre_insert_map[0]->size;
  num_edges = NUM_EDGES (edge_list);
  inserted = sbitmap_vector_alloc (num_edges, n_exprs);
  sbitmap_vector_zero (inserted, num_edges);

  for (e = 0; e < num_edges; e++)
    {
      int indx;
      basic_block pred = INDEX_EDGE_PRED_BB (edge_list, e);
      int bb = pred->index;

      for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
	{
	  SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];

	  for (j = indx; insert && j < n_exprs; j++, insert >>= 1)
	    if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
	      {
		struct expr *expr = index_map[j];
		struct occr *occr;

		/* Now look at each deleted occurrence of this expression.  */
		for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
		  {
		    if (! occr->deleted_p)
		      continue;

		    /* Insert this expression on this edge if it would
		       reach the deleted occurrence in BB.  */
		    if (!TEST_BIT (inserted[e], j))
		      {
			rtx insn;
			edge eg = INDEX_EDGE (edge_list, e);

			/* We can't insert anything on an abnormal and
			   critical edge, so we insert the insn at the end of
			   the previous block.  There are several alternatives
			   detailed in Morgan's book, p. 277 (sec. 10.5), for
			   handling this situation.  This one is easiest for
			   now.  */

			if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
			  insert_insn_end_bb (index_map[j], bb, 0);
			else
			  {
			    insn = process_insert_insn (index_map[j]);
			    insert_insn_on_edge (insn, eg);
			  }

			if (gcse_file)
			  {
			    fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
				     bb,
				     INDEX_EDGE_SUCC_BB (edge_list, e)->index);
			    fprintf (gcse_file, "copy expression %d\n",
				     expr->bitmap_index);
			  }

			SET_BIT (inserted[e], j);
			did_insert = 1;
			gcse_create_count++;
		      }
		  }
	      }
	}
    }

  free (inserted);
  return did_insert;
}

/* Copy the result of INSN to REG.  INDX is the expression number.  */

static void
pre_insert_copy_insn (expr, insn)
     struct expr *expr;
     rtx insn;
{
  rtx reg = expr->reaching_reg;
  int regno = REGNO (reg);
  int indx = expr->bitmap_index;
  rtx set = single_set (insn);
  rtx new_insn;
  int bb = BLOCK_NUM (insn);

  if (!set)
    abort ();

  new_insn = emit_insn_after (gen_rtx_SET (VOIDmode, reg, SET_DEST (set)),
			      insn);

  /* Keep block number table up to date.  */
  set_block_num (new_insn, bb);

  /* Keep register set table up to date.  */
  record_one_set (regno, new_insn);
  if (insn == BLOCK_END (bb))
    BLOCK_END (bb) = new_insn;

  gcse_create_count++;

  if (gcse_file)
    fprintf (gcse_file,
	     "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
	     BLOCK_NUM (insn), INSN_UID (new_insn), indx,
	     INSN_UID (insn), regno);
}

/* Copy available expressions that reach the redundant expression
   to `reaching_reg'.  */

static void
pre_insert_copies ()
{
  unsigned int i;
  struct expr *expr;
  struct occr *occr;
  struct occr *avail;

  /* For each available expression in the table, copy the result to
     `reaching_reg' if the expression reaches a deleted one.

     ??? The current algorithm is rather brute force.
     Need to do some profiling.  */

  for (i = 0; i < expr_hash_table_size; i++)
    for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
      {
	/* If the basic block isn't reachable, PPOUT will be TRUE.  However,
	   we don't want to insert a copy here because the expression may not
	   really be redundant.  So only insert an insn if the expression was
	   deleted.  This test also avoids further processing if the
	   expression wasn't deleted anywhere.  */
	if (expr->reaching_reg == NULL)
	  continue;

	for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	  {
	    if (! occr->deleted_p)
	      continue;

	    for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
	      {
		rtx insn = avail->insn;

		/* No need to handle this one if handled already.  */
		if (avail->copied_p)
		  continue;

		/* Don't handle this one if it's a redundant one.  */
		if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
		  continue;

		/* Or if the expression doesn't reach the deleted one.  */
		if (! pre_expr_reaches_here_p (BLOCK_NUM (avail->insn), expr,
					       BLOCK_NUM (occr->insn)))
		  continue;

		/* Copy the result of avail to reaching_reg.  */
		pre_insert_copy_insn (expr, insn);
		avail->copied_p = 1;
	      }
	  }
      }
}

/* Delete redundant computations.
   Deletion is done by changing the insn to copy the `reaching_reg' of
   the expression into the result of the SET.  It is left to later passes
   (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.

   Returns non-zero if a change is made.  */
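
/* For illustration (hypothetical, not from the original source): if
   (plus (reg 5) (reg 6)) is deemed redundant at

     (set (reg 10) (plus (reg 5) (reg 6)))

   and (reg 99) was allocated as the expression's `reaching_reg', the
   insn is rewritten as

     (set (reg 10) (reg 99))

   leaving the copy for later passes to propagate or eliminate.  */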

static int
pre_delete ()
{
  unsigned int i;
  int changed;
  struct expr *expr;
  struct occr *occr;

  changed = 0;
  for (i = 0; i < expr_hash_table_size; i++)
    for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
      {
	int indx = expr->bitmap_index;

	/* We only need to search antic_occr since we require
	   ANTLOC != 0.  */

	for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	  {
	    rtx insn = occr->insn;
	    rtx set;
	    int bb = BLOCK_NUM (insn);

	    if (TEST_BIT (pre_delete_map[bb], indx))
	      {
		set = single_set (insn);
		if (! set)
		  abort ();

		/* Create a pseudo-reg to store the result of reaching
		   expressions into.  Get the mode for the new pseudo from
		   the mode of the original destination pseudo.  */
		if (expr->reaching_reg == NULL)
		  expr->reaching_reg
		    = gen_reg_rtx (GET_MODE (SET_DEST (set)));

		/* In theory this should never fail since we're creating
		   a reg->reg copy.

		   However, on the x86 some of the movXX patterns actually
		   contain clobbers of scratch regs.  This may cause the
		   insn created by validate_change to not match any pattern
		   and thus cause validate_change to fail.  */
		if (validate_change (insn, &SET_SRC (set),
				     expr->reaching_reg, 0))
		  {
		    occr->deleted_p = 1;
		    SET_BIT (pre_redundant_insns, INSN_CUID (insn));
		    changed = 1;
		    gcse_subst_count++;
		  }

		if (gcse_file)
		  {
		    fprintf (gcse_file,
			     "PRE: redundant insn %d (expression %d) in ",
			     INSN_UID (insn), indx);
		    fprintf (gcse_file, "bb %d, reaching reg is %d\n",
			     bb, REGNO (expr->reaching_reg));
		  }
	      }
	  }
      }

  return changed;
}

/* Perform GCSE optimizations using PRE.
   This is called by one_pre_gcse_pass after all the dataflow analysis
   has been done.

   This is based on the original Morel-Renvoise paper, Fred Chow's thesis,
   and lazy code motion from Knoop, Ruthing and Steffen as described in
   Advanced Compiler Design and Implementation.

   ??? A new pseudo reg is created to hold the reaching expression.  The nice
   thing about the classical approach is that it would try to use an existing
   reg.  If the register can't be adequately optimized [i.e. we introduce
   reload problems], one could add a pass here to propagate the new register
   through the block.

   ??? We don't handle single sets in PARALLELs because we're [currently] not
   able to copy the rest of the parallel when we insert copies to create full
   redundancies from partial redundancies.  However, there's no reason why we
   can't handle PARALLELs in the cases where there are no partial
   redundancies.  */

static int
pre_gcse ()
{
  unsigned int i;
  int did_insert, changed;
  struct expr **index_map;
  struct expr *expr;

  /* Compute a mapping from expression number (`bitmap_index') to
     hash table entry.  */

  index_map = (struct expr **) xcalloc (n_exprs, sizeof (struct expr *));
  for (i = 0; i < expr_hash_table_size; i++)
    for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
      index_map[expr->bitmap_index] = expr;

  /* Reset bitmap used to track which insns are redundant.  */
  pre_redundant_insns = sbitmap_alloc (max_cuid);
  sbitmap_zero (pre_redundant_insns);

  /* Delete the redundant insns first so that
     - we know what register to use for the new insns and for the other
       ones with reaching expressions
     - we know which insns are redundant when we go to create copies  */

  changed = pre_delete ();

  did_insert = pre_edge_insert (edge_list, index_map);

  /* In other places with reaching expressions, copy the expression to the
     specially allocated pseudo-reg that reaches the redundant expr.  */
  pre_insert_copies ();
  if (did_insert)
    {
      commit_edge_insertions ();
      changed = 1;
    }

  free (index_map);
  free (pre_redundant_insns);
  return changed;
}

/* Top level routine to perform one PRE GCSE pass.

   Return non-zero if a change was made.  */

static int
one_pre_gcse_pass (pass)
     int pass;
{
  int changed = 0;

  gcse_subst_count = 0;
  gcse_create_count = 0;

  alloc_expr_hash_table (max_cuid);
  add_noreturn_fake_exit_edges ();
  compute_expr_hash_table ();
  if (gcse_file)
    dump_hash_table (gcse_file, "Expression", expr_hash_table,
		     expr_hash_table_size, n_exprs);

  if (n_exprs > 0)
    {
      alloc_pre_mem (n_basic_blocks, n_exprs);
      compute_pre_data ();
      changed |= pre_gcse ();
      free_edge_list (edge_list);
      free_pre_mem ();
    }

  remove_fake_edges ();
  free_expr_hash_table ();

  if (gcse_file)
    {
      fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
	       current_function_name, pass, bytes_used);
      fprintf (gcse_file, "%d substs, %d insns created\n",
	       gcse_subst_count, gcse_create_count);
    }

  return changed;
}
\f
/* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
   We have to add REG_LABEL notes, because the following loop optimization
   pass requires them.  */

/* ??? This is very similar to the loop.c add_label_notes function.  We
   could probably share code here.  */

/* ??? If there was a jump optimization pass after gcse and before loop,
   then we would not need to do this here, because jump would add the
   necessary REG_LABEL notes.  */

static void
add_label_notes (x, insn)
     rtx x;
     rtx insn;
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  const char *fmt;

  if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
    {
      /* This code used to ignore labels that referred to dispatch tables to
	 avoid flow generating (slightly) worse code.

	 We no longer ignore such label references (see LABEL_REF handling in
	 mark_jump_label for additional information).  */

      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, XEXP (x, 0),
					    REG_NOTES (insn));
      return;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	add_label_notes (XEXP (x, i), insn);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  add_label_notes (XVECEXP (x, i, j), insn);
    }
}

/* Compute transparent outgoing information for each block.

   An expression is transparent to an edge unless it is killed by
   the edge itself.  This can only happen with abnormal control flow,
   when the edge is traversed through a call.  This happens with
   non-local labels and exceptions.

   This would not be necessary if we split the edge.  While this is
   normally impossible for abnormal critical edges, with some effort
   it should be possible with exception handling, since we still have
   control over which handler should be invoked.  But due to increased
   EH table sizes, this may not be worthwhile.  */
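
/* A minimal sketch of the situation (hypothetical, for illustration only):

     bb 3:  (set (reg 7) (mem (symbol_ref X)))	-- MEM becomes available
	    (call_insn ...)			-- may unwind to a handler

   The abnormal edge out of bb 3 is traversed from within the call itself,
   so the MEM may be clobbered "on the edge" even though it survives to
   the fall-through exit.  compute_transpout therefore clears TRANSPOUT
   for every MEM (other than constant pool references) in blocks that end
   in a call.  */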

static void
compute_transpout ()
{
  int bb;
  unsigned int i;
  struct expr *expr;

  sbitmap_vector_ones (transpout, n_basic_blocks);

  for (bb = 0; bb < n_basic_blocks; ++bb)
    {
      /* Note that flow inserted a nop at the end of basic blocks that
	 end in call instructions for reasons other than abnormal
	 control flow.  */
      if (GET_CODE (BLOCK_END (bb)) != CALL_INSN)
	continue;

      for (i = 0; i < expr_hash_table_size; i++)
	for (expr = expr_hash_table[i]; expr ; expr = expr->next_same_hash)
	  if (GET_CODE (expr->expr) == MEM)
	    {
	      if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
		  && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
		continue;

	      /* ??? Optimally, we would use interprocedural alias
		 analysis to determine if this mem is actually killed
		 by this call.  */
	      RESET_BIT (transpout[bb], expr->bitmap_index);
	    }
    }
}

/* Removal of useless null pointer checks */

/* Called via note_stores.  X is set by SETTER.  If X is a register we must
   invalidate nonnull_local and set nonnull_killed.  DATA is really a
   `null_pointer_info *'.

   We ignore hard registers.  */

static void
invalidate_nonnull_info (x, setter, data)
     rtx x;
     rtx setter ATTRIBUTE_UNUSED;
     void *data;
{
  unsigned int regno;
  struct null_pointer_info *npi = (struct null_pointer_info *) data;

  while (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* Ignore anything that is not a register or is a hard register.  */
  if (GET_CODE (x) != REG
      || REGNO (x) < npi->min_reg
      || REGNO (x) >= npi->max_reg)
    return;

  regno = REGNO (x) - npi->min_reg;

  RESET_BIT (npi->nonnull_local[npi->current_block], regno);
  SET_BIT (npi->nonnull_killed[npi->current_block], regno);
}

/* Do null-pointer check elimination for the registers indicated in
   NPI.  NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
   they are not our responsibility to free.  */

static void
delete_null_pointer_checks_1 (block_reg, nonnull_avin, nonnull_avout, npi)
     unsigned int *block_reg;
     sbitmap *nonnull_avin;
     sbitmap *nonnull_avout;
     struct null_pointer_info *npi;
{
  int bb;
  int current_block;
  sbitmap *nonnull_local = npi->nonnull_local;
  sbitmap *nonnull_killed = npi->nonnull_killed;

  /* Compute local properties, nonnull and killed.  A register will have
     the nonnull property if at the end of the current block its value is
     known to be nonnull.  The killed property indicates that somewhere in
     the block any information we had about the register is killed.

     Note that a register can have both properties in a single block.  That
     indicates that it's killed, then later in the block a new value is
     computed.  */
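
  /* For illustration (hypothetical, not from the original source): within
     a single block,

       (set (reg 30) (reg 31))		-- kills what we knew about reg 30
       (set (reg 29) (mem (reg 30)))	-- dereference proves reg 30 nonnull

     leaves both nonnull_killed and nonnull_local set for reg 30: the old
     information is dead, yet the block ends with reg 30 known nonnull.  */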
  sbitmap_vector_zero (nonnull_local, n_basic_blocks);
  sbitmap_vector_zero (nonnull_killed, n_basic_blocks);

  for (current_block = 0; current_block < n_basic_blocks; current_block++)
    {
      rtx insn, stop_insn;

      /* Set the current block for invalidate_nonnull_info.  */
      npi->current_block = current_block;

      /* Scan each insn in the basic block looking for memory references and
	 register sets.  */
      stop_insn = NEXT_INSN (BLOCK_END (current_block));
      for (insn = BLOCK_HEAD (current_block);
	   insn != stop_insn;
	   insn = NEXT_INSN (insn))
	{
	  rtx set;
	  rtx reg;

	  /* Ignore anything that is not a normal insn.  */
	  if (! INSN_P (insn))
	    continue;

	  /* Basically ignore anything that is not a simple SET.  We do have
	     to make sure to invalidate nonnull_local and set nonnull_killed
	     for such insns though.  */
	  set = single_set (insn);
	  if (!set)
	    {
	      note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
	      continue;
	    }

	  /* See if we've got a usable memory load.  We handle it first
	     in case it uses its address register as a dest (which kills
	     the nonnull property).  */
	  if (GET_CODE (SET_SRC (set)) == MEM
	      && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
	      && REGNO (reg) >= npi->min_reg
	      && REGNO (reg) < npi->max_reg)
	    SET_BIT (nonnull_local[current_block],
		     REGNO (reg) - npi->min_reg);

	  /* Now invalidate stuff clobbered by this insn.  */
	  note_stores (PATTERN (insn), invalidate_nonnull_info, npi);

	  /* And handle stores; we do these last since any sets in INSN can
	     not kill the nonnull property if it is derived from a MEM
	     appearing in a SET_DEST.  */
	  if (GET_CODE (SET_DEST (set)) == MEM
	      && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
	      && REGNO (reg) >= npi->min_reg
	      && REGNO (reg) < npi->max_reg)
	    SET_BIT (nonnull_local[current_block],
		     REGNO (reg) - npi->min_reg);
	}
    }

  /* Now compute global properties based on the local properties.  This
     is a classic global availability algorithm.  */
  compute_available (nonnull_local, nonnull_killed,
		     nonnull_avout, nonnull_avin);

  /* Now look at each bb and see if it ends with a compare of a value
     against zero.  */
  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      rtx last_insn = BLOCK_END (bb);
      rtx condition, earliest;
      int compare_and_branch;

      /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
	 since BLOCK_REG[BB] is zero if this block did not end with a
	 comparison against zero, this condition works.  */
      if (block_reg[bb] < npi->min_reg
	  || block_reg[bb] >= npi->max_reg)
	continue;

      /* LAST_INSN is a conditional jump.  Get its condition.  */
      condition = get_condition (last_insn, &earliest);

      /* If we can't determine the condition then skip.  */
      if (! condition)
	continue;

      /* Is the register known to have a nonzero value?  */
      if (!TEST_BIT (nonnull_avout[bb], block_reg[bb] - npi->min_reg))
	continue;

      /* Try to compute whether the compare/branch at the loop end is one or
	 two instructions.  */
      if (earliest == last_insn)
	compare_and_branch = 1;
      else if (earliest == prev_nonnote_insn (last_insn))
	compare_and_branch = 2;
      else
	continue;

      /* We know the register in this comparison is nonnull at exit from
	 this block.  We can optimize this comparison.  */
      if (GET_CODE (condition) == NE)
	{
	  rtx new_jump;

	  new_jump = emit_jump_insn_before (gen_jump (JUMP_LABEL (last_insn)),
					    last_insn);
	  JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
	  LABEL_NUSES (JUMP_LABEL (new_jump))++;
	  emit_barrier_after (new_jump);
	}

      delete_insn (last_insn);
      if (compare_and_branch == 2)
	delete_insn (earliest);

      /* Don't check this block again.  (Note that BLOCK_END is
	 invalid here; we deleted the last instruction in the
	 block.)  */
      block_reg[bb] = 0;
    }
}

/* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
   at compile time.

   This is conceptually similar to global constant/copy propagation and
   classic global CSE (it even uses the same dataflow equations as cprop).

   If a register is used as a memory address with the form (mem (reg)), then
   we know that REG can not be zero at that point in the program.  Any
   instruction which sets REG "kills" this property.

   So, if every path leading to a conditional branch has an available memory
   reference of that form, then we know the register can not have the value
   zero at the conditional branch.

   So we merely need to compute the local properties and propagate that data
   around the cfg, then optimize where possible.

   We run this pass two times.  Once before CSE, then again after CSE.  This
   has proven to be the most profitable approach.  It is rare for new
   optimization opportunities of this nature to appear after the first CSE
   pass.

   This could probably be integrated with global cprop with a little work.  */
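
/* For illustration (hypothetical source, not from this file):

     int f (int *p)
     {
       int x = *p;	-- generates (mem (reg P)), so P is nonnull below
       if (p == 0)	-- this test can no longer succeed
	 return -1;
       return x;
     }

   Once the dereference is available on every path reaching the branch,
   an EQ test against zero can simply be deleted, and an NE test becomes
   an unconditional jump (see delete_null_pointer_checks_1 above).  */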

void
delete_null_pointer_checks (f)
     rtx f ATTRIBUTE_UNUSED;
{
  sbitmap *nonnull_avin, *nonnull_avout;
  unsigned int *block_reg;
  int bb;
  int reg;
  int regs_per_pass;
  int max_reg;
  struct null_pointer_info npi;

  /* If we have only a single block, then there's nothing to do.  */
  if (n_basic_blocks <= 1)
    return;

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    return;

  /* We need four bitmaps, each with a bit for each register in each
     basic block.  */
  max_reg = max_reg_num ();
  regs_per_pass = get_bitmap_width (4, n_basic_blocks, max_reg);

  /* Allocate bitmaps to hold local and global properties.  */
  npi.nonnull_local = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
  npi.nonnull_killed = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
  nonnull_avin = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
  nonnull_avout = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);

  /* Go through the basic blocks, seeing whether or not each block
     ends with a conditional branch whose condition is a comparison
     against zero.  Record the register compared in BLOCK_REG.  */
  block_reg = (unsigned int *) xcalloc (n_basic_blocks, sizeof (int));
  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      rtx last_insn = BLOCK_END (bb);
      rtx condition, earliest, reg;

      /* We only want conditional branches.  */
      if (GET_CODE (last_insn) != JUMP_INSN
	  || !any_condjump_p (last_insn)
	  || !onlyjump_p (last_insn))
	continue;

      /* LAST_INSN is a conditional jump.  Get its condition.  */
      condition = get_condition (last_insn, &earliest);

      /* If we were unable to get the condition, or it is not an equality
	 comparison against zero, then there's nothing we can do.  */
      if (!condition
	  || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
	  || GET_CODE (XEXP (condition, 1)) != CONST_INT
	  || (XEXP (condition, 1)
	      != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
	continue;

      /* We must be checking a register against zero.  */
      reg = XEXP (condition, 0);
      if (GET_CODE (reg) != REG)
	continue;

      block_reg[bb] = REGNO (reg);
    }

  /* Go through the algorithm for each block of registers.  */
  for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
    {
      npi.min_reg = reg;
      npi.max_reg = MIN (reg + regs_per_pass, max_reg);
      delete_null_pointer_checks_1 (block_reg, nonnull_avin,
				    nonnull_avout, &npi);
    }

  /* Free the table of registers compared at the end of every block.  */
  free (block_reg);

  /* Free bitmaps.  */
  free (npi.nonnull_local);
  free (npi.nonnull_killed);
  free (nonnull_avin);
  free (nonnull_avout);
}

/* Code Hoisting variables and subroutines.  */

/* Very busy expressions.  */
static sbitmap *hoist_vbein;
static sbitmap *hoist_vbeout;

/* Hoistable expressions.  */
static sbitmap *hoist_exprs;

/* Dominator bitmaps.  */
static sbitmap *dominators;

/* ??? We could compute post dominators and run this algorithm in
   reverse to perform tail merging; doing so would probably be
   more effective than the tail merging code in jump.c.

   It's unclear if tail merging could be run in parallel with
   code hoisting.  It would be nice.  */

/* Allocate vars used for code hoisting analysis.  */

static void
alloc_code_hoist_mem (n_blocks, n_exprs)
     int n_blocks, n_exprs;
{
  antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
  transp = sbitmap_vector_alloc (n_blocks, n_exprs);
  comp = sbitmap_vector_alloc (n_blocks, n_exprs);

  hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
  hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
  hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
  transpout = sbitmap_vector_alloc (n_blocks, n_exprs);

  dominators = sbitmap_vector_alloc (n_blocks, n_blocks);
}

/* Free vars used for code hoisting analysis.  */

static void
free_code_hoist_mem ()
{
  free (antloc);
  free (transp);
  free (comp);

  free (hoist_vbein);
  free (hoist_vbeout);
  free (hoist_exprs);
  free (transpout);

  free (dominators);
}

/* Compute the very busy expressions at entry/exit from each block.

   An expression is very busy if all paths from a given point
   compute the expression.  */
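
/* A sketch of the dataflow equations the routine below iterates to a
   fixed point (the loop leaves the last block's VBEOUT empty, since it
   has no successors within the function):

       VBEIN(bb)  = ANTLOC(bb) | (VBEOUT(bb) & TRANSP(bb))
       VBEOUT(bb) = intersection of VBEIN(s) over all successors s of bb  */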

static void
compute_code_hoist_vbeinout ()
{
  int bb, changed, passes;

  sbitmap_vector_zero (hoist_vbeout, n_basic_blocks);
  sbitmap_vector_zero (hoist_vbein, n_basic_blocks);

  passes = 0;
  changed = 1;

  while (changed)
    {
      changed = 0;

      /* We scan the blocks in the reverse order to speed up
         the convergence.  */
      for (bb = n_basic_blocks - 1; bb >= 0; bb--)
        {
          changed |= sbitmap_a_or_b_and_c (hoist_vbein[bb], antloc[bb],
                                           hoist_vbeout[bb], transp[bb]);
          if (bb != n_basic_blocks - 1)
            sbitmap_intersection_of_succs (hoist_vbeout[bb], hoist_vbein, bb);
        }

      passes++;
    }

  if (gcse_file)
    fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
}

/* Top level routine to do the dataflow analysis needed by code hoisting.  */

static void
compute_code_hoist_data ()
{
  compute_local_properties (transp, comp, antloc, 0);
  compute_transpout ();
  compute_code_hoist_vbeinout ();
  compute_flow_dominators (dominators, NULL);
  if (gcse_file)
    fprintf (gcse_file, "\n");
}

/* Determine if the expression identified by EXPR_INDEX would
   reach BB unimpaired if it was placed at the end of EXPR_BB.

   It's unclear exactly what Muchnick meant by "unimpaired".  It seems
   to me that the expression must either be computed or transparent in
   *every* block in the path(s) from EXPR_BB to BB.  Any other definition
   would allow the expression to be hoisted out of loops, even if
   the expression wasn't a loop invariant.

   Contrast this to reachability for PRE where an expression is
   considered reachable if *any* path reaches instead of *all*
   paths.  */
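
/* A hypothetical illustration: suppose block B1 dominates blocks B2 and
   B3, and both of those blocks compute the same expression:

             B1
            /  \
          B2    B3
         a+b    a+b

   Then a+b is very busy at the exit of B1 and reaches both computations
   unimpaired (every block on the paths is transparent), so hoist_code
   may evaluate a+b once at the end of B1 and replace the two copies with
   references to the new pseudo-reg.  */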

static int
hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited)
     int expr_bb;
     int expr_index;
     int bb;
     char *visited;
{
  edge pred;
  int visited_allocated_locally = 0;

  if (visited == NULL)
    {
      visited_allocated_locally = 1;
      visited = xcalloc (n_basic_blocks, 1);
    }

  visited[expr_bb] = 1;
  for (pred = BASIC_BLOCK (bb)->pred; pred != NULL; pred = pred->pred_next)
    {
      int pred_bb = pred->src->index;

      if (pred->src == ENTRY_BLOCK_PTR)
        break;
      else if (visited[pred_bb])
        continue;

      /* Does this predecessor generate this expression?  */
      else if (TEST_BIT (comp[pred_bb], expr_index))
        break;
      else if (! TEST_BIT (transp[pred_bb], expr_index))
        break;

      /* Not killed.  */
      else
        {
          visited[pred_bb] = 1;
          if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
                                           pred_bb, visited))
            break;
        }
    }
  if (visited_allocated_locally)
    free (visited);

  return (pred == NULL);
}

/* Actually perform code hoisting.  */

static void
hoist_code ()
{
  int bb, dominated;
  unsigned int i;
  struct expr **index_map;
  struct expr *expr;

  sbitmap_vector_zero (hoist_exprs, n_basic_blocks);

  /* Compute a mapping from expression number (`bitmap_index') to
     hash table entry.  */

  index_map = (struct expr **) xcalloc (n_exprs, sizeof (struct expr *));
  for (i = 0; i < expr_hash_table_size; i++)
    for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
      index_map[expr->bitmap_index] = expr;

  /* Walk over each basic block looking for potentially hoistable
     expressions; nothing gets hoisted from the entry block.  */
  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      int found = 0;
      int insn_inserted_p;

      /* Examine each expression that is very busy at the exit of this
         block.  These are the potentially hoistable expressions.  */
      for (i = 0; i < hoist_vbeout[bb]->n_bits; i++)
        {
          int hoistable = 0;

          if (TEST_BIT (hoist_vbeout[bb], i) && TEST_BIT (transpout[bb], i))
            {
              /* We've found a potentially hoistable expression, now
                 we look at every block BB dominates to see if it
                 computes the expression.  */
              for (dominated = 0; dominated < n_basic_blocks; dominated++)
                {
                  /* Ignore self dominance.  */
                  if (bb == dominated
                      || ! TEST_BIT (dominators[dominated], bb))
                    continue;

                  /* We've found a dominated block, now see if it computes
                     the busy expression and whether or not moving that
                     expression to the "beginning" of that block is safe.  */
                  if (!TEST_BIT (antloc[dominated], i))
                    continue;

                  /* Note if the expression would reach the dominated block
                     unimpaired if it was placed at the end of BB.

                     Keep track of how many times this expression is hoistable
                     from a dominated block into BB.  */
                  if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
                    hoistable++;
                }

              /* If we found more than one hoistable occurrence of this
                 expression, then note it in the bitmap of expressions to
                 hoist.  It makes no sense to hoist things which are computed
                 in only one BB, and doing so tends to pessimize register
                 allocation.  One could increase this value to try harder
                 to avoid any possible code expansion due to register
                 allocation issues; however experiments have shown that
                 the vast majority of hoistable expressions are only movable
                 from two dominated blocks, so raising this threshold is
                 likely to nullify any benefit we get from code hoisting.  */
              if (hoistable > 1)
                {
                  SET_BIT (hoist_exprs[bb], i);
                  found = 1;
                }
            }
        }

      /* If we found nothing to hoist, then quit now.  */
      if (! found)
        continue;

      /* Loop over all the hoistable expressions.  */
      for (i = 0; i < hoist_exprs[bb]->n_bits; i++)
        {
          /* We want to insert the expression into BB only once, so
             note when we've inserted it.  */
          insn_inserted_p = 0;

          /* Only consider the expressions which were marked as hoistable
             above; HOIST_EXPRS is exactly the set recorded by the first
             loop, so this test agrees with the tests above.  */
          if (TEST_BIT (hoist_exprs[bb], i))
            {
              /* We've found a hoistable expression, now we look at every
                 block BB dominates to see if it computes the
                 expression.  */
              for (dominated = 0; dominated < n_basic_blocks; dominated++)
                {
                  /* Ignore self dominance.  */
                  if (bb == dominated
                      || ! TEST_BIT (dominators[dominated], bb))
                    continue;

                  /* We've found a dominated block, now see if it computes
                     the busy expression and whether or not moving that
                     expression to the "beginning" of that block is safe.  */
                  if (!TEST_BIT (antloc[dominated], i))
                    continue;

                  /* The expression is computed in the dominated block and
                     it would be safe to compute it at the start of the
                     dominated block.  Now we have to determine if the
                     expression would reach the dominated block if it was
                     placed at the end of BB.  */
                  if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
                    {
                      struct expr *expr = index_map[i];
                      struct occr *occr = expr->antic_occr;
                      rtx insn;
                      rtx set;

                      /* Find the right occurrence of this expression.
                         (Check OCCR for null before looking at its insn.)  */
                      while (occr && BLOCK_NUM (occr->insn) != dominated)
                        occr = occr->next;

                      /* Should never happen.  */
                      if (!occr)
                        abort ();

                      insn = occr->insn;

                      set = single_set (insn);
                      if (! set)
                        abort ();

                      /* Create a pseudo-reg to store the result of reaching
                         expressions into.  Get the mode for the new pseudo
                         from the mode of the original destination pseudo.  */
                      if (expr->reaching_reg == NULL)
                        expr->reaching_reg
                          = gen_reg_rtx (GET_MODE (SET_DEST (set)));

                      /* In theory this should never fail since we're creating
                         a reg->reg copy.

                         However, on the x86 some of the movXX patterns
                         actually contain clobbers of scratch regs.  This may
                         cause the insn created by validate_change to not
                         match any pattern and thus cause validate_change to
                         fail.  */
                      if (validate_change (insn, &SET_SRC (set),
                                           expr->reaching_reg, 0))
                        {
                          occr->deleted_p = 1;
                          if (!insn_inserted_p)
                            {
                              insert_insn_end_bb (index_map[i], bb, 0);
                              insn_inserted_p = 1;
                            }
                        }
                    }
                }
            }
        }
    }

  free (index_map);
}

/* Top level routine to perform one code hoisting (aka unification) pass.

   Return non-zero if a change was made.  */

static int
one_code_hoisting_pass ()
{
  /* ??? hoist_code does not report whether it changed anything, so this
     currently always returns zero.  */
  int changed = 0;

  alloc_expr_hash_table (max_cuid);
  compute_expr_hash_table ();
  if (gcse_file)
    dump_hash_table (gcse_file, "Code Hoisting Expressions", expr_hash_table,
                     expr_hash_table_size, n_exprs);

  if (n_exprs > 0)
    {
      alloc_code_hoist_mem (n_basic_blocks, n_exprs);
      compute_code_hoist_data ();
      hoist_code ();
      free_code_hoist_mem ();
    }

  free_expr_hash_table ();

  return changed;
}