/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass

*/

/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/

#include "config.h"
#include "system.h"
#include "toplev.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "params.h"
#include "cselib.h"

#include "obstack.h"

/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */

/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

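   For example (an illustrative sketch; the register numbers are made up),
   a computation such as

     (set (reg 105) (plus (reg 100) (const_int 4)))

   is a candidate because its source is an expression assigned to a single
   pseudo-reg, whereas a store like (set (mem ...) (reg ...)) or a plain
   register copy is not something we try to GCSE.
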
   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found that doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.

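   As an illustrative sketch (block and register numbers are made up),
   suppose (plus (reg 50) (reg 51)) is computed in blocks 2 and 4, block 4
   is also reachable from block 3 where it is not computed, and nothing
   modifies reg 50 or reg 51 in between.  The computation in block 4 is then
   partially redundant.  PRE inserts

     (set (reg 200) (plus (reg 50) (reg 51)))

   on the edge from block 3, also copies the result of the existing
   computation in block 2 into (reg 200), and replaces the redundant
   computation in block 4 with a copy from (reg 200), the expression's
   reaching_reg.
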
   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is created by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle things can
   be rearranged.

   Help stamp out big monolithic functions!  */
\f
/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

/* Non-zero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy_p[(int) NUM_MACHINE_MODES];

/* Non-zero if can_copy_p has been initialized.  */
static int can_copy_init_p;

struct reg_use {rtx reg_rtx; };

/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Non-zero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Non-zero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};

/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

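/* An illustrative sketch (not code from this file) of how a bucket is
   walked; lookup_expr below does essentially this:

     for (expr = expr_hash_table[hash]; expr != NULL;
          expr = expr->next_same_hash)
       if (expr_equiv_p (expr->expr, x))
         break;  */
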
/* Total size of the expression hash table, in elements.  */
static unsigned int expr_hash_table_size;

/* The table itself.
   This is an array of `expr_hash_table_size' elements.  */
static struct expr **expr_hash_table;

/* Total size of the copy propagation hash table, in elements.  */
static unsigned int set_hash_table_size;

/* The table itself.
   This is an array of `set_hash_table_size' elements.  */
static struct expr **set_hash_table;

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

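/* A usage sketch (the insn variables here are hypothetical): because cuids
   increase monotonically over the insn stream and have no gaps,

     INSN_CUID (a) < INSN_CUID (b)

   tests whether insn A appears before insn B; oprs_unchanged_p and
   load_killed_in_block_p below rely on exactly this ordering.  */
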
/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])

/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Maximum number of cse-able expressions found.  */
static int n_exprs;

/* Maximum number of assignments for copy propagation found.  */
static int n_sets;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100

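/* A usage sketch (illustrative only): the insns that set pseudo REGNO are
   walked as

     struct reg_set *r;
     for (r = reg_set_table[regno]; r != NULL; r = r->next)
       ... r->insn is one insn that sets REGNO ...

   which is the kind of traversal compute_transp performs when testing
   whether an expression is transparent in a block.  */
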
/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by
   anything except itself.  (i.e., loads and stores to a single location).
   We can then allow movement of these MEM refs with a little special
   allowance.  (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be movable towards the exit block.  */

struct ls_expr
{
  struct expr * expr;           /* Gcse expression reference for LM.  */
  rtx pattern;                  /* Pattern of this mem.  */
  rtx loads;                    /* INSN list of loads seen.  */
  rtx stores;                   /* INSN list of stores seen.  */
  struct ls_expr * next;        /* Next in the list.  */
  int invalid;                  /* Invalid for some reason.  */
  int index;                    /* If it maps to a bitmap index.  */
  int hash_index;               /* Index when in a hash table.  */
  rtx reaching_reg;             /* Register to use when re-writing.  */
};

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

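/* An illustrative example (register numbers are made up): if every store to
   a tracked MEM is rewritten to first copy its value into the expression's
   reaching_reg, say

     (set (reg 300) (value))  followed by  (set (mem) (reg 300))

   then every load of that MEM can be replaced by a use of (reg 300), and
   the stores themselves become candidates for sinking towards the exit
   block.  */
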
/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number for a list of insns which modify
   memory within that block.  */
static rtx * modify_mem_list;
bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;
bitmap canon_modify_mem_list_set;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;
\f
/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays.  i.e.
   rd_kill[block_num][cuid_num]
   ae_kill[block_num][expr_num]  */

/* For reaching defs */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* for available exprs */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;

/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  basic_block current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};
\f
static void compute_can_copy PARAMS ((void));
static char *gmalloc PARAMS ((unsigned int));
static char *grealloc PARAMS ((char *, unsigned int));
static char *gcse_alloc PARAMS ((unsigned long));
static void alloc_gcse_mem PARAMS ((rtx));
static void free_gcse_mem PARAMS ((void));
static void alloc_reg_set_mem PARAMS ((int));
static void free_reg_set_mem PARAMS ((void));
static int get_bitmap_width PARAMS ((int, int, int));
static void record_one_set PARAMS ((int, rtx));
static void record_set_info PARAMS ((rtx, rtx, void *));
static void compute_sets PARAMS ((rtx));
static void hash_scan_insn PARAMS ((rtx, int, int));
static void hash_scan_set PARAMS ((rtx, rtx, int));
static void hash_scan_clobber PARAMS ((rtx, rtx));
static void hash_scan_call PARAMS ((rtx, rtx));
static int want_to_gcse_p PARAMS ((rtx));
static int oprs_unchanged_p PARAMS ((rtx, rtx, int));
static int oprs_anticipatable_p PARAMS ((rtx, rtx));
static int oprs_available_p PARAMS ((rtx, rtx));
static void insert_expr_in_table PARAMS ((rtx, enum machine_mode, rtx,
                                          int, int));
static void insert_set_in_table PARAMS ((rtx, rtx));
static unsigned int hash_expr PARAMS ((rtx, enum machine_mode, int *, int));
static unsigned int hash_expr_1 PARAMS ((rtx, enum machine_mode, int *));
static unsigned int hash_string_1 PARAMS ((const char *));
static unsigned int hash_set PARAMS ((int, int));
static int expr_equiv_p PARAMS ((rtx, rtx));
static void record_last_reg_set_info PARAMS ((rtx, int));
static void record_last_mem_set_info PARAMS ((rtx));
static void record_last_set_info PARAMS ((rtx, rtx, void *));
static void compute_hash_table PARAMS ((int));
static void alloc_set_hash_table PARAMS ((int));
static void free_set_hash_table PARAMS ((void));
static void compute_set_hash_table PARAMS ((void));
static void alloc_expr_hash_table PARAMS ((unsigned int));
static void free_expr_hash_table PARAMS ((void));
static void compute_expr_hash_table PARAMS ((void));
static void dump_hash_table PARAMS ((FILE *, const char *, struct expr **,
                                     int, int));
static struct expr *lookup_expr PARAMS ((rtx));
static struct expr *lookup_set PARAMS ((unsigned int, rtx));
static struct expr *next_set PARAMS ((unsigned int, struct expr *));
static void reset_opr_set_tables PARAMS ((void));
static int oprs_not_set_p PARAMS ((rtx, rtx));
static void mark_call PARAMS ((rtx));
static void mark_set PARAMS ((rtx, rtx));
static void mark_clobber PARAMS ((rtx, rtx));
static void mark_oprs_set PARAMS ((rtx));
static void alloc_cprop_mem PARAMS ((int, int));
static void free_cprop_mem PARAMS ((void));
static void compute_transp PARAMS ((rtx, int, sbitmap *, int));
static void compute_transpout PARAMS ((void));
static void compute_local_properties PARAMS ((sbitmap *, sbitmap *, sbitmap *,
                                              int));
static void compute_cprop_data PARAMS ((void));
static void find_used_regs PARAMS ((rtx *, void *));
static int try_replace_reg PARAMS ((rtx, rtx, rtx));
static struct expr *find_avail_set PARAMS ((int, rtx));
static int cprop_jump PARAMS ((basic_block, rtx, rtx, rtx, rtx));
static void mems_conflict_for_gcse_p PARAMS ((rtx, rtx, void *));
static int load_killed_in_block_p PARAMS ((basic_block, int, rtx, int));
static void canon_list_insert PARAMS ((rtx, rtx, void *));
static int cprop_insn PARAMS ((rtx, int));
static int cprop PARAMS ((int));
static int one_cprop_pass PARAMS ((int, int));
static bool constprop_register PARAMS ((rtx, rtx, rtx, int));
static struct expr *find_bypass_set PARAMS ((int, int));
static int bypass_block PARAMS ((basic_block, rtx, rtx));
static int bypass_conditional_jumps PARAMS ((void));
static void alloc_pre_mem PARAMS ((int, int));
static void free_pre_mem PARAMS ((void));
static void compute_pre_data PARAMS ((void));
static int pre_expr_reaches_here_p PARAMS ((basic_block, struct expr *,
                                            basic_block));
static void insert_insn_end_bb PARAMS ((struct expr *, basic_block, int));
static void pre_insert_copy_insn PARAMS ((struct expr *, rtx));
static void pre_insert_copies PARAMS ((void));
static int pre_delete PARAMS ((void));
static int pre_gcse PARAMS ((void));
static int one_pre_gcse_pass PARAMS ((int));
static void add_label_notes PARAMS ((rtx, rtx));
static void alloc_code_hoist_mem PARAMS ((int, int));
static void free_code_hoist_mem PARAMS ((void));
static void compute_code_hoist_vbeinout PARAMS ((void));
static void compute_code_hoist_data PARAMS ((void));
static int hoist_expr_reaches_here_p PARAMS ((basic_block, int, basic_block,
                                              char *));
static void hoist_code PARAMS ((void));
static int one_code_hoisting_pass PARAMS ((void));
static void alloc_rd_mem PARAMS ((int, int));
static void free_rd_mem PARAMS ((void));
static void handle_rd_kill_set PARAMS ((rtx, int, basic_block));
static void compute_kill_rd PARAMS ((void));
static void compute_rd PARAMS ((void));
static void alloc_avail_expr_mem PARAMS ((int, int));
static void free_avail_expr_mem PARAMS ((void));
static void compute_ae_gen PARAMS ((void));
static int expr_killed_p PARAMS ((rtx, basic_block));
static void compute_ae_kill PARAMS ((sbitmap *, sbitmap *));
static int expr_reaches_here_p PARAMS ((struct occr *, struct expr *,
                                        basic_block, int));
static rtx computing_insn PARAMS ((struct expr *, rtx));
static int def_reaches_here_p PARAMS ((rtx, rtx));
static int can_disregard_other_sets PARAMS ((struct reg_set **, rtx, int));
static int handle_avail_expr PARAMS ((rtx, struct expr *));
static int classic_gcse PARAMS ((void));
static int one_classic_gcse_pass PARAMS ((int));
static void invalidate_nonnull_info PARAMS ((rtx, rtx, void *));
static int delete_null_pointer_checks_1 PARAMS ((unsigned int *,
                                                 sbitmap *, sbitmap *,
                                                 struct null_pointer_info *));
static rtx process_insert_insn PARAMS ((struct expr *));
static int pre_edge_insert PARAMS ((struct edge_list *, struct expr **));
static int expr_reaches_here_p_work PARAMS ((struct occr *, struct expr *,
                                             basic_block, int, char *));
static int pre_expr_reaches_here_p_work PARAMS ((basic_block, struct expr *,
                                                 basic_block, char *));
static struct ls_expr * ldst_entry PARAMS ((rtx));
static void free_ldst_entry PARAMS ((struct ls_expr *));
static void free_ldst_mems PARAMS ((void));
static void print_ldst_list PARAMS ((FILE *));
static struct ls_expr * find_rtx_in_ldst PARAMS ((rtx));
static int enumerate_ldsts PARAMS ((void));
static inline struct ls_expr * first_ls_expr PARAMS ((void));
static inline struct ls_expr * next_ls_expr PARAMS ((struct ls_expr *));
static int simple_mem PARAMS ((rtx));
static void invalidate_any_buried_refs PARAMS ((rtx));
static void compute_ld_motion_mems PARAMS ((void));
static void trim_ld_motion_mems PARAMS ((void));
static void update_ld_motion_stores PARAMS ((struct expr *));
static void reg_set_info PARAMS ((rtx, rtx, void *));
static int store_ops_ok PARAMS ((rtx, basic_block));
static void find_moveable_store PARAMS ((rtx));
static int compute_store_table PARAMS ((void));
static int load_kills_store PARAMS ((rtx, rtx));
static int find_loads PARAMS ((rtx, rtx));
static int store_killed_in_insn PARAMS ((rtx, rtx));
static int store_killed_after PARAMS ((rtx, rtx, basic_block));
static int store_killed_before PARAMS ((rtx, rtx, basic_block));
static void build_store_vectors PARAMS ((void));
static void insert_insn_start_bb PARAMS ((rtx, basic_block));
static int insert_store PARAMS ((struct ls_expr *, edge));
static void replace_store_insn PARAMS ((rtx, rtx, basic_block));
static void delete_store PARAMS ((struct ls_expr *, basic_block));
static void free_store_memory PARAMS ((void));
static void store_motion PARAMS ((void));
static void free_insn_expr_list_list PARAMS ((rtx *));
static void clear_modify_mem_tables PARAMS ((void));
static void free_modify_mem_tables PARAMS ((void));
static rtx gcse_emit_move_after PARAMS ((rtx, rtx, rtx));
static bool do_local_cprop PARAMS ((rtx, rtx, int));
static void local_cprop_pass PARAMS ((int));
\f
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (f, file)
     rtx f;
     FILE *file;
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* Insertion of instructions on edges can create new basic blocks; we
     need the original basic block count so that we can properly deallocate
     arrays sized on the number of basic blocks originally in the cfg.  */
  int orig_bb_count;
  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  orig_bb_count = n_basic_blocks;
  /* Return if there's nothing to do.  */
  if (n_basic_blocks <= 1)
    return 0;

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    {
      if (warn_disabled_optimization)
        warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
                 n_basic_blocks, n_edges / n_basic_blocks);
      return 0;
    }

  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_gcse_regno)
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      if (warn_disabled_optimization)
        warning ("GCSE disabled: %d basic blocks and %d registers",
                 n_basic_blocks, max_gcse_regno);

      return 0;
    }

  /* See what modes support reg/reg copy operations.  */
  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = 1;
    }

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();
  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
        fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
         and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
         during this pass.  */
      changed = one_cprop_pass (pass + 1, 0);

      if (optimize_size)
        changed |= one_classic_gcse_pass (pass + 1);
      else
        {
          changed |= one_pre_gcse_pass (pass + 1);
          /* We may have just created new basic blocks.  Release and
             recompute various things which are sized on the number of
             basic blocks.  */
          if (changed)
            {
              free_modify_mem_tables ();
              modify_mem_list
                = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
              canon_modify_mem_list
                = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
              memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
              memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
              orig_bb_count = n_basic_blocks;
            }
          free_reg_set_mem ();
          alloc_reg_set_mem (max_reg_num ());
          compute_sets (f);
          run_jump_opt_after_gcse = 1;
        }

      if (max_pass_bytes < bytes_used)
        max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
         not re-use the existing allocated memory because the tables
         will not have info for the insns or registers created by
         partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
         for code size -- it rarely makes programs faster, and can make
         them bigger if we did partial redundancy elimination (when optimizing
         for space, we use a classic gcse algorithm instead of partial
         redundancy algorithms).  */
      if (optimize_size)
        {
          max_gcse_regno = max_reg_num ();
          alloc_gcse_mem (f);
          changed |= one_code_hoisting_pass ();
          free_gcse_mem ();

          if (max_pass_bytes < bytes_used)
            max_pass_bytes = bytes_used;
        }

      if (file)
        {
          fprintf (file, "\n");
          fflush (file);
        }

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
               current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
               pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();
  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  /* Store motion disabled until it is fixed.  */
  if (0 && !optimize_size && flag_gcse_sm)
    store_motion ();
  /* Record where pseudo-registers are set.  */
  return run_jump_opt_after_gcse;
}
\f
/* Misc. utilities.  */

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy ()
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy_p, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
        can_copy_p[i] = 0;
#else
        reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
        insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
        if (recog (PATTERN (insn), insn, NULL) >= 0)
          can_copy_p[i] = 1;
#endif
      }
    else
      can_copy_p[i] = 1;

  end_sequence ();
}
\f
/* Cover function to xmalloc to record bytes allocated.  */

static char *
gmalloc (size)
     unsigned int size;
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static char *
grealloc (ptr, size)
     char *ptr;
     unsigned int size;
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static char *
gcse_alloc (size)
     unsigned long size;
{
  bytes_used += size;
  return (char *) obstack_alloc (&gcse_obstack, size);
}

/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (f)
     rtx f;
{
  int i, n;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  n = (max_uid + 1) * sizeof (int);
  uid_cuid = (int *) gmalloc (n);
  memset ((char *) uid_cuid, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        uid_cuid[INSN_UID (insn)] = i++;
      else
        uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  n = (max_cuid + 1) * sizeof (rtx);
  cuid_insn = (rtx *) gmalloc (n);
  memset ((char *) cuid_insn, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_XMALLOC ();

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
                                                       max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
  canon_modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
  memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
  memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
  modify_mem_list_set = BITMAP_XMALLOC ();
  canon_modify_mem_list_set = BITMAP_XMALLOC ();
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem ()
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_XFREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_XFREE (modify_mem_list_set);
  BITMAP_XFREE (canon_modify_mem_list_set);
}

/* Many of the global optimization algorithms work by solving dataflow
   equations for various expressions.  Initially, some local value is
   computed for each expression in each block.  Then, the values across the
   various blocks are combined (by following flow graph edges) to arrive at
   global values.  Conceptually, each set of equations is independent.  We
   may therefore solve all the equations in parallel, solve them one at a
   time, or pick any intermediate approach.

   When you're going to need N two-dimensional bitmaps, each X (say, the
   number of blocks) by Y (say, the number of expressions), call this
   function.  It's not important what X and Y represent; only that Y
   correspond to the things that can be done in parallel.  This function will
   return an appropriate chunking factor C; you should solve C sets of
   equations in parallel.  By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less space.  */

static int
get_bitmap_width (n, x, y)
     int n;
     int x;
     int y;
{
  /* It's not really worth figuring out *exactly* how much memory will
     be used by a particular choice.  The important thing is to get
     something approximately right.  */
  size_t max_bitmap_memory = 10 * 1024 * 1024;

  /* The number of bytes we'd use for a single column of minimum
     width.  */
  size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);

  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;

  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
                             / column_size);
}
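
/* A usage sketch (the numbers are hypothetical): a pass needing two
   last_basic_block x n_exprs bitmaps could call
   get_bitmap_width (2, last_basic_block, n_exprs) and then solve that many
   expressions' equations at a time instead of all n_exprs at once.  */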
\f
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

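   For example (an illustrative sketch), in a block containing

     a = b + c;
     b = 0;
     d = b + c;

   the expression b+c is locally anticipatable (its first occurrence comes
   before b is modified) and computed (its last occurrence is not followed
   by a modification of b or c), but it is not transparent (b is set within
   the block).
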
   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   SETP controls which hash table to look at.  If zero, this routine looks at
   the expr hash table; if nonzero this routine looks at the set hash table.
   Additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (transp, comp, antloc, setp)
     sbitmap *transp;
     sbitmap *comp;
     sbitmap *antloc;
     int setp;
{
  unsigned int i, hash_table_size;
  struct expr **hash_table;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (setp)
        sbitmap_vector_zero (transp, last_basic_block);
      else
        sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  /* We use the same code for cprop, pre and hoisting.  For cprop
     we care about the set hash table, for pre and hoisting we
     care about the expr hash table.  */
  hash_table_size = setp ? set_hash_table_size : expr_hash_table_size;
  hash_table = setp ? set_hash_table : expr_hash_table;

  for (i = 0; i < hash_table_size; i++)
    {
      struct expr *expr;

      for (expr = hash_table[i]; expr != NULL; expr = expr->next_same_hash)
        {
          int indx = expr->bitmap_index;
          struct occr *occr;

          /* The expression is transparent in this block if it is not killed.
             We start by assuming all are transparent [none are killed], and
             then reset the bits for those that are.  */
          if (transp)
            compute_transp (expr->expr, indx, transp, setp);

          /* The occurrences recorded in antic_occr are exactly those that
             we want to set to non-zero in ANTLOC.  */
          if (antloc)
            for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->deleted_p = 0;
              }

          /* The occurrences recorded in avail_occr are exactly those that
             we want to set to non-zero in COMP.  */
          if (comp)
            for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->copied_p = 0;
              }

          /* While we're scanning the table, this is a good place to
             initialize this.  */
          expr->reaching_reg = 0;
        }
    }
}
\f
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (n_regs)
     int n_regs;
{
  unsigned int n;

  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  n = reg_set_table_size * sizeof (struct reg_set *);
  reg_set_table = (struct reg_set **) gmalloc (n);
  memset ((char *) reg_set_table, 0, n);

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem ()
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}

/* Record REGNO in the reg_set table.  */

static void
record_one_set (regno, insn)
     int regno;
     rtx insn;
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table
        = (struct reg_set **) grealloc ((char *) reg_set_table,
                                        new_size * sizeof (struct reg_set *));
      memset ((char *) (reg_set_table + reg_set_table_size), 0,
              (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
                                                   sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}

/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (dest, setter, data)
     rtx dest, setter ATTRIBUTE_UNUSED;
     void *data;
{
  rtx record_set_insn = (rtx) data;

  if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (f)
     rtx f;
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), record_set_info, insn);
}
\f
1288/* Hash table support. */
1289
80c29cc4
RZ
1290struct reg_avail_info
1291{
e0082a72 1292 basic_block last_bb;
80c29cc4
RZ
1293 int first_set;
1294 int last_set;
1295};
1296
1297static struct reg_avail_info *reg_avail_info;
e0082a72 1298static basic_block current_bb;
7506f491 1299
7506f491 1300
fb0c0a12
RK
1301/* See whether X, the source of a set, is something we want to consider for
1302 GCSE. */
7506f491 1303
e2500fed 1304static GTY(()) rtx test_insn;
7506f491
DE
1305static int
1306want_to_gcse_p (x)
1307 rtx x;
1308{
fb0c0a12
RK
1309 int num_clobbers = 0;
1310 int icode;
1311
c4c81601 1312 switch (GET_CODE (x))
7506f491
DE
1313 {
1314 case REG:
1315 case SUBREG:
1316 case CONST_INT:
1317 case CONST_DOUBLE:
69ef87e2 1318 case CONST_VECTOR:
7506f491
DE
1319 case CALL:
1320 return 0;
1321
1322 default:
1323 break;
1324 }
1325
fb0c0a12
RK
1326 /* If this is a valid operand, we are OK. If it's VOIDmode, we aren't. */
1327 if (general_operand (x, GET_MODE (x)))
1328 return 1;
1329 else if (GET_MODE (x) == VOIDmode)
1330 return 0;
1331
1332 /* Otherwise, check if we can make a valid insn from it. First initialize
1333 our test insn if we haven't already. */
1334 if (test_insn == 0)
1335 {
1336 test_insn
1337 = make_insn_raw (gen_rtx_SET (VOIDmode,
1338 gen_rtx_REG (word_mode,
1339 FIRST_PSEUDO_REGISTER * 2),
1340 const0_rtx));
1341 NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
fb0c0a12
RK
1342 }
1343
1344 /* Now make an insn like the one we would make when GCSE'ing and see if
1345 valid. */
1346 PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
1347 SET_SRC (PATTERN (test_insn)) = x;
1348 return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
1349 && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
7506f491
DE
1350}
1351
1352/* Return non-zero if the operands of expression X are unchanged from the
1353 start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
1354 or from INSN to the end of INSN's basic block (if AVAIL_P != 0). */
1355
1356static int
1357oprs_unchanged_p (x, insn, avail_p)
1358 rtx x, insn;
1359 int avail_p;
1360{
c4c81601 1361 int i, j;
7506f491 1362 enum rtx_code code;
6f7d635c 1363 const char *fmt;
7506f491 1364
7506f491
DE
1365 if (x == 0)
1366 return 1;
1367
1368 code = GET_CODE (x);
1369 switch (code)
1370 {
1371 case REG:
80c29cc4
RZ
1372 {
1373 struct reg_avail_info *info = &reg_avail_info[REGNO (x)];
1374
1375 if (info->last_bb != current_bb)
1376 return 1;
589005ff 1377 if (avail_p)
80c29cc4
RZ
1378 return info->last_set < INSN_CUID (insn);
1379 else
1380 return info->first_set >= INSN_CUID (insn);
1381 }
7506f491
DE
1382
1383 case MEM:
e0082a72 1384 if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
a13d4ebf
AM
1385 x, avail_p))
1386 return 0;
7506f491 1387 else
c4c81601 1388 return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);
7506f491
DE
1389
1390 case PRE_DEC:
1391 case PRE_INC:
1392 case POST_DEC:
1393 case POST_INC:
4b983fdc
RH
1394 case PRE_MODIFY:
1395 case POST_MODIFY:
7506f491
DE
1396 return 0;
1397
1398 case PC:
1399 case CC0: /*FIXME*/
1400 case CONST:
1401 case CONST_INT:
1402 case CONST_DOUBLE:
69ef87e2 1403 case CONST_VECTOR:
7506f491
DE
1404 case SYMBOL_REF:
1405 case LABEL_REF:
1406 case ADDR_VEC:
1407 case ADDR_DIFF_VEC:
1408 return 1;
1409
1410 default:
1411 break;
1412 }
1413
c4c81601 1414 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
1415 {
1416 if (fmt[i] == 'e')
1417 {
c4c81601
RK
1418 /* If we are about to do the last recursive call needed at this
1419 level, change it into iteration. This function is called enough
1420 to be worth it. */
7506f491 1421 if (i == 0)
c4c81601
RK
1422 return oprs_unchanged_p (XEXP (x, i), insn, avail_p);
1423
1424 else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
7506f491
DE
1425 return 0;
1426 }
1427 else if (fmt[i] == 'E')
c4c81601
RK
1428 for (j = 0; j < XVECLEN (x, i); j++)
1429 if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
1430 return 0;
7506f491
DE
1431 }
1432
1433 return 1;
1434}
1435
a13d4ebf
AM
1436/* Used for communication between mems_conflict_for_gcse_p and
1437 load_killed_in_block_p. Nonzero if mems_conflict_for_gcse_p finds a
1438 conflict between two memory references. */
1439static int gcse_mems_conflict_p;
1440
1441/* Used for communication between mems_conflict_for_gcse_p and
1442 load_killed_in_block_p. A memory reference for a load instruction,
1443 mems_conflict_for_gcse_p will see if a memory store conflicts with
1444 this memory load. */
1445static rtx gcse_mem_operand;
1446
1447/* DEST is the output of an instruction. If it is a memory reference, and
1448 possibly conflicts with the load found in gcse_mem_operand, then set
1449 gcse_mems_conflict_p to a nonzero value. */
1450
1451static void
1452mems_conflict_for_gcse_p (dest, setter, data)
1453 rtx dest, setter ATTRIBUTE_UNUSED;
1454 void *data ATTRIBUTE_UNUSED;
1455{
1456 while (GET_CODE (dest) == SUBREG
1457 || GET_CODE (dest) == ZERO_EXTRACT
1458 || GET_CODE (dest) == SIGN_EXTRACT
1459 || GET_CODE (dest) == STRICT_LOW_PART)
1460 dest = XEXP (dest, 0);
1461
1462 /* If DEST is not a MEM, then it will not conflict with the load. Note
1463 that function calls are assumed to clobber memory, but are handled
1464 elsewhere. */
1465 if (GET_CODE (dest) != MEM)
1466 return;
aaa4ca30 1467
a13d4ebf 1468 /* If we are setting a MEM in our list of specially recognized MEMs,
589005ff
KH
1469 don't mark it as killed this time. */
1470
a13d4ebf
AM
1471 if (dest == gcse_mem_operand && pre_ldst_mems != NULL)
1472 {
1473 if (!find_rtx_in_ldst (dest))
1474 gcse_mems_conflict_p = 1;
1475 return;
1476 }
aaa4ca30 1477
a13d4ebf
AM
1478 if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
1479 rtx_addr_varies_p))
1480 gcse_mems_conflict_p = 1;
1481}
1482
1483/* Return nonzero if the expression in X (a memory reference) is killed
1484 in block BB before or after the insn with the CUID in UID_LIMIT.
1485 AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
1486 before UID_LIMIT.
1487
1488 To check the entire block, set UID_LIMIT to max_uid + 1 and
1489 AVAIL_P to 0. */
1490
1491static int
1492load_killed_in_block_p (bb, uid_limit, x, avail_p)
e2d2ed72 1493 basic_block bb;
a13d4ebf
AM
1494 int uid_limit;
1495 rtx x;
1496 int avail_p;
1497{
0b17ab2f 1498 rtx list_entry = modify_mem_list[bb->index];
a13d4ebf
AM
1499 while (list_entry)
1500 {
1501 rtx setter;
1502 /* Ignore entries in the list that do not apply. */
1503 if ((avail_p
1504 && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
1505 || (! avail_p
1506 && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
1507 {
1508 list_entry = XEXP (list_entry, 1);
1509 continue;
1510 }
1511
1512 setter = XEXP (list_entry, 0);
1513
1514 /* If SETTER is a call everything is clobbered. Note that calls
1515 to pure functions are never put on the list, so we need not
1516 worry about them. */
1517 if (GET_CODE (setter) == CALL_INSN)
1518 return 1;
1519
1520 /* SETTER must be an INSN of some kind that sets memory. Call
589005ff 1521 note_stores to examine each hunk of memory that is modified.
a13d4ebf
AM
1522
1523 The note_stores interface is pretty limited, so we have to
1524 communicate via global variables. Yuk. */
1525 gcse_mem_operand = x;
1526 gcse_mems_conflict_p = 0;
1527 note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
1528 if (gcse_mems_conflict_p)
1529 return 1;
1530 list_entry = XEXP (list_entry, 1);
1531 }
1532 return 0;
1533}
1534
7506f491
DE
1535/* Return non-zero if the operands of expression X are unchanged from
1536 the start of INSN's basic block up to but not including INSN. */
1537
1538static int
1539oprs_anticipatable_p (x, insn)
1540 rtx x, insn;
1541{
1542 return oprs_unchanged_p (x, insn, 0);
1543}
1544
1545/* Return non-zero if the operands of expression X are unchanged from
1546 INSN to the end of INSN's basic block. */
1547
1548static int
1549oprs_available_p (x, insn)
1550 rtx x, insn;
1551{
1552 return oprs_unchanged_p (x, insn, 1);
1553}
1554
1555/* Hash expression X.
c4c81601
RK
1556
1557 MODE is only used if X is a CONST_INT. DO_NOT_RECORD_P is a boolean
1558 indicating if a volatile operand is found or if the expression contains
1559 something we don't want to insert in the table.
7506f491
DE
1560
1561 ??? One might want to merge this with canon_hash. Later. */
1562
1563static unsigned int
1564hash_expr (x, mode, do_not_record_p, hash_table_size)
1565 rtx x;
1566 enum machine_mode mode;
1567 int *do_not_record_p;
1568 int hash_table_size;
1569{
1570 unsigned int hash;
1571
1572 *do_not_record_p = 0;
1573
1574 hash = hash_expr_1 (x, mode, do_not_record_p);
1575 return hash % hash_table_size;
1576}
172890a2 1577
6462bb43 1578/* Hash a string. Just add its bytes up. */
172890a2 1579
6462bb43
AO
1580static inline unsigned
1581hash_string_1 (ps)
1582 const char *ps;
1583{
1584 unsigned hash = 0;
8e42ace1 1585 const unsigned char *p = (const unsigned char *) ps;
589005ff 1586
6462bb43
AO
1587 if (p)
1588 while (*p)
1589 hash += *p++;
1590
1591 return hash;
1592}
7506f491
DE
1593
1594/* Subroutine of hash_expr to do the actual work. */
1595
1596static unsigned int
1597hash_expr_1 (x, mode, do_not_record_p)
1598 rtx x;
1599 enum machine_mode mode;
1600 int *do_not_record_p;
1601{
1602 int i, j;
1603 unsigned hash = 0;
1604 enum rtx_code code;
6f7d635c 1605 const char *fmt;
7506f491 1606
c4c81601
RK
1607 /* Used to turn recursion into iteration. We can't rely on GCC's
1608 tail-recursion elimination since we need to keep accumulating values
1609 in HASH. */
7506f491
DE
1610
1611 if (x == 0)
1612 return hash;
1613
c4c81601 1614 repeat:
7506f491
DE
1615 code = GET_CODE (x);
1616 switch (code)
1617 {
1618 case REG:
c4c81601
RK
1619 hash += ((unsigned int) REG << 7) + REGNO (x);
1620 return hash;
7506f491
DE
1621
1622 case CONST_INT:
c4c81601
RK
1623 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
1624 + (unsigned int) INTVAL (x));
1625 return hash;
7506f491
DE
1626
1627 case CONST_DOUBLE:
1628 /* This is like the general case, except that it only counts
1629 the integers representing the constant. */
c4c81601 1630 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
7506f491
DE
1631 if (GET_MODE (x) != VOIDmode)
1632 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
c4c81601 1633 hash += (unsigned int) XWINT (x, i);
7506f491 1634 else
c4c81601
RK
1635 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
1636 + (unsigned int) CONST_DOUBLE_HIGH (x));
7506f491
DE
1637 return hash;
1638
69ef87e2
AH
1639 case CONST_VECTOR:
1640 {
1641 int units;
1642 rtx elt;
1643
1644 units = CONST_VECTOR_NUNITS (x);
1645
1646 for (i = 0; i < units; ++i)
1647 {
1648 elt = CONST_VECTOR_ELT (x, i);
1649 hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
1650 }
1651
1652 return hash;
1653 }
1654
7506f491
DE
1655 /* Assume there is only one rtx object for any given label. */
1656 case LABEL_REF:
1657 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1658 differences and differences between each stage's debugging dumps. */
c4c81601
RK
1659 hash += (((unsigned int) LABEL_REF << 7)
1660 + CODE_LABEL_NUMBER (XEXP (x, 0)));
7506f491
DE
1661 return hash;
1662
1663 case SYMBOL_REF:
1664 {
1665 /* Don't hash on the symbol's address to avoid bootstrap differences.
1666 Different hash values may cause expressions to be recorded in
1667 different orders and thus different registers to be used in the
1668 final assembler. This also avoids differences in the dump files
1669 between various stages. */
1670 unsigned int h = 0;
3cce094d 1671 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
c4c81601 1672
7506f491
DE
1673 while (*p)
1674 h += (h << 7) + *p++; /* ??? revisit */
c4c81601
RK
1675
1676 hash += ((unsigned int) SYMBOL_REF << 7) + h;
7506f491
DE
1677 return hash;
1678 }
1679
1680 case MEM:
1681 if (MEM_VOLATILE_P (x))
1682 {
1683 *do_not_record_p = 1;
1684 return 0;
1685 }
c4c81601
RK
1686
1687 hash += (unsigned int) MEM;
d51f3632
JH
1688 /* We used the alias set for hashing, but this is not good, since the alias
1689 set may differ between -fprofile-arcs and -fbranch-probabilities compilations,
1690 causing the profiles to fail to match. */
7506f491
DE
1691 x = XEXP (x, 0);
1692 goto repeat;
1693
1694 case PRE_DEC:
1695 case PRE_INC:
1696 case POST_DEC:
1697 case POST_INC:
1698 case PC:
1699 case CC0:
1700 case CALL:
1701 case UNSPEC_VOLATILE:
1702 *do_not_record_p = 1;
1703 return 0;
1704
1705 case ASM_OPERANDS:
1706 if (MEM_VOLATILE_P (x))
1707 {
1708 *do_not_record_p = 1;
1709 return 0;
1710 }
6462bb43
AO
1711 else
1712 {
1713 /* We don't want to take the filename and line into account. */
1714 hash += (unsigned) code + (unsigned) GET_MODE (x)
1715 + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
1716 + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
1717 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
1718
1719 if (ASM_OPERANDS_INPUT_LENGTH (x))
1720 {
1721 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
1722 {
1723 hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
1724 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
1725 do_not_record_p)
1726 + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
1727 (x, i)));
1728 }
1729
1730 hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
1731 x = ASM_OPERANDS_INPUT (x, 0);
1732 mode = GET_MODE (x);
1733 goto repeat;
1734 }
1735 return hash;
1736 }
7506f491
DE
1737
1738 default:
1739 break;
1740 }
1741
7506f491 1742 hash += (unsigned) code + (unsigned) GET_MODE (x);
c4c81601 1743 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
1744 {
1745 if (fmt[i] == 'e')
1746 {
7506f491
DE
1747 /* If we are about to do the last recursive call
1748 needed at this level, change it into iteration.
1749 This function is called enough to be worth it. */
1750 if (i == 0)
1751 {
c4c81601 1752 x = XEXP (x, i);
7506f491
DE
1753 goto repeat;
1754 }
c4c81601
RK
1755
1756 hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
7506f491
DE
1757 if (*do_not_record_p)
1758 return 0;
1759 }
c4c81601 1760
7506f491
DE
1761 else if (fmt[i] == 'E')
1762 for (j = 0; j < XVECLEN (x, i); j++)
1763 {
1764 hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
1765 if (*do_not_record_p)
1766 return 0;
1767 }
c4c81601 1768
7506f491 1769 else if (fmt[i] == 's')
6462bb43 1770 hash += hash_string_1 (XSTR (x, i));
7506f491 1771 else if (fmt[i] == 'i')
c4c81601 1772 hash += (unsigned int) XINT (x, i);
7506f491
DE
1773 else
1774 abort ();
1775 }
1776
1777 return hash;
1778}
1779
1780/* Hash a set of register REGNO.
1781
c4c81601
RK
1782 Sets are hashed on the register that is set. This simplifies the PRE copy
1783 propagation code.
7506f491
DE
1784
1785 ??? May need to make things more elaborate. Later, as necessary. */
1786
1787static unsigned int
1788hash_set (regno, hash_table_size)
1789 int regno;
1790 int hash_table_size;
1791{
1792 unsigned int hash;
1793
1794 hash = regno;
1795 return hash % hash_table_size;
1796}
1797
1798/* Return non-zero if X is equivalent to Y.
1799 ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */
1800
1801static int
1802expr_equiv_p (x, y)
1803 rtx x, y;
1804{
b3694847
SS
1805 int i, j;
1806 enum rtx_code code;
1807 const char *fmt;
7506f491
DE
1808
1809 if (x == y)
1810 return 1;
c4c81601 1811
7506f491
DE
1812 if (x == 0 || y == 0)
1813 return x == y;
1814
1815 code = GET_CODE (x);
1816 if (code != GET_CODE (y))
1817 return 0;
1818
1819 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1820 if (GET_MODE (x) != GET_MODE (y))
1821 return 0;
1822
1823 switch (code)
1824 {
1825 case PC:
1826 case CC0:
1827 return x == y;
1828
1829 case CONST_INT:
1830 return INTVAL (x) == INTVAL (y);
1831
1832 case LABEL_REF:
1833 return XEXP (x, 0) == XEXP (y, 0);
1834
1835 case SYMBOL_REF:
1836 return XSTR (x, 0) == XSTR (y, 0);
1837
1838 case REG:
1839 return REGNO (x) == REGNO (y);
1840
297c3335
RH
1841 case MEM:
1842 /* Can't merge two expressions in different alias sets, since we can
1843 decide that the expression is transparent in a block when it isn't,
1844 due to it being set with a different alias set. */
1845 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
1846 return 0;
1847 break;
1848
7506f491
DE
1849 /* For commutative operations, check both orders. */
1850 case PLUS:
1851 case MULT:
1852 case AND:
1853 case IOR:
1854 case XOR:
1855 case NE:
1856 case EQ:
1857 return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
1858 && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
1859 || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
1860 && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));
1861
6462bb43
AO
1862 case ASM_OPERANDS:
1863 /* We don't use the generic code below because we want to
1864 disregard filename and line numbers. */
1865
1866 /* A volatile asm isn't equivalent to any other. */
1867 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1868 return 0;
1869
1870 if (GET_MODE (x) != GET_MODE (y)
1871 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
1872 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
1873 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
1874 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
1875 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
1876 return 0;
1877
1878 if (ASM_OPERANDS_INPUT_LENGTH (x))
1879 {
1880 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
1881 if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
1882 ASM_OPERANDS_INPUT (y, i))
1883 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
1884 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
1885 return 0;
1886 }
1887
1888 return 1;
1889
7506f491
DE
1890 default:
1891 break;
1892 }
1893
1894 /* Compare the elements. If any pair of corresponding elements
1895 fail to match, return 0 for the whole thing. */
1896
1897 fmt = GET_RTX_FORMAT (code);
1898 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1899 {
1900 switch (fmt[i])
1901 {
1902 case 'e':
1903 if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
1904 return 0;
1905 break;
1906
1907 case 'E':
1908 if (XVECLEN (x, i) != XVECLEN (y, i))
1909 return 0;
1910 for (j = 0; j < XVECLEN (x, i); j++)
1911 if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1912 return 0;
1913 break;
1914
1915 case 's':
1916 if (strcmp (XSTR (x, i), XSTR (y, i)))
1917 return 0;
1918 break;
1919
1920 case 'i':
1921 if (XINT (x, i) != XINT (y, i))
1922 return 0;
1923 break;
1924
1925 case 'w':
1926 if (XWINT (x, i) != XWINT (y, i))
1927 return 0;
1928 break;
1929
1930 case '0':
1931 break;
aaa4ca30 1932
7506f491
DE
1933 default:
1934 abort ();
1935 }
8e42ace1 1936 }
7506f491
DE
1937
1938 return 1;
1939}
1940
1941/* Insert expression X in INSN in the hash table.
1942 If it is already present, record it as the last occurrence in INSN's
1943 basic block.
1944
1945 MODE is the mode of the value X is being stored into.
1946 It is only used if X is a CONST_INT.
1947
1948 ANTIC_P is non-zero if X is an anticipatable expression.
1949 AVAIL_P is non-zero if X is an available expression. */
1950
1951static void
1952insert_expr_in_table (x, mode, insn, antic_p, avail_p)
1953 rtx x;
1954 enum machine_mode mode;
1955 rtx insn;
1956 int antic_p, avail_p;
1957{
1958 int found, do_not_record_p;
1959 unsigned int hash;
1960 struct expr *cur_expr, *last_expr = NULL;
1961 struct occr *antic_occr, *avail_occr;
1962 struct occr *last_occr = NULL;
1963
1964 hash = hash_expr (x, mode, &do_not_record_p, expr_hash_table_size);
1965
1966 /* Do not insert expression in table if it contains volatile operands,
1967 or if hash_expr determines the expression is something we don't want
1968 to or can't handle. */
1969 if (do_not_record_p)
1970 return;
1971
1972 cur_expr = expr_hash_table[hash];
1973 found = 0;
1974
c4c81601 1975 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
7506f491
DE
1976 {
1977 /* If the expression isn't found, save a pointer to the end of
1978 the list. */
1979 last_expr = cur_expr;
1980 cur_expr = cur_expr->next_same_hash;
1981 }
1982
1983 if (! found)
1984 {
1985 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
1986 bytes_used += sizeof (struct expr);
1987 if (expr_hash_table[hash] == NULL)
c4c81601
RK
1988 /* This is the first pattern that hashed to this index. */
1989 expr_hash_table[hash] = cur_expr;
7506f491 1990 else
c4c81601
RK
1991 /* Add EXPR to end of this hash chain. */
1992 last_expr->next_same_hash = cur_expr;
1993
589005ff 1994 /* Set the fields of the expr element. */
7506f491
DE
1995 cur_expr->expr = x;
1996 cur_expr->bitmap_index = n_exprs++;
1997 cur_expr->next_same_hash = NULL;
1998 cur_expr->antic_occr = NULL;
1999 cur_expr->avail_occr = NULL;
2000 }
2001
2002 /* Now record the occurrence(s). */
7506f491
DE
2003 if (antic_p)
2004 {
2005 antic_occr = cur_expr->antic_occr;
2006
2007 /* Search for another occurrence in the same basic block. */
2008 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
2009 {
2010 /* If an occurrence isn't found, save a pointer to the end of
2011 the list. */
2012 last_occr = antic_occr;
2013 antic_occr = antic_occr->next;
2014 }
2015
2016 if (antic_occr)
c4c81601
RK
2017 /* Found another instance of the expression in the same basic block.
2018 Prefer the currently recorded one. We want the first one in the
2019 block and the block is scanned from start to end. */
2020 ; /* nothing to do */
7506f491
DE
2021 else
2022 {
2023 /* First occurrence of this expression in this basic block. */
2024 antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2025 bytes_used += sizeof (struct occr);
2026 /* First occurrence of this expression in any block? */
2027 if (cur_expr->antic_occr == NULL)
2028 cur_expr->antic_occr = antic_occr;
2029 else
2030 last_occr->next = antic_occr;
c4c81601 2031
7506f491
DE
2032 antic_occr->insn = insn;
2033 antic_occr->next = NULL;
2034 }
2035 }
2036
2037 if (avail_p)
2038 {
2039 avail_occr = cur_expr->avail_occr;
2040
2041 /* Search for another occurrence in the same basic block. */
2042 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
2043 {
2044 /* If an occurrence isn't found, save a pointer to the end of
2045 the list. */
2046 last_occr = avail_occr;
2047 avail_occr = avail_occr->next;
2048 }
2049
2050 if (avail_occr)
c4c81601
RK
2051 /* Found another instance of the expression in the same basic block.
2052 Prefer this occurrence to the currently recorded one. We want
2053 the last one in the block and the block is scanned from start
2054 to end. */
2055 avail_occr->insn = insn;
7506f491
DE
2056 else
2057 {
2058 /* First occurrence of this expression in this basic block. */
2059 avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2060 bytes_used += sizeof (struct occr);
c4c81601 2061
7506f491
DE
2062 /* First occurrence of this expression in any block? */
2063 if (cur_expr->avail_occr == NULL)
2064 cur_expr->avail_occr = avail_occr;
2065 else
2066 last_occr->next = avail_occr;
c4c81601 2067
7506f491
DE
2068 avail_occr->insn = insn;
2069 avail_occr->next = NULL;
2070 }
2071 }
2072}
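/* In summary, a sketch of the structure the routine above builds:
   expr_hash_table[hash] heads a next_same_hash chain of distinct
   expressions; each expression carries an antic_occr list (the first
   occurrence in each basic block that contains one) and an avail_occr
   list (the last occurrence in each such block), with blocks appearing
   in the order they were scanned.  */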
2073
2074/* Insert pattern X in INSN in the hash table.
2075 X is a SET of a reg to either another reg or a constant.
2076 If it is already present, record it as the last occurrence in INSN's
2077 basic block. */
2078
2079static void
2080insert_set_in_table (x, insn)
2081 rtx x;
2082 rtx insn;
2083{
2084 int found;
2085 unsigned int hash;
2086 struct expr *cur_expr, *last_expr = NULL;
2087 struct occr *cur_occr, *last_occr = NULL;
2088
2089 if (GET_CODE (x) != SET
2090 || GET_CODE (SET_DEST (x)) != REG)
2091 abort ();
2092
2093 hash = hash_set (REGNO (SET_DEST (x)), set_hash_table_size);
2094
2095 cur_expr = set_hash_table[hash];
2096 found = 0;
2097
c4c81601 2098 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
7506f491
DE
2099 {
2100 /* If the expression isn't found, save a pointer to the end of
2101 the list. */
2102 last_expr = cur_expr;
2103 cur_expr = cur_expr->next_same_hash;
2104 }
2105
2106 if (! found)
2107 {
2108 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
2109 bytes_used += sizeof (struct expr);
2110 if (set_hash_table[hash] == NULL)
c4c81601
RK
2111 /* This is the first pattern that hashed to this index. */
2112 set_hash_table[hash] = cur_expr;
7506f491 2113 else
c4c81601
RK
2114 /* Add EXPR to end of this hash chain. */
2115 last_expr->next_same_hash = cur_expr;
2116
7506f491
DE
2117 /* Set the fields of the expr element.
2118 We must copy X because it can be modified when copy propagation is
2119 performed on its operands. */
7506f491
DE
2120 cur_expr->expr = copy_rtx (x);
2121 cur_expr->bitmap_index = n_sets++;
2122 cur_expr->next_same_hash = NULL;
2123 cur_expr->antic_occr = NULL;
2124 cur_expr->avail_occr = NULL;
2125 }
2126
2127 /* Now record the occurrence. */
7506f491
DE
2128 cur_occr = cur_expr->avail_occr;
2129
2130 /* Search for another occurrence in the same basic block. */
2131 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2132 {
2133 /* If an occurrence isn't found, save a pointer to the end of
2134 the list. */
2135 last_occr = cur_occr;
2136 cur_occr = cur_occr->next;
2137 }
2138
2139 if (cur_occr)
c4c81601
RK
2140 /* Found another instance of the expression in the same basic block.
2141 Prefer this occurrence to the currently recorded one. We want the
2142 last one in the block and the block is scanned from start to end. */
2143 cur_occr->insn = insn;
7506f491
DE
2144 else
2145 {
2146 /* First occurrence of this expression in this basic block. */
2147 cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2148 bytes_used += sizeof (struct occr);
c4c81601 2149
7506f491
DE
2150 /* First occurrence of this expression in any block? */
2151 if (cur_expr->avail_occr == NULL)
2152 cur_expr->avail_occr = cur_occr;
2153 else
2154 last_occr->next = cur_occr;
c4c81601 2155
7506f491
DE
2156 cur_occr->insn = insn;
2157 cur_occr->next = NULL;
2158 }
2159}
2160
c4c81601
RK
2161/* Scan pattern PAT of INSN and add an entry to the hash table. If SET_P is
2162 non-zero, this is for the assignment hash table, otherwise it is for the
2163 expression hash table. */
7506f491
DE
2164
2165static void
2166hash_scan_set (pat, insn, set_p)
2167 rtx pat, insn;
2168 int set_p;
2169{
2170 rtx src = SET_SRC (pat);
2171 rtx dest = SET_DEST (pat);
172890a2 2172 rtx note;
7506f491
DE
2173
2174 if (GET_CODE (src) == CALL)
2175 hash_scan_call (src, insn);
2176
172890a2 2177 else if (GET_CODE (dest) == REG)
7506f491 2178 {
172890a2 2179 unsigned int regno = REGNO (dest);
7506f491
DE
2180 rtx tmp;
2181
172890a2
RK
2182 /* If this is a single set and we are doing constant propagation,
2183 see if a REG_NOTE shows this equivalent to a constant. */
2184 if (set_p && (note = find_reg_equal_equiv_note (insn)) != 0
2185 && CONSTANT_P (XEXP (note, 0)))
2186 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2187
7506f491
DE
2188 /* Only record sets of pseudo-regs in the hash table. */
2189 if (! set_p
2190 && regno >= FIRST_PSEUDO_REGISTER
2191 /* Don't GCSE something if we can't do a reg/reg copy. */
2192 && can_copy_p [GET_MODE (dest)]
068473ec
JH
2193 /* GCSE commonly inserts instructions after the insn. We can't
2194 do that easily for EH_REGION notes so disable GCSE on these
2195 for now. */
2196 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
7506f491 2197 /* Is SET_SRC something we want to gcse? */
172890a2
RK
2198 && want_to_gcse_p (src)
2199 /* Don't CSE a nop. */
43e72072
JJ
2200 && ! set_noop_p (pat)
2201 /* Don't GCSE if it has attached REG_EQUIV note.
2202 At this point this only function parameters should have
2203 REG_EQUIV notes and if the argument slot is used somewhere
a1f300c0 2204 explicitly, it means address of parameter has been taken,
43e72072
JJ
2205 so we should not extend the lifetime of the pseudo. */
2206 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2207 || GET_CODE (XEXP (note, 0)) != MEM))
7506f491
DE
2208 {
2209 /* An expression is not anticipatable if its operands are
52d76e11
RK
2210 modified before this insn or if this is not the only SET in
2211 this insn. */
2212 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
7506f491 2213 /* An expression is not available if its operands are
eb296bd9
GK
2214 subsequently modified, including this insn. It's also not
2215 available if this is a branch, because we can't insert
2216 a set after the branch. */
2217 int avail_p = (oprs_available_p (src, insn)
2218 && ! JUMP_P (insn));
c4c81601 2219
7506f491
DE
2220 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p);
2221 }
c4c81601 2222
7506f491
DE
2223 /* Record sets for constant/copy propagation. */
2224 else if (set_p
2225 && regno >= FIRST_PSEUDO_REGISTER
2226 && ((GET_CODE (src) == REG
2227 && REGNO (src) >= FIRST_PSEUDO_REGISTER
172890a2
RK
2228 && can_copy_p [GET_MODE (dest)]
2229 && REGNO (src) != regno)
b446e5a2 2230 || CONSTANT_P (src))
7506f491
DE
2231 /* A copy is not available if its src or dest is subsequently
2232 modified. Here we want to search from INSN+1 on, but
2233 oprs_available_p searches from INSN on. */
2234 && (insn == BLOCK_END (BLOCK_NUM (insn))
2235 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2236 && oprs_available_p (pat, tmp))))
2237 insert_set_in_table (pat, insn);
2238 }
7506f491
DE
2239}
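/* Worked example for the expression table (rtl and numbers invented):
   given (set (reg 120) (plus:SI (reg 121) (const_int 4))) in the middle
   of a block, the PLUS is entered with ANTIC_P nonzero only if reg 121
   is not modified earlier in the block and the insn is a single_set,
   and with AVAIL_P nonzero only if reg 121 is not modified by this insn
   or any later insn in the block and the insn is not a jump.  */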
2240
2241static void
2242hash_scan_clobber (x, insn)
50b2596f 2243 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
7506f491
DE
2244{
2245 /* Currently nothing to do. */
2246}
2247
2248static void
2249hash_scan_call (x, insn)
50b2596f 2250 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
7506f491
DE
2251{
2252 /* Currently nothing to do. */
2253}
2254
2255/* Process INSN and add hash table entries as appropriate.
2256
2257 Only available expressions that set a single pseudo-reg are recorded.
2258
2259 Single sets in a PARALLEL could be handled, but it's an extra complication
2260 that isn't dealt with right now. The trick is handling the CLOBBERs that
2261 are also in the PARALLEL. Later.
2262
2263 If SET_P is non-zero, this is for the assignment hash table,
ed79bb3d
R
2264 otherwise it is for the expression hash table.
2265 If IN_LIBCALL_BLOCK nonzero, we are in a libcall block, and should
2266 not record any expressions. */
7506f491
DE
2267
2268static void
ed79bb3d 2269hash_scan_insn (insn, set_p, in_libcall_block)
7506f491
DE
2270 rtx insn;
2271 int set_p;
48e87cef 2272 int in_libcall_block;
7506f491
DE
2273{
2274 rtx pat = PATTERN (insn);
c4c81601 2275 int i;
7506f491 2276
172890a2
RK
2277 if (in_libcall_block)
2278 return;
2279
7506f491
DE
2280 /* Pick out the sets of INSN and for other forms of instructions record
2281 what's been modified. */
2282
172890a2
RK
2283 if (GET_CODE (pat) == SET)
2284 hash_scan_set (pat, insn, set_p);
7506f491 2285 else if (GET_CODE (pat) == PARALLEL)
c4c81601
RK
2286 for (i = 0; i < XVECLEN (pat, 0); i++)
2287 {
2288 rtx x = XVECEXP (pat, 0, i);
7506f491 2289
c4c81601 2290 if (GET_CODE (x) == SET)
172890a2 2291 hash_scan_set (x, insn, set_p);
c4c81601
RK
2292 else if (GET_CODE (x) == CLOBBER)
2293 hash_scan_clobber (x, insn);
2294 else if (GET_CODE (x) == CALL)
2295 hash_scan_call (x, insn);
2296 }
7506f491 2297
7506f491
DE
2298 else if (GET_CODE (pat) == CLOBBER)
2299 hash_scan_clobber (pat, insn);
2300 else if (GET_CODE (pat) == CALL)
2301 hash_scan_call (pat, insn);
2302}
2303
2304static void
2305dump_hash_table (file, name, table, table_size, total_size)
2306 FILE *file;
dff01034 2307 const char *name;
7506f491
DE
2308 struct expr **table;
2309 int table_size, total_size;
2310{
2311 int i;
2312 /* Flattened out table, so it's printed in proper order. */
4da896b2
MM
2313 struct expr **flat_table;
2314 unsigned int *hash_val;
c4c81601 2315 struct expr *expr;
4da896b2 2316
589005ff 2317 flat_table
4da896b2
MM
2318 = (struct expr **) xcalloc (total_size, sizeof (struct expr *));
2319 hash_val = (unsigned int *) xmalloc (total_size * sizeof (unsigned int));
7506f491 2320
7506f491 2321 for (i = 0; i < table_size; i++)
c4c81601
RK
2322 for (expr = table[i]; expr != NULL; expr = expr->next_same_hash)
2323 {
2324 flat_table[expr->bitmap_index] = expr;
2325 hash_val[expr->bitmap_index] = i;
2326 }
7506f491
DE
2327
2328 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
2329 name, table_size, total_size);
2330
2331 for (i = 0; i < total_size; i++)
21318741
RK
2332 if (flat_table[i] != 0)
2333 {
a0ac9e5a 2334 expr = flat_table[i];
21318741
RK
2335 fprintf (file, "Index %d (hash value %d)\n ",
2336 expr->bitmap_index, hash_val[i]);
a0ac9e5a 2337 print_rtl (file, expr->expr);
21318741
RK
2338 fprintf (file, "\n");
2339 }
7506f491
DE
2340
2341 fprintf (file, "\n");
4da896b2 2342
4da896b2
MM
2343 free (flat_table);
2344 free (hash_val);
7506f491
DE
2345}
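/* Hypothetical sample of the dump produced above (name, rtl and numbers
   are all invented):

	Expression hash table (11 buckets, 2 entries)
	Index 0 (hash value 7)
	  (plus:SI (reg:SI 106) (const_int 8))
	Index 1 (hash value 3)
	  (mem:SI (reg/f:SI 105))
 */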
2346
2347/* Record register first/last/block set information for REGNO in INSN.
c4c81601 2348
80c29cc4 2349 first_set records the first place in the block where the register
7506f491 2350 is set and is used to compute "anticipatability".
c4c81601 2351
80c29cc4 2352 last_set records the last place in the block where the register
7506f491 2353 is set and is used to compute "availability".
c4c81601 2354
80c29cc4
RZ
2355 last_bb records the block for which first_set and last_set are
2356 valid, as a quick test to invalidate them.
2357
7506f491
DE
2358 reg_set_in_block records whether the register is set in the block
2359 and is used to compute "transparency". */
2360
2361static void
2362record_last_reg_set_info (insn, regno)
2363 rtx insn;
2364 int regno;
2365{
80c29cc4
RZ
2366 struct reg_avail_info *info = &reg_avail_info[regno];
2367 int cuid = INSN_CUID (insn);
c4c81601 2368
80c29cc4
RZ
2369 info->last_set = cuid;
2370 if (info->last_bb != current_bb)
2371 {
2372 info->last_bb = current_bb;
2373 info->first_set = cuid;
e0082a72 2374 SET_BIT (reg_set_in_block[current_bb->index], regno);
80c29cc4 2375 }
7506f491
DE
2376}
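/* These fields are exactly what oprs_unchanged_p consults above: within
   the block recorded in last_bb, a register operand is "anticipatable"
   at INSN when first_set >= INSN_CUID (insn) (no earlier set in the
   block) and "available" when last_set < INSN_CUID (insn) (no set from
   INSN to the end of the block).  */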
2377
a13d4ebf
AM
2378
2379/* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2380 Note we store a pair of elements in the list, so they have to be
2381 taken off pairwise. */
2382
589005ff 2383static void
a13d4ebf
AM
2384canon_list_insert (dest, unused1, v_insn)
2385 rtx dest ATTRIBUTE_UNUSED;
2386 rtx unused1 ATTRIBUTE_UNUSED;
2387 void * v_insn;
2388{
2389 rtx dest_addr, insn;
0fe854a7 2390 int bb;
a13d4ebf
AM
2391
2392 while (GET_CODE (dest) == SUBREG
2393 || GET_CODE (dest) == ZERO_EXTRACT
2394 || GET_CODE (dest) == SIGN_EXTRACT
2395 || GET_CODE (dest) == STRICT_LOW_PART)
2396 dest = XEXP (dest, 0);
2397
2398 /* If DEST is not a MEM, then it will not conflict with a load. Note
2399 that function calls are assumed to clobber memory, but are handled
2400 elsewhere. */
2401
2402 if (GET_CODE (dest) != MEM)
2403 return;
2404
2405 dest_addr = get_addr (XEXP (dest, 0));
2406 dest_addr = canon_rtx (dest_addr);
589005ff 2407 insn = (rtx) v_insn;
0fe854a7 2408 bb = BLOCK_NUM (insn);
a13d4ebf 2409
589005ff 2410 canon_modify_mem_list[bb] =
0fe854a7 2411 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
589005ff 2412 canon_modify_mem_list[bb] =
0fe854a7
RH
2413 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
2414 bitmap_set_bit (canon_modify_mem_list_set, bb);
a13d4ebf
AM
2415}
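/* A sketch of the list this builds (two stores in the block, the later
   one scanned last): canon_modify_mem_list[bb] then reads

	dest2, canon_addr2, dest1, canon_addr1, ...

   i.e. each store contributes a (MEM, canonicalized address) pair with
   the newest pair at the head, which is why consumers must peel the
   elements off two at a time.  */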
2416
a13d4ebf
AM
2417/* Record memory modification information for INSN. We do not actually care
2418 about the memory location(s) that are set, or even how they are set (consider
2419 a CALL_INSN). We merely need to record which insns modify memory. */
7506f491
DE
2420
2421static void
2422record_last_mem_set_info (insn)
2423 rtx insn;
2424{
0fe854a7
RH
2425 int bb = BLOCK_NUM (insn);
2426
ccef9ef5 2427 /* load_killed_in_block_p will handle the case of calls clobbering
dc297297 2428 everything. */
0fe854a7
RH
2429 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
2430 bitmap_set_bit (modify_mem_list_set, bb);
a13d4ebf
AM
2431
2432 if (GET_CODE (insn) == CALL_INSN)
2433 {
2434 /* Note that traversals of this loop (other than for free-ing)
2435 will break after encountering a CALL_INSN. So, there's no
dc297297 2436 need to insert a pair of items, as canon_list_insert does. */
589005ff
KH
2437 canon_modify_mem_list[bb] =
2438 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
0fe854a7 2439 bitmap_set_bit (canon_modify_mem_list_set, bb);
a13d4ebf
AM
2440 }
2441 else
0fe854a7 2442 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
7506f491
DE
2443}
2444
7506f491 2445/* Called from compute_hash_table via note_stores to handle one
84832317
MM
2446 SET or CLOBBER in an insn. DATA is really the instruction in which
2447 the SET is taking place. */
7506f491
DE
2448
2449static void
84832317 2450record_last_set_info (dest, setter, data)
50b2596f 2451 rtx dest, setter ATTRIBUTE_UNUSED;
84832317 2452 void *data;
7506f491 2453{
84832317
MM
2454 rtx last_set_insn = (rtx) data;
2455
7506f491
DE
2456 if (GET_CODE (dest) == SUBREG)
2457 dest = SUBREG_REG (dest);
2458
2459 if (GET_CODE (dest) == REG)
2460 record_last_reg_set_info (last_set_insn, REGNO (dest));
2461 else if (GET_CODE (dest) == MEM
2462 /* Ignore pushes, they clobber nothing. */
2463 && ! push_operand (dest, GET_MODE (dest)))
2464 record_last_mem_set_info (last_set_insn);
2465}
2466
2467/* Top level function to create an expression or assignment hash table.
2468
2469 Expression entries are placed in the hash table if
2470 - they are of the form (set (pseudo-reg) src),
2471 - src is something we want to perform GCSE on,
2472 - none of the operands are subsequently modified in the block
2473
2474 Assignment entries are placed in the hash table if
2475 - they are of the form (set (pseudo-reg) src),
2476 - src is something we want to perform const/copy propagation on,
2477 - none of the operands or target are subsequently modified in the block
c4c81601 2478
7506f491
DE
2479 Currently src must be a pseudo-reg or a const_int.
2480
2481 F is the first insn.
2482 SET_P is non-zero for computing the assignment hash table. */
2483
2484static void
b5ce41ff 2485compute_hash_table (set_p)
7506f491
DE
2486 int set_p;
2487{
80c29cc4 2488 unsigned int i;
7506f491
DE
2489
2490 /* While we compute the hash table we also compute a bit array of which
2491 registers are set in which blocks.
7506f491
DE
2492 ??? This isn't needed during const/copy propagation, but it's cheap to
2493 compute. Later. */
d55bc081 2494 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
7506f491 2495
a13d4ebf 2496 /* re-Cache any INSN_LIST nodes we have allocated. */
73991d6a 2497 clear_modify_mem_tables ();
7506f491 2498 /* Some working arrays used to track first and last set in each block. */
80c29cc4
RZ
2499 reg_avail_info = (struct reg_avail_info*)
2500 gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2501
2502 for (i = 0; i < max_gcse_regno; ++i)
e0082a72 2503 reg_avail_info[i].last_bb = NULL;
7506f491 2504
e0082a72 2505 FOR_EACH_BB (current_bb)
7506f491
DE
2506 {
2507 rtx insn;
770ae6cc 2508 unsigned int regno;
ed79bb3d 2509 int in_libcall_block;
7506f491
DE
2510
2511 /* First pass over the instructions records information used to
2512 determine when registers and memory are first and last set.
ccef9ef5 2513 ??? hard-reg reg_set_in_block computation
7506f491
DE
2514 could be moved to compute_sets since they currently don't change. */
2515
e0082a72
ZD
2516 for (insn = current_bb->head;
2517 insn && insn != NEXT_INSN (current_bb->end);
7506f491
DE
2518 insn = NEXT_INSN (insn))
2519 {
2c3c49de 2520 if (! INSN_P (insn))
7506f491
DE
2521 continue;
2522
2523 if (GET_CODE (insn) == CALL_INSN)
2524 {
19652adf 2525 bool clobbers_all = false;
589005ff 2526#ifdef NON_SAVING_SETJMP
19652adf
ZW
2527 if (NON_SAVING_SETJMP
2528 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
2529 clobbers_all = true;
2530#endif
2531
7506f491 2532 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
19652adf
ZW
2533 if (clobbers_all
2534 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7506f491 2535 record_last_reg_set_info (insn, regno);
c4c81601 2536
24a28584 2537 mark_call (insn);
7506f491
DE
2538 }
2539
84832317 2540 note_stores (PATTERN (insn), record_last_set_info, insn);
7506f491
DE
2541 }
2542
2543 /* The next pass builds the hash table. */
2544
e0082a72
ZD
2545 for (insn = current_bb->head, in_libcall_block = 0;
2546 insn && insn != NEXT_INSN (current_bb->end);
7506f491 2547 insn = NEXT_INSN (insn))
2c3c49de 2548 if (INSN_P (insn))
c4c81601
RK
2549 {
2550 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
589005ff
KH
2551 in_libcall_block = 1;
2552 else if (set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2553 in_libcall_block = 0;
2554 hash_scan_insn (insn, set_p, in_libcall_block);
2555 if (!set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2556 in_libcall_block = 0;
8e42ace1 2557 }
7506f491
DE
2558 }
2559
80c29cc4
RZ
2560 free (reg_avail_info);
2561 reg_avail_info = NULL;
7506f491
DE
2562}
2563
2564/* Allocate space for the set hash table.
2565 N_INSNS is the number of instructions in the function.
2566 It is used to determine the number of buckets to use. */
2567
2568static void
2569alloc_set_hash_table (n_insns)
2570 int n_insns;
2571{
2572 int n;
2573
2574 set_hash_table_size = n_insns / 4;
2575 if (set_hash_table_size < 11)
2576 set_hash_table_size = 11;
c4c81601 2577
7506f491
DE
2578 /* Attempt to maintain efficient use of hash table.
2579 Making it an odd number is simplest for now.
2580 ??? Later take some measurements. */
2581 set_hash_table_size |= 1;
2582 n = set_hash_table_size * sizeof (struct expr *);
2583 set_hash_table = (struct expr **) gmalloc (n);
2584}
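/* Worked example of the sizing above (insn count invented): with 1000
   insns, n_insns / 4 gives 250 buckets, which the |= 1 bumps to the odd
   value 251; the floor of 11 only matters for very small functions.  */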
2585
2586/* Free things allocated by alloc_set_hash_table. */
2587
2588static void
2589free_set_hash_table ()
2590{
2591 free (set_hash_table);
2592}
2593
2594/* Compute the hash table for doing copy/const propagation. */
2595
2596static void
b5ce41ff 2597compute_set_hash_table ()
7506f491
DE
2598{
2599 /* Initialize count of number of entries in hash table. */
2600 n_sets = 0;
961192e1 2601 memset ((char *) set_hash_table, 0,
8e42ace1 2602 set_hash_table_size * sizeof (struct expr *));
7506f491 2603
b5ce41ff 2604 compute_hash_table (1);
7506f491
DE
2605}
2606
2607/* Allocate space for the expression hash table.
2608 N_INSNS is the number of instructions in the function.
2609 It is used to determine the number of buckets to use. */
2610
2611static void
2612alloc_expr_hash_table (n_insns)
2e653e39 2613 unsigned int n_insns;
7506f491
DE
2614{
2615 int n;
2616
2617 expr_hash_table_size = n_insns / 2;
2618 /* Make sure the amount is usable. */
2619 if (expr_hash_table_size < 11)
2620 expr_hash_table_size = 11;
c4c81601 2621
7506f491
DE
2622 /* Attempt to maintain efficient use of hash table.
2623 Making it an odd number is simplest for now.
2624 ??? Later take some measurements. */
2625 expr_hash_table_size |= 1;
2626 n = expr_hash_table_size * sizeof (struct expr *);
2627 expr_hash_table = (struct expr **) gmalloc (n);
2628}
2629
2630/* Free things allocated by alloc_expr_hash_table. */
2631
2632static void
2633free_expr_hash_table ()
2634{
2635 free (expr_hash_table);
2636}
2637
2638/* Compute the hash table for doing GCSE. */
2639
2640static void
b5ce41ff 2641compute_expr_hash_table ()
7506f491
DE
2642{
2643 /* Initialize count of number of entries in hash table. */
2644 n_exprs = 0;
961192e1 2645 memset ((char *) expr_hash_table, 0,
8e42ace1 2646 expr_hash_table_size * sizeof (struct expr *));
7506f491 2647
b5ce41ff 2648 compute_hash_table (0);
7506f491
DE
2649}
2650\f
2651/* Expression tracking support. */
2652
2653/* Lookup pattern PAT in the expression table.
2654 The result is a pointer to the table entry, or NULL if not found. */
2655
2656static struct expr *
2657lookup_expr (pat)
2658 rtx pat;
2659{
2660 int do_not_record_p;
2661 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2662 expr_hash_table_size);
2663 struct expr *expr;
2664
2665 if (do_not_record_p)
2666 return NULL;
2667
2668 expr = expr_hash_table[hash];
2669
2670 while (expr && ! expr_equiv_p (expr->expr, pat))
2671 expr = expr->next_same_hash;
2672
2673 return expr;
2674}
2675
c4c81601
RK
2676/* Lookup REGNO in the set table. If PAT is non-NULL look for the entry that
2677 matches it, otherwise return the first entry for REGNO. The result is a
2678 pointer to the table entry, or NULL if not found. */
7506f491
DE
2679
2680static struct expr *
2681lookup_set (regno, pat)
770ae6cc 2682 unsigned int regno;
7506f491
DE
2683 rtx pat;
2684{
2685 unsigned int hash = hash_set (regno, set_hash_table_size);
2686 struct expr *expr;
2687
2688 expr = set_hash_table[hash];
2689
2690 if (pat)
2691 {
2692 while (expr && ! expr_equiv_p (expr->expr, pat))
2693 expr = expr->next_same_hash;
2694 }
2695 else
2696 {
2697 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2698 expr = expr->next_same_hash;
2699 }
2700
2701 return expr;
2702}
2703
2704/* Return the next entry for REGNO in list EXPR. */
2705
2706static struct expr *
2707next_set (regno, expr)
770ae6cc 2708 unsigned int regno;
7506f491
DE
2709 struct expr *expr;
2710{
2711 do
2712 expr = expr->next_same_hash;
2713 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
c4c81601 2714
7506f491
DE
2715 return expr;
2716}
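/* A sketch of the intended iteration idiom (REGNO is whatever register
   the caller is interested in):

	for (expr = lookup_set (regno, NULL_RTX);
	     expr != NULL;
	     expr = next_set (regno, expr))
	  ... examine expr->expr ...

   lookup_set with a null PAT returns the first chain entry whose
   SET_DEST is REGNO; next_set walks the remainder of the hash chain.  */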
2717
0fe854a7
RH
2718/* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2719 types may be mixed. */
2720
2721static void
2722free_insn_expr_list_list (listp)
2723 rtx *listp;
2724{
2725 rtx list, next;
2726
2727 for (list = *listp; list ; list = next)
2728 {
2729 next = XEXP (list, 1);
2730 if (GET_CODE (list) == EXPR_LIST)
2731 free_EXPR_LIST_node (list);
2732 else
2733 free_INSN_LIST_node (list);
2734 }
2735
2736 *listp = NULL;
2737}
2738
73991d6a
JH
2739/* Clear canon_modify_mem_list and modify_mem_list tables. */
2740static void
2741clear_modify_mem_tables ()
2742{
2743 int i;
2744
2745 EXECUTE_IF_SET_IN_BITMAP
0fe854a7
RH
2746 (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
2747 bitmap_clear (modify_mem_list_set);
73991d6a
JH
2748
2749 EXECUTE_IF_SET_IN_BITMAP
2750 (canon_modify_mem_list_set, 0, i,
0fe854a7
RH
2751 free_insn_expr_list_list (canon_modify_mem_list + i));
2752 bitmap_clear (canon_modify_mem_list_set);
73991d6a
JH
2753}
2754
2755/* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
2756
2757static void
2758free_modify_mem_tables ()
2759{
2760 clear_modify_mem_tables ();
2761 free (modify_mem_list);
2762 free (canon_modify_mem_list);
2763 modify_mem_list = 0;
2764 canon_modify_mem_list = 0;
2765}
2766
7506f491
DE
2767/* Reset tables used to keep track of what's still available [since the
2768 start of the block]. */
2769
2770static void
2771reset_opr_set_tables ()
2772{
2773 /* Maintain a bitmap of which regs have been set since beginning of
2774 the block. */
73991d6a 2775 CLEAR_REG_SET (reg_set_bitmap);
c4c81601 2776
7506f491
DE
2777 /* Also keep a record of the last instruction to modify memory.
2778 For now this is very trivial, we only record whether any memory
2779 location has been modified. */
73991d6a 2780 clear_modify_mem_tables ();
7506f491
DE
2781}
2782
2783/* Return non-zero if the operands of X are not set before INSN in
2784 INSN's basic block. */
2785
2786static int
2787oprs_not_set_p (x, insn)
2788 rtx x, insn;
2789{
c4c81601 2790 int i, j;
7506f491 2791 enum rtx_code code;
6f7d635c 2792 const char *fmt;
7506f491 2793
7506f491
DE
2794 if (x == 0)
2795 return 1;
2796
2797 code = GET_CODE (x);
2798 switch (code)
2799 {
2800 case PC:
2801 case CC0:
2802 case CONST:
2803 case CONST_INT:
2804 case CONST_DOUBLE:
69ef87e2 2805 case CONST_VECTOR:
7506f491
DE
2806 case SYMBOL_REF:
2807 case LABEL_REF:
2808 case ADDR_VEC:
2809 case ADDR_DIFF_VEC:
2810 return 1;
2811
2812 case MEM:
589005ff 2813 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
e2d2ed72 2814 INSN_CUID (insn), x, 0))
a13d4ebf 2815 return 0;
c4c81601
RK
2816 else
2817 return oprs_not_set_p (XEXP (x, 0), insn);
7506f491
DE
2818
2819 case REG:
73991d6a 2820 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
7506f491
DE
2821
2822 default:
2823 break;
2824 }
2825
c4c81601 2826 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
2827 {
2828 if (fmt[i] == 'e')
2829 {
7506f491
DE
2830 /* If we are about to do the last recursive call
2831 needed at this level, change it into iteration.
2832 This function is called enough to be worth it. */
2833 if (i == 0)
c4c81601
RK
2834 return oprs_not_set_p (XEXP (x, i), insn);
2835
2836 if (! oprs_not_set_p (XEXP (x, i), insn))
7506f491
DE
2837 return 0;
2838 }
2839 else if (fmt[i] == 'E')
c4c81601
RK
2840 for (j = 0; j < XVECLEN (x, i); j++)
2841 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2842 return 0;
7506f491
DE
2843 }
2844
2845 return 1;
2846}
2847
2848/* Mark things set by a CALL. */
2849
2850static void
b5ce41ff
JL
2851mark_call (insn)
2852 rtx insn;
7506f491 2853{
24a28584 2854 if (! CONST_OR_PURE_CALL_P (insn))
a13d4ebf 2855 record_last_mem_set_info (insn);
7506f491
DE
2856}
2857
2858/* Mark things set by a SET. */
2859
2860static void
2861mark_set (pat, insn)
2862 rtx pat, insn;
2863{
2864 rtx dest = SET_DEST (pat);
2865
2866 while (GET_CODE (dest) == SUBREG
2867 || GET_CODE (dest) == ZERO_EXTRACT
2868 || GET_CODE (dest) == SIGN_EXTRACT
2869 || GET_CODE (dest) == STRICT_LOW_PART)
2870 dest = XEXP (dest, 0);
2871
a13d4ebf 2872 if (GET_CODE (dest) == REG)
73991d6a 2873 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
a13d4ebf
AM
2874 else if (GET_CODE (dest) == MEM)
2875 record_last_mem_set_info (insn);
2876
7506f491 2877 if (GET_CODE (SET_SRC (pat)) == CALL)
b5ce41ff 2878 mark_call (insn);
7506f491
DE
2879}
2880
2881/* Record things set by a CLOBBER. */
2882
2883static void
2884mark_clobber (pat, insn)
2885 rtx pat, insn;
2886{
2887 rtx clob = XEXP (pat, 0);
2888
2889 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2890 clob = XEXP (clob, 0);
2891
a13d4ebf 2892 if (GET_CODE (clob) == REG)
73991d6a 2893 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
a13d4ebf
AM
2894 else
2895 record_last_mem_set_info (insn);
7506f491
DE
2896}
2897
2898/* Record things set by INSN.
2899 This data is used by oprs_not_set_p. */
2900
2901static void
2902mark_oprs_set (insn)
2903 rtx insn;
2904{
2905 rtx pat = PATTERN (insn);
c4c81601 2906 int i;
7506f491
DE
2907
2908 if (GET_CODE (pat) == SET)
2909 mark_set (pat, insn);
2910 else if (GET_CODE (pat) == PARALLEL)
c4c81601
RK
2911 for (i = 0; i < XVECLEN (pat, 0); i++)
2912 {
2913 rtx x = XVECEXP (pat, 0, i);
2914
2915 if (GET_CODE (x) == SET)
2916 mark_set (x, insn);
2917 else if (GET_CODE (x) == CLOBBER)
2918 mark_clobber (x, insn);
2919 else if (GET_CODE (x) == CALL)
2920 mark_call (insn);
2921 }
7506f491 2922
7506f491
DE
2923 else if (GET_CODE (pat) == CLOBBER)
2924 mark_clobber (pat, insn);
2925 else if (GET_CODE (pat) == CALL)
b5ce41ff 2926 mark_call (insn);
7506f491 2927}
b5ce41ff 2928
7506f491
DE
2929\f
2930/* Classic GCSE reaching definition support. */
2931
2932/* Allocate reaching def variables. */
2933
2934static void
2935alloc_rd_mem (n_blocks, n_insns)
2936 int n_blocks, n_insns;
2937{
2938 rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
d55bc081 2939 sbitmap_vector_zero (rd_kill, n_blocks);
7506f491
DE
2940
2941 rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
d55bc081 2942 sbitmap_vector_zero (rd_gen, n_blocks);
7506f491
DE
2943
2944 reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
d55bc081 2945 sbitmap_vector_zero (reaching_defs, n_blocks);
7506f491
DE
2946
2947 rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
d55bc081 2948 sbitmap_vector_zero (rd_out, n_blocks);
7506f491
DE
2949}
2950
2951/* Free reaching def variables. */
2952
2953static void
2954free_rd_mem ()
2955{
5a660bff
DB
2956 sbitmap_vector_free (rd_kill);
2957 sbitmap_vector_free (rd_gen);
2958 sbitmap_vector_free (reaching_defs);
2959 sbitmap_vector_free (rd_out);
7506f491
DE
2960}
2961
c4c81601 2962/* Add INSN to the kills of BB. REGNO, set in BB, is killed by INSN. */
7506f491
DE
2963
2964static void
2965handle_rd_kill_set (insn, regno, bb)
2966 rtx insn;
e2d2ed72
AM
2967 int regno;
2968 basic_block bb;
7506f491 2969{
c4c81601 2970 struct reg_set *this_reg;
7506f491 2971
c4c81601
RK
2972 for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg ->next)
2973 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
0b17ab2f 2974 SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
7506f491
DE
2975}
2976
7506f491
DE
2977/* Compute the set of kill's for reaching definitions. */
2978
2979static void
2980compute_kill_rd ()
2981{
e0082a72 2982 int cuid;
172890a2
RK
2983 unsigned int regno;
2984 int i;
e0082a72 2985 basic_block bb;
7506f491
DE
2986
2987 /* For each block
2988 For each set bit in `gen' of the block (i.e. each insn which
ac7c5af5
JL
2989 generates a definition in the block)
2990 Call the reg set by the insn corresponding to that bit regx
2991 Look at the linked list starting at reg_set_table[regx]
2992 For each setting of regx in the linked list, which is not in
2993 this block
6d2f8887 2994 Set the bit in `kill' corresponding to that insn. */
e0082a72 2995 FOR_EACH_BB (bb)
c4c81601 2996 for (cuid = 0; cuid < max_cuid; cuid++)
e0082a72 2997 if (TEST_BIT (rd_gen[bb->index], cuid))
7506f491 2998 {
c4c81601
RK
2999 rtx insn = CUID_INSN (cuid);
3000 rtx pat = PATTERN (insn);
7506f491 3001
c4c81601
RK
3002 if (GET_CODE (insn) == CALL_INSN)
3003 {
3004 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4e2db584 3005 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
e0082a72 3006 handle_rd_kill_set (insn, regno, bb);
c4c81601 3007 }
7506f491 3008
c4c81601
RK
3009 if (GET_CODE (pat) == PARALLEL)
3010 {
3011 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7506f491 3012 {
c4c81601 3013 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
7506f491 3014
c4c81601
RK
3015 if ((code == SET || code == CLOBBER)
3016 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
3017 handle_rd_kill_set (insn,
3018 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
e0082a72 3019 bb);
ac7c5af5 3020 }
ac7c5af5 3021 }
c4c81601
RK
3022 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
3023 /* Each setting of this register outside of this block
3024 must be marked in the set of kills in this block. */
e0082a72 3025 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
7506f491 3026 }
7506f491
DE
3027}
3028
589005ff 3029/* Compute the reaching definitions as in
7506f491
DE
3030 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
3031 Chapter 10. It is the same algorithm as used for computing available
3032 expressions but applied to the gens and kills of reaching definitions. */
3033
3034static void
3035compute_rd ()
3036{
e0082a72
ZD
3037 int changed, passes;
3038 basic_block bb;
7506f491 3039
e0082a72
ZD
3040 FOR_EACH_BB (bb)
3041 sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);
7506f491
DE
3042
3043 passes = 0;
3044 changed = 1;
3045 while (changed)
3046 {
3047 changed = 0;
e0082a72 3048 FOR_EACH_BB (bb)
ac7c5af5 3049 {
e0082a72
ZD
3050 sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
3051 changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
3052 reaching_defs[bb->index], rd_kill[bb->index]);
ac7c5af5 3053 }
7506f491
DE
3054 passes++;
3055 }
3056
3057 if (gcse_file)
3058 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
3059}
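/* In equation form, the loop above iterates the standard reaching
   definitions dataflow to a fixed point (a restatement in terms of the
   arrays used here):

	reaching_defs[BB] = union over predecessors P of rd_out[P]
	rd_out[BB]        = rd_gen[BB] | (reaching_defs[BB] & ~rd_kill[BB])

   sbitmap_union_of_preds computes the first equation and
   sbitmap_union_of_diff_cg the second, reporting whether rd_out changed.  */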
3060\f
3061/* Classic GCSE available expression support. */
3062
3063/* Allocate memory for available expression computation. */
3064
3065static void
3066alloc_avail_expr_mem (n_blocks, n_exprs)
3067 int n_blocks, n_exprs;
3068{
3069 ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
d55bc081 3070 sbitmap_vector_zero (ae_kill, n_blocks);
7506f491
DE
3071
3072 ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
d55bc081 3073 sbitmap_vector_zero (ae_gen, n_blocks);
7506f491
DE
3074
3075 ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
d55bc081 3076 sbitmap_vector_zero (ae_in, n_blocks);
7506f491
DE
3077
3078 ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
d55bc081 3079 sbitmap_vector_zero (ae_out, n_blocks);
7506f491
DE
3080}
3081
3082static void
3083free_avail_expr_mem ()
3084{
5a660bff
DB
3085 sbitmap_vector_free (ae_kill);
3086 sbitmap_vector_free (ae_gen);
3087 sbitmap_vector_free (ae_in);
3088 sbitmap_vector_free (ae_out);
7506f491
DE
3089}
3090
3091/* Compute the set of available expressions generated in each basic block. */
3092
3093static void
3094compute_ae_gen ()
3095{
2e653e39 3096 unsigned int i;
c4c81601
RK
3097 struct expr *expr;
3098 struct occr *occr;
7506f491
DE
3099
3100 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
3101 This is all we have to do because an expression is not recorded if it
3102 is not available, and the only expressions we want to work with are the
3103 ones that are recorded. */
7506f491 3104 for (i = 0; i < expr_hash_table_size; i++)
c4c81601
RK
3105 for (expr = expr_hash_table[i]; expr != 0; expr = expr->next_same_hash)
3106 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
3107 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
7506f491
DE
3108}
3109
3110/* Return non-zero if expression X is killed in BB. */
3111
3112static int
3113expr_killed_p (x, bb)
3114 rtx x;
e2d2ed72 3115 basic_block bb;
7506f491 3116{
c4c81601 3117 int i, j;
7506f491 3118 enum rtx_code code;
6f7d635c 3119 const char *fmt;
7506f491 3120
7506f491
DE
3121 if (x == 0)
3122 return 1;
3123
3124 code = GET_CODE (x);
3125 switch (code)
3126 {
3127 case REG:
0b17ab2f 3128 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
7506f491
DE
3129
3130 case MEM:
a13d4ebf
AM
3131 if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
3132 return 1;
c4c81601
RK
3133 else
3134 return expr_killed_p (XEXP (x, 0), bb);
7506f491
DE
3135
3136 case PC:
3137 case CC0: /*FIXME*/
3138 case CONST:
3139 case CONST_INT:
3140 case CONST_DOUBLE:
69ef87e2 3141 case CONST_VECTOR:
7506f491
DE
3142 case SYMBOL_REF:
3143 case LABEL_REF:
3144 case ADDR_VEC:
3145 case ADDR_DIFF_VEC:
3146 return 0;
3147
3148 default:
3149 break;
3150 }
3151
c4c81601 3152 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
3153 {
3154 if (fmt[i] == 'e')
3155 {
7506f491
DE
3156 /* If we are about to do the last recursive call
3157 needed at this level, change it into iteration.
3158 This function is called enough to be worth it. */
3159 if (i == 0)
c4c81601
RK
3160 return expr_killed_p (XEXP (x, i), bb);
3161 else if (expr_killed_p (XEXP (x, i), bb))
7506f491
DE
3162 return 1;
3163 }
3164 else if (fmt[i] == 'E')
c4c81601
RK
3165 for (j = 0; j < XVECLEN (x, i); j++)
3166 if (expr_killed_p (XVECEXP (x, i, j), bb))
3167 return 1;
7506f491
DE
3168 }
3169
3170 return 0;
3171}
3172
3173/* Compute the set of available expressions killed in each basic block. */
3174
3175static void
a42cd965
AM
3176compute_ae_kill (ae_gen, ae_kill)
3177 sbitmap *ae_gen, *ae_kill;
7506f491 3178{
e0082a72 3179 basic_block bb;
2e653e39 3180 unsigned int i;
c4c81601 3181 struct expr *expr;
7506f491 3182
e0082a72 3183 FOR_EACH_BB (bb)
c4c81601
RK
3184 for (i = 0; i < expr_hash_table_size; i++)
3185 for (expr = expr_hash_table[i]; expr; expr = expr->next_same_hash)
7506f491 3186 {
c4c81601 3187 /* Skip EXPR if generated in this block. */
e0082a72 3188 if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
c4c81601 3189 continue;
7506f491 3190
e0082a72
ZD
3191 if (expr_killed_p (expr->expr, bb))
3192 SET_BIT (ae_kill[bb->index], expr->bitmap_index);
7506f491 3193 }
7506f491 3194}
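/* Put another way: ae_kill[BB] gets a bit for every recorded expression
   that is clobbered somewhere in BB, except for expressions the block
   itself generates, which compute_ae_gen has already flagged.  */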
7506f491
DE
3195\f
3196/* Actually perform the Classic GCSE optimizations. */
3197
3198/* Return non-zero if occurrence OCCR of expression EXPR reaches block BB.
3199
3200 CHECK_SELF_LOOP is non-zero if we should consider a block reaching itself
3201 as a positive reach. We want to do this when there are two computations
3202 of the expression in the block.
3203
3204 VISITED is a pointer to a working buffer for tracking which BB's have
3205 been visited. It is NULL for the top-level call.
3206
3207 We treat reaching expressions that go through blocks containing the same
3208 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3209 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3210 2 as not reaching. The intent is to improve the probability of finding
3211 only one reaching expression and to reduce register lifetimes by picking
3212 the closest such expression. */
3213
3214static int
283a2545 3215expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited)
7506f491
DE
3216 struct occr *occr;
3217 struct expr *expr;
e2d2ed72 3218 basic_block bb;
7506f491
DE
3219 int check_self_loop;
3220 char *visited;
3221{
36349f8b 3222 edge pred;
7506f491 3223
e2d2ed72 3224 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
7506f491 3225 {
e2d2ed72 3226 basic_block pred_bb = pred->src;
7506f491 3227
0b17ab2f 3228 if (visited[pred_bb->index])
c4c81601 3229 /* This predecessor has already been visited. Nothing to do. */
7506f491 3230 ;
7506f491 3231 else if (pred_bb == bb)
ac7c5af5 3232 {
7506f491
DE
3233 /* BB loops on itself. */
3234 if (check_self_loop
0b17ab2f
RH
3235 && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
3236 && BLOCK_NUM (occr->insn) == pred_bb->index)
7506f491 3237 return 1;
c4c81601 3238
0b17ab2f 3239 visited[pred_bb->index] = 1;
ac7c5af5 3240 }
c4c81601 3241
7506f491 3242 /* Ignore this predecessor if it kills the expression. */
0b17ab2f
RH
3243 else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
3244 visited[pred_bb->index] = 1;
c4c81601 3245
7506f491 3246 /* Does this predecessor generate this expression? */
0b17ab2f 3247 else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
7506f491
DE
3248 {
3249 /* Is this the occurrence we're looking for?
3250 Note that there's only one generating occurrence per block
3251 so we just need to check the block number. */
0b17ab2f 3252 if (BLOCK_NUM (occr->insn) == pred_bb->index)
7506f491 3253 return 1;
c4c81601 3254
0b17ab2f 3255 visited[pred_bb->index] = 1;
7506f491 3256 }
c4c81601 3257
7506f491
DE
3258 /* Neither gen nor kill. */
3259 else
ac7c5af5 3260 {
0b17ab2f 3261 visited[pred_bb->index] = 1;
589005ff 3262 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
283a2545 3263 visited))
c4c81601 3264
7506f491 3265 return 1;
ac7c5af5 3266 }
7506f491
DE
3267 }
3268
3269 /* All paths have been checked. */
3270 return 0;
3271}
3272
283a2545 3273/* This wrapper for expr_reaches_here_p_work() is to ensure that any
dc297297 3274 memory allocated for that function is returned. */
283a2545
RL
3275
3276static int
3277expr_reaches_here_p (occr, expr, bb, check_self_loop)
3278 struct occr *occr;
3279 struct expr *expr;
e2d2ed72 3280 basic_block bb;
283a2545
RL
3281 int check_self_loop;
3282{
3283 int rval;
d55bc081 3284 char *visited = (char *) xcalloc (last_basic_block, 1);
283a2545 3285
c4c81601 3286 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
589005ff 3287
283a2545 3288 free (visited);
c4c81601 3289 return rval;
283a2545
RL
3290}
3291
7506f491
DE
3292/* Return the instruction that computes EXPR that reaches INSN's basic block.
3293 If there is more than one such instruction, return NULL.
3294
3295 Called only by handle_avail_expr. */
3296
3297static rtx
3298computing_insn (expr, insn)
3299 struct expr *expr;
3300 rtx insn;
3301{
e2d2ed72 3302 basic_block bb = BLOCK_FOR_INSN (insn);
7506f491
DE
3303
3304 if (expr->avail_occr->next == NULL)
589005ff 3305 {
e2d2ed72 3306 if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
c4c81601
RK
3307 /* The available expression is actually itself
3308 (i.e. a loop in the flow graph) so do nothing. */
3309 return NULL;
3310
7506f491
DE
3311 /* (FIXME) Case that we found a pattern that was created by
3312 a substitution that took place. */
3313 return expr->avail_occr->insn;
3314 }
3315 else
3316 {
3317 /* Pattern is computed more than once.
589005ff 3318 Search backwards from this insn to see how many of these
7506f491
DE
3319 computations actually reach this insn. */
3320 struct occr *occr;
3321 rtx insn_computes_expr = NULL;
3322 int can_reach = 0;
3323
3324 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
3325 {
e2d2ed72 3326 if (BLOCK_FOR_INSN (occr->insn) == bb)
7506f491
DE
3327 {
3328 /* The expression is generated in this block.
3329 The only time we care about this is when the expression
3330 is generated later in the block [and thus there's a loop].
3331 We let the normal cse pass handle the other cases. */
c4c81601
RK
3332 if (INSN_CUID (insn) < INSN_CUID (occr->insn)
3333 && expr_reaches_here_p (occr, expr, bb, 1))
7506f491
DE
3334 {
3335 can_reach++;
3336 if (can_reach > 1)
3337 return NULL;
c4c81601 3338
7506f491
DE
3339 insn_computes_expr = occr->insn;
3340 }
3341 }
c4c81601
RK
3342 else if (expr_reaches_here_p (occr, expr, bb, 0))
3343 {
3344 can_reach++;
3345 if (can_reach > 1)
3346 return NULL;
3347
3348 insn_computes_expr = occr->insn;
3349 }
7506f491
DE
3350 }
3351
3352 if (insn_computes_expr == NULL)
3353 abort ();
c4c81601 3354
7506f491
DE
3355 return insn_computes_expr;
3356 }
3357}
3358
3359/* Return non-zero if the definition in DEF_INSN can reach INSN.
3360 Only called by can_disregard_other_sets. */
3361
3362static int
3363def_reaches_here_p (insn, def_insn)
3364 rtx insn, def_insn;
3365{
3366 rtx reg;
3367
3368 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3369 return 1;
3370
3371 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3372 {
3373 if (INSN_CUID (def_insn) < INSN_CUID (insn))
ac7c5af5 3374 {
7506f491
DE
3375 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3376 return 1;
c4c81601 3377 else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
7506f491
DE
3378 reg = XEXP (PATTERN (def_insn), 0);
3379 else if (GET_CODE (PATTERN (def_insn)) == SET)
3380 reg = SET_DEST (PATTERN (def_insn));
3381 else
3382 abort ();
c4c81601 3383
7506f491
DE
3384 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3385 }
3386 else
3387 return 0;
3388 }
3389
3390 return 0;
3391}
3392
c4c81601
RK
3393/* Return non-zero if *ADDR_THIS_REG can only have one value at INSN. The
3394 value returned is the number of definitions that reach INSN. Returning a
3395 value of zero means that [maybe] more than one definition reaches INSN and
3396 the caller can't perform whatever optimization it is trying. i.e. it is
3397 always safe to return zero. */
7506f491
DE
3398
3399static int
3400can_disregard_other_sets (addr_this_reg, insn, for_combine)
3401 struct reg_set **addr_this_reg;
3402 rtx insn;
3403 int for_combine;
3404{
3405 int number_of_reaching_defs = 0;
c4c81601 3406 struct reg_set *this_reg;
7506f491 3407
c4c81601
RK
3408 for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
3409 if (def_reaches_here_p (insn, this_reg->insn))
3410 {
3411 number_of_reaching_defs++;
3412 /* Ignore parallels for now. */
3413 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3414 return 0;
3415
3416 if (!for_combine
3417 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3418 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3419 SET_SRC (PATTERN (insn)))))
3420 /* A setting of the reg to a different value reaches INSN. */
3421 return 0;
3422
3423 if (number_of_reaching_defs > 1)
3424 {
 3425	      /* If in this setting the value the register is being set to is
 3426		 equal to the previous value the register was set to, and this
 3427		 setting reaches the insn we are trying to do the substitution
 3428		 on, then we are OK.  */
3429 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
7506f491 3430 return 0;
c4c81601
RK
3431 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3432 SET_SRC (PATTERN (insn))))
3433 return 0;
3434 }
7506f491 3435
589005ff 3436 *addr_this_reg = this_reg;
c4c81601 3437 }
7506f491
DE
3438
3439 return number_of_reaching_defs;
3440}
3441
3442/* Expression computed by insn is available and the substitution is legal,
3443 so try to perform the substitution.
3444
3445 The result is non-zero if any changes were made. */
3446
3447static int
3448handle_avail_expr (insn, expr)
3449 rtx insn;
3450 struct expr *expr;
3451{
0631e0bf 3452 rtx pat, insn_computes_expr, expr_set;
7506f491
DE
3453 rtx to;
3454 struct reg_set *this_reg;
3455 int found_setting, use_src;
3456 int changed = 0;
3457
3458 /* We only handle the case where one computation of the expression
3459 reaches this instruction. */
3460 insn_computes_expr = computing_insn (expr, insn);
3461 if (insn_computes_expr == NULL)
3462 return 0;
0631e0bf
JH
3463 expr_set = single_set (insn_computes_expr);
3464 if (!expr_set)
3465 abort ();
7506f491
DE
3466
3467 found_setting = 0;
3468 use_src = 0;
3469
3470 /* At this point we know only one computation of EXPR outside of this
3471 block reaches this insn. Now try to find a register that the
3472 expression is computed into. */
0631e0bf 3473 if (GET_CODE (SET_SRC (expr_set)) == REG)
7506f491
DE
3474 {
3475 /* This is the case when the available expression that reaches
3476 here has already been handled as an available expression. */
770ae6cc 3477 unsigned int regnum_for_replacing
0631e0bf 3478 = REGNO (SET_SRC (expr_set));
c4c81601 3479
7506f491
DE
3480 /* If the register was created by GCSE we can't use `reg_set_table',
3481 however we know it's set only once. */
3482 if (regnum_for_replacing >= max_gcse_regno
3483 /* If the register the expression is computed into is set only once,
3484 or only one set reaches this insn, we can use it. */
3485 || (((this_reg = reg_set_table[regnum_for_replacing]),
3486 this_reg->next == NULL)
3487 || can_disregard_other_sets (&this_reg, insn, 0)))
8e42ace1
KH
3488 {
3489 use_src = 1;
3490 found_setting = 1;
3491 }
7506f491
DE
3492 }
3493
3494 if (!found_setting)
3495 {
770ae6cc 3496 unsigned int regnum_for_replacing
0631e0bf 3497 = REGNO (SET_DEST (expr_set));
c4c81601 3498
7506f491
DE
3499 /* This shouldn't happen. */
3500 if (regnum_for_replacing >= max_gcse_regno)
3501 abort ();
c4c81601 3502
7506f491 3503 this_reg = reg_set_table[regnum_for_replacing];
c4c81601 3504
7506f491
DE
3505 /* If the register the expression is computed into is set only once,
3506 or only one set reaches this insn, use it. */
3507 if (this_reg->next == NULL
3508 || can_disregard_other_sets (&this_reg, insn, 0))
3509 found_setting = 1;
3510 }
3511
3512 if (found_setting)
3513 {
3514 pat = PATTERN (insn);
3515 if (use_src)
0631e0bf 3516 to = SET_SRC (expr_set);
7506f491 3517 else
0631e0bf 3518 to = SET_DEST (expr_set);
7506f491
DE
3519 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3520
3521 /* We should be able to ignore the return code from validate_change but
3522 to play it safe we check. */
3523 if (changed)
3524 {
3525 gcse_subst_count++;
3526 if (gcse_file != NULL)
3527 {
c4c81601
RK
3528 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
3529 INSN_UID (insn));
3530 fprintf (gcse_file, " reg %d %s insn %d\n",
3531 REGNO (to), use_src ? "from" : "set in",
7506f491
DE
3532 INSN_UID (insn_computes_expr));
3533 }
7506f491
DE
3534 }
3535 }
c4c81601 3536
7506f491
DE
3537 /* The register that the expr is computed into is set more than once. */
3538 else if (1 /*expensive_op(this_pattrn->op) && do_expensive_gcse)*/)
3539 {
3540 /* Insert an insn after insnx that copies the reg set in insnx
3541 into a new pseudo register call this new register REGN.
3542 From insnb until end of basic block or until REGB is set
3543 replace all uses of REGB with REGN. */
3544 rtx new_insn;
3545
0631e0bf 3546 to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));
7506f491
DE
3547
3548 /* Generate the new insn. */
3549 /* ??? If the change fails, we return 0, even though we created
3550 an insn. I think this is ok. */
9e6a5703
JC
3551 new_insn
3552 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
0631e0bf 3553 SET_DEST (expr_set)),
c4c81601
RK
3554 insn_computes_expr);
3555
7506f491
DE
3556 /* Keep register set table up to date. */
3557 record_one_set (REGNO (to), new_insn);
3558
3559 gcse_create_count++;
3560 if (gcse_file != NULL)
ac7c5af5 3561 {
c4c81601 3562 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
7506f491 3563 INSN_UID (NEXT_INSN (insn_computes_expr)),
c4c81601
RK
3564 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
3565 fprintf (gcse_file, ", computed in insn %d,\n",
7506f491 3566 INSN_UID (insn_computes_expr));
c4c81601
RK
3567 fprintf (gcse_file, " into newly allocated reg %d\n",
3568 REGNO (to));
ac7c5af5 3569 }
7506f491
DE
3570
3571 pat = PATTERN (insn);
3572
3573 /* Do register replacement for INSN. */
3574 changed = validate_change (insn, &SET_SRC (pat),
c4c81601
RK
3575 SET_DEST (PATTERN
3576 (NEXT_INSN (insn_computes_expr))),
7506f491
DE
3577 0);
3578
3579 /* We should be able to ignore the return code from validate_change but
3580 to play it safe we check. */
3581 if (changed)
3582 {
3583 gcse_subst_count++;
3584 if (gcse_file != NULL)
3585 {
c4c81601
RK
3586 fprintf (gcse_file,
3587 "GCSE: Replacing the source in insn %d with reg %d ",
7506f491 3588 INSN_UID (insn),
c4c81601
RK
3589 REGNO (SET_DEST (PATTERN (NEXT_INSN
3590 (insn_computes_expr)))));
3591 fprintf (gcse_file, "set in insn %d\n",
589005ff 3592 INSN_UID (insn_computes_expr));
7506f491 3593 }
7506f491
DE
3594 }
3595 }
3596
3597 return changed;
3598}
3599
c4c81601
RK
3600/* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3601 the dataflow analysis has been done.
7506f491
DE
3602
3603 The result is non-zero if a change was made. */
3604
3605static int
3606classic_gcse ()
3607{
e0082a72 3608 int changed;
7506f491 3609 rtx insn;
e0082a72 3610 basic_block bb;
7506f491
DE
3611
3612 /* Note we start at block 1. */
3613
e0082a72
ZD
3614 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3615 return 0;
3616
7506f491 3617 changed = 0;
e0082a72 3618 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
7506f491
DE
3619 {
3620 /* Reset tables used to keep track of what's still valid [since the
3621 start of the block]. */
3622 reset_opr_set_tables ();
3623
e0082a72
ZD
3624 for (insn = bb->head;
3625 insn != NULL && insn != NEXT_INSN (bb->end);
7506f491
DE
3626 insn = NEXT_INSN (insn))
3627 {
3628 /* Is insn of form (set (pseudo-reg) ...)? */
7506f491
DE
3629 if (GET_CODE (insn) == INSN
3630 && GET_CODE (PATTERN (insn)) == SET
3631 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3632 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3633 {
3634 rtx pat = PATTERN (insn);
3635 rtx src = SET_SRC (pat);
3636 struct expr *expr;
3637
3638 if (want_to_gcse_p (src)
3639 /* Is the expression recorded? */
3640 && ((expr = lookup_expr (src)) != NULL)
3641 /* Is the expression available [at the start of the
3642 block]? */
e0082a72 3643 && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
7506f491
DE
3644 /* Are the operands unchanged since the start of the
3645 block? */
3646 && oprs_not_set_p (src, insn))
3647 changed |= handle_avail_expr (insn, expr);
3648 }
3649
3650 /* Keep track of everything modified by this insn. */
3651 /* ??? Need to be careful w.r.t. mods done to INSN. */
2c3c49de 3652 if (INSN_P (insn))
7506f491 3653 mark_oprs_set (insn);
ac7c5af5 3654 }
7506f491
DE
3655 }
3656
3657 return changed;
3658}
3659
3660/* Top level routine to perform one classic GCSE pass.
3661
3662 Return non-zero if a change was made. */
3663
3664static int
b5ce41ff 3665one_classic_gcse_pass (pass)
7506f491
DE
3666 int pass;
3667{
3668 int changed = 0;
3669
3670 gcse_subst_count = 0;
3671 gcse_create_count = 0;
3672
3673 alloc_expr_hash_table (max_cuid);
d55bc081 3674 alloc_rd_mem (last_basic_block, max_cuid);
b5ce41ff 3675 compute_expr_hash_table ();
7506f491
DE
3676 if (gcse_file)
3677 dump_hash_table (gcse_file, "Expression", expr_hash_table,
3678 expr_hash_table_size, n_exprs);
c4c81601 3679
7506f491
DE
3680 if (n_exprs > 0)
3681 {
3682 compute_kill_rd ();
3683 compute_rd ();
d55bc081 3684 alloc_avail_expr_mem (last_basic_block, n_exprs);
7506f491 3685 compute_ae_gen ();
a42cd965 3686 compute_ae_kill (ae_gen, ae_kill);
bd0eaec2 3687 compute_available (ae_gen, ae_kill, ae_out, ae_in);
7506f491
DE
3688 changed = classic_gcse ();
3689 free_avail_expr_mem ();
3690 }
c4c81601 3691
7506f491
DE
3692 free_rd_mem ();
3693 free_expr_hash_table ();
3694
3695 if (gcse_file)
3696 {
3697 fprintf (gcse_file, "\n");
c4c81601
RK
3698 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
3699 current_function_name, pass, bytes_used, gcse_subst_count);
3700 fprintf (gcse_file, "%d insns created\n", gcse_create_count);
7506f491
DE
3701 }
3702
3703 return changed;
3704}
3705\f
3706/* Compute copy/constant propagation working variables. */
3707
3708/* Local properties of assignments. */
7506f491
DE
3709static sbitmap *cprop_pavloc;
3710static sbitmap *cprop_absaltered;
3711
3712/* Global properties of assignments (computed from the local properties). */
7506f491
DE
3713static sbitmap *cprop_avin;
3714static sbitmap *cprop_avout;
3715
c4c81601
RK
3716/* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
3717 basic blocks. N_SETS is the number of sets. */
7506f491
DE
3718
3719static void
3720alloc_cprop_mem (n_blocks, n_sets)
3721 int n_blocks, n_sets;
3722{
3723 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3724 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3725
3726 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3727 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3728}
3729
3730/* Free vars used by copy/const propagation. */
3731
3732static void
3733free_cprop_mem ()
3734{
5a660bff
DB
3735 sbitmap_vector_free (cprop_pavloc);
3736 sbitmap_vector_free (cprop_absaltered);
3737 sbitmap_vector_free (cprop_avin);
3738 sbitmap_vector_free (cprop_avout);
7506f491
DE
3739}
3740
c4c81601
RK
3741/* For each block, compute whether X is transparent. X is either an
3742 expression or an assignment [though we don't care which, for this context
3743 an assignment is treated as an expression]. For each block where an
3744 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
3745 bit in BMAP. */
7506f491
DE
3746
3747static void
3748compute_transp (x, indx, bmap, set_p)
3749 rtx x;
3750 int indx;
3751 sbitmap *bmap;
3752 int set_p;
3753{
e0082a72
ZD
3754 int i, j;
3755 basic_block bb;
7506f491 3756 enum rtx_code code;
c4c81601 3757 reg_set *r;
6f7d635c 3758 const char *fmt;
7506f491 3759
c4c81601
RK
3760 /* repeat is used to turn tail-recursion into iteration since GCC
3761 can't do it when there's no return value. */
7506f491
DE
3762 repeat:
3763
3764 if (x == 0)
3765 return;
3766
3767 code = GET_CODE (x);
3768 switch (code)
3769 {
3770 case REG:
c4c81601
RK
3771 if (set_p)
3772 {
3773 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3774 {
e0082a72
ZD
3775 FOR_EACH_BB (bb)
3776 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3777 SET_BIT (bmap[bb->index], indx);
c4c81601
RK
3778 }
3779 else
3780 {
3781 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3782 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3783 }
3784 }
3785 else
3786 {
3787 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3788 {
e0082a72
ZD
3789 FOR_EACH_BB (bb)
3790 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3791 RESET_BIT (bmap[bb->index], indx);
c4c81601
RK
3792 }
3793 else
3794 {
3795 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3796 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3797 }
3798 }
7506f491 3799
c4c81601 3800 return;
7506f491
DE
3801
3802 case MEM:
e0082a72 3803 FOR_EACH_BB (bb)
a13d4ebf 3804 {
e0082a72 3805 rtx list_entry = canon_modify_mem_list[bb->index];
a13d4ebf
AM
3806
3807 while (list_entry)
3808 {
3809 rtx dest, dest_addr;
3810
3811 if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
3812 {
3813 if (set_p)
e0082a72 3814 SET_BIT (bmap[bb->index], indx);
a13d4ebf 3815 else
e0082a72 3816 RESET_BIT (bmap[bb->index], indx);
a13d4ebf
AM
3817 break;
3818 }
3819 /* LIST_ENTRY must be an INSN of some kind that sets memory.
3820 Examine each hunk of memory that is modified. */
3821
3822 dest = XEXP (list_entry, 0);
3823 list_entry = XEXP (list_entry, 1);
3824 dest_addr = XEXP (list_entry, 0);
589005ff 3825
a13d4ebf
AM
3826 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
3827 x, rtx_addr_varies_p))
3828 {
3829 if (set_p)
e0082a72 3830 SET_BIT (bmap[bb->index], indx);
a13d4ebf 3831 else
e0082a72 3832 RESET_BIT (bmap[bb->index], indx);
a13d4ebf
AM
3833 break;
3834 }
3835 list_entry = XEXP (list_entry, 1);
3836 }
3837 }
c4c81601 3838
7506f491
DE
3839 x = XEXP (x, 0);
3840 goto repeat;
3841
3842 case PC:
3843 case CC0: /*FIXME*/
3844 case CONST:
3845 case CONST_INT:
3846 case CONST_DOUBLE:
69ef87e2 3847 case CONST_VECTOR:
7506f491
DE
3848 case SYMBOL_REF:
3849 case LABEL_REF:
3850 case ADDR_VEC:
3851 case ADDR_DIFF_VEC:
3852 return;
3853
3854 default:
3855 break;
3856 }
3857
c4c81601 3858 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
3859 {
3860 if (fmt[i] == 'e')
3861 {
7506f491
DE
3862 /* If we are about to do the last recursive call
3863 needed at this level, change it into iteration.
3864 This function is called enough to be worth it. */
3865 if (i == 0)
3866 {
c4c81601 3867 x = XEXP (x, i);
7506f491
DE
3868 goto repeat;
3869 }
c4c81601
RK
3870
3871 compute_transp (XEXP (x, i), indx, bmap, set_p);
7506f491
DE
3872 }
3873 else if (fmt[i] == 'E')
c4c81601
RK
3874 for (j = 0; j < XVECLEN (x, i); j++)
3875 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
7506f491
DE
3876 }
3877}
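
/* For illustration: transparency is the complement of the kill property
   above.  For an expression like (plus (reg 65) (reg 66)), compute_transp
   clears (or sets, for SET_P) the INDX bit in every block that modifies
   (reg 65) or (reg 66), or that contains a store possibly aliasing a MEM
   within the expression; in all other blocks the expression remains
   transparent.  */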
3878
7506f491
DE
3879/* Top level routine to do the dataflow analysis needed by copy/const
3880 propagation. */
3881
3882static void
3883compute_cprop_data ()
3884{
b5ce41ff 3885 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, 1);
ce724250
JL
3886 compute_available (cprop_pavloc, cprop_absaltered,
3887 cprop_avout, cprop_avin);
7506f491
DE
3888}
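
/* For illustration, assuming the usual available-expressions formulation
   that compute_available is expected to solve:

     AVOUT(b) = PAVLOC(b) | (AVIN(b) & ~ABSALTERED(b))
     AVIN(b)  = intersection of AVOUT(p) over all predecessors p of b

   iterated to a fixed point with AVIN of the entry block empty.  A set is
   then usable at INSN when its bit is on in cprop_avin for INSN's block
   and its operands are unchanged from the block start to INSN.  */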
3889\f
3890/* Copy/constant propagation. */
3891
7506f491
DE
3892/* Maximum number of register uses in an insn that we handle. */
3893#define MAX_USES 8
3894
3895/* Table of uses found in an insn.
3896 Allocated statically to avoid alloc/free complexity and overhead. */
3897static struct reg_use reg_use_table[MAX_USES];
3898
3899/* Index into `reg_use_table' while building it. */
3900static int reg_use_count;
3901
c4c81601
RK
3902/* Set up a list of register numbers used in INSN. The found uses are stored
3903 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
3904 and contains the number of uses in the table upon exit.
7506f491 3905
c4c81601
RK
3906 ??? If a register appears multiple times we will record it multiple times.
3907 This doesn't hurt anything but it will slow things down. */
7506f491
DE
3908
3909static void
9e71c818
JH
3910find_used_regs (xptr, data)
3911 rtx *xptr;
3912 void *data ATTRIBUTE_UNUSED;
7506f491 3913{
c4c81601 3914 int i, j;
7506f491 3915 enum rtx_code code;
6f7d635c 3916 const char *fmt;
9e71c818 3917 rtx x = *xptr;
7506f491 3918
c4c81601
RK
3919 /* repeat is used to turn tail-recursion into iteration since GCC
3920 can't do it when there's no return value. */
7506f491 3921 repeat:
7506f491
DE
3922 if (x == 0)
3923 return;
3924
3925 code = GET_CODE (x);
9e71c818 3926 if (REG_P (x))
7506f491 3927 {
7506f491
DE
3928 if (reg_use_count == MAX_USES)
3929 return;
c4c81601 3930
7506f491
DE
3931 reg_use_table[reg_use_count].reg_rtx = x;
3932 reg_use_count++;
7506f491
DE
3933 }
3934
3935 /* Recursively scan the operands of this expression. */
3936
c4c81601 3937 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
3938 {
3939 if (fmt[i] == 'e')
3940 {
3941 /* If we are about to do the last recursive call
3942 needed at this level, change it into iteration.
3943 This function is called enough to be worth it. */
3944 if (i == 0)
3945 {
3946 x = XEXP (x, 0);
3947 goto repeat;
3948 }
c4c81601 3949
9e71c818 3950 find_used_regs (&XEXP (x, i), data);
7506f491
DE
3951 }
3952 else if (fmt[i] == 'E')
c4c81601 3953 for (j = 0; j < XVECLEN (x, i); j++)
9e71c818 3954 find_used_regs (&XVECEXP (x, i, j), data);
7506f491
DE
3955 }
3956}
3957
3958/* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
 3959   Returns non-zero if successful.  */
3960
3961static int
3962try_replace_reg (from, to, insn)
3963 rtx from, to, insn;
3964{
172890a2 3965 rtx note = find_reg_equal_equiv_note (insn);
fb0c0a12 3966 rtx src = 0;
172890a2
RK
3967 int success = 0;
3968 rtx set = single_set (insn);
833fc3ad 3969
2b773ee2
JH
3970 validate_replace_src_group (from, to, insn);
3971 if (num_changes_pending () && apply_change_group ())
3972 success = 1;
9e71c818 3973
f305679f 3974 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
833fc3ad 3975 {
f305679f
JH
3976 /* If above failed and this is a single set, try to simplify the source of
3977 the set given our substitution. We could perhaps try this for multiple
3978 SETs, but it probably won't buy us anything. */
172890a2
RK
3979 src = simplify_replace_rtx (SET_SRC (set), from, to);
3980
9e71c818
JH
3981 if (!rtx_equal_p (src, SET_SRC (set))
3982 && validate_change (insn, &SET_SRC (set), src, 0))
172890a2 3983 success = 1;
833fc3ad 3984
f305679f
JH
3985 /* If we've failed to do replacement, have a single SET, and don't already
3986 have a note, add a REG_EQUAL note to not lose information. */
3987 if (!success && note == 0 && set != 0)
3988 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
3989 }
e251e2a2 3990
172890a2
RK
3991 /* If there is already a NOTE, update the expression in it with our
3992 replacement. */
3993 else if (note != 0)
3994 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
833fc3ad 3995
172890a2
RK
 3996  /* REG_EQUAL may get simplified into a register.
 3997     We don't allow that.  Remove that note.  This code ought
 3998     not to happen, because previous code ought to synthesize a
 3999     reg-reg move, but be on the safe side.  */
4000 if (note && REG_P (XEXP (note, 0)))
4001 remove_note (insn, note);
833fc3ad 4002
833fc3ad
JH
4003 return success;
4004}
c4c81601
RK
4005
4006/* Find a set of REGNOs that are available on entry to INSN's block. Returns
 4007   NULL if no such set is found.  */
7506f491
DE
4008
4009static struct expr *
4010find_avail_set (regno, insn)
4011 int regno;
4012 rtx insn;
4013{
cafba495
BS
4014 /* SET1 contains the last set found that can be returned to the caller for
4015 use in a substitution. */
4016 struct expr *set1 = 0;
589005ff 4017
cafba495
BS
4018 /* Loops are not possible here. To get a loop we would need two sets
 4019     available at the start of the block containing INSN.  I.e. we would
4020 need two sets like this available at the start of the block:
4021
4022 (set (reg X) (reg Y))
4023 (set (reg Y) (reg X))
4024
4025 This can not happen since the set of (reg Y) would have killed the
4026 set of (reg X) making it unavailable at the start of this block. */
4027 while (1)
8e42ace1 4028 {
cafba495
BS
4029 rtx src;
4030 struct expr *set = lookup_set (regno, NULL_RTX);
4031
4032 /* Find a set that is available at the start of the block
4033 which contains INSN. */
4034 while (set)
4035 {
4036 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
4037 break;
4038 set = next_set (regno, set);
4039 }
7506f491 4040
cafba495
BS
4041 /* If no available set was found we've reached the end of the
4042 (possibly empty) copy chain. */
4043 if (set == 0)
589005ff 4044 break;
cafba495
BS
4045
4046 if (GET_CODE (set->expr) != SET)
4047 abort ();
4048
4049 src = SET_SRC (set->expr);
4050
4051 /* We know the set is available.
4052 Now check that SRC is ANTLOC (i.e. none of the source operands
589005ff 4053 have changed since the start of the block).
cafba495
BS
4054
4055 If the source operand changed, we may still use it for the next
4056 iteration of this loop, but we may not use it for substitutions. */
c4c81601 4057
cafba495
BS
4058 if (CONSTANT_P (src) || oprs_not_set_p (src, insn))
4059 set1 = set;
4060
4061 /* If the source of the set is anything except a register, then
4062 we have reached the end of the copy chain. */
4063 if (GET_CODE (src) != REG)
7506f491 4064 break;
7506f491 4065
cafba495
BS
 4066      /* Follow the copy chain, i.e. start another iteration of the loop
4067 and see if we have an available copy into SRC. */
4068 regno = REGNO (src);
8e42ace1 4069 }
cafba495
BS
4070
4071 /* SET1 holds the last set that was available and anticipatable at
4072 INSN. */
4073 return set1;
7506f491
DE
4074}
4075
abd535b6 4076/* Subroutine of cprop_insn that tries to propagate constants into
0e3f0221
RS
4077 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
 4078   it is the instruction that immediately precedes JUMP, and must be a
818b6b7f 4079 single SET of a register. FROM is what we will try to replace,
0e3f0221 4080 SRC is the constant we will try to substitute for it. Returns nonzero
589005ff 4081 if a change was made. */
c4c81601 4082
abd535b6 4083static int
0e3f0221
RS
4084cprop_jump (bb, setcc, jump, from, src)
4085 basic_block bb;
4086 rtx setcc;
4087 rtx jump;
172890a2 4088 rtx from;
abd535b6
BS
4089 rtx src;
4090{
0e3f0221
RS
4091 rtx new, new_set;
4092 rtx set = pc_set (jump);
4093
4094 /* First substitute in the INSN condition as the SET_SRC of the JUMP,
4095 then substitute that given values in this expanded JUMP. */
4096 if (setcc != NULL)
b2f02503
RS
4097 {
4098 rtx setcc_set = single_set (setcc);
4099 new_set = simplify_replace_rtx (SET_SRC (set),
4100 SET_DEST (setcc_set),
4101 SET_SRC (setcc_set));
4102 }
0e3f0221
RS
4103 else
4104 new_set = set;
4105
4106 new = simplify_replace_rtx (new_set, from, src);
abd535b6
BS
4107
4108 /* If no simplification can be made, then try the next
4109 register. */
0e3f0221 4110 if (rtx_equal_p (new, new_set))
abd535b6 4111 return 0;
589005ff 4112
7d5ab30e 4113  /* If this is now a no-op, delete it; otherwise this must be a valid insn.  */
172890a2 4114 if (new == pc_rtx)
0e3f0221 4115 delete_insn (jump);
7d5ab30e 4116 else
abd535b6 4117 {
0e3f0221 4118 if (! validate_change (jump, &SET_SRC (set), new, 0))
7d5ab30e 4119 return 0;
abd535b6 4120
7d5ab30e
JH
4121 /* If this has turned into an unconditional jump,
4122 then put a barrier after it so that the unreachable
4123 code will be deleted. */
4124 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
0e3f0221 4125 emit_barrier_after (jump);
7d5ab30e 4126 }
abd535b6 4127
0e3f0221
RS
4128#ifdef HAVE_cc0
4129 /* Delete the cc0 setter. */
818b6b7f 4130 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
0e3f0221
RS
4131 delete_insn (setcc);
4132#endif
4133
172890a2 4134 run_jump_opt_after_gcse = 1;
c4c81601 4135
172890a2
RK
4136 const_prop_count++;
4137 if (gcse_file != NULL)
4138 {
4139 fprintf (gcse_file,
818b6b7f 4140 "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
0e3f0221 4141 REGNO (from), INSN_UID (jump));
172890a2
RK
4142 print_rtl (gcse_file, src);
4143 fprintf (gcse_file, "\n");
abd535b6 4144 }
0005550b 4145 purge_dead_edges (bb);
172890a2
RK
4146
4147 return 1;
abd535b6
BS
4148}
4149
ae860ff7
JH
4150static bool
4151constprop_register (insn, from, to, alter_jumps)
4152 rtx insn;
4153 rtx from;
4154 rtx to;
4155 int alter_jumps;
4156{
4157 rtx sset;
4158
4159 /* Check for reg or cc0 setting instructions followed by
4160 conditional branch instructions first. */
4161 if (alter_jumps
4162 && (sset = single_set (insn)) != NULL
4163 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
4164 {
4165 rtx dest = SET_DEST (sset);
4166 if ((REG_P (dest) || CC0_P (dest))
4167 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
4168 return 1;
4169 }
4170
4171 /* Handle normal insns next. */
4172 if (GET_CODE (insn) == INSN
4173 && try_replace_reg (from, to, insn))
4174 return 1;
4175
4176 /* Try to propagate a CONST_INT into a conditional jump.
4177 We're pretty specific about what we will handle in this
4178 code, we can extend this as necessary over time.
4179
4180 Right now the insn in question must look like
4181 (set (pc) (if_then_else ...)) */
4182 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
4183 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
4184 return 0;
4185}
4186
7506f491
DE
4187/* Perform constant and copy propagation on INSN.
4188 The result is non-zero if a change was made. */
4189
4190static int
ae860ff7 4191cprop_insn (insn, alter_jumps)
7506f491 4192 rtx insn;
b5ce41ff 4193 int alter_jumps;
7506f491
DE
4194{
4195 struct reg_use *reg_used;
4196 int changed = 0;
833fc3ad 4197 rtx note;
7506f491 4198
9e71c818 4199 if (!INSN_P (insn))
7506f491
DE
4200 return 0;
4201
4202 reg_use_count = 0;
9e71c818 4203 note_uses (&PATTERN (insn), find_used_regs, NULL);
589005ff 4204
172890a2 4205 note = find_reg_equal_equiv_note (insn);
833fc3ad 4206
dc297297 4207 /* We may win even when propagating constants into notes. */
833fc3ad 4208 if (note)
9e71c818 4209 find_used_regs (&XEXP (note, 0), NULL);
7506f491 4210
c4c81601
RK
4211 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4212 reg_used++, reg_use_count--)
7506f491 4213 {
770ae6cc 4214 unsigned int regno = REGNO (reg_used->reg_rtx);
7506f491
DE
4215 rtx pat, src;
4216 struct expr *set;
7506f491
DE
4217
4218 /* Ignore registers created by GCSE.
dc297297 4219 We do this because ... */
7506f491
DE
4220 if (regno >= max_gcse_regno)
4221 continue;
4222
4223 /* If the register has already been set in this block, there's
4224 nothing we can do. */
4225 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
4226 continue;
4227
4228 /* Find an assignment that sets reg_used and is available
4229 at the start of the block. */
4230 set = find_avail_set (regno, insn);
4231 if (! set)
4232 continue;
589005ff 4233
7506f491
DE
4234 pat = set->expr;
4235 /* ??? We might be able to handle PARALLELs. Later. */
4236 if (GET_CODE (pat) != SET)
4237 abort ();
c4c81601 4238
7506f491
DE
4239 src = SET_SRC (pat);
4240
e78d9500 4241 /* Constant propagation. */
b446e5a2 4242 if (CONSTANT_P (src))
7506f491 4243 {
ae860ff7 4244 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
7506f491
DE
4245 {
4246 changed = 1;
4247 const_prop_count++;
4248 if (gcse_file != NULL)
4249 {
ae860ff7
JH
4250 fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
4251 fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
e78d9500 4252 print_rtl (gcse_file, src);
7506f491
DE
4253 fprintf (gcse_file, "\n");
4254 }
7506f491
DE
4255 }
4256 }
4257 else if (GET_CODE (src) == REG
4258 && REGNO (src) >= FIRST_PSEUDO_REGISTER
4259 && REGNO (src) != regno)
4260 {
cafba495 4261 if (try_replace_reg (reg_used->reg_rtx, src, insn))
7506f491 4262 {
cafba495
BS
4263 changed = 1;
4264 copy_prop_count++;
4265 if (gcse_file != NULL)
7506f491 4266 {
ae860ff7 4267 fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
c4c81601
RK
4268 regno, INSN_UID (insn));
4269 fprintf (gcse_file, " with reg %d\n", REGNO (src));
7506f491 4270 }
cafba495
BS
4271
4272 /* The original insn setting reg_used may or may not now be
4273 deletable. We leave the deletion to flow. */
4274 /* FIXME: If it turns out that the insn isn't deletable,
4275 then we may have unnecessarily extended register lifetimes
4276 and made things worse. */
7506f491
DE
4277 }
4278 }
4279 }
4280
4281 return changed;
4282}
4283
ae860ff7
JH
4284static bool
4285do_local_cprop (x, insn, alter_jumps)
4286 rtx x;
4287 rtx insn;
4288 int alter_jumps;
4289{
4290 rtx newreg = NULL, newcnst = NULL;
4291
4292 /* Rule out USE instructions and ASM statements as we don't want to change the hard
4293 registers mentioned. */
4294 if (GET_CODE (x) == REG
4295 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
4296 || (GET_CODE (PATTERN (insn)) != USE && asm_noperands (PATTERN (insn)) < 0)))
4297 {
4298 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
4299 struct elt_loc_list *l;
4300
4301 if (!val)
4302 return false;
4303 for (l = val->locs; l; l = l->next)
4304 {
4305 rtx this_rtx = l->loc;
46690369
JH
4306 rtx note;
4307
ae860ff7
JH
4308 if (CONSTANT_P (this_rtx))
4309 newcnst = this_rtx;
46690369
JH
4310 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
 4311	      /* Don't copy propagate if it has an attached REG_EQUIV note.
 4312		 At this point only function parameters should have
 4313		 REG_EQUIV notes, and if the argument slot is used somewhere
 4314		 explicitly, it means the address of the parameter has been taken,
 4315		 so we should not extend the lifetime of the pseudo.  */
4316 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
4317 || GET_CODE (XEXP (note, 0)) != MEM))
ae860ff7
JH
4318 newreg = this_rtx;
4319 }
4320 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
4321 {
4322 if (gcse_file != NULL)
4323 {
4324 fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
4325 REGNO (x));
4326 fprintf (gcse_file, "insn %d with constant ",
4327 INSN_UID (insn));
4328 print_rtl (gcse_file, newcnst);
4329 fprintf (gcse_file, "\n");
4330 }
4331 const_prop_count++;
4332 return true;
4333 }
4334 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
4335 {
4336 if (gcse_file != NULL)
4337 {
4338 fprintf (gcse_file,
4339 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
4340 REGNO (x), INSN_UID (insn));
4341 fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
4342 }
4343 copy_prop_count++;
4344 return true;
4345 }
4346 }
4347 return false;
4348}
4349
4350static void
4351local_cprop_pass (alter_jumps)
4352 int alter_jumps;
4353{
4354 rtx insn;
4355 struct reg_use *reg_used;
4356
4357 cselib_init ();
4358 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4359 {
4360 if (INSN_P (insn))
4361 {
4362 rtx note = find_reg_equal_equiv_note (insn);
4363
4364 do
4365 {
4366 reg_use_count = 0;
4367 note_uses (&PATTERN (insn), find_used_regs, NULL);
4368 if (note)
4369 find_used_regs (&XEXP (note, 0), NULL);
4370
4371 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4372 reg_used++, reg_use_count--)
4373 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps))
4374 break;
4375 }
4376 while (reg_use_count);
4377 }
4378 cselib_process_insn (insn);
4379 }
4380 cselib_finish ();
4381}
4382
c4c81601
RK
4383/* Forward propagate copies. This includes copies and constants. Return
4384 non-zero if a change was made. */
7506f491
DE
4385
4386static int
b5ce41ff
JL
4387cprop (alter_jumps)
4388 int alter_jumps;
7506f491 4389{
e0082a72
ZD
4390 int changed;
4391 basic_block bb;
7506f491
DE
4392 rtx insn;
4393
4394 /* Note we start at block 1. */
e0082a72
ZD
4395 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4396 {
4397 if (gcse_file != NULL)
4398 fprintf (gcse_file, "\n");
4399 return 0;
4400 }
7506f491
DE
4401
4402 changed = 0;
e0082a72 4403 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
7506f491
DE
4404 {
4405 /* Reset tables used to keep track of what's still valid [since the
4406 start of the block]. */
4407 reset_opr_set_tables ();
4408
e0082a72
ZD
4409 for (insn = bb->head;
4410 insn != NULL && insn != NEXT_INSN (bb->end);
7506f491 4411 insn = NEXT_INSN (insn))
172890a2
RK
4412 if (INSN_P (insn))
4413 {
ae860ff7 4414 changed |= cprop_insn (insn, alter_jumps);
7506f491 4415
172890a2
RK
4416 /* Keep track of everything modified by this insn. */
4417 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4418 call mark_oprs_set if we turned the insn into a NOTE. */
4419 if (GET_CODE (insn) != NOTE)
4420 mark_oprs_set (insn);
8e42ace1 4421 }
7506f491
DE
4422 }
4423
4424 if (gcse_file != NULL)
4425 fprintf (gcse_file, "\n");
4426
4427 return changed;
4428}
4429
4430/* Perform one copy/constant propagation pass.
4431 F is the first insn in the function.
4432 PASS is the pass count. */
4433
4434static int
b5ce41ff 4435one_cprop_pass (pass, alter_jumps)
7506f491 4436 int pass;
b5ce41ff 4437 int alter_jumps;
7506f491
DE
4438{
4439 int changed = 0;
4440
4441 const_prop_count = 0;
4442 copy_prop_count = 0;
4443
ae860ff7
JH
4444 local_cprop_pass (alter_jumps);
4445
7506f491 4446 alloc_set_hash_table (max_cuid);
b5ce41ff 4447 compute_set_hash_table ();
7506f491
DE
4448 if (gcse_file)
4449 dump_hash_table (gcse_file, "SET", set_hash_table, set_hash_table_size,
4450 n_sets);
4451 if (n_sets > 0)
4452 {
d55bc081 4453 alloc_cprop_mem (last_basic_block, n_sets);
7506f491 4454 compute_cprop_data ();
b5ce41ff 4455 changed = cprop (alter_jumps);
0e3f0221
RS
4456 if (alter_jumps)
4457 changed |= bypass_conditional_jumps ();
7506f491
DE
4458 free_cprop_mem ();
4459 }
c4c81601 4460
7506f491
DE
4461 free_set_hash_table ();
4462
4463 if (gcse_file)
4464 {
c4c81601
RK
4465 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
4466 current_function_name, pass, bytes_used);
4467 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4468 const_prop_count, copy_prop_count);
7506f491
DE
4469 }
4470
4471 return changed;
4472}
4473\f
0e3f0221
RS
4474/* Bypass conditional jumps. */
4475
4476/* Find a set of REGNO to a constant that is available at the end of basic
4477 block BB. Returns NULL if no such set is found. Based heavily upon
4478 find_avail_set. */
4479
4480static struct expr *
4481find_bypass_set (regno, bb)
4482 int regno;
4483 int bb;
4484{
4485 struct expr *result = 0;
4486
4487 for (;;)
4488 {
4489 rtx src;
4490 struct expr *set = lookup_set (regno, NULL_RTX);
4491
4492 while (set)
4493 {
4494 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
4495 break;
4496 set = next_set (regno, set);
4497 }
4498
4499 if (set == 0)
4500 break;
4501
4502 if (GET_CODE (set->expr) != SET)
4503 abort ();
4504
4505 src = SET_SRC (set->expr);
4506 if (CONSTANT_P (src))
4507 result = set;
4508
4509 if (GET_CODE (src) != REG)
4510 break;
4511
4512 regno = REGNO (src);
4513 }
4514 return result;
4515}
4516
4517
4518/* Subroutine of bypass_conditional_jumps that attempts to bypass the given
4519 basic block BB which has more than one predecessor. If not NULL, SETCC
4520 is the first instruction of BB, which is immediately followed by JUMP_INSN
4521 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
4522 Returns nonzero if a change was made. */
4523
4524static int
4525bypass_block (bb, setcc, jump)
4526 basic_block bb;
4527 rtx setcc, jump;
4528{
4529 rtx insn, note;
4530 edge e, enext;
818b6b7f 4531 int i, change;
0e3f0221
RS
4532
4533 insn = (setcc != NULL) ? setcc : jump;
4534
4535 /* Determine set of register uses in INSN. */
4536 reg_use_count = 0;
4537 note_uses (&PATTERN (insn), find_used_regs, NULL);
4538 note = find_reg_equal_equiv_note (insn);
4539 if (note)
4540 find_used_regs (&XEXP (note, 0), NULL);
4541
4542 change = 0;
4543 for (e = bb->pred; e; e = enext)
4544 {
4545 enext = e->pred_next;
4546 for (i = 0; i < reg_use_count; i++)
4547 {
4548 struct reg_use *reg_used = &reg_use_table[i];
589005ff 4549 unsigned int regno = REGNO (reg_used->reg_rtx);
818b6b7f 4550 basic_block dest, old_dest;
589005ff
KH
4551 struct expr *set;
4552 rtx src, new;
0e3f0221 4553
589005ff
KH
4554 if (regno >= max_gcse_regno)
4555 continue;
0e3f0221 4556
589005ff 4557 set = find_bypass_set (regno, e->src->index);
0e3f0221
RS
4558
4559 if (! set)
4560 continue;
4561
589005ff 4562 src = SET_SRC (pc_set (jump));
0e3f0221
RS
4563
4564 if (setcc != NULL)
4565 src = simplify_replace_rtx (src,
589005ff
KH
4566 SET_DEST (PATTERN (setcc)),
4567 SET_SRC (PATTERN (setcc)));
0e3f0221
RS
4568
4569 new = simplify_replace_rtx (src, reg_used->reg_rtx,
589005ff 4570 SET_SRC (set->expr));
0e3f0221 4571
589005ff 4572 if (new == pc_rtx)
0e3f0221
RS
4573 dest = FALLTHRU_EDGE (bb)->dest;
4574 else if (GET_CODE (new) == LABEL_REF)
4575 dest = BRANCH_EDGE (bb)->dest;
4576 else
4577 dest = NULL;
4578
4579 /* Once basic block indices are stable, we should be able
4580 to use redirect_edge_and_branch_force instead. */
818b6b7f
RH
4581 old_dest = e->dest;
4582 if (dest != NULL && dest != old_dest
0e3f0221
RS
4583 && redirect_edge_and_branch (e, dest))
4584 {
818b6b7f 4585 /* Copy the register setter to the redirected edge.
0e3f0221
RS
4586 Don't copy CC0 setters, as CC0 is dead after jump. */
4587 if (setcc)
4588 {
4589 rtx pat = PATTERN (setcc);
818b6b7f 4590 if (!CC0_P (SET_DEST (pat)))
0e3f0221
RS
4591 insert_insn_on_edge (copy_insn (pat), e);
4592 }
4593
4594 if (gcse_file != NULL)
4595 {
818b6b7f
RH
4596 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
4597 regno, INSN_UID (jump));
0e3f0221
RS
4598 print_rtl (gcse_file, SET_SRC (set->expr));
4599 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
818b6b7f 4600 e->src->index, old_dest->index, dest->index);
0e3f0221
RS
4601 }
4602 change = 1;
4603 break;
4604 }
4605 }
4606 }
4607 return change;
4608}
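
/* For illustration, a sketch of the bypass transformation.  Suppose a
   block B consists only of

     (set (reg 70) (eq (reg 68) (const_int 0)))
     (set (pc) (if_then_else (ne (reg 70) (const_int 0))
                             (label_ref L1) (pc)))

   and the set (set (reg 68) (const_int 0)) is available at the end of
   predecessor P.  Substituting and simplifying the jump condition yields
   (label_ref L1), so the edge P->B can be redirected straight to L1, with
   the setcc pattern copied onto the redirected edge.  */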
4609
4610/* Find basic blocks with more than one predecessor that only contain a
4611 single conditional jump. If the result of the comparison is known at
4612 compile-time from any incoming edge, redirect that edge to the
4613 appropriate target. Returns nonzero if a change was made. */
4614
4615static int
4616bypass_conditional_jumps ()
4617{
4618 basic_block bb;
4619 int changed;
4620 rtx setcc;
4621 rtx insn;
4622 rtx dest;
4623
4624 /* Note we start at block 1. */
4625 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4626 return 0;
4627
4628 changed = 0;
4629 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
589005ff 4630 EXIT_BLOCK_PTR, next_bb)
0e3f0221
RS
4631 {
4632 /* Check for more than one predecessor. */
4633 if (bb->pred && bb->pred->pred_next)
4634 {
4635 setcc = NULL_RTX;
4636 for (insn = bb->head;
4637 insn != NULL && insn != NEXT_INSN (bb->end);
4638 insn = NEXT_INSN (insn))
4639 if (GET_CODE (insn) == INSN)
4640 {
9543a9d2 4641 if (setcc)
0e3f0221 4642 break;
ba4f7968 4643 if (GET_CODE (PATTERN (insn)) != SET)
0e3f0221
RS
4644 break;
4645
ba4f7968 4646 dest = SET_DEST (PATTERN (insn));
818b6b7f 4647 if (REG_P (dest) || CC0_P (dest))
0e3f0221 4648 setcc = insn;
0e3f0221
RS
4649 else
4650 break;
4651 }
4652 else if (GET_CODE (insn) == JUMP_INSN)
4653 {
4654 if (any_condjump_p (insn) && onlyjump_p (insn))
4655 changed |= bypass_block (bb, setcc, insn);
4656 break;
4657 }
4658 else if (INSN_P (insn))
4659 break;
4660 }
4661 }
4662
818b6b7f 4663 /* If we bypassed any register setting insns, we inserted a
0e3f0221
RS
 4664      copy on the redirected edge.  These need to be committed.  */
4665 if (changed)
4666 commit_edge_insertions();
4667
4668 return changed;
4669}
4670\f
a65f3558 4671/* Compute PRE+LCM working variables. */
7506f491
DE
4672
4673/* Local properties of expressions. */
4674/* Nonzero for expressions that are transparent in the block. */
a65f3558 4675static sbitmap *transp;
7506f491 4676
5c35539b
RH
4677/* Nonzero for expressions that are transparent at the end of the block.
 4678   This is only zero for expressions killed by an abnormal critical edge
 4679   created by a call.  */
a65f3558 4680static sbitmap *transpout;
5c35539b 4681
a65f3558
JL
4682/* Nonzero for expressions that are computed (available) in the block. */
4683static sbitmap *comp;
7506f491 4684
a65f3558
JL
4685/* Nonzero for expressions that are locally anticipatable in the block. */
4686static sbitmap *antloc;
7506f491 4687
a65f3558
JL
4688/* Nonzero for expressions where this block is an optimal computation
4689 point. */
4690static sbitmap *pre_optimal;
5c35539b 4691
a65f3558
JL
4692/* Nonzero for expressions which are redundant in a particular block. */
4693static sbitmap *pre_redundant;
7506f491 4694
a42cd965
AM
4695/* Nonzero for expressions which should be inserted on a specific edge. */
4696static sbitmap *pre_insert_map;
4697
4698/* Nonzero for expressions which should be deleted in a specific block. */
4699static sbitmap *pre_delete_map;
4700
4701/* Contains the edge_list returned by pre_edge_lcm. */
4702static struct edge_list *edge_list;
4703
a65f3558
JL
4704/* Redundant insns. */
4705static sbitmap pre_redundant_insns;
7506f491 4706
a65f3558 4707/* Allocate vars used for PRE analysis. */
7506f491
DE
4708
4709static void
a65f3558
JL
4710alloc_pre_mem (n_blocks, n_exprs)
4711 int n_blocks, n_exprs;
7506f491 4712{
a65f3558
JL
4713 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4714 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4715 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5faf03ae 4716
a42cd965
AM
4717 pre_optimal = NULL;
4718 pre_redundant = NULL;
4719 pre_insert_map = NULL;
4720 pre_delete_map = NULL;
4721 ae_in = NULL;
4722 ae_out = NULL;
a42cd965 4723 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
c4c81601 4724
a42cd965 4725 /* pre_insert and pre_delete are allocated later. */
7506f491
DE
4726}
4727
a65f3558 4728/* Free vars used for PRE analysis. */
7506f491
DE
4729
4730static void
a65f3558 4731free_pre_mem ()
7506f491 4732{
5a660bff
DB
4733 sbitmap_vector_free (transp);
4734 sbitmap_vector_free (comp);
bd3675fc
JL
4735
4736 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
7506f491 4737
a42cd965 4738 if (pre_optimal)
5a660bff 4739 sbitmap_vector_free (pre_optimal);
a42cd965 4740 if (pre_redundant)
5a660bff 4741 sbitmap_vector_free (pre_redundant);
a42cd965 4742 if (pre_insert_map)
5a660bff 4743 sbitmap_vector_free (pre_insert_map);
a42cd965 4744 if (pre_delete_map)
5a660bff 4745 sbitmap_vector_free (pre_delete_map);
a42cd965 4746 if (ae_in)
5a660bff 4747 sbitmap_vector_free (ae_in);
a42cd965 4748 if (ae_out)
5a660bff 4749 sbitmap_vector_free (ae_out);
a42cd965 4750
bd3675fc 4751 transp = comp = NULL;
a42cd965 4752 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
55d3f917 4753 ae_in = ae_out = NULL;
7506f491
DE
4754}
4755
4756/* Top level routine to do the dataflow analysis needed by PRE. */
4757
4758static void
4759compute_pre_data ()
4760{
b614171e 4761 sbitmap trapping_expr;
e0082a72 4762 basic_block bb;
b614171e 4763 unsigned int ui;
c66e8ae9 4764
a65f3558 4765 compute_local_properties (transp, comp, antloc, 0);
d55bc081 4766 sbitmap_vector_zero (ae_kill, last_basic_block);
c66e8ae9 4767
b614171e
MM
4768 /* Collect expressions which might trap. */
4769 trapping_expr = sbitmap_alloc (n_exprs);
4770 sbitmap_zero (trapping_expr);
4771 for (ui = 0; ui < expr_hash_table_size; ui++)
4772 {
4773 struct expr *e;
4774 for (e = expr_hash_table[ui]; e != NULL; e = e->next_same_hash)
4775 if (may_trap_p (e->expr))
4776 SET_BIT (trapping_expr, e->bitmap_index);
4777 }
4778
c66e8ae9
JL
4779 /* Compute ae_kill for each basic block using:
4780
4781 ~(TRANSP | COMP)
4782
a2e90653 4783 This is significantly faster than compute_ae_kill. */
c66e8ae9 4784
e0082a72 4785 FOR_EACH_BB (bb)
c66e8ae9 4786 {
b614171e
MM
4787 edge e;
4788
4789 /* If the current block is the destination of an abnormal edge, we
4790 kill all trapping expressions because we won't be able to properly
4791 place the instruction on the edge. So make them neither
4792 anticipatable nor transparent. This is fairly conservative. */
e0082a72 4793 for (e = bb->pred; e ; e = e->pred_next)
b614171e
MM
4794 if (e->flags & EDGE_ABNORMAL)
4795 {
e0082a72
ZD
4796 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
4797 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
b614171e
MM
4798 break;
4799 }
4800
e0082a72
ZD
4801 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
4802 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
c66e8ae9
JL
4803 }
4804
a42cd965
AM
4805 edge_list = pre_edge_lcm (gcse_file, n_exprs, transp, comp, antloc,
4806 ae_kill, &pre_insert_map, &pre_delete_map);
5a660bff 4807 sbitmap_vector_free (antloc);
bd3675fc 4808 antloc = NULL;
5a660bff 4809 sbitmap_vector_free (ae_kill);
589005ff 4810 ae_kill = NULL;
76ac938b 4811 sbitmap_free (trapping_expr);
7506f491
DE
4812}
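
/* For illustration: ANTLOC, TRANSP and COMP computed above are the usual
   lazy code motion inputs (locally anticipatable, transparent, and locally
   available expressions per block).  pre_edge_lcm is expected to return,
   per expression, the edges on which a computation must be inserted
   (pre_insert_map) and the blocks where the then fully redundant
   computation may be deleted (pre_delete_map).  */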
4813\f
4814/* PRE utilities */
4815
a65f3558
JL
4816/* Return non-zero if an occurrence of expression EXPR in OCCR_BB would reach
4817 block BB.
7506f491
DE
4818
4819 VISITED is a pointer to a working buffer for tracking which BB's have
4820 been visited. It is NULL for the top-level call.
4821
4822 We treat reaching expressions that go through blocks containing the same
4823 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
4824 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
4825 2 as not reaching. The intent is to improve the probability of finding
4826 only one reaching expression and to reduce register lifetimes by picking
4827 the closest such expression. */
4828
4829static int
89e606c9 4830pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited)
e2d2ed72 4831 basic_block occr_bb;
7506f491 4832 struct expr *expr;
e2d2ed72 4833 basic_block bb;
7506f491
DE
4834 char *visited;
4835{
36349f8b 4836 edge pred;
7506f491 4837
e2d2ed72 4838 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
7506f491 4839 {
e2d2ed72 4840 basic_block pred_bb = pred->src;
7506f491 4841
36349f8b 4842 if (pred->src == ENTRY_BLOCK_PTR
7506f491 4843	/* Has this predecessor already been visited?  */
0b17ab2f 4844 || visited[pred_bb->index])
c4c81601
RK
4845 ;/* Nothing to do. */
4846
7506f491 4847 /* Does this predecessor generate this expression? */
0b17ab2f 4848 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
7506f491
DE
4849 {
4850 /* Is this the occurrence we're looking for?
4851 Note that there's only one generating occurrence per block
4852 so we just need to check the block number. */
a65f3558 4853 if (occr_bb == pred_bb)
7506f491 4854 return 1;
c4c81601 4855
0b17ab2f 4856 visited[pred_bb->index] = 1;
7506f491
DE
4857 }
4858 /* Ignore this predecessor if it kills the expression. */
0b17ab2f
RH
4859 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
4860 visited[pred_bb->index] = 1;
c4c81601 4861
7506f491
DE
4862 /* Neither gen nor kill. */
4863 else
ac7c5af5 4864 {
0b17ab2f 4865 visited[pred_bb->index] = 1;
89e606c9 4866 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
7506f491 4867 return 1;
ac7c5af5 4868 }
7506f491
DE
4869 }
4870
4871 /* All paths have been checked. */
4872 return 0;
4873}
283a2545
RL
4874
 4875/* The wrapper for pre_expr_reaches_here_p_work that ensures that any
dc297297 4876 memory allocated for that function is returned. */
283a2545
RL
4877
4878static int
89e606c9 4879pre_expr_reaches_here_p (occr_bb, expr, bb)
e2d2ed72 4880 basic_block occr_bb;
283a2545 4881 struct expr *expr;
e2d2ed72 4882 basic_block bb;
283a2545
RL
4883{
4884 int rval;
d55bc081 4885 char *visited = (char *) xcalloc (last_basic_block, 1);
283a2545 4886
8e42ace1 4887 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
283a2545
RL
4888
4889 free (visited);
c4c81601 4890 return rval;
283a2545 4891}
7506f491 4892\f
a42cd965
AM
4893
4894/* Given an expr, generate RTL which we can insert at the end of a BB,
589005ff 4895 or on an edge. Set the block number of any insns generated to
a42cd965
AM
4896 the value of BB. */
4897
4898static rtx
4899process_insert_insn (expr)
4900 struct expr *expr;
4901{
4902 rtx reg = expr->reaching_reg;
fb0c0a12
RK
4903 rtx exp = copy_rtx (expr->expr);
4904 rtx pat;
a42cd965
AM
4905
4906 start_sequence ();
fb0c0a12
RK
4907
4908 /* If the expression is something that's an operand, like a constant,
4909 just copy it to a register. */
4910 if (general_operand (exp, GET_MODE (reg)))
4911 emit_move_insn (reg, exp);
4912
4913 /* Otherwise, make a new insn to compute this expression and make sure the
4914 insn will be recognized (this also adds any needed CLOBBERs). Copy the
4915 expression to make sure we don't have any sharing issues. */
8d444206 4916 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
fb0c0a12 4917 abort ();
589005ff 4918
2f937369 4919 pat = get_insns ();
a42cd965
AM
4920 end_sequence ();
4921
4922 return pat;
4923}
589005ff 4924
a65f3558
JL
4925/* Add EXPR to the end of basic block BB.
4926
4927 This is used by both the PRE and code hoisting.
4928
4929 For PRE, we want to verify that the expr is either transparent
4930 or locally anticipatable in the target block. This check makes
4931 no sense for code hoisting. */
7506f491
DE
4932
4933static void
a65f3558 4934insert_insn_end_bb (expr, bb, pre)
7506f491 4935 struct expr *expr;
e2d2ed72 4936 basic_block bb;
a65f3558 4937 int pre;
7506f491 4938{
e2d2ed72 4939 rtx insn = bb->end;
7506f491
DE
4940 rtx new_insn;
4941 rtx reg = expr->reaching_reg;
4942 int regno = REGNO (reg);
2f937369 4943 rtx pat, pat_end;
7506f491 4944
a42cd965 4945 pat = process_insert_insn (expr);
2f937369
DM
4946 if (pat == NULL_RTX || ! INSN_P (pat))
4947 abort ();
4948
4949 pat_end = pat;
4950 while (NEXT_INSN (pat_end) != NULL_RTX)
4951 pat_end = NEXT_INSN (pat_end);
7506f491
DE
4952
4953 /* If the last insn is a jump, insert EXPR in front [taking care to
068473ec
JH
 4954     handle cc0, etc. properly].  Similarly we need to take care of trapping
 4955     instructions in the presence of non-call exceptions.  */
7506f491 4956
068473ec
JH
4957 if (GET_CODE (insn) == JUMP_INSN
4958 || (GET_CODE (insn) == INSN
4959 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
7506f491 4960 {
50b2596f 4961#ifdef HAVE_cc0
7506f491 4962 rtx note;
50b2596f 4963#endif
068473ec
JH
4964 /* It should always be the case that we can put these instructions
 4965	 anywhere in the basic block when performing PRE optimizations.
4966 Check this. */
3b25fbfe 4967 if (GET_CODE (insn) == INSN && pre
0b17ab2f 4968 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
589005ff 4969 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
068473ec 4970 abort ();
7506f491
DE
4971
4972 /* If this is a jump table, then we can't insert stuff here. Since
4973 we know the previous real insn must be the tablejump, we insert
4974 the new instruction just before the tablejump. */
4975 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
4976 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
4977 insn = prev_real_insn (insn);
4978
4979#ifdef HAVE_cc0
4980 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
4981 if cc0 isn't set. */
4982 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
4983 if (note)
4984 insn = XEXP (note, 0);
4985 else
4986 {
4987 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
4988 if (maybe_cc0_setter
2c3c49de 4989 && INSN_P (maybe_cc0_setter)
7506f491
DE
4990 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
4991 insn = maybe_cc0_setter;
4992 }
4993#endif
4994 /* FIXME: What if something in cc0/jump uses value set in new insn? */
3c030e88 4995 new_insn = emit_insn_before (pat, insn);
3947e2f9 4996 }
c4c81601 4997
3947e2f9
RH
4998 /* Likewise if the last insn is a call, as will happen in the presence
4999 of exception handling. */
068473ec
JH
5000 else if (GET_CODE (insn) == CALL_INSN
5001 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
3947e2f9 5002 {
3947e2f9
RH
5003 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
5004 we search backward and place the instructions before the first
5005 parameter is loaded. Do this for everyone for consistency and a
589005ff 5006 presumption that we'll get better code elsewhere as well.
3947e2f9 5007
c4c81601 5008 It should always be the case that we can put these instructions
a65f3558
JL
5009 anywhere in the basic block when performing PRE optimizations.
5010 Check this. */
c4c81601 5011
a65f3558 5012 if (pre
0b17ab2f 5013 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
589005ff 5014 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
3947e2f9
RH
5015 abort ();
5016
5017 /* Since different machines initialize their parameter registers
5018 in different orders, assume nothing. Collect the set of all
5019 parameter registers. */
833366d6 5020 insn = find_first_parameter_load (insn, bb->head);
3947e2f9 5021
b1d26727
JL
5022 /* If we found all the parameter loads, then we want to insert
5023 before the first parameter load.
5024
5025 If we did not find all the parameter loads, then we might have
5026 stopped on the head of the block, which could be a CODE_LABEL.
5027 If we inserted before the CODE_LABEL, then we would be putting
5028 the insn in the wrong basic block. In that case, put the insn
b5229628 5029 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
0a377997 5030 while (GET_CODE (insn) == CODE_LABEL
589ca5cb 5031 || NOTE_INSN_BASIC_BLOCK_P (insn))
b5229628 5032 insn = NEXT_INSN (insn);
c4c81601 5033
3c030e88 5034 new_insn = emit_insn_before (pat, insn);
7506f491
DE
5035 }
5036 else
3c030e88 5037 new_insn = emit_insn_after (pat, insn);
7506f491 5038
2f937369 5039 while (1)
a65f3558 5040 {
2f937369 5041 if (INSN_P (pat))
a65f3558 5042 {
2f937369
DM
5043 add_label_notes (PATTERN (pat), new_insn);
5044 note_stores (PATTERN (pat), record_set_info, pat);
a65f3558 5045 }
2f937369
DM
5046 if (pat == pat_end)
5047 break;
5048 pat = NEXT_INSN (pat);
a65f3558 5049 }
3947e2f9 5050
7506f491
DE
5051 gcse_create_count++;
5052
5053 if (gcse_file)
5054 {
c4c81601 5055 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
0b17ab2f 5056 bb->index, INSN_UID (new_insn));
c4c81601
RK
5057 fprintf (gcse_file, "copying expression %d to reg %d\n",
5058 expr->bitmap_index, regno);
7506f491
DE
5059 }
5060}
5061
a42cd965
AM
5062/* Insert partially redundant expressions on edges in the CFG to make
5063 the expressions fully redundant. */
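/* As a purely illustrative sketch (the function and variable names below
   are invented, not taken from any test case):

      int f (int p, int a, int b)
      {
        int x;
        if (p)
          x = a + b;
        else
          x = 0;
        return x + (a + b);
      }

   The second evaluation of a + b is only partially redundant: it is
   available on the path through the then-arm but not through the
   else-arm.  Inserting an evaluation of a + b on the edge leaving the
   else-arm makes the use at the return fully redundant, so pre_delete
   can turn it into a copy from the reaching register.  */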
7506f491 5064
a42cd965
AM
5065static int
5066pre_edge_insert (edge_list, index_map)
5067 struct edge_list *edge_list;
7506f491
DE
5068 struct expr **index_map;
5069{
c4c81601 5070 int e, i, j, num_edges, set_size, did_insert = 0;
a65f3558
JL
5071 sbitmap *inserted;
5072
a42cd965
AM
5073 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
5074 if it reaches any of the deleted expressions. */
7506f491 5075
a42cd965
AM
5076 set_size = pre_insert_map[0]->size;
5077 num_edges = NUM_EDGES (edge_list);
5078 inserted = sbitmap_vector_alloc (num_edges, n_exprs);
5079 sbitmap_vector_zero (inserted, num_edges);
7506f491 5080
a42cd965 5081 for (e = 0; e < num_edges; e++)
7506f491
DE
5082 {
5083 int indx;
e2d2ed72 5084 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
a65f3558 5085
a65f3558 5086 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
7506f491 5087 {
a42cd965 5088 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
7506f491 5089
a65f3558 5090 for (j = indx; insert && j < n_exprs; j++, insert >>= 1)
c4c81601
RK
5091 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
5092 {
5093 struct expr *expr = index_map[j];
5094 struct occr *occr;
a65f3558 5095
ff7cc307 5096 /* Now look at each deleted occurrence of this expression. */
c4c81601
RK
5097 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5098 {
5099 if (! occr->deleted_p)
5100 continue;
5101
5102 /* Insert this expression on this edge if it would
ff7cc307 5103 reach the deleted occurrence in BB. */
c4c81601
RK
5104 if (!TEST_BIT (inserted[e], j))
5105 {
5106 rtx insn;
5107 edge eg = INDEX_EDGE (edge_list, e);
5108
5109 /* We can't insert anything on an abnormal and
5110 critical edge, so we insert the insn at the end of
5111 the previous block. There are several alternatives
5112 detailed in Morgan's book, p. 277 (sec 10.5), for
5113 handling this situation. This one is easiest for
5114 now. */
5115
5116 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
5117 insert_insn_end_bb (index_map[j], bb, 0);
5118 else
5119 {
5120 insn = process_insert_insn (index_map[j]);
5121 insert_insn_on_edge (insn, eg);
5122 }
5123
5124 if (gcse_file)
5125 {
5126 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
0b17ab2f
RH
5127 bb->index,
5128 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
c4c81601
RK
5129 fprintf (gcse_file, "copy expression %d\n",
5130 expr->bitmap_index);
5131 }
5132
a13d4ebf 5133 update_ld_motion_stores (expr);
c4c81601
RK
5134 SET_BIT (inserted[e], j);
5135 did_insert = 1;
5136 gcse_create_count++;
5137 }
5138 }
5139 }
7506f491
DE
5140 }
5141 }
5faf03ae 5142
5a660bff 5143 sbitmap_vector_free (inserted);
a42cd965 5144 return did_insert;
7506f491
DE
5145}
5146
c4c81601 5147/* Copy the result of INSN to REG. INDX is the expression number. */
7506f491
DE
5148
5149static void
5150pre_insert_copy_insn (expr, insn)
5151 struct expr *expr;
5152 rtx insn;
5153{
5154 rtx reg = expr->reaching_reg;
5155 int regno = REGNO (reg);
5156 int indx = expr->bitmap_index;
5157 rtx set = single_set (insn);
5158 rtx new_insn;
5159
5160 if (!set)
5161 abort ();
c4c81601 5162
cccf0ae8 5163 new_insn = emit_insn_after (gen_move_insn (reg, SET_DEST (set)), insn);
c4c81601 5164
7506f491
DE
5165 /* Keep register set table up to date. */
5166 record_one_set (regno, new_insn);
5167
5168 gcse_create_count++;
5169
5170 if (gcse_file)
a42cd965
AM
5171 fprintf (gcse_file,
5172 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
5173 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
5174 INSN_UID (insn), regno);
222f7ba9 5175 update_ld_motion_stores (expr);
7506f491
DE
5176}
5177
5178/* Copy available expressions that reach the redundant expression
5179 to `reaching_reg'. */
5180
5181static void
5182pre_insert_copies ()
5183{
2e653e39 5184 unsigned int i;
c4c81601
RK
5185 struct expr *expr;
5186 struct occr *occr;
5187 struct occr *avail;
a65f3558 5188
7506f491
DE
5189 /* For each available expression in the table, copy the result to
5190 `reaching_reg' if the expression reaches a deleted one.
5191
5192 ??? The current algorithm is rather brute force.
5193 Need to do some profiling. */
5194
5195 for (i = 0; i < expr_hash_table_size; i++)
c4c81601
RK
5196 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
5197 {
5198 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
5199 we don't want to insert a copy here because the expression may not
5200 really be redundant. So only insert an insn if the expression was
5201 deleted. This test also avoids further processing if the
5202 expression wasn't deleted anywhere. */
5203 if (expr->reaching_reg == NULL)
5204 continue;
5205
5206 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5207 {
5208 if (! occr->deleted_p)
5209 continue;
7506f491 5210
c4c81601
RK
5211 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
5212 {
5213 rtx insn = avail->insn;
7506f491 5214
c4c81601
RK
5215 /* No need to handle this one if handled already. */
5216 if (avail->copied_p)
5217 continue;
7506f491 5218
c4c81601
RK
5219 /* Don't handle this one if it's a redundant one. */
5220 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
5221 continue;
7506f491 5222
c4c81601 5223 /* Or if the expression doesn't reach the deleted one. */
589005ff 5224 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
e2d2ed72
AM
5225 expr,
5226 BLOCK_FOR_INSN (occr->insn)))
c4c81601 5227 continue;
7506f491 5228
c4c81601
RK
5229 /* Copy the result of avail to reaching_reg. */
5230 pre_insert_copy_insn (expr, insn);
5231 avail->copied_p = 1;
5232 }
5233 }
5234 }
7506f491
DE
5235}
5236
10d1bb36
JH
5237/* Emit move from SRC to DEST noting the equivalence with expression computed
5238 in INSN. */
5239static rtx
5240gcse_emit_move_after (src, dest, insn)
5241 rtx src, dest, insn;
5242{
5243 rtx new;
6bdb8dd6 5244 rtx set = single_set (insn), set2;
10d1bb36
JH
5245 rtx note;
5246 rtx eqv;
5247
5248 /* This should never fail since we're creating a reg->reg copy
5249 we've verified to be valid. */
5250
6bdb8dd6 5251 new = emit_insn_after (gen_move_insn (dest, src), insn);
285464d0 5252
10d1bb36 5253 /* Note the equivalence for local CSE pass. */
6bdb8dd6
JH
5254 set2 = single_set (new);
5255 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
5256 return new;
10d1bb36
JH
5257 if ((note = find_reg_equal_equiv_note (insn)))
5258 eqv = XEXP (note, 0);
5259 else
5260 eqv = SET_SRC (set);
5261
5262 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (src));
5263
5264 return new;
5265}
5266
7506f491 5267/* Delete redundant computations.
7506f491
DE
5268 Deletion is done by changing the insn to copy the `reaching_reg' of
5269 the expression into the result of the SET. It is left to later passes
5270 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
5271
5272 Returns non-zero if a change is made. */
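/* For example (names invented): if t is the pseudo chosen as the
   expression's reaching_reg, a redundant computation

      x = a + b;

   is rewritten as the copy

      x = t;

   and cprop, cse2 or regmove may later propagate or remove the copy.  */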
5273
5274static int
5275pre_delete ()
5276{
2e653e39 5277 unsigned int i;
63bc1d05 5278 int changed;
c4c81601
RK
5279 struct expr *expr;
5280 struct occr *occr;
a65f3558 5281
7506f491
DE
5282 changed = 0;
5283 for (i = 0; i < expr_hash_table_size; i++)
c4c81601
RK
5284 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
5285 {
5286 int indx = expr->bitmap_index;
7506f491 5287
c4c81601
RK
5288 /* We only need to search antic_occr since we require
5289 ANTLOC != 0. */
7506f491 5290
c4c81601
RK
5291 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5292 {
5293 rtx insn = occr->insn;
5294 rtx set;
e2d2ed72 5295 basic_block bb = BLOCK_FOR_INSN (insn);
7506f491 5296
0b17ab2f 5297 if (TEST_BIT (pre_delete_map[bb->index], indx))
c4c81601
RK
5298 {
5299 set = single_set (insn);
5300 if (! set)
5301 abort ();
5302
5303 /* Create a pseudo-reg to store the result of reaching
5304 expressions into. Get the mode for the new pseudo from
5305 the mode of the original destination pseudo. */
5306 if (expr->reaching_reg == NULL)
5307 expr->reaching_reg
5308 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5309
10d1bb36
JH
5310 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5311 delete_insn (insn);
5312 occr->deleted_p = 1;
5313 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
5314 changed = 1;
5315 gcse_subst_count++;
7506f491 5316
c4c81601
RK
5317 if (gcse_file)
5318 {
5319 fprintf (gcse_file,
5320 "PRE: redundant insn %d (expression %d) in ",
5321 INSN_UID (insn), indx);
5322 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
0b17ab2f 5323 bb->index, REGNO (expr->reaching_reg));
c4c81601
RK
5324 }
5325 }
5326 }
5327 }
7506f491
DE
5328
5329 return changed;
5330}
5331
5332/* Perform GCSE optimizations using PRE.
5333 This is called by one_pre_gcse_pass after all the dataflow analysis
5334 has been done.
5335
c4c81601
RK
5336 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
5337 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
5338 Compiler Design and Implementation.
7506f491 5339
c4c81601
RK
5340 ??? A new pseudo reg is created to hold the reaching expression. The nice
5341 thing about the classical approach is that it would try to use an existing
5342 reg. If the register can't be adequately optimized [i.e. we introduce
5343 reload problems], one could add a pass here to propagate the new register
5344 through the block.
7506f491 5345
c4c81601
RK
5346 ??? We don't handle single sets in PARALLELs because we're [currently] not
5347 able to copy the rest of the parallel when we insert copies to create full
5348 redundancies from partial redundancies. However, there's no reason why we
5349 can't handle PARALLELs in the cases where there are no partial
7506f491
DE
5350 redundancies. */
5351
5352static int
5353pre_gcse ()
5354{
2e653e39
RK
5355 unsigned int i;
5356 int did_insert, changed;
7506f491 5357 struct expr **index_map;
c4c81601 5358 struct expr *expr;
7506f491
DE
5359
5360 /* Compute a mapping from expression number (`bitmap_index') to
5361 hash table entry. */
5362
dd1bd863 5363 index_map = (struct expr **) xcalloc (n_exprs, sizeof (struct expr *));
7506f491 5364 for (i = 0; i < expr_hash_table_size; i++)
c4c81601
RK
5365 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
5366 index_map[expr->bitmap_index] = expr;
7506f491
DE
5367
5368 /* Reset bitmap used to track which insns are redundant. */
a65f3558
JL
5369 pre_redundant_insns = sbitmap_alloc (max_cuid);
5370 sbitmap_zero (pre_redundant_insns);
7506f491
DE
5371
5372 /* Delete the redundant insns first so that
5373 - we know what register to use for the new insns and for the other
5374 ones with reaching expressions
5375 - we know which insns are redundant when we go to create copies */
c4c81601 5376
7506f491
DE
5377 changed = pre_delete ();
5378
a42cd965 5379 did_insert = pre_edge_insert (edge_list, index_map);
c4c81601 5380
7506f491 5381 /* In other places with reaching expressions, copy the expression to the
a42cd965 5382 specially allocated pseudo-reg that reaches the redundant expr. */
7506f491 5383 pre_insert_copies ();
a42cd965
AM
5384 if (did_insert)
5385 {
5386 commit_edge_insertions ();
5387 changed = 1;
5388 }
7506f491 5389
283a2545 5390 free (index_map);
76ac938b 5391 sbitmap_free (pre_redundant_insns);
7506f491
DE
5392 return changed;
5393}
5394
5395/* Top level routine to perform one PRE GCSE pass.
5396
5397 Return non-zero if a change was made. */
5398
5399static int
b5ce41ff 5400one_pre_gcse_pass (pass)
7506f491
DE
5401 int pass;
5402{
5403 int changed = 0;
5404
5405 gcse_subst_count = 0;
5406 gcse_create_count = 0;
5407
5408 alloc_expr_hash_table (max_cuid);
a42cd965 5409 add_noreturn_fake_exit_edges ();
a13d4ebf
AM
5410 if (flag_gcse_lm)
5411 compute_ld_motion_mems ();
5412
b5ce41ff 5413 compute_expr_hash_table ();
a13d4ebf 5414 trim_ld_motion_mems ();
7506f491
DE
5415 if (gcse_file)
5416 dump_hash_table (gcse_file, "Expression", expr_hash_table,
5417 expr_hash_table_size, n_exprs);
c4c81601 5418
7506f491
DE
5419 if (n_exprs > 0)
5420 {
d55bc081 5421 alloc_pre_mem (last_basic_block, n_exprs);
7506f491
DE
5422 compute_pre_data ();
5423 changed |= pre_gcse ();
a42cd965 5424 free_edge_list (edge_list);
7506f491
DE
5425 free_pre_mem ();
5426 }
c4c81601 5427
a13d4ebf 5428 free_ldst_mems ();
a42cd965 5429 remove_fake_edges ();
7506f491
DE
5430 free_expr_hash_table ();
5431
5432 if (gcse_file)
5433 {
c4c81601
RK
5434 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
5435 current_function_name, pass, bytes_used);
5436 fprintf (gcse_file, "%d substs, %d insns created\n",
5437 gcse_subst_count, gcse_create_count);
7506f491
DE
5438 }
5439
5440 return changed;
5441}
aeb2f500
JW
5442\f
5443/* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5b1ef594
JDA
5444 If notes are added to an insn which references a CODE_LABEL, the
5445 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5446 because the following loop optimization pass requires them. */
aeb2f500
JW
5447
5448/* ??? This is very similar to the loop.c add_label_notes function. We
5449 could probably share code here. */
5450
5451/* ??? If there was a jump optimization pass after gcse and before loop,
5452 then we would not need to do this here, because jump would add the
5453 necessary REG_LABEL notes. */
5454
5455static void
5456add_label_notes (x, insn)
5457 rtx x;
5458 rtx insn;
5459{
5460 enum rtx_code code = GET_CODE (x);
5461 int i, j;
6f7d635c 5462 const char *fmt;
aeb2f500
JW
5463
5464 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
5465 {
6b3603c2 5466 /* This code used to ignore labels that referred to dispatch tables to
ac7c5af5 5467 avoid flow generating (slightly) worse code.
6b3603c2 5468
ac7c5af5
JL
5469 We no longer ignore such label references (see LABEL_REF handling in
5470 mark_jump_label for additional information). */
c4c81601 5471
6b8c9327 5472 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
6b3603c2 5473 REG_NOTES (insn));
5b1ef594 5474 if (LABEL_P (XEXP (x, 0)))
589005ff 5475 LABEL_NUSES (XEXP (x, 0))++;
aeb2f500
JW
5476 return;
5477 }
5478
c4c81601 5479 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
aeb2f500
JW
5480 {
5481 if (fmt[i] == 'e')
5482 add_label_notes (XEXP (x, i), insn);
5483 else if (fmt[i] == 'E')
5484 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5485 add_label_notes (XVECEXP (x, i, j), insn);
5486 }
5487}
a65f3558
JL
5488
5489/* Compute transparent outgoing information for each block.
5490
5491 An expression is transparent to an edge unless it is killed by
5492 the edge itself. This can only happen with abnormal control flow,
5493 when the edge is traversed through a call. This happens with
5494 non-local labels and exceptions.
5495
5496 This would not be necessary if we split the edge. While this is
5497 normally impossible for abnormal critical edges, with some effort
5498 it should be possible with exception handling, since we still have
5499 control over which handler should be invoked. But due to increased
5500 EH table sizes, this may not be worthwhile. */
5501
5502static void
5503compute_transpout ()
5504{
e0082a72 5505 basic_block bb;
2e653e39 5506 unsigned int i;
c4c81601 5507 struct expr *expr;
a65f3558 5508
d55bc081 5509 sbitmap_vector_ones (transpout, last_basic_block);
a65f3558 5510
e0082a72 5511 FOR_EACH_BB (bb)
a65f3558 5512 {
a65f3558
JL
5513 /* Note that flow inserted a nop at the end of basic blocks that
5514 end in call instructions for reasons other than abnormal
5515 control flow. */
e0082a72 5516 if (GET_CODE (bb->end) != CALL_INSN)
a65f3558
JL
5517 continue;
5518
5519 for (i = 0; i < expr_hash_table_size; i++)
c4c81601
RK
5520 for (expr = expr_hash_table[i]; expr ; expr = expr->next_same_hash)
5521 if (GET_CODE (expr->expr) == MEM)
5522 {
5523 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5524 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
5525 continue;
589005ff 5526
c4c81601
RK
5527 /* ??? Optimally, we would use interprocedural alias
5528 analysis to determine if this mem is actually killed
5529 by this call. */
e0082a72 5530 RESET_BIT (transpout[bb->index], expr->bitmap_index);
c4c81601 5531 }
a65f3558
JL
5532 }
5533}
dfdb644f
JL
5534
5535/* Removal of useless null pointer checks */
5536
dfdb644f 5537/* Called via note_stores. X is set by SETTER. If X is a register we must
0511851c
MM
5538 invalidate nonnull_local and set nonnull_killed. DATA is really a
5539 `null_pointer_info *'.
dfdb644f
JL
5540
5541 We ignore hard registers. */
c4c81601 5542
dfdb644f 5543static void
84832317 5544invalidate_nonnull_info (x, setter, data)
dfdb644f
JL
5545 rtx x;
5546 rtx setter ATTRIBUTE_UNUSED;
0511851c 5547 void *data;
dfdb644f 5548{
770ae6cc
RK
5549 unsigned int regno;
5550 struct null_pointer_info *npi = (struct null_pointer_info *) data;
c4c81601 5551
dfdb644f
JL
5552 while (GET_CODE (x) == SUBREG)
5553 x = SUBREG_REG (x);
5554
5555 /* Ignore anything that is not a register or is a hard register. */
5556 if (GET_CODE (x) != REG
0511851c
MM
5557 || REGNO (x) < npi->min_reg
5558 || REGNO (x) >= npi->max_reg)
dfdb644f
JL
5559 return;
5560
0511851c 5561 regno = REGNO (x) - npi->min_reg;
dfdb644f 5562
e0082a72
ZD
5563 RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
5564 SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
dfdb644f
JL
5565}
5566
0511851c
MM
5567/* Do null-pointer check elimination for the registers indicated in
5568 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5569 they are not our responsibility to free. */
dfdb644f 5570
99a15921 5571static int
9cd56be1 5572delete_null_pointer_checks_1 (block_reg, nonnull_avin,
8e184d9c 5573 nonnull_avout, npi)
770ae6cc 5574 unsigned int *block_reg;
0511851c
MM
5575 sbitmap *nonnull_avin;
5576 sbitmap *nonnull_avout;
5577 struct null_pointer_info *npi;
dfdb644f 5578{
e0082a72 5579 basic_block bb, current_block;
0511851c
MM
5580 sbitmap *nonnull_local = npi->nonnull_local;
5581 sbitmap *nonnull_killed = npi->nonnull_killed;
99a15921 5582 int something_changed = 0;
589005ff 5583
dfdb644f
JL
5584 /* Compute local properties, nonnull and killed. A register will have
5585 the nonnull property if at the end of the current block its value is
5586 known to be nonnull. The killed property indicates that somewhere in
5587 the block any information we had about the register is killed.
5588
5589 Note that a register can have both properties in a single block. That
5590 indicates that it's killed, then later in the block a new value is
5591 computed. */
d55bc081
ZD
5592 sbitmap_vector_zero (nonnull_local, last_basic_block);
5593 sbitmap_vector_zero (nonnull_killed, last_basic_block);
c4c81601 5594
e0082a72 5595 FOR_EACH_BB (current_block)
dfdb644f
JL
5596 {
5597 rtx insn, stop_insn;
5598
0511851c
MM
5599 /* Set the current block for invalidate_nonnull_info. */
5600 npi->current_block = current_block;
5601
dfdb644f
JL
5602 /* Scan each insn in the basic block looking for memory references and
5603 register sets. */
e0082a72
ZD
5604 stop_insn = NEXT_INSN (current_block->end);
5605 for (insn = current_block->head;
dfdb644f
JL
5606 insn != stop_insn;
5607 insn = NEXT_INSN (insn))
5608 {
5609 rtx set;
0511851c 5610 rtx reg;
dfdb644f
JL
5611
5612 /* Ignore anything that is not a normal insn. */
2c3c49de 5613 if (! INSN_P (insn))
dfdb644f
JL
5614 continue;
5615
5616 /* Basically ignore anything that is not a simple SET. We do have
5617 to make sure to invalidate nonnull_local and set nonnull_killed
5618 for such insns though. */
5619 set = single_set (insn);
5620 if (!set)
5621 {
0511851c 5622 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
dfdb644f
JL
5623 continue;
5624 }
5625
f63d1bf7 5626 /* See if we've got a usable memory load. We handle it first
dfdb644f
JL
5627 in case it uses its address register as a dest (which kills
5628 the nonnull property). */
5629 if (GET_CODE (SET_SRC (set)) == MEM
0511851c
MM
5630 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
5631 && REGNO (reg) >= npi->min_reg
5632 && REGNO (reg) < npi->max_reg)
e0082a72 5633 SET_BIT (nonnull_local[current_block->index],
0511851c 5634 REGNO (reg) - npi->min_reg);
dfdb644f
JL
5635
5636 /* Now invalidate stuff clobbered by this insn. */
0511851c 5637 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
dfdb644f
JL
5638
5639 /* And handle stores, we do these last since any sets in INSN can
5640 not kill the nonnull property if it is derived from a MEM
5641 appearing in a SET_DEST. */
5642 if (GET_CODE (SET_DEST (set)) == MEM
0511851c
MM
5643 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
5644 && REGNO (reg) >= npi->min_reg
5645 && REGNO (reg) < npi->max_reg)
e0082a72 5646 SET_BIT (nonnull_local[current_block->index],
0511851c 5647 REGNO (reg) - npi->min_reg);
dfdb644f
JL
5648 }
5649 }
5650
5651 /* Now compute global properties based on the local properties. This
5652 is a classic global availability algorithm. */
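/* Stated informally (a sketch of the usual forward-availability
   equations, not a quotation of compute_available itself):

      AVIN[b]  = intersection over predecessors p of AVOUT[p]
      AVOUT[b] = nonnull_local[b] | (AVIN[b] & ~nonnull_killed[b])

   so a register is known nonnull on entry to a block only if it is
   known nonnull at the exit of every predecessor.  */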
ce724250
JL
5653 compute_available (nonnull_local, nonnull_killed,
5654 nonnull_avout, nonnull_avin);
dfdb644f
JL
5655
5656 /* Now look at each bb and see if it ends with a compare of a value
5657 against zero. */
e0082a72 5658 FOR_EACH_BB (bb)
dfdb644f 5659 {
e0082a72 5660 rtx last_insn = bb->end;
0511851c 5661 rtx condition, earliest;
dfdb644f
JL
5662 int compare_and_branch;
5663
0511851c
MM
5664 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
5665 since BLOCK_REG[BB] is zero if this block did not end with a
5666 comparison against zero, this condition works. */
e0082a72
ZD
5667 if (block_reg[bb->index] < npi->min_reg
5668 || block_reg[bb->index] >= npi->max_reg)
dfdb644f
JL
5669 continue;
5670
5671 /* LAST_INSN is a conditional jump. Get its condition. */
5672 condition = get_condition (last_insn, &earliest);
5673
40d7a3fe
NB
5674 /* If we can't determine the condition then skip. */
5675 if (! condition)
5676 continue;
5677
dfdb644f 5678 /* Is the register known to have a nonzero value? */
e0082a72 5679 if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
dfdb644f
JL
5680 continue;
5681
5682 /* Try to compute whether the compare/branch at the loop end is one or
5683 two instructions. */
5684 if (earliest == last_insn)
5685 compare_and_branch = 1;
5686 else if (earliest == prev_nonnote_insn (last_insn))
5687 compare_and_branch = 2;
5688 else
5689 continue;
5690
5691 /* We know the register in this comparison is nonnull at exit from
5692 this block. We can optimize this comparison. */
5693 if (GET_CODE (condition) == NE)
5694 {
5695 rtx new_jump;
5696
38c1593d
JH
5697 new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
5698 last_insn);
dfdb644f
JL
5699 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
5700 LABEL_NUSES (JUMP_LABEL (new_jump))++;
5701 emit_barrier_after (new_jump);
5702 }
8e184d9c 5703
99a15921 5704 something_changed = 1;
9cd56be1 5705 delete_insn (last_insn);
dfdb644f 5706 if (compare_and_branch == 2)
589005ff 5707 delete_insn (earliest);
e0082a72 5708 purge_dead_edges (bb);
0511851c
MM
5709
5710 /* Don't check this block again. (Note that BLOCK_END is
589005ff 5711 invalid here; we deleted the last instruction in the
0511851c 5712 block.) */
e0082a72 5713 block_reg[bb->index] = 0;
0511851c 5714 }
99a15921
JL
5715
5716 return something_changed;
0511851c
MM
5717}
5718
5719/* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
5720 at compile time.
5721
5722 This is conceptually similar to global constant/copy propagation and
5723 classic global CSE (it even uses the same dataflow equations as cprop).
5724
5725 If a register is used as memory address with the form (mem (reg)), then we
5726 know that REG can not be zero at that point in the program. Any instruction
5727 which sets REG "kills" this property.
5728
5729 So, if every path leading to a conditional branch has an available memory
5730 reference of that form, then we know the register can not have the value
589005ff 5731 zero at the conditional branch.
0511851c
MM
5732
5733 So we merely need to compute the local properties and propagate that data
5734 around the cfg, then optimize where possible.
5735
5736 We run this pass two times. Once before CSE, then again after CSE. This
5737 has proven to be the most profitable approach. It is rare for new
5738 optimization opportunities of this nature to appear after the first CSE
5739 pass.
5740
5741 This could probably be integrated with global cprop with a little work. */
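/* A small example of the transformation (illustrative only; the names
   are made up):

      int f (int *p)
      {
        int x = *p;
        if (p == 0)
          return -1;
        return x;
      }

   The load through p makes p nonnull on every path reaching the
   comparison, so the p == 0 test can never be true and the conditional
   branch (and, when it is a separate insn, the compare) is deleted.  */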
5742
99a15921 5743int
0511851c 5744delete_null_pointer_checks (f)
2e653e39 5745 rtx f ATTRIBUTE_UNUSED;
0511851c 5746{
0511851c 5747 sbitmap *nonnull_avin, *nonnull_avout;
770ae6cc 5748 unsigned int *block_reg;
e0082a72 5749 basic_block bb;
0511851c
MM
5750 int reg;
5751 int regs_per_pass;
5752 int max_reg;
5753 struct null_pointer_info npi;
99a15921 5754 int something_changed = 0;
0511851c 5755
0511851c 5756 /* If we have only a single block, then there's nothing to do. */
0b17ab2f 5757 if (n_basic_blocks <= 1)
99a15921 5758 return 0;
0511851c
MM
5759
5760 /* Trying to perform global optimizations on flow graphs which have
5761 a high connectivity will take a long time and is unlikely to be
5762 particularly useful.
5763
43e72072 5764 In normal circumstances a cfg should have about twice as many edges
0511851c
MM
5765 as blocks. But we do not want to punish small functions which have
5766 a couple switch statements. So we require a relatively large number
5767 of basic blocks and the ratio of edges to blocks to be high. */
0b17ab2f 5768 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
99a15921 5769 return 0;
0511851c 5770
0511851c
MM
5771 /* We need four bitmaps, each with a bit for each register in each
5772 basic block. */
5773 max_reg = max_reg_num ();
d55bc081 5774 regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);
0511851c
MM
5775
5776 /* Allocate bitmaps to hold local and global properties. */
d55bc081
ZD
5777 npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5778 npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5779 nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5780 nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
0511851c
MM
5781
5782 /* Go through the basic blocks, seeing whether or not each block
5783 ends with a conditional branch whose condition is a comparison
5784 against zero. Record the register compared in BLOCK_REG. */
d55bc081 5785 block_reg = (unsigned int *) xcalloc (last_basic_block, sizeof (int));
e0082a72 5786 FOR_EACH_BB (bb)
0511851c 5787 {
e0082a72 5788 rtx last_insn = bb->end;
0511851c
MM
5789 rtx condition, earliest, reg;
5790
5791 /* We only want conditional branches. */
5792 if (GET_CODE (last_insn) != JUMP_INSN
7f1c097d
JH
5793 || !any_condjump_p (last_insn)
5794 || !onlyjump_p (last_insn))
0511851c
MM
5795 continue;
5796
5797 /* LAST_INSN is a conditional jump. Get its condition. */
5798 condition = get_condition (last_insn, &earliest);
5799
4fe9b91c 5800 /* If we were unable to get the condition, or it is not an equality
0511851c
MM
5801 comparison against zero then there's nothing we can do. */
5802 if (!condition
5803 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
5804 || GET_CODE (XEXP (condition, 1)) != CONST_INT
589005ff 5805 || (XEXP (condition, 1)
0511851c
MM
5806 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
5807 continue;
5808
5809 /* We must be checking a register against zero. */
5810 reg = XEXP (condition, 0);
5811 if (GET_CODE (reg) != REG)
5812 continue;
5813
e0082a72 5814 block_reg[bb->index] = REGNO (reg);
0511851c
MM
5815 }
5816
5817 /* Go through the algorithm for each block of registers. */
5818 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
5819 {
5820 npi.min_reg = reg;
5821 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
99a15921
JL
5822 something_changed |= delete_null_pointer_checks_1 (block_reg,
5823 nonnull_avin,
5824 nonnull_avout,
5825 &npi);
dfdb644f
JL
5826 }
5827
0511851c
MM
5828 /* Free the table of registers compared at the end of every block. */
5829 free (block_reg);
5830
dfdb644f 5831 /* Free bitmaps. */
5a660bff
DB
5832 sbitmap_vector_free (npi.nonnull_local);
5833 sbitmap_vector_free (npi.nonnull_killed);
5834 sbitmap_vector_free (nonnull_avin);
5835 sbitmap_vector_free (nonnull_avout);
99a15921
JL
5836
5837 return something_changed;
dfdb644f 5838}
bb457bd9
JL
5839
5840/* Code Hoisting variables and subroutines. */
5841
5842/* Very busy expressions. */
5843static sbitmap *hoist_vbein;
5844static sbitmap *hoist_vbeout;
5845
5846/* Hoistable expressions. */
5847static sbitmap *hoist_exprs;
5848
5849/* Dominator bitmaps. */
355be0dc 5850dominance_info dominators;
bb457bd9
JL
5851
5852/* ??? We could compute post dominators and run this algorithm in
68e82b83 5853 reverse to perform tail merging; doing so would probably be
bb457bd9
JL
5854 more effective than the tail merging code in jump.c.
5855
5856 It's unclear if tail merging could be run in parallel with
5857 code hoisting. It would be nice. */
5858
5859/* Allocate vars used for code hoisting analysis. */
5860
5861static void
5862alloc_code_hoist_mem (n_blocks, n_exprs)
5863 int n_blocks, n_exprs;
5864{
5865 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5866 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
5867 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
5868
5869 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
5870 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
5871 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
5872 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
bb457bd9
JL
5873}
5874
5875/* Free vars used for code hoisting analysis. */
5876
5877static void
5878free_code_hoist_mem ()
5879{
5a660bff
DB
5880 sbitmap_vector_free (antloc);
5881 sbitmap_vector_free (transp);
5882 sbitmap_vector_free (comp);
bb457bd9 5883
5a660bff
DB
5884 sbitmap_vector_free (hoist_vbein);
5885 sbitmap_vector_free (hoist_vbeout);
5886 sbitmap_vector_free (hoist_exprs);
5887 sbitmap_vector_free (transpout);
bb457bd9 5888
355be0dc 5889 free_dominance_info (dominators);
bb457bd9
JL
5890}
5891
5892/* Compute the very busy expressions at entry/exit from each block.
5893
5894 An expression is very busy if all paths from a given point
5895 compute the expression. */
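/* As a sketch, the fixed point computed below corresponds to

      VBEIN[b]  = antloc[b] | (VBEOUT[b] & transp[b])
      VBEOUT[b] = intersection over successors s of VBEIN[s]

   so an expression is very busy on exit from a block only if every
   successor computes it before any of its operands can change.  */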
5896
5897static void
5898compute_code_hoist_vbeinout ()
5899{
e0082a72
ZD
5900 int changed, passes;
5901 basic_block bb;
bb457bd9 5902
d55bc081
ZD
5903 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
5904 sbitmap_vector_zero (hoist_vbein, last_basic_block);
bb457bd9
JL
5905
5906 passes = 0;
5907 changed = 1;
c4c81601 5908
bb457bd9
JL
5909 while (changed)
5910 {
5911 changed = 0;
c4c81601 5912
bb457bd9
JL
5913 /* We scan the blocks in the reverse order to speed up
5914 the convergence. */
e0082a72 5915 FOR_EACH_BB_REVERSE (bb)
bb457bd9 5916 {
e0082a72
ZD
5917 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
5918 hoist_vbeout[bb->index], transp[bb->index]);
5919 if (bb->next_bb != EXIT_BLOCK_PTR)
5920 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
bb457bd9 5921 }
c4c81601 5922
bb457bd9
JL
5923 passes++;
5924 }
5925
5926 if (gcse_file)
5927 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
5928}
5929
5930/* Top level routine to do the dataflow analysis needed by code hoisting. */
5931
5932static void
5933compute_code_hoist_data ()
5934{
5935 compute_local_properties (transp, comp, antloc, 0);
5936 compute_transpout ();
5937 compute_code_hoist_vbeinout ();
355be0dc 5938 dominators = calculate_dominance_info (CDI_DOMINATORS);
bb457bd9
JL
5939 if (gcse_file)
5940 fprintf (gcse_file, "\n");
5941}
5942
5943/* Determine if the expression identified by EXPR_INDEX would
5944 reach BB unimpaired if it was placed at the end of EXPR_BB.
5945
5946 It's unclear exactly what Muchnick meant by "unimpaired". It seems
5947 to me that the expression must either be computed or transparent in
5948 *every* block in the path(s) from EXPR_BB to BB. Any other definition
5949 would allow the expression to be hoisted out of loops, even if
5950 the expression wasn't a loop invariant.
5951
5952 Contrast this to reachability for PRE where an expression is
5953 considered reachable if *any* path reaches instead of *all*
5954 paths. */
5955
5956static int
5957hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited)
e2d2ed72 5958 basic_block expr_bb;
bb457bd9 5959 int expr_index;
e2d2ed72 5960 basic_block bb;
bb457bd9
JL
5961 char *visited;
5962{
5963 edge pred;
283a2545 5964 int visited_allocated_locally = 0;
589005ff 5965
bb457bd9
JL
5966
5967 if (visited == NULL)
5968 {
8e42ace1 5969 visited_allocated_locally = 1;
d55bc081 5970 visited = xcalloc (last_basic_block, 1);
bb457bd9
JL
5971 }
5972
e2d2ed72 5973 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
bb457bd9 5974 {
e2d2ed72 5975 basic_block pred_bb = pred->src;
bb457bd9
JL
5976
5977 if (pred->src == ENTRY_BLOCK_PTR)
5978 break;
f305679f
JH
5979 else if (pred_bb == expr_bb)
5980 continue;
0b17ab2f 5981 else if (visited[pred_bb->index])
bb457bd9 5982 continue;
c4c81601 5983
bb457bd9 5984 /* Does this predecessor generate this expression? */
0b17ab2f 5985 else if (TEST_BIT (comp[pred_bb->index], expr_index))
bb457bd9 5986 break;
0b17ab2f 5987 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
bb457bd9 5988 break;
c4c81601 5989
bb457bd9
JL
5990 /* Not killed. */
5991 else
5992 {
0b17ab2f 5993 visited[pred_bb->index] = 1;
bb457bd9
JL
5994 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
5995 pred_bb, visited))
5996 break;
5997 }
5998 }
589005ff 5999 if (visited_allocated_locally)
283a2545 6000 free (visited);
c4c81601 6001
bb457bd9
JL
6002 return (pred == NULL);
6003}
6004\f
6005/* Actually perform code hoisting. */
c4c81601 6006
bb457bd9
JL
6007static void
6008hoist_code ()
6009{
e0082a72 6010 basic_block bb, dominated;
c635a1ec
DB
6011 basic_block *domby;
6012 unsigned int domby_len;
6013 unsigned int i,j;
bb457bd9 6014 struct expr **index_map;
c4c81601 6015 struct expr *expr;
bb457bd9 6016
d55bc081 6017 sbitmap_vector_zero (hoist_exprs, last_basic_block);
bb457bd9
JL
6018
6019 /* Compute a mapping from expression number (`bitmap_index') to
6020 hash table entry. */
6021
dd1bd863 6022 index_map = (struct expr **) xcalloc (n_exprs, sizeof (struct expr *));
bb457bd9 6023 for (i = 0; i < expr_hash_table_size; i++)
c4c81601
RK
6024 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
6025 index_map[expr->bitmap_index] = expr;
bb457bd9
JL
6026
6027 /* Walk over each basic block looking for potentially hoistable
6028 expressions, nothing gets hoisted from the entry block. */
e0082a72 6029 FOR_EACH_BB (bb)
bb457bd9
JL
6030 {
6031 int found = 0;
6032 int insn_inserted_p;
6033
c635a1ec 6034 domby_len = get_dominated_by (dominators, bb, &domby);
bb457bd9
JL
6035 /* Examine each expression that is very busy at the exit of this
6036 block. These are the potentially hoistable expressions. */
e0082a72 6037 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
bb457bd9
JL
6038 {
6039 int hoistable = 0;
c4c81601 6040
c635a1ec
DB
6041 if (TEST_BIT (hoist_vbeout[bb->index], i)
6042 && TEST_BIT (transpout[bb->index], i))
bb457bd9
JL
6043 {
6044 /* We've found a potentially hoistable expression, now
6045 we look at every block BB dominates to see if it
6046 computes the expression. */
c635a1ec 6047 for (j = 0; j < domby_len; j++)
bb457bd9 6048 {
c635a1ec 6049 dominated = domby[j];
bb457bd9 6050 /* Ignore self dominance. */
c635a1ec 6051 if (bb == dominated)
bb457bd9 6052 continue;
bb457bd9
JL
6053 /* We've found a dominated block, now see if it computes
6054 the busy expression and whether or not moving that
6055 expression to the "beginning" of that block is safe. */
e0082a72 6056 if (!TEST_BIT (antloc[dominated->index], i))
bb457bd9
JL
6057 continue;
6058
6059 /* Note if the expression would reach the dominated block
589005ff 6060 unimpaired if it was placed at the end of BB.
bb457bd9
JL
6061
6062 Keep track of how many times this expression is hoistable
6063 from a dominated block into BB. */
e0082a72 6064 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
bb457bd9
JL
6065 hoistable++;
6066 }
6067
ff7cc307 6068 /* If we found more than one hoistable occurrence of this
bb457bd9
JL
6069 expression, then note it in the bitmap of expressions to
6070 hoist. It makes no sense to hoist things which are computed
6071 in only one BB, and doing so tends to pessimize register
6072 allocation. One could increase this value to try harder
6073 to avoid any possible code expansion due to register
6074 allocation issues; however experiments have shown that
6075 the vast majority of hoistable expressions are only movable
6076 from two successors, so raising this threshold is likely
6077 to nullify any benefit we get from code hoisting. */
6078 if (hoistable > 1)
6079 {
e0082a72 6080 SET_BIT (hoist_exprs[bb->index], i);
bb457bd9
JL
6081 found = 1;
6082 }
6083 }
6084 }
bb457bd9
JL
6085 /* If we found nothing to hoist, then quit now. */
6086 if (! found)
c635a1ec
DB
6087 {
6088 free (domby);
bb457bd9 6089 continue;
c635a1ec 6090 }
bb457bd9
JL
6091
6092 /* Loop over all the hoistable expressions. */
e0082a72 6093 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
bb457bd9
JL
6094 {
6095 /* We want to insert the expression into BB only once, so
6096 note when we've inserted it. */
6097 insn_inserted_p = 0;
6098
6099 /* These tests should be the same as the tests above. */
e0082a72 6100 if (TEST_BIT (hoist_vbeout[bb->index], i))
bb457bd9
JL
6101 {
6102 /* We've found a potentially hoistable expression, now
6103 we look at every block BB dominates to see if it
6104 computes the expression. */
c635a1ec 6105 for (j = 0; j < domby_len; j++)
bb457bd9 6106 {
c635a1ec 6107 dominated = domby[j];
bb457bd9 6108 /* Ignore self dominance. */
c635a1ec 6109 if (bb == dominated)
bb457bd9
JL
6110 continue;
6111
6112 /* We've found a dominated block, now see if it computes
6113 the busy expression and whether or not moving that
6114 expression to the "beginning" of that block is safe. */
e0082a72 6115 if (!TEST_BIT (antloc[dominated->index], i))
bb457bd9
JL
6116 continue;
6117
6118 /* The expression is computed in the dominated block and
6119 it would be safe to compute it at the start of the
6120 dominated block. Now we have to determine if the
ff7cc307 6121 expression would reach the dominated block if it was
bb457bd9 6122 placed at the end of BB. */
e0082a72 6123 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
bb457bd9
JL
6124 {
6125 struct expr *expr = index_map[i];
6126 struct occr *occr = expr->antic_occr;
6127 rtx insn;
6128 rtx set;
6129
ff7cc307 6130 /* Find the right occurrence of this expression. */
e0082a72 6131 while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
bb457bd9
JL
6132 occr = occr->next;
6133
6134 /* Should never happen. */
6135 if (!occr)
6136 abort ();
6137
6138 insn = occr->insn;
589005ff 6139
bb457bd9
JL
6140 set = single_set (insn);
6141 if (! set)
6142 abort ();
6143
6144 /* Create a pseudo-reg to store the result of reaching
6145 expressions into. Get the mode for the new pseudo
6146 from the mode of the original destination pseudo. */
6147 if (expr->reaching_reg == NULL)
6148 expr->reaching_reg
6149 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
6150
10d1bb36
JH
6151 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
6152 delete_insn (insn);
6153 occr->deleted_p = 1;
6154 if (!insn_inserted_p)
bb457bd9 6155 {
10d1bb36
JH
6156 insert_insn_end_bb (index_map[i], bb, 0);
6157 insn_inserted_p = 1;
bb457bd9
JL
6158 }
6159 }
6160 }
6161 }
6162 }
c635a1ec 6163 free (domby);
bb457bd9 6164 }
c4c81601 6165
8e42ace1 6166 free (index_map);
bb457bd9
JL
6167}
6168
6169/* Top level routine to perform one code hoisting (aka unification) pass
6170
6171 Return non-zero if a change was made. */
6172
6173static int
6174one_code_hoisting_pass ()
6175{
6176 int changed = 0;
6177
6178 alloc_expr_hash_table (max_cuid);
6179 compute_expr_hash_table ();
6180 if (gcse_file)
6181 dump_hash_table (gcse_file, "Code Hoisting Expressions", expr_hash_table,
6182 expr_hash_table_size, n_exprs);
c4c81601 6183
bb457bd9
JL
6184 if (n_exprs > 0)
6185 {
d55bc081 6186 alloc_code_hoist_mem (last_basic_block, n_exprs);
bb457bd9
JL
6187 compute_code_hoist_data ();
6188 hoist_code ();
6189 free_code_hoist_mem ();
6190 }
c4c81601 6191
bb457bd9
JL
6192 free_expr_hash_table ();
6193
6194 return changed;
6195}
a13d4ebf
AM
6196\f
6197/* Here we provide the things required to do store motion towards
6198 the exit. In order for this to be effective, gcse also needed to
6199 be taught how to move a load when it is killed only by a store to itself.
6200
6201 int i;
6202 float a[10];
6203
6204 void foo(float scale)
6205 {
6206 for (i=0; i<10; i++)
6207 a[i] *= scale;
6208 }
6209
6210 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
589005ff
KH
6211 the load out since it's live around the loop, and stored at the bottom
6212 of the loop.
a13d4ebf 6213
589005ff 6214 The 'Load Motion' referred to and implemented in this file is
a13d4ebf
AM
6215 an enhancement to gcse which, when using edge-based LCM, recognizes
6216 this situation and allows gcse to move the load out of the loop.
6217
6218 Once gcse has hoisted the load, store motion can then push this
6219 load towards the exit, and we end up with no loads or stores of 'i'
6220 in the loop. */
6221
ff7cc307 6222/* This will search the ldst list for a matching expression. If it
a13d4ebf
AM
6223 doesn't find one, we create one and initialize it. */
6224
6225static struct ls_expr *
6226ldst_entry (x)
6227 rtx x;
6228{
6229 struct ls_expr * ptr;
6230
6231 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6232 if (expr_equiv_p (ptr->pattern, x))
6233 break;
6234
6235 if (!ptr)
6236 {
6237 ptr = (struct ls_expr *) xmalloc (sizeof (struct ls_expr));
6238
6239 ptr->next = pre_ldst_mems;
6240 ptr->expr = NULL;
6241 ptr->pattern = x;
6242 ptr->loads = NULL_RTX;
6243 ptr->stores = NULL_RTX;
6244 ptr->reaching_reg = NULL_RTX;
6245 ptr->invalid = 0;
6246 ptr->index = 0;
6247 ptr->hash_index = 0;
6248 pre_ldst_mems = ptr;
6249 }
589005ff 6250
a13d4ebf
AM
6251 return ptr;
6252}
6253
6254/* Free up an individual ldst entry. */
6255
589005ff 6256static void
a13d4ebf
AM
6257free_ldst_entry (ptr)
6258 struct ls_expr * ptr;
6259{
aaa4ca30
AJ
6260 free_INSN_LIST_list (& ptr->loads);
6261 free_INSN_LIST_list (& ptr->stores);
a13d4ebf
AM
6262
6263 free (ptr);
6264}
6265
6266/* Free up all memory associated with the ldst list. */
6267
6268static void
6269free_ldst_mems ()
6270{
589005ff 6271 while (pre_ldst_mems)
a13d4ebf
AM
6272 {
6273 struct ls_expr * tmp = pre_ldst_mems;
6274
6275 pre_ldst_mems = pre_ldst_mems->next;
6276
6277 free_ldst_entry (tmp);
6278 }
6279
6280 pre_ldst_mems = NULL;
6281}
6282
6283/* Dump debugging info about the ldst list. */
6284
6285static void
6286print_ldst_list (file)
6287 FILE * file;
6288{
6289 struct ls_expr * ptr;
6290
6291 fprintf (file, "LDST list: \n");
6292
6293 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6294 {
6295 fprintf (file, " Pattern (%3d): ", ptr->index);
6296
6297 print_rtl (file, ptr->pattern);
6298
6299 fprintf (file, "\n Loads : ");
6300
6301 if (ptr->loads)
6302 print_rtl (file, ptr->loads);
6303 else
6304 fprintf (file, "(nil)");
6305
6306 fprintf (file, "\n Stores : ");
6307
6308 if (ptr->stores)
6309 print_rtl (file, ptr->stores);
6310 else
6311 fprintf (file, "(nil)");
6312
6313 fprintf (file, "\n\n");
6314 }
6315
6316 fprintf (file, "\n");
6317}
6318
6319/* Return the entry in the ldst list whose pattern matches X, or NULL if there is none. */
6320
6321static struct ls_expr *
6322find_rtx_in_ldst (x)
6323 rtx x;
6324{
6325 struct ls_expr * ptr;
589005ff 6326
a13d4ebf
AM
6327 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6328 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
6329 return ptr;
6330
6331 return NULL;
6332}
6333
6334/* Assign each element of the list of mems a monotonically increasing value. */
6335
6336static int
6337enumerate_ldsts ()
6338{
6339 struct ls_expr * ptr;
6340 int n = 0;
6341
6342 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6343 ptr->index = n++;
6344
6345 return n;
6346}
6347
6348/* Return first item in the list. */
6349
6350static inline struct ls_expr *
6351first_ls_expr ()
6352{
6353 return pre_ldst_mems;
6354}
6355
6356/* Return the next item in the list after the specified one. */
6357
6358static inline struct ls_expr *
6359next_ls_expr (ptr)
6360 struct ls_expr * ptr;
6361{
6362 return ptr->next;
6363}
6364\f
6365/* Load Motion for loads which only kill themselves. */
6366
6367/* Return true if x is a simple MEM operation, with no registers or
6368 side effects. These are the types of loads we consider for the
6369 ld_motion list, otherwise we let the usual aliasing take care of it. */
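/* For instance (illustrative C, not from any test case), with

      static int counter;
      void bump (void) { counter = counter + 1; }

   the load and store of `counter' use a fixed symbolic address, are not
   volatile and are not BLKmode, so they qualify as simple below; a load
   through an arbitrary pointer parameter would not, since its address
   register can vary.  */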
6370
589005ff 6371static int
a13d4ebf
AM
6372simple_mem (x)
6373 rtx x;
6374{
6375 if (GET_CODE (x) != MEM)
6376 return 0;
589005ff 6377
a13d4ebf
AM
6378 if (MEM_VOLATILE_P (x))
6379 return 0;
589005ff 6380
a13d4ebf
AM
6381 if (GET_MODE (x) == BLKmode)
6382 return 0;
aaa4ca30
AJ
6383
6384 if (!rtx_varies_p (XEXP (x, 0), 0))
a13d4ebf 6385 return 1;
589005ff 6386
a13d4ebf
AM
6387 return 0;
6388}
6389
589005ff
KH
6390/* Make sure there isn't a buried reference in this pattern anywhere.
6391 If there is, invalidate the entry for it since we're not capable
6392 of fixing it up just yet. We have to be sure we know about ALL
a13d4ebf
AM
6393 loads since the aliasing code will allow all entries in the
6394 ld_motion list to not-alias itself. If we miss a load, we will get
589005ff 6395 the wrong value since gcse might common it and we won't know to
a13d4ebf
AM
6396 fix it up. */
6397
6398static void
6399invalidate_any_buried_refs (x)
6400 rtx x;
6401{
6402 const char * fmt;
8e42ace1 6403 int i, j;
a13d4ebf
AM
6404 struct ls_expr * ptr;
6405
6406 /* Invalidate it in the list. */
6407 if (GET_CODE (x) == MEM && simple_mem (x))
6408 {
6409 ptr = ldst_entry (x);
6410 ptr->invalid = 1;
6411 }
6412
6413 /* Recursively process the insn. */
6414 fmt = GET_RTX_FORMAT (GET_CODE (x));
589005ff 6415
a13d4ebf
AM
6416 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6417 {
6418 if (fmt[i] == 'e')
6419 invalidate_any_buried_refs (XEXP (x, i));
6420 else if (fmt[i] == 'E')
6421 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6422 invalidate_any_buried_refs (XVECEXP (x, i, j));
6423 }
6424}
6425
6426/* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
6427 being defined as MEM loads and stores to symbols, with no
589005ff 6428 side effects and no registers in the expression. If there are any
f63d1bf7 6429 uses/defs which don't match these criteria, it is invalidated and
a13d4ebf
AM
6430 trimmed out later. */
6431
589005ff 6432static void
a13d4ebf
AM
6433compute_ld_motion_mems ()
6434{
6435 struct ls_expr * ptr;
e0082a72 6436 basic_block bb;
a13d4ebf 6437 rtx insn;
589005ff 6438
a13d4ebf
AM
6439 pre_ldst_mems = NULL;
6440
e0082a72 6441 FOR_EACH_BB (bb)
a13d4ebf 6442 {
e0082a72
ZD
6443 for (insn = bb->head;
6444 insn && insn != NEXT_INSN (bb->end);
a13d4ebf
AM
6445 insn = NEXT_INSN (insn))
6446 {
6447 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
6448 {
6449 if (GET_CODE (PATTERN (insn)) == SET)
6450 {
6451 rtx src = SET_SRC (PATTERN (insn));
6452 rtx dest = SET_DEST (PATTERN (insn));
6453
6454 /* Check for a simple LOAD... */
6455 if (GET_CODE (src) == MEM && simple_mem (src))
6456 {
6457 ptr = ldst_entry (src);
6458 if (GET_CODE (dest) == REG)
6459 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
6460 else
6461 ptr->invalid = 1;
6462 }
6463 else
6464 {
6465 /* Make sure there isn't a buried load somewhere. */
6466 invalidate_any_buried_refs (src);
6467 }
589005ff 6468
a13d4ebf
AM
6469 /* Check for stores. Don't worry about aliased ones, they
6470 will block any movement we might do later. We only care
6471 about this exact pattern since those are the only
6472 circumstance that we will ignore the aliasing info. */
6473 if (GET_CODE (dest) == MEM && simple_mem (dest))
6474 {
6475 ptr = ldst_entry (dest);
589005ff 6476
f54104df
AO
6477 if (GET_CODE (src) != MEM
6478 && GET_CODE (src) != ASM_OPERANDS)
a13d4ebf
AM
6479 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6480 else
6481 ptr->invalid = 1;
6482 }
6483 }
6484 else
6485 invalidate_any_buried_refs (PATTERN (insn));
6486 }
6487 }
6488 }
6489}
6490
589005ff 6491/* Remove any references that have been either invalidated or are not in the
a13d4ebf
AM
6492 expression list for pre gcse. */
6493
6494static void
6495trim_ld_motion_mems ()
6496{
6497 struct ls_expr * last = NULL;
6498 struct ls_expr * ptr = first_ls_expr ();
6499
6500 while (ptr != NULL)
6501 {
6502 int del = ptr->invalid;
6503 struct expr * expr = NULL;
589005ff 6504
a13d4ebf 6505 /* Delete if entry has been made invalid. */
589005ff 6506 if (!del)
a13d4ebf
AM
6507 {
6508 unsigned int i;
589005ff 6509
a13d4ebf
AM
6510 del = 1;
6511 /* Delete if we cannot find this mem in the expression list. */
6512 for (i = 0; i < expr_hash_table_size && del; i++)
6513 {
589005ff
KH
6514 for (expr = expr_hash_table[i];
6515 expr != NULL;
a13d4ebf
AM
6516 expr = expr->next_same_hash)
6517 if (expr_equiv_p (expr->expr, ptr->pattern))
6518 {
6519 del = 0;
6520 break;
6521 }
6522 }
6523 }
589005ff 6524
a13d4ebf
AM
6525 if (del)
6526 {
6527 if (last != NULL)
6528 {
6529 last->next = ptr->next;
6530 free_ldst_entry (ptr);
6531 ptr = last->next;
6532 }
6533 else
6534 {
6535 pre_ldst_mems = pre_ldst_mems->next;
6536 free_ldst_entry (ptr);
6537 ptr = pre_ldst_mems;
6538 }
6539 }
6540 else
6541 {
6542 /* Set the expression field if we are keeping it. */
6543 last = ptr;
6544 ptr->expr = expr;
6545 ptr = ptr->next;
6546 }
6547 }
6548
6549 /* Show the world what we've found. */
6550 if (gcse_file && pre_ldst_mems != NULL)
6551 print_ldst_list (gcse_file);
6552}
6553
6554/* This routine will take an expression which we are replacing with
6555 a reaching register, and update any stores that are needed if
6556 that expression is in the ld_motion list. Stores are updated by
6557 copying their SRC to the reaching register, and then storing
6558 the reaching register into the store location. This keeps the
6559 correct value in the reaching register for the loads. */
6560
6561static void
6562update_ld_motion_stores (expr)
6563 struct expr * expr;
6564{
6565 struct ls_expr * mem_ptr;
6566
6567 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
6568 {
589005ff
KH
6569 /* We can try to find just the REACHED stores, but it shouldn't
6570 matter to set the reaching reg everywhere... some might be
a13d4ebf
AM
6571 dead and should be eliminated later. */
6572
6573 /* We replace SET mem = expr with
6574 SET reg = expr
589005ff 6575 SET mem = reg , where reg is the
a13d4ebf
AM
6576 reaching reg used in the load. */
6577 rtx list = mem_ptr->stores;
589005ff 6578
a13d4ebf
AM
6579 for ( ; list != NULL_RTX; list = XEXP (list, 1))
6580 {
6581 rtx insn = XEXP (list, 0);
6582 rtx pat = PATTERN (insn);
6583 rtx src = SET_SRC (pat);
6584 rtx reg = expr->reaching_reg;
c57718d3 6585 rtx copy, new;
a13d4ebf
AM
6586
6587 /* If we've already copied it, continue. */
6588 if (expr->reaching_reg == src)
6589 continue;
589005ff 6590
a13d4ebf
AM
6591 if (gcse_file)
6592 {
6593 fprintf (gcse_file, "PRE: store updated with reaching reg ");
6594 print_rtl (gcse_file, expr->reaching_reg);
6595 fprintf (gcse_file, ":\n ");
6596 print_inline_rtx (gcse_file, insn, 8);
6597 fprintf (gcse_file, "\n");
6598 }
589005ff 6599
a13d4ebf 6600 copy = gen_move_insn (reg, SET_SRC (pat));
c57718d3
RK
6601 new = emit_insn_before (copy, insn);
6602 record_one_set (REGNO (reg), new);
a13d4ebf
AM
6603 SET_SRC (pat) = reg;
6604
6605 /* un-recognize this pattern since it's probably different now. */
6606 INSN_CODE (insn) = -1;
6607 gcse_create_count++;
6608 }
6609 }
6610}
6611\f
6612/* Store motion code. */
6613
589005ff 6614/* This is used to communicate the target bitvector we want to use in the
aaa4ca30
AJ
6615 reg_set_info routine when called via the note_stores mechanism. */
6616static sbitmap * regvec;
6617
a13d4ebf
AM
6618/* Used in computing the reverse edge graph bit vectors. */
6619static sbitmap * st_antloc;
6620
6621/* Global holding the number of store expressions we are dealing with. */
6622static int num_stores;
6623
aaa4ca30 6624/* Checks to set if we need to mark a register set. Called from note_stores. */
a13d4ebf 6625
aaa4ca30
AJ
6626static void
6627reg_set_info (dest, setter, data)
6628 rtx dest, setter ATTRIBUTE_UNUSED;
6629 void * data ATTRIBUTE_UNUSED;
a13d4ebf 6630{
aaa4ca30
AJ
6631 if (GET_CODE (dest) == SUBREG)
6632 dest = SUBREG_REG (dest);
adfcce61 6633
aaa4ca30
AJ
6634 if (GET_CODE (dest) == REG)
6635 SET_BIT (*regvec, REGNO (dest));
a13d4ebf
AM
6636}
6637
589005ff 6638/* Return non-zero if the register operands of expression X are killed
aaa4ca30 6639 anywhere in basic block BB. */
a13d4ebf
AM
6640
6641static int
aaa4ca30 6642store_ops_ok (x, bb)
a13d4ebf 6643 rtx x;
e2d2ed72 6644 basic_block bb;
a13d4ebf
AM
6645{
6646 int i;
6647 enum rtx_code code;
6648 const char * fmt;
6649
6650 /* Repeat is used to turn tail-recursion into iteration. */
6651 repeat:
6652
6653 if (x == 0)
6654 return 1;
6655
6656 code = GET_CODE (x);
6657 switch (code)
6658 {
6659 case REG:
aaa4ca30
AJ
6660 /* If a reg has changed after us in this
6661 block, the operand has been killed. */
0b17ab2f 6662 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
a13d4ebf
AM
6663
6664 case MEM:
6665 x = XEXP (x, 0);
6666 goto repeat;
6667
6668 case PRE_DEC:
6669 case PRE_INC:
6670 case POST_DEC:
6671 case POST_INC:
6672 return 0;
6673
6674 case PC:
6675 case CC0: /*FIXME*/
6676 case CONST:
6677 case CONST_INT:
6678 case CONST_DOUBLE:
69ef87e2 6679 case CONST_VECTOR:
a13d4ebf
AM
6680 case SYMBOL_REF:
6681 case LABEL_REF:
6682 case ADDR_VEC:
6683 case ADDR_DIFF_VEC:
6684 return 1;
6685
6686 default:
6687 break;
6688 }
6689
6690 i = GET_RTX_LENGTH (code) - 1;
6691 fmt = GET_RTX_FORMAT (code);
589005ff 6692
a13d4ebf
AM
6693 for (; i >= 0; i--)
6694 {
6695 if (fmt[i] == 'e')
6696 {
6697 rtx tem = XEXP (x, i);
6698
6699 /* If we are about to do the last recursive call
6700 needed at this level, change it into iteration.
6701 This function is called enough to be worth it. */
6702 if (i == 0)
6703 {
6704 x = tem;
6705 goto repeat;
6706 }
589005ff 6707
aaa4ca30 6708 if (! store_ops_ok (tem, bb))
a13d4ebf
AM
6709 return 0;
6710 }
6711 else if (fmt[i] == 'E')
6712 {
6713 int j;
589005ff 6714
a13d4ebf
AM
6715 for (j = 0; j < XVECLEN (x, i); j++)
6716 {
aaa4ca30 6717 if (! store_ops_ok (XVECEXP (x, i, j), bb))
a13d4ebf
AM
6718 return 0;
6719 }
6720 }
6721 }
6722
6723 return 1;
6724}
6725
aaa4ca30 6726/* Determine whether INSN is a MEM store pattern that we will consider moving; if so, record it in the store table. */
a13d4ebf
AM
6727
6728static void
6729find_moveable_store (insn)
6730 rtx insn;
6731{
6732 struct ls_expr * ptr;
6733 rtx dest = PATTERN (insn);
6734
f54104df
AO
6735 if (GET_CODE (dest) != SET
6736 || GET_CODE (SET_SRC (dest)) == ASM_OPERANDS)
a13d4ebf
AM
6737 return;
6738
6739 dest = SET_DEST (dest);
589005ff 6740
a13d4ebf
AM
6741 if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
6742 || GET_MODE (dest) == BLKmode)
aaa4ca30
AJ
6743 return;
6744
6745 if (GET_CODE (XEXP (dest, 0)) != SYMBOL_REF)
a13d4ebf 6746 return;
aaa4ca30
AJ
6747
6748 if (rtx_varies_p (XEXP (dest, 0), 0))
a13d4ebf 6749 return;
aaa4ca30 6750
a13d4ebf
AM
6751 ptr = ldst_entry (dest);
6752 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6753}
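
/* For example (illustrative only; the register number is made up), a store
 such as
 (set (mem:SI (symbol_ref ("g"))) (reg:SI 58))
 passes the tests above: the SET destination is a non-volatile, non-BLKmode
 MEM whose address is a SYMBOL_REF and does not vary.  A store through a
 pointer register is rejected because its address is not a SYMBOL_REF.  */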
6754
aaa4ca30
AJ
6755/* Build the store table: find the stores that are candidates for store
6756 motion and record which registers are set in each basic block. Returns the number of store expressions found. */
a13d4ebf
AM
6757
6758static int
6759compute_store_table ()
6760{
e0082a72
ZD
6761 int ret;
6762 basic_block bb;
aaa4ca30 6763 unsigned regno;
a13d4ebf 6764 rtx insn, pat;
aaa4ca30 6765
a13d4ebf
AM
6766 max_gcse_regno = max_reg_num ();
6767
d55bc081 6768 reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
aaa4ca30 6769 max_gcse_regno);
d55bc081 6770 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
a13d4ebf 6771 pre_ldst_mems = 0;
aaa4ca30 6772
a13d4ebf 6773 /* Find all the stores we care about. */
e0082a72 6774 FOR_EACH_BB (bb)
a13d4ebf 6775 {
e0082a72
ZD
6776 regvec = & (reg_set_in_block[bb->index]);
6777 for (insn = bb->end;
6778 insn && insn != PREV_INSN (bb->end);
a13d4ebf
AM
6779 insn = PREV_INSN (insn))
6780 {
19652adf
ZW
6781 /* Ignore anything that is not a normal insn. */
6782 if (! INSN_P (insn))
a13d4ebf
AM
6783 continue;
6784
aaa4ca30
AJ
6785 if (GET_CODE (insn) == CALL_INSN)
6786 {
19652adf 6787 bool clobbers_all = false;
589005ff 6788#ifdef NON_SAVING_SETJMP
19652adf
ZW
6789 if (NON_SAVING_SETJMP
6790 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
6791 clobbers_all = true;
6792#endif
6793
aaa4ca30 6794 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
19652adf
ZW
6795 if (clobbers_all
6796 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
e0082a72 6797 SET_BIT (reg_set_in_block[bb->index], regno);
aaa4ca30 6798 }
589005ff 6799
a13d4ebf 6800 pat = PATTERN (insn);
aaa4ca30 6801 note_stores (pat, reg_set_info, NULL);
589005ff 6802
a13d4ebf
AM
6803 /* Now that we've marked regs, look for stores. */
6804 if (GET_CODE (pat) == SET)
6805 find_moveable_store (insn);
6806 }
6807 }
6808
6809 ret = enumerate_ldsts ();
589005ff 6810
a13d4ebf
AM
6811 if (gcse_file)
6812 {
6813 fprintf (gcse_file, "Store Motion Expressions.\n");
6814 print_ldst_list (gcse_file);
6815 }
589005ff 6816
a13d4ebf
AM
6817 return ret;
6818}
6819
aaa4ca30 6820/* Check to see if the load X is aliased with STORE_PATTERN. */
a13d4ebf
AM
6821
6822static int
6823load_kills_store (x, store_pattern)
6824 rtx x, store_pattern;
6825{
6826 if (true_dependence (x, GET_MODE (x), store_pattern, rtx_addr_varies_p))
6827 return 1;
6828 return 0;
6829}
6830
589005ff 6831/* Go through the entire insn X, looking for any loads which might alias
aaa4ca30 6832 STORE_PATTERN. Return 1 if found. */
a13d4ebf
AM
6833
6834static int
6835find_loads (x, store_pattern)
6836 rtx x, store_pattern;
6837{
6838 const char * fmt;
8e42ace1 6839 int i, j;
a13d4ebf
AM
6840 int ret = 0;
6841
24a28584
JH
6842 if (!x)
6843 return 0;
6844
589005ff 6845 if (GET_CODE (x) == SET)
a13d4ebf
AM
6846 x = SET_SRC (x);
6847
6848 if (GET_CODE (x) == MEM)
6849 {
6850 if (load_kills_store (x, store_pattern))
6851 return 1;
6852 }
6853
6854 /* Recursively process the insn. */
6855 fmt = GET_RTX_FORMAT (GET_CODE (x));
589005ff 6856
a13d4ebf
AM
6857 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
6858 {
6859 if (fmt[i] == 'e')
6860 ret |= find_loads (XEXP (x, i), store_pattern);
6861 else if (fmt[i] == 'E')
6862 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6863 ret |= find_loads (XVECEXP (x, i, j), store_pattern);
6864 }
6865 return ret;
6866}
6867
589005ff 6868/* Check if INSN kills the store pattern X (is aliased with it).
a13d4ebf
AM
6869 Return 1 if it does. */
6870
589005ff 6871static int
a13d4ebf
AM
6872store_killed_in_insn (x, insn)
6873 rtx x, insn;
6874{
6875 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
6876 return 0;
589005ff 6877
a13d4ebf
AM
6878 if (GET_CODE (insn) == CALL_INSN)
6879 {
1218665b
JJ
6880 /* A normal or pure call might read from pattern,
6881 but a const call will not. */
a6a063b8 6882 return ! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn);
a13d4ebf 6883 }
589005ff 6884
a13d4ebf
AM
6885 if (GET_CODE (PATTERN (insn)) == SET)
6886 {
6887 rtx pat = PATTERN (insn);
6888 /* Check for memory stores to aliased objects. */
6889 if (GET_CODE (SET_DEST (pat)) == MEM && !expr_equiv_p (SET_DEST (pat), x))
aaa4ca30 6890 /* Pretend it's a load and check for aliasing. */
a13d4ebf
AM
6891 if (find_loads (SET_DEST (pat), x))
6892 return 1;
6893 return find_loads (SET_SRC (pat), x);
6894 }
6895 else
6896 return find_loads (PATTERN (insn), x);
6897}
6898
6899/* Returns 1 if the expression X is loaded or clobbered on or after INSN
6900 within basic block BB. */
6901
589005ff 6902static int
aaa4ca30 6903store_killed_after (x, insn, bb)
a13d4ebf 6904 rtx x, insn;
e2d2ed72 6905 basic_block bb;
a13d4ebf 6906{
8e42ace1 6907 rtx last = bb->end;
589005ff 6908
8e42ace1
KH
6909 if (insn == last)
6910 return 0;
aaa4ca30
AJ
6911
6912 /* Check if the register operands of the store are OK in this block.
589005ff
KH
6913 Note that if registers are changed ANYWHERE in the block, we'll
6914 decide we can't move it, regardless of whether it changed above
aaa4ca30
AJ
6915 or below the store. This could be improved by checking the register
6916 operands while looking for aliasing in each insn. */
6917 if (!store_ops_ok (XEXP (x, 0), bb))
a13d4ebf
AM
6918 return 1;
6919
8e42ace1
KH
6920 for ( ; insn && insn != NEXT_INSN (last); insn = NEXT_INSN (insn))
6921 if (store_killed_in_insn (x, insn))
6922 return 1;
589005ff 6923
a13d4ebf
AM
6924 return 0;
6925}
6926
aaa4ca30 6927/* Returns 1 if the expression X is loaded or clobbered on or before INSN
a13d4ebf 6928 within basic block BB. */
589005ff 6929static int
a13d4ebf
AM
6930store_killed_before (x, insn, bb)
6931 rtx x, insn;
e2d2ed72 6932 basic_block bb;
a13d4ebf 6933{
8e42ace1 6934 rtx first = bb->head;
a13d4ebf 6935
8e42ace1
KH
6936 if (insn == first)
6937 return store_killed_in_insn (x, insn);
589005ff 6938
aaa4ca30 6939 /* Check if the register operands of the store are OK in this block.
589005ff
KH
6940 Note that if registers are changed ANYWHERE in the block, we'll
6941 decide we can't move it, regardless of whether it changed above
aaa4ca30
AJ
6942 or below the store. This could be improved by checking the register
6943 operands while looking for aliasing in each insn. */
6944 if (!store_ops_ok (XEXP (x, 0), bb))
a13d4ebf
AM
6945 return 1;
6946
8e42ace1
KH
6947 for ( ; insn && insn != PREV_INSN (first); insn = PREV_INSN (insn))
6948 if (store_killed_in_insn (x, insn))
6949 return 1;
589005ff 6950
8e42ace1 6951 return 0;
a13d4ebf
AM
6952}
6953
6954#define ANTIC_STORE_LIST(x) ((x)->loads)
6955#define AVAIL_STORE_LIST(x) ((x)->stores)
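
/* These macros reuse the LOADS and STORES fields of struct ls_expr: for
 store motion, ANTIC_STORE_LIST collects the store insns that are
 anticipatable at the start of their block, and AVAIL_STORE_LIST the ones
 still available at the end, as filled in by build_store_vectors below.  */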
6956
6957/* Given the table of available store insns at the end of blocks,
6958 determine which ones are not killed by aliasing, and generate
6959 the appropriate vectors for gen and killed. */
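/* In outline (a summary of the loops below, not additional behavior):
 for each candidate store and each block B,
 ae_gen[B] - the store is still available at the end of B,
 st_antloc[B] - the store is anticipatable at the start of B,
 ae_kill[B] - the store is killed somewhere in B,
 transp[B] - nothing in B kills the store (B is transparent).
 These vectors are handed to pre_edge_rev_lcm by store_motion.  */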
6960static void
589005ff 6961build_store_vectors ()
a13d4ebf 6962{
e0082a72 6963 basic_block bb, b;
a13d4ebf
AM
6964 rtx insn, st;
6965 struct ls_expr * ptr;
6966
6967 /* Build the gen_vector. This is any store in the table which is not killed
6968 by aliasing later in its block. */
d55bc081
ZD
6969 ae_gen = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
6970 sbitmap_vector_zero (ae_gen, last_basic_block);
a13d4ebf 6971
d55bc081
ZD
6972 st_antloc = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
6973 sbitmap_vector_zero (st_antloc, last_basic_block);
aaa4ca30 6974
a13d4ebf 6975 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
589005ff 6976 {
a13d4ebf
AM
6977 /* Put all the stores into either the antic list, or the avail list,
6978 or both. */
6979 rtx store_list = ptr->stores;
6980 ptr->stores = NULL_RTX;
6981
6982 for (st = store_list; st != NULL; st = XEXP (st, 1))
6983 {
6984 insn = XEXP (st, 0);
e2d2ed72 6985 bb = BLOCK_FOR_INSN (insn);
589005ff 6986
aaa4ca30 6987 if (!store_killed_after (ptr->pattern, insn, bb))
a13d4ebf
AM
6988 {
6989 /* If we've already seen an available expression in this block,
6990 we can delete the one we saw already (it occurs earlier in
6991 the block) and replace it with this one. We'll copy the
6992 old SRC expression to an unused register in case there
6993 are any side effects. */
0b17ab2f 6994 if (TEST_BIT (ae_gen[bb->index], ptr->index))
a13d4ebf
AM
6995 {
6996 /* Find previous store. */
6997 rtx st;
6998 for (st = AVAIL_STORE_LIST (ptr); st ; st = XEXP (st, 1))
e2d2ed72 6999 if (BLOCK_FOR_INSN (XEXP (st, 0)) == bb)
a13d4ebf
AM
7000 break;
7001 if (st)
7002 {
7003 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
7004 if (gcse_file)
8e42ace1 7005 fprintf (gcse_file, "Removing redundant store:\n");
a13d4ebf
AM
7006 replace_store_insn (r, XEXP (st, 0), bb);
7007 XEXP (st, 0) = insn;
7008 continue;
7009 }
7010 }
0b17ab2f 7011 SET_BIT (ae_gen[bb->index], ptr->index);
a13d4ebf
AM
7012 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn,
7013 AVAIL_STORE_LIST (ptr));
7014 }
589005ff 7015
a13d4ebf
AM
7016 if (!store_killed_before (ptr->pattern, insn, bb))
7017 {
7018 SET_BIT (st_antloc[BLOCK_NUM (insn)], ptr->index);
7019 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (insn,
7020 ANTIC_STORE_LIST (ptr));
7021 }
7022 }
589005ff 7023
a13d4ebf
AM
7024 /* Free the original list of store insns. */
7025 free_INSN_LIST_list (&store_list);
7026 }
589005ff 7027
d55bc081
ZD
7028 ae_kill = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7029 sbitmap_vector_zero (ae_kill, last_basic_block);
a13d4ebf 7030
d55bc081
ZD
7031 transp = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7032 sbitmap_vector_zero (transp, last_basic_block);
a13d4ebf
AM
7033
7034 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
e0082a72 7035 FOR_EACH_BB (b)
a13d4ebf 7036 {
e0082a72 7037 if (store_killed_after (ptr->pattern, b->head, b))
a13d4ebf 7038 {
dc297297 7039 /* The anticipatable expression is not killed if it's gen'd. */
aaa4ca30 7040 /*
589005ff 7041 We leave this check out for now. If we have a code sequence
aaa4ca30
AJ
7042 in a block which looks like:
7043 ST MEMa = x
7044 L y = MEMa
7045 ST MEMa = z
7046 We should flag this as having an ANTIC expression, NOT
7047 transparent, NOT killed, and AVAIL.
7048 Unfortunately, since we haven't re-written all loads to
589005ff 7049 use the reaching reg, we'll end up doing an incorrect
aaa4ca30
AJ
7050 load in the middle here if we push the store down. It happens in
7051 gcc.c-torture/execute/960311-1.c with -O3
7052 If we always kill it in this case, we'll sometimes do
7053 unnecessary work, but it shouldn't actually hurt anything.
7054 if (!TEST_BIT (ae_gen[b], ptr->index)). */
e0082a72 7055 SET_BIT (ae_kill[b->index], ptr->index);
aaa4ca30
AJ
7056 }
7057 else
e0082a72 7058 SET_BIT (transp[b->index], ptr->index);
aaa4ca30
AJ
7059 }
7060
7061 /* Any block with no exits calls some non-returning function, so
7062 we'd better mark the store killed here, or we might not store to
7063 it at all. If we knew it was abort, we wouldn't have to store,
7064 but we don't know that for sure. */
589005ff 7065 if (gcse_file)
aaa4ca30
AJ
7066 {
7067 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
7068 print_ldst_list (gcse_file);
d55bc081
ZD
7069 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
7070 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
7071 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
7072 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
a13d4ebf
AM
7073 }
7074}
7075
589005ff 7076/* Insert an instruction at the beginning of a basic block, and update
a13d4ebf
AM
7077 the BLOCK_HEAD if needed. */
7078
589005ff 7079static void
a13d4ebf
AM
7080insert_insn_start_bb (insn, bb)
7081 rtx insn;
e2d2ed72 7082 basic_block bb;
a13d4ebf
AM
7083{
7084 /* Insert at start of successor block. */
e2d2ed72
AM
7085 rtx prev = PREV_INSN (bb->head);
7086 rtx before = bb->head;
a13d4ebf
AM
7087 while (before != 0)
7088 {
7089 if (GET_CODE (before) != CODE_LABEL
7090 && (GET_CODE (before) != NOTE
7091 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
7092 break;
7093 prev = before;
e2d2ed72 7094 if (prev == bb->end)
a13d4ebf
AM
7095 break;
7096 before = NEXT_INSN (before);
7097 }
7098
7099 insn = emit_insn_after (insn, prev);
7100
a13d4ebf
AM
7101 if (gcse_file)
7102 {
7103 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
0b17ab2f 7104 bb->index);
a13d4ebf
AM
7105 print_inline_rtx (gcse_file, insn, 6);
7106 fprintf (gcse_file, "\n");
7107 }
7108}
7109
7110/* This routine will insert a store on an edge. EXPR is the ldst entry for
7111 the memory reference, and E is the edge to insert it on. Returns non-zero
7112 if an edge insertion was performed. */
7113
7114static int
7115insert_store (expr, e)
7116 struct ls_expr * expr;
7117 edge e;
7118{
7119 rtx reg, insn;
e2d2ed72 7120 basic_block bb;
a13d4ebf
AM
7121 edge tmp;
7122
7123 /* We did all the deletes before this insert, so if we didn't delete a
7124 store, then we haven't set the reaching reg yet either. */
7125 if (expr->reaching_reg == NULL_RTX)
7126 return 0;
7127
7128 reg = expr->reaching_reg;
7129 insn = gen_move_insn (expr->pattern, reg);
589005ff 7130
a13d4ebf
AM
7131 /* If we are inserting this expression on ALL predecessor edges of a BB,
7132 insert it at the start of the BB, and reset the insert bits on the other
ff7cc307 7133 edges so we don't also try to insert it on those edges. */
e2d2ed72 7134 bb = e->dest;
a13d4ebf
AM
7135 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7136 {
7137 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7138 if (index == EDGE_INDEX_NO_EDGE)
7139 abort ();
7140 if (! TEST_BIT (pre_insert_map[index], expr->index))
7141 break;
7142 }
7143
7144 /* If tmp is NULL, we found an insertion on every edge, blank the
7145 insertion vector for these edges, and insert at the start of the BB. */
e2d2ed72 7146 if (!tmp && bb != EXIT_BLOCK_PTR)
a13d4ebf
AM
7147 {
7148 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7149 {
7150 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7151 RESET_BIT (pre_insert_map[index], expr->index);
7152 }
7153 insert_insn_start_bb (insn, bb);
7154 return 0;
7155 }
589005ff 7156
a13d4ebf
AM
7157 /* We can't insert on this edge, so we'll insert at the head of the
7158 successor's block. See Morgan, sec 10.5. */
7159 if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
7160 {
7161 insert_insn_start_bb (insn, bb);
7162 return 0;
7163 }
7164
7165 insert_insn_on_edge (insn, e);
589005ff 7166
a13d4ebf
AM
7167 if (gcse_file)
7168 {
7169 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
0b17ab2f 7170 e->src->index, e->dest->index);
a13d4ebf
AM
7171 print_inline_rtx (gcse_file, insn, 6);
7172 fprintf (gcse_file, "\n");
7173 }
589005ff 7174
a13d4ebf
AM
7175 return 1;
7176}
7177
7178/* This routine will replace a store with a SET to a specified register. */
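/* For instance (illustrative only), deleting the store
 (set (mem:SI (symbol_ref ("x"))) (reg:SI 58))
 with REG being (reg:SI 100) emits
 (set (reg:SI 100) (reg:SI 58))
 in its place, so the stored value is still computed into the reaching reg.  */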
7179
7180static void
7181replace_store_insn (reg, del, bb)
7182 rtx reg, del;
e2d2ed72 7183 basic_block bb;
a13d4ebf
AM
7184{
7185 rtx insn;
589005ff 7186
a13d4ebf
AM
7187 insn = gen_move_insn (reg, SET_SRC (PATTERN (del)));
7188 insn = emit_insn_after (insn, del);
589005ff 7189
a13d4ebf
AM
7190 if (gcse_file)
7191 {
589005ff 7192 fprintf (gcse_file,
0b17ab2f 7193 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
a13d4ebf 7194 print_inline_rtx (gcse_file, del, 6);
8e42ace1 7195 fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
a13d4ebf 7196 print_inline_rtx (gcse_file, insn, 6);
8e42ace1 7197 fprintf (gcse_file, "\n");
a13d4ebf 7198 }
589005ff 7199
49ce134f 7200 delete_insn (del);
a13d4ebf
AM
7201}
7202
7203
7204/* Delete a store, but copy the value that would have been stored into
7205 the reaching_reg for later storing. */
7206
7207static void
7208delete_store (expr, bb)
7209 struct ls_expr * expr;
e2d2ed72 7210 basic_block bb;
a13d4ebf
AM
7211{
7212 rtx reg, i, del;
7213
7214 if (expr->reaching_reg == NULL_RTX)
7215 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
a13d4ebf 7216
589005ff
KH
7217
7218 /* If there is more than 1 store, the earlier ones will be dead,
7219 but it doesn't hurt to replace them here. */
a13d4ebf 7220 reg = expr->reaching_reg;
589005ff 7221
a13d4ebf
AM
7222 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
7223 {
7224 del = XEXP (i, 0);
e2d2ed72 7225 if (BLOCK_FOR_INSN (del) == bb)
a13d4ebf 7226 {
589005ff 7227 /* We know there is only one since we deleted redundant
a13d4ebf
AM
7228 ones during the available computation. */
7229 replace_store_insn (reg, del, bb);
7230 break;
7231 }
7232 }
7233}
7234
7235/* Free memory used by store motion. */
7236
589005ff 7237static void
a13d4ebf
AM
7238free_store_memory ()
7239{
7240 free_ldst_mems ();
589005ff 7241
a13d4ebf 7242 if (ae_gen)
5a660bff 7243 sbitmap_vector_free (ae_gen);
a13d4ebf 7244 if (ae_kill)
5a660bff 7245 sbitmap_vector_free (ae_kill);
a13d4ebf 7246 if (transp)
5a660bff 7247 sbitmap_vector_free (transp);
a13d4ebf 7248 if (st_antloc)
5a660bff 7249 sbitmap_vector_free (st_antloc);
a13d4ebf 7250 if (pre_insert_map)
5a660bff 7251 sbitmap_vector_free (pre_insert_map);
a13d4ebf 7252 if (pre_delete_map)
5a660bff 7253 sbitmap_vector_free (pre_delete_map);
aaa4ca30
AJ
7254 if (reg_set_in_block)
7255 sbitmap_vector_free (reg_set_in_block);
589005ff 7256
a13d4ebf
AM
7257 ae_gen = ae_kill = transp = st_antloc = NULL;
7258 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
7259}
7260
7261/* Perform store motion. Much like gcse, except we move expressions the
7262 other way by looking at the flowgraph in reverse. */
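/* In outline, the sequence below is:
 1) compute_store_table - find candidate stores and per-block register sets;
 2) build_store_vectors - build the transp/antloc/avail/kill bitmaps;
 3) pre_edge_rev_lcm - LCM over the reverse flowgraph, yielding the edges
 to insert on and the blocks to delete in;
 4) delete_store and insert_store, followed by commit_edge_insertions if
 any edge insertions were queued.  */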
7263
7264static void
7265store_motion ()
7266{
e0082a72 7267 basic_block bb;
0b17ab2f 7268 int x;
a13d4ebf 7269 struct ls_expr * ptr;
adfcce61 7270 int update_flow = 0;
aaa4ca30 7271
a13d4ebf
AM
7272 if (gcse_file)
7273 {
7274 fprintf (gcse_file, "before store motion\n");
7275 print_rtl (gcse_file, get_insns ());
7276 }
7277
7278
7279 init_alias_analysis ();
aaa4ca30 7280
a13d4ebf
AM
7281 /* Find all the stores that are live to the end of their block. */
7282 num_stores = compute_store_table ();
7283 if (num_stores == 0)
7284 {
aaa4ca30 7285 sbitmap_vector_free (reg_set_in_block);
a13d4ebf
AM
7286 end_alias_analysis ();
7287 return;
7288 }
7289
7290 /* Now compute what's actually available to move. */
7291 add_noreturn_fake_exit_edges ();
7292 build_store_vectors ();
7293
589005ff
KH
7294 edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
7295 st_antloc, ae_kill, &pre_insert_map,
a13d4ebf
AM
7296 &pre_delete_map);
7297
7298 /* Now we want to insert the new stores which are going to be needed. */
7299 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7300 {
e0082a72
ZD
7301 FOR_EACH_BB (bb)
7302 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
7303 delete_store (ptr, bb);
a13d4ebf 7304
0b17ab2f
RH
7305 for (x = 0; x < NUM_EDGES (edge_list); x++)
7306 if (TEST_BIT (pre_insert_map[x], ptr->index))
7307 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
a13d4ebf
AM
7308 }
7309
7310 if (update_flow)
7311 commit_edge_insertions ();
aaa4ca30 7312
a13d4ebf
AM
7313 free_store_memory ();
7314 free_edge_list (edge_list);
7315 remove_fake_edges ();
7316 end_alias_analysis ();
7317}
e2500fed
GK
7318
7319#include "gt-gcse.h"