/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass

*/

/* References searched while implementing this.

   Compilers: Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region? Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Control Flow
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "params.h"
#include "cselib.h"

#include "obstack.h"

/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */

/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global CSE.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is gained by writing small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle things can
   be rearranged.

   Help stamp out big monolithic functions!  */
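
/* An illustrative sketch of PRE in action (hand-written, not compiler
   output).  Suppose `a + b' is computed on only one of two paths into a
   join block:

	B1: x = a + b      B2: (no computation)
	      \               /
	       \             /
	        B3: y = a + b

   The computation in B3 is partially redundant: redundant along the path
   through B1, but not along the path through B2.  PRE inserts a
   computation of `a + b' on the B2->B3 edge, copies every reaching
   computation into a fresh pseudo, and rewrites B3 as a copy from that
   pseudo, so the expression is evaluated exactly once on every path.
   A loop invariant is just the special case where the back edge is the
   path on which the value is already available.  */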
\f
/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

struct reg_use {rtx reg_rtx; };

/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};
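
/* An illustrative example (hand-written) of the two occurrence kinds.
   In a block containing

	r5 = r1 + r2	(anticipatable: first occurrence; r1 and r2
			 not yet modified in this block)
	r1 = r3		(modifies an operand)
	r6 = r1 + r2	(available: last occurrence; r1 and r2 unchanged
			 from here to the end of the block)

   the first occurrence of r1+r2 goes on antic_occr and the second on
   avail_occr.  A block's sole occurrence can qualify for both lists.  */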

/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Nonzero if this is the set (copy propagation) hash table,
     zero if it is the expression hash table.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
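
/* For example (hand-written): given an insn stream whose UIDs are 17
   (insn), 18 (note) and 23 (insn), the real insns get CUIDs 0 and 1,
   and the note maps to 1, the CUID of the next real insn.  CUIDs are
   dense and monotonic, so `INSN_CUID (a) < INSN_CUID (b)' tests
   ordering within a block, which raw UIDs (gappy, and assigned to
   notes too) cannot.  */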

/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
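
/* An illustrative sketch, not part of the pass: how a client could walk
   every recorded setter of a pseudo-register.  `dump_reg_sets' is a
   hypothetical name used only for this example.  */
#if 0
static void
dump_reg_sets (regno)
     int regno;
{
  struct reg_set *r;

  for (r = reg_set_table[regno]; r != NULL; r = r->next)
    fprintf (stderr, "reg %d set by insn %d\n", regno, INSN_UID (r->insn));
}
#endif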

/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by anything except itself,
   i.e., loads and stores to a single location.
   We can then allow movement of these MEM refs with a little special
   allowance (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must
   have no side effects so we can re-issue the setter value.
   Store motion uses this structure as an expression table to track stores
   which look interesting, and might be movable towards the exit block.  */

struct ls_expr
{
  struct expr * expr;		/* Gcse expression reference for LM.  */
  rtx pattern;			/* Pattern of this mem.  */
  rtx pattern_regs;		/* List of registers mentioned by the mem.  */
  rtx loads;			/* INSN list of loads seen.  */
  rtx stores;			/* INSN list of stores seen.  */
  struct ls_expr * next;	/* Next in the list.  */
  int invalid;			/* Invalid for some reason.  */
  int index;			/* If it maps to a bitmap index.  */
  int hash_index;		/* Index when in a hash table.  */
  rtx reaching_reg;		/* Register to use when re-writing.  */
};
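
/* For example (hand-written): in a loop such as

	while (...)
	  *p = *p + f ();

   every load and store touches the single location `*p'.  That MEM gets
   one ls_expr entry whose `loads' and `stores' lists record each such
   insn; provided nothing else may alias the location, the load can be
   hoisted out of the loop and the stores sunk toward the exit, both
   rewritten in terms of `reaching_reg'.  */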

/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number for a list of insns which modify
   memory within that block.  */
static rtx * modify_mem_list;
bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;
bitmap canon_modify_mem_list_set;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;
\f
/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays.  i.e.
   rd_kill[block_num][cuid_num]
   ae_kill[block_num][expr_num]  */

/* For reaching defs.  */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* For available exprs.  */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;

/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  basic_block current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};
\f
static void compute_can_copy PARAMS ((void));
static char *gmalloc PARAMS ((unsigned int));
static char *grealloc PARAMS ((char *, unsigned int));
static char *gcse_alloc PARAMS ((unsigned long));
static void alloc_gcse_mem PARAMS ((rtx));
static void free_gcse_mem PARAMS ((void));
static void alloc_reg_set_mem PARAMS ((int));
static void free_reg_set_mem PARAMS ((void));
static int get_bitmap_width PARAMS ((int, int, int));
static void record_one_set PARAMS ((int, rtx));
static void record_set_info PARAMS ((rtx, rtx, void *));
static void compute_sets PARAMS ((rtx));
static void hash_scan_insn PARAMS ((rtx, struct hash_table *, int));
static void hash_scan_set PARAMS ((rtx, rtx, struct hash_table *));
static void hash_scan_clobber PARAMS ((rtx, rtx, struct hash_table *));
static void hash_scan_call PARAMS ((rtx, rtx, struct hash_table *));
static int want_to_gcse_p PARAMS ((rtx));
static bool gcse_constant_p PARAMS ((rtx));
static int oprs_unchanged_p PARAMS ((rtx, rtx, int));
static int oprs_anticipatable_p PARAMS ((rtx, rtx));
static int oprs_available_p PARAMS ((rtx, rtx));
static void insert_expr_in_table PARAMS ((rtx, enum machine_mode, rtx,
					  int, int, struct hash_table *));
static void insert_set_in_table PARAMS ((rtx, rtx, struct hash_table *));
static unsigned int hash_expr PARAMS ((rtx, enum machine_mode, int *, int));
static unsigned int hash_expr_1 PARAMS ((rtx, enum machine_mode, int *));
static unsigned int hash_string_1 PARAMS ((const char *));
static unsigned int hash_set PARAMS ((int, int));
static int expr_equiv_p PARAMS ((rtx, rtx));
static void record_last_reg_set_info PARAMS ((rtx, int));
static void record_last_mem_set_info PARAMS ((rtx));
static void record_last_set_info PARAMS ((rtx, rtx, void *));
static void compute_hash_table PARAMS ((struct hash_table *));
static void alloc_hash_table PARAMS ((int, struct hash_table *, int));
static void free_hash_table PARAMS ((struct hash_table *));
static void compute_hash_table_work PARAMS ((struct hash_table *));
static void dump_hash_table PARAMS ((FILE *, const char *,
				     struct hash_table *));
static struct expr *lookup_expr PARAMS ((rtx, struct hash_table *));
static struct expr *lookup_set PARAMS ((unsigned int, struct hash_table *));
static struct expr *next_set PARAMS ((unsigned int, struct expr *));
static void reset_opr_set_tables PARAMS ((void));
static int oprs_not_set_p PARAMS ((rtx, rtx));
static void mark_call PARAMS ((rtx));
static void mark_set PARAMS ((rtx, rtx));
static void mark_clobber PARAMS ((rtx, rtx));
static void mark_oprs_set PARAMS ((rtx));
static void alloc_cprop_mem PARAMS ((int, int));
static void free_cprop_mem PARAMS ((void));
static void compute_transp PARAMS ((rtx, int, sbitmap *, int));
static void compute_transpout PARAMS ((void));
static void compute_local_properties PARAMS ((sbitmap *, sbitmap *, sbitmap *,
					      struct hash_table *));
static void compute_cprop_data PARAMS ((void));
static void find_used_regs PARAMS ((rtx *, void *));
static int try_replace_reg PARAMS ((rtx, rtx, rtx));
static struct expr *find_avail_set PARAMS ((int, rtx));
static int cprop_jump PARAMS ((basic_block, rtx, rtx, rtx, rtx));
static void mems_conflict_for_gcse_p PARAMS ((rtx, rtx, void *));
static int load_killed_in_block_p PARAMS ((basic_block, int, rtx, int));
static void canon_list_insert PARAMS ((rtx, rtx, void *));
static int cprop_insn PARAMS ((rtx, int));
static int cprop PARAMS ((int));
static void find_implicit_sets PARAMS ((void));
static int one_cprop_pass PARAMS ((int, int, int));
static bool constprop_register PARAMS ((rtx, rtx, rtx, int));
static struct expr *find_bypass_set PARAMS ((int, int));
static bool reg_killed_on_edge PARAMS ((rtx, edge));
static int bypass_block PARAMS ((basic_block, rtx, rtx));
static int bypass_conditional_jumps PARAMS ((void));
static void alloc_pre_mem PARAMS ((int, int));
static void free_pre_mem PARAMS ((void));
static void compute_pre_data PARAMS ((void));
static int pre_expr_reaches_here_p PARAMS ((basic_block, struct expr *,
					    basic_block));
static void insert_insn_end_bb PARAMS ((struct expr *, basic_block, int));
static void pre_insert_copy_insn PARAMS ((struct expr *, rtx));
static void pre_insert_copies PARAMS ((void));
static int pre_delete PARAMS ((void));
static int pre_gcse PARAMS ((void));
static int one_pre_gcse_pass PARAMS ((int));
static void add_label_notes PARAMS ((rtx, rtx));
static void alloc_code_hoist_mem PARAMS ((int, int));
static void free_code_hoist_mem PARAMS ((void));
static void compute_code_hoist_vbeinout PARAMS ((void));
static void compute_code_hoist_data PARAMS ((void));
static int hoist_expr_reaches_here_p PARAMS ((basic_block, int, basic_block,
					      char *));
static void hoist_code PARAMS ((void));
static int one_code_hoisting_pass PARAMS ((void));
static void alloc_rd_mem PARAMS ((int, int));
static void free_rd_mem PARAMS ((void));
static void handle_rd_kill_set PARAMS ((rtx, int, basic_block));
static void compute_kill_rd PARAMS ((void));
static void compute_rd PARAMS ((void));
static void alloc_avail_expr_mem PARAMS ((int, int));
static void free_avail_expr_mem PARAMS ((void));
static void compute_ae_gen PARAMS ((struct hash_table *));
static int expr_killed_p PARAMS ((rtx, basic_block));
static void compute_ae_kill PARAMS ((sbitmap *, sbitmap *, struct hash_table *));
static int expr_reaches_here_p PARAMS ((struct occr *, struct expr *,
					basic_block, int));
static rtx computing_insn PARAMS ((struct expr *, rtx));
static int def_reaches_here_p PARAMS ((rtx, rtx));
static int can_disregard_other_sets PARAMS ((struct reg_set **, rtx, int));
static int handle_avail_expr PARAMS ((rtx, struct expr *));
static int classic_gcse PARAMS ((void));
static int one_classic_gcse_pass PARAMS ((int));
static void invalidate_nonnull_info PARAMS ((rtx, rtx, void *));
static int delete_null_pointer_checks_1 PARAMS ((unsigned int *,
						 sbitmap *, sbitmap *,
						 struct null_pointer_info *));
static rtx process_insert_insn PARAMS ((struct expr *));
static int pre_edge_insert PARAMS ((struct edge_list *, struct expr **));
static int expr_reaches_here_p_work PARAMS ((struct occr *, struct expr *,
					     basic_block, int, char *));
static int pre_expr_reaches_here_p_work PARAMS ((basic_block, struct expr *,
						 basic_block, char *));
static struct ls_expr * ldst_entry PARAMS ((rtx));
static void free_ldst_entry PARAMS ((struct ls_expr *));
static void free_ldst_mems PARAMS ((void));
static void print_ldst_list PARAMS ((FILE *));
static struct ls_expr * find_rtx_in_ldst PARAMS ((rtx));
static int enumerate_ldsts PARAMS ((void));
static inline struct ls_expr * first_ls_expr PARAMS ((void));
static inline struct ls_expr * next_ls_expr PARAMS ((struct ls_expr *));
static int simple_mem PARAMS ((rtx));
static void invalidate_any_buried_refs PARAMS ((rtx));
static void compute_ld_motion_mems PARAMS ((void));
static void trim_ld_motion_mems PARAMS ((void));
static void update_ld_motion_stores PARAMS ((struct expr *));
static void reg_set_info PARAMS ((rtx, rtx, void *));
static bool store_ops_ok PARAMS ((rtx, int *));
static rtx extract_mentioned_regs PARAMS ((rtx));
static rtx extract_mentioned_regs_helper PARAMS ((rtx, rtx));
static void find_moveable_store PARAMS ((rtx, int *, int *));
static int compute_store_table PARAMS ((void));
static bool load_kills_store PARAMS ((rtx, rtx));
static bool find_loads PARAMS ((rtx, rtx));
static bool store_killed_in_insn PARAMS ((rtx, rtx, rtx));
static bool store_killed_after PARAMS ((rtx, rtx, rtx, basic_block,
					int *, rtx *));
static bool store_killed_before PARAMS ((rtx, rtx, rtx, basic_block,
					 int *));
static void build_store_vectors PARAMS ((void));
static void insert_insn_start_bb PARAMS ((rtx, basic_block));
static int insert_store PARAMS ((struct ls_expr *, edge));
static void replace_store_insn PARAMS ((rtx, rtx, basic_block));
static void delete_store PARAMS ((struct ls_expr *, basic_block));
static void free_store_memory PARAMS ((void));
static void store_motion PARAMS ((void));
static void free_insn_expr_list_list PARAMS ((rtx *));
static void clear_modify_mem_tables PARAMS ((void));
static void free_modify_mem_tables PARAMS ((void));
static rtx gcse_emit_move_after PARAMS ((rtx, rtx, rtx));
static void local_cprop_find_used_regs PARAMS ((rtx *, void *));
static bool do_local_cprop PARAMS ((rtx, rtx, int, rtx*));
static bool adjust_libcall_notes PARAMS ((rtx, rtx, rtx, rtx*));
static void local_cprop_pass PARAMS ((int));
\f
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (f, file)
     rtx f;
     FILE *file;
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do.  */
  if (n_basic_blocks <= 1)
    return 0;

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple of switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    {
      if (warn_disabled_optimization)
	warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
		 n_basic_blocks, n_edges / n_basic_blocks);
      return 0;
    }

  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_gcse_regno)
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      if (warn_disabled_optimization)
	warning ("GCSE disabled: %d basic blocks and %d registers",
		 n_basic_blocks, max_gcse_regno);

      return 0;
    }

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();
  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
	fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
	 and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
	 during this pass.  */
      changed = one_cprop_pass (pass + 1, 0, 0);

      if (optimize_size)
	changed |= one_classic_gcse_pass (pass + 1);
      else
	{
	  changed |= one_pre_gcse_pass (pass + 1);
	  /* We may have just created new basic blocks.  Release and
	     recompute various things which are sized on the number of
	     basic blocks.  */
	  if (changed)
	    {
	      free_modify_mem_tables ();
	      modify_mem_list
		= (rtx *) gmalloc (last_basic_block * sizeof (rtx));
	      canon_modify_mem_list
		= (rtx *) gmalloc (last_basic_block * sizeof (rtx));
	      memset ((char *) modify_mem_list, 0,
		      last_basic_block * sizeof (rtx));
	      memset ((char *) canon_modify_mem_list, 0,
		      last_basic_block * sizeof (rtx));
	    }
	  free_reg_set_mem ();
	  alloc_reg_set_mem (max_reg_num ());
	  compute_sets (f);
	  run_jump_opt_after_gcse = 1;
	}

      if (max_pass_bytes < bytes_used)
	max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We cannot
	 re-use the existing allocated memory because the tables will not
	 have info for the insns or registers created by partial
	 redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
	 for code size -- it rarely makes programs faster, and can make
	 them bigger if we did partial redundancy elimination (when optimizing
	 for space, we use a classic gcse algorithm instead of partial
	 redundancy algorithms).  */
      if (optimize_size)
	{
	  max_gcse_regno = max_reg_num ();
	  alloc_gcse_mem (f);
	  changed |= one_code_hoisting_pass ();
	  free_gcse_mem ();

	  if (max_pass_bytes < bytes_used)
	    max_pass_bytes = bytes_used;
	}

      if (file)
	{
	  fprintf (file, "\n");
	  fflush (file);
	}

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1, 0);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
	       current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
	       pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();
  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  if (!optimize_size && flag_gcse_sm)
    store_motion ();

  /* Tell our caller whether jump optimization should be rerun: it should
     if cprop changed any jumps or if edge splitting added any labels.  */
  return run_jump_opt_after_gcse;
}
\f
/* Misc. utilities.  */

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy[(int) NUM_MACHINE_MODES];

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy ()
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
	can_copy[i] = 0;
#else
	reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
	insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
	if (recog (PATTERN (insn), insn, NULL) >= 0)
	  can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;

  end_sequence ();
}

/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (mode)
     enum machine_mode mode;
{
  static bool can_copy_init_p = false;

  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}
\f
/* Cover function to xmalloc to record bytes allocated.  */

static char *
gmalloc (size)
     unsigned int size;
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static char *
grealloc (ptr, size)
     char *ptr;
     unsigned int size;
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static char *
gcse_alloc (size)
     unsigned long size;
{
  bytes_used += size;
  return (char *) obstack_alloc (&gcse_obstack, size);
}

/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (f)
     rtx f;
{
  int i, n;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  n = (max_uid + 1) * sizeof (int);
  uid_cuid = (int *) gmalloc (n);
  memset ((char *) uid_cuid, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	uid_cuid[INSN_UID (insn)] = i++;
      else
	uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  n = (max_cuid + 1) * sizeof (rtx);
  cuid_insn = (rtx *) gmalloc (n);
  memset ((char *) cuid_insn, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_XMALLOC ();

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
						       max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
  canon_modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
  memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
  memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
  modify_mem_list_set = BITMAP_XMALLOC ();
  canon_modify_mem_list_set = BITMAP_XMALLOC ();
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem ()
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_XFREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_XFREE (modify_mem_list_set);
  BITMAP_XFREE (canon_modify_mem_list_set);
}

/* Many of the global optimization algorithms work by solving dataflow
   equations for various expressions.  Initially, some local value is
   computed for each expression in each block.  Then, the values across the
   various blocks are combined (by following flow graph edges) to arrive at
   global values.  Conceptually, each set of equations is independent.  We
   may therefore solve all the equations in parallel, solve them one at a
   time, or pick any intermediate approach.

   When you're going to need N two-dimensional bitmaps, each X (say, the
   number of blocks) by Y (say, the number of expressions), call this
   function.  It's not important what X and Y represent; only that Y
   correspond to the things that can be done in parallel.  This function will
   return an appropriate chunking factor C; you should solve C sets of
   equations in parallel.  By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less space.  */

static int
get_bitmap_width (n, x, y)
     int n;
     int x;
     int y;
{
  /* It's not really worth figuring out *exactly* how much memory will
     be used by a particular choice.  The important thing is to get
     something approximately right.  */
  size_t max_bitmap_memory = 10 * 1024 * 1024;

  /* The number of bytes we'd use for a single column of minimum
     width.  */
  size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);

  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;

  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
			     / column_size);
}
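
/* A worked example (hand-computed, assuming a 64-bit SBITMAP_ELT_TYPE so
   SBITMAP_ELT_BITS == 64): with N == 2 bitmaps, X == 1000 blocks and
   Y == 4000 expressions, column_size is 2 * 1000 * 8 == 16000 bytes and
   SBITMAP_SET_SIZE (4000) == 63 words, so the full set needs roughly
   16000 * 63 == 1MB -- under the 10MB cap, and all 4000 equations are
   solved in parallel.  Were Y large enough to exceed the cap, the
   fallback would return about 64 * (10MB / 16000) == 41984 equations
   per chunk.  */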
\f
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */
1133 additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
c4c81601 1134 ABSALTERED. */
589005ff 1135
b5ce41ff 1136static void
02280659 1137compute_local_properties (transp, comp, antloc, table)
b5ce41ff
JL
1138 sbitmap *transp;
1139 sbitmap *comp;
1140 sbitmap *antloc;
02280659 1141 struct hash_table *table;
b5ce41ff 1142{
02280659 1143 unsigned int i;
589005ff 1144
b5ce41ff
JL
1145 /* Initialize any bitmaps that were passed in. */
1146 if (transp)
695ab36a 1147 {
02280659 1148 if (table->set_p)
d55bc081 1149 sbitmap_vector_zero (transp, last_basic_block);
695ab36a 1150 else
d55bc081 1151 sbitmap_vector_ones (transp, last_basic_block);
695ab36a 1152 }
c4c81601 1153
b5ce41ff 1154 if (comp)
d55bc081 1155 sbitmap_vector_zero (comp, last_basic_block);
b5ce41ff 1156 if (antloc)
d55bc081 1157 sbitmap_vector_zero (antloc, last_basic_block);
b5ce41ff 1158
02280659 1159 for (i = 0; i < table->size; i++)
7506f491 1160 {
b5ce41ff
JL
1161 struct expr *expr;
1162
02280659 1163 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
b5ce41ff 1164 {
b5ce41ff 1165 int indx = expr->bitmap_index;
c4c81601 1166 struct occr *occr;
b5ce41ff
JL
1167
1168 /* The expression is transparent in this block if it is not killed.
1169 We start by assuming all are transparent [none are killed], and
1170 then reset the bits for those that are. */
b5ce41ff 1171 if (transp)
02280659 1172 compute_transp (expr->expr, indx, transp, table->set_p);
b5ce41ff
JL
1173
1174 /* The occurrences recorded in antic_occr are exactly those that
cc2902df 1175 we want to set to nonzero in ANTLOC. */
b5ce41ff 1176 if (antloc)
c4c81601
RK
1177 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
1178 {
1179 SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);
b5ce41ff 1180
c4c81601
RK
1181 /* While we're scanning the table, this is a good place to
1182 initialize this. */
1183 occr->deleted_p = 0;
1184 }
b5ce41ff
JL
1185
1186 /* The occurrences recorded in avail_occr are exactly those that
cc2902df 1187 we want to set to nonzero in COMP. */
b5ce41ff 1188 if (comp)
c4c81601
RK
1189 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
1190 {
1191 SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);
b5ce41ff 1192
c4c81601
RK
1193 /* While we're scanning the table, this is a good place to
1194 initialize this. */
1195 occr->copied_p = 0;
1196 }
b5ce41ff
JL
1197
1198 /* While we're scanning the table, this is a good place to
1199 initialize this. */
1200 expr->reaching_reg = 0;
1201 }
7506f491 1202 }
7506f491
DE
1203}
1204\f
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (n_regs)
     int n_regs;
{
  unsigned int n;

  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  n = reg_set_table_size * sizeof (struct reg_set *);
  reg_set_table = (struct reg_set **) gmalloc (n);
  memset ((char *) reg_set_table, 0, n);

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem ()
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}

/* Record REGNO in the reg_set table.  */

static void
record_one_set (regno, insn)
     int regno;
     rtx insn;
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table
	= (struct reg_set **) grealloc ((char *) reg_set_table,
					new_size * sizeof (struct reg_set *));
      memset ((char *) (reg_set_table + reg_set_table_size), 0,
	      (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
						   sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}

/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (dest, setter, data)
     rtx dest, setter ATTRIBUTE_UNUSED;
     void *data;
{
  rtx record_set_insn = (rtx) data;

  if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (f)
     rtx f;
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), record_set_info, insn);
}
\f
/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;

/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static GTY(()) rtx test_insn;
static int
want_to_gcse_p (x)
     rtx x;
{
  int num_clobbers = 0;
  int icode;

  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CALL:
    case CONSTANT_P_RTX:
      return 0;

    default:
      break;
    }

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
	= make_insn_raw (gen_rtx_SET (VOIDmode,
				      gen_rtx_REG (word_mode,
						   FIRST_PSEUDO_REGISTER * 2),
				      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
	  && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}
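
/* For example (hand-written): on a typical target a source such as
   (plus (reg 100) (reg 101)) is accepted -- there is a computation worth
   eliminating -- while a bare (reg 100) or (const_int 4) is rejected,
   since copy/constant propagation handles those more cheaply.  Sources
   that fail to recog, or that would need hard-reg clobbers added to
   match, are likewise rejected.  */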

/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (x, insn, avail_p)
     rtx x, insn;
     int avail_p;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

	if (info->last_bb != current_bb)
	  return 1;
	if (avail_p)
	  return info->last_set < INSN_CUID (insn);
	else
	  return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
				  x, avail_p))
	return 0;
      else
	return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call needed at this
	     level, change it into iteration.  This function is called enough
	     to be worth it.  */
	  if (i == 0)
	    return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

	  else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
	    return 0;
    }

  return 1;
}

a13d4ebf
AM
1444/* Used for communication between mems_conflict_for_gcse_p and
1445 load_killed_in_block_p. Nonzero if mems_conflict_for_gcse_p finds a
1446 conflict between two memory references. */
1447static int gcse_mems_conflict_p;
1448
1449/* Used for communication between mems_conflict_for_gcse_p and
1450 load_killed_in_block_p. A memory reference for a load instruction,
1451 mems_conflict_for_gcse_p will see if a memory store conflicts with
1452 this memory load. */
1453static rtx gcse_mem_operand;
1454
1455/* DEST is the output of an instruction. If it is a memory reference, and
1456 possibly conflicts with the load found in gcse_mem_operand, then set
1457 gcse_mems_conflict_p to a nonzero value. */
1458
1459static void
1460mems_conflict_for_gcse_p (dest, setter, data)
1461 rtx dest, setter ATTRIBUTE_UNUSED;
1462 void *data ATTRIBUTE_UNUSED;
1463{
1464 while (GET_CODE (dest) == SUBREG
1465 || GET_CODE (dest) == ZERO_EXTRACT
1466 || GET_CODE (dest) == SIGN_EXTRACT
1467 || GET_CODE (dest) == STRICT_LOW_PART)
1468 dest = XEXP (dest, 0);
1469
1470 /* If DEST is not a MEM, then it will not conflict with the load. Note
1471 that function calls are assumed to clobber memory, but are handled
1472 elsewhere. */
1473 if (GET_CODE (dest) != MEM)
1474 return;
aaa4ca30 1475
a13d4ebf 1476 /* If we are setting a MEM in our list of specially recognized MEMs,
 1477	      don't mark it as killed this time.  */
1478
47a3dae1 1479 if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
1480 {
1481 if (!find_rtx_in_ldst (dest))
1482 gcse_mems_conflict_p = 1;
1483 return;
1484 }
aaa4ca30 1485
1486 if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
1487 rtx_addr_varies_p))
1488 gcse_mems_conflict_p = 1;
1489}
1490
1491/* Return nonzero if the expression in X (a memory reference) is killed
1492 in block BB before or after the insn with the CUID in UID_LIMIT.
1493 AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
1494 before UID_LIMIT.
1495
1496 To check the entire block, set UID_LIMIT to max_uid + 1 and
1497 AVAIL_P to 0. */
1498
1499static int
1500load_killed_in_block_p (bb, uid_limit, x, avail_p)
e2d2ed72 1501 basic_block bb;
1502 int uid_limit;
1503 rtx x;
1504 int avail_p;
1505{
0b17ab2f 1506 rtx list_entry = modify_mem_list[bb->index];
1507 while (list_entry)
1508 {
1509 rtx setter;
1510 /* Ignore entries in the list that do not apply. */
1511 if ((avail_p
1512 && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
1513 || (! avail_p
1514 && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
1515 {
1516 list_entry = XEXP (list_entry, 1);
1517 continue;
1518 }
1519
1520 setter = XEXP (list_entry, 0);
1521
1522 /* If SETTER is a call everything is clobbered. Note that calls
1523 to pure functions are never put on the list, so we need not
1524 worry about them. */
1525 if (GET_CODE (setter) == CALL_INSN)
1526 return 1;
1527
1528 /* SETTER must be an INSN of some kind that sets memory. Call
589005ff 1529 note_stores to examine each hunk of memory that is modified.
1530
1531 The note_stores interface is pretty limited, so we have to
1532 communicate via global variables. Yuk. */
1533 gcse_mem_operand = x;
1534 gcse_mems_conflict_p = 0;
1535 note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
1536 if (gcse_mems_conflict_p)
1537 return 1;
1538 list_entry = XEXP (list_entry, 1);
1539 }
1540 return 0;
1541}
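/* Usage sketch: for a MEM X used by INSN, oprs_unchanged_p calls
   load_killed_in_block_p (bb, INSN_CUID (insn), x, 0) to test
   anticipatability (is X killed by a store earlier in the block?),
   and passes AVAIL_P == 1 to test availability (is X killed by INSN
   itself or by any later store in the block?).  */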
1542
cc2902df 1543/* Return nonzero if the operands of expression X are unchanged from
1544 the start of INSN's basic block up to but not including INSN. */
1545
1546static int
1547oprs_anticipatable_p (x, insn)
1548 rtx x, insn;
1549{
1550 return oprs_unchanged_p (x, insn, 0);
1551}
1552
cc2902df 1553/* Return nonzero if the operands of expression X are unchanged from
1554 INSN to the end of INSN's basic block. */
1555
1556static int
1557oprs_available_p (x, insn)
1558 rtx x, insn;
1559{
1560 return oprs_unchanged_p (x, insn, 1);
1561}
1562
1563/* Hash expression X.
1564
1565 MODE is only used if X is a CONST_INT. DO_NOT_RECORD_P is a boolean
1566 indicating if a volatile operand is found or if the expression contains
1567 something we don't want to insert in the table.
1568
1569 ??? One might want to merge this with canon_hash. Later. */
1570
1571static unsigned int
1572hash_expr (x, mode, do_not_record_p, hash_table_size)
1573 rtx x;
1574 enum machine_mode mode;
1575 int *do_not_record_p;
1576 int hash_table_size;
1577{
1578 unsigned int hash;
1579
1580 *do_not_record_p = 0;
1581
1582 hash = hash_expr_1 (x, mode, do_not_record_p);
1583 return hash % hash_table_size;
1584}
172890a2 1585
6462bb43 1586/* Hash a string. Just add its bytes up. */
172890a2 1587
1588static inline unsigned
1589hash_string_1 (ps)
1590 const char *ps;
1591{
1592 unsigned hash = 0;
8e42ace1 1593 const unsigned char *p = (const unsigned char *) ps;
589005ff 1594
1595 if (p)
1596 while (*p)
1597 hash += *p++;
1598
1599 return hash;
1600}
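/* For example, hash_string_1 ("gcse") is 'g' + 'c' + 's' + 'e'
   = 103 + 99 + 115 + 101 = 418.  Collisions (e.g. between strings
   that are permutations of the same bytes) are harmless here; they
   only cost a longer hash chain.  */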
1601
1602/* Subroutine of hash_expr to do the actual work. */
1603
1604static unsigned int
1605hash_expr_1 (x, mode, do_not_record_p)
1606 rtx x;
1607 enum machine_mode mode;
1608 int *do_not_record_p;
1609{
1610 int i, j;
1611 unsigned hash = 0;
1612 enum rtx_code code;
6f7d635c 1613 const char *fmt;
7506f491 1614
c4c81601 1615 /* Used to turn recursion into iteration. We can't rely on GCC's
fbe5a4a6 1616 tail-recursion elimination since we need to keep accumulating values
c4c81601 1617 in HASH. */
1618
1619 if (x == 0)
1620 return hash;
1621
c4c81601 1622 repeat:
1623 code = GET_CODE (x);
1624 switch (code)
1625 {
1626 case REG:
1627 hash += ((unsigned int) REG << 7) + REGNO (x);
1628 return hash;
1629
1630 case CONST_INT:
1631 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
1632 + (unsigned int) INTVAL (x));
1633 return hash;
1634
1635 case CONST_DOUBLE:
1636 /* This is like the general case, except that it only counts
1637 the integers representing the constant. */
c4c81601 1638 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1639 if (GET_MODE (x) != VOIDmode)
1640 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
c4c81601 1641 hash += (unsigned int) XWINT (x, i);
7506f491 1642 else
1643 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
1644 + (unsigned int) CONST_DOUBLE_HIGH (x));
1645 return hash;
1646
1647 case CONST_VECTOR:
1648 {
1649 int units;
1650 rtx elt;
1651
1652 units = CONST_VECTOR_NUNITS (x);
1653
1654 for (i = 0; i < units; ++i)
1655 {
1656 elt = CONST_VECTOR_ELT (x, i);
1657 hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
1658 }
1659
1660 return hash;
1661 }
1662
1663 /* Assume there is only one rtx object for any given label. */
1664 case LABEL_REF:
1665 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1666 differences and differences between each stage's debugging dumps. */
1667 hash += (((unsigned int) LABEL_REF << 7)
1668 + CODE_LABEL_NUMBER (XEXP (x, 0)));
1669 return hash;
1670
1671 case SYMBOL_REF:
1672 {
1673 /* Don't hash on the symbol's address to avoid bootstrap differences.
1674 Different hash values may cause expressions to be recorded in
1675 different orders and thus different registers to be used in the
1676 final assembler. This also avoids differences in the dump files
1677 between various stages. */
1678 unsigned int h = 0;
3cce094d 1679 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
c4c81601 1680
1681 while (*p)
1682 h += (h << 7) + *p++; /* ??? revisit */
1683
1684 hash += ((unsigned int) SYMBOL_REF << 7) + h;
1685 return hash;
1686 }
1687
1688 case MEM:
1689 if (MEM_VOLATILE_P (x))
1690 {
1691 *do_not_record_p = 1;
1692 return 0;
1693 }
1694
1695 hash += (unsigned int) MEM;
 1696	      /* We used to hash on the alias set, but this is not good, since the
 1697	         alias set may differ between -fprofile-arcs and -fbranch-probabilities
 1698	         compilations, causing the profiles to fail to match.  */
1699 x = XEXP (x, 0);
1700 goto repeat;
1701
1702 case PRE_DEC:
1703 case PRE_INC:
1704 case POST_DEC:
1705 case POST_INC:
1706 case PC:
1707 case CC0:
1708 case CALL:
1709 case UNSPEC_VOLATILE:
1710 *do_not_record_p = 1;
1711 return 0;
1712
1713 case ASM_OPERANDS:
1714 if (MEM_VOLATILE_P (x))
1715 {
1716 *do_not_record_p = 1;
1717 return 0;
1718 }
1719 else
1720 {
1721 /* We don't want to take the filename and line into account. */
1722 hash += (unsigned) code + (unsigned) GET_MODE (x)
1723 + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
1724 + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
1725 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
1726
1727 if (ASM_OPERANDS_INPUT_LENGTH (x))
1728 {
1729 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
1730 {
1731 hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
1732 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
1733 do_not_record_p)
1734 + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
1735 (x, i)));
1736 }
1737
1738 hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
1739 x = ASM_OPERANDS_INPUT (x, 0);
1740 mode = GET_MODE (x);
1741 goto repeat;
1742 }
1743 return hash;
1744 }
1745
1746 default:
1747 break;
1748 }
1749
7506f491 1750 hash += (unsigned) code + (unsigned) GET_MODE (x);
c4c81601 1751 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1752 {
1753 if (fmt[i] == 'e')
1754 {
1755 /* If we are about to do the last recursive call
1756 needed at this level, change it into iteration.
1757 This function is called enough to be worth it. */
1758 if (i == 0)
1759 {
c4c81601 1760 x = XEXP (x, i);
1761 goto repeat;
1762 }
1763
1764 hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
1765 if (*do_not_record_p)
1766 return 0;
1767 }
c4c81601 1768
1769 else if (fmt[i] == 'E')
1770 for (j = 0; j < XVECLEN (x, i); j++)
1771 {
1772 hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
1773 if (*do_not_record_p)
1774 return 0;
1775 }
c4c81601 1776
7506f491 1777 else if (fmt[i] == 's')
6462bb43 1778 hash += hash_string_1 (XSTR (x, i));
7506f491 1779 else if (fmt[i] == 'i')
c4c81601 1780 hash += (unsigned int) XINT (x, i);
1781 else
1782 abort ();
1783 }
1784
1785 return hash;
1786}
1787
1788/* Hash a set of register REGNO.
1789
1790 Sets are hashed on the register that is set. This simplifies the PRE copy
1791 propagation code.
1792
1793 ??? May need to make things more elaborate. Later, as necessary. */
1794
1795static unsigned int
1796hash_set (regno, hash_table_size)
1797 int regno;
1798 int hash_table_size;
1799{
1800 unsigned int hash;
1801
1802 hash = regno;
1803 return hash % hash_table_size;
1804}
1805
cc2902df 1806/* Return nonzero if exp1 is equivalent to exp2.
1807 ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */
1808
1809static int
1810expr_equiv_p (x, y)
1811 rtx x, y;
1812{
1813 int i, j;
1814 enum rtx_code code;
1815 const char *fmt;
1816
1817 if (x == y)
1818 return 1;
c4c81601 1819
1820 if (x == 0 || y == 0)
1821 return x == y;
1822
1823 code = GET_CODE (x);
1824 if (code != GET_CODE (y))
1825 return 0;
1826
1827 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1828 if (GET_MODE (x) != GET_MODE (y))
1829 return 0;
1830
1831 switch (code)
1832 {
1833 case PC:
1834 case CC0:
1835 return x == y;
1836
1837 case CONST_INT:
1838 return INTVAL (x) == INTVAL (y);
1839
1840 case LABEL_REF:
1841 return XEXP (x, 0) == XEXP (y, 0);
1842
1843 case SYMBOL_REF:
1844 return XSTR (x, 0) == XSTR (y, 0);
1845
1846 case REG:
1847 return REGNO (x) == REGNO (y);
1848
1849 case MEM:
1850 /* Can't merge two expressions in different alias sets, since we can
1851 decide that the expression is transparent in a block when it isn't,
1852 due to it being set with the different alias set. */
1853 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
1854 return 0;
1855 break;
1856
1857 /* For commutative operations, check both orders. */
1858 case PLUS:
1859 case MULT:
1860 case AND:
1861 case IOR:
1862 case XOR:
1863 case NE:
1864 case EQ:
1865 return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
1866 && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
1867 || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
1868 && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));
1869
1870 case ASM_OPERANDS:
1871 /* We don't use the generic code below because we want to
1872 disregard filename and line numbers. */
1873
1874 /* A volatile asm isn't equivalent to any other. */
1875 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1876 return 0;
1877
1878 if (GET_MODE (x) != GET_MODE (y)
1879 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
1880 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
1881 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
1882 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
1883 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
1884 return 0;
1885
1886 if (ASM_OPERANDS_INPUT_LENGTH (x))
1887 {
1888 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
1889 if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
1890 ASM_OPERANDS_INPUT (y, i))
1891 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
1892 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
1893 return 0;
1894 }
1895
1896 return 1;
1897
1898 default:
1899 break;
1900 }
1901
1902 /* Compare the elements. If any pair of corresponding elements
1903 fail to match, return 0 for the whole thing. */
1904
1905 fmt = GET_RTX_FORMAT (code);
1906 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1907 {
1908 switch (fmt[i])
1909 {
1910 case 'e':
1911 if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
1912 return 0;
1913 break;
1914
1915 case 'E':
1916 if (XVECLEN (x, i) != XVECLEN (y, i))
1917 return 0;
1918 for (j = 0; j < XVECLEN (x, i); j++)
1919 if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1920 return 0;
1921 break;
1922
1923 case 's':
1924 if (strcmp (XSTR (x, i), XSTR (y, i)))
1925 return 0;
1926 break;
1927
1928 case 'i':
1929 if (XINT (x, i) != XINT (y, i))
1930 return 0;
1931 break;
1932
1933 case 'w':
1934 if (XWINT (x, i) != XWINT (y, i))
1935 return 0;
1936 break;
1937
1938 case '0':
1939 break;
aaa4ca30 1940
1941 default:
1942 abort ();
1943 }
8e42ace1 1944 }
1945
1946 return 1;
1947}
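/* For example, (plus:SI (reg 100) (reg 101)) is equivalent to
   (plus:SI (reg 101) (reg 100)) through the commutative case above,
   while (mult:SI (reg 100) (reg 101)) and (mult:HI (reg 100) (reg 101))
   are never equivalent because their modes differ.  */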
1948
02280659 1949/* Insert expression X in INSN in the hash TABLE.
1950 If it is already present, record it as the last occurrence in INSN's
1951 basic block.
1952
1953 MODE is the mode of the value X is being stored into.
1954 It is only used if X is a CONST_INT.
1955
1956 ANTIC_P is nonzero if X is an anticipatable expression.
1957 AVAIL_P is nonzero if X is an available expression. */
1958
1959static void
02280659 1960insert_expr_in_table (x, mode, insn, antic_p, avail_p, table)
1961 rtx x;
1962 enum machine_mode mode;
1963 rtx insn;
1964 int antic_p, avail_p;
02280659 1965 struct hash_table *table;
1966{
1967 int found, do_not_record_p;
1968 unsigned int hash;
1969 struct expr *cur_expr, *last_expr = NULL;
1970 struct occr *antic_occr, *avail_occr;
1971 struct occr *last_occr = NULL;
1972
02280659 1973 hash = hash_expr (x, mode, &do_not_record_p, table->size);
1974
1975 /* Do not insert expression in table if it contains volatile operands,
1976 or if hash_expr determines the expression is something we don't want
1977 to or can't handle. */
1978 if (do_not_record_p)
1979 return;
1980
02280659 1981 cur_expr = table->table[hash];
1982 found = 0;
1983
c4c81601 1984 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1985 {
1986 /* If the expression isn't found, save a pointer to the end of
1987 the list. */
1988 last_expr = cur_expr;
1989 cur_expr = cur_expr->next_same_hash;
1990 }
1991
1992 if (! found)
1993 {
1994 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
1995 bytes_used += sizeof (struct expr);
02280659 1996 if (table->table[hash] == NULL)
c4c81601 1997 /* This is the first pattern that hashed to this index. */
02280659 1998 table->table[hash] = cur_expr;
7506f491 1999 else
2000 /* Add EXPR to end of this hash chain. */
2001 last_expr->next_same_hash = cur_expr;
2002
589005ff 2003 /* Set the fields of the expr element. */
7506f491 2004 cur_expr->expr = x;
02280659 2005 cur_expr->bitmap_index = table->n_elems++;
2006 cur_expr->next_same_hash = NULL;
2007 cur_expr->antic_occr = NULL;
2008 cur_expr->avail_occr = NULL;
2009 }
2010
2011 /* Now record the occurrence(s). */
2012 if (antic_p)
2013 {
2014 antic_occr = cur_expr->antic_occr;
2015
2016 /* Search for another occurrence in the same basic block. */
2017 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
2018 {
2019 /* If an occurrence isn't found, save a pointer to the end of
2020 the list. */
2021 last_occr = antic_occr;
2022 antic_occr = antic_occr->next;
2023 }
2024
2025 if (antic_occr)
2026 /* Found another instance of the expression in the same basic block.
2027 Prefer the currently recorded one. We want the first one in the
2028 block and the block is scanned from start to end. */
2029 ; /* nothing to do */
2030 else
2031 {
2032 /* First occurrence of this expression in this basic block. */
2033 antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2034 bytes_used += sizeof (struct occr);
2035 /* First occurrence of this expression in any block? */
2036 if (cur_expr->antic_occr == NULL)
2037 cur_expr->antic_occr = antic_occr;
2038 else
2039 last_occr->next = antic_occr;
c4c81601 2040
2041 antic_occr->insn = insn;
2042 antic_occr->next = NULL;
2043 }
2044 }
2045
2046 if (avail_p)
2047 {
2048 avail_occr = cur_expr->avail_occr;
2049
2050 /* Search for another occurrence in the same basic block. */
2051 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
2052 {
2053 /* If an occurrence isn't found, save a pointer to the end of
2054 the list. */
2055 last_occr = avail_occr;
2056 avail_occr = avail_occr->next;
2057 }
2058
2059 if (avail_occr)
2060 /* Found another instance of the expression in the same basic block.
2061 Prefer this occurrence to the currently recorded one. We want
2062 the last one in the block and the block is scanned from start
2063 to end. */
2064 avail_occr->insn = insn;
2065 else
2066 {
2067 /* First occurrence of this expression in this basic block. */
2068 avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2069 bytes_used += sizeof (struct occr);
c4c81601 2070
2071 /* First occurrence of this expression in any block? */
2072 if (cur_expr->avail_occr == NULL)
2073 cur_expr->avail_occr = avail_occr;
2074 else
2075 last_occr->next = avail_occr;
c4c81601 2076
2077 avail_occr->insn = insn;
2078 avail_occr->next = NULL;
2079 }
2080 }
2081}
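/* For example, if a block computes the same expression at insns I1 and
   later I2, the antic_occr list ends up holding I1 (the first
   computation, used when reasoning about the block entry) and the
   avail_occr list ends up holding I2 (the last computation, used when
   reasoning about the block exit).  */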
2082
2083/* Insert pattern X in INSN in the hash table.
2084 X is a SET of a reg to either another reg or a constant.
2085 If it is already present, record it as the last occurrence in INSN's
2086 basic block. */
2087
2088static void
02280659 2089insert_set_in_table (x, insn, table)
2090 rtx x;
2091 rtx insn;
02280659 2092 struct hash_table *table;
2093{
2094 int found;
2095 unsigned int hash;
2096 struct expr *cur_expr, *last_expr = NULL;
2097 struct occr *cur_occr, *last_occr = NULL;
2098
2099 if (GET_CODE (x) != SET
2100 || GET_CODE (SET_DEST (x)) != REG)
2101 abort ();
2102
02280659 2103 hash = hash_set (REGNO (SET_DEST (x)), table->size);
7506f491 2104
02280659 2105 cur_expr = table->table[hash];
2106 found = 0;
2107
c4c81601 2108 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
2109 {
2110 /* If the expression isn't found, save a pointer to the end of
2111 the list. */
2112 last_expr = cur_expr;
2113 cur_expr = cur_expr->next_same_hash;
2114 }
2115
2116 if (! found)
2117 {
2118 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
2119 bytes_used += sizeof (struct expr);
02280659 2120 if (table->table[hash] == NULL)
c4c81601 2121 /* This is the first pattern that hashed to this index. */
02280659 2122 table->table[hash] = cur_expr;
7506f491 2123 else
2124 /* Add EXPR to end of this hash chain. */
2125 last_expr->next_same_hash = cur_expr;
2126
2127 /* Set the fields of the expr element.
2128 We must copy X because it can be modified when copy propagation is
2129 performed on its operands. */
7506f491 2130 cur_expr->expr = copy_rtx (x);
02280659 2131 cur_expr->bitmap_index = table->n_elems++;
2132 cur_expr->next_same_hash = NULL;
2133 cur_expr->antic_occr = NULL;
2134 cur_expr->avail_occr = NULL;
2135 }
2136
2137 /* Now record the occurrence. */
2138 cur_occr = cur_expr->avail_occr;
2139
2140 /* Search for another occurrence in the same basic block. */
2141 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2142 {
2143 /* If an occurrence isn't found, save a pointer to the end of
2144 the list. */
2145 last_occr = cur_occr;
2146 cur_occr = cur_occr->next;
2147 }
2148
2149 if (cur_occr)
2150 /* Found another instance of the expression in the same basic block.
2151 Prefer this occurrence to the currently recorded one. We want the
2152 last one in the block and the block is scanned from start to end. */
2153 cur_occr->insn = insn;
2154 else
2155 {
2156 /* First occurrence of this expression in this basic block. */
2157 cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2158 bytes_used += sizeof (struct occr);
c4c81601 2159
2160 /* First occurrence of this expression in any block? */
2161 if (cur_expr->avail_occr == NULL)
2162 cur_expr->avail_occr = cur_occr;
2163 else
2164 last_occr->next = cur_occr;
c4c81601 2165
2166 cur_occr->insn = insn;
2167 cur_occr->next = NULL;
2168 }
2169}
2170
2171/* Determine whether the rtx X should be treated as a constant for
2172 the purposes of GCSE's constant propagation. */
2173
2174static bool
2175gcse_constant_p (x)
2176 rtx x;
2177{
2178 /* Consider a COMPARE of two integers constant. */
2179 if (GET_CODE (x) == COMPARE
2180 && GET_CODE (XEXP (x, 0)) == CONST_INT
2181 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2182 return true;
2183
2184 if (GET_CODE (x) == CONSTANT_P_RTX)
2185 return false;
2186
2187 return CONSTANT_P (x);
2188}
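/* For example, (compare (const_int 2) (const_int 3)) is accepted by
   the special case above, (symbol_ref "x") is accepted via CONSTANT_P,
   and (reg 100) is rejected.  */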
2189
 2190/* Scan pattern PAT of INSN and add an entry to the hash TABLE
 2191   (either the set table or the expression table).  */
2192
2193static void
02280659 2194hash_scan_set (pat, insn, table)
7506f491 2195 rtx pat, insn;
02280659 2196 struct hash_table *table;
2197{
2198 rtx src = SET_SRC (pat);
2199 rtx dest = SET_DEST (pat);
172890a2 2200 rtx note;
2201
2202 if (GET_CODE (src) == CALL)
02280659 2203 hash_scan_call (src, insn, table);
7506f491 2204
172890a2 2205 else if (GET_CODE (dest) == REG)
7506f491 2206 {
172890a2 2207 unsigned int regno = REGNO (dest);
2208 rtx tmp;
2209
2210 /* If this is a single set and we are doing constant propagation,
2211 see if a REG_NOTE shows this equivalent to a constant. */
02280659 2212 if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
6b2d1c9e 2213 && gcse_constant_p (XEXP (note, 0)))
2214 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2215
7506f491 2216 /* Only record sets of pseudo-regs in the hash table. */
02280659 2217 if (! table->set_p
2218 && regno >= FIRST_PSEUDO_REGISTER
2219 /* Don't GCSE something if we can't do a reg/reg copy. */
773eae39 2220 && can_copy_p (GET_MODE (dest))
 2221	  /* GCSE commonly inserts instructions after the insn.  We can't
2222 do that easily for EH_REGION notes so disable GCSE on these
2223 for now. */
2224 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
7506f491 2225 /* Is SET_SRC something we want to gcse? */
2226 && want_to_gcse_p (src)
2227 /* Don't CSE a nop. */
2228 && ! set_noop_p (pat)
 2229	  /* Don't GCSE if it has an attached REG_EQUIV note.
 2230	     At this point only function parameters should have
 2231	     REG_EQUIV notes, and if the argument slot is used somewhere
a1f300c0 2232	     explicitly, it means the address of the parameter has been taken,
 2233	     so we should not extend the lifetime of the pseudo.  */
2234 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2235 || GET_CODE (XEXP (note, 0)) != MEM))
2236 {
2237 /* An expression is not anticipatable if its operands are
2238 modified before this insn or if this is not the only SET in
2239 this insn. */
2240 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
7506f491 2241 /* An expression is not available if its operands are
2242 subsequently modified, including this insn. It's also not
2243 available if this is a branch, because we can't insert
2244 a set after the branch. */
2245 int avail_p = (oprs_available_p (src, insn)
2246 && ! JUMP_P (insn));
c4c81601 2247
02280659 2248 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
7506f491 2249 }
c4c81601 2250
7506f491 2251 /* Record sets for constant/copy propagation. */
02280659 2252 else if (table->set_p
2253 && regno >= FIRST_PSEUDO_REGISTER
2254 && ((GET_CODE (src) == REG
2255 && REGNO (src) >= FIRST_PSEUDO_REGISTER
773eae39 2256 && can_copy_p (GET_MODE (dest))
172890a2 2257 && REGNO (src) != regno)
6b2d1c9e 2258 || gcse_constant_p (src))
2259 /* A copy is not available if its src or dest is subsequently
2260 modified. Here we want to search from INSN+1 on, but
2261 oprs_available_p searches from INSN on. */
2262 && (insn == BLOCK_END (BLOCK_NUM (insn))
2263 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2264 && oprs_available_p (pat, tmp))))
02280659 2265 insert_set_in_table (pat, insn, table);
7506f491 2266 }
2267}
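/* For example (assuming the guards above are met),
   (set (reg 100) (plus:SI (reg 101) (const_int 4))) contributes the
   PLUS to the expression table, while (set (reg 100) (reg 101)) and
   (set (reg 100) (const_int 4)) are the forms recorded whole in the
   set table for copy and constant propagation respectively.  */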
2268
2269static void
02280659 2270hash_scan_clobber (x, insn, table)
50b2596f 2271 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
02280659 2272 struct hash_table *table ATTRIBUTE_UNUSED;
2273{
2274 /* Currently nothing to do. */
2275}
2276
2277static void
02280659 2278hash_scan_call (x, insn, table)
50b2596f 2279 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
02280659 2280 struct hash_table *table ATTRIBUTE_UNUSED;
2281{
2282 /* Currently nothing to do. */
2283}
2284
2285/* Process INSN and add hash table entries as appropriate.
2286
2287 Only available expressions that set a single pseudo-reg are recorded.
2288
2289 Single sets in a PARALLEL could be handled, but it's an extra complication
2290 that isn't dealt with right now. The trick is handling the CLOBBERs that
2291 are also in the PARALLEL. Later.
2292
cc2902df 2293 If SET_P is nonzero, this is for the assignment hash table,
 2294   otherwise it is for the expression hash table.
 2295   If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should
2296 not record any expressions. */
2297
2298static void
02280659 2299hash_scan_insn (insn, table, in_libcall_block)
7506f491 2300 rtx insn;
02280659 2301 struct hash_table *table;
48e87cef 2302 int in_libcall_block;
2303{
2304 rtx pat = PATTERN (insn);
c4c81601 2305 int i;
7506f491 2306
2307 if (in_libcall_block)
2308 return;
2309
2310 /* Pick out the sets of INSN and for other forms of instructions record
2311 what's been modified. */
2312
172890a2 2313 if (GET_CODE (pat) == SET)
02280659 2314 hash_scan_set (pat, insn, table);
7506f491 2315 else if (GET_CODE (pat) == PARALLEL)
2316 for (i = 0; i < XVECLEN (pat, 0); i++)
2317 {
2318 rtx x = XVECEXP (pat, 0, i);
7506f491 2319
c4c81601 2320 if (GET_CODE (x) == SET)
02280659 2321 hash_scan_set (x, insn, table);
c4c81601 2322 else if (GET_CODE (x) == CLOBBER)
02280659 2323 hash_scan_clobber (x, insn, table);
c4c81601 2324 else if (GET_CODE (x) == CALL)
02280659 2325 hash_scan_call (x, insn, table);
c4c81601 2326 }
7506f491 2327
7506f491 2328 else if (GET_CODE (pat) == CLOBBER)
02280659 2329 hash_scan_clobber (pat, insn, table);
7506f491 2330 else if (GET_CODE (pat) == CALL)
02280659 2331 hash_scan_call (pat, insn, table);
2332}
2333
2334static void
02280659 2335dump_hash_table (file, name, table)
7506f491 2336 FILE *file;
dff01034 2337 const char *name;
02280659 2338 struct hash_table *table;
2339{
2340 int i;
2341 /* Flattened out table, so it's printed in proper order. */
2342 struct expr **flat_table;
2343 unsigned int *hash_val;
c4c81601 2344 struct expr *expr;
4da896b2 2345
589005ff 2346 flat_table
2347 = (struct expr **) xcalloc (table->n_elems, sizeof (struct expr *));
2348 hash_val = (unsigned int *) xmalloc (table->n_elems * sizeof (unsigned int));
7506f491 2349
2350 for (i = 0; i < (int) table->size; i++)
2351 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
2352 {
2353 flat_table[expr->bitmap_index] = expr;
2354 hash_val[expr->bitmap_index] = i;
2355 }
2356
2357 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
02280659 2358 name, table->size, table->n_elems);
7506f491 2359
02280659 2360 for (i = 0; i < (int) table->n_elems; i++)
2361 if (flat_table[i] != 0)
2362 {
a0ac9e5a 2363 expr = flat_table[i];
2364 fprintf (file, "Index %d (hash value %d)\n ",
2365 expr->bitmap_index, hash_val[i]);
a0ac9e5a 2366 print_rtl (file, expr->expr);
2367 fprintf (file, "\n");
2368 }
2369
2370 fprintf (file, "\n");
4da896b2 2371
2372 free (flat_table);
2373 free (hash_val);
2374}
2375
2376/* Record register first/last/block set information for REGNO in INSN.
c4c81601 2377
80c29cc4 2378 first_set records the first place in the block where the register
7506f491 2379 is set and is used to compute "anticipatability".
c4c81601 2380
80c29cc4 2381 last_set records the last place in the block where the register
7506f491 2382 is set and is used to compute "availability".
c4c81601 2383
2384 last_bb records the block for which first_set and last_set are
2385 valid, as a quick test to invalidate them.
2386
2387 reg_set_in_block records whether the register is set in the block
2388 and is used to compute "transparency". */
2389
2390static void
2391record_last_reg_set_info (insn, regno)
2392 rtx insn;
2393 int regno;
2394{
2395 struct reg_avail_info *info = &reg_avail_info[regno];
2396 int cuid = INSN_CUID (insn);
c4c81601 2397
2398 info->last_set = cuid;
2399 if (info->last_bb != current_bb)
2400 {
2401 info->last_bb = current_bb;
2402 info->first_set = cuid;
e0082a72 2403 SET_BIT (reg_set_in_block[current_bb->index], regno);
80c29cc4 2404 }
2405}
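/* For example, if (reg 100) is set at cuids 5 and 12 while scanning
   the current block, reg_avail_info[100] finishes the block with
   first_set == 5, last_set == 12 and last_bb == current_bb.  */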
2406
2407
2408/* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2409 Note we store a pair of elements in the list, so they have to be
2410 taken off pairwise. */
2411
589005ff 2412static void
2413canon_list_insert (dest, unused1, v_insn)
2414 rtx dest ATTRIBUTE_UNUSED;
2415 rtx unused1 ATTRIBUTE_UNUSED;
2416 void * v_insn;
2417{
2418 rtx dest_addr, insn;
0fe854a7 2419 int bb;
2420
2421 while (GET_CODE (dest) == SUBREG
2422 || GET_CODE (dest) == ZERO_EXTRACT
2423 || GET_CODE (dest) == SIGN_EXTRACT
2424 || GET_CODE (dest) == STRICT_LOW_PART)
2425 dest = XEXP (dest, 0);
2426
2427 /* If DEST is not a MEM, then it will not conflict with a load. Note
2428 that function calls are assumed to clobber memory, but are handled
2429 elsewhere. */
2430
2431 if (GET_CODE (dest) != MEM)
2432 return;
2433
2434 dest_addr = get_addr (XEXP (dest, 0));
2435 dest_addr = canon_rtx (dest_addr);
589005ff 2436 insn = (rtx) v_insn;
0fe854a7 2437 bb = BLOCK_NUM (insn);
a13d4ebf 2438
589005ff 2439 canon_modify_mem_list[bb] =
0fe854a7 2440 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
589005ff 2441 canon_modify_mem_list[bb] =
2442 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
2443 bitmap_set_bit (canon_modify_mem_list_set, bb);
2444}
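/* The per-block list therefore looks like
     dest_N, dest_addr_N, ..., dest_1, dest_addr_1
   with the most recent store first: each store contributes its MEM and
   the canonicalized address of that MEM as one pair.  */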
2445
2446/* Record memory modification information for INSN. We do not actually care
2447 about the memory location(s) that are set, or even how they are set (consider
2448 a CALL_INSN). We merely need to record which insns modify memory. */
2449
2450static void
2451record_last_mem_set_info (insn)
2452 rtx insn;
2453{
2454 int bb = BLOCK_NUM (insn);
2455
ccef9ef5 2456 /* load_killed_in_block_p will handle the case of calls clobbering
dc297297 2457 everything. */
2458 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
2459 bitmap_set_bit (modify_mem_list_set, bb);
2460
2461 if (GET_CODE (insn) == CALL_INSN)
2462 {
 2463      /* Note that traversals of this list (other than for freeing)
2464 will break after encountering a CALL_INSN. So, there's no
dc297297 2465 need to insert a pair of items, as canon_list_insert does. */
2466 canon_modify_mem_list[bb] =
2467 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
0fe854a7 2468 bitmap_set_bit (canon_modify_mem_list_set, bb);
2469 }
2470 else
0fe854a7 2471 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
2472}
2473
7506f491 2474/* Called from compute_hash_table via note_stores to handle one
2475 SET or CLOBBER in an insn. DATA is really the instruction in which
2476 the SET is taking place. */
2477
2478static void
84832317 2479record_last_set_info (dest, setter, data)
50b2596f 2480 rtx dest, setter ATTRIBUTE_UNUSED;
84832317 2481 void *data;
7506f491 2482{
2483 rtx last_set_insn = (rtx) data;
2484
2485 if (GET_CODE (dest) == SUBREG)
2486 dest = SUBREG_REG (dest);
2487
2488 if (GET_CODE (dest) == REG)
2489 record_last_reg_set_info (last_set_insn, REGNO (dest));
2490 else if (GET_CODE (dest) == MEM
2491 /* Ignore pushes, they clobber nothing. */
2492 && ! push_operand (dest, GET_MODE (dest)))
2493 record_last_mem_set_info (last_set_insn);
2494}
2495
2496/* Top level function to create an expression or assignment hash table.
2497
2498 Expression entries are placed in the hash table if
2499 - they are of the form (set (pseudo-reg) src),
2500 - src is something we want to perform GCSE on,
2501 - none of the operands are subsequently modified in the block
2502
2503 Assignment entries are placed in the hash table if
2504 - they are of the form (set (pseudo-reg) src),
2505 - src is something we want to perform const/copy propagation on,
2506 - none of the operands or target are subsequently modified in the block
c4c81601 2507
2508 Currently src must be a pseudo-reg or a const_int.
2509
02280659 2510 TABLE is the table computed. */
2511
2512static void
2513compute_hash_table_work (table)
2514 struct hash_table *table;
7506f491 2515{
80c29cc4 2516 unsigned int i;
2517
2518 /* While we compute the hash table we also compute a bit array of which
2519 registers are set in which blocks.
2520 ??? This isn't needed during const/copy propagation, but it's cheap to
2521 compute. Later. */
d55bc081 2522 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
7506f491 2523
a13d4ebf 2524   /* Re-cache any INSN_LIST nodes we have allocated.  */
73991d6a 2525 clear_modify_mem_tables ();
7506f491 2526 /* Some working arrays used to track first and last set in each block. */
2527 reg_avail_info = (struct reg_avail_info*)
2528 gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2529
2530 for (i = 0; i < max_gcse_regno; ++i)
e0082a72 2531 reg_avail_info[i].last_bb = NULL;
7506f491 2532
e0082a72 2533 FOR_EACH_BB (current_bb)
7506f491
DE
2534 {
2535 rtx insn;
770ae6cc 2536 unsigned int regno;
ed79bb3d 2537 int in_libcall_block;
2538
2539 /* First pass over the instructions records information used to
2540 determine when registers and memory are first and last set.
ccef9ef5 2541 ??? hard-reg reg_set_in_block computation
2542 could be moved to compute_sets since they currently don't change. */
2543
2544 for (insn = current_bb->head;
2545 insn && insn != NEXT_INSN (current_bb->end);
2546 insn = NEXT_INSN (insn))
2547 {
2c3c49de 2548 if (! INSN_P (insn))
2549 continue;
2550
2551 if (GET_CODE (insn) == CALL_INSN)
2552 {
19652adf 2553 bool clobbers_all = false;
589005ff 2554#ifdef NON_SAVING_SETJMP
2555 if (NON_SAVING_SETJMP
2556 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
2557 clobbers_all = true;
2558#endif
2559
7506f491 2560 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2561 if (clobbers_all
2562 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7506f491 2563 record_last_reg_set_info (insn, regno);
c4c81601 2564
24a28584 2565 mark_call (insn);
2566 }
2567
84832317 2568 note_stores (PATTERN (insn), record_last_set_info, insn);
2569 }
2570
2571 /* Insert implicit sets in the hash table. */
2572 if (table->set_p
2573 && implicit_sets[current_bb->index] != NULL_RTX)
2574 hash_scan_set (implicit_sets[current_bb->index],
2575 current_bb->head, table);
2576
2577 /* The next pass builds the hash table. */
2578
2579 for (insn = current_bb->head, in_libcall_block = 0;
2580 insn && insn != NEXT_INSN (current_bb->end);
7506f491 2581 insn = NEXT_INSN (insn))
2c3c49de 2582 if (INSN_P (insn))
2583 {
2584 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
589005ff 2585 in_libcall_block = 1;
02280659 2586 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
589005ff 2587 in_libcall_block = 0;
2588 hash_scan_insn (insn, table, in_libcall_block);
2589 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
589005ff 2590 in_libcall_block = 0;
8e42ace1 2591 }
2592 }
2593
2594 free (reg_avail_info);
2595 reg_avail_info = NULL;
2596}
2597
02280659 2598/* Allocate space for the set/expr hash TABLE.
7506f491 2599 N_INSNS is the number of instructions in the function.
2600 It is used to determine the number of buckets to use.
2601 SET_P determines whether set or expression table will
2602 be created. */
2603
2604static void
02280659 2605alloc_hash_table (n_insns, table, set_p)
7506f491 2606 int n_insns;
2607 struct hash_table *table;
2608 int set_p;
2609{
2610 int n;
2611
2612 table->size = n_insns / 4;
2613 if (table->size < 11)
2614 table->size = 11;
c4c81601 2615
2616 /* Attempt to maintain efficient use of hash table.
2617 Making it an odd number is simplest for now.
2618 ??? Later take some measurements. */
2619 table->size |= 1;
2620 n = table->size * sizeof (struct expr *);
2621 table->table = (struct expr **) gmalloc (n);
2622 table->set_p = set_p;
2623}
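/* Worked example of the sizing above: n_insns == 100 gives an initial
   size of 25, which is already odd and stays 25; n_insns == 40 gives
   10, which is raised to the minimum 11.  */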
2624
02280659 2625/* Free things allocated by alloc_hash_table. */
2626
2627static void
2628free_hash_table (table)
2629 struct hash_table *table;
7506f491 2630{
02280659 2631 free (table->table);
2632}
2633
 2634/* Compute the hash TABLE, used either for copy/const propagation
 2635   or as the expression hash table.  */
2636
2637static void
2638compute_hash_table (table)
2639 struct hash_table *table;
2640{
2641 /* Initialize count of number of entries in hash table. */
2642 table->n_elems = 0;
2643 memset ((char *) table->table, 0,
2644 table->size * sizeof (struct expr *));
7506f491 2645
02280659 2646 compute_hash_table_work (table);
2647}
2648\f
2649/* Expression tracking support. */
2650
02280659 2651/* Lookup pattern PAT in the expression TABLE.
2652 The result is a pointer to the table entry, or NULL if not found. */
2653
2654static struct expr *
02280659 2655lookup_expr (pat, table)
7506f491 2656 rtx pat;
02280659 2657 struct hash_table *table;
2658{
2659 int do_not_record_p;
2660 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
02280659 2661 table->size);
2662 struct expr *expr;
2663
2664 if (do_not_record_p)
2665 return NULL;
2666
02280659 2667 expr = table->table[hash];
2668
2669 while (expr && ! expr_equiv_p (expr->expr, pat))
2670 expr = expr->next_same_hash;
2671
2672 return expr;
2673}
2674
2675/* Lookup REGNO in the set TABLE. The result is a pointer to the
2676 table entry, or NULL if not found. */
2677
2678static struct expr *
ceda50e9 2679lookup_set (regno, table)
770ae6cc 2680 unsigned int regno;
02280659 2681 struct hash_table *table;
7506f491 2682{
02280659 2683 unsigned int hash = hash_set (regno, table->size);
2684 struct expr *expr;
2685
02280659 2686 expr = table->table[hash];
7506f491 2687
2688 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2689 expr = expr->next_same_hash;
2690
2691 return expr;
2692}
2693
2694/* Return the next entry for REGNO in list EXPR. */
2695
2696static struct expr *
2697next_set (regno, expr)
770ae6cc 2698 unsigned int regno;
2699 struct expr *expr;
2700{
2701 do
2702 expr = expr->next_same_hash;
2703 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
c4c81601 2704
2705 return expr;
2706}
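/* Usage sketch (a hypothetical caller, assuming this file's
   set_hash_table global; the copy/const propagation code uses the same
   idiom): visit every recorded set of pseudo REGNO.  */
#if 0
  struct expr *set;

  for (set = lookup_set (regno, &set_hash_table);
       set != NULL;
       set = next_set (regno, set))
    /* ... examine SET_SRC (set->expr) here ... */;
#endif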
2707
2708/* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2709 types may be mixed. */
2710
2711static void
2712free_insn_expr_list_list (listp)
2713 rtx *listp;
2714{
2715 rtx list, next;
2716
2717 for (list = *listp; list ; list = next)
2718 {
2719 next = XEXP (list, 1);
2720 if (GET_CODE (list) == EXPR_LIST)
2721 free_EXPR_LIST_node (list);
2722 else
2723 free_INSN_LIST_node (list);
2724 }
2725
2726 *listp = NULL;
2727}
2728
2729/* Clear canon_modify_mem_list and modify_mem_list tables. */
2730static void
2731clear_modify_mem_tables ()
2732{
2733 int i;
2734
2735 EXECUTE_IF_SET_IN_BITMAP
2736 (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
2737 bitmap_clear (modify_mem_list_set);
2738
2739 EXECUTE_IF_SET_IN_BITMAP
2740 (canon_modify_mem_list_set, 0, i,
2741 free_insn_expr_list_list (canon_modify_mem_list + i));
2742 bitmap_clear (canon_modify_mem_list_set);
2743}
2744
2745/* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
2746
2747static void
2748free_modify_mem_tables ()
2749{
2750 clear_modify_mem_tables ();
2751 free (modify_mem_list);
2752 free (canon_modify_mem_list);
2753 modify_mem_list = 0;
2754 canon_modify_mem_list = 0;
2755}
2756
2757/* Reset tables used to keep track of what's still available [since the
2758 start of the block]. */
2759
2760static void
2761reset_opr_set_tables ()
2762{
2763 /* Maintain a bitmap of which regs have been set since beginning of
2764 the block. */
73991d6a 2765 CLEAR_REG_SET (reg_set_bitmap);
c4c81601 2766
7506f491
DE
2767 /* Also keep a record of the last instruction to modify memory.
2768 For now this is very trivial, we only record whether any memory
2769 location has been modified. */
73991d6a 2770 clear_modify_mem_tables ();
7506f491
DE
2771}
2772
cc2902df 2773/* Return nonzero if the operands of X are not set before INSN in
2774 INSN's basic block. */
2775
2776static int
2777oprs_not_set_p (x, insn)
2778 rtx x, insn;
2779{
c4c81601 2780 int i, j;
7506f491 2781 enum rtx_code code;
6f7d635c 2782 const char *fmt;
7506f491 2783
7506f491
DE
2784 if (x == 0)
2785 return 1;
2786
2787 code = GET_CODE (x);
2788 switch (code)
2789 {
2790 case PC:
2791 case CC0:
2792 case CONST:
2793 case CONST_INT:
2794 case CONST_DOUBLE:
69ef87e2 2795 case CONST_VECTOR:
2796 case SYMBOL_REF:
2797 case LABEL_REF:
2798 case ADDR_VEC:
2799 case ADDR_DIFF_VEC:
2800 return 1;
2801
2802 case MEM:
589005ff 2803 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
e2d2ed72 2804 INSN_CUID (insn), x, 0))
a13d4ebf 2805 return 0;
2806 else
2807 return oprs_not_set_p (XEXP (x, 0), insn);
2808
2809 case REG:
73991d6a 2810 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2811
2812 default:
2813 break;
2814 }
2815
c4c81601 2816 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2817 {
2818 if (fmt[i] == 'e')
2819 {
2820 /* If we are about to do the last recursive call
2821 needed at this level, change it into iteration.
2822 This function is called enough to be worth it. */
2823 if (i == 0)
2824 return oprs_not_set_p (XEXP (x, i), insn);
2825
2826 if (! oprs_not_set_p (XEXP (x, i), insn))
2827 return 0;
2828 }
2829 else if (fmt[i] == 'E')
2830 for (j = 0; j < XVECLEN (x, i); j++)
2831 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2832 return 0;
2833 }
2834
2835 return 1;
2836}
2837
2838/* Mark things set by a CALL. */
2839
2840static void
2841mark_call (insn)
2842 rtx insn;
7506f491 2843{
24a28584 2844 if (! CONST_OR_PURE_CALL_P (insn))
a13d4ebf 2845 record_last_mem_set_info (insn);
2846}
2847
2848/* Mark things set by a SET. */
2849
2850static void
2851mark_set (pat, insn)
2852 rtx pat, insn;
2853{
2854 rtx dest = SET_DEST (pat);
2855
2856 while (GET_CODE (dest) == SUBREG
2857 || GET_CODE (dest) == ZERO_EXTRACT
2858 || GET_CODE (dest) == SIGN_EXTRACT
2859 || GET_CODE (dest) == STRICT_LOW_PART)
2860 dest = XEXP (dest, 0);
2861
a13d4ebf 2862 if (GET_CODE (dest) == REG)
73991d6a 2863 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2864 else if (GET_CODE (dest) == MEM)
2865 record_last_mem_set_info (insn);
2866
7506f491 2867 if (GET_CODE (SET_SRC (pat)) == CALL)
b5ce41ff 2868 mark_call (insn);
2869}
2870
2871/* Record things set by a CLOBBER. */
2872
2873static void
2874mark_clobber (pat, insn)
2875 rtx pat, insn;
2876{
2877 rtx clob = XEXP (pat, 0);
2878
2879 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2880 clob = XEXP (clob, 0);
2881
a13d4ebf 2882 if (GET_CODE (clob) == REG)
73991d6a 2883 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2884 else
2885 record_last_mem_set_info (insn);
2886}
2887
2888/* Record things set by INSN.
2889 This data is used by oprs_not_set_p. */
2890
2891static void
2892mark_oprs_set (insn)
2893 rtx insn;
2894{
2895 rtx pat = PATTERN (insn);
c4c81601 2896 int i;
2897
2898 if (GET_CODE (pat) == SET)
2899 mark_set (pat, insn);
2900 else if (GET_CODE (pat) == PARALLEL)
2901 for (i = 0; i < XVECLEN (pat, 0); i++)
2902 {
2903 rtx x = XVECEXP (pat, 0, i);
2904
2905 if (GET_CODE (x) == SET)
2906 mark_set (x, insn);
2907 else if (GET_CODE (x) == CLOBBER)
2908 mark_clobber (x, insn);
2909 else if (GET_CODE (x) == CALL)
2910 mark_call (insn);
2911 }
7506f491 2912
2913 else if (GET_CODE (pat) == CLOBBER)
2914 mark_clobber (pat, insn);
2915 else if (GET_CODE (pat) == CALL)
b5ce41ff 2916 mark_call (insn);
7506f491 2917}
b5ce41ff 2918
2919\f
2920/* Classic GCSE reaching definition support. */
2921
2922/* Allocate reaching def variables. */
2923
2924static void
2925alloc_rd_mem (n_blocks, n_insns)
2926 int n_blocks, n_insns;
2927{
2928 rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
d55bc081 2929 sbitmap_vector_zero (rd_kill, n_blocks);
2930
2931 rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
d55bc081 2932 sbitmap_vector_zero (rd_gen, n_blocks);
2933
2934 reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
d55bc081 2935 sbitmap_vector_zero (reaching_defs, n_blocks);
2936
2937 rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
d55bc081 2938 sbitmap_vector_zero (rd_out, n_blocks);
2939}
2940
2941/* Free reaching def variables. */
2942
2943static void
2944free_rd_mem ()
2945{
2946 sbitmap_vector_free (rd_kill);
2947 sbitmap_vector_free (rd_gen);
2948 sbitmap_vector_free (reaching_defs);
2949 sbitmap_vector_free (rd_out);
2950}
2951
c4c81601 2952/* Add INSN to the kills of BB. REGNO, set in BB, is killed by INSN. */
2953
2954static void
2955handle_rd_kill_set (insn, regno, bb)
2956 rtx insn;
2957 int regno;
2958 basic_block bb;
7506f491 2959{
c4c81601 2960 struct reg_set *this_reg;
7506f491 2961
 2962   for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg->next)
2963 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
0b17ab2f 2964 SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
2965}
2966
 2967/* Compute the set of kills for reaching definitions.  */
2968
2969static void
2970compute_kill_rd ()
2971{
e0082a72 2972 int cuid;
2973 unsigned int regno;
2974 int i;
e0082a72 2975 basic_block bb;
2976
2977 /* For each block
 2978       For each set bit in `gen' of the block (i.e. each insn which
 2979	    generates a definition in the block)
2980 Call the reg set by the insn corresponding to that bit regx
2981 Look at the linked list starting at reg_set_table[regx]
2982 For each setting of regx in the linked list, which is not in
2983 this block
6d2f8887 2984 Set the bit in `kill' corresponding to that insn. */
e0082a72 2985 FOR_EACH_BB (bb)
c4c81601 2986 for (cuid = 0; cuid < max_cuid; cuid++)
e0082a72 2987 if (TEST_BIT (rd_gen[bb->index], cuid))
7506f491 2988 {
2989 rtx insn = CUID_INSN (cuid);
2990 rtx pat = PATTERN (insn);
7506f491 2991
2992 if (GET_CODE (insn) == CALL_INSN)
2993 {
2994 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4e2db584 2995 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
e0082a72 2996 handle_rd_kill_set (insn, regno, bb);
c4c81601 2997 }
7506f491 2998
2999 if (GET_CODE (pat) == PARALLEL)
3000 {
3001 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7506f491 3002 {
c4c81601 3003 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
7506f491 3004
3005 if ((code == SET || code == CLOBBER)
3006 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
3007 handle_rd_kill_set (insn,
3008 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
e0082a72 3009 bb);
ac7c5af5 3010 }
ac7c5af5 3011 }
3012 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
3013 /* Each setting of this register outside of this block
3014 must be marked in the set of kills in this block. */
e0082a72 3015 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
7506f491 3016 }
3017}
3018
589005ff 3019/* Compute the reaching definitions as in
3020 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
3021 Chapter 10. It is the same algorithm as used for computing available
3022 expressions but applied to the gens and kills of reaching definitions. */
3023
3024static void
3025compute_rd ()
3026{
3027 int changed, passes;
3028 basic_block bb;
7506f491 3029
3030 FOR_EACH_BB (bb)
3031 sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);
3032
3033 passes = 0;
3034 changed = 1;
3035 while (changed)
3036 {
3037 changed = 0;
e0082a72 3038 FOR_EACH_BB (bb)
ac7c5af5 3039 {
3040 sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
3041 changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
3042 reaching_defs[bb->index], rd_kill[bb->index]);
ac7c5af5 3043 }
3044 passes++;
3045 }
3046
3047 if (gcse_file)
3048 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
3049}
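/* Restated, the fixpoint computed by the loop above is

     reaching_defs[b] = union of rd_out[p] over all predecessors p of b
     rd_out[b]        = rd_gen[b] | (reaching_defs[b] & ~rd_kill[b])

   iterated until no rd_out[] bit changes.  */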
3050\f
3051/* Classic GCSE available expression support. */
3052
3053/* Allocate memory for available expression computation. */
3054
3055static void
3056alloc_avail_expr_mem (n_blocks, n_exprs)
3057 int n_blocks, n_exprs;
3058{
3059 ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
d55bc081 3060 sbitmap_vector_zero (ae_kill, n_blocks);
3061
3062 ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
d55bc081 3063 sbitmap_vector_zero (ae_gen, n_blocks);
3064
3065 ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
d55bc081 3066 sbitmap_vector_zero (ae_in, n_blocks);
3067
3068 ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
d55bc081 3069 sbitmap_vector_zero (ae_out, n_blocks);
3070}
3071
3072static void
3073free_avail_expr_mem ()
3074{
3075 sbitmap_vector_free (ae_kill);
3076 sbitmap_vector_free (ae_gen);
3077 sbitmap_vector_free (ae_in);
3078 sbitmap_vector_free (ae_out);
3079}
3080
3081/* Compute the set of available expressions generated in each basic block. */
3082
3083static void
3084compute_ae_gen (expr_hash_table)
3085 struct hash_table *expr_hash_table;
7506f491 3086{
2e653e39 3087 unsigned int i;
3088 struct expr *expr;
3089 struct occr *occr;
3090
3091 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
3092 This is all we have to do because an expression is not recorded if it
3093 is not available, and the only expressions we want to work with are the
3094 ones that are recorded. */
3095 for (i = 0; i < expr_hash_table->size; i++)
3096 for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
3097 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
3098 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
3099}
3100
cc2902df 3101/* Return nonzero if expression X is killed in BB. */
3102
3103static int
3104expr_killed_p (x, bb)
3105 rtx x;
e2d2ed72 3106 basic_block bb;
7506f491 3107{
c4c81601 3108 int i, j;
7506f491 3109 enum rtx_code code;
6f7d635c 3110 const char *fmt;
7506f491 3111
3112 if (x == 0)
3113 return 1;
3114
3115 code = GET_CODE (x);
3116 switch (code)
3117 {
3118 case REG:
0b17ab2f 3119 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
3120
3121 case MEM:
3122 if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
3123 return 1;
3124 else
3125 return expr_killed_p (XEXP (x, 0), bb);
3126
3127 case PC:
3128 case CC0: /*FIXME*/
3129 case CONST:
3130 case CONST_INT:
3131 case CONST_DOUBLE:
69ef87e2 3132 case CONST_VECTOR:
7506f491
DE
3133 case SYMBOL_REF:
3134 case LABEL_REF:
3135 case ADDR_VEC:
3136 case ADDR_DIFF_VEC:
3137 return 0;
3138
3139 default:
3140 break;
3141 }
3142
c4c81601 3143 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3144 {
3145 if (fmt[i] == 'e')
3146 {
3147 /* If we are about to do the last recursive call
3148 needed at this level, change it into iteration.
3149 This function is called enough to be worth it. */
3150 if (i == 0)
3151 return expr_killed_p (XEXP (x, i), bb);
3152 else if (expr_killed_p (XEXP (x, i), bb))
3153 return 1;
3154 }
3155 else if (fmt[i] == 'E')
3156 for (j = 0; j < XVECLEN (x, i); j++)
3157 if (expr_killed_p (XVECEXP (x, i, j), bb))
3158 return 1;
3159 }
3160
3161 return 0;
3162}
3163
3164/* Compute the set of available expressions killed in each basic block. */
3165
3166static void
02280659 3167compute_ae_kill (ae_gen, ae_kill, expr_hash_table)
a42cd965 3168 sbitmap *ae_gen, *ae_kill;
02280659 3169 struct hash_table *expr_hash_table;
7506f491 3170{
e0082a72 3171 basic_block bb;
2e653e39 3172 unsigned int i;
c4c81601 3173 struct expr *expr;
7506f491 3174
e0082a72 3175 FOR_EACH_BB (bb)
3176 for (i = 0; i < expr_hash_table->size; i++)
3177 for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash)
7506f491 3178 {
c4c81601 3179 /* Skip EXPR if generated in this block. */
e0082a72 3180 if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
c4c81601 3181 continue;
7506f491 3182
3183 if (expr_killed_p (expr->expr, bb))
3184 SET_BIT (ae_kill[bb->index], expr->bitmap_index);
7506f491 3185 }
7506f491 3186}
3187\f
3188/* Actually perform the Classic GCSE optimizations. */
3189
cc2902df 3190/* Return nonzero if occurrence OCCR of expression EXPR reaches block BB.
7506f491 3191
cc2902df 3192 CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself
3193 as a positive reach. We want to do this when there are two computations
3194 of the expression in the block.
3195
3196 VISITED is a pointer to a working buffer for tracking which BB's have
3197 been visited. It is NULL for the top-level call.
3198
3199 We treat reaching expressions that go through blocks containing the same
3200 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3201 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3202 2 as not reaching. The intent is to improve the probability of finding
3203 only one reaching expression and to reduce register lifetimes by picking
3204 the closest such expression. */
3205
3206static int
283a2545 3207expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited)
7506f491
DE
3208 struct occr *occr;
3209 struct expr *expr;
e2d2ed72 3210 basic_block bb;
7506f491
DE
3211 int check_self_loop;
3212 char *visited;
3213{
36349f8b 3214 edge pred;
7506f491 3215
e2d2ed72 3216 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
7506f491 3217 {
e2d2ed72 3218 basic_block pred_bb = pred->src;
7506f491 3219
0b17ab2f 3220 if (visited[pred_bb->index])
c4c81601 3221 /* This predecessor has already been visited. Nothing to do. */
7506f491 3222 ;
7506f491 3223 else if (pred_bb == bb)
ac7c5af5 3224 {
7506f491
DE
3225 /* BB loops on itself. */
3226 if (check_self_loop
0b17ab2f
RH
3227 && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
3228 && BLOCK_NUM (occr->insn) == pred_bb->index)
7506f491 3229 return 1;
c4c81601 3230
0b17ab2f 3231 visited[pred_bb->index] = 1;
ac7c5af5 3232 }
c4c81601 3233
7506f491 3234 /* Ignore this predecessor if it kills the expression. */
0b17ab2f
RH
3235 else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
3236 visited[pred_bb->index] = 1;
c4c81601 3237
7506f491 3238 /* Does this predecessor generate this expression? */
0b17ab2f 3239 else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
7506f491
DE
3240 {
3241 /* Is this the occurrence we're looking for?
3242 Note that there's only one generating occurrence per block
3243 so we just need to check the block number. */
0b17ab2f 3244 if (BLOCK_NUM (occr->insn) == pred_bb->index)
7506f491 3245 return 1;
c4c81601 3246
0b17ab2f 3247 visited[pred_bb->index] = 1;
7506f491 3248 }
c4c81601 3249
7506f491
DE
3250 /* Neither gen nor kill. */
3251 else
ac7c5af5 3252 {
0b17ab2f 3253 visited[pred_bb->index] = 1;
589005ff 3254 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
283a2545 3255 visited))
c4c81601 3256
7506f491 3257 return 1;
ac7c5af5 3258 }
7506f491
DE
3259 }
3260
3261 /* All paths have been checked. */
3262 return 0;
3263}
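
/* Worked example for the comment above (editorial): with edges 2->3->4,
   EXPR generated in blocks 2 and 3, and INSN in block 4, the walk from
   block 4 reaches block 3 first, finds the expression in ae_gen[3], and
   marks block 3 visited without searching beyond it.  The occurrence in
   block 2 is therefore never reported as reaching, which is exactly the
   "closest computation wins" behavior described above.  */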

/* This wrapper for expr_reaches_here_p_work() is to ensure that any
   memory allocated for that function is returned.  */

static int
expr_reaches_here_p (occr, expr, bb, check_self_loop)
     struct occr *occr;
     struct expr *expr;
     basic_block bb;
     int check_self_loop;
{
  int rval;
  char *visited = (char *) xcalloc (last_basic_block, 1);

  rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);

  free (visited);
  return rval;
}

/* Return the instruction that computes EXPR that reaches INSN's basic block.
   If there is more than one such instruction, return NULL.

   Called only by handle_avail_expr.  */

static rtx
computing_insn (expr, insn)
     struct expr *expr;
     rtx insn;
{
  basic_block bb = BLOCK_FOR_INSN (insn);

  if (expr->avail_occr->next == NULL)
    {
      if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
	/* The available expression is actually itself
	   (i.e. a loop in the flow graph) so do nothing.  */
	return NULL;

      /* (FIXME) Case that we found a pattern that was created by
	 a substitution that took place.  */
      return expr->avail_occr->insn;
    }
  else
    {
      /* Pattern is computed more than once.
	 Search backwards from this insn to see how many of these
	 computations actually reach this insn.  */
      struct occr *occr;
      rtx insn_computes_expr = NULL;
      int can_reach = 0;

      for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
	{
	  if (BLOCK_FOR_INSN (occr->insn) == bb)
	    {
	      /* The expression is generated in this block.
		 The only time we care about this is when the expression
		 is generated later in the block [and thus there's a loop].
		 We let the normal cse pass handle the other cases.  */
	      if (INSN_CUID (insn) < INSN_CUID (occr->insn)
		  && expr_reaches_here_p (occr, expr, bb, 1))
		{
		  can_reach++;
		  if (can_reach > 1)
		    return NULL;

		  insn_computes_expr = occr->insn;
		}
	    }
	  else if (expr_reaches_here_p (occr, expr, bb, 0))
	    {
	      can_reach++;
	      if (can_reach > 1)
		return NULL;

	      insn_computes_expr = occr->insn;
	    }
	}

      if (insn_computes_expr == NULL)
	abort ();

      return insn_computes_expr;
    }
}

/* Return nonzero if the definition in DEF_INSN can reach INSN.
   Only called by can_disregard_other_sets.  */

static int
def_reaches_here_p (insn, def_insn)
     rtx insn, def_insn;
{
  rtx reg;

  if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
    return 1;

  if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
    {
      if (INSN_CUID (def_insn) < INSN_CUID (insn))
	{
	  if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
	    return 1;
	  else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
	    reg = XEXP (PATTERN (def_insn), 0);
	  else if (GET_CODE (PATTERN (def_insn)) == SET)
	    reg = SET_DEST (PATTERN (def_insn));
	  else
	    abort ();

	  return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
	}
      else
	return 0;
    }

  return 0;
}

/* Return nonzero if *ADDR_THIS_REG can only have one value at INSN.  The
   value returned is the number of definitions that reach INSN.  Returning a
   value of zero means that [maybe] more than one definition reaches INSN and
   the caller can't perform whatever optimization it is trying.  i.e. it is
   always safe to return zero.  */

static int
can_disregard_other_sets (addr_this_reg, insn, for_combine)
     struct reg_set **addr_this_reg;
     rtx insn;
     int for_combine;
{
  int number_of_reaching_defs = 0;
  struct reg_set *this_reg;

  for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
    if (def_reaches_here_p (insn, this_reg->insn))
      {
	number_of_reaching_defs++;
	/* Ignore parallels for now.  */
	if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
	  return 0;

	if (!for_combine
	    && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
		|| ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
				  SET_SRC (PATTERN (insn)))))
	  /* A setting of the reg to a different value reaches INSN.  */
	  return 0;

	if (number_of_reaching_defs > 1)
	  {
	    /* If in this setting the value the register is being set to is
	       equal to the previous value the register was set to and this
	       setting reaches the insn we are trying to do the substitution
	       on then we are ok.  */
	    if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
	      return 0;
	    else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
				    SET_SRC (PATTERN (insn))))
	      return 0;
	  }

	*addr_this_reg = this_reg;
      }

  return number_of_reaching_defs;
}
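
/* For instance (an invented example): if two definitions
   (set (reg 100) (const_int 7)) both reach INSN, the rtx_equal_p checks
   above accept them and 2 is returned; if one were
   (set (reg 100) (const_int 8)) instead, zero would be returned, the
   always-safe answer.  */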

/* Expression computed by insn is available and the substitution is legal,
   so try to perform the substitution.

   The result is nonzero if any changes were made.  */

static int
handle_avail_expr (insn, expr)
     rtx insn;
     struct expr *expr;
{
  rtx pat, insn_computes_expr, expr_set;
  rtx to;
  struct reg_set *this_reg;
  int found_setting, use_src;
  int changed = 0;

  /* We only handle the case where one computation of the expression
     reaches this instruction.  */
  insn_computes_expr = computing_insn (expr, insn);
  if (insn_computes_expr == NULL)
    return 0;
  expr_set = single_set (insn_computes_expr);
  if (!expr_set)
    abort ();

  found_setting = 0;
  use_src = 0;

  /* At this point we know only one computation of EXPR outside of this
     block reaches this insn.  Now try to find a register that the
     expression is computed into.  */
  if (GET_CODE (SET_SRC (expr_set)) == REG)
    {
      /* This is the case when the available expression that reaches
	 here has already been handled as an available expression.  */
      unsigned int regnum_for_replacing
	= REGNO (SET_SRC (expr_set));

      /* If the register was created by GCSE we can't use `reg_set_table',
	 however we know it's set only once.  */
      if (regnum_for_replacing >= max_gcse_regno
	  /* If the register the expression is computed into is set only once,
	     or only one set reaches this insn, we can use it.  */
	  || (((this_reg = reg_set_table[regnum_for_replacing]),
	       this_reg->next == NULL)
	      || can_disregard_other_sets (&this_reg, insn, 0)))
	{
	  use_src = 1;
	  found_setting = 1;
	}
    }

  if (!found_setting)
    {
      unsigned int regnum_for_replacing
	= REGNO (SET_DEST (expr_set));

      /* This shouldn't happen.  */
      if (regnum_for_replacing >= max_gcse_regno)
	abort ();

      this_reg = reg_set_table[regnum_for_replacing];

      /* If the register the expression is computed into is set only once,
	 or only one set reaches this insn, use it.  */
      if (this_reg->next == NULL
	  || can_disregard_other_sets (&this_reg, insn, 0))
	found_setting = 1;
    }

  if (found_setting)
    {
      pat = PATTERN (insn);
      if (use_src)
	to = SET_SRC (expr_set);
      else
	to = SET_DEST (expr_set);
      changed = validate_change (insn, &SET_SRC (pat), to, 0);

      /* We should be able to ignore the return code from validate_change but
	 to play it safe we check.  */
      if (changed)
	{
	  gcse_subst_count++;
	  if (gcse_file != NULL)
	    {
	      fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
		       INSN_UID (insn));
	      fprintf (gcse_file, " reg %d %s insn %d\n",
		       REGNO (to), use_src ? "from" : "set in",
		       INSN_UID (insn_computes_expr));
	    }
	}
    }

  /* The register that the expr is computed into is set more than once.  */
  else if (1 /*expensive_op(this_pattern->op) && do_expensive_gcse)*/)
    {
      /* Insert an insn after insnx that copies the reg set in insnx
	 into a new pseudo register; call this new register REGN.
	 From insnb until end of basic block or until REGB is set
	 replace all uses of REGB with REGN.  */
      rtx new_insn;

      to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));

      /* Generate the new insn.  */
      /* ??? If the change fails, we return 0, even though we created
	 an insn.  I think this is ok.  */
      new_insn
	= emit_insn_after (gen_rtx_SET (VOIDmode, to,
					SET_DEST (expr_set)),
			   insn_computes_expr);

      /* Keep register set table up to date.  */
      record_one_set (REGNO (to), new_insn);

      gcse_create_count++;
      if (gcse_file != NULL)
	{
	  fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
		   INSN_UID (NEXT_INSN (insn_computes_expr)),
		   REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
	  fprintf (gcse_file, ", computed in insn %d,\n",
		   INSN_UID (insn_computes_expr));
	  fprintf (gcse_file, " into newly allocated reg %d\n",
		   REGNO (to));
	}

      pat = PATTERN (insn);

      /* Do register replacement for INSN.  */
      changed = validate_change (insn, &SET_SRC (pat),
				 SET_DEST (PATTERN
					   (NEXT_INSN (insn_computes_expr))),
				 0);

      /* We should be able to ignore the return code from validate_change but
	 to play it safe we check.  */
      if (changed)
	{
	  gcse_subst_count++;
	  if (gcse_file != NULL)
	    {
	      fprintf (gcse_file,
		       "GCSE: Replacing the source in insn %d with reg %d ",
		       INSN_UID (insn),
		       REGNO (SET_DEST (PATTERN (NEXT_INSN
						 (insn_computes_expr)))));
	      fprintf (gcse_file, "set in insn %d\n",
		       INSN_UID (insn_computes_expr));
	    }
	}
    }

  return changed;
}
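
/* Rough before/after sketch of the two cases above (register numbers are
   invented):

   found_setting: insn 10 (set (reg 50) (plus ...)) reaches insn 20
   (set (reg 60) (plus ...)); insn 20 becomes (set (reg 60) (reg 50)).

   otherwise: a copy (set (reg 99) (reg 50)) into a fresh pseudo is
   emitted right after insn 10, and insn 20 becomes
   (set (reg 60) (reg 99)).  */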

/* Perform classic GCSE.  This is called by one_classic_gcse_pass after all
   the dataflow analysis has been done.

   The result is nonzero if a change was made.  */

static int
classic_gcse ()
{
  int changed;
  rtx insn;
  basic_block bb;

  /* Note we start at block 1.  */

  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    return 0;

  changed = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      /* Reset tables used to keep track of what's still valid [since the
	 start of the block].  */
      reset_opr_set_tables ();

      for (insn = bb->head;
	   insn != NULL && insn != NEXT_INSN (bb->end);
	   insn = NEXT_INSN (insn))
	{
	  /* Is insn of form (set (pseudo-reg) ...)?  */
	  if (GET_CODE (insn) == INSN
	      && GET_CODE (PATTERN (insn)) == SET
	      && GET_CODE (SET_DEST (PATTERN (insn))) == REG
	      && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
	    {
	      rtx pat = PATTERN (insn);
	      rtx src = SET_SRC (pat);
	      struct expr *expr;

	      if (want_to_gcse_p (src)
		  /* Is the expression recorded?  */
		  && ((expr = lookup_expr (src, &expr_hash_table)) != NULL)
		  /* Is the expression available [at the start of the
		     block]?  */
		  && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
		  /* Are the operands unchanged since the start of the
		     block?  */
		  && oprs_not_set_p (src, insn))
		changed |= handle_avail_expr (insn, expr);
	    }

	  /* Keep track of everything modified by this insn.  */
	  /* ??? Need to be careful w.r.t. mods done to INSN.  */
	  if (INSN_P (insn))
	    mark_oprs_set (insn);
	}
    }

  return changed;
}

/* Top level routine to perform one classic GCSE pass.

   Return nonzero if a change was made.  */

static int
one_classic_gcse_pass (pass)
     int pass;
{
  int changed = 0;

  gcse_subst_count = 0;
  gcse_create_count = 0;

  alloc_hash_table (max_cuid, &expr_hash_table, 0);
  alloc_rd_mem (last_basic_block, max_cuid);
  compute_hash_table (&expr_hash_table);
  if (gcse_file)
    dump_hash_table (gcse_file, "Expression", &expr_hash_table);

  if (expr_hash_table.n_elems > 0)
    {
      compute_kill_rd ();
      compute_rd ();
      alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems);
      compute_ae_gen (&expr_hash_table);
      compute_ae_kill (ae_gen, ae_kill, &expr_hash_table);
      compute_available (ae_gen, ae_kill, ae_out, ae_in);
      changed = classic_gcse ();
      free_avail_expr_mem ();
    }

  free_rd_mem ();
  free_hash_table (&expr_hash_table);

  if (gcse_file)
    {
      fprintf (gcse_file, "\n");
      fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
	       current_function_name, pass, bytes_used, gcse_subst_count);
      fprintf (gcse_file, "%d insns created\n", gcse_create_count);
    }

  return changed;
}
\f
/* Compute copy/constant propagation working variables.  */

/* Local properties of assignments.  */
static sbitmap *cprop_pavloc;
static sbitmap *cprop_absaltered;

/* Global properties of assignments (computed from the local properties).  */
static sbitmap *cprop_avin;
static sbitmap *cprop_avout;

/* Allocate vars used for copy/const propagation.  N_BLOCKS is the number of
   basic blocks.  N_SETS is the number of sets.  */

static void
alloc_cprop_mem (n_blocks, n_sets)
     int n_blocks, n_sets;
{
  cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);

  cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
}

/* Free vars used by copy/const propagation.  */

static void
free_cprop_mem ()
{
  sbitmap_vector_free (cprop_pavloc);
  sbitmap_vector_free (cprop_absaltered);
  sbitmap_vector_free (cprop_avin);
  sbitmap_vector_free (cprop_avout);
}

/* For each block, compute whether X is transparent.  X is either an
   expression or an assignment [though we don't care which, for this context
   an assignment is treated as an expression].  For each block where an
   element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
   bit in BMAP.  */

static void
compute_transp (x, indx, bmap, set_p)
     rtx x;
     int indx;
     sbitmap *bmap;
     int set_p;
{
  int i, j;
  basic_block bb;
  enum rtx_code code;
  reg_set *r;
  const char *fmt;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:

  if (x == 0)
    return;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      if (set_p)
	{
	  if (REGNO (x) < FIRST_PSEUDO_REGISTER)
	    {
	      FOR_EACH_BB (bb)
		if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
		  SET_BIT (bmap[bb->index], indx);
	    }
	  else
	    {
	      for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
		SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
	    }
	}
      else
	{
	  if (REGNO (x) < FIRST_PSEUDO_REGISTER)
	    {
	      FOR_EACH_BB (bb)
		if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
		  RESET_BIT (bmap[bb->index], indx);
	    }
	  else
	    {
	      for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
		RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
	    }
	}

      return;

    case MEM:
      FOR_EACH_BB (bb)
	{
	  rtx list_entry = canon_modify_mem_list[bb->index];

	  while (list_entry)
	    {
	      rtx dest, dest_addr;

	      if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
		{
		  if (set_p)
		    SET_BIT (bmap[bb->index], indx);
		  else
		    RESET_BIT (bmap[bb->index], indx);
		  break;
		}
	      /* LIST_ENTRY must be an INSN of some kind that sets memory.
		 Examine each hunk of memory that is modified.  */

	      dest = XEXP (list_entry, 0);
	      list_entry = XEXP (list_entry, 1);
	      dest_addr = XEXP (list_entry, 0);

	      if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
					 x, rtx_addr_varies_p))
		{
		  if (set_p)
		    SET_BIT (bmap[bb->index], indx);
		  else
		    RESET_BIT (bmap[bb->index], indx);
		  break;
		}
	      list_entry = XEXP (list_entry, 1);
	    }
	}

      x = XEXP (x, 0);
      goto repeat;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }

	  compute_transp (XEXP (x, i), indx, bmap, set_p);
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
    }
}
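
/* For example (editorial): for X = (plus:SI (reg 100) (mem ...)), any
   block that sets reg 100, or that contains a call or a store which
   canon_true_dependence says may alias the MEM, is not transparent for
   X; its INDX bit in BMAP is then set or reset as requested by SET_P.  */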

/* Top level routine to do the dataflow analysis needed by copy/const
   propagation.  */

static void
compute_cprop_data ()
{
  compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
  compute_available (cprop_pavloc, cprop_absaltered,
		     cprop_avout, cprop_avin);
}
\f
/* Copy/constant propagation.  */

/* Maximum number of register uses in an insn that we handle.  */
#define MAX_USES 8

/* Table of uses found in an insn.
   Allocated statically to avoid alloc/free complexity and overhead.  */
static struct reg_use reg_use_table[MAX_USES];

/* Index into `reg_use_table' while building it.  */
static int reg_use_count;

/* Set up a list of register numbers used in INSN.  The found uses are stored
   in `reg_use_table'.  `reg_use_count' is initialized to zero before entry,
   and contains the number of uses in the table upon exit.

   ??? If a register appears multiple times we will record it multiple times.
   This doesn't hurt anything but it will slow things down.  */

static void
find_used_regs (xptr, data)
     rtx *xptr;
     void *data ATTRIBUTE_UNUSED;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  rtx x = *xptr;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);
  if (REG_P (x))
    {
      if (reg_use_count == MAX_USES)
	return;

      reg_use_table[reg_use_count].reg_rtx = x;
      reg_use_count++;
    }

  /* Recursively scan the operands of this expression.  */

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, 0);
	      goto repeat;
	    }

	  find_used_regs (&XEXP (x, i), data);
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  find_used_regs (&XVECEXP (x, i, j), data);
    }
}

/* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
   Returns nonzero if successful.  */

static int
try_replace_reg (from, to, insn)
     rtx from, to, insn;
{
  rtx note = find_reg_equal_equiv_note (insn);
  rtx src = 0;
  int success = 0;
  rtx set = single_set (insn);

  validate_replace_src_group (from, to, insn);
  if (num_changes_pending () && apply_change_group ())
    success = 1;

  /* Try to simplify SET_SRC if we have substituted a constant.  */
  if (success && set && CONSTANT_P (to))
    {
      src = simplify_rtx (SET_SRC (set));

      if (src)
	validate_change (insn, &SET_SRC (set), src, 0);
    }

  if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
    {
      /* If above failed and this is a single set, try to simplify the
	 source of the set given our substitution.  We could perhaps try
	 this for multiple SETs, but it probably won't buy us anything.  */
      src = simplify_replace_rtx (SET_SRC (set), from, to);

      if (!rtx_equal_p (src, SET_SRC (set))
	  && validate_change (insn, &SET_SRC (set), src, 0))
	success = 1;

      /* If we've failed to do replacement, have a single SET, and don't
	 already have a note, add a REG_EQUAL note to not lose
	 information.  */
      if (!success && note == 0 && set != 0)
	note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
    }

  /* If there is already a NOTE, update the expression in it with our
     replacement.  */
  else if (note != 0)
    XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);

  /* REG_EQUAL may get simplified into register.
     We don't allow that.  Remove that note.  This code ought
     not to happen, because previous code ought to synthesize
     reg-reg move, but be on the safe side.  */
  if (note && REG_P (XEXP (note, 0)))
    remove_note (insn, note);

  return success;
}
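
/* Illustration of the note handling above (hypothetical insn): replacing
   (reg 100) with (const_int 4) in (set (reg 101) (plus (reg 100) (reg 102)))
   can fail validation on a target without a matching add pattern; the
   REG_EQUAL note then still records something like
   (plus (reg 102) (const_int 4)) so the known value is not lost.  */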

/* Find a set of REGNOs that are available on entry to INSN's block.  Returns
   NULL if no such set is found.  */

static struct expr *
find_avail_set (regno, insn)
     int regno;
     rtx insn;
{
  /* SET1 contains the last set found that can be returned to the caller for
     use in a substitution.  */
  struct expr *set1 = 0;

  /* Loops are not possible here.  To get a loop we would need two sets
     available at the start of the block containing INSN.  i.e. we would
     need two sets like this available at the start of the block:

       (set (reg X) (reg Y))
       (set (reg Y) (reg X))

     This can not happen since the set of (reg Y) would have killed the
     set of (reg X) making it unavailable at the start of this block.  */
  while (1)
    {
      rtx src;
      struct expr *set = lookup_set (regno, &set_hash_table);

      /* Find a set that is available at the start of the block
	 which contains INSN.  */
      while (set)
	{
	  if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
	    break;
	  set = next_set (regno, set);
	}

      /* If no available set was found we've reached the end of the
	 (possibly empty) copy chain.  */
      if (set == 0)
	break;

      if (GET_CODE (set->expr) != SET)
	abort ();

      src = SET_SRC (set->expr);

      /* We know the set is available.
	 Now check that SRC is ANTLOC (i.e. none of the source operands
	 have changed since the start of the block).

	 If the source operand changed, we may still use it for the next
	 iteration of this loop, but we may not use it for substitutions.  */

      if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
	set1 = set;

      /* If the source of the set is anything except a register, then
	 we have reached the end of the copy chain.  */
      if (GET_CODE (src) != REG)
	break;

      /* Follow the copy chain, i.e. start another iteration of the loop
	 and see if we have an available copy into SRC.  */
      regno = REGNO (src);
    }

  /* SET1 holds the last set that was available and anticipatable at
     INSN.  */
  return set1;
}
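
/* A worked copy-chain example (register numbers invented): with
   (set (reg 2) (reg 1)) and (set (reg 3) (reg 2)) both available on entry
   to INSN's block and INSN using reg 3, the first iteration finds the set
   of reg 3, the second follows regno = 2 and finds the set from reg 1, so
   SET1 ends up preferring the oldest value in the chain that is still
   valid for substitution.  */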

/* Subroutine of cprop_insn that tries to propagate constants into
   JUMP_INSNS.  JUMP must be a conditional jump.  If SETCC is non-NULL
   it is the instruction that immediately precedes JUMP, and must be a
   single SET of a register.  FROM is what we will try to replace,
   SRC is the constant we will try to substitute for it.  Returns nonzero
   if a change was made.  */

static int
cprop_jump (bb, setcc, jump, from, src)
     basic_block bb;
     rtx setcc;
     rtx jump;
     rtx from;
     rtx src;
{
  rtx new, set_src, note_src;
  rtx set = pc_set (jump);
  rtx note = find_reg_equal_equiv_note (jump);

  if (note)
    {
      note_src = XEXP (note, 0);
      if (GET_CODE (note_src) == EXPR_LIST)
	note_src = NULL_RTX;
    }
  else
    note_src = NULL_RTX;

  /* Prefer REG_EQUAL notes except those containing EXPR_LISTs.  */
  set_src = note_src ? note_src : SET_SRC (set);

  /* First substitute the SETCC condition into the JUMP instruction,
     then substitute that given values into this expanded JUMP.  */
  if (setcc != NULL_RTX
      && !modified_between_p (from, setcc, jump)
      && !modified_between_p (src, setcc, jump))
    {
      rtx setcc_src;
      rtx setcc_set = single_set (setcc);
      rtx setcc_note = find_reg_equal_equiv_note (setcc);
      setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
	? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
      set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
				      setcc_src);
    }
  else
    setcc = NULL_RTX;

  new = simplify_replace_rtx (set_src, from, src);

  /* If no simplification can be made, then try the next register.  */
  if (rtx_equal_p (new, SET_SRC (set)))
    return 0;

  /* If this is now a no-op delete it, otherwise this must be a valid insn.  */
  if (new == pc_rtx)
    delete_insn (jump);
  else
    {
      /* Ensure the value computed inside the jump insn is equivalent
	 to the one computed by setcc.  */
      if (setcc && modified_in_p (new, setcc))
	return 0;
      if (! validate_change (jump, &SET_SRC (set), new, 0))
	{
	  /* When (some) constants are not valid in a comparison, and there
	     are two registers to be replaced by constants before the entire
	     comparison can be folded into a constant, we need to keep
	     intermediate information in REG_EQUAL notes.  For targets with
	     separate compare insns, such notes are added by try_replace_reg.
	     When we have a combined compare-and-branch instruction, however,
	     we need to attach a note to the branch itself to make this
	     optimization work.  */

	  if (!rtx_equal_p (new, note_src))
	    set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
	  return 0;
	}

      /* Remove REG_EQUAL note after simplification.  */
      if (note_src)
	remove_note (jump, note);

      /* If this has turned into an unconditional jump,
	 then put a barrier after it so that the unreachable
	 code will be deleted.  */
      if (GET_CODE (SET_SRC (set)) == LABEL_REF)
	emit_barrier_after (jump);
    }

#ifdef HAVE_cc0
  /* Delete the cc0 setter.  */
  if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
    delete_insn (setcc);
#endif

  run_jump_opt_after_gcse = 1;

  const_prop_count++;
  if (gcse_file != NULL)
    {
      fprintf (gcse_file,
	       "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
	       REGNO (from), INSN_UID (jump));
      print_rtl (gcse_file, src);
      fprintf (gcse_file, "\n");
    }
  purge_dead_edges (bb);

  return 1;
}
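
/* Sketch of the interesting case (illustrative RTL): with SETCC
     (set (reg 100) (eq (reg 101) (const_int 0)))
   and JUMP
     (set (pc) (if_then_else (ne (reg 100) (const_int 0))
			     (label_ref 23) (pc)))
   a known (const_int 0) for reg 101 lets simplify_replace_rtx fold the
   condition to true; SET_SRC collapses to (label_ref 23) and the branch
   becomes unconditional, or the jump is deleted when it folds to pc.  */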

static bool
constprop_register (insn, from, to, alter_jumps)
     rtx insn;
     rtx from;
     rtx to;
     int alter_jumps;
{
  rtx sset;

  /* Check for reg or cc0 setting instructions followed by
     conditional branch instructions first.  */
  if (alter_jumps
      && (sset = single_set (insn)) != NULL
      && NEXT_INSN (insn)
      && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
    {
      rtx dest = SET_DEST (sset);
      if ((REG_P (dest) || CC0_P (dest))
	  && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
	return 1;
    }

  /* Handle normal insns next.  */
  if (GET_CODE (insn) == INSN
      && try_replace_reg (from, to, insn))
    return 1;

  /* Try to propagate a CONST_INT into a conditional jump.
     We're pretty specific about what we will handle in this
     code, we can extend this as necessary over time.

     Right now the insn in question must look like
       (set (pc) (if_then_else ...))  */
  else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
    return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
  return 0;
}

/* Perform constant and copy propagation on INSN.
   The result is nonzero if a change was made.  */

static int
cprop_insn (insn, alter_jumps)
     rtx insn;
     int alter_jumps;
{
  struct reg_use *reg_used;
  int changed = 0;
  rtx note;

  if (!INSN_P (insn))
    return 0;

  reg_use_count = 0;
  note_uses (&PATTERN (insn), find_used_regs, NULL);

  note = find_reg_equal_equiv_note (insn);

  /* We may win even when propagating constants into notes.  */
  if (note)
    find_used_regs (&XEXP (note, 0), NULL);

  for (reg_used = &reg_use_table[0]; reg_use_count > 0;
       reg_used++, reg_use_count--)
    {
      unsigned int regno = REGNO (reg_used->reg_rtx);
      rtx pat, src;
      struct expr *set;

      /* Ignore registers created by GCSE.
	 We do this because ...  */
      if (regno >= max_gcse_regno)
	continue;

      /* If the register has already been set in this block, there's
	 nothing we can do.  */
      if (! oprs_not_set_p (reg_used->reg_rtx, insn))
	continue;

      /* Find an assignment that sets reg_used and is available
	 at the start of the block.  */
      set = find_avail_set (regno, insn);
      if (! set)
	continue;

      pat = set->expr;
      /* ??? We might be able to handle PARALLELs.  Later.  */
      if (GET_CODE (pat) != SET)
	abort ();

      src = SET_SRC (pat);

      /* Constant propagation.  */
      if (gcse_constant_p (src))
	{
	  if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
	    {
	      changed = 1;
	      const_prop_count++;
	      if (gcse_file != NULL)
		{
		  fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
		  fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
		  print_rtl (gcse_file, src);
		  fprintf (gcse_file, "\n");
		}
	      if (INSN_DELETED_P (insn))
		return 1;
	    }
	}
      else if (GET_CODE (src) == REG
	       && REGNO (src) >= FIRST_PSEUDO_REGISTER
	       && REGNO (src) != regno)
	{
	  if (try_replace_reg (reg_used->reg_rtx, src, insn))
	    {
	      changed = 1;
	      copy_prop_count++;
	      if (gcse_file != NULL)
		{
		  fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
			   regno, INSN_UID (insn));
		  fprintf (gcse_file, " with reg %d\n", REGNO (src));
		}

	      /* The original insn setting reg_used may or may not now be
		 deletable.  We leave the deletion to flow.  */
	      /* FIXME: If it turns out that the insn isn't deletable,
		 then we may have unnecessarily extended register lifetimes
		 and made things worse.  */
	    }
	}
    }

  return changed;
}
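
/* End-to-end illustration (invented insns): if cprop_avin shows
   (set (reg 100) (const_int 5)) available on entry to INSN's block and
   INSN is (set (reg 200) (plus (reg 100) (reg 101))), constant
   propagation rewrites the use to (plus (const_int 5) (reg 101)); had the
   available set been the copy (set (reg 100) (reg 150)), reg 150 would be
   substituted instead.  */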

/* Like find_used_regs, but avoid recording uses that appear in
   input-output contexts such as zero_extract or pre_dec.  This
   restricts the cases we consider to those for which local cprop
   can legitimately make replacements.  */

static void
local_cprop_find_used_regs (xptr, data)
     rtx *xptr;
     void *data;
{
  rtx x = *xptr;

  if (x == 0)
    return;

  switch (GET_CODE (x))
    {
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
    case STRICT_LOW_PART:
      return;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* Can only legitimately appear this early in the context of
	 stack pushes for function arguments, but handle all of the
	 codes nonetheless.  */
      return;

    case SUBREG:
      /* Setting a subreg of a register larger than word_mode leaves
	 the non-written words unchanged.  */
      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
	return;
      break;

    default:
      break;
    }

  find_used_regs (xptr, data);
}

/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
   their REG_EQUAL notes need updating.  */

static bool
do_local_cprop (x, insn, alter_jumps, libcall_sp)
     rtx x;
     rtx insn;
     int alter_jumps;
     rtx *libcall_sp;
{
  rtx newreg = NULL, newcnst = NULL;

  /* Rule out USE instructions and ASM statements as we don't want to
     change the hard registers mentioned.  */
  if (GET_CODE (x) == REG
      && (REGNO (x) >= FIRST_PSEUDO_REGISTER
	  || (GET_CODE (PATTERN (insn)) != USE
	      && asm_noperands (PATTERN (insn)) < 0)))
    {
      cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
      struct elt_loc_list *l;

      if (!val)
	return false;
      for (l = val->locs; l; l = l->next)
	{
	  rtx this_rtx = l->loc;
	  rtx note;

	  if (l->in_libcall)
	    continue;

	  if (gcse_constant_p (this_rtx))
	    newcnst = this_rtx;
	  if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
	      /* Don't copy propagate if it has an attached REG_EQUIV note.
		 At this point only function parameters should have
		 REG_EQUIV notes, and if the argument slot is used somewhere
		 explicitly, it means the address of the parameter has been
		 taken, so we should not extend the lifetime of the
		 pseudo.  */
	      && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
		  || GET_CODE (XEXP (note, 0)) != MEM))
	    newreg = this_rtx;
	}
      if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
	{
	  /* If we find a case where we can't fix the retval REG_EQUAL notes
	     to match the new register, we either have to abandon this
	     replacement or fix delete_trivially_dead_insns to preserve the
	     setting insn, or make it delete the REG_EQUAL note, and fix up
	     all passes that require the REG_EQUAL note there.  */
	  if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
	    abort ();
	  if (gcse_file != NULL)
	    {
	      fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
		       REGNO (x));
	      fprintf (gcse_file, "insn %d with constant ",
		       INSN_UID (insn));
	      print_rtl (gcse_file, newcnst);
	      fprintf (gcse_file, "\n");
	    }
	  const_prop_count++;
	  return true;
	}
      else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
	{
	  adjust_libcall_notes (x, newreg, insn, libcall_sp);
	  if (gcse_file != NULL)
	    {
	      fprintf (gcse_file,
		       "LOCAL COPY-PROP: Replacing reg %d in insn %d",
		       REGNO (x), INSN_UID (insn));
	      fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
	    }
	  copy_prop_count++;
	  return true;
	}
    }
  return false;
}
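
/* Local illustration (hypothetical values): if cselib has recorded
   (set (reg 100) (const_int 0)) earlier in the scan, a later use of
   reg 100 finds (const_int 0) among val->locs, NEWCNST is chosen and
   constprop_register rewrites the use in place; a pseudo in the location
   list (one without a REG_EQUIV MEM note) would instead be used as NEWREG
   for copy propagation.  */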

/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
   their REG_EQUAL notes need updating to reflect that OLDREG has been
   replaced with NEWVAL in INSN.  Return true if all substitutions could
   be made.  */

static bool
adjust_libcall_notes (oldreg, newval, insn, libcall_sp)
     rtx oldreg, newval, insn, *libcall_sp;
{
  rtx end;

  while ((end = *libcall_sp++))
    {
      rtx note = find_reg_equal_equiv_note (end);

      if (! note)
	continue;

      if (REG_P (newval))
	{
	  if (reg_set_between_p (newval, PREV_INSN (insn), end))
	    {
	      do
		{
		  note = find_reg_equal_equiv_note (end);
		  if (! note)
		    continue;
		  if (reg_mentioned_p (newval, XEXP (note, 0)))
		    return false;
		}
	      while ((end = *libcall_sp++));
	      return true;
	    }
	}
      XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
      insn = end;
    }
  return true;
}

#define MAX_NESTED_LIBCALLS 9

static void
local_cprop_pass (alter_jumps)
     int alter_jumps;
{
  rtx insn;
  struct reg_use *reg_used;
  rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
  bool changed = false;

  cselib_init ();
  libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
  *libcall_sp = 0;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);

	  if (note)
	    {
	      if (libcall_sp == libcall_stack)
		abort ();
	      *--libcall_sp = XEXP (note, 0);
	    }
	  note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
	  if (note)
	    libcall_sp++;
	  note = find_reg_equal_equiv_note (insn);
	  do
	    {
	      reg_use_count = 0;
	      note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
	      if (note)
		local_cprop_find_used_regs (&XEXP (note, 0), NULL);

	      for (reg_used = &reg_use_table[0]; reg_use_count > 0;
		   reg_used++, reg_use_count--)
		if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
				    libcall_sp))
		  {
		    changed = true;
		    break;
		  }
	      if (INSN_DELETED_P (insn))
		break;
	    }
	  while (reg_use_count);
	}
      cselib_process_insn (insn);
    }
  cselib_finish ();
  /* Global analysis may get into infinite loops for unreachable blocks.  */
  if (changed && alter_jumps)
    {
      delete_unreachable_blocks ();
      free_reg_set_mem ();
      alloc_reg_set_mem (max_reg_num ());
      compute_sets (get_insns ());
    }
}

/* Forward propagate copies.  This includes copies and constants.  Return
   nonzero if a change was made.  */

static int
cprop (alter_jumps)
     int alter_jumps;
{
  int changed;
  basic_block bb;
  rtx insn;

  /* Note we start at block 1.  */
  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    {
      if (gcse_file != NULL)
	fprintf (gcse_file, "\n");
      return 0;
    }

  changed = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      /* Reset tables used to keep track of what's still valid [since the
	 start of the block].  */
      reset_opr_set_tables ();

      for (insn = bb->head;
	   insn != NULL && insn != NEXT_INSN (bb->end);
	   insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  {
	    changed |= cprop_insn (insn, alter_jumps);

	    /* Keep track of everything modified by this insn.  */
	    /* ??? Need to be careful w.r.t. mods done to INSN.  Don't
	       call mark_oprs_set if we turned the insn into a NOTE.  */
	    if (GET_CODE (insn) != NOTE)
	      mark_oprs_set (insn);
	  }
    }

  if (gcse_file != NULL)
    fprintf (gcse_file, "\n");

  return changed;
}

/* Similar to get_condition, only the resulting condition must be
   valid at JUMP, instead of at EARLIEST.

   This differs from noce_get_condition in ifcvt.c in that we prefer not to
   settle for the condition variable in the jump instruction being integral.
   We prefer to be able to record the value of a user variable, rather than
   the value of a temporary used in a condition.  This could be solved by
   recording the value of *every* register scanned by canonicalize_condition,
   but this would require some code reorganization.  */

rtx
fis_get_condition (jump)
     rtx jump;
{
  rtx cond, set, tmp, insn, earliest;
  bool reverse;

  if (! any_condjump_p (jump))
    return NULL_RTX;

  set = pc_set (jump);
  cond = XEXP (SET_SRC (set), 0);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
	     && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));

  /* Use canonicalize_condition to do the dirty work of manipulating
     MODE_CC values and COMPARE rtx codes.  */
  tmp = canonicalize_condition (jump, cond, reverse, &earliest, NULL_RTX);
  if (!tmp)
    return NULL_RTX;

  /* Verify that the given condition is valid at JUMP by virtue of not
     having been modified since EARLIEST.  */
  for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && modified_in_p (tmp, insn))
      break;
  if (insn == jump)
    return tmp;

  /* The condition was modified.  See if we can get a partial result
     that doesn't follow all the reversals.  Perhaps combine can fold
     them together later.  */
  tmp = XEXP (tmp, 0);
  if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
    return NULL_RTX;
  tmp = canonicalize_condition (jump, cond, reverse, &earliest, tmp);
  if (!tmp)
    return NULL_RTX;

  /* For sanity's sake, re-validate the new result.  */
  for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && modified_in_p (tmp, insn))
      return NULL_RTX;

  return tmp;
}

/* Find the implicit sets of a function.  An "implicit set" is a constraint
   on the value of a variable, implied by a conditional jump.  For example,
   following "if (x == 2)", the then branch may be optimized as though the
   conditional performed an "explicit set", in this example, "x = 2".  This
   function records the set patterns that are implicit at the start of each
   basic block.  */

static void
find_implicit_sets ()
{
  basic_block bb, dest;
  unsigned int count;
  rtx cond, new;

  count = 0;
  FOR_EACH_BB (bb)
    /* Check for more than one successor.  */
    if (bb->succ && bb->succ->succ_next)
      {
	cond = fis_get_condition (bb->end);

	if (cond
	    && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
	    && GET_CODE (XEXP (cond, 0)) == REG
	    && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
	    && gcse_constant_p (XEXP (cond, 1)))
	  {
	    dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
					 : FALLTHRU_EDGE (bb)->dest;

	    if (dest && ! dest->pred->pred_next
		&& dest != EXIT_BLOCK_PTR)
	      {
		new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
				   XEXP (cond, 1));
		implicit_sets[dest->index] = new;
		if (gcse_file)
		  {
		    fprintf (gcse_file, "Implicit set of reg %d in ",
			     REGNO (XEXP (cond, 0)));
		    fprintf (gcse_file, "basic block %d\n", dest->index);
		  }
		count++;
	      }
	  }
      }

  if (gcse_file)
    fprintf (gcse_file, "Found %d implicit sets\n", count);
}
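
/* Concrete example for the comment above (editorial): given

     if (x == 2)
       y = x + 1;

   the comparison yields cond (eq (reg 100) (const_int 2)), so the block
   executed when the condition is known true (provided it has a single
   predecessor) receives implicit_sets[dest->index] =
   (set (reg 100) (const_int 2)), and cprop can then fold the use of x in
   the assignment, making y the constant 3.  */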
4712
7506f491 4713/* Perform one copy/constant propagation pass.
a0134312
RS
4714 PASS is the pass count. If CPROP_JUMPS is true, perform constant
4715 propagation into conditional jumps. If BYPASS_JUMPS is true,
4716 perform conditional jump bypassing optimizations. */
7506f491
DE
4717
4718static int
a0134312 4719one_cprop_pass (pass, cprop_jumps, bypass_jumps)
7506f491 4720 int pass;
a0134312
RS
4721 int cprop_jumps;
4722 int bypass_jumps;
7506f491
DE
4723{
4724 int changed = 0;
4725
4726 const_prop_count = 0;
4727 copy_prop_count = 0;
4728
a0134312 4729 local_cprop_pass (cprop_jumps);
ae860ff7 4730
fbef91d8
RS
4731 /* Determine implicit sets. */
4732 implicit_sets = (rtx *) xcalloc (last_basic_block, sizeof (rtx));
4733 find_implicit_sets ();
4734
02280659
ZD
4735 alloc_hash_table (max_cuid, &set_hash_table, 1);
4736 compute_hash_table (&set_hash_table);
fbef91d8
RS
4737
4738 /* Free implicit_sets before peak usage. */
4739 free (implicit_sets);
4740 implicit_sets = NULL;
4741
7506f491 4742 if (gcse_file)
02280659
ZD
4743 dump_hash_table (gcse_file, "SET", &set_hash_table);
4744 if (set_hash_table.n_elems > 0)
7506f491 4745 {
02280659 4746 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
7506f491 4747 compute_cprop_data ();
a0134312
RS
4748 changed = cprop (cprop_jumps);
4749 if (bypass_jumps)
0e3f0221 4750 changed |= bypass_conditional_jumps ();
7506f491
DE
4751 free_cprop_mem ();
4752 }
c4c81601 4753
02280659 4754 free_hash_table (&set_hash_table);
7506f491
DE
4755
4756 if (gcse_file)
4757 {
c4c81601
RK
4758 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
4759 current_function_name, pass, bytes_used);
4760 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4761 const_prop_count, copy_prop_count);
7506f491 4762 }
1649d92f
JH
4763 /* Global analysis may get into infinite loops for unreachable blocks. */
4764 if (changed && cprop_jumps)
4765 delete_unreachable_blocks ();
7506f491
DE
4766
4767 return changed;
4768}
4769\f
0e3f0221
RS
4770/* Bypass conditional jumps. */
4771
7821bfc7
RS
4772/* The value of last_basic_block at the beginning of the jump_bypass
4773 pass. The use of redirect_edge_and_branch_force may introduce new
4774 basic blocks, but the data flow analysis is only valid for basic
4775 block indices less than bypass_last_basic_block. */
4776
4777static int bypass_last_basic_block;
4778
0e3f0221
RS
4779/* Find a set of REGNO to a constant that is available at the end of basic
4780 block BB. Returns NULL if no such set is found. Based heavily upon
4781 find_avail_set. */
4782
4783static struct expr *
4784find_bypass_set (regno, bb)
4785 int regno;
4786 int bb;
4787{
4788 struct expr *result = 0;
4789
4790 for (;;)
4791 {
4792 rtx src;
ceda50e9 4793 struct expr *set = lookup_set (regno, &set_hash_table);
0e3f0221
RS
4794
4795 while (set)
4796 {
4797 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
4798 break;
4799 set = next_set (regno, set);
4800 }
4801
4802 if (set == 0)
4803 break;
4804
4805 if (GET_CODE (set->expr) != SET)
4806 abort ();
4807
4808 src = SET_SRC (set->expr);
6b2d1c9e 4809 if (gcse_constant_p (src))
0e3f0221
RS
4810 result = set;
4811
4812 if (GET_CODE (src) != REG)
4813 break;
4814
4815 regno = REGNO (src);
4816 }
4817 return result;
4818}
4819
4820
e129b3f9
RS
4821/* Subroutine of bypass_block that checks whether a pseudo is killed by
4822 any of the instructions inserted on an edge. Jump bypassing places
4823 condition code setters on CFG edges using insert_insn_on_edge. This
4824 function is required to check that our data flow analysis is still
4825 valid prior to commit_edge_insertions. */
4826
4827static bool
4828reg_killed_on_edge (reg, e)
4829 rtx reg;
4830 edge e;
4831{
4832 rtx insn;
4833
4834 for (insn = e->insns; insn; insn = NEXT_INSN (insn))
4835 if (INSN_P (insn) && reg_set_p (reg, insn))
4836 return true;
4837
4838 return false;
4839}
4840
0e3f0221
RS
4841/* Subroutine of bypass_conditional_jumps that attempts to bypass the given
4842 basic block BB which has more than one predecessor. If not NULL, SETCC
4843 is the first instruction of BB, which is immediately followed by JUMP_INSN
4844 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
e129b3f9
RS
4845 Returns nonzero if a change was made.
4846
4847 During the jump bypassing pass, we may place copies of SETCC instuctions
4848 on CFG edges. The following routine must be careful to pay attention to
4849 these inserted insns when performing its transformations. */
0e3f0221
RS
4850
4851static int
4852bypass_block (bb, setcc, jump)
4853 basic_block bb;
4854 rtx setcc, jump;
4855{
4856 rtx insn, note;
e129b3f9 4857 edge e, enext, edest;
818b6b7f 4858 int i, change;
72b8d451 4859 int may_be_loop_header;
0e3f0221
RS
4860
4861 insn = (setcc != NULL) ? setcc : jump;
4862
4863 /* Determine set of register uses in INSN. */
4864 reg_use_count = 0;
4865 note_uses (&PATTERN (insn), find_used_regs, NULL);
4866 note = find_reg_equal_equiv_note (insn);
4867 if (note)
4868 find_used_regs (&XEXP (note, 0), NULL);
4869
72b8d451
ZD
4870 may_be_loop_header = false;
4871 for (e = bb->pred; e; e = e->pred_next)
4872 if (e->flags & EDGE_DFS_BACK)
4873 {
4874 may_be_loop_header = true;
4875 break;
4876 }
4877
0e3f0221
RS
4878 change = 0;
4879 for (e = bb->pred; e; e = enext)
4880 {
4881 enext = e->pred_next;
7821bfc7
RS
4882 if (e->flags & EDGE_COMPLEX)
4883 continue;
4884
4885 /* We can't redirect edges from new basic blocks. */
4886 if (e->src->index >= bypass_last_basic_block)
4887 continue;
4888
72b8d451
ZD
4889 /* The irreducible loops created by redirecting of edges entering the
4890 loop from outside would decrease effectivity of some of the following
4891 optimalizations, so prevent this. */
4892 if (may_be_loop_header
4893 && !(e->flags & EDGE_DFS_BACK))
4894 continue;
4895
0e3f0221
RS
4896 for (i = 0; i < reg_use_count; i++)
4897 {
4898 struct reg_use *reg_used = &reg_use_table[i];
589005ff 4899 unsigned int regno = REGNO (reg_used->reg_rtx);
818b6b7f 4900 basic_block dest, old_dest;
589005ff
KH
4901 struct expr *set;
4902 rtx src, new;
0e3f0221 4903
589005ff
KH
4904 if (regno >= max_gcse_regno)
4905 continue;
0e3f0221 4906
589005ff 4907 set = find_bypass_set (regno, e->src->index);
0e3f0221
RS
4908
4909 if (! set)
4910 continue;
4911
e129b3f9
RS
4912 /* Check the data flow is valid after edge insertions. */
4913 if (e->insns && reg_killed_on_edge (reg_used->reg_rtx, e))
4914 continue;
4915
589005ff 4916 src = SET_SRC (pc_set (jump));
0e3f0221
RS
4917
4918 if (setcc != NULL)
4919 src = simplify_replace_rtx (src,
589005ff
KH
4920 SET_DEST (PATTERN (setcc)),
4921 SET_SRC (PATTERN (setcc)));
0e3f0221
RS
4922
4923 new = simplify_replace_rtx (src, reg_used->reg_rtx,
589005ff 4924 SET_SRC (set->expr));
0e3f0221 4925
e129b3f9
RS
4926 /* Jump bypassing may have already placed instructions on
4927 edges of the CFG. We can't bypass an outgoing edge that
4928 has instructions associated with it, as these insns won't
4929 get executed if the incoming edge is redirected. */
4930
589005ff 4931 if (new == pc_rtx)
e129b3f9
RS
4932 {
4933 edest = FALLTHRU_EDGE (bb);
4934 dest = edest->insns ? NULL : edest->dest;
4935 }
0e3f0221 4936 else if (GET_CODE (new) == LABEL_REF)
e129b3f9
RS
4937 {
4938 dest = BLOCK_FOR_INSN (XEXP (new, 0));
4939 /* Don't bypass edges containing instructions. */
4940 for (edest = bb->succ; edest; edest = edest->succ_next)
4941 if (edest->dest == dest && edest->insns)
4942 {
4943 dest = NULL;
4944 break;
4945 }
4946 }
0e3f0221
RS
4947 else
4948 dest = NULL;
4949
818b6b7f 4950 old_dest = e->dest;
7821bfc7
RS
4951 if (dest != NULL
4952 && dest != old_dest
4953 && dest != EXIT_BLOCK_PTR)
4954 {
4955 redirect_edge_and_branch_force (e, dest);
4956
818b6b7f 4957 /* Copy the register setter to the redirected edge.
0e3f0221
RS
4958 Don't copy CC0 setters, as CC0 is dead after jump. */
4959 if (setcc)
4960 {
4961 rtx pat = PATTERN (setcc);
818b6b7f 4962 if (!CC0_P (SET_DEST (pat)))
0e3f0221
RS
4963 insert_insn_on_edge (copy_insn (pat), e);
4964 }
4965
4966 if (gcse_file != NULL)
4967 {
818b6b7f
RH
4968 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
4969 regno, INSN_UID (jump));
0e3f0221
RS
4970 print_rtl (gcse_file, SET_SRC (set->expr));
4971 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
818b6b7f 4972 e->src->index, old_dest->index, dest->index);
0e3f0221
RS
4973 }
4974 change = 1;
4975 break;
4976 }
4977 }
4978 }
4979 return change;
4980}
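
/* A hypothetical source-level picture of the transformation (not part
   of the original comments): in

     if (x == 0)
       c = 1;
     else
       c = 0;
     if (c)         <- block with two predecessors, only a test
       foo ();

   the value of C is known on each incoming edge, so each edge can be
   redirected straight to the appropriate target of the second test,
   bypassing the conditional jump on C entirely.  */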
4981
4982/* Find basic blocks with more than one predecessor that only contain a
4983 single conditional jump. If the result of the comparison is known at
4984 compile-time from any incoming edge, redirect that edge to the
9a71ece1
RH
4985 appropriate target. Returns nonzero if a change was made.
4986
4987 This function is now mis-named, because we also handle indirect jumps. */
0e3f0221
RS
4988
4989static int
4990bypass_conditional_jumps ()
4991{
4992 basic_block bb;
4993 int changed;
4994 rtx setcc;
4995 rtx insn;
4996 rtx dest;
4997
4998 /* Note we start at block 1. */
4999 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
5000 return 0;
5001
7821bfc7 5002 bypass_last_basic_block = last_basic_block;
72b8d451 5003 mark_dfs_back_edges ();
7821bfc7 5004
0e3f0221
RS
5005 changed = 0;
5006 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
589005ff 5007 EXIT_BLOCK_PTR, next_bb)
0e3f0221
RS
5008 {
5009 /* Check for more than one predecessor. */
5010 if (bb->pred && bb->pred->pred_next)
5011 {
5012 setcc = NULL_RTX;
5013 for (insn = bb->head;
5014 insn != NULL && insn != NEXT_INSN (bb->end);
5015 insn = NEXT_INSN (insn))
5016 if (GET_CODE (insn) == INSN)
5017 {
9543a9d2 5018 if (setcc)
0e3f0221 5019 break;
ba4f7968 5020 if (GET_CODE (PATTERN (insn)) != SET)
0e3f0221
RS
5021 break;
5022
ba4f7968 5023 dest = SET_DEST (PATTERN (insn));
818b6b7f 5024 if (REG_P (dest) || CC0_P (dest))
0e3f0221 5025 setcc = insn;
0e3f0221
RS
5026 else
5027 break;
5028 }
5029 else if (GET_CODE (insn) == JUMP_INSN)
5030 {
9a71ece1
RH
5031 if ((any_condjump_p (insn) || computed_jump_p (insn))
5032 && onlyjump_p (insn))
0e3f0221
RS
5033 changed |= bypass_block (bb, setcc, insn);
5034 break;
5035 }
5036 else if (INSN_P (insn))
5037 break;
5038 }
5039 }
5040
818b6b7f 5041 /* If we bypassed any register setting insns, we inserted a
fbe5a4a6 5042 copy on the redirected edge. These need to be committed. */
0e3f0221
RS
5043 if (changed)
5044 commit_edge_insertions();
5045
5046 return changed;
5047}
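
/* A hedged sketch of the block shape the walk above accepts (the RTL
   is illustrative, not taken from a real dump): a candidate block is
   at most one register setter immediately followed by the jump,

     (insn ... (set (reg 60) (gt (reg 61) (const_int 0))))      <- SETCC
     (jump_insn ... (set (pc) (if_then_else (ne (reg 60) ...))))

   Anything else in the block disqualifies it.  When bypass_block
   redirects an edge, the SETCC pattern is copied onto that edge,
   which is why commit_edge_insertions must run before returning.  */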
5048\f
a65f3558 5049/* Compute PRE+LCM working variables. */
7506f491
DE
5050
5051/* Local properties of expressions. */
5052/* Nonzero for expressions that are transparent in the block. */
a65f3558 5053static sbitmap *transp;
7506f491 5054
5c35539b
RH
5055/* Nonzero for expressions that are transparent at the end of the block.
 5056 This is only zero for expressions killed by an abnormal critical
 5057 edge created by a call. */
a65f3558 5058static sbitmap *transpout;
5c35539b 5059
a65f3558
JL
5060/* Nonzero for expressions that are computed (available) in the block. */
5061static sbitmap *comp;
7506f491 5062
a65f3558
JL
5063/* Nonzero for expressions that are locally anticipatable in the block. */
5064static sbitmap *antloc;
7506f491 5065
a65f3558
JL
5066/* Nonzero for expressions where this block is an optimal computation
5067 point. */
5068static sbitmap *pre_optimal;
5c35539b 5069
a65f3558
JL
5070/* Nonzero for expressions which are redundant in a particular block. */
5071static sbitmap *pre_redundant;
7506f491 5072
a42cd965
AM
5073/* Nonzero for expressions which should be inserted on a specific edge. */
5074static sbitmap *pre_insert_map;
5075
5076/* Nonzero for expressions which should be deleted in a specific block. */
5077static sbitmap *pre_delete_map;
5078
5079/* Contains the edge_list returned by pre_edge_lcm. */
5080static struct edge_list *edge_list;
5081
a65f3558
JL
5082/* Redundant insns. */
5083static sbitmap pre_redundant_insns;
7506f491 5084
a65f3558 5085/* Allocate vars used for PRE analysis. */
7506f491
DE
5086
5087static void
a65f3558
JL
5088alloc_pre_mem (n_blocks, n_exprs)
5089 int n_blocks, n_exprs;
7506f491 5090{
a65f3558
JL
5091 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
5092 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
5093 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5faf03ae 5094
a42cd965
AM
5095 pre_optimal = NULL;
5096 pre_redundant = NULL;
5097 pre_insert_map = NULL;
5098 pre_delete_map = NULL;
5099 ae_in = NULL;
5100 ae_out = NULL;
a42cd965 5101 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
c4c81601 5102
a42cd965 5103 /* pre_insert and pre_delete are allocated later. */
7506f491
DE
5104}
5105
a65f3558 5106/* Free vars used for PRE analysis. */
7506f491
DE
5107
5108static void
a65f3558 5109free_pre_mem ()
7506f491 5110{
5a660bff
DB
5111 sbitmap_vector_free (transp);
5112 sbitmap_vector_free (comp);
bd3675fc
JL
5113
5114 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
7506f491 5115
a42cd965 5116 if (pre_optimal)
5a660bff 5117 sbitmap_vector_free (pre_optimal);
a42cd965 5118 if (pre_redundant)
5a660bff 5119 sbitmap_vector_free (pre_redundant);
a42cd965 5120 if (pre_insert_map)
5a660bff 5121 sbitmap_vector_free (pre_insert_map);
a42cd965 5122 if (pre_delete_map)
5a660bff 5123 sbitmap_vector_free (pre_delete_map);
a42cd965 5124 if (ae_in)
5a660bff 5125 sbitmap_vector_free (ae_in);
a42cd965 5126 if (ae_out)
5a660bff 5127 sbitmap_vector_free (ae_out);
a42cd965 5128
bd3675fc 5129 transp = comp = NULL;
a42cd965 5130 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
55d3f917 5131 ae_in = ae_out = NULL;
7506f491
DE
5132}
5133
5134/* Top level routine to do the dataflow analysis needed by PRE. */
5135
5136static void
5137compute_pre_data ()
5138{
b614171e 5139 sbitmap trapping_expr;
e0082a72 5140 basic_block bb;
b614171e 5141 unsigned int ui;
c66e8ae9 5142
02280659 5143 compute_local_properties (transp, comp, antloc, &expr_hash_table);
d55bc081 5144 sbitmap_vector_zero (ae_kill, last_basic_block);
c66e8ae9 5145
b614171e 5146 /* Collect expressions which might trap. */
02280659 5147 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
b614171e 5148 sbitmap_zero (trapping_expr);
02280659 5149 for (ui = 0; ui < expr_hash_table.size; ui++)
b614171e
MM
5150 {
5151 struct expr *e;
02280659 5152 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
b614171e
MM
5153 if (may_trap_p (e->expr))
5154 SET_BIT (trapping_expr, e->bitmap_index);
5155 }
5156
c66e8ae9
JL
5157 /* Compute ae_kill for each basic block using:
5158
5159 ~(TRANSP | COMP)
5160
a2e90653 5161 This is significantly faster than compute_ae_kill. */
c66e8ae9 5162
e0082a72 5163 FOR_EACH_BB (bb)
c66e8ae9 5164 {
b614171e
MM
5165 edge e;
5166
5167 /* If the current block is the destination of an abnormal edge, we
5168 kill all trapping expressions because we won't be able to properly
5169 place the instruction on the edge. So make them neither
5170 anticipatable nor transparent. This is fairly conservative. */
e0082a72 5171 for (e = bb->pred; e ; e = e->pred_next)
b614171e
MM
5172 if (e->flags & EDGE_ABNORMAL)
5173 {
e0082a72
ZD
5174 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
5175 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
b614171e
MM
5176 break;
5177 }
5178
e0082a72
ZD
5179 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
5180 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
c66e8ae9
JL
5181 }
5182
02280659 5183 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
a42cd965 5184 ae_kill, &pre_insert_map, &pre_delete_map);
5a660bff 5185 sbitmap_vector_free (antloc);
bd3675fc 5186 antloc = NULL;
5a660bff 5187 sbitmap_vector_free (ae_kill);
589005ff 5188 ae_kill = NULL;
76ac938b 5189 sbitmap_free (trapping_expr);
7506f491
DE
5190}
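
/* A minimal sketch of the AE_KILL identity used above, with a plain
   word standing in for one block's sbitmap row (hypothetical values):

     unsigned transp = 0x05;              exprs 0 and 2 transparent
     unsigned comp   = 0x02;              expr 1 computed here
     unsigned kill   = ~(transp | comp);  all remaining exprs killed

   That is, an expression is killed at the end of a block exactly when
   it is neither transparent through the block nor computed in it,
   which is why the two sbitmap operations above suffice.  */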
5191\f
5192/* PRE utilities */
5193
cc2902df 5194/* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
a65f3558 5195 block BB.
7506f491
DE
5196
5197 VISITED is a pointer to a working buffer for tracking which BB's have
5198 been visited. It is NULL for the top-level call.
5199
5200 We treat reaching expressions that go through blocks containing the same
5201 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
5202 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
5203 2 as not reaching. The intent is to improve the probability of finding
5204 only one reaching expression and to reduce register lifetimes by picking
5205 the closest such expression. */
5206
5207static int
89e606c9 5208pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited)
e2d2ed72 5209 basic_block occr_bb;
7506f491 5210 struct expr *expr;
e2d2ed72 5211 basic_block bb;
7506f491
DE
5212 char *visited;
5213{
36349f8b 5214 edge pred;
7506f491 5215
e2d2ed72 5216 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
7506f491 5217 {
e2d2ed72 5218 basic_block pred_bb = pred->src;
7506f491 5219
36349f8b 5220 if (pred->src == ENTRY_BLOCK_PTR
7506f491 5221 /* Has predecessor has already been visited? */
0b17ab2f 5222 || visited[pred_bb->index])
c4c81601
RK
5223 ;/* Nothing to do. */
5224
7506f491 5225 /* Does this predecessor generate this expression? */
0b17ab2f 5226 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
7506f491
DE
5227 {
5228 /* Is this the occurrence we're looking for?
5229 Note that there's only one generating occurrence per block
5230 so we just need to check the block number. */
a65f3558 5231 if (occr_bb == pred_bb)
7506f491 5232 return 1;
c4c81601 5233
0b17ab2f 5234 visited[pred_bb->index] = 1;
7506f491
DE
5235 }
5236 /* Ignore this predecessor if it kills the expression. */
0b17ab2f
RH
5237 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
5238 visited[pred_bb->index] = 1;
c4c81601 5239
7506f491
DE
5240 /* Neither gen nor kill. */
5241 else
ac7c5af5 5242 {
0b17ab2f 5243 visited[pred_bb->index] = 1;
89e606c9 5244 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
7506f491 5245 return 1;
ac7c5af5 5246 }
7506f491
DE
5247 }
5248
5249 /* All paths have been checked. */
5250 return 0;
5251}
283a2545
RL
5252
 5253/* The wrapper for pre_expr_reaches_here_p_work that ensures that any
dc297297 5254 memory allocated for that function is freed. */
283a2545
RL
5255
5256static int
89e606c9 5257pre_expr_reaches_here_p (occr_bb, expr, bb)
e2d2ed72 5258 basic_block occr_bb;
283a2545 5259 struct expr *expr;
e2d2ed72 5260 basic_block bb;
283a2545
RL
5261{
5262 int rval;
d55bc081 5263 char *visited = (char *) xcalloc (last_basic_block, 1);
283a2545 5264
8e42ace1 5265 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
283a2545
RL
5266
5267 free (visited);
c4c81601 5268 return rval;
283a2545 5269}
7506f491 5270\f
a42cd965
AM
5271
5272/* Given an expr, generate RTL which we can insert at the end of a BB,
589005ff 5273 or on an edge. Set the block number of any insns generated to
a42cd965
AM
5274 the value of BB. */
5275
5276static rtx
5277process_insert_insn (expr)
5278 struct expr *expr;
5279{
5280 rtx reg = expr->reaching_reg;
fb0c0a12
RK
5281 rtx exp = copy_rtx (expr->expr);
5282 rtx pat;
a42cd965
AM
5283
5284 start_sequence ();
fb0c0a12
RK
5285
5286 /* If the expression is something that's an operand, like a constant,
5287 just copy it to a register. */
5288 if (general_operand (exp, GET_MODE (reg)))
5289 emit_move_insn (reg, exp);
5290
5291 /* Otherwise, make a new insn to compute this expression and make sure the
5292 insn will be recognized (this also adds any needed CLOBBERs). Copy the
5293 expression to make sure we don't have any sharing issues. */
8d444206 5294 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
fb0c0a12 5295 abort ();
589005ff 5296
2f937369 5297 pat = get_insns ();
a42cd965
AM
5298 end_sequence ();
5299
5300 return pat;
5301}
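
/* A hypothetical example of the two cases above (register numbers
   invented): for EXPR->expr == (const_int 42) the generated sequence
   is simply

     (set (reg 65) (const_int 42))

   via emit_move_insn, while for (plus (reg 60) (const_int 4)) we
   build

     (set (reg 65) (plus (reg 60) (const_int 4)))

   and let insn_invalid_p recognize it, which also adds any CLOBBERs
   the target requires.  */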
589005ff 5302
a65f3558
JL
5303/* Add EXPR to the end of basic block BB.
5304
 5305 This is used by both PRE and code hoisting.
5306
5307 For PRE, we want to verify that the expr is either transparent
5308 or locally anticipatable in the target block. This check makes
5309 no sense for code hoisting. */
7506f491
DE
5310
5311static void
a65f3558 5312insert_insn_end_bb (expr, bb, pre)
7506f491 5313 struct expr *expr;
e2d2ed72 5314 basic_block bb;
a65f3558 5315 int pre;
7506f491 5316{
e2d2ed72 5317 rtx insn = bb->end;
7506f491
DE
5318 rtx new_insn;
5319 rtx reg = expr->reaching_reg;
5320 int regno = REGNO (reg);
2f937369 5321 rtx pat, pat_end;
7506f491 5322
a42cd965 5323 pat = process_insert_insn (expr);
2f937369
DM
5324 if (pat == NULL_RTX || ! INSN_P (pat))
5325 abort ();
5326
5327 pat_end = pat;
5328 while (NEXT_INSN (pat_end) != NULL_RTX)
5329 pat_end = NEXT_INSN (pat_end);
7506f491
DE
5330
5331 /* If the last insn is a jump, insert EXPR in front [taking care to
068473ec
JH
 5332 handle cc0, etc. properly]. Similarly, we need to take care of
 5333 trapping instructions in the presence of non-call exceptions. */
7506f491 5334
068473ec
JH
5335 if (GET_CODE (insn) == JUMP_INSN
5336 || (GET_CODE (insn) == INSN
5337 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
7506f491 5338 {
50b2596f 5339#ifdef HAVE_cc0
7506f491 5340 rtx note;
50b2596f 5341#endif
068473ec
JH
5342 /* It should always be the case that we can put these instructions
 5343 anywhere in the basic block when performing PRE optimizations.
5344 Check this. */
3b25fbfe 5345 if (GET_CODE (insn) == INSN && pre
0b17ab2f 5346 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
589005ff 5347 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
068473ec 5348 abort ();
7506f491
DE
5349
5350 /* If this is a jump table, then we can't insert stuff here. Since
5351 we know the previous real insn must be the tablejump, we insert
5352 the new instruction just before the tablejump. */
5353 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
5354 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
5355 insn = prev_real_insn (insn);
5356
5357#ifdef HAVE_cc0
5358 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
5359 if cc0 isn't set. */
5360 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
5361 if (note)
5362 insn = XEXP (note, 0);
5363 else
5364 {
5365 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
5366 if (maybe_cc0_setter
2c3c49de 5367 && INSN_P (maybe_cc0_setter)
7506f491
DE
5368 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
5369 insn = maybe_cc0_setter;
5370 }
5371#endif
5372 /* FIXME: What if something in cc0/jump uses value set in new insn? */
3c030e88 5373 new_insn = emit_insn_before (pat, insn);
3947e2f9 5374 }
c4c81601 5375
3947e2f9
RH
5376 /* Likewise if the last insn is a call, as will happen in the presence
5377 of exception handling. */
068473ec
JH
5378 else if (GET_CODE (insn) == CALL_INSN
5379 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
3947e2f9 5380 {
3947e2f9
RH
5381 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
5382 we search backward and place the instructions before the first
 5383 parameter is loaded. Do this for everyone for consistency and on
fbe5a4a6 5384 the presumption that we'll get better code elsewhere as well.
3947e2f9 5385
c4c81601 5386 It should always be the case that we can put these instructions
a65f3558
JL
 5387 anywhere in the basic block when performing PRE optimizations.
5388 Check this. */
c4c81601 5389
a65f3558 5390 if (pre
0b17ab2f 5391 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
589005ff 5392 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
3947e2f9
RH
5393 abort ();
5394
5395 /* Since different machines initialize their parameter registers
5396 in different orders, assume nothing. Collect the set of all
5397 parameter registers. */
833366d6 5398 insn = find_first_parameter_load (insn, bb->head);
3947e2f9 5399
b1d26727
JL
5400 /* If we found all the parameter loads, then we want to insert
5401 before the first parameter load.
5402
5403 If we did not find all the parameter loads, then we might have
5404 stopped on the head of the block, which could be a CODE_LABEL.
5405 If we inserted before the CODE_LABEL, then we would be putting
5406 the insn in the wrong basic block. In that case, put the insn
b5229628 5407 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
0a377997 5408 while (GET_CODE (insn) == CODE_LABEL
589ca5cb 5409 || NOTE_INSN_BASIC_BLOCK_P (insn))
b5229628 5410 insn = NEXT_INSN (insn);
c4c81601 5411
3c030e88 5412 new_insn = emit_insn_before (pat, insn);
7506f491
DE
5413 }
5414 else
3c030e88 5415 new_insn = emit_insn_after (pat, insn);
7506f491 5416
2f937369 5417 while (1)
a65f3558 5418 {
2f937369 5419 if (INSN_P (pat))
a65f3558 5420 {
2f937369
DM
5421 add_label_notes (PATTERN (pat), new_insn);
5422 note_stores (PATTERN (pat), record_set_info, pat);
a65f3558 5423 }
2f937369
DM
5424 if (pat == pat_end)
5425 break;
5426 pat = NEXT_INSN (pat);
a65f3558 5427 }
3947e2f9 5428
7506f491
DE
5429 gcse_create_count++;
5430
5431 if (gcse_file)
5432 {
c4c81601 5433 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
0b17ab2f 5434 bb->index, INSN_UID (new_insn));
c4c81601
RK
5435 fprintf (gcse_file, "copying expression %d to reg %d\n",
5436 expr->bitmap_index, regno);
7506f491
DE
5437 }
5438}
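
/* A hypothetical illustration of the CALL_INSN case above (hard
   register numbers invented): if the block ends in

     (insn ... (set (reg:SI 5 di) (reg:SI 70)))    <- parameter load
     (insn ... (set (reg:SI 4 si) (reg:SI 71)))    <- parameter load
     (call_insn ...)

   then find_first_parameter_load walks back from the call, and the
   new insn is emitted before the first parameter load, never between
   the loads and the call.  */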
5439
a42cd965
AM
5440/* Insert partially redundant expressions on edges in the CFG to make
5441 the expressions fully redundant. */
7506f491 5442
a42cd965
AM
5443static int
5444pre_edge_insert (edge_list, index_map)
5445 struct edge_list *edge_list;
7506f491
DE
5446 struct expr **index_map;
5447{
c4c81601 5448 int e, i, j, num_edges, set_size, did_insert = 0;
a65f3558
JL
5449 sbitmap *inserted;
5450
a42cd965
AM
5451 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
5452 if it reaches any of the deleted expressions. */
7506f491 5453
a42cd965
AM
5454 set_size = pre_insert_map[0]->size;
5455 num_edges = NUM_EDGES (edge_list);
02280659 5456 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
a42cd965 5457 sbitmap_vector_zero (inserted, num_edges);
7506f491 5458
a42cd965 5459 for (e = 0; e < num_edges; e++)
7506f491
DE
5460 {
5461 int indx;
e2d2ed72 5462 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
a65f3558 5463
a65f3558 5464 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
7506f491 5465 {
a42cd965 5466 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
7506f491 5467
02280659 5468 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
c4c81601
RK
5469 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
5470 {
5471 struct expr *expr = index_map[j];
5472 struct occr *occr;
a65f3558 5473
ff7cc307 5474 /* Now look at each deleted occurrence of this expression. */
c4c81601
RK
5475 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5476 {
5477 if (! occr->deleted_p)
5478 continue;
5479
 5480 /* Insert this expression on this edge if it would
ff7cc307 5481 reach the deleted occurrence in BB. */
c4c81601
RK
5482 if (!TEST_BIT (inserted[e], j))
5483 {
5484 rtx insn;
5485 edge eg = INDEX_EDGE (edge_list, e);
5486
5487 /* We can't insert anything on an abnormal and
5488 critical edge, so we insert the insn at the end of
5489 the previous block. There are several alternatives
 5490 detailed in Morgan's book P277 (sec 10.5) for
5491 handling this situation. This one is easiest for
5492 now. */
5493
5494 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
5495 insert_insn_end_bb (index_map[j], bb, 0);
5496 else
5497 {
5498 insn = process_insert_insn (index_map[j]);
5499 insert_insn_on_edge (insn, eg);
5500 }
5501
5502 if (gcse_file)
5503 {
5504 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
0b17ab2f
RH
5505 bb->index,
5506 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
c4c81601
RK
5507 fprintf (gcse_file, "copy expression %d\n",
5508 expr->bitmap_index);
5509 }
5510
a13d4ebf 5511 update_ld_motion_stores (expr);
c4c81601
RK
5512 SET_BIT (inserted[e], j);
5513 did_insert = 1;
5514 gcse_create_count++;
5515 }
5516 }
5517 }
7506f491
DE
5518 }
5519 }
5faf03ae 5520
5a660bff 5521 sbitmap_vector_free (inserted);
a42cd965 5522 return did_insert;
7506f491
DE
5523}
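
/* The classic source-level picture of what this insertion achieves
   (a hypothetical example, not from the original comments):

     if (cond)
       x = a + b;     A + B computed on one incoming path only
     ...
     y = a + b;       partially redundant

   pre_insert_map marks the incoming edge on which A + B was missing;
   inserting the expression there makes the second computation fully
   redundant.  */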
5524
c4c81601 5525/* Copy the result of INSN to REG. INDX is the expression number. */
7506f491
DE
5526
5527static void
5528pre_insert_copy_insn (expr, insn)
5529 struct expr *expr;
5530 rtx insn;
5531{
5532 rtx reg = expr->reaching_reg;
5533 int regno = REGNO (reg);
5534 int indx = expr->bitmap_index;
5535 rtx set = single_set (insn);
5536 rtx new_insn;
5537
5538 if (!set)
5539 abort ();
c4c81601 5540
47a3dae1 5541 new_insn = emit_insn_after (gen_move_insn (reg, copy_rtx (SET_DEST (set))), insn);
c4c81601 5542
7506f491
DE
5543 /* Keep register set table up to date. */
5544 record_one_set (regno, new_insn);
5545
5546 gcse_create_count++;
5547
5548 if (gcse_file)
a42cd965
AM
5549 fprintf (gcse_file,
5550 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
5551 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
5552 INSN_UID (insn), regno);
222f7ba9 5553 update_ld_motion_stores (expr);
7506f491
DE
5554}
5555
5556/* Copy available expressions that reach the redundant expression
5557 to `reaching_reg'. */
5558
5559static void
5560pre_insert_copies ()
5561{
2e653e39 5562 unsigned int i;
c4c81601
RK
5563 struct expr *expr;
5564 struct occr *occr;
5565 struct occr *avail;
a65f3558 5566
7506f491
DE
5567 /* For each available expression in the table, copy the result to
5568 `reaching_reg' if the expression reaches a deleted one.
5569
5570 ??? The current algorithm is rather brute force.
5571 Need to do some profiling. */
5572
02280659
ZD
5573 for (i = 0; i < expr_hash_table.size; i++)
5574 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601
RK
5575 {
5576 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
5577 we don't want to insert a copy here because the expression may not
5578 really be redundant. So only insert an insn if the expression was
5579 deleted. This test also avoids further processing if the
5580 expression wasn't deleted anywhere. */
5581 if (expr->reaching_reg == NULL)
5582 continue;
5583
5584 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5585 {
5586 if (! occr->deleted_p)
5587 continue;
7506f491 5588
c4c81601
RK
5589 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
5590 {
5591 rtx insn = avail->insn;
7506f491 5592
c4c81601
RK
5593 /* No need to handle this one if handled already. */
5594 if (avail->copied_p)
5595 continue;
7506f491 5596
c4c81601
RK
5597 /* Don't handle this one if it's a redundant one. */
5598 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
5599 continue;
7506f491 5600
c4c81601 5601 /* Or if the expression doesn't reach the deleted one. */
589005ff 5602 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
e2d2ed72
AM
5603 expr,
5604 BLOCK_FOR_INSN (occr->insn)))
c4c81601 5605 continue;
7506f491 5606
c4c81601
RK
5607 /* Copy the result of avail to reaching_reg. */
5608 pre_insert_copy_insn (expr, insn);
5609 avail->copied_p = 1;
5610 }
5611 }
5612 }
7506f491
DE
5613}
5614
10d1bb36
JH
 5615/* Emit a move from SRC to DEST, noting the equivalence with the expression computed
5616 in INSN. */
5617static rtx
5618gcse_emit_move_after (src, dest, insn)
5619 rtx src, dest, insn;
5620{
5621 rtx new;
6bdb8dd6 5622 rtx set = single_set (insn), set2;
10d1bb36
JH
5623 rtx note;
5624 rtx eqv;
5625
5626 /* This should never fail since we're creating a reg->reg copy
5627 we've verified to be valid. */
5628
6bdb8dd6 5629 new = emit_insn_after (gen_move_insn (dest, src), insn);
285464d0 5630
10d1bb36 5631 /* Note the equivalence for local CSE pass. */
6bdb8dd6
JH
5632 set2 = single_set (new);
5633 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
5634 return new;
10d1bb36
JH
5635 if ((note = find_reg_equal_equiv_note (insn)))
5636 eqv = XEXP (note, 0);
5637 else
5638 eqv = SET_SRC (set);
5639
a500466b 5640 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
10d1bb36
JH
5641
5642 return new;
5643}
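
/* For example (insn and register numbers hypothetical): when

     (insn 42 (set (reg 60) (plus (reg 61) (reg 62))))

   is replaced by a copy from reaching register 70, the new insn is

     (insn 43 (set (reg 60) (reg 70)))
        with REG_EQUAL note (plus (reg 61) (reg 62))

   so a later CSE pass still knows which value reg 60 carries.  */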
5644
7506f491 5645/* Delete redundant computations.
7506f491
DE
5646 Deletion is done by changing the insn to copy the `reaching_reg' of
5647 the expression into the result of the SET. It is left to later passes
5648 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
5649
cc2902df 5650 Returns nonzero if a change is made. */
7506f491
DE
5651
5652static int
5653pre_delete ()
5654{
2e653e39 5655 unsigned int i;
63bc1d05 5656 int changed;
c4c81601
RK
5657 struct expr *expr;
5658 struct occr *occr;
a65f3558 5659
7506f491 5660 changed = 0;
02280659
ZD
5661 for (i = 0; i < expr_hash_table.size; i++)
5662 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601
RK
5663 {
5664 int indx = expr->bitmap_index;
7506f491 5665
c4c81601
RK
5666 /* We only need to search antic_occr since we require
5667 ANTLOC != 0. */
7506f491 5668
c4c81601
RK
5669 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5670 {
5671 rtx insn = occr->insn;
5672 rtx set;
e2d2ed72 5673 basic_block bb = BLOCK_FOR_INSN (insn);
7506f491 5674
0b17ab2f 5675 if (TEST_BIT (pre_delete_map[bb->index], indx))
c4c81601
RK
5676 {
5677 set = single_set (insn);
5678 if (! set)
5679 abort ();
5680
5681 /* Create a pseudo-reg to store the result of reaching
5682 expressions into. Get the mode for the new pseudo from
5683 the mode of the original destination pseudo. */
5684 if (expr->reaching_reg == NULL)
5685 expr->reaching_reg
5686 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5687
10d1bb36
JH
5688 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5689 delete_insn (insn);
5690 occr->deleted_p = 1;
5691 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
5692 changed = 1;
5693 gcse_subst_count++;
7506f491 5694
c4c81601
RK
5695 if (gcse_file)
5696 {
5697 fprintf (gcse_file,
5698 "PRE: redundant insn %d (expression %d) in ",
5699 INSN_UID (insn), indx);
5700 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
0b17ab2f 5701 bb->index, REGNO (expr->reaching_reg));
c4c81601
RK
5702 }
5703 }
5704 }
5705 }
7506f491
DE
5706
5707 return changed;
5708}
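
/* A sketch of the rewrite performed above (hypothetical source): a
   redundant computation

     x = a + b;

   in a block marked in PRE_DELETE_MAP becomes

     x = t;           where T is expr->reaching_reg

   and the insertions made by pre_edge_insert guarantee that T holds
   the value of A + B on every path into the block.  */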
5709
5710/* Perform GCSE optimizations using PRE.
5711 This is called by one_pre_gcse_pass after all the dataflow analysis
5712 has been done.
5713
c4c81601
RK
 5714 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
5715 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
5716 Compiler Design and Implementation.
7506f491 5717
c4c81601
RK
5718 ??? A new pseudo reg is created to hold the reaching expression. The nice
5719 thing about the classical approach is that it would try to use an existing
5720 reg. If the register can't be adequately optimized [i.e. we introduce
5721 reload problems], one could add a pass here to propagate the new register
5722 through the block.
7506f491 5723
c4c81601
RK
5724 ??? We don't handle single sets in PARALLELs because we're [currently] not
5725 able to copy the rest of the parallel when we insert copies to create full
5726 redundancies from partial redundancies. However, there's no reason why we
5727 can't handle PARALLELs in the cases where there are no partial
7506f491
DE
5728 redundancies. */
5729
5730static int
5731pre_gcse ()
5732{
2e653e39
RK
5733 unsigned int i;
5734 int did_insert, changed;
7506f491 5735 struct expr **index_map;
c4c81601 5736 struct expr *expr;
7506f491
DE
5737
5738 /* Compute a mapping from expression number (`bitmap_index') to
5739 hash table entry. */
5740
02280659
ZD
5741 index_map = (struct expr **) xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
5742 for (i = 0; i < expr_hash_table.size; i++)
5743 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601 5744 index_map[expr->bitmap_index] = expr;
7506f491
DE
5745
5746 /* Reset bitmap used to track which insns are redundant. */
a65f3558
JL
5747 pre_redundant_insns = sbitmap_alloc (max_cuid);
5748 sbitmap_zero (pre_redundant_insns);
7506f491
DE
5749
5750 /* Delete the redundant insns first so that
5751 - we know what register to use for the new insns and for the other
5752 ones with reaching expressions
5753 - we know which insns are redundant when we go to create copies */
c4c81601 5754
7506f491
DE
5755 changed = pre_delete ();
5756
a42cd965 5757 did_insert = pre_edge_insert (edge_list, index_map);
c4c81601 5758
7506f491 5759 /* In other places with reaching expressions, copy the expression to the
a42cd965 5760 specially allocated pseudo-reg that reaches the redundant expr. */
7506f491 5761 pre_insert_copies ();
a42cd965
AM
5762 if (did_insert)
5763 {
5764 commit_edge_insertions ();
5765 changed = 1;
5766 }
7506f491 5767
283a2545 5768 free (index_map);
76ac938b 5769 sbitmap_free (pre_redundant_insns);
7506f491
DE
5770 return changed;
5771}
5772
5773/* Top level routine to perform one PRE GCSE pass.
5774
cc2902df 5775 Return nonzero if a change was made. */
7506f491
DE
5776
5777static int
b5ce41ff 5778one_pre_gcse_pass (pass)
7506f491
DE
5779 int pass;
5780{
5781 int changed = 0;
5782
5783 gcse_subst_count = 0;
5784 gcse_create_count = 0;
5785
02280659 5786 alloc_hash_table (max_cuid, &expr_hash_table, 0);
a42cd965 5787 add_noreturn_fake_exit_edges ();
a13d4ebf
AM
5788 if (flag_gcse_lm)
5789 compute_ld_motion_mems ();
5790
02280659 5791 compute_hash_table (&expr_hash_table);
a13d4ebf 5792 trim_ld_motion_mems ();
7506f491 5793 if (gcse_file)
02280659 5794 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
c4c81601 5795
02280659 5796 if (expr_hash_table.n_elems > 0)
7506f491 5797 {
02280659 5798 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
7506f491
DE
5799 compute_pre_data ();
5800 changed |= pre_gcse ();
a42cd965 5801 free_edge_list (edge_list);
7506f491
DE
5802 free_pre_mem ();
5803 }
c4c81601 5804
a13d4ebf 5805 free_ldst_mems ();
a42cd965 5806 remove_fake_edges ();
02280659 5807 free_hash_table (&expr_hash_table);
7506f491
DE
5808
5809 if (gcse_file)
5810 {
c4c81601
RK
5811 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
5812 current_function_name, pass, bytes_used);
5813 fprintf (gcse_file, "%d substs, %d insns created\n",
5814 gcse_subst_count, gcse_create_count);
7506f491
DE
5815 }
5816
5817 return changed;
5818}
aeb2f500
JW
5819\f
5820/* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5b1ef594
JDA
5821 If notes are added to an insn which references a CODE_LABEL, the
5822 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5823 because the following loop optimization pass requires them. */
aeb2f500
JW
5824
5825/* ??? This is very similar to the loop.c add_label_notes function. We
5826 could probably share code here. */
5827
5828/* ??? If there was a jump optimization pass after gcse and before loop,
5829 then we would not need to do this here, because jump would add the
5830 necessary REG_LABEL notes. */
5831
5832static void
5833add_label_notes (x, insn)
5834 rtx x;
5835 rtx insn;
5836{
5837 enum rtx_code code = GET_CODE (x);
5838 int i, j;
6f7d635c 5839 const char *fmt;
aeb2f500
JW
5840
5841 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
5842 {
6b3603c2 5843 /* This code used to ignore labels that referred to dispatch tables to
ac7c5af5 5844 avoid flow generating (slightly) worse code.
6b3603c2 5845
ac7c5af5
JL
5846 We no longer ignore such label references (see LABEL_REF handling in
5847 mark_jump_label for additional information). */
c4c81601 5848
6b8c9327 5849 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
6b3603c2 5850 REG_NOTES (insn));
5b1ef594 5851 if (LABEL_P (XEXP (x, 0)))
589005ff 5852 LABEL_NUSES (XEXP (x, 0))++;
aeb2f500
JW
5853 return;
5854 }
5855
c4c81601 5856 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
aeb2f500
JW
5857 {
5858 if (fmt[i] == 'e')
5859 add_label_notes (XEXP (x, i), insn);
5860 else if (fmt[i] == 'E')
5861 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5862 add_label_notes (XVECEXP (x, i, j), insn);
5863 }
5864}
a65f3558
JL
5865
5866/* Compute transparent outgoing information for each block.
5867
5868 An expression is transparent to an edge unless it is killed by
5869 the edge itself. This can only happen with abnormal control flow,
5870 when the edge is traversed through a call. This happens with
5871 non-local labels and exceptions.
5872
5873 This would not be necessary if we split the edge. While this is
5874 normally impossible for abnormal critical edges, with some effort
5875 it should be possible with exception handling, since we still have
5876 control over which handler should be invoked. But due to increased
5877 EH table sizes, this may not be worthwhile. */
5878
5879static void
5880compute_transpout ()
5881{
e0082a72 5882 basic_block bb;
2e653e39 5883 unsigned int i;
c4c81601 5884 struct expr *expr;
a65f3558 5885
d55bc081 5886 sbitmap_vector_ones (transpout, last_basic_block);
a65f3558 5887
e0082a72 5888 FOR_EACH_BB (bb)
a65f3558 5889 {
a65f3558
JL
 5890 /* Note that flow inserted a nop at the end of basic blocks that
5891 end in call instructions for reasons other than abnormal
5892 control flow. */
e0082a72 5893 if (GET_CODE (bb->end) != CALL_INSN)
a65f3558
JL
5894 continue;
5895
02280659
ZD
5896 for (i = 0; i < expr_hash_table.size; i++)
5897 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
c4c81601
RK
5898 if (GET_CODE (expr->expr) == MEM)
5899 {
5900 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5901 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
5902 continue;
589005ff 5903
c4c81601
RK
5904 /* ??? Optimally, we would use interprocedural alias
5905 analysis to determine if this mem is actually killed
5906 by this call. */
e0082a72 5907 RESET_BIT (transpout[bb->index], expr->bitmap_index);
c4c81601 5908 }
a65f3558
JL
5909 }
5910}
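
/* A hedged example of the effect of the loop above: if a block ends
   in a call, a load such as (mem (reg 60)) is cleared from TRANSPOUT,
   since the callee may store through an aliasing pointer; only a MEM
   of a constant pool SYMBOL_REF stays transparent, because constant
   pool entries are read-only.  */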
dfdb644f
JL
5911
5912/* Removal of useless null pointer checks */
5913
dfdb644f 5914/* Called via note_stores. X is set by SETTER. If X is a register we must
0511851c
MM
5915 invalidate nonnull_local and set nonnull_killed. DATA is really a
5916 `null_pointer_info *'.
dfdb644f
JL
5917
5918 We ignore hard registers. */
c4c81601 5919
dfdb644f 5920static void
84832317 5921invalidate_nonnull_info (x, setter, data)
dfdb644f
JL
5922 rtx x;
5923 rtx setter ATTRIBUTE_UNUSED;
0511851c 5924 void *data;
dfdb644f 5925{
770ae6cc
RK
5926 unsigned int regno;
5927 struct null_pointer_info *npi = (struct null_pointer_info *) data;
c4c81601 5928
dfdb644f
JL
5929 while (GET_CODE (x) == SUBREG)
5930 x = SUBREG_REG (x);
5931
5932 /* Ignore anything that is not a register or is a hard register. */
5933 if (GET_CODE (x) != REG
0511851c
MM
5934 || REGNO (x) < npi->min_reg
5935 || REGNO (x) >= npi->max_reg)
dfdb644f
JL
5936 return;
5937
0511851c 5938 regno = REGNO (x) - npi->min_reg;
dfdb644f 5939
e0082a72
ZD
5940 RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
5941 SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
dfdb644f
JL
5942}
5943
0511851c
MM
5944/* Do null-pointer check elimination for the registers indicated in
5945 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5946 they are not our responsibility to free. */
dfdb644f 5947
99a15921 5948static int
9cd56be1 5949delete_null_pointer_checks_1 (block_reg, nonnull_avin,
8e184d9c 5950 nonnull_avout, npi)
770ae6cc 5951 unsigned int *block_reg;
0511851c
MM
5952 sbitmap *nonnull_avin;
5953 sbitmap *nonnull_avout;
5954 struct null_pointer_info *npi;
dfdb644f 5955{
e0082a72 5956 basic_block bb, current_block;
0511851c
MM
5957 sbitmap *nonnull_local = npi->nonnull_local;
5958 sbitmap *nonnull_killed = npi->nonnull_killed;
99a15921 5959 int something_changed = 0;
589005ff 5960
dfdb644f
JL
5961 /* Compute local properties, nonnull and killed. A register will have
5962 the nonnull property if at the end of the current block its value is
5963 known to be nonnull. The killed property indicates that somewhere in
5964 the block any information we had about the register is killed.
5965
5966 Note that a register can have both properties in a single block. That
5967 indicates that it's killed, then later in the block a new value is
5968 computed. */
d55bc081
ZD
5969 sbitmap_vector_zero (nonnull_local, last_basic_block);
5970 sbitmap_vector_zero (nonnull_killed, last_basic_block);
c4c81601 5971
e0082a72 5972 FOR_EACH_BB (current_block)
dfdb644f
JL
5973 {
5974 rtx insn, stop_insn;
5975
0511851c
MM
5976 /* Set the current block for invalidate_nonnull_info. */
5977 npi->current_block = current_block;
5978
dfdb644f
JL
5979 /* Scan each insn in the basic block looking for memory references and
5980 register sets. */
e0082a72
ZD
5981 stop_insn = NEXT_INSN (current_block->end);
5982 for (insn = current_block->head;
dfdb644f
JL
5983 insn != stop_insn;
5984 insn = NEXT_INSN (insn))
5985 {
5986 rtx set;
0511851c 5987 rtx reg;
dfdb644f
JL
5988
5989 /* Ignore anything that is not a normal insn. */
2c3c49de 5990 if (! INSN_P (insn))
dfdb644f
JL
5991 continue;
5992
5993 /* Basically ignore anything that is not a simple SET. We do have
5994 to make sure to invalidate nonnull_local and set nonnull_killed
5995 for such insns though. */
5996 set = single_set (insn);
5997 if (!set)
5998 {
0511851c 5999 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
dfdb644f
JL
6000 continue;
6001 }
6002
f63d1bf7 6003 /* See if we've got a usable memory load. We handle it first
dfdb644f
JL
6004 in case it uses its address register as a dest (which kills
6005 the nonnull property). */
6006 if (GET_CODE (SET_SRC (set)) == MEM
0511851c
MM
6007 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
6008 && REGNO (reg) >= npi->min_reg
6009 && REGNO (reg) < npi->max_reg)
e0082a72 6010 SET_BIT (nonnull_local[current_block->index],
0511851c 6011 REGNO (reg) - npi->min_reg);
dfdb644f
JL
6012
6013 /* Now invalidate stuff clobbered by this insn. */
0511851c 6014 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
dfdb644f
JL
6015
6016 /* And handle stores, we do these last since any sets in INSN can
6017 not kill the nonnull property if it is derived from a MEM
6018 appearing in a SET_DEST. */
6019 if (GET_CODE (SET_DEST (set)) == MEM
0511851c
MM
6020 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
6021 && REGNO (reg) >= npi->min_reg
6022 && REGNO (reg) < npi->max_reg)
e0082a72 6023 SET_BIT (nonnull_local[current_block->index],
0511851c 6024 REGNO (reg) - npi->min_reg);
dfdb644f
JL
6025 }
6026 }
6027
6028 /* Now compute global properties based on the local properties. This
fbe5a4a6 6029 is a classic global availability algorithm. */
ce724250
JL
6030 compute_available (nonnull_local, nonnull_killed,
6031 nonnull_avout, nonnull_avin);
dfdb644f
JL
6032
6033 /* Now look at each bb and see if it ends with a compare of a value
6034 against zero. */
e0082a72 6035 FOR_EACH_BB (bb)
dfdb644f 6036 {
e0082a72 6037 rtx last_insn = bb->end;
0511851c 6038 rtx condition, earliest;
dfdb644f
JL
6039 int compare_and_branch;
6040
0511851c
MM
6041 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
6042 since BLOCK_REG[BB] is zero if this block did not end with a
6043 comparison against zero, this condition works. */
e0082a72
ZD
6044 if (block_reg[bb->index] < npi->min_reg
6045 || block_reg[bb->index] >= npi->max_reg)
dfdb644f
JL
6046 continue;
6047
6048 /* LAST_INSN is a conditional jump. Get its condition. */
6049 condition = get_condition (last_insn, &earliest);
6050
40d7a3fe
NB
6051 /* If we can't determine the condition then skip. */
6052 if (! condition)
6053 continue;
6054
dfdb644f 6055 /* Is the register known to have a nonzero value? */
e0082a72 6056 if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
dfdb644f
JL
6057 continue;
6058
6059 /* Try to compute whether the compare/branch at the loop end is one or
6060 two instructions. */
6061 if (earliest == last_insn)
6062 compare_and_branch = 1;
6063 else if (earliest == prev_nonnote_insn (last_insn))
6064 compare_and_branch = 2;
6065 else
6066 continue;
6067
6068 /* We know the register in this comparison is nonnull at exit from
6069 this block. We can optimize this comparison. */
6070 if (GET_CODE (condition) == NE)
6071 {
6072 rtx new_jump;
6073
38c1593d
JH
6074 new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
6075 last_insn);
dfdb644f
JL
6076 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
6077 LABEL_NUSES (JUMP_LABEL (new_jump))++;
6078 emit_barrier_after (new_jump);
6079 }
8e184d9c 6080
99a15921 6081 something_changed = 1;
9cd56be1 6082 delete_insn (last_insn);
dfdb644f 6083 if (compare_and_branch == 2)
589005ff 6084 delete_insn (earliest);
e0082a72 6085 purge_dead_edges (bb);
0511851c
MM
6086
6087 /* Don't check this block again. (Note that BLOCK_END is
589005ff 6088 invalid here; we deleted the last instruction in the
0511851c 6089 block.) */
e0082a72 6090 block_reg[bb->index] = 0;
0511851c 6091 }
99a15921
JL
6092
6093 return something_changed;
0511851c
MM
6094}
6095
6096/* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
6097 at compile time.
6098
6099 This is conceptually similar to global constant/copy propagation and
6100 classic global CSE (it even uses the same dataflow equations as cprop).
6101
 6102 If a register is used as a memory address with the form (mem (reg)), then we
6103 know that REG can not be zero at that point in the program. Any instruction
6104 which sets REG "kills" this property.
6105
6106 So, if every path leading to a conditional branch has an available memory
6107 reference of that form, then we know the register can not have the value
589005ff 6108 zero at the conditional branch.
0511851c 6109
fbe5a4a6 6110 So we merely need to compute the local properties and propagate that data
0511851c
MM
6111 around the cfg, then optimize where possible.
6112
6113 We run this pass two times. Once before CSE, then again after CSE. This
6114 has proven to be the most profitable approach. It is rare for new
6115 optimization opportunities of this nature to appear after the first CSE
6116 pass.
6117
6118 This could probably be integrated with global cprop with a little work. */
6119
99a15921 6120int
0511851c 6121delete_null_pointer_checks (f)
2e653e39 6122 rtx f ATTRIBUTE_UNUSED;
0511851c 6123{
0511851c 6124 sbitmap *nonnull_avin, *nonnull_avout;
770ae6cc 6125 unsigned int *block_reg;
e0082a72 6126 basic_block bb;
0511851c
MM
6127 int reg;
6128 int regs_per_pass;
6129 int max_reg;
6130 struct null_pointer_info npi;
99a15921 6131 int something_changed = 0;
0511851c 6132
0511851c 6133 /* If we have only a single block, then there's nothing to do. */
0b17ab2f 6134 if (n_basic_blocks <= 1)
99a15921 6135 return 0;
0511851c
MM
6136
6137 /* Trying to perform global optimizations on flow graphs which have
6138 a high connectivity will take a long time and is unlikely to be
6139 particularly useful.
6140
43e72072 6141 In normal circumstances a cfg should have about twice as many edges
0511851c
MM
6142 as blocks. But we do not want to punish small functions which have
6143 a couple switch statements. So we require a relatively large number
6144 of basic blocks and the ratio of edges to blocks to be high. */
0b17ab2f 6145 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
99a15921 6146 return 0;
0511851c 6147
0511851c
MM
6148 /* We need four bitmaps, each with a bit for each register in each
6149 basic block. */
6150 max_reg = max_reg_num ();
d55bc081 6151 regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);
0511851c
MM
6152
6153 /* Allocate bitmaps to hold local and global properties. */
d55bc081
ZD
6154 npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6155 npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6156 nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6157 nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
0511851c
MM
6158
6159 /* Go through the basic blocks, seeing whether or not each block
6160 ends with a conditional branch whose condition is a comparison
6161 against zero. Record the register compared in BLOCK_REG. */
d55bc081 6162 block_reg = (unsigned int *) xcalloc (last_basic_block, sizeof (int));
e0082a72 6163 FOR_EACH_BB (bb)
0511851c 6164 {
e0082a72 6165 rtx last_insn = bb->end;
0511851c
MM
6166 rtx condition, earliest, reg;
6167
6168 /* We only want conditional branches. */
6169 if (GET_CODE (last_insn) != JUMP_INSN
7f1c097d
JH
6170 || !any_condjump_p (last_insn)
6171 || !onlyjump_p (last_insn))
0511851c
MM
6172 continue;
6173
6174 /* LAST_INSN is a conditional jump. Get its condition. */
6175 condition = get_condition (last_insn, &earliest);
6176
4fe9b91c 6177 /* If we were unable to get the condition, or it is not an equality
0511851c
MM
6178 comparison against zero then there's nothing we can do. */
6179 if (!condition
6180 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
6181 || GET_CODE (XEXP (condition, 1)) != CONST_INT
589005ff 6182 || (XEXP (condition, 1)
0511851c
MM
6183 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
6184 continue;
6185
6186 /* We must be checking a register against zero. */
6187 reg = XEXP (condition, 0);
6188 if (GET_CODE (reg) != REG)
6189 continue;
6190
e0082a72 6191 block_reg[bb->index] = REGNO (reg);
0511851c
MM
6192 }
6193
6194 /* Go through the algorithm for each block of registers. */
6195 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
6196 {
6197 npi.min_reg = reg;
6198 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
99a15921
JL
6199 something_changed |= delete_null_pointer_checks_1 (block_reg,
6200 nonnull_avin,
6201 nonnull_avout,
6202 &npi);
dfdb644f
JL
6203 }
6204
0511851c
MM
6205 /* Free the table of registers compared at the end of every block. */
6206 free (block_reg);
6207
dfdb644f 6208 /* Free bitmaps. */
5a660bff
DB
6209 sbitmap_vector_free (npi.nonnull_local);
6210 sbitmap_vector_free (npi.nonnull_killed);
6211 sbitmap_vector_free (nonnull_avin);
6212 sbitmap_vector_free (nonnull_avout);
99a15921
JL
6213
6214 return something_changed;
dfdb644f 6215}
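
/* A hypothetical source fragment showing what this pass removes:

     int deref (int *p)
     {
       int x = *p;     (mem (reg P)) proves P nonnull here
       if (p == 0)     so this test against zero is known false
         return 0;
       return x;
     }

   Every path to the comparison dereferences P, so the availability
   computation above shows P nonnull at the branch and the dead
   compare/branch is deleted.  */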
bb457bd9
JL
6216
6217/* Code Hoisting variables and subroutines. */
6218
6219/* Very busy expressions. */
6220static sbitmap *hoist_vbein;
6221static sbitmap *hoist_vbeout;
6222
6223/* Hoistable expressions. */
6224static sbitmap *hoist_exprs;
6225
6226/* Dominator bitmaps. */
355be0dc 6227dominance_info dominators;
bb457bd9
JL
6228
6229/* ??? We could compute post dominators and run this algorithm in
68e82b83 6230 reverse to perform tail merging; doing so would probably be
bb457bd9
JL
6231 more effective than the tail merging code in jump.c.
6232
6233 It's unclear if tail merging could be run in parallel with
6234 code hoisting. It would be nice. */
6235
6236/* Allocate vars used for code hoisting analysis. */
6237
6238static void
6239alloc_code_hoist_mem (n_blocks, n_exprs)
6240 int n_blocks, n_exprs;
6241{
6242 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
6243 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
6244 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
6245
6246 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
6247 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
6248 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
6249 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
bb457bd9
JL
6250}
6251
6252/* Free vars used for code hoisting analysis. */
6253
6254static void
6255free_code_hoist_mem ()
6256{
5a660bff
DB
6257 sbitmap_vector_free (antloc);
6258 sbitmap_vector_free (transp);
6259 sbitmap_vector_free (comp);
bb457bd9 6260
5a660bff
DB
6261 sbitmap_vector_free (hoist_vbein);
6262 sbitmap_vector_free (hoist_vbeout);
6263 sbitmap_vector_free (hoist_exprs);
6264 sbitmap_vector_free (transpout);
bb457bd9 6265
355be0dc 6266 free_dominance_info (dominators);
bb457bd9
JL
6267}
6268
6269/* Compute the very busy expressions at entry/exit from each block.
6270
6271 An expression is very busy if all paths from a given point
6272 compute the expression. */
6273
6274static void
6275compute_code_hoist_vbeinout ()
6276{
e0082a72
ZD
6277 int changed, passes;
6278 basic_block bb;
bb457bd9 6279
d55bc081
ZD
6280 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
6281 sbitmap_vector_zero (hoist_vbein, last_basic_block);
bb457bd9
JL
6282
6283 passes = 0;
6284 changed = 1;
c4c81601 6285
bb457bd9
JL
6286 while (changed)
6287 {
6288 changed = 0;
c4c81601 6289
bb457bd9
JL
 6290 /* We scan the blocks in reverse order to speed up
6291 the convergence. */
e0082a72 6292 FOR_EACH_BB_REVERSE (bb)
bb457bd9 6293 {
e0082a72
ZD
6294 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
6295 hoist_vbeout[bb->index], transp[bb->index]);
6296 if (bb->next_bb != EXIT_BLOCK_PTR)
6297 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
bb457bd9 6298 }
c4c81601 6299
bb457bd9
JL
6300 passes++;
6301 }
6302
6303 if (gcse_file)
6304 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
6305}
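
/* A small worked example of "very busy" (hypothetical source):

     if (cond)
       x = a + b;
     else
       y = a + b;

   A + B is very busy at the conditional, since every path from that
   point computes it; the bit for the expression will be set in
   hoist_vbeout of the test's block, making it a hoisting candidate. */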
6306
6307/* Top level routine to do the dataflow analysis needed by code hoisting. */
6308
6309static void
6310compute_code_hoist_data ()
6311{
02280659 6312 compute_local_properties (transp, comp, antloc, &expr_hash_table);
bb457bd9
JL
6313 compute_transpout ();
6314 compute_code_hoist_vbeinout ();
355be0dc 6315 dominators = calculate_dominance_info (CDI_DOMINATORS);
bb457bd9
JL
6316 if (gcse_file)
6317 fprintf (gcse_file, "\n");
6318}
6319
6320/* Determine if the expression identified by EXPR_INDEX would
 6321 reach BB unimpaired if it was placed at the end of EXPR_BB.
6322
 6323 It's unclear exactly what Muchnick meant by "unimpaired". It seems
6324 to me that the expression must either be computed or transparent in
6325 *every* block in the path(s) from EXPR_BB to BB. Any other definition
6326 would allow the expression to be hoisted out of loops, even if
6327 the expression wasn't a loop invariant.
6328
6329 Contrast this to reachability for PRE where an expression is
6330 considered reachable if *any* path reaches instead of *all*
6331 paths. */
6332
6333static int
6334hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited)
e2d2ed72 6335 basic_block expr_bb;
bb457bd9 6336 int expr_index;
e2d2ed72 6337 basic_block bb;
bb457bd9
JL
6338 char *visited;
6339{
6340 edge pred;
283a2545 6341 int visited_allocated_locally = 0;
589005ff 6342
bb457bd9
JL
6343
6344 if (visited == NULL)
6345 {
8e42ace1 6346 visited_allocated_locally = 1;
d55bc081 6347 visited = xcalloc (last_basic_block, 1);
bb457bd9
JL
6348 }
6349
e2d2ed72 6350 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
bb457bd9 6351 {
e2d2ed72 6352 basic_block pred_bb = pred->src;
bb457bd9
JL
6353
6354 if (pred->src == ENTRY_BLOCK_PTR)
6355 break;
f305679f
JH
6356 else if (pred_bb == expr_bb)
6357 continue;
0b17ab2f 6358 else if (visited[pred_bb->index])
bb457bd9 6359 continue;
c4c81601 6360
bb457bd9 6361 /* Does this predecessor generate this expression? */
0b17ab2f 6362 else if (TEST_BIT (comp[pred_bb->index], expr_index))
bb457bd9 6363 break;
0b17ab2f 6364 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
bb457bd9 6365 break;
c4c81601 6366
bb457bd9
JL
6367 /* Not killed. */
6368 else
6369 {
0b17ab2f 6370 visited[pred_bb->index] = 1;
bb457bd9
JL
6371 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
6372 pred_bb, visited))
6373 break;
6374 }
6375 }
589005ff 6376 if (visited_allocated_locally)
283a2545 6377 free (visited);
c4c81601 6378
bb457bd9
JL
6379 return (pred == NULL);
6380}
6381\f
6382/* Actually perform code hoisting. */
c4c81601 6383
bb457bd9
JL
6384static void
6385hoist_code ()
6386{
e0082a72 6387 basic_block bb, dominated;
c635a1ec
DB
6388 basic_block *domby;
6389 unsigned int domby_len;
6390 unsigned int i,j;
bb457bd9 6391 struct expr **index_map;
c4c81601 6392 struct expr *expr;
bb457bd9 6393
d55bc081 6394 sbitmap_vector_zero (hoist_exprs, last_basic_block);
bb457bd9
JL
6395
6396 /* Compute a mapping from expression number (`bitmap_index') to
6397 hash table entry. */
6398
02280659
ZD
6399 index_map = (struct expr **) xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
6400 for (i = 0; i < expr_hash_table.size; i++)
6401 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601 6402 index_map[expr->bitmap_index] = expr;
bb457bd9
JL
6403
6404 /* Walk over each basic block looking for potentially hoistable
 6405 expressions; nothing gets hoisted from the entry block. */
e0082a72 6406 FOR_EACH_BB (bb)
bb457bd9
JL
6407 {
6408 int found = 0;
6409 int insn_inserted_p;
6410
c635a1ec 6411 domby_len = get_dominated_by (dominators, bb, &domby);
bb457bd9
JL
6412 /* Examine each expression that is very busy at the exit of this
6413 block. These are the potentially hoistable expressions. */
e0082a72 6414 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
bb457bd9
JL
6415 {
6416 int hoistable = 0;
c4c81601 6417
c635a1ec
DB
6418 if (TEST_BIT (hoist_vbeout[bb->index], i)
6419 && TEST_BIT (transpout[bb->index], i))
bb457bd9
JL
6420 {
6421 /* We've found a potentially hoistable expression, now
6422 we look at every block BB dominates to see if it
6423 computes the expression. */
c635a1ec 6424 for (j = 0; j < domby_len; j++)
bb457bd9 6425 {
c635a1ec 6426 dominated = domby[j];
bb457bd9 6427 /* Ignore self dominance. */
c635a1ec 6428 if (bb == dominated)
bb457bd9 6429 continue;
bb457bd9
JL
6430 /* We've found a dominated block, now see if it computes
6431 the busy expression and whether or not moving that
6432 expression to the "beginning" of that block is safe. */
e0082a72 6433 if (!TEST_BIT (antloc[dominated->index], i))
bb457bd9
JL
6434 continue;
6435
6436 /* Note if the expression would reach the dominated block
589005ff 6437 unimpaired if it was placed at the end of BB.
bb457bd9
JL
6438
6439 Keep track of how many times this expression is hoistable
6440 from a dominated block into BB. */
e0082a72 6441 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
bb457bd9
JL
6442 hoistable++;
6443 }
6444
ff7cc307 6445 /* If we found more than one hoistable occurrence of this
bb457bd9
JL
6446 expression, then note it in the bitmap of expressions to
6447 hoist. It makes no sense to hoist things which are computed
6448 in only one BB, and doing so tends to pessimize register
6449 allocation. One could increase this value to try harder
6450 to avoid any possible code expansion due to register
6451 allocation issues; however experiments have shown that
6452 the vast majority of hoistable expressions are only movable
6453 from two successors, so raising this threshhold is likely
6454 to nullify any benefit we get from code hoisting. */
6455 if (hoistable > 1)
6456 {
e0082a72 6457 SET_BIT (hoist_exprs[bb->index], i);
bb457bd9
JL
6458 found = 1;
6459 }
6460 }
6461 }
bb457bd9
JL
6462 /* If we found nothing to hoist, then quit now. */
6463 if (! found)
c635a1ec
DB
6464 {
6465 free (domby);
bb457bd9 6466 continue;
c635a1ec 6467 }
bb457bd9
JL
6468
6469 /* Loop over all the hoistable expressions. */
e0082a72 6470 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
bb457bd9
JL
6471 {
6472 /* We want to insert the expression into BB only once, so
6473 note when we've inserted it. */
6474 insn_inserted_p = 0;
6475
6476 /* These tests should be the same as the tests above. */
e0082a72 6477 if (TEST_BIT (hoist_vbeout[bb->index], i))
bb457bd9
JL
6478 {
6479 /* We've found a potentially hoistable expression, now
6480 we look at every block BB dominates to see if it
6481 computes the expression. */
c635a1ec 6482 for (j = 0; j < domby_len; j++)
bb457bd9 6483 {
c635a1ec 6484 dominated = domby[j];
bb457bd9 6485 /* Ignore self dominance. */
c635a1ec 6486 if (bb == dominated)
bb457bd9
JL
6487 continue;
6488
6489 /* We've found a dominated block, now see if it computes
6490 the busy expression and whether or not moving that
6491 expression to the "beginning" of that block is safe. */
e0082a72 6492 if (!TEST_BIT (antloc[dominated->index], i))
bb457bd9
JL
6493 continue;
6494
6495 /* The expression is computed in the dominated block and
6496 it would be safe to compute it at the start of the
6497 dominated block. Now we have to determine if the
ff7cc307 6498 expression would reach the dominated block if it was
bb457bd9 6499 placed at the end of BB. */
e0082a72 6500 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
bb457bd9
JL
6501 {
6502 struct expr *expr = index_map[i];
6503 struct occr *occr = expr->antic_occr;
6504 rtx insn;
6505 rtx set;
6506
ff7cc307 6507 /* Find the right occurrence of this expression. */
e0082a72 6508 while (BLOCK_FOR_INSN (occr->insn) != dominated && occr)
bb457bd9
JL
6509 occr = occr->next;
6510
6511 /* Should never happen. */
6512 if (!occr)
6513 abort ();
6514
6515 insn = occr->insn;
589005ff 6516
bb457bd9
JL
6517 set = single_set (insn);
6518 if (! set)
6519 abort ();
6520
6521 /* Create a pseudo-reg to store the result of reaching
6522 expressions into. Get the mode for the new pseudo
6523 from the mode of the original destination pseudo. */
6524 if (expr->reaching_reg == NULL)
6525 expr->reaching_reg
6526 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
6527
10d1bb36
JH
6528 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
6529 delete_insn (insn);
6530 occr->deleted_p = 1;
6531 if (!insn_inserted_p)
bb457bd9 6532 {
10d1bb36
JH
6533 insert_insn_end_bb (index_map[i], bb, 0);
6534 insn_inserted_p = 1;
bb457bd9
JL
6535 }
6536 }
6537 }
6538 }
6539 }
c635a1ec 6540 free (domby);
bb457bd9 6541 }
c4c81601 6542
8e42ace1 6543 free (index_map);
bb457bd9
JL
6544}
6545
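/* An illustration of the transformation performed by hoist_code above;
   this is an assumed example, not taken from any actual test case.
   Given source such as

	    if (c)
	      x = a * b;
	    else
	      y = a * b;

   the expression a*b is very busy at the exit of the block ending in
   the branch and is computed in more than one block dominated by it
   (hoistable > 1), so a single evaluation is placed at the end of the
   dominator via insert_insn_end_bb and the dominated computations are
   redirected through the new pseudo:

	    t = a * b;
	    if (c)
	      x = t;
	    else
	      y = t;

   The payoff is smaller code; hoisting an expression computed in only
   one place would merely lengthen the live range of T, which is why
   the pass insists on more than one hoistable occurrence.  */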
/* Top level routine to perform one code hoisting (aka unification) pass

   Return nonzero if a change was made.  */

static int
one_code_hoisting_pass ()
{
  int changed = 0;

  alloc_hash_table (max_cuid, &expr_hash_table, 0);
  compute_hash_table (&expr_hash_table);
  if (gcse_file)
    dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);

  if (expr_hash_table.n_elems > 0)
    {
      alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
      compute_code_hoist_data ();
      hoist_code ();
      free_code_hoist_mem ();
    }

  free_hash_table (&expr_hash_table);

  return changed;
}
\f
/* Here we provide the things required to do store motion towards
   the exit.  In order for this to be effective, gcse also needed to
   be taught how to move a load when it is killed only by a store to
   itself.

	    int i;
	    float a[10];

	    void foo(float scale)
	    {
	      for (i=0; i<10; i++)
		a[i] *= scale;
	    }

   'i' is both loaded and stored to in the loop.  Normally, gcse cannot move
   the load out since it's live around the loop, and stored at the bottom
   of the loop.

     The 'Load Motion' referred to and implemented in this file is
   an enhancement to gcse which, when using edge based LCM, recognizes
   this situation and allows gcse to move the load out of the loop.

   Once gcse has hoisted the load, store motion can then push this
   load towards the exit, and we end up with no loads or stores of 'i'
   in the loop.  */

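/* To make the example above concrete -- this sketch shows the intended
   end result at the source level, it is not actual compiler output.
   With 'i' and 'a' declared as above, after load motion hoists the
   load of 'i' and store motion sinks the store, foo behaves as if it
   were written

	    void foo (float scale)
	    {
	      int t;
	      for (t = 0; t < 10; t++)
		a[t] *= scale;
	      i = t;
	    }

   where 't' stands for the reaching register: the loop body touches
   only the register, and memory for 'i' is written once after the
   loop.  */
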
/* This will search the ldst list for a matching expression.  If it
   doesn't find one, we create one and initialize it.  */

static struct ls_expr *
ldst_entry (x)
     rtx x;
{
  struct ls_expr * ptr;

  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    if (expr_equiv_p (ptr->pattern, x))
      break;

  if (!ptr)
    {
      ptr = (struct ls_expr *) xmalloc (sizeof (struct ls_expr));

      ptr->next = pre_ldst_mems;
      ptr->expr = NULL;
      ptr->pattern = x;
      ptr->pattern_regs = NULL_RTX;
      ptr->loads = NULL_RTX;
      ptr->stores = NULL_RTX;
      ptr->reaching_reg = NULL_RTX;
      ptr->invalid = 0;
      ptr->index = 0;
      ptr->hash_index = 0;
      pre_ldst_mems = ptr;
    }

  return ptr;
}

/* Free up an individual ldst entry.  */

static void
free_ldst_entry (ptr)
     struct ls_expr * ptr;
{
  free_INSN_LIST_list (& ptr->loads);
  free_INSN_LIST_list (& ptr->stores);

  free (ptr);
}

/* Free up all memory associated with the ldst list.  */

static void
free_ldst_mems ()
{
  while (pre_ldst_mems)
    {
      struct ls_expr * tmp = pre_ldst_mems;

      pre_ldst_mems = pre_ldst_mems->next;

      free_ldst_entry (tmp);
    }

  pre_ldst_mems = NULL;
}

/* Dump debugging info about the ldst list.  */

static void
print_ldst_list (file)
     FILE * file;
{
  struct ls_expr * ptr;

  fprintf (file, "LDST list:\n");

  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      fprintf (file, "  Pattern (%3d): ", ptr->index);

      print_rtl (file, ptr->pattern);

      fprintf (file, "\n	 Loads : ");

      if (ptr->loads)
	print_rtl (file, ptr->loads);
      else
	fprintf (file, "(nil)");

      fprintf (file, "\n	Stores : ");

      if (ptr->stores)
	print_rtl (file, ptr->stores);
      else
	fprintf (file, "(nil)");

      fprintf (file, "\n\n");
    }

  fprintf (file, "\n");
}

/* Return the entry for X if X is in the list of ldst only expressions,
   or NULL if it is not.  */

static struct ls_expr *
find_rtx_in_ldst (x)
     rtx x;
{
  struct ls_expr * ptr;

  for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
    if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
      return ptr;

  return NULL;
}

/* Assign each element of the list of mems a monotonically increasing value.  */

static int
enumerate_ldsts ()
{
  struct ls_expr * ptr;
  int n = 0;

  for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
    ptr->index = n++;

  return n;
}

/* Return first item in the list.  */

static inline struct ls_expr *
first_ls_expr ()
{
  return pre_ldst_mems;
}

/* Return the next item in the list after the specified one.  */

static inline struct ls_expr *
next_ls_expr (ptr)
     struct ls_expr * ptr;
{
  return ptr->next;
}
\f
/* Load Motion for loads which only kill themselves.  */

/* Return true if x is a simple MEM operation, with no registers or
   side effects.  These are the types of loads we consider for the
   ld_motion list, otherwise we let the usual aliasing take care of it.  */

static int
simple_mem (x)
     rtx x;
{
  if (GET_CODE (x) != MEM)
    return 0;

  if (MEM_VOLATILE_P (x))
    return 0;

  if (GET_MODE (x) == BLKmode)
    return 0;

  /* If we are handling exceptions, we must be careful with memory references
     that may trap.  If we are not, the behavior is undefined, so we may just
     continue.  */
  if (flag_non_call_exceptions && may_trap_p (x))
    return 0;

  if (side_effects_p (x))
    return 0;

  /* Do not consider function arguments passed on stack.  */
  if (reg_mentioned_p (stack_pointer_rtx, x))
    return 0;

  if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
    return 0;

  return 1;
}
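
/* A few illustrative examples of the simple_mem criteria; these are
   assumed examples, not derived from any particular target:

	    static float a[10];
	    extern volatile int v;

	    ... = a[2];		- accepted: plain, non-volatile memory
	    ... = v;		- rejected: MEM_VOLATILE_P is set
	    ... = *sp_based;	- rejected: the address mentions the
				  stack pointer (an argument slot)

   Anything rejected here is simply left to the ordinary alias-based
   handling rather than entered on the ld_motion list.  */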

/* Make sure there isn't a buried reference in this pattern anywhere.
   If there is, invalidate the entry for it since we're not capable
   of fixing it up just yet.  We have to be sure we know about ALL
   loads since the aliasing code will allow all entries in the
   ld_motion list to not alias each other.  If we miss a load, we will
   get the wrong value since gcse might common it and we won't know to
   fix it up.  */

static void
invalidate_any_buried_refs (x)
     rtx x;
{
  const char * fmt;
  int i, j;
  struct ls_expr * ptr;

  /* Invalidate it in the list.  */
  if (GET_CODE (x) == MEM && simple_mem (x))
    {
      ptr = ldst_entry (x);
      ptr->invalid = 1;
    }

  /* Recursively process the insn.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	invalidate_any_buried_refs (XEXP (x, i));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  invalidate_any_buried_refs (XVECEXP (x, i, j));
    }
}

/* Find all the 'simple' MEMs which are used in LOADs and STORES.  Simple
   being defined as MEM loads and stores to symbols, with no side effects
   and no registers in the expression.  For a MEM destination, we also
   check that the insn is still valid if we replace the destination with a
   REG, as is done in update_ld_motion_stores.  Any uses/defs which don't
   match these criteria are invalidated and trimmed out later.  */

static void
compute_ld_motion_mems ()
{
  struct ls_expr * ptr;
  basic_block bb;
  rtx insn;

  pre_ldst_mems = NULL;

  FOR_EACH_BB (bb)
    {
      for (insn = bb->head;
	   insn && insn != NEXT_INSN (bb->end);
	   insn = NEXT_INSN (insn))
	{
	  if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	    {
	      if (GET_CODE (PATTERN (insn)) == SET)
		{
		  rtx src = SET_SRC (PATTERN (insn));
		  rtx dest = SET_DEST (PATTERN (insn));

		  /* Check for a simple LOAD...  */
		  if (GET_CODE (src) == MEM && simple_mem (src))
		    {
		      ptr = ldst_entry (src);
		      if (GET_CODE (dest) == REG)
			ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
		      else
			ptr->invalid = 1;
		    }
		  else
		    {
		      /* Make sure there isn't a buried load somewhere.  */
		      invalidate_any_buried_refs (src);
		    }

		  /* Check for stores.  Don't worry about aliased ones, they
		     will block any movement we might do later.  We only care
		     about this exact pattern since those are the only
		     circumstances in which we will ignore the aliasing
		     info.  */
		  if (GET_CODE (dest) == MEM && simple_mem (dest))
		    {
		      ptr = ldst_entry (dest);

		      if (GET_CODE (src) != MEM
			  && GET_CODE (src) != ASM_OPERANDS
			  /* Check for REG manually since want_to_gcse_p
			     returns 0 for all REGs.  */
			  && (REG_P (src) || want_to_gcse_p (src)))
			ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
		      else
			ptr->invalid = 1;
		    }
		}
	      else
		invalidate_any_buried_refs (PATTERN (insn));
	    }
	}
    }
}

/* Remove any references that have been either invalidated or are not in the
   expression list for pre gcse.  */

static void
trim_ld_motion_mems ()
{
  struct ls_expr * last = NULL;
  struct ls_expr * ptr = first_ls_expr ();

  while (ptr != NULL)
    {
      int del = ptr->invalid;
      struct expr * expr = NULL;

      /* Delete if entry has been made invalid.  */
      if (!del)
	{
	  unsigned int i;

	  del = 1;
	  /* Delete if we cannot find this mem in the expression list.  */
	  for (i = 0; i < expr_hash_table.size && del; i++)
	    {
	      for (expr = expr_hash_table.table[i];
		   expr != NULL;
		   expr = expr->next_same_hash)
		if (expr_equiv_p (expr->expr, ptr->pattern))
		  {
		    del = 0;
		    break;
		  }
	    }
	}

      if (del)
	{
	  if (last != NULL)
	    {
	      last->next = ptr->next;
	      free_ldst_entry (ptr);
	      ptr = last->next;
	    }
	  else
	    {
	      pre_ldst_mems = pre_ldst_mems->next;
	      free_ldst_entry (ptr);
	      ptr = pre_ldst_mems;
	    }
	}
      else
	{
	  /* Set the expression field if we are keeping it.  */
	  last = ptr;
	  ptr->expr = expr;
	  ptr = ptr->next;
	}
    }

  /* Show the world what we've found.  */
  if (gcse_file && pre_ldst_mems != NULL)
    print_ldst_list (gcse_file);
}

/* This routine will take an expression which we are replacing with
   a reaching register, and update any stores that are needed if
   that expression is in the ld_motion list.  Stores are updated by
   copying their SRC to the reaching register, and then storing
   the reaching register into the store location.  This keeps the
   correct value in the reaching register for the loads.  */

static void
update_ld_motion_stores (expr)
     struct expr * expr;
{
  struct ls_expr * mem_ptr;

  if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
    {
      /* We could try to find just the REACHED stores, but it shouldn't
	 matter to set the reaching reg everywhere...  some might be
	 dead and should be eliminated later.  */

      /* We replace (set mem expr) with (set reg expr) (set mem reg)
	 where reg is the reaching reg used in the load.  We checked in
	 compute_ld_motion_mems that we can replace (set mem expr) with
	 (set reg expr) in that insn.  */
      rtx list = mem_ptr->stores;

      for ( ; list != NULL_RTX; list = XEXP (list, 1))
	{
	  rtx insn = XEXP (list, 0);
	  rtx pat = PATTERN (insn);
	  rtx src = SET_SRC (pat);
	  rtx reg = expr->reaching_reg;
	  rtx copy, new;

	  /* If we've already copied it, continue.  */
	  if (expr->reaching_reg == src)
	    continue;

	  if (gcse_file)
	    {
	      fprintf (gcse_file, "PRE:  store updated with reaching reg ");
	      print_rtl (gcse_file, expr->reaching_reg);
	      fprintf (gcse_file, ":\n	");
	      print_inline_rtx (gcse_file, insn, 8);
	      fprintf (gcse_file, "\n");
	    }

	  copy = gen_move_insn (reg, copy_rtx (SET_SRC (pat)));
	  new = emit_insn_before (copy, insn);
	  record_one_set (REGNO (reg), new);
	  SET_SRC (pat) = reg;

	  /* Un-recognize this pattern since it's probably different now.  */
	  INSN_CODE (insn) = -1;
	  gcse_create_count++;
	}
    }
}
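
/* A sketch of the rewrite performed above, in RTL form; the register
   numbers are made up for the illustration.  A store that feeds a
   moved load, say

	    (set (mem:SI (symbol_ref:SI ("i"))) (reg:SI 100))

   becomes the pair

	    (set (reg:SI 200) (reg:SI 100))
	    (set (mem:SI (symbol_ref:SI ("i"))) (reg:SI 200))

   where (reg:SI 200) stands for expr->reaching_reg, so subsequent
   loads of 'i' that were redirected to the reaching register still
   observe the stored value.  */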
\f
/* Store motion code.  */

#define ANTIC_STORE_LIST(x)		((x)->loads)
#define AVAIL_STORE_LIST(x)		((x)->stores)
#define LAST_AVAIL_CHECK_FAILURE(x)	((x)->reaching_reg)

/* This is used to communicate the target array we want to use in the
   reg_set_info routine when called via the note_stores mechanism.  */
static int * regvec;

/* And current insn, for the same routine.  */
static rtx compute_store_table_current_insn;

/* Used in computing the reverse edge graph bit vectors.  */
static sbitmap * st_antloc;

/* Global holding the number of store expressions we are dealing with.  */
static int num_stores;

/* Record, in the array pointed to by regvec, the uid of the insn that
   sets DEST.  Called from note_stores.  */

static void
reg_set_info (dest, setter, data)
     rtx dest, setter ATTRIBUTE_UNUSED;
     void * data ATTRIBUTE_UNUSED;
{
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
}

/* Return false if any of the registers in list X are killed by a set
   recorded in the array REGS_SET.  */

static bool
store_ops_ok (x, regs_set)
     rtx x;
     int *regs_set;
{
  rtx reg;

  for (; x; x = XEXP (x, 1))
    {
      reg = XEXP (x, 0);
      if (regs_set[REGNO (reg)])
	return false;
    }

  return true;
}

/* Returns a list of registers mentioned in X.  */

static rtx
extract_mentioned_regs (x)
     rtx x;
{
  return extract_mentioned_regs_helper (x, NULL_RTX);
}

/* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
   registers.  */

static rtx
extract_mentioned_regs_helper (x, accum)
     rtx x;
     rtx accum;
{
  int i;
  enum rtx_code code;
  const char * fmt;

  /* Repeat is used to turn tail-recursion into iteration.  */
 repeat:

  if (x == 0)
    return accum;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      return alloc_EXPR_LIST (0, x, accum);

    case MEM:
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
      /* We do not run this function with arguments having side effects.  */
      abort ();

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return accum;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);

  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx tem = XEXP (x, i);

	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.  */
	  if (i == 0)
	    {
	      x = tem;
	      goto repeat;
	    }

	  accum = extract_mentioned_regs_helper (tem, accum);
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = 0; j < XVECLEN (x, i); j++)
	    accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
	}
    }

  return accum;
}

/* Determine whether INSN is a MEM store pattern that we will consider moving.
   REGS_SET_BEFORE records the registers set before (and including) the
   current insn, REGS_SET_AFTER records the registers set after (and
   including) the insn in this basic block.  We must be passing through BB
   from head to end, as we are using this fact to speed things up.

   The results are stored this way:

   -- the first anticipatable expression is added into ANTIC_STORE_LIST
   -- if the processed expression is not anticipatable, NULL_RTX is added
      there instead, so that we can use it as an indicator that no further
      expression of this type may be anticipatable
   -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
      consequently, all of them but this head are dead and may be deleted.
   -- if the expression is not available, the insn because of which it fails
      to be available is stored in reaching_reg.

   Things are complicated a bit by the fact that there already may be stores
   to the same MEM from other blocks; also the caller must take care of the
   necessary cleanup of the temporary markers after the end of the basic
   block.  */

static void
find_moveable_store (insn, regs_set_before, regs_set_after)
     rtx insn;
     int *regs_set_before;
     int *regs_set_after;
{
  struct ls_expr * ptr;
  rtx dest, set, tmp;
  int check_anticipatable, check_available;
  basic_block bb = BLOCK_FOR_INSN (insn);

  set = single_set (insn);
  if (!set)
    return;

  dest = SET_DEST (set);

  if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
      || GET_MODE (dest) == BLKmode)
    return;

  if (side_effects_p (dest))
    return;

  /* If we are handling exceptions, we must be careful with memory references
     that may trap.  If we are not, the behavior is undefined, so we may just
     continue.  */
  if (flag_non_call_exceptions && may_trap_p (dest))
    return;

  ptr = ldst_entry (dest);
  if (!ptr->pattern_regs)
    ptr->pattern_regs = extract_mentioned_regs (dest);

  /* Do not check for anticipatability if we either found one anticipatable
     store already, or tested for one and found out that it was killed.  */
  check_anticipatable = 0;
  if (!ANTIC_STORE_LIST (ptr))
    check_anticipatable = 1;
  else
    {
      tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
      if (tmp != NULL_RTX
	  && BLOCK_FOR_INSN (tmp) != bb)
	check_anticipatable = 1;
    }
  if (check_anticipatable)
    {
      if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
	tmp = NULL_RTX;
      else
	tmp = insn;
      ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
						ANTIC_STORE_LIST (ptr));
    }

  /* It is not necessary to check whether the store is available if we did
     it successfully before; if we failed before, do not bother to check
     until we reach the insn that caused us to fail.  */
  check_available = 0;
  if (!AVAIL_STORE_LIST (ptr))
    check_available = 1;
  else
    {
      tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
      if (BLOCK_FOR_INSN (tmp) != bb)
	check_available = 1;
    }
  if (check_available)
    {
      /* Check whether we have already passed the insn at which the check
	 failed last time.  */
      if (LAST_AVAIL_CHECK_FAILURE (ptr))
	{
	  for (tmp = bb->end;
	       tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
	       tmp = PREV_INSN (tmp))
	    continue;
	  if (tmp == insn)
	    check_available = 0;
	}
      else
	check_available = store_killed_after (dest, ptr->pattern_regs, insn,
					      bb, regs_set_after,
					      &LAST_AVAIL_CHECK_FAILURE (ptr));
    }
  if (!check_available)
    AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
}
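
/* An illustration of the lists maintained above, on a hypothetical
   block (insns abbreviated; none of these come from real RTL dumps):

	    insn 1:  r9 = 0
	    insn 2:  x = r1
	    insn 3:  r2 = x
	    insn 4:  x = r3

   Insn 2 goes on ANTIC_STORE_LIST: no load or clobber of 'x' precedes
   it in the block.  Insn 4 ends up at the head of AVAIL_STORE_LIST:
   nothing kills 'x' after it.  Insn 2 is not available, because the
   load at insn 3 kills it; that failure point is remembered in
   LAST_AVAIL_CHECK_FAILURE so the availability scan for insn 4 can
   stop early instead of rescanning the whole block.  */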

/* Find available and anticipatable stores.  */

static int
compute_store_table ()
{
  int ret;
  basic_block bb;
  unsigned regno;
  rtx insn, pat, tmp;
  int *last_set_in, *already_set;
  struct ls_expr * ptr, **prev_next_ptr_ptr;

  max_gcse_regno = max_reg_num ();

  reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
						       max_gcse_regno);
  sbitmap_vector_zero (reg_set_in_block, last_basic_block);
  pre_ldst_mems = 0;
  last_set_in = xmalloc (sizeof (int) * max_gcse_regno);
  already_set = xmalloc (sizeof (int) * max_gcse_regno);

  /* Find all the stores we care about.  */
  FOR_EACH_BB (bb)
    {
      /* First compute the registers set in this block.  */
      memset (last_set_in, 0, sizeof (int) * max_gcse_regno);
      regvec = last_set_in;

      for (insn = bb->head;
	   insn != NEXT_INSN (bb->end);
	   insn = NEXT_INSN (insn))
	{
	  if (! INSN_P (insn))
	    continue;

	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP
	      if (NON_SAVING_SETJMP
		  && find_reg_note (insn, REG_SETJMP, NULL_RTX))
		clobbers_all = true;
#endif

	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		if (clobbers_all
		    || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
		  last_set_in[regno] = INSN_UID (insn);
	    }

	  pat = PATTERN (insn);
	  compute_store_table_current_insn = insn;
	  note_stores (pat, reg_set_info, NULL);
	}

      /* Record the set registers.  */
      for (regno = 0; regno < max_gcse_regno; regno++)
	if (last_set_in[regno])
	  SET_BIT (reg_set_in_block[bb->index], regno);

      /* Now find the stores.  */
      memset (already_set, 0, sizeof (int) * max_gcse_regno);
      regvec = already_set;
      for (insn = bb->head;
	   insn != NEXT_INSN (bb->end);
	   insn = NEXT_INSN (insn))
	{
	  if (! INSN_P (insn))
	    continue;

	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP
	      if (NON_SAVING_SETJMP
		  && find_reg_note (insn, REG_SETJMP, NULL_RTX))
		clobbers_all = true;
#endif

	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		if (clobbers_all
		    || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
		  already_set[regno] = 1;
	    }

	  pat = PATTERN (insn);
	  note_stores (pat, reg_set_info, NULL);

	  /* Now that we've marked regs, look for stores.  */
	  find_moveable_store (insn, already_set, last_set_in);

	  /* Unmark regs that are no longer set.  */
	  for (regno = 0; regno < max_gcse_regno; regno++)
	    if (last_set_in[regno] == INSN_UID (insn))
	      last_set_in[regno] = 0;
	}

      /* Clear temporary marks.  */
      for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
	{
	  LAST_AVAIL_CHECK_FAILURE (ptr) = NULL_RTX;
	  if (ANTIC_STORE_LIST (ptr)
	      && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
	    ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
	}
    }

  /* Remove the stores that are not available anywhere, as there will
     be no opportunity to optimize them.  */
  for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
       ptr != NULL;
       ptr = *prev_next_ptr_ptr)
    {
      if (!AVAIL_STORE_LIST (ptr))
	{
	  *prev_next_ptr_ptr = ptr->next;
	  free_ldst_entry (ptr);
	}
      else
	prev_next_ptr_ptr = &ptr->next;
    }

  ret = enumerate_ldsts ();

  if (gcse_file)
    {
      fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
      print_ldst_list (gcse_file);
    }

  free (last_set_in);
  free (already_set);
  return ret;
}
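
/* A worked example of the two arrays used above; the uids are made up.
   Suppose a block contains

	    insn 10:  r5 = ...
	    insn 11:  mem[r5] = r7
	    insn 12:  r7 = ...

   During the second walk, when insn 11 is reached, ALREADY_SET[r5] is
   nonzero because r5 is set before the store, so a store through r5
   cannot be anticipatable at the head of the block.  LAST_SET_IN[r7]
   still holds uid 12, so a store whose address mentioned r7 could not
   be available at the end of the block; the per-insn unmarking step
   keeps LAST_SET_IN describing only sets at or after the current
   insn.  */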

/* Check to see if the load X is aliased with STORE_PATTERN.  */

static bool
load_kills_store (x, store_pattern)
     rtx x, store_pattern;
{
  if (true_dependence (x, GET_MODE (x), store_pattern, rtx_addr_varies_p))
    return true;
  return false;
}

/* Go through the entire insn X, looking for any loads which might alias
   STORE_PATTERN.  Return true if found.  */

static bool
find_loads (x, store_pattern)
     rtx x, store_pattern;
{
  const char * fmt;
  int i, j;
  int ret = false;

  if (!x)
    return false;

  if (GET_CODE (x) == SET)
    x = SET_SRC (x);

  if (GET_CODE (x) == MEM)
    {
      if (load_kills_store (x, store_pattern))
	return true;
    }

  /* Recursively process the insn.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
    {
      if (fmt[i] == 'e')
	ret |= find_loads (XEXP (x, i), store_pattern);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  ret |= find_loads (XVECEXP (x, i, j), store_pattern);
    }
  return ret;
}

/* Check if INSN kills the store pattern X (is aliased with it).
   Return true if it does.  */

static bool
store_killed_in_insn (x, x_regs, insn)
     rtx x, x_regs, insn;
{
  rtx reg, base;

  if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
    return false;

  if (GET_CODE (insn) == CALL_INSN)
    {
      /* A normal or pure call might read from pattern,
	 but a const call will not.  */
      if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
	return true;

      /* But even a const call reads its parameters.  Check whether the
	 base of some of the registers used in the mem is the stack
	 pointer.  Note that X_REGS is an EXPR_LIST, so we look at each
	 list element's operand, not at the list node itself.  */
      for (reg = x_regs; reg; reg = XEXP (reg, 1))
	{
	  base = find_base_term (XEXP (reg, 0));
	  if (!base
	      || (GET_CODE (base) == ADDRESS
		  && GET_MODE (base) == Pmode
		  && XEXP (base, 0) == stack_pointer_rtx))
	    return true;
	}

      return false;
    }

  if (GET_CODE (PATTERN (insn)) == SET)
    {
      rtx pat = PATTERN (insn);
      /* Check for memory stores to aliased objects.  */
      if (GET_CODE (SET_DEST (pat)) == MEM && !expr_equiv_p (SET_DEST (pat), x))
	/* Pretend it's a load and check for aliasing.  */
	if (find_loads (SET_DEST (pat), x))
	  return true;
      return find_loads (SET_SRC (pat), x);
    }
  else
    return find_loads (PATTERN (insn), x);
}

/* Return true if the expression X is loaded or clobbered on or after INSN
   within basic block BB.  REGS_SET_AFTER records the registers set in or
   after the insn.  X_REGS is the list of registers mentioned in X.  If the
   store is killed, return in FAIL_INSN the last insn in which the kill
   occurs.  */

static bool
store_killed_after (x, x_regs, insn, bb, regs_set_after, fail_insn)
     rtx x, x_regs, insn;
     basic_block bb;
     int *regs_set_after;
     rtx *fail_insn;
{
  rtx last = bb->end, act;

  if (!store_ops_ok (x_regs, regs_set_after))
    {
      /* We do not know where it will happen.  */
      if (fail_insn)
	*fail_insn = NULL_RTX;
      return true;
    }

  /* Scan from the end, so that fail_insn is determined correctly.  */
  for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
    if (store_killed_in_insn (x, x_regs, act))
      {
	if (fail_insn)
	  *fail_insn = act;
	return true;
      }

  return false;
}

/* Return true if the expression X is loaded or clobbered on or before INSN
   within basic block BB.  X_REGS is the list of registers mentioned in X.
   REGS_SET_BEFORE records the registers set before or in this insn.  */

static bool
store_killed_before (x, x_regs, insn, bb, regs_set_before)
     rtx x, x_regs, insn;
     basic_block bb;
     int *regs_set_before;
{
  rtx first = bb->head;

  if (!store_ops_ok (x_regs, regs_set_before))
    return true;

  for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
    if (store_killed_in_insn (x, x_regs, insn))
      return true;

  return false;
}

/* Fill in the available, anticipatable, transparent and kill vectors,
   based on the lists of available and anticipatable stores.  */

static void
build_store_vectors ()
{
  basic_block bb;
  int *regs_set_in_block;
  rtx insn, st;
  struct ls_expr * ptr;
  unsigned regno;

  /* Build the gen_vector.  This is any store in the table which is not killed
     by aliasing later in its block.  */
  ae_gen = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (ae_gen, last_basic_block);

  st_antloc = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (st_antloc, last_basic_block);

  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
	{
	  insn = XEXP (st, 0);
	  bb = BLOCK_FOR_INSN (insn);

	  /* If we've already seen an available expression in this block,
	     we can delete this one (it occurs earlier in the block).  We'll
	     copy the SRC expression to an unused register in case there
	     are any side effects.  */
	  if (TEST_BIT (ae_gen[bb->index], ptr->index))
	    {
	      rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
	      if (gcse_file)
		fprintf (gcse_file, "Removing redundant store:\n");
	      replace_store_insn (r, XEXP (st, 0), bb);
	      continue;
	    }
	  SET_BIT (ae_gen[bb->index], ptr->index);
	}

      for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
	{
	  insn = XEXP (st, 0);
	  bb = BLOCK_FOR_INSN (insn);
	  SET_BIT (st_antloc[bb->index], ptr->index);
	}
    }

  ae_kill = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (ae_kill, last_basic_block);

  transp = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (transp, last_basic_block);
  regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);

  FOR_EACH_BB (bb)
    {
      for (regno = 0; regno < max_gcse_regno; regno++)
	regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);

      for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
	{
	  if (store_killed_after (ptr->pattern, ptr->pattern_regs, bb->head,
				  bb, regs_set_in_block, NULL))
	    {
	      /* It should not be necessary to consider the expression
		 killed if it is both anticipatable and available.  */
	      if (!TEST_BIT (st_antloc[bb->index], ptr->index)
		  || !TEST_BIT (ae_gen[bb->index], ptr->index))
		SET_BIT (ae_kill[bb->index], ptr->index);
	    }
	  else
	    SET_BIT (transp[bb->index], ptr->index);
	}
    }

  free (regs_set_in_block);

  if (gcse_file)
    {
      dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
      dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
      dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
      dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
    }
}
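
/* A small example of the four vectors computed above, for a
   hypothetical CFG with one store expression (bit 0).  For a loop
   block B that contains the only store to 'x', no conflicting load of
   'x', and no set of the address registers:

	st_antloc[B] = 1	- the store could move to the start of B
	ae_gen[B]    = 1	- the store reaches the end of B
	ae_kill[B]   = 0	- nothing in B kills it
	transp[C]    = 1	- for any block C that neither loads nor
				  stores 'x', the expression passes through

   pre_edge_rev_lcm then solves the dataflow problem on the reversed
   flowgraph to decide on which edges the sunk store must be
   inserted.  */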

/* Insert an instruction at the beginning of a basic block, and update
   the BLOCK_HEAD if needed.  */

static void
insert_insn_start_bb (insn, bb)
     rtx insn;
     basic_block bb;
{
  /* Insert at start of successor block.  */
  rtx prev = PREV_INSN (bb->head);
  rtx before = bb->head;
  while (before != 0)
    {
      if (GET_CODE (before) != CODE_LABEL
	  && (GET_CODE (before) != NOTE
	      || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
	break;
      prev = before;
      if (prev == bb->end)
	break;
      before = NEXT_INSN (before);
    }

  insn = emit_insn_after (insn, prev);

  if (gcse_file)
    {
      fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
	       bb->index);
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }
}

/* This routine will insert a store on an edge.  EXPR is the ldst entry for
   the memory reference, and E is the edge to insert it on.  Returns nonzero
   if an edge insertion was performed.  */

static int
insert_store (expr, e)
     struct ls_expr * expr;
     edge e;
{
  rtx reg, insn;
  basic_block bb;
  edge tmp;

  /* We did all the deletes before this insert, so if we didn't delete a
     store, then we haven't set the reaching reg yet either.  */
  if (expr->reaching_reg == NULL_RTX)
    return 0;

  reg = expr->reaching_reg;
  insn = gen_move_insn (copy_rtx (expr->pattern), reg);

  /* If we are inserting this expression on ALL predecessor edges of a BB,
     insert it at the start of the BB, and reset the insert bits on the other
     edges so we don't try to insert it on the other edges.  */
  bb = e->dest;
  for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
    {
      int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
      if (index == EDGE_INDEX_NO_EDGE)
	abort ();
      if (! TEST_BIT (pre_insert_map[index], expr->index))
	break;
    }

  /* If tmp is NULL, we found an insertion on every edge, blank the
     insertion vector for these edges, and insert at the start of the BB.  */
  if (!tmp && bb != EXIT_BLOCK_PTR)
    {
      for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
	{
	  int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
	  RESET_BIT (pre_insert_map[index], expr->index);
	}
      insert_insn_start_bb (insn, bb);
      return 0;
    }

  /* We can't insert on this edge, so we'll insert at the head of the
     successor's block.  See Morgan, sec 10.5.  */
  if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
    {
      insert_insn_start_bb (insn, bb);
      return 0;
    }

  insert_insn_on_edge (insn, e);

  if (gcse_file)
    {
      fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
	       e->src->index, e->dest->index);
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }

  return 1;
}
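
/* An illustration of the insertion strategy above, on a hypothetical
   CFG.  If blocks B1 and B2 both flow into B3 and the LCM results ask
   for the store on both incoming edges,

	    B1 --e1--> B3 <--e2-- B2

   then rather than splitting both edges we clear the insert bits for
   e1 and e2 and emit a single store at the start of B3 via
   insert_insn_start_bb.  An abnormal edge cannot be split at all, so
   there too the store is placed at the head of the destination block,
   per Morgan, sec 10.5.  */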

/* This routine will replace a store with a SET to a specified register.  */

static void
replace_store_insn (reg, del, bb)
     rtx reg, del;
     basic_block bb;
{
  rtx insn;

  insn = gen_move_insn (reg, SET_SRC (single_set (del)));
  insn = emit_insn_after (insn, del);

  if (gcse_file)
    {
      fprintf (gcse_file,
	       "STORE_MOTION delete insn in BB %d:\n      ", bb->index);
      print_inline_rtx (gcse_file, del, 6);
      fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n      ");
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }

  delete_insn (del);
}

/* Delete a store, but copy the value that would have been stored into
   the reaching_reg for later storing.  */

static void
delete_store (expr, bb)
     struct ls_expr * expr;
     basic_block bb;
{
  rtx reg, i, del;

  if (expr->reaching_reg == NULL_RTX)
    expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));

  reg = expr->reaching_reg;

  for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
    {
      del = XEXP (i, 0);
      if (BLOCK_FOR_INSN (del) == bb)
	{
	  /* We know there is only one since we deleted redundant
	     ones during the available computation.  */
	  replace_store_insn (reg, del, bb);
	  break;
	}
    }
}

/* Free memory used by store motion.  */

static void
free_store_memory ()
{
  free_ldst_mems ();

  if (ae_gen)
    sbitmap_vector_free (ae_gen);
  if (ae_kill)
    sbitmap_vector_free (ae_kill);
  if (transp)
    sbitmap_vector_free (transp);
  if (st_antloc)
    sbitmap_vector_free (st_antloc);
  if (pre_insert_map)
    sbitmap_vector_free (pre_insert_map);
  if (pre_delete_map)
    sbitmap_vector_free (pre_delete_map);
  if (reg_set_in_block)
    sbitmap_vector_free (reg_set_in_block);

  ae_gen = ae_kill = transp = st_antloc = NULL;
  pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
}

/* Perform store motion.  Much like gcse, except we move expressions the
   other way by looking at the flowgraph in reverse.  */

static void
store_motion ()
{
  basic_block bb;
  int x;
  struct ls_expr * ptr;
  int update_flow = 0;

  if (gcse_file)
    {
      fprintf (gcse_file, "before store motion\n");
      print_rtl (gcse_file, get_insns ());
    }

  init_alias_analysis ();

  /* Find all the available and anticipatable stores.  */
  num_stores = compute_store_table ();
  if (num_stores == 0)
    {
      sbitmap_vector_free (reg_set_in_block);
      end_alias_analysis ();
      return;
    }

  /* Now compute kill & transp vectors.  */
  build_store_vectors ();
  add_noreturn_fake_exit_edges ();

  edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
				st_antloc, ae_kill, &pre_insert_map,
				&pre_delete_map);

  /* Now we want to insert the new stores which are going to be needed.  */
  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      FOR_EACH_BB (bb)
	if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
	  delete_store (ptr, bb);

      for (x = 0; x < NUM_EDGES (edge_list); x++)
	if (TEST_BIT (pre_insert_map[x], ptr->index))
	  update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
    }

  if (update_flow)
    commit_edge_insertions ();

  free_store_memory ();
  free_edge_list (edge_list);
  remove_fake_edges ();
  end_alias_analysis ();
}

\f
/* Entry point for jump bypassing optimization pass.  */

int
bypass_jumps (file)
     FILE *file;
{
  int changed;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do.  */
  if (n_basic_blocks <= 1)
    return 0;

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    {
      if (warn_disabled_optimization)
	warning ("BYPASS disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
		 n_basic_blocks, n_edges / n_basic_blocks);
      return 0;
    }

  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_gcse_regno)
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      if (warn_disabled_optimization)
	warning ("GCSE disabled: %d basic blocks and %d registers",
		 n_basic_blocks, max_gcse_regno);

      return 0;
    }

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();

  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (get_insns ());

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (get_insns ());
  changed = one_cprop_pass (1, 1, 1);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "BYPASS of %s: %d basic blocks, ",
	       current_function_name, n_basic_blocks);
      fprintf (file, "%d bytes\n\n", bytes_used);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  return changed;
}

#include "gt-gcse.h"