/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass

*/

/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "params.h"
#include "cselib.h"
#include "intl.h"
#include "obstack.h"

/* Propagate flow information through back edges and thus enable PRE to
   move loop-invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note that much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */

/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.
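
   For example (an illustrative sketch, not taken from any particular dump),
   a typical candidate is an insn like

     (set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 102)))

   while sets whose source is a bare REG, SUBREG or constant are rejected
   by want_to_gcse_p as not worth the effort.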

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.
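
   As a small illustration (a sketch, not from any particular test case),
   given

	if (cond)
	  x = a + b;
	...
	y = a + b;	/- partially redundant -/

   PRE inserts a computation of a + b into a new pseudo on the path where it
   was missing and replaces the redundant computation with a copy:

	if (cond)
	  { x = a + b; t = x; }
	else
	  t = a + b;
	...
	y = t;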

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is gained by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle things can
   be rearranged.

   Help stamp out big monolithic functions!  */
\f
/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

struct reg_use {rtx reg_rtx; };

/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};

/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Whether the table is the expression hash table or the copy
     propagation hash table.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
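
/* For instance (a hypothetical insn stream, purely for illustration):
   given UIDs 3 (NOTE), 5 (INSN), 7 (NOTE), 9 (INSN), alloc_gcse_mem below
   assigns uid_cuid[3] = 0, uid_cuid[5] = 0, uid_cuid[7] = 1 and
   uid_cuid[9] = 1, so the two real insns get cuids 0 and 1 with no gaps,
   and cuid_insn[] maps those cuids back to the insns.  */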

/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
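
/* A minimal sketch of how clients walk this table (REGNO and R are
   placeholders; see replace_one_set below for a real example):

     struct reg_set *r;
     for (r = reg_set_table[REGNO]; r != NULL; r = r->next)
       ... r->insn is one insn that sets pseudo REGNO ...
*/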

/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by
   anything except themselves (i.e., loads and stores to a single location).
   We can then allow movement of these MEM refs with a little special
   allowance.  (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be movable towards the exit block.  */

struct ls_expr
{
  struct expr * expr;		/* Gcse expression reference for LM.  */
  rtx pattern;			/* Pattern of this mem.  */
  rtx pattern_regs;		/* List of registers mentioned by the mem.  */
  rtx loads;			/* INSN list of loads seen.  */
  rtx stores;			/* INSN list of stores seen.  */
  struct ls_expr * next;	/* Next in the list.  */
  int invalid;			/* Invalid for some reason.  */
  int index;			/* If it maps to a bitmap index.  */
  unsigned int hash_index;	/* Index when in a hash table.  */
  rtx reaching_reg;		/* Register to use when re-writing.  */
};
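
/* For example (a sketch, not from any particular dump), a load such as
   (set (reg:SI 100) (mem:SI (symbol_ref "x"))) and a store
   (set (mem:SI (symbol_ref "x")) (reg:SI 101)) would share one ls_expr
   whose PATTERN is the MEM, with the two insns collected on the loads
   and stores lists respectively.  */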

/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number for a list of insns which modify
   memory within that block.  */
static rtx * modify_mem_list;
bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;
bitmap canon_modify_mem_list_set;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;
\f
/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays.  i.e.
   rd_kill[block_num][cuid_num]
   ae_kill[block_num][expr_num]	 */

/* For reaching defs */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* for available exprs */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;

/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  basic_block current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};
\f
static void compute_can_copy (void);
static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
static void *grealloc (void *, size_t);
static void *gcse_alloc (unsigned long);
static void alloc_gcse_mem (rtx);
static void free_gcse_mem (void);
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static int get_bitmap_width (int, int, int);
static void record_one_set (int, rtx);
static void replace_one_set (int, rtx, rtx);
static void record_set_info (rtx, rtx, void *);
static void compute_sets (rtx);
static void hash_scan_insn (rtx, struct hash_table *, int);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool gcse_constant_p (rtx);
static int oprs_unchanged_p (rtx, rtx, int);
static int oprs_anticipatable_p (rtx, rtx);
static int oprs_available_p (rtx, rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
				  struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
static unsigned int hash_expr_1 (rtx, enum machine_mode, int *);
static unsigned int hash_string_1 (const char *);
static unsigned int hash_set (int, int);
static int expr_equiv_p (rtx, rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, rtx, void *);
static void compute_hash_table (struct hash_table *);
static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static void compute_hash_table_work (struct hash_table *);
static void dump_hash_table (FILE *, const char *, struct hash_table *);
static struct expr *lookup_expr (rtx, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
static int oprs_not_set_p (rtx, rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
static void compute_transp (rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
				      struct hash_table *);
static void compute_cprop_data (void);
static void find_used_regs (rtx *, void *);
static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
static void mems_conflict_for_gcse_p (rtx, rtx, void *);
static int load_killed_in_block_p (basic_block, int, rtx, int);
static void canon_list_insert (rtx, rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
static int one_cprop_pass (int, int, int);
static bool constprop_register (rtx, rtx, rtx, int);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (rtx, edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static void compute_pre_data (void);
static int pre_expr_reaches_here_p (basic_block, struct expr *,
				    basic_block);
static void insert_insn_end_bb (struct expr *, basic_block, int);
static void pre_insert_copy_insn (struct expr *, rtx);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (void);
static int one_pre_gcse_pass (int);
static void add_label_notes (rtx, rtx);
static void alloc_code_hoist_mem (int, int);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
static void hoist_code (void);
static int one_code_hoisting_pass (void);
static void alloc_rd_mem (int, int);
static void free_rd_mem (void);
static void handle_rd_kill_set (rtx, int, basic_block);
static void compute_kill_rd (void);
static void compute_rd (void);
static void alloc_avail_expr_mem (int, int);
static void free_avail_expr_mem (void);
static void compute_ae_gen (struct hash_table *);
static int expr_killed_p (rtx, basic_block);
static void compute_ae_kill (sbitmap *, sbitmap *, struct hash_table *);
static int expr_reaches_here_p (struct occr *, struct expr *, basic_block,
				int);
static rtx computing_insn (struct expr *, rtx);
static int def_reaches_here_p (rtx, rtx);
static int can_disregard_other_sets (struct reg_set **, rtx, int);
static int handle_avail_expr (rtx, struct expr *);
static int classic_gcse (void);
static int one_classic_gcse_pass (int);
static void invalidate_nonnull_info (rtx, rtx, void *);
static int delete_null_pointer_checks_1 (unsigned int *, sbitmap *, sbitmap *,
					 struct null_pointer_info *);
static rtx process_insert_insn (struct expr *);
static int pre_edge_insert (struct edge_list *, struct expr **);
static int expr_reaches_here_p_work (struct occr *, struct expr *,
				     basic_block, int, char *);
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
					 basic_block, char *);
static struct ls_expr * ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void free_ldst_mems (void);
static void print_ldst_list (FILE *);
static struct ls_expr * find_rtx_in_ldst (rtx);
static int enumerate_ldsts (void);
static inline struct ls_expr * first_ls_expr (void);
static inline struct ls_expr * next_ls_expr (struct ls_expr *);
static int simple_mem (rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void reg_set_info (rtx, rtx, void *);
static void reg_clear_last_set (rtx, rtx, void *);
static bool store_ops_ok (rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
static bool load_kills_store (rtx, rtx, int);
static bool find_loads (rtx, rtx, int);
static bool store_killed_in_insn (rtx, rtx, rtx, int);
static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_bb (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
static void remove_reachable_equiv_notes (basic_block, struct ls_expr *);
static void replace_store_insn (rtx, rtx, basic_block, struct ls_expr *);
static void delete_store (struct ls_expr *, basic_block);
static void free_store_memory (void);
static void store_motion (void);
static void free_insn_expr_list_list (rtx *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
static bool do_local_cprop (rtx, rtx, int, rtx*);
static bool adjust_libcall_notes (rtx, rtx, rtx, rtx*);
static void local_cprop_pass (int);
static bool is_too_expensive (const char *);
\f

/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (rtx f, FILE *file)
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= 1 || is_too_expensive (_("GCSE disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();
  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
	fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
	 and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
	 during this pass.  */
      changed = one_cprop_pass (pass + 1, 0, 0);

      if (optimize_size)
	changed |= one_classic_gcse_pass (pass + 1);
      else
	{
	  changed |= one_pre_gcse_pass (pass + 1);
	  /* We may have just created new basic blocks.  Release and
	     recompute various things which are sized on the number of
	     basic blocks.  */
	  if (changed)
	    {
	      free_modify_mem_tables ();
	      modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
	      canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
	    }
	  free_reg_set_mem ();
	  alloc_reg_set_mem (max_reg_num ());
	  compute_sets (f);
	  run_jump_opt_after_gcse = 1;
	}

      if (max_pass_bytes < bytes_used)
	max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
	 not re-use the existing allocated memory because the tables
	 will not have info for the insns or registers created by
	 partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
	 for code size -- it rarely makes programs faster, and can make
	 them bigger if we did partial redundancy elimination (when optimizing
	 for space, we use a classic gcse algorithm instead of partial
	 redundancy algorithms).  */
      if (optimize_size)
	{
	  max_gcse_regno = max_reg_num ();
	  alloc_gcse_mem (f);
	  changed |= one_code_hoisting_pass ();
	  free_gcse_mem ();

	  if (max_pass_bytes < bytes_used)
	    max_pass_bytes = bytes_used;
	}

      if (file)
	{
	  fprintf (file, "\n");
	  fflush (file);
	}

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1, 0);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
	       current_function_name (), n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
	       pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();
  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  if (!optimize_size && flag_gcse_sm)
    store_motion ();

  return run_jump_opt_after_gcse;
}
\f
/* Misc. utilities.  */

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy[(int) NUM_MACHINE_MODES];

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy (void)
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
	can_copy[i] = 0;
#else
	reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
	insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
	if (recog (PATTERN (insn), insn, NULL) >= 0)
	  can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;

  end_sequence ();
}

/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (enum machine_mode mode)
{
  static bool can_copy_init_p = false;

  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}
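
/* For example (a hypothetical caller, shown only for illustration), code
   that wants to copy a value into a new pseudo would guard the attempt with

     if (can_copy_p (GET_MODE (dest)))
       ... safe to emit (set (reg) (reg)) in this mode ...

   which matters mainly for CC modes on targets defining
   AVOID_CCMODE_COPIES.  */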
\f
/* Cover function to xmalloc to record bytes allocated.  */

static void *
gmalloc (size_t size)
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xcalloc to record bytes allocated.  */

static void *
gcalloc (size_t nelem, size_t elsize)
{
  bytes_used += nelem * elsize;
  return xcalloc (nelem, elsize);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static void *
grealloc (void *ptr, size_t size)
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static void *
gcse_alloc (unsigned long size)
{
  bytes_used += size;
  return obstack_alloc (&gcse_obstack, size);
}

/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (rtx f)
{
  int i;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  uid_cuid = gcalloc (max_uid + 1, sizeof (int));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	uid_cuid[INSN_UID (insn)] = i++;
      else
	uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  cuid_insn = gcalloc (max_cuid + 1, sizeof (rtx));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_XMALLOC ();

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  modify_mem_list_set = BITMAP_XMALLOC ();
  canon_modify_mem_list_set = BITMAP_XMALLOC ();
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem (void)
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_XFREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_XFREE (modify_mem_list_set);
  BITMAP_XFREE (canon_modify_mem_list_set);
}

/* Many of the global optimization algorithms work by solving dataflow
   equations for various expressions.  Initially, some local value is
   computed for each expression in each block.  Then, the values across the
   various blocks are combined (by following flow graph edges) to arrive at
   global values.  Conceptually, each set of equations is independent.  We
   may therefore solve all the equations in parallel, solve them one at a
   time, or pick any intermediate approach.

   When you're going to need N two-dimensional bitmaps, each X (say, the
   number of blocks) by Y (say, the number of expressions), call this
   function.  It's not important what X and Y represent; only that Y
   correspond to the things that can be done in parallel.  This function will
   return an appropriate chunking factor C; you should solve C sets of
   equations in parallel.  By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less space.  */

static int
get_bitmap_width (int n, int x, int y)
{
  /* It's not really worth figuring out *exactly* how much memory will
     be used by a particular choice.  The important thing is to get
     something approximately right.  */
  size_t max_bitmap_memory = 10 * 1024 * 1024;

  /* The number of bytes we'd use for a single column of minimum
     width.  */
  size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);

  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;

  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
			     / column_size);
}
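
/* As a rough worked example (illustrative numbers only, assuming 8-byte
   sbitmap elements, so SBITMAP_ELT_BITS == 64): with N = 1 bitmap and
   X = 10000 blocks, a single column costs 1 * 10000 * 8 = 80000 bytes, so
   the 10MB budget allows (10485760 + 79999) / 80000 = 132 column groups,
   i.e. a chunking factor of 132 * 64 = 8448 equations solved in parallel.  */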
\f
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
			  struct hash_table *table)
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
	sbitmap_vector_zero (transp, last_basic_block);
      else
	sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  int indx = expr->bitmap_index;
	  struct occr *occr;

	  /* The expression is transparent in this block if it is not killed.
	     We start by assuming all are transparent [none are killed], and
	     then reset the bits for those that are.  */
	  if (transp)
	    compute_transp (expr->expr, indx, transp, table->set_p);

	  /* The occurrences recorded in antic_occr are exactly those that
	     we want to set to nonzero in ANTLOC.  */
	  if (antloc)
	    for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->deleted_p = 0;
	      }

	  /* The occurrences recorded in avail_occr are exactly those that
	     we want to set to nonzero in COMP.  */
	  if (comp)
	    for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->copied_p = 0;
	      }

	  /* While we're scanning the table, this is a good place to
	     initialize this.  */
	  expr->reaching_reg = 0;
	}
    }
}
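
/* A small example (a sketch, not from any particular dump): for a block
   containing

     x = b + c;

   with no other assignments to b or c in the block, the expression b + c
   is locally anticipatable (operands unchanged before the computation) and
   locally available (operands unchanged after it), so ANTLOC and COMP get
   bit INDX set for this block; TRANSP stays set because nothing in the
   block kills b or c.  */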
\f
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (int n_regs)
{
  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *));

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem (void)
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}

/* An OLD_INSN that used to set REGNO was replaced by NEW_INSN.
   Update the corresponding `reg_set_table' entry accordingly.
   We assume that NEW_INSN is not already recorded in reg_set_table[regno].  */

static void
replace_one_set (int regno, rtx old_insn, rtx new_insn)
{
  struct reg_set *reg_info;
  if (regno >= reg_set_table_size)
    return;
  for (reg_info = reg_set_table[regno]; reg_info; reg_info = reg_info->next)
    if (reg_info->insn == old_insn)
      {
	reg_info->insn = new_insn;
	break;
      }
}

/* Record REGNO in the reg_set table.  */

static void
record_one_set (int regno, rtx insn)
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table = grealloc (reg_set_table,
				new_size * sizeof (struct reg_set *));
      memset (reg_set_table + reg_set_table_size, 0,
	      (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}

/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx record_set_insn = (rtx) data;

  if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (rtx f)
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), record_set_info, insn);
}
\f
/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;

/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static GTY(()) rtx test_insn;
static int
want_to_gcse_p (rtx x)
{
  int num_clobbers = 0;
  int icode;

  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CALL:
    case CONSTANT_P_RTX:
      return 0;

    default:
      break;
    }

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
	= make_insn_raw (gen_rtx_SET (VOIDmode,
				      gen_rtx_REG (word_mode,
						   FIRST_PSEUDO_REGISTER * 2),
				      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
	  && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}
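
/* E.g. (an illustration; hash_scan_set is the real caller): when scanning a
   set such as (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4))),
   the caller asks want_to_gcse_p (SET_SRC (pat)); the PLUS is accepted,
   while a bare REG or CONST_INT source is rejected up front.  */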

/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (rtx x, rtx insn, int avail_p)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

	if (info->last_bb != current_bb)
	  return 1;
	if (avail_p)
	  return info->last_set < INSN_CUID (insn);
	else
	  return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
				  x, avail_p))
	return 0;
      else
	return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call needed at this
	     level, change it into iteration.  This function is called enough
	     to be worth it.  */
	  if (i == 0)
	    return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

	  else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
	    return 0;
    }

  return 1;
}

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  Nonzero if mems_conflict_for_gcse_p finds a
   conflict between two memory references.  */
static int gcse_mems_conflict_p;

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  The memory reference for a load instruction;
   mems_conflict_for_gcse_p will check whether a memory store conflicts
   with this memory load.  */
static rtx gcse_mem_operand;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in gcse_mem_operand, then set
   gcse_mems_conflict_p to a nonzero value.  */

static void
mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
			  void *data ATTRIBUTE_UNUSED)
{
  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == SIGN_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (GET_CODE (dest) != MEM)
    return;

  /* If we are setting a MEM in our list of specially recognized MEMs,
     don't mark as killed this time.  */

  if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
    {
      if (!find_rtx_in_ldst (dest))
	gcse_mems_conflict_p = 1;
      return;
    }

  if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
		       rtx_addr_varies_p))
    gcse_mems_conflict_p = 1;
}

/* Return nonzero if the expression in X (a memory reference) is killed
   in block BB before or after the insn with the CUID in UID_LIMIT.
   AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   before UID_LIMIT.

   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */

static int
load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
{
  rtx list_entry = modify_mem_list[bb->index];
  while (list_entry)
    {
      rtx setter;
      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
	   && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
	  || (! avail_p
	      && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
	{
	  list_entry = XEXP (list_entry, 1);
	  continue;
	}

      setter = XEXP (list_entry, 0);

      /* If SETTER is a call everything is clobbered.  Note that calls
	 to pure functions are never put on the list, so we need not
	 worry about them.  */
      if (GET_CODE (setter) == CALL_INSN)
	return 1;

      /* SETTER must be an INSN of some kind that sets memory.  Call
	 note_stores to examine each hunk of memory that is modified.

	 The note_stores interface is pretty limited, so we have to
	 communicate via global variables.  Yuk.  */
      gcse_mem_operand = x;
      gcse_mems_conflict_p = 0;
      note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
      if (gcse_mems_conflict_p)
	return 1;
      list_entry = XEXP (list_entry, 1);
    }
  return 0;
}

cc2902df 1496/* Return nonzero if the operands of expression X are unchanged from
7506f491
DE
1497 the start of INSN's basic block up to but not including INSN. */
1498
1499static int
1d088dee 1500oprs_anticipatable_p (rtx x, rtx insn)
7506f491
DE
1501{
1502 return oprs_unchanged_p (x, insn, 0);
1503}
1504
cc2902df 1505/* Return nonzero if the operands of expression X are unchanged from
7506f491
DE
1506 INSN to the end of INSN's basic block. */
1507
1508static int
1d088dee 1509oprs_available_p (rtx x, rtx insn)
7506f491
DE
1510{
1511 return oprs_unchanged_p (x, insn, 1);
1512}
1513
1514/* Hash expression X.
c4c81601
RK
1515
1516 MODE is only used if X is a CONST_INT. DO_NOT_RECORD_P is a boolean
1517 indicating if a volatile operand is found or if the expression contains
b58b21d5
RS
1518 something we don't want to insert in the table. HASH_TABLE_SIZE is
1519 the current size of the hash table to be probed.
7506f491
DE
1520
1521 ??? One might want to merge this with canon_hash. Later. */
1522
1523static unsigned int
b58b21d5
RS
1524hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p,
1525 int hash_table_size)
7506f491
DE
1526{
1527 unsigned int hash;
1528
1529 *do_not_record_p = 0;
1530
1531 hash = hash_expr_1 (x, mode, do_not_record_p);
1532 return hash % hash_table_size;
1533}
172890a2 1534
6462bb43 1535/* Hash a string. Just add its bytes up. */
172890a2 1536
6462bb43 1537static inline unsigned
1d088dee 1538hash_string_1 (const char *ps)
6462bb43
AO
1539{
1540 unsigned hash = 0;
8e42ace1 1541 const unsigned char *p = (const unsigned char *) ps;
589005ff 1542
6462bb43
AO
1543 if (p)
1544 while (*p)
1545 hash += *p++;
1546
1547 return hash;
1548}
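/* For example, on an ASCII host hash_string_1 ("ab") is 'a' + 'b' == 195.
   Since the bytes are only summed, permutations of a string ("ab", "ba")
   hash to the same bucket; that is acceptable for a hash, if not ideal.  */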
7506f491
DE
1549
1550/* Subroutine of hash_expr to do the actual work. */
1551
1552static unsigned int
1d088dee 1553hash_expr_1 (rtx x, enum machine_mode mode, int *do_not_record_p)
7506f491
DE
1554{
1555 int i, j;
1556 unsigned hash = 0;
1557 enum rtx_code code;
6f7d635c 1558 const char *fmt;
7506f491 1559
c4c81601 1560 /* Used to turn recursion into iteration. We can't rely on GCC's
fbe5a4a6 1561 tail-recursion elimination since we need to keep accumulating values
c4c81601 1562 in HASH. */
7506f491
DE
1563
1564 if (x == 0)
1565 return hash;
1566
c4c81601 1567 repeat:
7506f491
DE
1568 code = GET_CODE (x);
1569 switch (code)
1570 {
1571 case REG:
c4c81601
RK
1572 hash += ((unsigned int) REG << 7) + REGNO (x);
1573 return hash;
7506f491
DE
1574
1575 case CONST_INT:
c4c81601
RK
1576 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
1577 + (unsigned int) INTVAL (x));
1578 return hash;
7506f491
DE
1579
1580 case CONST_DOUBLE:
1581 /* This is like the general case, except that it only counts
1582 the integers representing the constant. */
c4c81601 1583 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
7506f491
DE
1584 if (GET_MODE (x) != VOIDmode)
1585 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
c4c81601 1586 hash += (unsigned int) XWINT (x, i);
7506f491 1587 else
c4c81601
RK
1588 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
1589 + (unsigned int) CONST_DOUBLE_HIGH (x));
7506f491
DE
1590 return hash;
1591
69ef87e2
AH
1592 case CONST_VECTOR:
1593 {
1594 int units;
1595 rtx elt;
1596
1597 units = CONST_VECTOR_NUNITS (x);
1598
1599 for (i = 0; i < units; ++i)
1600 {
1601 elt = CONST_VECTOR_ELT (x, i);
1602 hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
1603 }
1604
1605 return hash;
1606 }
1607
7506f491
DE
1608 /* Assume there is only one rtx object for any given label. */
1609 case LABEL_REF:
1610 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1611 differences and differences between each stage's debugging dumps. */
c4c81601
RK
1612 hash += (((unsigned int) LABEL_REF << 7)
1613 + CODE_LABEL_NUMBER (XEXP (x, 0)));
7506f491
DE
1614 return hash;
1615
1616 case SYMBOL_REF:
1617 {
1618 /* Don't hash on the symbol's address to avoid bootstrap differences.
1619 Different hash values may cause expressions to be recorded in
1620 different orders and thus different registers to be used in the
1621 final assembler. This also avoids differences in the dump files
1622 between various stages. */
1623 unsigned int h = 0;
3cce094d 1624 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
c4c81601 1625
7506f491
DE
1626 while (*p)
1627 h += (h << 7) + *p++; /* ??? revisit */
c4c81601
RK
1628
1629 hash += ((unsigned int) SYMBOL_REF << 7) + h;
7506f491
DE
1630 return hash;
1631 }
1632
1633 case MEM:
1634 if (MEM_VOLATILE_P (x))
1635 {
1636 *do_not_record_p = 1;
1637 return 0;
1638 }
c4c81601
RK
1639
1640 hash += (unsigned int) MEM;
d51f3632
JH
 1641 /* We once used the alias set for hashing, but this is not good, since
 1642 the alias set may differ between -fprofile-arcs and -fbranch-probabilities
 1643 compilations, causing the profiles to fail to match. */
7506f491
DE
1644 x = XEXP (x, 0);
1645 goto repeat;
1646
1647 case PRE_DEC:
1648 case PRE_INC:
1649 case POST_DEC:
1650 case POST_INC:
1651 case PC:
1652 case CC0:
1653 case CALL:
1654 case UNSPEC_VOLATILE:
1655 *do_not_record_p = 1;
1656 return 0;
1657
1658 case ASM_OPERANDS:
1659 if (MEM_VOLATILE_P (x))
1660 {
1661 *do_not_record_p = 1;
1662 return 0;
1663 }
6462bb43
AO
1664 else
1665 {
1666 /* We don't want to take the filename and line into account. */
1667 hash += (unsigned) code + (unsigned) GET_MODE (x)
1668 + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
1669 + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
1670 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
1671
1672 if (ASM_OPERANDS_INPUT_LENGTH (x))
1673 {
1674 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
1675 {
1676 hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
1677 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
1678 do_not_record_p)
1679 + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
1680 (x, i)));
1681 }
1682
1683 hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
1684 x = ASM_OPERANDS_INPUT (x, 0);
1685 mode = GET_MODE (x);
1686 goto repeat;
1687 }
1688 return hash;
1689 }
7506f491
DE
1690
1691 default:
1692 break;
1693 }
1694
7506f491 1695 hash += (unsigned) code + (unsigned) GET_MODE (x);
c4c81601 1696 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
1697 {
1698 if (fmt[i] == 'e')
1699 {
7506f491
DE
1700 /* If we are about to do the last recursive call
1701 needed at this level, change it into iteration.
1702 This function is called enough to be worth it. */
1703 if (i == 0)
1704 {
c4c81601 1705 x = XEXP (x, i);
7506f491
DE
1706 goto repeat;
1707 }
c4c81601
RK
1708
1709 hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
7506f491
DE
1710 if (*do_not_record_p)
1711 return 0;
1712 }
c4c81601 1713
7506f491
DE
1714 else if (fmt[i] == 'E')
1715 for (j = 0; j < XVECLEN (x, i); j++)
1716 {
1717 hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
1718 if (*do_not_record_p)
1719 return 0;
1720 }
c4c81601 1721
7506f491 1722 else if (fmt[i] == 's')
6462bb43 1723 hash += hash_string_1 (XSTR (x, i));
7506f491 1724 else if (fmt[i] == 'i')
c4c81601 1725 hash += (unsigned int) XINT (x, i);
7506f491
DE
1726 else
1727 abort ();
1728 }
1729
1730 return hash;
1731}
1732
1733/* Hash a set of register REGNO.
1734
c4c81601
RK
1735 Sets are hashed on the register that is set. This simplifies the PRE copy
1736 propagation code.
7506f491
DE
1737
1738 ??? May need to make things more elaborate. Later, as necessary. */
1739
1740static unsigned int
1d088dee 1741hash_set (int regno, int hash_table_size)
7506f491
DE
1742{
1743 unsigned int hash;
1744
1745 hash = regno;
1746 return hash % hash_table_size;
1747}
1748
cc2902df 1749/* Return nonzero if exp1 is equivalent to exp2.
7506f491
DE
1750 ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */
1751
1752static int
1d088dee 1753expr_equiv_p (rtx x, rtx y)
7506f491 1754{
b3694847
SS
1755 int i, j;
1756 enum rtx_code code;
1757 const char *fmt;
7506f491
DE
1758
1759 if (x == y)
1760 return 1;
c4c81601 1761
7506f491 1762 if (x == 0 || y == 0)
ebd7a7af 1763 return 0;
7506f491
DE
1764
1765 code = GET_CODE (x);
1766 if (code != GET_CODE (y))
1767 return 0;
1768
1769 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1770 if (GET_MODE (x) != GET_MODE (y))
1771 return 0;
1772
1773 switch (code)
1774 {
1775 case PC:
1776 case CC0:
7506f491 1777 case CONST_INT:
ebd7a7af 1778 return 0;
7506f491
DE
1779
1780 case LABEL_REF:
1781 return XEXP (x, 0) == XEXP (y, 0);
1782
1783 case SYMBOL_REF:
1784 return XSTR (x, 0) == XSTR (y, 0);
1785
1786 case REG:
1787 return REGNO (x) == REGNO (y);
1788
297c3335
RH
1789 case MEM:
1790 /* Can't merge two expressions in different alias sets, since we can
1791 decide that the expression is transparent in a block when it isn't,
 1792 due to it being set with a different alias set. */
1793 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
1794 return 0;
bad998e0
ZD
1795
1796 /* A volatile mem should not be considered equivalent to any other. */
1797 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1798 return 0;
297c3335
RH
1799 break;
1800
7506f491
DE
1801 /* For commutative operations, check both orders. */
1802 case PLUS:
1803 case MULT:
1804 case AND:
1805 case IOR:
1806 case XOR:
1807 case NE:
1808 case EQ:
1809 return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
1810 && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
1811 || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
1812 && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));
1813
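      /* E.g. (plus:SI (reg:SI 3) (reg:SI 4)) is equivalent to
	 (plus:SI (reg:SI 4) (reg:SI 3)), whereas the MULT:SI/MULT:HI
	 pair noted above is not, because the modes differ.  */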
6462bb43
AO
1814 case ASM_OPERANDS:
1815 /* We don't use the generic code below because we want to
1816 disregard filename and line numbers. */
1817
1818 /* A volatile asm isn't equivalent to any other. */
1819 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1820 return 0;
1821
1822 if (GET_MODE (x) != GET_MODE (y)
1823 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
1824 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
1825 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
1826 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
1827 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
1828 return 0;
1829
1830 if (ASM_OPERANDS_INPUT_LENGTH (x))
1831 {
1832 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
1833 if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
1834 ASM_OPERANDS_INPUT (y, i))
1835 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
1836 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
1837 return 0;
1838 }
1839
1840 return 1;
1841
7506f491
DE
1842 default:
1843 break;
1844 }
1845
1846 /* Compare the elements. If any pair of corresponding elements
1847 fail to match, return 0 for the whole thing. */
1848
1849 fmt = GET_RTX_FORMAT (code);
1850 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1851 {
1852 switch (fmt[i])
1853 {
1854 case 'e':
1855 if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
1856 return 0;
1857 break;
1858
1859 case 'E':
1860 if (XVECLEN (x, i) != XVECLEN (y, i))
1861 return 0;
1862 for (j = 0; j < XVECLEN (x, i); j++)
1863 if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1864 return 0;
1865 break;
1866
1867 case 's':
1868 if (strcmp (XSTR (x, i), XSTR (y, i)))
1869 return 0;
1870 break;
1871
1872 case 'i':
1873 if (XINT (x, i) != XINT (y, i))
1874 return 0;
1875 break;
1876
1877 case 'w':
1878 if (XWINT (x, i) != XWINT (y, i))
1879 return 0;
1880 break;
1881
1882 case '0':
1883 break;
aaa4ca30 1884
7506f491
DE
1885 default:
1886 abort ();
1887 }
8e42ace1 1888 }
7506f491
DE
1889
1890 return 1;
1891}
1892
02280659 1893/* Insert expression X in INSN in the hash TABLE.
7506f491
DE
1894 If it is already present, record it as the last occurrence in INSN's
1895 basic block.
1896
1897 MODE is the mode of the value X is being stored into.
1898 It is only used if X is a CONST_INT.
1899
cc2902df
KH
1900 ANTIC_P is nonzero if X is an anticipatable expression.
1901 AVAIL_P is nonzero if X is an available expression. */
7506f491
DE
1902
1903static void
1d088dee
AJ
1904insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
1905 int avail_p, struct hash_table *table)
7506f491
DE
1906{
1907 int found, do_not_record_p;
1908 unsigned int hash;
1909 struct expr *cur_expr, *last_expr = NULL;
1910 struct occr *antic_occr, *avail_occr;
1911 struct occr *last_occr = NULL;
1912
02280659 1913 hash = hash_expr (x, mode, &do_not_record_p, table->size);
7506f491
DE
1914
1915 /* Do not insert expression in table if it contains volatile operands,
1916 or if hash_expr determines the expression is something we don't want
1917 to or can't handle. */
1918 if (do_not_record_p)
1919 return;
1920
02280659 1921 cur_expr = table->table[hash];
7506f491
DE
1922 found = 0;
1923
c4c81601 1924 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
7506f491
DE
1925 {
1926 /* If the expression isn't found, save a pointer to the end of
1927 the list. */
1928 last_expr = cur_expr;
1929 cur_expr = cur_expr->next_same_hash;
1930 }
1931
1932 if (! found)
1933 {
703ad42b 1934 cur_expr = gcse_alloc (sizeof (struct expr));
7506f491 1935 bytes_used += sizeof (struct expr);
02280659 1936 if (table->table[hash] == NULL)
c4c81601 1937 /* This is the first pattern that hashed to this index. */
02280659 1938 table->table[hash] = cur_expr;
7506f491 1939 else
c4c81601
RK
1940 /* Add EXPR to end of this hash chain. */
1941 last_expr->next_same_hash = cur_expr;
1942
589005ff 1943 /* Set the fields of the expr element. */
7506f491 1944 cur_expr->expr = x;
02280659 1945 cur_expr->bitmap_index = table->n_elems++;
7506f491
DE
1946 cur_expr->next_same_hash = NULL;
1947 cur_expr->antic_occr = NULL;
1948 cur_expr->avail_occr = NULL;
1949 }
1950
1951 /* Now record the occurrence(s). */
7506f491
DE
1952 if (antic_p)
1953 {
1954 antic_occr = cur_expr->antic_occr;
1955
1956 /* Search for another occurrence in the same basic block. */
1957 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
1958 {
1959 /* If an occurrence isn't found, save a pointer to the end of
1960 the list. */
1961 last_occr = antic_occr;
1962 antic_occr = antic_occr->next;
1963 }
1964
1965 if (antic_occr)
c4c81601
RK
1966 /* Found another instance of the expression in the same basic block.
1967 Prefer the currently recorded one. We want the first one in the
1968 block and the block is scanned from start to end. */
1969 ; /* nothing to do */
7506f491
DE
1970 else
1971 {
1972 /* First occurrence of this expression in this basic block. */
703ad42b 1973 antic_occr = gcse_alloc (sizeof (struct occr));
7506f491
DE
1974 bytes_used += sizeof (struct occr);
1975 /* First occurrence of this expression in any block? */
1976 if (cur_expr->antic_occr == NULL)
1977 cur_expr->antic_occr = antic_occr;
1978 else
1979 last_occr->next = antic_occr;
c4c81601 1980
7506f491
DE
1981 antic_occr->insn = insn;
1982 antic_occr->next = NULL;
f9957958 1983 antic_occr->deleted_p = 0;
7506f491
DE
1984 }
1985 }
1986
1987 if (avail_p)
1988 {
1989 avail_occr = cur_expr->avail_occr;
1990
1991 /* Search for another occurrence in the same basic block. */
1992 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
1993 {
1994 /* If an occurrence isn't found, save a pointer to the end of
1995 the list. */
1996 last_occr = avail_occr;
1997 avail_occr = avail_occr->next;
1998 }
1999
2000 if (avail_occr)
c4c81601
RK
2001 /* Found another instance of the expression in the same basic block.
2002 Prefer this occurrence to the currently recorded one. We want
2003 the last one in the block and the block is scanned from start
2004 to end. */
2005 avail_occr->insn = insn;
7506f491
DE
2006 else
2007 {
2008 /* First occurrence of this expression in this basic block. */
703ad42b 2009 avail_occr = gcse_alloc (sizeof (struct occr));
7506f491 2010 bytes_used += sizeof (struct occr);
c4c81601 2011
7506f491
DE
2012 /* First occurrence of this expression in any block? */
2013 if (cur_expr->avail_occr == NULL)
2014 cur_expr->avail_occr = avail_occr;
2015 else
2016 last_occr->next = avail_occr;
c4c81601 2017
7506f491
DE
2018 avail_occr->insn = insn;
2019 avail_occr->next = NULL;
f9957958 2020 avail_occr->deleted_p = 0;
7506f491
DE
2021 }
2022 }
2023}
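/* Concretely: if a block computes the same expression at insns I1 and then
   I2, the anticipatable occurrence recorded for that block is I1 (the first
   in the block) and the available occurrence is I2 (the last in the block). */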
2024
2025/* Insert pattern X in INSN in the hash table.
2026 X is a SET of a reg to either another reg or a constant.
2027 If it is already present, record it as the last occurrence in INSN's
2028 basic block. */
2029
2030static void
1d088dee 2031insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
7506f491
DE
2032{
2033 int found;
2034 unsigned int hash;
2035 struct expr *cur_expr, *last_expr = NULL;
2036 struct occr *cur_occr, *last_occr = NULL;
2037
2038 if (GET_CODE (x) != SET
2039 || GET_CODE (SET_DEST (x)) != REG)
2040 abort ();
2041
02280659 2042 hash = hash_set (REGNO (SET_DEST (x)), table->size);
7506f491 2043
02280659 2044 cur_expr = table->table[hash];
7506f491
DE
2045 found = 0;
2046
c4c81601 2047 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
7506f491
DE
2048 {
2049 /* If the expression isn't found, save a pointer to the end of
2050 the list. */
2051 last_expr = cur_expr;
2052 cur_expr = cur_expr->next_same_hash;
2053 }
2054
2055 if (! found)
2056 {
703ad42b 2057 cur_expr = gcse_alloc (sizeof (struct expr));
7506f491 2058 bytes_used += sizeof (struct expr);
02280659 2059 if (table->table[hash] == NULL)
c4c81601 2060 /* This is the first pattern that hashed to this index. */
02280659 2061 table->table[hash] = cur_expr;
7506f491 2062 else
c4c81601
RK
2063 /* Add EXPR to end of this hash chain. */
2064 last_expr->next_same_hash = cur_expr;
2065
7506f491
DE
2066 /* Set the fields of the expr element.
2067 We must copy X because it can be modified when copy propagation is
2068 performed on its operands. */
7506f491 2069 cur_expr->expr = copy_rtx (x);
02280659 2070 cur_expr->bitmap_index = table->n_elems++;
7506f491
DE
2071 cur_expr->next_same_hash = NULL;
2072 cur_expr->antic_occr = NULL;
2073 cur_expr->avail_occr = NULL;
2074 }
2075
2076 /* Now record the occurrence. */
7506f491
DE
2077 cur_occr = cur_expr->avail_occr;
2078
2079 /* Search for another occurrence in the same basic block. */
2080 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2081 {
2082 /* If an occurrence isn't found, save a pointer to the end of
2083 the list. */
2084 last_occr = cur_occr;
2085 cur_occr = cur_occr->next;
2086 }
2087
2088 if (cur_occr)
c4c81601
RK
2089 /* Found another instance of the expression in the same basic block.
2090 Prefer this occurrence to the currently recorded one. We want the
2091 last one in the block and the block is scanned from start to end. */
2092 cur_occr->insn = insn;
7506f491
DE
2093 else
2094 {
2095 /* First occurrence of this expression in this basic block. */
703ad42b 2096 cur_occr = gcse_alloc (sizeof (struct occr));
7506f491 2097 bytes_used += sizeof (struct occr);
c4c81601 2098
7506f491
DE
2099 /* First occurrence of this expression in any block? */
2100 if (cur_expr->avail_occr == NULL)
2101 cur_expr->avail_occr = cur_occr;
2102 else
2103 last_occr->next = cur_occr;
c4c81601 2104
7506f491
DE
2105 cur_occr->insn = insn;
2106 cur_occr->next = NULL;
f9957958 2107 cur_occr->deleted_p = 0;
7506f491
DE
2108 }
2109}
2110
6b2d1c9e
RS
2111/* Determine whether the rtx X should be treated as a constant for
2112 the purposes of GCSE's constant propagation. */
2113
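/* For example, (compare (const_int 1) (const_int 2)) counts as a constant
   here, as does a COMPARE of the same non-floating-point register with
   itself; the floating point case is excluded since a NaN register need
   not compare equal to itself.  */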
2114static bool
1d088dee 2115gcse_constant_p (rtx x)
6b2d1c9e
RS
2116{
2117 /* Consider a COMPARE of two integers constant. */
2118 if (GET_CODE (x) == COMPARE
2119 && GET_CODE (XEXP (x, 0)) == CONST_INT
2120 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2121 return true;
2122
db2f435b
AP
2123
 2124 /* Consider a COMPARE of the same registers to be a constant
938d968e 2125 if they are not floating point registers. */
db2f435b
AP
 2126 if (GET_CODE (x) == COMPARE
2127 && GET_CODE (XEXP (x, 0)) == REG
2128 && GET_CODE (XEXP (x, 1)) == REG
2129 && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
2130 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
2131 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
2132 return true;
2133
6b2d1c9e
RS
2134 if (GET_CODE (x) == CONSTANT_P_RTX)
2135 return false;
2136
2137 return CONSTANT_P (x);
2138}
2139
02280659
ZD
2140/* Scan pattern PAT of INSN and add an entry to the hash TABLE (either
2141 the set table or the expression table). */
7506f491
DE
2142
2143static void
1d088dee 2144hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
7506f491
DE
2145{
2146 rtx src = SET_SRC (pat);
2147 rtx dest = SET_DEST (pat);
172890a2 2148 rtx note;
7506f491
DE
2149
2150 if (GET_CODE (src) == CALL)
02280659 2151 hash_scan_call (src, insn, table);
7506f491 2152
172890a2 2153 else if (GET_CODE (dest) == REG)
7506f491 2154 {
172890a2 2155 unsigned int regno = REGNO (dest);
7506f491
DE
2156 rtx tmp;
2157
172890a2
RK
2158 /* If this is a single set and we are doing constant propagation,
2159 see if a REG_NOTE shows this equivalent to a constant. */
02280659 2160 if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
6b2d1c9e 2161 && gcse_constant_p (XEXP (note, 0)))
172890a2
RK
2162 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2163
7506f491 2164 /* Only record sets of pseudo-regs in the hash table. */
02280659 2165 if (! table->set_p
7506f491
DE
2166 && regno >= FIRST_PSEUDO_REGISTER
2167 /* Don't GCSE something if we can't do a reg/reg copy. */
773eae39 2168 && can_copy_p (GET_MODE (dest))
068473ec
JH
 2169 /* GCSE commonly inserts instructions after the insn. We can't
 2170 do that easily for EH_REGION notes, so disable GCSE on these
2171 for now. */
2172 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
7506f491 2173 /* Is SET_SRC something we want to gcse? */
172890a2
RK
2174 && want_to_gcse_p (src)
2175 /* Don't CSE a nop. */
43e72072
JJ
2176 && ! set_noop_p (pat)
 2177 /* Don't GCSE if it has an attached REG_EQUIV note.
 2178 At this point only function parameters should have
 2179 REG_EQUIV notes, and if the argument slot is used somewhere
a1f300c0 2180 explicitly, it means the address of the parameter has been taken,
43e72072
JJ
2181 so we should not extend the lifetime of the pseudo. */
2182 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2183 || GET_CODE (XEXP (note, 0)) != MEM))
7506f491
DE
2184 {
2185 /* An expression is not anticipatable if its operands are
52d76e11
RK
2186 modified before this insn or if this is not the only SET in
2187 this insn. */
2188 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
7506f491 2189 /* An expression is not available if its operands are
eb296bd9
GK
2190 subsequently modified, including this insn. It's also not
2191 available if this is a branch, because we can't insert
2192 a set after the branch. */
2193 int avail_p = (oprs_available_p (src, insn)
2194 && ! JUMP_P (insn));
c4c81601 2195
02280659 2196 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
7506f491 2197 }
c4c81601 2198
7506f491 2199 /* Record sets for constant/copy propagation. */
02280659 2200 else if (table->set_p
7506f491
DE
2201 && regno >= FIRST_PSEUDO_REGISTER
2202 && ((GET_CODE (src) == REG
2203 && REGNO (src) >= FIRST_PSEUDO_REGISTER
773eae39 2204 && can_copy_p (GET_MODE (dest))
172890a2 2205 && REGNO (src) != regno)
6b2d1c9e 2206 || gcse_constant_p (src))
7506f491
DE
2207 /* A copy is not available if its src or dest is subsequently
2208 modified. Here we want to search from INSN+1 on, but
2209 oprs_available_p searches from INSN on. */
a813c111 2210 && (insn == BB_END (BLOCK_FOR_INSN (insn))
7506f491
DE
2211 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2212 && oprs_available_p (pat, tmp))))
02280659 2213 insert_set_in_table (pat, insn, table);
7506f491 2214 }
d91edf86 2215 /* In the case of a store we want to consider the memory value as available
f5f2e3cd
MH
 2216 in the REG stored in that memory. This makes it possible to remove
 2217 redundant loads due to stores to the same location. */
2218 else if (flag_gcse_las && GET_CODE (src) == REG && GET_CODE (dest) == MEM)
2219 {
2220 unsigned int regno = REGNO (src);
2221
2222 /* Do not do this for constant/copy propagation. */
2223 if (! table->set_p
2224 /* Only record sets of pseudo-regs in the hash table. */
2225 && regno >= FIRST_PSEUDO_REGISTER
2226 /* Don't GCSE something if we can't do a reg/reg copy. */
2227 && can_copy_p (GET_MODE (src))
 2228 /* GCSE commonly inserts instructions after the insn. We can't
 2229 do that easily for EH_REGION notes, so disable GCSE on these
2230 for now. */
2231 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2232 /* Is SET_DEST something we want to gcse? */
2233 && want_to_gcse_p (dest)
2234 /* Don't CSE a nop. */
2235 && ! set_noop_p (pat)
 2236 /* Don't GCSE if it has an attached REG_EQUIV note.
 2237 At this point only function parameters should have
 2238 REG_EQUIV notes, and if the argument slot is used somewhere
 2239 explicitly, it means the address of the parameter has been taken,
2240 so we should not extend the lifetime of the pseudo. */
2241 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2242 || GET_CODE (XEXP (note, 0)) != MEM))
2243 {
2244 /* Stores are never anticipatable. */
2245 int antic_p = 0;
2246 /* An expression is not available if its operands are
2247 subsequently modified, including this insn. It's also not
2248 available if this is a branch, because we can't insert
2249 a set after the branch. */
2250 int avail_p = oprs_available_p (dest, insn)
2251 && ! JUMP_P (insn);
2252
2253 /* Record the memory expression (DEST) in the hash table. */
2254 insert_expr_in_table (dest, GET_MODE (dest), insn,
2255 antic_p, avail_p, table);
2256 }
2257 }
7506f491
DE
2258}
2259
2260static void
1d088dee
AJ
2261hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
2262 struct hash_table *table ATTRIBUTE_UNUSED)
7506f491
DE
2263{
2264 /* Currently nothing to do. */
2265}
2266
2267static void
1d088dee
AJ
2268hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
2269 struct hash_table *table ATTRIBUTE_UNUSED)
7506f491
DE
2270{
2271 /* Currently nothing to do. */
2272}
2273
2274/* Process INSN and add hash table entries as appropriate.
2275
2276 Only available expressions that set a single pseudo-reg are recorded.
2277
2278 Single sets in a PARALLEL could be handled, but it's an extra complication
2279 that isn't dealt with right now. The trick is handling the CLOBBERs that
2280 are also in the PARALLEL. Later.
2281
cc2902df 2282 If SET_P is nonzero, this is for the assignment hash table,
ed79bb3d
R
2283 otherwise it is for the expression hash table.
 2284 If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should
2285 not record any expressions. */
7506f491
DE
2286
2287static void
1d088dee 2288hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
7506f491
DE
2289{
2290 rtx pat = PATTERN (insn);
c4c81601 2291 int i;
7506f491 2292
172890a2
RK
2293 if (in_libcall_block)
2294 return;
2295
7506f491
DE
2296 /* Pick out the sets of INSN and for other forms of instructions record
2297 what's been modified. */
2298
172890a2 2299 if (GET_CODE (pat) == SET)
02280659 2300 hash_scan_set (pat, insn, table);
7506f491 2301 else if (GET_CODE (pat) == PARALLEL)
c4c81601
RK
2302 for (i = 0; i < XVECLEN (pat, 0); i++)
2303 {
2304 rtx x = XVECEXP (pat, 0, i);
7506f491 2305
c4c81601 2306 if (GET_CODE (x) == SET)
02280659 2307 hash_scan_set (x, insn, table);
c4c81601 2308 else if (GET_CODE (x) == CLOBBER)
02280659 2309 hash_scan_clobber (x, insn, table);
c4c81601 2310 else if (GET_CODE (x) == CALL)
02280659 2311 hash_scan_call (x, insn, table);
c4c81601 2312 }
7506f491 2313
7506f491 2314 else if (GET_CODE (pat) == CLOBBER)
02280659 2315 hash_scan_clobber (pat, insn, table);
7506f491 2316 else if (GET_CODE (pat) == CALL)
02280659 2317 hash_scan_call (pat, insn, table);
7506f491
DE
2318}
2319
2320static void
1d088dee 2321dump_hash_table (FILE *file, const char *name, struct hash_table *table)
7506f491
DE
2322{
2323 int i;
2324 /* Flattened out table, so it's printed in proper order. */
4da896b2
MM
2325 struct expr **flat_table;
2326 unsigned int *hash_val;
c4c81601 2327 struct expr *expr;
4da896b2 2328
703ad42b
KG
2329 flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
2330 hash_val = xmalloc (table->n_elems * sizeof (unsigned int));
7506f491 2331
02280659
ZD
2332 for (i = 0; i < (int) table->size; i++)
2333 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601
RK
2334 {
2335 flat_table[expr->bitmap_index] = expr;
2336 hash_val[expr->bitmap_index] = i;
2337 }
7506f491
DE
2338
2339 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
02280659 2340 name, table->size, table->n_elems);
7506f491 2341
02280659 2342 for (i = 0; i < (int) table->n_elems; i++)
21318741
RK
2343 if (flat_table[i] != 0)
2344 {
a0ac9e5a 2345 expr = flat_table[i];
21318741
RK
2346 fprintf (file, "Index %d (hash value %d)\n ",
2347 expr->bitmap_index, hash_val[i]);
a0ac9e5a 2348 print_rtl (file, expr->expr);
21318741
RK
2349 fprintf (file, "\n");
2350 }
7506f491
DE
2351
2352 fprintf (file, "\n");
4da896b2 2353
4da896b2
MM
2354 free (flat_table);
2355 free (hash_val);
7506f491
DE
2356}
2357
2358/* Record register first/last/block set information for REGNO in INSN.
c4c81601 2359
80c29cc4 2360 first_set records the first place in the block where the register
7506f491 2361 is set and is used to compute "anticipatability".
c4c81601 2362
80c29cc4 2363 last_set records the last place in the block where the register
7506f491 2364 is set and is used to compute "availability".
c4c81601 2365
80c29cc4
RZ
2366 last_bb records the block for which first_set and last_set are
2367 valid, as a quick test to invalidate them.
2368
7506f491
DE
2369 reg_set_in_block records whether the register is set in the block
2370 and is used to compute "transparency". */
2371
2372static void
1d088dee 2373record_last_reg_set_info (rtx insn, int regno)
7506f491 2374{
80c29cc4
RZ
2375 struct reg_avail_info *info = &reg_avail_info[regno];
2376 int cuid = INSN_CUID (insn);
c4c81601 2377
80c29cc4
RZ
2378 info->last_set = cuid;
2379 if (info->last_bb != current_bb)
2380 {
2381 info->last_bb = current_bb;
2382 info->first_set = cuid;
e0082a72 2383 SET_BIT (reg_set_in_block[current_bb->index], regno);
80c29cc4 2384 }
7506f491
DE
2385}
2386
a13d4ebf
AM
2387
2388/* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2389 Note we store a pair of elements in the list, so they have to be
2390 taken off pairwise. */
2391
589005ff 2392static void
1d088dee
AJ
2393canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
2394 void * v_insn)
a13d4ebf
AM
2395{
2396 rtx dest_addr, insn;
0fe854a7 2397 int bb;
a13d4ebf
AM
2398
2399 while (GET_CODE (dest) == SUBREG
2400 || GET_CODE (dest) == ZERO_EXTRACT
2401 || GET_CODE (dest) == SIGN_EXTRACT
2402 || GET_CODE (dest) == STRICT_LOW_PART)
2403 dest = XEXP (dest, 0);
2404
2405 /* If DEST is not a MEM, then it will not conflict with a load. Note
2406 that function calls are assumed to clobber memory, but are handled
2407 elsewhere. */
2408
2409 if (GET_CODE (dest) != MEM)
2410 return;
2411
2412 dest_addr = get_addr (XEXP (dest, 0));
2413 dest_addr = canon_rtx (dest_addr);
589005ff 2414 insn = (rtx) v_insn;
0fe854a7 2415 bb = BLOCK_NUM (insn);
a13d4ebf 2416
589005ff 2417 canon_modify_mem_list[bb] =
0fe854a7 2418 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
589005ff 2419 canon_modify_mem_list[bb] =
0fe854a7
RH
2420 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
2421 bitmap_set_bit (canon_modify_mem_list_set, bb);
a13d4ebf
AM
2422}
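/* The head of canon_modify_mem_list[bb] thus alternates DEST, DEST_ADDR,
   DEST, DEST_ADDR, ... with the most recent store first; a lone CALL_INSN
   node (see record_last_mem_set_info) may end a traversal early.  */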
2423
a13d4ebf
AM
2424/* Record memory modification information for INSN. We do not actually care
2425 about the memory location(s) that are set, or even how they are set (consider
2426 a CALL_INSN). We merely need to record which insns modify memory. */
7506f491
DE
2427
2428static void
1d088dee 2429record_last_mem_set_info (rtx insn)
7506f491 2430{
0fe854a7
RH
2431 int bb = BLOCK_NUM (insn);
2432
ccef9ef5 2433 /* load_killed_in_block_p will handle the case of calls clobbering
dc297297 2434 everything. */
0fe854a7
RH
2435 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
2436 bitmap_set_bit (modify_mem_list_set, bb);
a13d4ebf
AM
2437
2438 if (GET_CODE (insn) == CALL_INSN)
2439 {
2440 /* Note that traversals of this loop (other than for free-ing)
2441 will break after encountering a CALL_INSN. So, there's no
dc297297 2442 need to insert a pair of items, as canon_list_insert does. */
589005ff
KH
2443 canon_modify_mem_list[bb] =
2444 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
0fe854a7 2445 bitmap_set_bit (canon_modify_mem_list_set, bb);
a13d4ebf
AM
2446 }
2447 else
0fe854a7 2448 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
7506f491
DE
2449}
2450
7506f491 2451/* Called from compute_hash_table via note_stores to handle one
84832317
MM
2452 SET or CLOBBER in an insn. DATA is really the instruction in which
2453 the SET is taking place. */
7506f491
DE
2454
2455static void
1d088dee 2456record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
7506f491 2457{
84832317
MM
2458 rtx last_set_insn = (rtx) data;
2459
7506f491
DE
2460 if (GET_CODE (dest) == SUBREG)
2461 dest = SUBREG_REG (dest);
2462
2463 if (GET_CODE (dest) == REG)
2464 record_last_reg_set_info (last_set_insn, REGNO (dest));
2465 else if (GET_CODE (dest) == MEM
2466 /* Ignore pushes, they clobber nothing. */
2467 && ! push_operand (dest, GET_MODE (dest)))
2468 record_last_mem_set_info (last_set_insn);
2469}
2470
2471/* Top level function to create an expression or assignment hash table.
2472
2473 Expression entries are placed in the hash table if
2474 - they are of the form (set (pseudo-reg) src),
2475 - src is something we want to perform GCSE on,
2476 - none of the operands are subsequently modified in the block
2477
2478 Assignment entries are placed in the hash table if
2479 - they are of the form (set (pseudo-reg) src),
2480 - src is something we want to perform const/copy propagation on,
2481 - none of the operands or target are subsequently modified in the block
c4c81601 2482
7506f491
DE
2483 Currently src must be a pseudo-reg or a const_int.
2484
02280659 2485 TABLE is the table computed. */
7506f491
DE
2486
2487static void
1d088dee 2488compute_hash_table_work (struct hash_table *table)
7506f491 2489{
80c29cc4 2490 unsigned int i;
7506f491
DE
2491
2492 /* While we compute the hash table we also compute a bit array of which
2493 registers are set in which blocks.
7506f491
DE
2494 ??? This isn't needed during const/copy propagation, but it's cheap to
2495 compute. Later. */
d55bc081 2496 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
7506f491 2497
a13d4ebf 2498 /* Re-cache any INSN_LIST nodes we have allocated. */
73991d6a 2499 clear_modify_mem_tables ();
7506f491 2500 /* Some working arrays used to track first and last set in each block. */
703ad42b 2501 reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
80c29cc4
RZ
2502
2503 for (i = 0; i < max_gcse_regno; ++i)
e0082a72 2504 reg_avail_info[i].last_bb = NULL;
7506f491 2505
e0082a72 2506 FOR_EACH_BB (current_bb)
7506f491
DE
2507 {
2508 rtx insn;
770ae6cc 2509 unsigned int regno;
ed79bb3d 2510 int in_libcall_block;
7506f491
DE
2511
2512 /* First pass over the instructions records information used to
2513 determine when registers and memory are first and last set.
ccef9ef5 2514 ??? hard-reg reg_set_in_block computation
7506f491
DE
2515 could be moved to compute_sets since they currently don't change. */
2516
a813c111
SB
2517 for (insn = BB_HEAD (current_bb);
2518 insn && insn != NEXT_INSN (BB_END (current_bb));
7506f491
DE
2519 insn = NEXT_INSN (insn))
2520 {
2c3c49de 2521 if (! INSN_P (insn))
7506f491
DE
2522 continue;
2523
2524 if (GET_CODE (insn) == CALL_INSN)
2525 {
19652adf 2526 bool clobbers_all = false;
589005ff 2527#ifdef NON_SAVING_SETJMP
19652adf
ZW
2528 if (NON_SAVING_SETJMP
2529 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
2530 clobbers_all = true;
2531#endif
2532
7506f491 2533 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
19652adf
ZW
2534 if (clobbers_all
2535 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7506f491 2536 record_last_reg_set_info (insn, regno);
c4c81601 2537
24a28584 2538 mark_call (insn);
7506f491
DE
2539 }
2540
84832317 2541 note_stores (PATTERN (insn), record_last_set_info, insn);
7506f491
DE
2542 }
2543
fbef91d8
RS
2544 /* Insert implicit sets in the hash table. */
2545 if (table->set_p
2546 && implicit_sets[current_bb->index] != NULL_RTX)
2547 hash_scan_set (implicit_sets[current_bb->index],
a813c111 2548 BB_HEAD (current_bb), table);
fbef91d8 2549
7506f491
DE
2550 /* The next pass builds the hash table. */
2551
a813c111
SB
2552 for (insn = BB_HEAD (current_bb), in_libcall_block = 0;
2553 insn && insn != NEXT_INSN (BB_END (current_bb));
7506f491 2554 insn = NEXT_INSN (insn))
2c3c49de 2555 if (INSN_P (insn))
c4c81601
RK
2556 {
2557 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
589005ff 2558 in_libcall_block = 1;
02280659 2559 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
589005ff 2560 in_libcall_block = 0;
02280659
ZD
2561 hash_scan_insn (insn, table, in_libcall_block);
2562 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
589005ff 2563 in_libcall_block = 0;
8e42ace1 2564 }
7506f491
DE
2565 }
2566
80c29cc4
RZ
2567 free (reg_avail_info);
2568 reg_avail_info = NULL;
7506f491
DE
2569}
2570
02280659 2571/* Allocate space for the set/expr hash TABLE.
7506f491 2572 N_INSNS is the number of instructions in the function.
02280659
ZD
2573 It is used to determine the number of buckets to use.
2574 SET_P determines whether set or expression table will
2575 be created. */
7506f491
DE
2576
2577static void
1d088dee 2578alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
7506f491
DE
2579{
2580 int n;
2581
02280659
ZD
2582 table->size = n_insns / 4;
2583 if (table->size < 11)
2584 table->size = 11;
c4c81601 2585
7506f491
DE
2586 /* Attempt to maintain efficient use of hash table.
2587 Making it an odd number is simplest for now.
2588 ??? Later take some measurements. */
02280659
ZD
2589 table->size |= 1;
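  /* Worked example: n_insns == 100 gives 100/4 == 25 buckets, already odd;
     n_insns == 40 gives 10, raised to the minimum 11.  */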
2590 n = table->size * sizeof (struct expr *);
703ad42b 2591 table->table = gmalloc (n);
02280659 2592 table->set_p = set_p;
7506f491
DE
2593}
2594
02280659 2595/* Free things allocated by alloc_hash_table. */
7506f491
DE
2596
2597static void
1d088dee 2598free_hash_table (struct hash_table *table)
7506f491 2599{
02280659 2600 free (table->table);
7506f491
DE
2601}
2602
02280659
ZD
2603/* Compute the hash TABLE for doing copy/const propagation or
2604 expression hash table. */
7506f491
DE
2605
2606static void
1d088dee 2607compute_hash_table (struct hash_table *table)
7506f491
DE
2608{
2609 /* Initialize count of number of entries in hash table. */
02280659 2610 table->n_elems = 0;
703ad42b 2611 memset (table->table, 0, table->size * sizeof (struct expr *));
7506f491 2612
02280659 2613 compute_hash_table_work (table);
7506f491
DE
2614}
2615\f
2616/* Expression tracking support. */
2617
02280659 2618/* Lookup pattern PAT in the expression TABLE.
7506f491
DE
2619 The result is a pointer to the table entry, or NULL if not found. */
2620
2621static struct expr *
1d088dee 2622lookup_expr (rtx pat, struct hash_table *table)
7506f491
DE
2623{
2624 int do_not_record_p;
2625 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
02280659 2626 table->size);
7506f491
DE
2627 struct expr *expr;
2628
2629 if (do_not_record_p)
2630 return NULL;
2631
02280659 2632 expr = table->table[hash];
7506f491
DE
2633
2634 while (expr && ! expr_equiv_p (expr->expr, pat))
2635 expr = expr->next_same_hash;
2636
2637 return expr;
2638}
2639
ceda50e9
RH
2640/* Lookup REGNO in the set TABLE. The result is a pointer to the
2641 table entry, or NULL if not found. */
7506f491
DE
2642
2643static struct expr *
1d088dee 2644lookup_set (unsigned int regno, struct hash_table *table)
7506f491 2645{
02280659 2646 unsigned int hash = hash_set (regno, table->size);
7506f491
DE
2647 struct expr *expr;
2648
02280659 2649 expr = table->table[hash];
7506f491 2650
ceda50e9
RH
2651 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2652 expr = expr->next_same_hash;
7506f491
DE
2653
2654 return expr;
2655}
2656
2657/* Return the next entry for REGNO in list EXPR. */
2658
2659static struct expr *
1d088dee 2660next_set (unsigned int regno, struct expr *expr)
7506f491
DE
2661{
2662 do
2663 expr = expr->next_same_hash;
2664 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
c4c81601 2665
7506f491
DE
2666 return expr;
2667}
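/* A sketch of the intended idiom for visiting every recorded SET of REGNO
   (TABLE being whichever set table the caller built):

     for (expr = lookup_set (regno, table); expr != 0;
	  expr = next_set (regno, expr))
       ...

   lookup_set finds the first entry on REGNO's hash chain; next_set skips
   chain entries that merely collided on the hash value.  */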
2668
0fe854a7
RH
2669/* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2670 types may be mixed. */
2671
2672static void
1d088dee 2673free_insn_expr_list_list (rtx *listp)
0fe854a7
RH
2674{
2675 rtx list, next;
2676
2677 for (list = *listp; list ; list = next)
2678 {
2679 next = XEXP (list, 1);
2680 if (GET_CODE (list) == EXPR_LIST)
2681 free_EXPR_LIST_node (list);
2682 else
2683 free_INSN_LIST_node (list);
2684 }
2685
2686 *listp = NULL;
2687}
2688
73991d6a
JH
2689/* Clear canon_modify_mem_list and modify_mem_list tables. */
2690static void
1d088dee 2691clear_modify_mem_tables (void)
73991d6a
JH
2692{
2693 int i;
2694
2695 EXECUTE_IF_SET_IN_BITMAP
0fe854a7
RH
2696 (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
2697 bitmap_clear (modify_mem_list_set);
73991d6a
JH
2698
2699 EXECUTE_IF_SET_IN_BITMAP
2700 (canon_modify_mem_list_set, 0, i,
0fe854a7
RH
2701 free_insn_expr_list_list (canon_modify_mem_list + i));
2702 bitmap_clear (canon_modify_mem_list_set);
73991d6a
JH
2703}
2704
2705/* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
2706
2707static void
1d088dee 2708free_modify_mem_tables (void)
73991d6a
JH
2709{
2710 clear_modify_mem_tables ();
2711 free (modify_mem_list);
2712 free (canon_modify_mem_list);
2713 modify_mem_list = 0;
2714 canon_modify_mem_list = 0;
2715}
2716
7506f491
DE
2717/* Reset tables used to keep track of what's still available [since the
2718 start of the block]. */
2719
2720static void
1d088dee 2721reset_opr_set_tables (void)
7506f491
DE
2722{
2723 /* Maintain a bitmap of which regs have been set since beginning of
2724 the block. */
73991d6a 2725 CLEAR_REG_SET (reg_set_bitmap);
c4c81601 2726
7506f491
DE
2727 /* Also keep a record of the last instruction to modify memory.
2728 For now this is very trivial, we only record whether any memory
2729 location has been modified. */
73991d6a 2730 clear_modify_mem_tables ();
7506f491
DE
2731}
2732
cc2902df 2733/* Return nonzero if the operands of X are not set before INSN in
7506f491
DE
2734 INSN's basic block. */
2735
2736static int
1d088dee 2737oprs_not_set_p (rtx x, rtx insn)
7506f491 2738{
c4c81601 2739 int i, j;
7506f491 2740 enum rtx_code code;
6f7d635c 2741 const char *fmt;
7506f491 2742
7506f491
DE
2743 if (x == 0)
2744 return 1;
2745
2746 code = GET_CODE (x);
2747 switch (code)
2748 {
2749 case PC:
2750 case CC0:
2751 case CONST:
2752 case CONST_INT:
2753 case CONST_DOUBLE:
69ef87e2 2754 case CONST_VECTOR:
7506f491
DE
2755 case SYMBOL_REF:
2756 case LABEL_REF:
2757 case ADDR_VEC:
2758 case ADDR_DIFF_VEC:
2759 return 1;
2760
2761 case MEM:
589005ff 2762 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
e2d2ed72 2763 INSN_CUID (insn), x, 0))
a13d4ebf 2764 return 0;
c4c81601
RK
2765 else
2766 return oprs_not_set_p (XEXP (x, 0), insn);
7506f491
DE
2767
2768 case REG:
73991d6a 2769 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
7506f491
DE
2770
2771 default:
2772 break;
2773 }
2774
c4c81601 2775 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
2776 {
2777 if (fmt[i] == 'e')
2778 {
7506f491
DE
2779 /* If we are about to do the last recursive call
2780 needed at this level, change it into iteration.
2781 This function is called enough to be worth it. */
2782 if (i == 0)
c4c81601
RK
2783 return oprs_not_set_p (XEXP (x, i), insn);
2784
2785 if (! oprs_not_set_p (XEXP (x, i), insn))
7506f491
DE
2786 return 0;
2787 }
2788 else if (fmt[i] == 'E')
c4c81601
RK
2789 for (j = 0; j < XVECLEN (x, i); j++)
2790 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2791 return 0;
7506f491
DE
2792 }
2793
2794 return 1;
2795}
2796
2797/* Mark things set by a CALL. */
2798
2799static void
1d088dee 2800mark_call (rtx insn)
7506f491 2801{
24a28584 2802 if (! CONST_OR_PURE_CALL_P (insn))
a13d4ebf 2803 record_last_mem_set_info (insn);
7506f491
DE
2804}
2805
2806/* Mark things set by a SET. */
2807
2808static void
1d088dee 2809mark_set (rtx pat, rtx insn)
7506f491
DE
2810{
2811 rtx dest = SET_DEST (pat);
2812
2813 while (GET_CODE (dest) == SUBREG
2814 || GET_CODE (dest) == ZERO_EXTRACT
2815 || GET_CODE (dest) == SIGN_EXTRACT
2816 || GET_CODE (dest) == STRICT_LOW_PART)
2817 dest = XEXP (dest, 0);
2818
a13d4ebf 2819 if (GET_CODE (dest) == REG)
73991d6a 2820 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
a13d4ebf
AM
2821 else if (GET_CODE (dest) == MEM)
2822 record_last_mem_set_info (insn);
2823
7506f491 2824 if (GET_CODE (SET_SRC (pat)) == CALL)
b5ce41ff 2825 mark_call (insn);
7506f491
DE
2826}
2827
2828/* Record things set by a CLOBBER. */
2829
2830static void
1d088dee 2831mark_clobber (rtx pat, rtx insn)
7506f491
DE
2832{
2833 rtx clob = XEXP (pat, 0);
2834
2835 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2836 clob = XEXP (clob, 0);
2837
a13d4ebf 2838 if (GET_CODE (clob) == REG)
73991d6a 2839 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
a13d4ebf
AM
2840 else
2841 record_last_mem_set_info (insn);
7506f491
DE
2842}
2843
2844/* Record things set by INSN.
2845 This data is used by oprs_not_set_p. */
2846
2847static void
1d088dee 2848mark_oprs_set (rtx insn)
7506f491
DE
2849{
2850 rtx pat = PATTERN (insn);
c4c81601 2851 int i;
7506f491
DE
2852
2853 if (GET_CODE (pat) == SET)
2854 mark_set (pat, insn);
2855 else if (GET_CODE (pat) == PARALLEL)
c4c81601
RK
2856 for (i = 0; i < XVECLEN (pat, 0); i++)
2857 {
2858 rtx x = XVECEXP (pat, 0, i);
2859
2860 if (GET_CODE (x) == SET)
2861 mark_set (x, insn);
2862 else if (GET_CODE (x) == CLOBBER)
2863 mark_clobber (x, insn);
2864 else if (GET_CODE (x) == CALL)
2865 mark_call (insn);
2866 }
7506f491 2867
7506f491
DE
2868 else if (GET_CODE (pat) == CLOBBER)
2869 mark_clobber (pat, insn);
2870 else if (GET_CODE (pat) == CALL)
b5ce41ff 2871 mark_call (insn);
7506f491 2872}
b5ce41ff 2873
7506f491
DE
2874\f
2875/* Classic GCSE reaching definition support. */
2876
2877/* Allocate reaching def variables. */
2878
2879static void
1d088dee 2880alloc_rd_mem (int n_blocks, int n_insns)
7506f491 2881{
703ad42b 2882 rd_kill = sbitmap_vector_alloc (n_blocks, n_insns);
d55bc081 2883 sbitmap_vector_zero (rd_kill, n_blocks);
7506f491 2884
703ad42b 2885 rd_gen = sbitmap_vector_alloc (n_blocks, n_insns);
d55bc081 2886 sbitmap_vector_zero (rd_gen, n_blocks);
7506f491 2887
703ad42b 2888 reaching_defs = sbitmap_vector_alloc (n_blocks, n_insns);
d55bc081 2889 sbitmap_vector_zero (reaching_defs, n_blocks);
7506f491 2890
703ad42b 2891 rd_out = sbitmap_vector_alloc (n_blocks, n_insns);
d55bc081 2892 sbitmap_vector_zero (rd_out, n_blocks);
7506f491
DE
2893}
2894
2895/* Free reaching def variables. */
2896
2897static void
1d088dee 2898free_rd_mem (void)
7506f491 2899{
5a660bff
DB
2900 sbitmap_vector_free (rd_kill);
2901 sbitmap_vector_free (rd_gen);
2902 sbitmap_vector_free (reaching_defs);
2903 sbitmap_vector_free (rd_out);
7506f491
DE
2904}
2905
c4c81601 2906/* Add INSN to the kills of BB. REGNO, set in BB, is killed by INSN. */
7506f491
DE
2907
2908static void
1d088dee 2909handle_rd_kill_set (rtx insn, int regno, basic_block bb)
7506f491 2910{
c4c81601 2911 struct reg_set *this_reg;
7506f491 2912
c4c81601
RK
 2913 for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg->next)
2914 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
0b17ab2f 2915 SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
7506f491
DE
2916}
2917
7506f491
DE
2918/* Compute the set of kill's for reaching definitions. */
2919
2920static void
1d088dee 2921compute_kill_rd (void)
7506f491 2922{
e0082a72 2923 int cuid;
172890a2
RK
2924 unsigned int regno;
2925 int i;
e0082a72 2926 basic_block bb;
7506f491
DE
2927
2928 /* For each block
 2929 For each set bit in `gen' of the block (i.e. each insn which
ac7c5af5
JL
2930 generates a definition in the block)
2931 Call the reg set by the insn corresponding to that bit regx
2932 Look at the linked list starting at reg_set_table[regx]
2933 For each setting of regx in the linked list, which is not in
2934 this block
6d2f8887 2935 Set the bit in `kill' corresponding to that insn. */
e0082a72 2936 FOR_EACH_BB (bb)
c4c81601 2937 for (cuid = 0; cuid < max_cuid; cuid++)
e0082a72 2938 if (TEST_BIT (rd_gen[bb->index], cuid))
7506f491 2939 {
c4c81601
RK
2940 rtx insn = CUID_INSN (cuid);
2941 rtx pat = PATTERN (insn);
7506f491 2942
c4c81601
RK
2943 if (GET_CODE (insn) == CALL_INSN)
2944 {
2945 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4e2db584 2946 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
e0082a72 2947 handle_rd_kill_set (insn, regno, bb);
c4c81601 2948 }
7506f491 2949
c4c81601
RK
2950 if (GET_CODE (pat) == PARALLEL)
2951 {
2952 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7506f491 2953 {
c4c81601 2954 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
7506f491 2955
c4c81601
RK
2956 if ((code == SET || code == CLOBBER)
2957 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
2958 handle_rd_kill_set (insn,
2959 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
e0082a72 2960 bb);
ac7c5af5 2961 }
ac7c5af5 2962 }
c4c81601
RK
2963 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
2964 /* Each setting of this register outside of this block
2965 must be marked in the set of kills in this block. */
e0082a72 2966 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
7506f491 2967 }
7506f491
DE
2968}
2969
589005ff 2970/* Compute the reaching definitions as in
7506f491
DE
2971 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
2972 Chapter 10. It is the same algorithm as used for computing available
2973 expressions but applied to the gens and kills of reaching definitions. */
2974
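/* In dataflow-equation form, the loop below iterates

     reaching_defs[bb] = UNION over preds p of rd_out[p]
     rd_out[bb]        = rd_gen[bb] | (reaching_defs[bb] & ~rd_kill[bb])

   over all blocks until no rd_out changes.  */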
2975static void
1d088dee 2976compute_rd (void)
7506f491 2977{
e0082a72
ZD
2978 int changed, passes;
2979 basic_block bb;
7506f491 2980
e0082a72
ZD
2981 FOR_EACH_BB (bb)
2982 sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);
7506f491
DE
2983
2984 passes = 0;
2985 changed = 1;
2986 while (changed)
2987 {
2988 changed = 0;
e0082a72 2989 FOR_EACH_BB (bb)
ac7c5af5 2990 {
e0082a72
ZD
2991 sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
2992 changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
2993 reaching_defs[bb->index], rd_kill[bb->index]);
ac7c5af5 2994 }
7506f491
DE
2995 passes++;
2996 }
2997
2998 if (gcse_file)
2999 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
3000}
3001\f
3002/* Classic GCSE available expression support. */
3003
3004/* Allocate memory for available expression computation. */
3005
3006static void
1d088dee 3007alloc_avail_expr_mem (int n_blocks, int n_exprs)
7506f491 3008{
703ad42b 3009 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
d55bc081 3010 sbitmap_vector_zero (ae_kill, n_blocks);
7506f491 3011
703ad42b 3012 ae_gen = sbitmap_vector_alloc (n_blocks, n_exprs);
d55bc081 3013 sbitmap_vector_zero (ae_gen, n_blocks);
7506f491 3014
703ad42b 3015 ae_in = sbitmap_vector_alloc (n_blocks, n_exprs);
d55bc081 3016 sbitmap_vector_zero (ae_in, n_blocks);
7506f491 3017
703ad42b 3018 ae_out = sbitmap_vector_alloc (n_blocks, n_exprs);
d55bc081 3019 sbitmap_vector_zero (ae_out, n_blocks);
7506f491
DE
3020}
3021
3022static void
1d088dee 3023free_avail_expr_mem (void)
7506f491 3024{
5a660bff
DB
3025 sbitmap_vector_free (ae_kill);
3026 sbitmap_vector_free (ae_gen);
3027 sbitmap_vector_free (ae_in);
3028 sbitmap_vector_free (ae_out);
7506f491
DE
3029}
3030
3031/* Compute the set of available expressions generated in each basic block. */
3032
3033static void
1d088dee 3034compute_ae_gen (struct hash_table *expr_hash_table)
7506f491 3035{
2e653e39 3036 unsigned int i;
c4c81601
RK
3037 struct expr *expr;
3038 struct occr *occr;
7506f491
DE
3039
3040 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
3041 This is all we have to do because an expression is not recorded if it
3042 is not available, and the only expressions we want to work with are the
3043 ones that are recorded. */
02280659
ZD
3044 for (i = 0; i < expr_hash_table->size; i++)
3045 for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
c4c81601
RK
3046 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
3047 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
7506f491
DE
3048}
3049
cc2902df 3050/* Return nonzero if expression X is killed in BB. */
7506f491
DE
3051
3052static int
1d088dee 3053expr_killed_p (rtx x, basic_block bb)
7506f491 3054{
c4c81601 3055 int i, j;
7506f491 3056 enum rtx_code code;
6f7d635c 3057 const char *fmt;
7506f491 3058
7506f491
DE
3059 if (x == 0)
3060 return 1;
3061
3062 code = GET_CODE (x);
3063 switch (code)
3064 {
3065 case REG:
0b17ab2f 3066 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
7506f491
DE
3067
3068 case MEM:
a13d4ebf
AM
3069 if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
3070 return 1;
c4c81601
RK
3071 else
3072 return expr_killed_p (XEXP (x, 0), bb);
7506f491
DE
3073
3074 case PC:
3075 case CC0: /*FIXME*/
3076 case CONST:
3077 case CONST_INT:
3078 case CONST_DOUBLE:
69ef87e2 3079 case CONST_VECTOR:
7506f491
DE
3080 case SYMBOL_REF:
3081 case LABEL_REF:
3082 case ADDR_VEC:
3083 case ADDR_DIFF_VEC:
3084 return 0;
3085
3086 default:
3087 break;
3088 }
3089
c4c81601 3090 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
3091 {
3092 if (fmt[i] == 'e')
3093 {
7506f491
DE
3094 /* If we are about to do the last recursive call
3095 needed at this level, change it into iteration.
3096 This function is called enough to be worth it. */
3097 if (i == 0)
c4c81601
RK
3098 return expr_killed_p (XEXP (x, i), bb);
3099 else if (expr_killed_p (XEXP (x, i), bb))
7506f491
DE
3100 return 1;
3101 }
3102 else if (fmt[i] == 'E')
c4c81601
RK
3103 for (j = 0; j < XVECLEN (x, i); j++)
3104 if (expr_killed_p (XVECEXP (x, i, j), bb))
3105 return 1;
7506f491
DE
3106 }
3107
3108 return 0;
3109}
3110
3111/* Compute the set of available expressions killed in each basic block. */
3112
3113static void
1d088dee
AJ
3114compute_ae_kill (sbitmap *ae_gen, sbitmap *ae_kill,
3115 struct hash_table *expr_hash_table)
7506f491 3116{
e0082a72 3117 basic_block bb;
2e653e39 3118 unsigned int i;
c4c81601 3119 struct expr *expr;
7506f491 3120
e0082a72 3121 FOR_EACH_BB (bb)
02280659
ZD
3122 for (i = 0; i < expr_hash_table->size; i++)
3123 for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash)
7506f491 3124 {
c4c81601 3125 /* Skip EXPR if generated in this block. */
e0082a72 3126 if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
c4c81601 3127 continue;
7506f491 3128
e0082a72
ZD
3129 if (expr_killed_p (expr->expr, bb))
3130 SET_BIT (ae_kill[bb->index], expr->bitmap_index);
7506f491 3131 }
7506f491 3132}
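/* ae_gen and ae_kill feed the standard availability equations

     ae_in[bb]  = INTERSECTION over preds p of ae_out[p]
     ae_out[bb] = ae_gen[bb] | (ae_in[bb] & ~ae_kill[bb])

   which are solved by the same kind of iteration as compute_rd above.  */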
7506f491
DE
3133\f
3134/* Actually perform the Classic GCSE optimizations. */
3135
cc2902df 3136/* Return nonzero if occurrence OCCR of expression EXPR reaches block BB.
7506f491 3137
cc2902df 3138 CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself
7506f491
DE
3139 as a positive reach. We want to do this when there are two computations
3140 of the expression in the block.
3141
3142 VISITED is a pointer to a working buffer for tracking which BB's have
3143 been visited. It is NULL for the top-level call.
3144
3145 We treat reaching expressions that go through blocks containing the same
3146 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3147 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3148 2 as not reaching. The intent is to improve the probability of finding
3149 only one reaching expression and to reduce register lifetimes by picking
3150 the closest such expression. */
3151
3152static int
1d088dee
AJ
3153expr_reaches_here_p_work (struct occr *occr, struct expr *expr,
3154 basic_block bb, int check_self_loop, char *visited)
7506f491 3155{
36349f8b 3156 edge pred;
7506f491 3157
e2d2ed72 3158 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
7506f491 3159 {
e2d2ed72 3160 basic_block pred_bb = pred->src;
7506f491 3161
0b17ab2f 3162 if (visited[pred_bb->index])
c4c81601 3163 /* This predecessor has already been visited. Nothing to do. */
7506f491 3164 ;
7506f491 3165 else if (pred_bb == bb)
ac7c5af5 3166 {
7506f491
DE
3167 /* BB loops on itself. */
3168 if (check_self_loop
0b17ab2f
RH
3169 && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
3170 && BLOCK_NUM (occr->insn) == pred_bb->index)
7506f491 3171 return 1;
c4c81601 3172
0b17ab2f 3173 visited[pred_bb->index] = 1;
ac7c5af5 3174 }
c4c81601 3175
7506f491 3176 /* Ignore this predecessor if it kills the expression. */
0b17ab2f
RH
3177 else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
3178 visited[pred_bb->index] = 1;
c4c81601 3179
7506f491 3180 /* Does this predecessor generate this expression? */
0b17ab2f 3181 else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
7506f491
DE
3182 {
3183 /* Is this the occurrence we're looking for?
3184 Note that there's only one generating occurrence per block
3185 so we just need to check the block number. */
0b17ab2f 3186 if (BLOCK_NUM (occr->insn) == pred_bb->index)
7506f491 3187 return 1;
c4c81601 3188
0b17ab2f 3189 visited[pred_bb->index] = 1;
7506f491 3190 }
c4c81601 3191
7506f491
DE
3192 /* Neither gen nor kill. */
3193 else
ac7c5af5 3194 {
0b17ab2f 3195 visited[pred_bb->index] = 1;
589005ff 3196 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
283a2545 3197 visited))
c4c81601 3198
7506f491 3199 return 1;
ac7c5af5 3200 }
7506f491
DE
3201 }
3202
3203 /* All paths have been checked. */
3204 return 0;
3205}
3206
283a2545 3207/* This wrapper for expr_reaches_here_p_work() is to ensure that any
dc297297 3208 memory allocated for that function is freed. */
283a2545
RL
3209
3210static int
1d088dee
AJ
3211expr_reaches_here_p (struct occr *occr, struct expr *expr, basic_block bb,
3212 int check_self_loop)
283a2545
RL
3213{
3214 int rval;
703ad42b 3215 char *visited = xcalloc (last_basic_block, 1);
283a2545 3216
c4c81601 3217 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
589005ff 3218
283a2545 3219 free (visited);
c4c81601 3220 return rval;
283a2545
RL
3221}
3222
7506f491
DE
3223/* Return the instruction that computes EXPR that reaches INSN's basic block.
3224 If there is more than one such instruction, return NULL.
3225
3226 Called only by handle_avail_expr. */
3227
3228static rtx
1d088dee 3229computing_insn (struct expr *expr, rtx insn)
7506f491 3230{
e2d2ed72 3231 basic_block bb = BLOCK_FOR_INSN (insn);
7506f491
DE
3232
3233 if (expr->avail_occr->next == NULL)
589005ff 3234 {
e2d2ed72 3235 if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
c4c81601
RK
3236 /* The available expression is actually itself
3237 (i.e. a loop in the flow graph) so do nothing. */
3238 return NULL;
3239
7506f491
DE
 3240 /* FIXME: This is the case where the pattern was created by
 3241 an earlier substitution. */
3242 return expr->avail_occr->insn;
3243 }
3244 else
3245 {
3246 /* Pattern is computed more than once.
589005ff 3247 Search backwards from this insn to see how many of these
7506f491
DE
3248 computations actually reach this insn. */
3249 struct occr *occr;
3250 rtx insn_computes_expr = NULL;
3251 int can_reach = 0;
3252
3253 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
3254 {
e2d2ed72 3255 if (BLOCK_FOR_INSN (occr->insn) == bb)
7506f491
DE
3256 {
3257 /* The expression is generated in this block.
3258 The only time we care about this is when the expression
3259 is generated later in the block [and thus there's a loop].
3260 We let the normal cse pass handle the other cases. */
c4c81601
RK
3261 if (INSN_CUID (insn) < INSN_CUID (occr->insn)
3262 && expr_reaches_here_p (occr, expr, bb, 1))
7506f491
DE
3263 {
3264 can_reach++;
3265 if (can_reach > 1)
3266 return NULL;
c4c81601 3267
7506f491
DE
3268 insn_computes_expr = occr->insn;
3269 }
3270 }
c4c81601
RK
3271 else if (expr_reaches_here_p (occr, expr, bb, 0))
3272 {
3273 can_reach++;
3274 if (can_reach > 1)
3275 return NULL;
3276
3277 insn_computes_expr = occr->insn;
3278 }
7506f491
DE
3279 }
3280
3281 if (insn_computes_expr == NULL)
3282 abort ();
c4c81601 3283
7506f491
DE
3284 return insn_computes_expr;
3285 }
3286}
3287
cc2902df 3288/* Return nonzero if the definition in DEF_INSN can reach INSN.
7506f491
DE
3289 Only called by can_disregard_other_sets. */
3290
3291static int
1d088dee 3292def_reaches_here_p (rtx insn, rtx def_insn)
7506f491
DE
3293{
3294 rtx reg;
3295
3296 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3297 return 1;
3298
3299 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3300 {
3301 if (INSN_CUID (def_insn) < INSN_CUID (insn))
ac7c5af5 3302 {
7506f491
DE
3303 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3304 return 1;
c4c81601 3305 else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
7506f491
DE
3306 reg = XEXP (PATTERN (def_insn), 0);
3307 else if (GET_CODE (PATTERN (def_insn)) == SET)
3308 reg = SET_DEST (PATTERN (def_insn));
3309 else
3310 abort ();
c4c81601 3311
7506f491
DE
3312 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3313 }
3314 else
3315 return 0;
3316 }
3317
3318 return 0;
3319}
3320
cc2902df 3321/* Return nonzero if *ADDR_THIS_REG can only have one value at INSN. The
c4c81601
RK
3322 value returned is the number of definitions that reach INSN. Returning a
3323 value of zero means that [maybe] more than one definition reaches INSN and
 3324 the caller can't perform whatever optimization it is trying to do; i.e. it
 3325 is always safe to return zero. */
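/* Illustration (insn and register numbers are made up):

     insn 10: (set (reg 70) (const_int 4))
     insn 15: (set (reg 70) (const_int 4))
     insn 20: ... uses (reg 70) ...

   If both definitions reach insn 20 they can still be disregarded,
   since they set (reg 70) to equal values; a reaching definition with
   a different SET_SRC forces a return value of zero.  */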
7506f491
DE
3326
3327static int
1d088dee 3328can_disregard_other_sets (struct reg_set **addr_this_reg, rtx insn, int for_combine)
7506f491
DE
3329{
3330 int number_of_reaching_defs = 0;
c4c81601 3331 struct reg_set *this_reg;
7506f491 3332
c4c81601
RK
3333 for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
3334 if (def_reaches_here_p (insn, this_reg->insn))
3335 {
3336 number_of_reaching_defs++;
3337 /* Ignore parallels for now. */
3338 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3339 return 0;
3340
3341 if (!for_combine
3342 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3343 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3344 SET_SRC (PATTERN (insn)))))
3345 /* A setting of the reg to a different value reaches INSN. */
3346 return 0;
3347
3348 if (number_of_reaching_defs > 1)
3349 {
3350 /* If in this setting the value the register is being set to is
3351 equal to the previous value the register was set to and this
3352 setting reaches the insn we are trying to do the substitution
3353 on then we are ok. */
3354 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
7506f491 3355 return 0;
c4c81601
RK
3356 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3357 SET_SRC (PATTERN (insn))))
3358 return 0;
3359 }
7506f491 3360
589005ff 3361 *addr_this_reg = this_reg;
c4c81601 3362 }
7506f491
DE
3363
3364 return number_of_reaching_defs;
3365}
3366
3367/* Expression computed by insn is available and the substitution is legal,
3368 so try to perform the substitution.
3369
cc2902df 3370 The result is nonzero if any changes were made. */
7506f491
DE
3371
3372static int
1d088dee 3373handle_avail_expr (rtx insn, struct expr *expr)
7506f491 3374{
0631e0bf 3375 rtx pat, insn_computes_expr, expr_set;
7506f491
DE
3376 rtx to;
3377 struct reg_set *this_reg;
3378 int found_setting, use_src;
3379 int changed = 0;
3380
3381 /* We only handle the case where one computation of the expression
3382 reaches this instruction. */
3383 insn_computes_expr = computing_insn (expr, insn);
3384 if (insn_computes_expr == NULL)
3385 return 0;
0631e0bf 3386 expr_set = single_set (insn_computes_expr);
e5396f90
RE
3387 /* The set might be in a parallel with multiple sets; we could
3388 probably handle that, but there's currently no easy way to find
3389 the relevant sub-expression. */
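  /* E.g. (illustrative only) an insn such as

       (parallel [(set (reg 80) (plus (reg 81) (reg 82)))
                  (set (reg 83) (minus (reg 81) (reg 82)))])

     contains two live sets, so single_set returns NULL_RTX for it and
     we give up.  */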
0631e0bf 3390 if (!expr_set)
e5396f90 3391 return 0;
7506f491
DE
3392
3393 found_setting = 0;
3394 use_src = 0;
3395
3396 /* At this point we know only one computation of EXPR outside of this
3397 block reaches this insn. Now try to find a register that the
3398 expression is computed into. */
0631e0bf 3399 if (GET_CODE (SET_SRC (expr_set)) == REG)
7506f491
DE
3400 {
3401 /* This is the case when the available expression that reaches
3402 here has already been handled as an available expression. */
770ae6cc 3403 unsigned int regnum_for_replacing
0631e0bf 3404 = REGNO (SET_SRC (expr_set));
c4c81601 3405
7506f491
DE
3406 /* If the register was created by GCSE we can't use `reg_set_table',
3407 however we know it's set only once. */
3408 if (regnum_for_replacing >= max_gcse_regno
3409 /* If the register the expression is computed into is set only once,
3410 or only one set reaches this insn, we can use it. */
3411 || (((this_reg = reg_set_table[regnum_for_replacing]),
3412 this_reg->next == NULL)
3413 || can_disregard_other_sets (&this_reg, insn, 0)))
8e42ace1
KH
3414 {
3415 use_src = 1;
3416 found_setting = 1;
3417 }
7506f491
DE
3418 }
3419
3420 if (!found_setting)
3421 {
770ae6cc 3422 unsigned int regnum_for_replacing
0631e0bf 3423 = REGNO (SET_DEST (expr_set));
c4c81601 3424
7506f491
DE
3425 /* This shouldn't happen. */
3426 if (regnum_for_replacing >= max_gcse_regno)
3427 abort ();
c4c81601 3428
7506f491 3429 this_reg = reg_set_table[regnum_for_replacing];
c4c81601 3430
7506f491
DE
3431 /* If the register the expression is computed into is set only once,
3432 or only one set reaches this insn, use it. */
3433 if (this_reg->next == NULL
3434 || can_disregard_other_sets (&this_reg, insn, 0))
3435 found_setting = 1;
3436 }
3437
3438 if (found_setting)
3439 {
3440 pat = PATTERN (insn);
3441 if (use_src)
0631e0bf 3442 to = SET_SRC (expr_set);
7506f491 3443 else
0631e0bf 3444 to = SET_DEST (expr_set);
7506f491
DE
3445 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3446
3447 /* We should be able to ignore the return code from validate_change but
3448 to play it safe we check. */
3449 if (changed)
3450 {
3451 gcse_subst_count++;
3452 if (gcse_file != NULL)
3453 {
c4c81601
RK
3454 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
3455 INSN_UID (insn));
3456 fprintf (gcse_file, " reg %d %s insn %d\n",
3457 REGNO (to), use_src ? "from" : "set in",
7506f491
DE
3458 INSN_UID (insn_computes_expr));
3459 }
7506f491
DE
3460 }
3461 }
c4c81601 3462
7506f491
DE
3463 /* The register that the expr is computed into is set more than once. */
 3464 else if (1 /*expensive_op(this_pattern->op) && do_expensive_gcse)*/)
3465 {
 3466 /* Insert an insn after INSN_COMPUTES_EXPR that copies the reg set
 3467 there (call it REGB) into a new pseudo register REGN. From that
 3468 insn until the end of the basic block, or until REGB is set again,
 3469 replace all uses of REGB with REGN. */
3470 rtx new_insn;
3471
0631e0bf 3472 to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));
7506f491
DE
3473
3474 /* Generate the new insn. */
3475 /* ??? If the change fails, we return 0, even though we created
3476 an insn. I think this is ok. */
9e6a5703
JC
3477 new_insn
3478 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
0631e0bf 3479 SET_DEST (expr_set)),
c4c81601
RK
3480 insn_computes_expr);
3481
7506f491
DE
3482 /* Keep register set table up to date. */
3483 record_one_set (REGNO (to), new_insn);
3484
3485 gcse_create_count++;
3486 if (gcse_file != NULL)
ac7c5af5 3487 {
c4c81601 3488 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
7506f491 3489 INSN_UID (NEXT_INSN (insn_computes_expr)),
c4c81601
RK
3490 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
3491 fprintf (gcse_file, ", computed in insn %d,\n",
7506f491 3492 INSN_UID (insn_computes_expr));
c4c81601
RK
3493 fprintf (gcse_file, " into newly allocated reg %d\n",
3494 REGNO (to));
ac7c5af5 3495 }
7506f491
DE
3496
3497 pat = PATTERN (insn);
3498
3499 /* Do register replacement for INSN. */
3500 changed = validate_change (insn, &SET_SRC (pat),
c4c81601
RK
3501 SET_DEST (PATTERN
3502 (NEXT_INSN (insn_computes_expr))),
7506f491
DE
3503 0);
3504
3505 /* We should be able to ignore the return code from validate_change but
3506 to play it safe we check. */
3507 if (changed)
3508 {
3509 gcse_subst_count++;
3510 if (gcse_file != NULL)
3511 {
c4c81601
RK
3512 fprintf (gcse_file,
3513 "GCSE: Replacing the source in insn %d with reg %d ",
7506f491 3514 INSN_UID (insn),
c4c81601
RK
3515 REGNO (SET_DEST (PATTERN (NEXT_INSN
3516 (insn_computes_expr)))));
3517 fprintf (gcse_file, "set in insn %d\n",
589005ff 3518 INSN_UID (insn_computes_expr));
7506f491 3519 }
7506f491
DE
3520 }
3521 }
3522
3523 return changed;
3524}
3525
c4c81601
RK
3526/* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3527 the dataflow analysis has been done.
7506f491 3528
cc2902df 3529 The result is nonzero if a change was made. */
7506f491
DE
3530
3531static int
1d088dee 3532classic_gcse (void)
7506f491 3533{
e0082a72 3534 int changed;
7506f491 3535 rtx insn;
e0082a72 3536 basic_block bb;
7506f491
DE
3537
3538 /* Note we start at block 1. */
3539
e0082a72
ZD
3540 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3541 return 0;
3542
7506f491 3543 changed = 0;
e0082a72 3544 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
7506f491
DE
3545 {
3546 /* Reset tables used to keep track of what's still valid [since the
3547 start of the block]. */
3548 reset_opr_set_tables ();
3549
a813c111
SB
3550 for (insn = BB_HEAD (bb);
3551 insn != NULL && insn != NEXT_INSN (BB_END (bb));
7506f491
DE
3552 insn = NEXT_INSN (insn))
3553 {
3554 /* Is insn of form (set (pseudo-reg) ...)? */
7506f491
DE
3555 if (GET_CODE (insn) == INSN
3556 && GET_CODE (PATTERN (insn)) == SET
3557 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3558 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3559 {
3560 rtx pat = PATTERN (insn);
3561 rtx src = SET_SRC (pat);
3562 struct expr *expr;
3563
3564 if (want_to_gcse_p (src)
3565 /* Is the expression recorded? */
02280659 3566 && ((expr = lookup_expr (src, &expr_hash_table)) != NULL)
7506f491
DE
3567 /* Is the expression available [at the start of the
3568 block]? */
e0082a72 3569 && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
7506f491
DE
3570 /* Are the operands unchanged since the start of the
3571 block? */
3572 && oprs_not_set_p (src, insn))
3573 changed |= handle_avail_expr (insn, expr);
3574 }
3575
3576 /* Keep track of everything modified by this insn. */
3577 /* ??? Need to be careful w.r.t. mods done to INSN. */
2c3c49de 3578 if (INSN_P (insn))
7506f491 3579 mark_oprs_set (insn);
ac7c5af5 3580 }
7506f491
DE
3581 }
3582
3583 return changed;
3584}
3585
3586/* Top level routine to perform one classic GCSE pass.
3587
cc2902df 3588 Return nonzero if a change was made. */
7506f491
DE
3589
3590static int
1d088dee 3591one_classic_gcse_pass (int pass)
7506f491
DE
3592{
3593 int changed = 0;
3594
3595 gcse_subst_count = 0;
3596 gcse_create_count = 0;
3597
02280659 3598 alloc_hash_table (max_cuid, &expr_hash_table, 0);
d55bc081 3599 alloc_rd_mem (last_basic_block, max_cuid);
02280659 3600 compute_hash_table (&expr_hash_table);
7506f491 3601 if (gcse_file)
02280659 3602 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
c4c81601 3603
02280659 3604 if (expr_hash_table.n_elems > 0)
7506f491
DE
3605 {
3606 compute_kill_rd ();
3607 compute_rd ();
02280659
ZD
3608 alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems);
3609 compute_ae_gen (&expr_hash_table);
3610 compute_ae_kill (ae_gen, ae_kill, &expr_hash_table);
bd0eaec2 3611 compute_available (ae_gen, ae_kill, ae_out, ae_in);
7506f491
DE
3612 changed = classic_gcse ();
3613 free_avail_expr_mem ();
3614 }
c4c81601 3615
7506f491 3616 free_rd_mem ();
02280659 3617 free_hash_table (&expr_hash_table);
7506f491
DE
3618
3619 if (gcse_file)
3620 {
3621 fprintf (gcse_file, "\n");
c4c81601 3622 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
faed5cc3 3623 current_function_name (), pass, bytes_used, gcse_subst_count);
c4c81601 3624 fprintf (gcse_file, "%d insns created\n", gcse_create_count);
7506f491
DE
3625 }
3626
3627 return changed;
3628}
3629\f
3630/* Compute copy/constant propagation working variables. */
3631
3632/* Local properties of assignments. */
7506f491
DE
3633static sbitmap *cprop_pavloc;
3634static sbitmap *cprop_absaltered;
3635
3636/* Global properties of assignments (computed from the local properties). */
7506f491
DE
3637static sbitmap *cprop_avin;
3638static sbitmap *cprop_avout;
3639
c4c81601
RK
3640/* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
3641 basic blocks. N_SETS is the number of sets. */
7506f491
DE
3642
3643static void
1d088dee 3644alloc_cprop_mem (int n_blocks, int n_sets)
7506f491
DE
3645{
3646 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3647 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3648
3649 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3650 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3651}
3652
3653/* Free vars used by copy/const propagation. */
3654
3655static void
1d088dee 3656free_cprop_mem (void)
7506f491 3657{
5a660bff
DB
3658 sbitmap_vector_free (cprop_pavloc);
3659 sbitmap_vector_free (cprop_absaltered);
3660 sbitmap_vector_free (cprop_avin);
3661 sbitmap_vector_free (cprop_avout);
7506f491
DE
3662}
3663
c4c81601
RK
3664/* For each block, compute whether X is transparent. X is either an
3665 expression or an assignment [though we don't care which, for this context
3666 an assignment is treated as an expression]. For each block where an
3667 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
3668 bit in BMAP. */
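/* For instance (register and index numbers are illustrative): if X is
   (plus (reg 65) (reg 66)) and some block contains a set of (reg 66),
   then X is not transparent in that block, so bit INDX is set
   (SET_P == 1) or reset (SET_P == 0) in that block's bitmap.  */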
7506f491
DE
3669
3670static void
1d088dee 3671compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
7506f491 3672{
e0082a72
ZD
3673 int i, j;
3674 basic_block bb;
7506f491 3675 enum rtx_code code;
c4c81601 3676 reg_set *r;
6f7d635c 3677 const char *fmt;
7506f491 3678
c4c81601
RK
3679 /* repeat is used to turn tail-recursion into iteration since GCC
3680 can't do it when there's no return value. */
7506f491
DE
3681 repeat:
3682
3683 if (x == 0)
3684 return;
3685
3686 code = GET_CODE (x);
3687 switch (code)
3688 {
3689 case REG:
c4c81601
RK
3690 if (set_p)
3691 {
3692 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3693 {
e0082a72
ZD
3694 FOR_EACH_BB (bb)
3695 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3696 SET_BIT (bmap[bb->index], indx);
c4c81601
RK
3697 }
3698 else
3699 {
3700 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3701 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3702 }
3703 }
3704 else
3705 {
3706 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3707 {
e0082a72
ZD
3708 FOR_EACH_BB (bb)
3709 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3710 RESET_BIT (bmap[bb->index], indx);
c4c81601
RK
3711 }
3712 else
3713 {
3714 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3715 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3716 }
3717 }
7506f491 3718
c4c81601 3719 return;
7506f491
DE
3720
3721 case MEM:
e0082a72 3722 FOR_EACH_BB (bb)
a13d4ebf 3723 {
e0082a72 3724 rtx list_entry = canon_modify_mem_list[bb->index];
a13d4ebf
AM
3725
3726 while (list_entry)
3727 {
3728 rtx dest, dest_addr;
3729
3730 if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
3731 {
3732 if (set_p)
e0082a72 3733 SET_BIT (bmap[bb->index], indx);
a13d4ebf 3734 else
e0082a72 3735 RESET_BIT (bmap[bb->index], indx);
a13d4ebf
AM
3736 break;
3737 }
3738 /* LIST_ENTRY must be an INSN of some kind that sets memory.
3739 Examine each hunk of memory that is modified. */
3740
3741 dest = XEXP (list_entry, 0);
3742 list_entry = XEXP (list_entry, 1);
3743 dest_addr = XEXP (list_entry, 0);
589005ff 3744
a13d4ebf
AM
3745 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
3746 x, rtx_addr_varies_p))
3747 {
3748 if (set_p)
e0082a72 3749 SET_BIT (bmap[bb->index], indx);
a13d4ebf 3750 else
e0082a72 3751 RESET_BIT (bmap[bb->index], indx);
a13d4ebf
AM
3752 break;
3753 }
3754 list_entry = XEXP (list_entry, 1);
3755 }
3756 }
c4c81601 3757
7506f491
DE
3758 x = XEXP (x, 0);
3759 goto repeat;
3760
3761 case PC:
3762 case CC0: /*FIXME*/
3763 case CONST:
3764 case CONST_INT:
3765 case CONST_DOUBLE:
69ef87e2 3766 case CONST_VECTOR:
7506f491
DE
3767 case SYMBOL_REF:
3768 case LABEL_REF:
3769 case ADDR_VEC:
3770 case ADDR_DIFF_VEC:
3771 return;
3772
3773 default:
3774 break;
3775 }
3776
c4c81601 3777 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
3778 {
3779 if (fmt[i] == 'e')
3780 {
7506f491
DE
3781 /* If we are about to do the last recursive call
3782 needed at this level, change it into iteration.
3783 This function is called enough to be worth it. */
3784 if (i == 0)
3785 {
c4c81601 3786 x = XEXP (x, i);
7506f491
DE
3787 goto repeat;
3788 }
c4c81601
RK
3789
3790 compute_transp (XEXP (x, i), indx, bmap, set_p);
7506f491
DE
3791 }
3792 else if (fmt[i] == 'E')
c4c81601
RK
3793 for (j = 0; j < XVECLEN (x, i); j++)
3794 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
7506f491
DE
3795 }
3796}
3797
7506f491
DE
3798/* Top level routine to do the dataflow analysis needed by copy/const
3799 propagation. */
3800
3801static void
1d088dee 3802compute_cprop_data (void)
7506f491 3803{
02280659 3804 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
ce724250
JL
3805 compute_available (cprop_pavloc, cprop_absaltered,
3806 cprop_avout, cprop_avin);
7506f491
DE
3807}
3808\f
3809/* Copy/constant propagation. */
3810
7506f491
DE
3811/* Maximum number of register uses in an insn that we handle. */
3812#define MAX_USES 8
3813
3814/* Table of uses found in an insn.
3815 Allocated statically to avoid alloc/free complexity and overhead. */
3816static struct reg_use reg_use_table[MAX_USES];
3817
3818/* Index into `reg_use_table' while building it. */
3819static int reg_use_count;
3820
c4c81601
RK
3821/* Set up a list of register numbers used in INSN. The found uses are stored
3822 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
3823 and contains the number of uses in the table upon exit.
7506f491 3824
c4c81601
RK
3825 ??? If a register appears multiple times we will record it multiple times.
3826 This doesn't hurt anything but it will slow things down. */
7506f491
DE
3827
3828static void
1d088dee 3829find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
7506f491 3830{
c4c81601 3831 int i, j;
7506f491 3832 enum rtx_code code;
6f7d635c 3833 const char *fmt;
9e71c818 3834 rtx x = *xptr;
7506f491 3835
c4c81601
RK
3836 /* repeat is used to turn tail-recursion into iteration since GCC
3837 can't do it when there's no return value. */
7506f491 3838 repeat:
7506f491
DE
3839 if (x == 0)
3840 return;
3841
3842 code = GET_CODE (x);
9e71c818 3843 if (REG_P (x))
7506f491 3844 {
7506f491
DE
3845 if (reg_use_count == MAX_USES)
3846 return;
c4c81601 3847
7506f491
DE
3848 reg_use_table[reg_use_count].reg_rtx = x;
3849 reg_use_count++;
7506f491
DE
3850 }
3851
3852 /* Recursively scan the operands of this expression. */
3853
c4c81601 3854 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
3855 {
3856 if (fmt[i] == 'e')
3857 {
3858 /* If we are about to do the last recursive call
3859 needed at this level, change it into iteration.
3860 This function is called enough to be worth it. */
3861 if (i == 0)
3862 {
3863 x = XEXP (x, 0);
3864 goto repeat;
3865 }
c4c81601 3866
9e71c818 3867 find_used_regs (&XEXP (x, i), data);
7506f491
DE
3868 }
3869 else if (fmt[i] == 'E')
c4c81601 3870 for (j = 0; j < XVECLEN (x, i); j++)
9e71c818 3871 find_used_regs (&XVECEXP (x, i, j), data);
7506f491
DE
3872 }
3873}
3874
3875/* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
cc2902df 3876 Returns nonzero if successful. */
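/* For example (illustrative rtx): replacing FROM = (reg 100) with
   TO = (const_int 4) in

     (set (reg 101) (plus (reg 100) (const_int 1)))

   first yields (plus (const_int 4) (const_int 1)); since TO is a
   constant, the simplification below may then fold SET_SRC down to
   (const_int 5).  */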
7506f491
DE
3877
3878static int
1d088dee 3879try_replace_reg (rtx from, rtx to, rtx insn)
7506f491 3880{
172890a2 3881 rtx note = find_reg_equal_equiv_note (insn);
fb0c0a12 3882 rtx src = 0;
172890a2
RK
3883 int success = 0;
3884 rtx set = single_set (insn);
833fc3ad 3885
2b773ee2
JH
3886 validate_replace_src_group (from, to, insn);
3887 if (num_changes_pending () && apply_change_group ())
3888 success = 1;
9e71c818 3889
9feff114
JDA
3890 /* Try to simplify SET_SRC if we have substituted a constant. */
3891 if (success && set && CONSTANT_P (to))
3892 {
3893 src = simplify_rtx (SET_SRC (set));
3894
3895 if (src)
3896 validate_change (insn, &SET_SRC (set), src, 0);
3897 }
3898
ed8395a0
JZ
3899 /* If there is already a NOTE, update the expression in it with our
3900 replacement. */
3901 if (note != 0)
3902 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
3903
f305679f 3904 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
833fc3ad 3905 {
f305679f
JH
3906 /* If above failed and this is a single set, try to simplify the source of
3907 the set given our substitution. We could perhaps try this for multiple
3908 SETs, but it probably won't buy us anything. */
172890a2
RK
3909 src = simplify_replace_rtx (SET_SRC (set), from, to);
3910
9e71c818
JH
3911 if (!rtx_equal_p (src, SET_SRC (set))
3912 && validate_change (insn, &SET_SRC (set), src, 0))
172890a2 3913 success = 1;
833fc3ad 3914
bbd288a4
FS
3915 /* If we've failed to do replacement, have a single SET, don't already
3916 have a note, and have no special SET, add a REG_EQUAL note to not
3917 lose information. */
3918 if (!success && note == 0 && set != 0
3919 && GET_CODE (XEXP (set, 0)) != ZERO_EXTRACT
3920 && GET_CODE (XEXP (set, 0)) != SIGN_EXTRACT)
f305679f
JH
3921 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
3922 }
e251e2a2 3923
172890a2
RK
 3924 /* REG_EQUAL may get simplified into a register.
 3925 We don't allow that. Remove that note. This case ought
fbe5a4a6 3926 not to happen, because earlier code ought to have synthesized a
172890a2
RK
 3927 reg-reg move, but be on the safe side. */
3928 if (note && REG_P (XEXP (note, 0)))
3929 remove_note (insn, note);
833fc3ad 3930
833fc3ad
JH
3931 return success;
3932}
c4c81601
RK
3933
 3934 /* Find a set of register REGNO that is available on entry to INSN's
 3935 block. Returns NULL if no such set is found. */
7506f491
DE
3936
3937static struct expr *
1d088dee 3938find_avail_set (int regno, rtx insn)
7506f491 3939{
cafba495
BS
3940 /* SET1 contains the last set found that can be returned to the caller for
3941 use in a substitution. */
3942 struct expr *set1 = 0;
589005ff 3943
cafba495
BS
3944 /* Loops are not possible here. To get a loop we would need two sets
3945 available at the start of the block containing INSN. ie we would
3946 need two sets like this available at the start of the block:
3947
3948 (set (reg X) (reg Y))
3949 (set (reg Y) (reg X))
3950
 3951 This cannot happen since the set of (reg Y) would have killed the
3952 set of (reg X) making it unavailable at the start of this block. */
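  /* An illustration of the chain following done below (register
     numbers are made up):

       (set (reg 70) (const_int 9))
       (set (reg 71) (reg 70))
       insn: ... uses (reg 71) ...

     Starting from REGNO 71 we record the available copy from (reg 70),
     then iterate with REGNO 70 and find the constant set, which is
     what SET1 finally holds.  */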
3953 while (1)
8e42ace1 3954 {
cafba495 3955 rtx src;
ceda50e9 3956 struct expr *set = lookup_set (regno, &set_hash_table);
cafba495
BS
3957
3958 /* Find a set that is available at the start of the block
3959 which contains INSN. */
3960 while (set)
3961 {
3962 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
3963 break;
3964 set = next_set (regno, set);
3965 }
7506f491 3966
cafba495
BS
3967 /* If no available set was found we've reached the end of the
3968 (possibly empty) copy chain. */
3969 if (set == 0)
589005ff 3970 break;
cafba495
BS
3971
3972 if (GET_CODE (set->expr) != SET)
3973 abort ();
3974
3975 src = SET_SRC (set->expr);
3976
3977 /* We know the set is available.
3978 Now check that SRC is ANTLOC (i.e. none of the source operands
589005ff 3979 have changed since the start of the block).
cafba495
BS
3980
3981 If the source operand changed, we may still use it for the next
3982 iteration of this loop, but we may not use it for substitutions. */
c4c81601 3983
6b2d1c9e 3984 if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
cafba495
BS
3985 set1 = set;
3986
3987 /* If the source of the set is anything except a register, then
3988 we have reached the end of the copy chain. */
3989 if (GET_CODE (src) != REG)
7506f491 3990 break;
7506f491 3991
cafba495
BS
3992 /* Follow the copy chain, ie start another iteration of the loop
3993 and see if we have an available copy into SRC. */
3994 regno = REGNO (src);
8e42ace1 3995 }
cafba495
BS
3996
3997 /* SET1 holds the last set that was available and anticipatable at
3998 INSN. */
3999 return set1;
7506f491
DE
4000}
4001
abd535b6 4002/* Subroutine of cprop_insn that tries to propagate constants into
0e3f0221 4003 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
fbe5a4a6 4004 it is the instruction that immediately precedes JUMP, and must be a
818b6b7f 4005 single SET of a register. FROM is what we will try to replace,
0e3f0221 4006 SRC is the constant we will try to substitute for it. Returns nonzero
589005ff 4007 if a change was made. */
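/* A sketch of the transformation (all rtx are illustrative):

     setcc: (set (reg 100) (lt (reg 70) (const_int 0)))
     jump:  (set (pc) (if_then_else (ne (reg 100) (const_int 0))
                                    (label_ref 23) (pc)))

   With FROM = (reg 70) and SRC = (const_int 1), substituting the setcc
   condition into the jump and then SRC for FROM folds the condition to
   false; SET_SRC simplifies to (pc) and the jump is deleted.  */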
c4c81601 4008
abd535b6 4009static int
1d088dee 4010cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
abd535b6 4011{
bc6688b4 4012 rtx new, set_src, note_src;
0e3f0221 4013 rtx set = pc_set (jump);
bc6688b4 4014 rtx note = find_reg_equal_equiv_note (jump);
0e3f0221 4015
bc6688b4
RS
4016 if (note)
4017 {
4018 note_src = XEXP (note, 0);
4019 if (GET_CODE (note_src) == EXPR_LIST)
4020 note_src = NULL_RTX;
4021 }
4022 else note_src = NULL_RTX;
4023
4024 /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
4025 set_src = note_src ? note_src : SET_SRC (set);
4026
4027 /* First substitute the SETCC condition into the JUMP instruction,
 4028 then substitute the given values into this expanded JUMP. */
4029 if (setcc != NULL_RTX
48ddd46c
JH
4030 && !modified_between_p (from, setcc, jump)
4031 && !modified_between_p (src, setcc, jump))
b2f02503 4032 {
bc6688b4 4033 rtx setcc_src;
b2f02503 4034 rtx setcc_set = single_set (setcc);
bc6688b4
RS
4035 rtx setcc_note = find_reg_equal_equiv_note (setcc);
4036 setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
4037 ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
4038 set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
4039 setcc_src);
b2f02503 4040 }
0e3f0221 4041 else
bc6688b4 4042 setcc = NULL_RTX;
0e3f0221 4043
bc6688b4 4044 new = simplify_replace_rtx (set_src, from, src);
abd535b6 4045
bc6688b4
RS
4046 /* If no simplification can be made, then try the next register. */
4047 if (rtx_equal_p (new, SET_SRC (set)))
9e48c409 4048 return 0;
589005ff 4049
7d5ab30e 4050 /* If this is now a no-op, delete it; otherwise it must be a valid insn. */
172890a2 4051 if (new == pc_rtx)
0e3f0221 4052 delete_insn (jump);
7d5ab30e 4053 else
abd535b6 4054 {
48ddd46c
JH
 4055 /* Ensure the value computed inside the jump insn is equivalent
 4056 to the one computed by setcc. */
bc6688b4 4057 if (setcc && modified_in_p (new, setcc))
48ddd46c 4058 return 0;
0e3f0221 4059 if (! validate_change (jump, &SET_SRC (set), new, 0))
bc6688b4
RS
4060 {
4061 /* When (some) constants are not valid in a comparison, and there
4062 are two registers to be replaced by constants before the entire
4063 comparison can be folded into a constant, we need to keep
4064 intermediate information in REG_EQUAL notes. For targets with
4065 separate compare insns, such notes are added by try_replace_reg.
4066 When we have a combined compare-and-branch instruction, however,
4067 we need to attach a note to the branch itself to make this
4068 optimization work. */
4069
4070 if (!rtx_equal_p (new, note_src))
4071 set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
4072 return 0;
4073 }
4074
4075 /* Remove REG_EQUAL note after simplification. */
4076 if (note_src)
4077 remove_note (jump, note);
abd535b6 4078
7d5ab30e
JH
4079 /* If this has turned into an unconditional jump,
4080 then put a barrier after it so that the unreachable
4081 code will be deleted. */
4082 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
0e3f0221 4083 emit_barrier_after (jump);
7d5ab30e 4084 }
abd535b6 4085
0e3f0221
RS
4086#ifdef HAVE_cc0
4087 /* Delete the cc0 setter. */
818b6b7f 4088 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
0e3f0221
RS
4089 delete_insn (setcc);
4090#endif
4091
172890a2 4092 run_jump_opt_after_gcse = 1;
c4c81601 4093
172890a2
RK
4094 const_prop_count++;
4095 if (gcse_file != NULL)
4096 {
4097 fprintf (gcse_file,
818b6b7f 4098 "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
0e3f0221 4099 REGNO (from), INSN_UID (jump));
172890a2
RK
4100 print_rtl (gcse_file, src);
4101 fprintf (gcse_file, "\n");
abd535b6 4102 }
0005550b 4103 purge_dead_edges (bb);
172890a2
RK
4104
4105 return 1;
abd535b6
BS
4106}
4107
ae860ff7 4108static bool
1d088dee 4109constprop_register (rtx insn, rtx from, rtx to, int alter_jumps)
ae860ff7
JH
4110{
4111 rtx sset;
4112
4113 /* Check for reg or cc0 setting instructions followed by
4114 conditional branch instructions first. */
4115 if (alter_jumps
4116 && (sset = single_set (insn)) != NULL
244d05fb 4117 && NEXT_INSN (insn)
ae860ff7
JH
4118 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
4119 {
4120 rtx dest = SET_DEST (sset);
4121 if ((REG_P (dest) || CC0_P (dest))
4122 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
4123 return 1;
4124 }
4125
4126 /* Handle normal insns next. */
4127 if (GET_CODE (insn) == INSN
4128 && try_replace_reg (from, to, insn))
4129 return 1;
4130
4131 /* Try to propagate a CONST_INT into a conditional jump.
 4132 We're pretty specific about what we will handle in this
 4133 code; we can extend it as necessary over time.
4134
4135 Right now the insn in question must look like
4136 (set (pc) (if_then_else ...)) */
4137 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
4138 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
4139 return 0;
4140}
4141
7506f491 4142/* Perform constant and copy propagation on INSN.
cc2902df 4143 The result is nonzero if a change was made. */
7506f491
DE
4144
4145static int
1d088dee 4146cprop_insn (rtx insn, int alter_jumps)
7506f491
DE
4147{
4148 struct reg_use *reg_used;
4149 int changed = 0;
833fc3ad 4150 rtx note;
7506f491 4151
9e71c818 4152 if (!INSN_P (insn))
7506f491
DE
4153 return 0;
4154
4155 reg_use_count = 0;
9e71c818 4156 note_uses (&PATTERN (insn), find_used_regs, NULL);
589005ff 4157
172890a2 4158 note = find_reg_equal_equiv_note (insn);
833fc3ad 4159
dc297297 4160 /* We may win even when propagating constants into notes. */
833fc3ad 4161 if (note)
9e71c818 4162 find_used_regs (&XEXP (note, 0), NULL);
7506f491 4163
c4c81601
RK
4164 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4165 reg_used++, reg_use_count--)
7506f491 4166 {
770ae6cc 4167 unsigned int regno = REGNO (reg_used->reg_rtx);
7506f491
DE
4168 rtx pat, src;
4169 struct expr *set;
7506f491
DE
4170
4171 /* Ignore registers created by GCSE.
dc297297 4172 We do this because ... */
7506f491
DE
4173 if (regno >= max_gcse_regno)
4174 continue;
4175
4176 /* If the register has already been set in this block, there's
4177 nothing we can do. */
4178 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
4179 continue;
4180
4181 /* Find an assignment that sets reg_used and is available
4182 at the start of the block. */
4183 set = find_avail_set (regno, insn);
4184 if (! set)
4185 continue;
589005ff 4186
7506f491
DE
4187 pat = set->expr;
4188 /* ??? We might be able to handle PARALLELs. Later. */
4189 if (GET_CODE (pat) != SET)
4190 abort ();
c4c81601 4191
7506f491
DE
4192 src = SET_SRC (pat);
4193
e78d9500 4194 /* Constant propagation. */
6b2d1c9e 4195 if (gcse_constant_p (src))
7506f491 4196 {
ae860ff7 4197 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
7506f491
DE
4198 {
4199 changed = 1;
4200 const_prop_count++;
4201 if (gcse_file != NULL)
4202 {
ae860ff7
JH
4203 fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
4204 fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
e78d9500 4205 print_rtl (gcse_file, src);
7506f491
DE
4206 fprintf (gcse_file, "\n");
4207 }
bc6688b4
RS
4208 if (INSN_DELETED_P (insn))
4209 return 1;
7506f491
DE
4210 }
4211 }
4212 else if (GET_CODE (src) == REG
4213 && REGNO (src) >= FIRST_PSEUDO_REGISTER
4214 && REGNO (src) != regno)
4215 {
cafba495 4216 if (try_replace_reg (reg_used->reg_rtx, src, insn))
7506f491 4217 {
cafba495
BS
4218 changed = 1;
4219 copy_prop_count++;
4220 if (gcse_file != NULL)
7506f491 4221 {
ae860ff7 4222 fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
c4c81601
RK
4223 regno, INSN_UID (insn));
4224 fprintf (gcse_file, " with reg %d\n", REGNO (src));
7506f491 4225 }
cafba495
BS
4226
4227 /* The original insn setting reg_used may or may not now be
4228 deletable. We leave the deletion to flow. */
4229 /* FIXME: If it turns out that the insn isn't deletable,
4230 then we may have unnecessarily extended register lifetimes
4231 and made things worse. */
7506f491
DE
4232 }
4233 }
4234 }
4235
4236 return changed;
4237}
4238
710ee3ed
RH
4239/* Like find_used_regs, but avoid recording uses that appear in
4240 input-output contexts such as zero_extract or pre_dec. This
4241 restricts the cases we consider to those for which local cprop
4242 can legitimately make replacements. */
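/* E.g. in (illustrative)

     (set (zero_extract (reg 70) (const_int 8) (const_int 0))
          (const_int 5))

   (reg 70) is both read and written, so substituting a constant for it
   would be invalid; such uses are therefore not recorded.  */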
4243
4244static void
1d088dee 4245local_cprop_find_used_regs (rtx *xptr, void *data)
710ee3ed
RH
4246{
4247 rtx x = *xptr;
4248
4249 if (x == 0)
4250 return;
4251
4252 switch (GET_CODE (x))
4253 {
4254 case ZERO_EXTRACT:
4255 case SIGN_EXTRACT:
4256 case STRICT_LOW_PART:
4257 return;
4258
4259 case PRE_DEC:
4260 case PRE_INC:
4261 case POST_DEC:
4262 case POST_INC:
4263 case PRE_MODIFY:
4264 case POST_MODIFY:
4265 /* Can only legitimately appear this early in the context of
4266 stack pushes for function arguments, but handle all of the
4267 codes nonetheless. */
4268 return;
4269
4270 case SUBREG:
4271 /* Setting a subreg of a register larger than word_mode leaves
4272 the non-written words unchanged. */
4273 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
4274 return;
4275 break;
4276
4277 default:
4278 break;
4279 }
4280
4281 find_used_regs (xptr, data);
4282}
1d088dee 4283
8ba46434
R
4284/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4285 their REG_EQUAL notes need updating. */
e197b6fc 4286
ae860ff7 4287static bool
1d088dee 4288do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp)
ae860ff7
JH
4289{
4290 rtx newreg = NULL, newcnst = NULL;
4291
e197b6fc
RH
4292 /* Rule out USE instructions and ASM statements as we don't want to
4293 change the hard registers mentioned. */
ae860ff7
JH
4294 if (GET_CODE (x) == REG
4295 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
e197b6fc
RH
4296 || (GET_CODE (PATTERN (insn)) != USE
4297 && asm_noperands (PATTERN (insn)) < 0)))
ae860ff7
JH
4298 {
4299 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
4300 struct elt_loc_list *l;
4301
4302 if (!val)
4303 return false;
4304 for (l = val->locs; l; l = l->next)
4305 {
4306 rtx this_rtx = l->loc;
46690369
JH
4307 rtx note;
4308
9635cfad
JH
4309 if (l->in_libcall)
4310 continue;
4311
6b2d1c9e 4312 if (gcse_constant_p (this_rtx))
ae860ff7 4313 newcnst = this_rtx;
46690369
JH
4314 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
 4315 /* Don't copy propagate if it has an attached REG_EQUIV note.
 4316 At this point only function parameters should have
 4317 REG_EQUIV notes, and if the argument slot is used somewhere
 4318 explicitly, it means the address of the parameter has been taken,
 4319 so we should not extend the lifetime of the pseudo. */
4320 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
4321 || GET_CODE (XEXP (note, 0)) != MEM))
ae860ff7
JH
4322 newreg = this_rtx;
4323 }
4324 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
4325 {
8ba46434 4326 /* If we find a case where we can't make the retval REG_EQUAL notes
fbe5a4a6 4327 match the new register, we have to either abandon this replacement,
8ba46434
R
 4328 fix delete_trivially_dead_insns to preserve the setting insn,
 4329 or make it delete the REG_EQUAL note, and fix up all passes that
 4330 require the REG_EQUAL note there. */
4331 if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
4332 abort ();
ae860ff7
JH
4333 if (gcse_file != NULL)
4334 {
4335 fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
4336 REGNO (x));
4337 fprintf (gcse_file, "insn %d with constant ",
4338 INSN_UID (insn));
4339 print_rtl (gcse_file, newcnst);
4340 fprintf (gcse_file, "\n");
4341 }
4342 const_prop_count++;
4343 return true;
4344 }
4345 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
4346 {
8ba46434 4347 adjust_libcall_notes (x, newreg, insn, libcall_sp);
ae860ff7
JH
4348 if (gcse_file != NULL)
4349 {
4350 fprintf (gcse_file,
4351 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
4352 REGNO (x), INSN_UID (insn));
4353 fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
4354 }
4355 copy_prop_count++;
4356 return true;
4357 }
4358 }
4359 return false;
4360}
4361
8ba46434
R
4362/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4363 their REG_EQUAL notes need updating to reflect that OLDREG has been
f4e3e618
RH
4364 replaced with NEWVAL in INSN. Return true if all substitutions could
4365 be made. */
8ba46434 4366static bool
1d088dee 4367adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
8ba46434 4368{
f4e3e618 4369 rtx end;
8ba46434
R
4370
4371 while ((end = *libcall_sp++))
4372 {
f4e3e618 4373 rtx note = find_reg_equal_equiv_note (end);
8ba46434
R
4374
4375 if (! note)
4376 continue;
4377
4378 if (REG_P (newval))
4379 {
4380 if (reg_set_between_p (newval, PREV_INSN (insn), end))
4381 {
4382 do
4383 {
4384 note = find_reg_equal_equiv_note (end);
4385 if (! note)
4386 continue;
4387 if (reg_mentioned_p (newval, XEXP (note, 0)))
4388 return false;
4389 }
4390 while ((end = *libcall_sp++));
4391 return true;
4392 }
4393 }
4394 XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
4395 insn = end;
4396 }
4397 return true;
4398}
4399
4400#define MAX_NESTED_LIBCALLS 9
4401
ae860ff7 4402static void
1d088dee 4403local_cprop_pass (int alter_jumps)
ae860ff7
JH
4404{
4405 rtx insn;
4406 struct reg_use *reg_used;
8ba46434 4407 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
1649d92f 4408 bool changed = false;
ae860ff7 4409
463301c3 4410 cselib_init (false);
8ba46434
R
4411 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
4412 *libcall_sp = 0;
ae860ff7
JH
4413 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4414 {
4415 if (INSN_P (insn))
4416 {
8ba46434 4417 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
ae860ff7 4418
8ba46434
R
4419 if (note)
4420 {
4421 if (libcall_sp == libcall_stack)
4422 abort ();
4423 *--libcall_sp = XEXP (note, 0);
4424 }
4425 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
4426 if (note)
4427 libcall_sp++;
4428 note = find_reg_equal_equiv_note (insn);
ae860ff7
JH
4429 do
4430 {
4431 reg_use_count = 0;
710ee3ed 4432 note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
ae860ff7 4433 if (note)
710ee3ed 4434 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
ae860ff7
JH
4435
4436 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4437 reg_used++, reg_use_count--)
8ba46434
R
4438 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
4439 libcall_sp))
1649d92f
JH
4440 {
4441 changed = true;
4442 break;
4443 }
bc6688b4
RS
4444 if (INSN_DELETED_P (insn))
4445 break;
ae860ff7
JH
4446 }
4447 while (reg_use_count);
4448 }
4449 cselib_process_insn (insn);
4450 }
4451 cselib_finish ();
1649d92f
JH
4452 /* Global analysis may get into infinite loops for unreachable blocks. */
4453 if (changed && alter_jumps)
5f0bea72
JH
4454 {
4455 delete_unreachable_blocks ();
4456 free_reg_set_mem ();
4457 alloc_reg_set_mem (max_reg_num ());
4458 compute_sets (get_insns ());
4459 }
ae860ff7
JH
4460}
4461
c4c81601 4462/* Forward propagate copies. This includes copies and constants. Return
cc2902df 4463 nonzero if a change was made. */
7506f491
DE
4464
4465static int
1d088dee 4466cprop (int alter_jumps)
7506f491 4467{
e0082a72
ZD
4468 int changed;
4469 basic_block bb;
7506f491
DE
4470 rtx insn;
4471
4472 /* Note we start at block 1. */
e0082a72
ZD
4473 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4474 {
4475 if (gcse_file != NULL)
4476 fprintf (gcse_file, "\n");
4477 return 0;
4478 }
7506f491
DE
4479
4480 changed = 0;
e0082a72 4481 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
7506f491
DE
4482 {
4483 /* Reset tables used to keep track of what's still valid [since the
4484 start of the block]. */
4485 reset_opr_set_tables ();
4486
a813c111
SB
4487 for (insn = BB_HEAD (bb);
4488 insn != NULL && insn != NEXT_INSN (BB_END (bb));
7506f491 4489 insn = NEXT_INSN (insn))
172890a2
RK
4490 if (INSN_P (insn))
4491 {
ae860ff7 4492 changed |= cprop_insn (insn, alter_jumps);
7506f491 4493
172890a2
RK
4494 /* Keep track of everything modified by this insn. */
4495 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4496 call mark_oprs_set if we turned the insn into a NOTE. */
4497 if (GET_CODE (insn) != NOTE)
4498 mark_oprs_set (insn);
8e42ace1 4499 }
7506f491
DE
4500 }
4501
4502 if (gcse_file != NULL)
4503 fprintf (gcse_file, "\n");
4504
4505 return changed;
4506}
4507
fbef91d8
RS
4508/* Similar to get_condition, only the resulting condition must be
4509 valid at JUMP, instead of at EARLIEST.
4510
4511 This differs from noce_get_condition in ifcvt.c in that we prefer not to
4512 settle for the condition variable in the jump instruction being integral.
4513 We prefer to be able to record the value of a user variable, rather than
4514 the value of a temporary used in a condition. This could be solved by
 4515 recording the value of *every* register scanned by canonicalize_condition,
4516 but this would require some code reorganization. */
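/* E.g. (illustrative, for a MODE_CC target):

     insn: (set (reg:CC 17) (compare:CC (reg 70) (const_int 0)))
     jump: (set (pc) (if_then_else (eq (reg:CC 17) (const_int 0)) ...))

   canonicalize_condition can hand back (eq (reg 70) (const_int 0)),
   letting us record the value of the user variable in reg 70 rather
   than that of the condition-code temporary.  */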
4517
2fa4a849 4518rtx
1d088dee 4519fis_get_condition (rtx jump)
fbef91d8
RS
4520{
4521 rtx cond, set, tmp, insn, earliest;
4522 bool reverse;
4523
4524 if (! any_condjump_p (jump))
4525 return NULL_RTX;
4526
4527 set = pc_set (jump);
4528 cond = XEXP (SET_SRC (set), 0);
4529
4530 /* If this branches to JUMP_LABEL when the condition is false,
4531 reverse the condition. */
4532 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4533 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
4534
4535 /* Use canonicalize_condition to do the dirty work of manipulating
4536 MODE_CC values and COMPARE rtx codes. */
ec6ec6aa
ZD
4537 tmp = canonicalize_condition (jump, cond, reverse, &earliest, NULL_RTX,
4538 false);
fbef91d8
RS
4539 if (!tmp)
4540 return NULL_RTX;
4541
4542 /* Verify that the given condition is valid at JUMP by virtue of not
4543 having been modified since EARLIEST. */
4544 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4545 if (INSN_P (insn) && modified_in_p (tmp, insn))
4546 break;
4547 if (insn == jump)
4548 return tmp;
4549
4550 /* The condition was modified. See if we can get a partial result
4551 that doesn't follow all the reversals. Perhaps combine can fold
4552 them together later. */
4553 tmp = XEXP (tmp, 0);
4554 if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
4555 return NULL_RTX;
ec6ec6aa
ZD
4556 tmp = canonicalize_condition (jump, cond, reverse, &earliest, tmp,
4557 false);
fbef91d8
RS
4558 if (!tmp)
4559 return NULL_RTX;
4560
4561 /* For sanity's sake, re-validate the new result. */
4562 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4563 if (INSN_P (insn) && modified_in_p (tmp, insn))
4564 return NULL_RTX;
4565
4566 return tmp;
4567}
4568
b0656d8b
JW
4569/* Check the comparison COND to see if we can safely form an implicit set from
4570 it. COND is either an EQ or NE comparison. */
4571
4572static bool
4573implicit_set_cond_p (rtx cond)
4574{
4575 enum machine_mode mode = GET_MODE (XEXP (cond, 0));
4576 rtx cst = XEXP (cond, 1);
4577
4578 /* We can't perform this optimization if either operand might be or might
4579 contain a signed zero. */
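  /* E.g. following "if (x == 0.0)" we may not record the implicit set
     "x = 0.0": x could be -0.0, which compares equal to 0.0 but has a
     different sign bit.  */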
4580 if (HONOR_SIGNED_ZEROS (mode))
4581 {
4582 /* It is sufficient to check if CST is or contains a zero. We must
4583 handle float, complex, and vector. If any subpart is a zero, then
4584 the optimization can't be performed. */
4585 /* ??? The complex and vector checks are not implemented yet. We just
4586 always return zero for them. */
4587 if (GET_CODE (cst) == CONST_DOUBLE)
4588 {
4589 REAL_VALUE_TYPE d;
4590 REAL_VALUE_FROM_CONST_DOUBLE (d, cst);
4591 if (REAL_VALUES_EQUAL (d, dconst0))
4592 return 0;
4593 }
4594 else
4595 return 0;
4596 }
4597
4598 return gcse_constant_p (cst);
4599}
4600
fbef91d8
RS
4601/* Find the implicit sets of a function. An "implicit set" is a constraint
4602 on the value of a variable, implied by a conditional jump. For example,
4603 following "if (x == 2)", the then branch may be optimized as though the
4604 conditional performed an "explicit set", in this example, "x = 2". This
4605 function records the set patterns that are implicit at the start of each
4606 basic block. */
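/* Continuing the example above (rtl is illustrative): following the
   edge taken when (eq (reg 70) (const_int 2)) holds, the destination
   block is given

     implicit_sets[dest->index] = (set (reg 70) (const_int 2))

   which the constant propagation pass then treats like an explicit
   assignment "x = 2".  */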
4607
4608static void
1d088dee 4609find_implicit_sets (void)
fbef91d8
RS
4610{
4611 basic_block bb, dest;
4612 unsigned int count;
4613 rtx cond, new;
4614
4615 count = 0;
4616 FOR_EACH_BB (bb)
a98ebe2e 4617 /* Check for more than one successor. */
fbef91d8
RS
4618 if (bb->succ && bb->succ->succ_next)
4619 {
a813c111 4620 cond = fis_get_condition (BB_END (bb));
fbef91d8
RS
4621
4622 if (cond
4623 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
4624 && GET_CODE (XEXP (cond, 0)) == REG
4625 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
b0656d8b 4626 && implicit_set_cond_p (cond))
fbef91d8
RS
4627 {
4628 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
4629 : FALLTHRU_EDGE (bb)->dest;
4630
4631 if (dest && ! dest->pred->pred_next
4632 && dest != EXIT_BLOCK_PTR)
4633 {
4634 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
4635 XEXP (cond, 1));
4636 implicit_sets[dest->index] = new;
4637 if (gcse_file)
4638 {
 4639 fprintf (gcse_file, "Implicit set of reg %d in ",
 4640 REGNO (XEXP (cond, 0)));
 4641 fprintf (gcse_file, "basic block %d\n", dest->index);
4642 }
4643 count++;
4644 }
4645 }
4646 }
4647
4648 if (gcse_file)
4649 fprintf (gcse_file, "Found %d implicit sets\n", count);
4650}
4651
7506f491 4652/* Perform one copy/constant propagation pass.
a0134312
RS
4653 PASS is the pass count. If CPROP_JUMPS is true, perform constant
4654 propagation into conditional jumps. If BYPASS_JUMPS is true,
4655 perform conditional jump bypassing optimizations. */
7506f491
DE
4656
4657static int
1d088dee 4658one_cprop_pass (int pass, int cprop_jumps, int bypass_jumps)
7506f491
DE
4659{
4660 int changed = 0;
4661
4662 const_prop_count = 0;
4663 copy_prop_count = 0;
4664
a0134312 4665 local_cprop_pass (cprop_jumps);
ae860ff7 4666
fbef91d8 4667 /* Determine implicit sets. */
703ad42b 4668 implicit_sets = xcalloc (last_basic_block, sizeof (rtx));
fbef91d8
RS
4669 find_implicit_sets ();
4670
02280659
ZD
4671 alloc_hash_table (max_cuid, &set_hash_table, 1);
4672 compute_hash_table (&set_hash_table);
fbef91d8
RS
4673
4674 /* Free implicit_sets before peak usage. */
4675 free (implicit_sets);
4676 implicit_sets = NULL;
4677
7506f491 4678 if (gcse_file)
02280659
ZD
4679 dump_hash_table (gcse_file, "SET", &set_hash_table);
4680 if (set_hash_table.n_elems > 0)
7506f491 4681 {
02280659 4682 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
7506f491 4683 compute_cprop_data ();
a0134312
RS
4684 changed = cprop (cprop_jumps);
4685 if (bypass_jumps)
0e3f0221 4686 changed |= bypass_conditional_jumps ();
7506f491
DE
4687 free_cprop_mem ();
4688 }
c4c81601 4689
02280659 4690 free_hash_table (&set_hash_table);
7506f491
DE
4691
4692 if (gcse_file)
4693 {
c4c81601 4694 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
faed5cc3 4695 current_function_name (), pass, bytes_used);
c4c81601
RK
4696 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4697 const_prop_count, copy_prop_count);
7506f491 4698 }
1649d92f
JH
4699 /* Global analysis may get into infinite loops for unreachable blocks. */
4700 if (changed && cprop_jumps)
4701 delete_unreachable_blocks ();
7506f491
DE
4702
4703 return changed;
4704}
4705\f
0e3f0221
RS
4706/* Bypass conditional jumps. */
4707
7821bfc7
RS
4708/* The value of last_basic_block at the beginning of the jump_bypass
4709 pass. The use of redirect_edge_and_branch_force may introduce new
4710 basic blocks, but the data flow analysis is only valid for basic
4711 block indices less than bypass_last_basic_block. */
4712
4713static int bypass_last_basic_block;
4714
0e3f0221
RS
 4715 /* Find a set of register REGNO to a constant that is available at the end of
4716 block BB. Returns NULL if no such set is found. Based heavily upon
4717 find_avail_set. */
4718
4719static struct expr *
1d088dee 4720find_bypass_set (int regno, int bb)
0e3f0221
RS
4721{
4722 struct expr *result = 0;
4723
4724 for (;;)
4725 {
4726 rtx src;
ceda50e9 4727 struct expr *set = lookup_set (regno, &set_hash_table);
0e3f0221
RS
4728
4729 while (set)
4730 {
4731 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
4732 break;
4733 set = next_set (regno, set);
4734 }
4735
4736 if (set == 0)
4737 break;
4738
4739 if (GET_CODE (set->expr) != SET)
4740 abort ();
4741
4742 src = SET_SRC (set->expr);
6b2d1c9e 4743 if (gcse_constant_p (src))
0e3f0221
RS
4744 result = set;
4745
4746 if (GET_CODE (src) != REG)
4747 break;
4748
4749 regno = REGNO (src);
4750 }
4751 return result;
4752}
4753
4754
e129b3f9
RS
4755/* Subroutine of bypass_block that checks whether a pseudo is killed by
4756 any of the instructions inserted on an edge. Jump bypassing places
4757 condition code setters on CFG edges using insert_insn_on_edge. This
4758 function is required to check that our data flow analysis is still
4759 valid prior to commit_edge_insertions. */
4760
4761static bool
1d088dee 4762reg_killed_on_edge (rtx reg, edge e)
e129b3f9
RS
4763{
4764 rtx insn;
4765
4766 for (insn = e->insns; insn; insn = NEXT_INSN (insn))
4767 if (INSN_P (insn) && reg_set_p (reg, insn))
4768 return true;
4769
4770 return false;
4771}
4772
0e3f0221
RS
4773/* Subroutine of bypass_conditional_jumps that attempts to bypass the given
4774 basic block BB which has more than one predecessor. If not NULL, SETCC
4775 is the first instruction of BB, which is immediately followed by JUMP_INSN
4776 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
e129b3f9
RS
4777 Returns nonzero if a change was made.
4778
e0bb17a8 4779 During the jump bypassing pass, we may place copies of SETCC instructions
e129b3f9
RS
4780 on CFG edges. The following routine must be careful to pay attention to
4781 these inserted insns when performing its transformations. */
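/* A sketch of the bypass (all rtx and numbers illustrative): suppose
   BB contains only the jump

     (set (pc) (if_then_else (eq (reg 70) (const_int 0))
                             (label_ref 42) (pc)))

   and on one incoming edge the set (set (reg 70) (const_int 0)) is
   available. Along that edge the condition is known true, so the edge
   can be redirected straight to the block holding label 42.  */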
0e3f0221
RS
4782
4783static int
1d088dee 4784bypass_block (basic_block bb, rtx setcc, rtx jump)
0e3f0221
RS
4785{
4786 rtx insn, note;
e129b3f9 4787 edge e, enext, edest;
818b6b7f 4788 int i, change;
72b8d451 4789 int may_be_loop_header;
0e3f0221
RS
4790
4791 insn = (setcc != NULL) ? setcc : jump;
4792
4793 /* Determine set of register uses in INSN. */
4794 reg_use_count = 0;
4795 note_uses (&PATTERN (insn), find_used_regs, NULL);
4796 note = find_reg_equal_equiv_note (insn);
4797 if (note)
4798 find_used_regs (&XEXP (note, 0), NULL);
4799
72b8d451
ZD
4800 may_be_loop_header = false;
4801 for (e = bb->pred; e; e = e->pred_next)
4802 if (e->flags & EDGE_DFS_BACK)
4803 {
4804 may_be_loop_header = true;
4805 break;
4806 }
4807
0e3f0221
RS
4808 change = 0;
4809 for (e = bb->pred; e; e = enext)
4810 {
4811 enext = e->pred_next;
7821bfc7
RS
4812 if (e->flags & EDGE_COMPLEX)
4813 continue;
4814
4815 /* We can't redirect edges from new basic blocks. */
4816 if (e->src->index >= bypass_last_basic_block)
4817 continue;
4818
72b8d451 4819 /* The irreducible loops created by redirecting edges entering the
e0bb17a8
KH
 4820 loop from outside would decrease the effectiveness of some of the
 4821 following optimizations, so prevent this. */
72b8d451
ZD
4822 if (may_be_loop_header
4823 && !(e->flags & EDGE_DFS_BACK))
4824 continue;
4825
0e3f0221
RS
4826 for (i = 0; i < reg_use_count; i++)
4827 {
4828 struct reg_use *reg_used = &reg_use_table[i];
589005ff 4829 unsigned int regno = REGNO (reg_used->reg_rtx);
818b6b7f 4830 basic_block dest, old_dest;
589005ff
KH
4831 struct expr *set;
4832 rtx src, new;
0e3f0221 4833
589005ff
KH
4834 if (regno >= max_gcse_regno)
4835 continue;
0e3f0221 4836
589005ff 4837 set = find_bypass_set (regno, e->src->index);
0e3f0221
RS
4838
4839 if (! set)
4840 continue;
4841
e129b3f9
RS
4842 /* Check the data flow is valid after edge insertions. */
4843 if (e->insns && reg_killed_on_edge (reg_used->reg_rtx, e))
4844 continue;
4845
589005ff 4846 src = SET_SRC (pc_set (jump));
0e3f0221
RS
4847
4848 if (setcc != NULL)
4849 src = simplify_replace_rtx (src,
589005ff
KH
4850 SET_DEST (PATTERN (setcc)),
4851 SET_SRC (PATTERN (setcc)));
0e3f0221
RS
4852
4853 new = simplify_replace_rtx (src, reg_used->reg_rtx,
589005ff 4854 SET_SRC (set->expr));
0e3f0221 4855
1d088dee 4856 /* Jump bypassing may have already placed instructions on
e129b3f9
RS
4857 edges of the CFG. We can't bypass an outgoing edge that
4858 has instructions associated with it, as these insns won't
4859 get executed if the incoming edge is redirected. */
4860
589005ff 4861 if (new == pc_rtx)
e129b3f9
RS
4862 {
4863 edest = FALLTHRU_EDGE (bb);
4864 dest = edest->insns ? NULL : edest->dest;
4865 }
0e3f0221 4866 else if (GET_CODE (new) == LABEL_REF)
e129b3f9
RS
4867 {
4868 dest = BLOCK_FOR_INSN (XEXP (new, 0));
4869 /* Don't bypass edges containing instructions. */
4870 for (edest = bb->succ; edest; edest = edest->succ_next)
4871 if (edest->dest == dest && edest->insns)
4872 {
4873 dest = NULL;
4874 break;
4875 }
4876 }
0e3f0221
RS
4877 else
4878 dest = NULL;
4879
a544524a
JH
4880 /* Avoid unification of the edge with other edges from original
4881 branch. We would end up emitting the instruction on "both"
4882 edges. */
4883
f0cad2d5 4884 if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc))))
a544524a
JH
4885 {
4886 edge e2;
4887 for (e2 = e->src->succ; e2; e2 = e2->succ_next)
4888 if (e2->dest == dest)
4889 {
4890 dest = NULL;
4891 break;
4892 }
4893 }
4894
818b6b7f 4895 old_dest = e->dest;
7821bfc7
RS
4896 if (dest != NULL
4897 && dest != old_dest
4898 && dest != EXIT_BLOCK_PTR)
4899 {
4900 redirect_edge_and_branch_force (e, dest);
4901
818b6b7f 4902 /* Copy the register setter to the redirected edge.
0e3f0221
RS
4903 Don't copy CC0 setters, as CC0 is dead after jump. */
4904 if (setcc)
4905 {
4906 rtx pat = PATTERN (setcc);
818b6b7f 4907 if (!CC0_P (SET_DEST (pat)))
0e3f0221
RS
4908 insert_insn_on_edge (copy_insn (pat), e);
4909 }
4910
4911 if (gcse_file != NULL)
4912 {
818b6b7f
RH
4913 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
4914 regno, INSN_UID (jump));
0e3f0221
RS
4915 print_rtl (gcse_file, SET_SRC (set->expr));
4916 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
818b6b7f 4917 e->src->index, old_dest->index, dest->index);
0e3f0221
RS
4918 }
4919 change = 1;
4920 break;
4921 }
4922 }
4923 }
4924 return change;
4925}

/* Find basic blocks with more than one predecessor that only contain a
   single conditional jump.  If the result of the comparison is known at
   compile-time from any incoming edge, redirect that edge to the
   appropriate target.  Returns nonzero if a change was made.

   This function is now mis-named, because we also handle indirect jumps.  */

static int
bypass_conditional_jumps (void)
{
  basic_block bb;
  int changed;
  rtx setcc;
  rtx insn;
  rtx dest;

  /* Note we start at block 1.  */
  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    return 0;

  bypass_last_basic_block = last_basic_block;
  mark_dfs_back_edges ();

  changed = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
		  EXIT_BLOCK_PTR, next_bb)
    {
      /* Check for more than one predecessor.  */
      if (bb->pred && bb->pred->pred_next)
	{
	  setcc = NULL_RTX;
	  for (insn = BB_HEAD (bb);
	       insn != NULL && insn != NEXT_INSN (BB_END (bb));
	       insn = NEXT_INSN (insn))
	    if (GET_CODE (insn) == INSN)
	      {
		if (setcc)
		  break;
		if (GET_CODE (PATTERN (insn)) != SET)
		  break;

		dest = SET_DEST (PATTERN (insn));
		if (REG_P (dest) || CC0_P (dest))
		  setcc = insn;
		else
		  break;
	      }
	    else if (GET_CODE (insn) == JUMP_INSN)
	      {
		if ((any_condjump_p (insn) || computed_jump_p (insn))
		    && onlyjump_p (insn))
		  changed |= bypass_block (bb, setcc, insn);
		break;
	      }
	    else if (INSN_P (insn))
	      break;
	}
    }

  /* If we bypassed any register setting insns, we inserted a
     copy on the redirected edge.  These need to be committed.  */
  if (changed)
    commit_edge_insertions ();

  return changed;
}
\f
/* Compute PRE+LCM working variables.  */

/* Local properties of expressions.  */
/* Nonzero for expressions that are transparent in the block.  */
static sbitmap *transp;

/* Nonzero for expressions that are transparent at the end of the block.
   This is only zero for expressions killed by an abnormal critical edge
   created by a call.  */
static sbitmap *transpout;

/* Nonzero for expressions that are computed (available) in the block.  */
static sbitmap *comp;

/* Nonzero for expressions that are locally anticipatable in the block.  */
static sbitmap *antloc;
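
/* For example (an illustrative sketch, not from any particular target):
   in a block containing

	r1 = r2 + r3
	r2 = ...

   the expression (plus r2 r3) is locally anticipatable (ANTLOC) because
   it is evaluated before either operand is redefined, but it is neither
   transparent (TRANSP), since r2 is set within the block, nor computed
   and still available at the block end (COMP), for the same reason.
   With the two statements swapped, the expression would not be
   anticipatable, but it would be available at the end of the block.  */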

/* Nonzero for expressions where this block is an optimal computation
   point.  */
static sbitmap *pre_optimal;

/* Nonzero for expressions which are redundant in a particular block.  */
static sbitmap *pre_redundant;

/* Nonzero for expressions which should be inserted on a specific edge.  */
static sbitmap *pre_insert_map;

/* Nonzero for expressions which should be deleted in a specific block.  */
static sbitmap *pre_delete_map;

/* Contains the edge_list returned by pre_edge_lcm.  */
static struct edge_list *edge_list;

/* Redundant insns.  */
static sbitmap pre_redundant_insns;

/* Allocate vars used for PRE analysis.  */

static void
alloc_pre_mem (int n_blocks, int n_exprs)
{
  transp = sbitmap_vector_alloc (n_blocks, n_exprs);
  comp = sbitmap_vector_alloc (n_blocks, n_exprs);
  antloc = sbitmap_vector_alloc (n_blocks, n_exprs);

  pre_optimal = NULL;
  pre_redundant = NULL;
  pre_insert_map = NULL;
  pre_delete_map = NULL;
  ae_in = NULL;
  ae_out = NULL;
  ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);

  /* pre_insert and pre_delete are allocated later.  */
}

/* Free vars used for PRE analysis.  */

static void
free_pre_mem (void)
{
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);

  /* ANTLOC and AE_KILL are freed just after pre_lcm finishes.  */

  if (pre_optimal)
    sbitmap_vector_free (pre_optimal);
  if (pre_redundant)
    sbitmap_vector_free (pre_redundant);
  if (pre_insert_map)
    sbitmap_vector_free (pre_insert_map);
  if (pre_delete_map)
    sbitmap_vector_free (pre_delete_map);
  if (ae_in)
    sbitmap_vector_free (ae_in);
  if (ae_out)
    sbitmap_vector_free (ae_out);

  transp = comp = NULL;
  pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
  ae_in = ae_out = NULL;
}

/* Top level routine to do the dataflow analysis needed by PRE.  */

static void
compute_pre_data (void)
{
  sbitmap trapping_expr;
  basic_block bb;
  unsigned int ui;

  compute_local_properties (transp, comp, antloc, &expr_hash_table);
  sbitmap_vector_zero (ae_kill, last_basic_block);

  /* Collect expressions which might trap.  */
  trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
  sbitmap_zero (trapping_expr);
  for (ui = 0; ui < expr_hash_table.size; ui++)
    {
      struct expr *e;
      for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
	if (may_trap_p (e->expr))
	  SET_BIT (trapping_expr, e->bitmap_index);
    }

  /* Compute ae_kill for each basic block using:

     ~(TRANSP | COMP)

     This is significantly faster than compute_ae_kill.  */
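
  /* As an illustration of the identity used above: an expression is
     killed for availability exactly when some operand is modified in
     the block (not TRANSP) and the expression is not re-computed
     afterwards (not COMP).  Bitwise, for each expression bit:

	TRANSP  COMP    ~(TRANSP | COMP) = AE_KILL
	  0       0            1    killed, not regenerated
	  0       1            0    regenerated, so available again
	  1       0            0    untouched
	  1       1            0    untouched and computed

     which is why one OR and one NOT per block suffice instead of
     calling compute_ae_kill.  */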

  FOR_EACH_BB (bb)
    {
      edge e;

      /* If the current block is the destination of an abnormal edge, we
	 kill all trapping expressions because we won't be able to properly
	 place the instruction on the edge.  So make them neither
	 anticipatable nor transparent.  This is fairly conservative.  */
      for (e = bb->pred; e ; e = e->pred_next)
	if (e->flags & EDGE_ABNORMAL)
	  {
	    sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
	    sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
	    break;
	  }

      sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
      sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
    }

  edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
			    ae_kill, &pre_insert_map, &pre_delete_map);
  sbitmap_vector_free (antloc);
  antloc = NULL;
  sbitmap_vector_free (ae_kill);
  ae_kill = NULL;
  sbitmap_free (trapping_expr);
}
\f
/* PRE utilities */

/* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
   block BB.

   VISITED is a pointer to a working buffer for tracking which BB's have
   been visited.  It is NULL for the top-level call.

   We treat reaching expressions that go through blocks containing the same
   reaching expression as "not reaching".  E.g. if EXPR is generated in blocks
   2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
   2 as not reaching.  The intent is to improve the probability of finding
   only one reaching expression and to reduce register lifetimes by picking
   the closest such expression.  */

static int
pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited)
{
  edge pred;

  for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
    {
      basic_block pred_bb = pred->src;

      if (pred->src == ENTRY_BLOCK_PTR
	  /* Has this predecessor already been visited?  */
	  || visited[pred_bb->index])
	; /* Nothing to do.  */

      /* Does this predecessor generate this expression?  */
      else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
	{
	  /* Is this the occurrence we're looking for?
	     Note that there's only one generating occurrence per block
	     so we just need to check the block number.  */
	  if (occr_bb == pred_bb)
	    return 1;

	  visited[pred_bb->index] = 1;
	}
      /* Ignore this predecessor if it kills the expression.  */
      else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
	visited[pred_bb->index] = 1;

      /* Neither gen nor kill.  */
      else
	{
	  visited[pred_bb->index] = 1;
	  if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
	    return 1;
	}
    }

  /* All paths have been checked.  */
  return 0;
}

/* The wrapper for pre_expr_reaches_here_work that ensures that any
   memory allocated for that function is returned.  */

static int
pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
{
  int rval;
  char *visited = xcalloc (last_basic_block, 1);

  rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);

  free (visited);
  return rval;
}
\f

/* Given an expr, generate RTL which we can insert at the end of a BB,
   or on an edge.  Set the block number of any insns generated to
   the value of BB.  */

static rtx
process_insert_insn (struct expr *expr)
{
  rtx reg = expr->reaching_reg;
  rtx exp = copy_rtx (expr->expr);
  rtx pat;

  start_sequence ();

  /* If the expression is something that's an operand, like a constant,
     just copy it to a register.  */
  if (general_operand (exp, GET_MODE (reg)))
    emit_move_insn (reg, exp);

  /* Otherwise, make a new insn to compute this expression and make sure the
     insn will be recognized (this also adds any needed CLOBBERs).  Copy the
     expression to make sure we don't have any sharing issues.  */
  else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
    abort ();

  pat = get_insns ();
  end_sequence ();

  return pat;
}

/* Add EXPR to the end of basic block BB.

   This is used by both PRE and code hoisting.

   For PRE, we want to verify that the expr is either transparent
   or locally anticipatable in the target block.  This check makes
   no sense for code hoisting.  */

static void
insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
{
  rtx insn = BB_END (bb);
  rtx new_insn;
  rtx reg = expr->reaching_reg;
  int regno = REGNO (reg);
  rtx pat, pat_end;

  pat = process_insert_insn (expr);
  if (pat == NULL_RTX || ! INSN_P (pat))
    abort ();

  pat_end = pat;
  while (NEXT_INSN (pat_end) != NULL_RTX)
    pat_end = NEXT_INSN (pat_end);

  /* If the last insn is a jump, insert EXPR in front [taking care to
     handle cc0, etc. properly].  Similarly we need to take care of
     trapping instructions in the presence of non-call exceptions.  */

  if (GET_CODE (insn) == JUMP_INSN
      || (GET_CODE (insn) == INSN
	  && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
    {
#ifdef HAVE_cc0
      rtx note;
#endif
      /* It should always be the case that we can put these instructions
	 anywhere in the basic block when performing PRE optimizations.
	 Check this.  */
      if (GET_CODE (insn) == INSN && pre
	  && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
	  && !TEST_BIT (transp[bb->index], expr->bitmap_index))
	abort ();

      /* If this is a jump table, then we can't insert stuff here.  Since
	 we know the previous real insn must be the tablejump, we insert
	 the new instruction just before the tablejump.  */
      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	insn = prev_real_insn (insn);

#ifdef HAVE_cc0
      /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
	 if cc0 isn't set.  */
      note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
      if (note)
	insn = XEXP (note, 0);
      else
	{
	  rtx maybe_cc0_setter = prev_nonnote_insn (insn);
	  if (maybe_cc0_setter
	      && INSN_P (maybe_cc0_setter)
	      && sets_cc0_p (PATTERN (maybe_cc0_setter)))
	    insn = maybe_cc0_setter;
	}
#endif
      /* FIXME: What if something in cc0/jump uses value set in new insn?  */
      new_insn = emit_insn_before (pat, insn);
    }

  /* Likewise if the last insn is a call, as will happen in the presence
     of exception handling.  */
  else if (GET_CODE (insn) == CALL_INSN
	   && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
    {
      /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
	 we search backward and place the instructions before the first
	 parameter is loaded.  Do this for everyone for consistency and a
	 presumption that we'll get better code elsewhere as well.

	 It should always be the case that we can put these instructions
	 anywhere in the basic block when performing PRE optimizations.
	 Check this.  */

      if (pre
	  && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
	  && !TEST_BIT (transp[bb->index], expr->bitmap_index))
	abort ();

      /* Since different machines initialize their parameter registers
	 in different orders, assume nothing.  Collect the set of all
	 parameter registers.  */
      insn = find_first_parameter_load (insn, BB_HEAD (bb));

      /* If we found all the parameter loads, then we want to insert
	 before the first parameter load.

	 If we did not find all the parameter loads, then we might have
	 stopped on the head of the block, which could be a CODE_LABEL.
	 If we inserted before the CODE_LABEL, then we would be putting
	 the insn in the wrong basic block.  In that case, put the insn
	 after the CODE_LABEL.  Also, respect NOTE_INSN_BASIC_BLOCK.  */
      while (GET_CODE (insn) == CODE_LABEL
	     || NOTE_INSN_BASIC_BLOCK_P (insn))
	insn = NEXT_INSN (insn);

      new_insn = emit_insn_before (pat, insn);
    }
  else
    new_insn = emit_insn_after (pat, insn);

  while (1)
    {
      if (INSN_P (pat))
	{
	  add_label_notes (PATTERN (pat), new_insn);
	  note_stores (PATTERN (pat), record_set_info, pat);
	}
      if (pat == pat_end)
	break;
      pat = NEXT_INSN (pat);
    }

  gcse_create_count++;

  if (gcse_file)
    {
      fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
	       bb->index, INSN_UID (new_insn));
      fprintf (gcse_file, "copying expression %d to reg %d\n",
	       expr->bitmap_index, regno);
    }
}

/* Insert partially redundant expressions on edges in the CFG to make
   the expressions fully redundant.  */

static int
pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
{
  int e, i, j, num_edges, set_size, did_insert = 0;
  sbitmap *inserted;

  /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
     if it reaches any of the deleted expressions.  */

  set_size = pre_insert_map[0]->size;
  num_edges = NUM_EDGES (edge_list);
  inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
  sbitmap_vector_zero (inserted, num_edges);

  for (e = 0; e < num_edges; e++)
    {
      int indx;
      basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);

      for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
	{
	  SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];

	  for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
	    if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
	      {
		struct expr *expr = index_map[j];
		struct occr *occr;

		/* Now look at each deleted occurrence of this expression.  */
		for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
		  {
		    if (! occr->deleted_p)
		      continue;

		    /* Insert this expression on this edge if it would
		       reach the deleted occurrence in BB.  */
		    if (!TEST_BIT (inserted[e], j))
		      {
			rtx insn;
			edge eg = INDEX_EDGE (edge_list, e);

			/* We can't insert anything on an abnormal and
			   critical edge, so we insert the insn at the end of
			   the previous block.  There are several alternatives
			   detailed in Morgan's book P277 (sec 10.5) for
			   handling this situation.  This one is easiest for
			   now.  */

			if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
			  insert_insn_end_bb (index_map[j], bb, 0);
			else
			  {
			    insn = process_insert_insn (index_map[j]);
			    insert_insn_on_edge (insn, eg);
			  }

			if (gcse_file)
			  {
			    fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
				     bb->index,
				     INDEX_EDGE_SUCC_BB (edge_list, e)->index);
			    fprintf (gcse_file, "copy expression %d\n",
				     expr->bitmap_index);
			  }

			update_ld_motion_stores (expr);
			SET_BIT (inserted[e], j);
			did_insert = 1;
			gcse_create_count++;
		      }
		  }
	      }
	}
    }

  sbitmap_vector_free (inserted);
  return did_insert;
}

/* Copy the result of EXPR->EXPR generated by INSN to EXPR->REACHING_REG.
   Given "old_reg <- expr" (INSN), instead of adding after it
	reaching_reg <- old_reg
   it's better to do the following:
	reaching_reg <- expr
	old_reg      <- reaching_reg
   because this way copy propagation can discover additional PRE
   opportunities.  But if this fails, we try the old way.
   When "expr" is a store, i.e.
   given "MEM <- old_reg", instead of adding after it
	reaching_reg <- old_reg
   it's better to add it before as follows:
	reaching_reg <- old_reg
	MEM          <- reaching_reg.  */
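
/* As a sketch (register numbers hypothetical): for INSN

	(set (reg 100) (plus (reg 101) (reg 102)))

   with reaching register 103, the preferred rewrite produces

	(set (reg 103) (plus (reg 101) (reg 102)))    <- INSN, dest changed
	(set (reg 100) (reg 103))                     <- new copy insn

   so that the copy, rather than the addition, is what later passes see
   when propagating reg 100.  */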

static void
pre_insert_copy_insn (struct expr *expr, rtx insn)
{
  rtx reg = expr->reaching_reg;
  int regno = REGNO (reg);
  int indx = expr->bitmap_index;
  rtx pat = PATTERN (insn);
  rtx set, new_insn;
  rtx old_reg;
  int i;

  /* This block matches the logic in hash_scan_insn.  */
  if (GET_CODE (pat) == SET)
    set = pat;
  else if (GET_CODE (pat) == PARALLEL)
    {
      /* Search through the parallel looking for the set whose
	 source was the expression that we're interested in.  */
      set = NULL_RTX;
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx x = XVECEXP (pat, 0, i);
	  if (GET_CODE (x) == SET
	      && expr_equiv_p (SET_SRC (x), expr->expr))
	    {
	      set = x;
	      break;
	    }
	}
    }
  else
    abort ();

  if (GET_CODE (SET_DEST (set)) == REG)
    {
      old_reg = SET_DEST (set);
      /* Check if we can modify the set destination in the original insn.  */
      if (validate_change (insn, &SET_DEST (set), reg, 0))
	{
	  new_insn = gen_move_insn (old_reg, reg);
	  new_insn = emit_insn_after (new_insn, insn);

	  /* Keep register set table up to date.  */
	  replace_one_set (REGNO (old_reg), insn, new_insn);
	  record_one_set (regno, insn);
	}
      else
	{
	  new_insn = gen_move_insn (reg, old_reg);
	  new_insn = emit_insn_after (new_insn, insn);

	  /* Keep register set table up to date.  */
	  record_one_set (regno, new_insn);
	}
    }
  else /* This is possible only in case of a store to memory.  */
    {
      old_reg = SET_SRC (set);
      new_insn = gen_move_insn (reg, old_reg);

      /* Check if we can modify the set source in the original insn.  */
      if (validate_change (insn, &SET_SRC (set), reg, 0))
	new_insn = emit_insn_before (new_insn, insn);
      else
	new_insn = emit_insn_after (new_insn, insn);

      /* Keep register set table up to date.  */
      record_one_set (regno, new_insn);
    }

  gcse_create_count++;

  if (gcse_file)
    fprintf (gcse_file,
	     "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
	     BLOCK_NUM (insn), INSN_UID (new_insn), indx,
	     INSN_UID (insn), regno);
}

/* Copy available expressions that reach the redundant expression
   to `reaching_reg'.  */

static void
pre_insert_copies (void)
{
  unsigned int i, added_copy;
  struct expr *expr;
  struct occr *occr;
  struct occr *avail;

  /* For each available expression in the table, copy the result to
     `reaching_reg' if the expression reaches a deleted one.

     ??? The current algorithm is rather brute force.
     Need to do some profiling.  */

  for (i = 0; i < expr_hash_table.size; i++)
    for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
      {
	/* If the basic block isn't reachable, PPOUT will be TRUE.  However,
	   we don't want to insert a copy here because the expression may not
	   really be redundant.  So only insert an insn if the expression was
	   deleted.  This test also avoids further processing if the
	   expression wasn't deleted anywhere.  */
	if (expr->reaching_reg == NULL)
	  continue;

	/* Set when we add a copy for that expression.  */
	added_copy = 0;

	for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	  {
	    if (! occr->deleted_p)
	      continue;

	    for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
	      {
		rtx insn = avail->insn;

		/* No need to handle this one if handled already.  */
		if (avail->copied_p)
		  continue;

		/* Don't handle this one if it's a redundant one.  */
		if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
		  continue;

		/* Or if the expression doesn't reach the deleted one.  */
		if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
					       expr,
					       BLOCK_FOR_INSN (occr->insn)))
		  continue;

		added_copy = 1;

		/* Copy the result of avail to reaching_reg.  */
		pre_insert_copy_insn (expr, insn);
		avail->copied_p = 1;
	      }
	  }

	if (added_copy)
	  update_ld_motion_stores (expr);
      }
}

/* Emit move from SRC to DEST noting the equivalence with expression computed
   in INSN.  */
static rtx
gcse_emit_move_after (rtx src, rtx dest, rtx insn)
{
  rtx new;
  rtx set = single_set (insn), set2;
  rtx note;
  rtx eqv;

  /* This should never fail since we're creating a reg->reg copy
     we've verified to be valid.  */

  new = emit_insn_after (gen_move_insn (dest, src), insn);

  /* Note the equivalence for local CSE pass.  */
  set2 = single_set (new);
  if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
    return new;
  if ((note = find_reg_equal_equiv_note (insn)))
    eqv = XEXP (note, 0);
  else
    eqv = SET_SRC (set);

  set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));

  return new;
}

/* Delete redundant computations.
   Deletion is done by changing the insn to copy the `reaching_reg' of
   the expression into the result of the SET.  It is left to later passes
   (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.

   Returns nonzero if a change is made.  */
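
/* For instance (a hypothetical sketch): if LCM marked the expression in

	(set (reg 100) (mult (reg 101) (reg 102)))

   as deletable in this block, the insn is effectively replaced by

	(set (reg 100) (reg 105))

   where reg 105 is the expression's reaching_reg; the multiplication
   itself is re-materialized by the insertions done in pre_edge_insert.  */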

static int
pre_delete (void)
{
  unsigned int i;
  int changed;
  struct expr *expr;
  struct occr *occr;

  changed = 0;
  for (i = 0; i < expr_hash_table.size; i++)
    for (expr = expr_hash_table.table[i];
	 expr != NULL;
	 expr = expr->next_same_hash)
      {
	int indx = expr->bitmap_index;

	/* We only need to search antic_occr since we require
	   ANTLOC != 0.  */

	for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	  {
	    rtx insn = occr->insn;
	    rtx set;
	    basic_block bb = BLOCK_FOR_INSN (insn);

	    /* We only delete insns that have a single_set.  */
	    if (TEST_BIT (pre_delete_map[bb->index], indx)
		&& (set = single_set (insn)) != 0)
	      {
		/* Create a pseudo-reg to store the result of reaching
		   expressions into.  Get the mode for the new pseudo from
		   the mode of the original destination pseudo.  */
		if (expr->reaching_reg == NULL)
		  expr->reaching_reg
		    = gen_reg_rtx (GET_MODE (SET_DEST (set)));

		gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
		delete_insn (insn);
		occr->deleted_p = 1;
		SET_BIT (pre_redundant_insns, INSN_CUID (insn));
		changed = 1;
		gcse_subst_count++;

		if (gcse_file)
		  {
		    fprintf (gcse_file,
			     "PRE: redundant insn %d (expression %d) in ",
			     INSN_UID (insn), indx);
		    fprintf (gcse_file, "bb %d, reaching reg is %d\n",
			     bb->index, REGNO (expr->reaching_reg));
		  }
	      }
	  }
      }

  return changed;
}

/* Perform GCSE optimizations using PRE.
   This is called by one_pre_gcse_pass after all the dataflow analysis
   has been done.

   This is based on the original Morel-Renvoise paper, Fred Chow's thesis,
   and lazy code motion from Knoop, Ruthing and Steffen as described in
   Advanced Compiler Design and Implementation.

   ??? A new pseudo reg is created to hold the reaching expression.  The nice
   thing about the classical approach is that it would try to use an existing
   reg.  If the register can't be adequately optimized [i.e. we introduce
   reload problems], one could add a pass here to propagate the new register
   through the block.

   ??? We don't handle single sets in PARALLELs because we're [currently] not
   able to copy the rest of the parallel when we insert copies to create full
   redundancies from partial redundancies.  However, there's no reason why we
   can't handle PARALLELs in the cases where there are no partial
   redundancies.  */

static int
pre_gcse (void)
{
  unsigned int i;
  int did_insert, changed;
  struct expr **index_map;
  struct expr *expr;

  /* Compute a mapping from expression number (`bitmap_index') to
     hash table entry.  */

  index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
  for (i = 0; i < expr_hash_table.size; i++)
    for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
      index_map[expr->bitmap_index] = expr;

  /* Reset bitmap used to track which insns are redundant.  */
  pre_redundant_insns = sbitmap_alloc (max_cuid);
  sbitmap_zero (pre_redundant_insns);

  /* Delete the redundant insns first so that
     - we know what register to use for the new insns and for the other
       ones with reaching expressions
     - we know which insns are redundant when we go to create copies  */

  changed = pre_delete ();

  did_insert = pre_edge_insert (edge_list, index_map);

  /* In other places with reaching expressions, copy the expression to the
     specially allocated pseudo-reg that reaches the redundant expr.  */
  pre_insert_copies ();
  if (did_insert)
    {
      commit_edge_insertions ();
      changed = 1;
    }

  free (index_map);
  sbitmap_free (pre_redundant_insns);
  return changed;
}

/* Top level routine to perform one PRE GCSE pass.

   Return nonzero if a change was made.  */

static int
one_pre_gcse_pass (int pass)
{
  int changed = 0;

  gcse_subst_count = 0;
  gcse_create_count = 0;

  alloc_hash_table (max_cuid, &expr_hash_table, 0);
  add_noreturn_fake_exit_edges ();
  if (flag_gcse_lm)
    compute_ld_motion_mems ();

  compute_hash_table (&expr_hash_table);
  trim_ld_motion_mems ();
  if (gcse_file)
    dump_hash_table (gcse_file, "Expression", &expr_hash_table);

  if (expr_hash_table.n_elems > 0)
    {
      alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
      compute_pre_data ();
      changed |= pre_gcse ();
      free_edge_list (edge_list);
      free_pre_mem ();
    }

  free_ldst_mems ();
  remove_fake_edges ();
  free_hash_table (&expr_hash_table);

  if (gcse_file)
    {
      fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
	       current_function_name (), pass, bytes_used);
      fprintf (gcse_file, "%d substs, %d insns created\n",
	       gcse_subst_count, gcse_create_count);
    }

  return changed;
}
\f
/* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
   If notes are added to an insn which references a CODE_LABEL, the
   LABEL_NUSES count is incremented.  We have to add REG_LABEL notes,
   because the following loop optimization pass requires them.  */

/* ??? This is very similar to the loop.c add_label_notes function.  We
   could probably share code here.  */

/* ??? If there was a jump optimization pass after gcse and before loop,
   then we would not need to do this here, because jump would add the
   necessary REG_LABEL notes.  */

static void
add_label_notes (rtx x, rtx insn)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  const char *fmt;

  if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
    {
      /* This code used to ignore labels that referred to dispatch tables to
	 avoid flow generating (slightly) worse code.

	 We no longer ignore such label references (see LABEL_REF handling in
	 mark_jump_label for additional information).  */

      REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
					    REG_NOTES (insn));
      if (LABEL_P (XEXP (x, 0)))
	LABEL_NUSES (XEXP (x, 0))++;
      return;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	add_label_notes (XEXP (x, i), insn);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  add_label_notes (XVECEXP (x, i, j), insn);
    }
}

/* Compute transparent outgoing information for each block.

   An expression is transparent to an edge unless it is killed by
   the edge itself.  This can only happen with abnormal control flow,
   when the edge is traversed through a call.  This happens with
   non-local labels and exceptions.

   This would not be necessary if we split the edge.  While this is
   normally impossible for abnormal critical edges, with some effort
   it should be possible with exception handling, since we still have
   control over which handler should be invoked.  But due to increased
   EH table sizes, this may not be worthwhile.  */
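
/* For example (a sketch): if a block ends in a call and an expression
   such as (mem (symbol_ref X)) is live across it, an abnormal edge out
   of the call (say, to a non-local label) may observe the memory after
   the callee has modified it; clearing TRANSPOUT for the expression in
   this block keeps the analysis from treating it as unchanged along
   that edge.  */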

static void
compute_transpout (void)
{
  basic_block bb;
  unsigned int i;
  struct expr *expr;

  sbitmap_vector_ones (transpout, last_basic_block);

  FOR_EACH_BB (bb)
    {
      /* Note that flow inserted a nop at the end of basic blocks that
	 end in call instructions for reasons other than abnormal
	 control flow.  */
      if (GET_CODE (BB_END (bb)) != CALL_INSN)
	continue;

      for (i = 0; i < expr_hash_table.size; i++)
	for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
	  if (GET_CODE (expr->expr) == MEM)
	    {
	      if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
		  && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
		continue;

	      /* ??? Optimally, we would use interprocedural alias
		 analysis to determine if this mem is actually killed
		 by this call.  */
	      RESET_BIT (transpout[bb->index], expr->bitmap_index);
	    }
    }
}

/* Removal of useless null pointer checks */

/* Called via note_stores.  X is set by SETTER.  If X is a register we must
   invalidate nonnull_local and set nonnull_killed.  DATA is really a
   `null_pointer_info *'.

   We ignore hard registers.  */

static void
invalidate_nonnull_info (rtx x, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  unsigned int regno;
  struct null_pointer_info *npi = (struct null_pointer_info *) data;

  while (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* Ignore anything that is not a register or is a hard register.  */
  if (GET_CODE (x) != REG
      || REGNO (x) < npi->min_reg
      || REGNO (x) >= npi->max_reg)
    return;

  regno = REGNO (x) - npi->min_reg;

  RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
  SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
}

/* Do null-pointer check elimination for the registers indicated in
   NPI.  NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
   they are not our responsibility to free.  */

static int
delete_null_pointer_checks_1 (unsigned int *block_reg, sbitmap *nonnull_avin,
			      sbitmap *nonnull_avout,
			      struct null_pointer_info *npi)
{
  basic_block bb, current_block;
  sbitmap *nonnull_local = npi->nonnull_local;
  sbitmap *nonnull_killed = npi->nonnull_killed;
  int something_changed = 0;

  /* Compute local properties, nonnull and killed.  A register will have
     the nonnull property if at the end of the current block its value is
     known to be nonnull.  The killed property indicates that somewhere in
     the block any information we had about the register is killed.

     Note that a register can have both properties in a single block.  That
     indicates that it's killed, then later in the block a new value is
     computed.  */
  sbitmap_vector_zero (nonnull_local, last_basic_block);
  sbitmap_vector_zero (nonnull_killed, last_basic_block);

  FOR_EACH_BB (current_block)
    {
      rtx insn, stop_insn;

      /* Set the current block for invalidate_nonnull_info.  */
      npi->current_block = current_block;

      /* Scan each insn in the basic block looking for memory references and
	 register sets.  */
      stop_insn = NEXT_INSN (BB_END (current_block));
      for (insn = BB_HEAD (current_block);
	   insn != stop_insn;
	   insn = NEXT_INSN (insn))
	{
	  rtx set;
	  rtx reg;

	  /* Ignore anything that is not a normal insn.  */
	  if (! INSN_P (insn))
	    continue;

	  /* Basically ignore anything that is not a simple SET.  We do have
	     to make sure to invalidate nonnull_local and set nonnull_killed
	     for such insns though.  */
	  set = single_set (insn);
	  if (!set)
	    {
	      note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
	      continue;
	    }

	  /* See if we've got a usable memory load.  We handle it first
	     in case it uses its address register as a dest (which kills
	     the nonnull property).  */
	  if (GET_CODE (SET_SRC (set)) == MEM
	      && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
	      && REGNO (reg) >= npi->min_reg
	      && REGNO (reg) < npi->max_reg)
	    SET_BIT (nonnull_local[current_block->index],
		     REGNO (reg) - npi->min_reg);

	  /* Now invalidate stuff clobbered by this insn.  */
	  note_stores (PATTERN (insn), invalidate_nonnull_info, npi);

	  /* And handle stores; we do these last since any sets in INSN can
	     not kill the nonnull property if it is derived from a MEM
	     appearing in a SET_DEST.  */
	  if (GET_CODE (SET_DEST (set)) == MEM
	      && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
	      && REGNO (reg) >= npi->min_reg
	      && REGNO (reg) < npi->max_reg)
	    SET_BIT (nonnull_local[current_block->index],
		     REGNO (reg) - npi->min_reg);
	}
    }

  /* Now compute global properties based on the local properties.  This
     is a classic global availability algorithm.  */
  compute_available (nonnull_local, nonnull_killed,
		     nonnull_avout, nonnull_avin);

  /* Now look at each bb and see if it ends with a compare of a value
     against zero.  */
  FOR_EACH_BB (bb)
    {
      rtx last_insn = BB_END (bb);
      rtx condition, earliest;
      int compare_and_branch;

      /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
	 since BLOCK_REG[BB] is zero if this block did not end with a
	 comparison against zero, this condition works.  */
      if (block_reg[bb->index] < npi->min_reg
	  || block_reg[bb->index] >= npi->max_reg)
	continue;

      /* LAST_INSN is a conditional jump.  Get its condition.  */
      condition = get_condition (last_insn, &earliest, false);

      /* If we can't determine the condition then skip.  */
      if (! condition)
	continue;

      /* Is the register known to have a nonzero value?  */
      if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
	continue;

      /* Try to compute whether the compare/branch at the loop end is one or
	 two instructions.  */
      if (earliest == last_insn)
	compare_and_branch = 1;
      else if (earliest == prev_nonnote_insn (last_insn))
	compare_and_branch = 2;
      else
	continue;

      /* We know the register in this comparison is nonnull at exit from
	 this block.  We can optimize this comparison.  */
      if (GET_CODE (condition) == NE)
	{
	  rtx new_jump;

	  new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
					   last_insn);
	  JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
	  LABEL_NUSES (JUMP_LABEL (new_jump))++;
	  emit_barrier_after (new_jump);
	}

      something_changed = 1;
      delete_insn (last_insn);
#ifdef HAVE_cc0
      if (compare_and_branch == 2)
	delete_insn (earliest);
#endif
      purge_dead_edges (bb);

      /* Don't check this block again.  (Note that BB_END is
	 invalid here; we deleted the last instruction in the
	 block.)  */
      block_reg[bb->index] = 0;
    }

  return something_changed;
}

/* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
   at compile time.

   This is conceptually similar to global constant/copy propagation and
   classic global CSE (it even uses the same dataflow equations as cprop).

   If a register is used as a memory address with the form (mem (reg)), then
   we know that REG can not be zero at that point in the program.  Any
   instruction which sets REG "kills" this property.

   So, if every path leading to a conditional branch has an available memory
   reference of that form, then we know the register can not have the value
   zero at the conditional branch.

   So we merely need to compute the local properties and propagate that data
   around the cfg, then optimize where possible.

   We run this pass two times.  Once before CSE, then again after CSE.  This
   has proven to be the most profitable approach.  It is rare for new
   optimization opportunities of this nature to appear after the first CSE
   pass.

   This could probably be integrated with global cprop with a little work.  */
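
/* A sketch of the effect at the source level (hypothetical code):

	x = p->field;        the load (mem (reg P)) proves P nonnull
	...
	if (p != 0)          provably true on every incoming path
	  ...

   The load is available on all paths to the test and nothing redefines
   P in between, so the NE comparison is known to be true and the
   conditional branch is replaced by an unconditional jump (or simply
   deleted, for an EQ test).  */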
int
delete_null_pointer_checks (rtx f ATTRIBUTE_UNUSED)
{
  sbitmap *nonnull_avin, *nonnull_avout;
  unsigned int *block_reg;
  basic_block bb;
  int reg;
  int regs_per_pass;
  int max_reg = max_reg_num ();
  struct null_pointer_info npi;
  int something_changed = 0;

  /* If we have only a single block, or it is too expensive, give up.  */
  if (n_basic_blocks <= 1
      || is_too_expensive (_ ("NULL pointer checks disabled")))
    return 0;

  /* We need four bitmaps, each with a bit for each register in each
     basic block.  */
  regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);

  /* Allocate bitmaps to hold local and global properties.  */
  npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
  npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
  nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
  nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);

  /* Go through the basic blocks, seeing whether or not each block
     ends with a conditional branch whose condition is a comparison
     against zero.  Record the register compared in BLOCK_REG.  */
  block_reg = xcalloc (last_basic_block, sizeof (int));
  FOR_EACH_BB (bb)
    {
      rtx last_insn = BB_END (bb);
      rtx condition, earliest, reg;

      /* We only want conditional branches.  */
      if (GET_CODE (last_insn) != JUMP_INSN
	  || !any_condjump_p (last_insn)
	  || !onlyjump_p (last_insn))
	continue;

      /* LAST_INSN is a conditional jump.  Get its condition.  */
      condition = get_condition (last_insn, &earliest, false);

      /* If we were unable to get the condition, or it is not an equality
	 comparison against zero then there's nothing we can do.  */
      if (!condition
	  || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
	  || GET_CODE (XEXP (condition, 1)) != CONST_INT
	  || (XEXP (condition, 1)
	      != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
	continue;

      /* We must be checking a register against zero.  */
      reg = XEXP (condition, 0);
      if (GET_CODE (reg) != REG)
	continue;

      block_reg[bb->index] = REGNO (reg);
    }

  /* Go through the algorithm for each block of registers.  */
  for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
    {
      npi.min_reg = reg;
      npi.max_reg = MIN (reg + regs_per_pass, max_reg);
      something_changed |= delete_null_pointer_checks_1 (block_reg,
							 nonnull_avin,
							 nonnull_avout,
							 &npi);
    }

  /* Free the table of registers compared at the end of every block.  */
  free (block_reg);

  /* Free bitmaps.  */
  sbitmap_vector_free (npi.nonnull_local);
  sbitmap_vector_free (npi.nonnull_killed);
  sbitmap_vector_free (nonnull_avin);
  sbitmap_vector_free (nonnull_avout);

  return something_changed;
}

/* Code Hoisting variables and subroutines.  */

/* Very busy expressions.  */
static sbitmap *hoist_vbein;
static sbitmap *hoist_vbeout;

/* Hoistable expressions.  */
static sbitmap *hoist_exprs;

/* ??? We could compute post dominators and run this algorithm in
   reverse to perform tail merging; doing so would probably be
   more effective than the tail merging code in jump.c.

   It's unclear if tail merging could be run in parallel with
   code hoisting.  It would be nice.  */

/* Allocate vars used for code hoisting analysis.  */

static void
alloc_code_hoist_mem (int n_blocks, int n_exprs)
{
  antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
  transp = sbitmap_vector_alloc (n_blocks, n_exprs);
  comp = sbitmap_vector_alloc (n_blocks, n_exprs);

  hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
  hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
  hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
  transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
}

/* Free vars used for code hoisting analysis.  */

static void
free_code_hoist_mem (void)
{
  sbitmap_vector_free (antloc);
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);

  sbitmap_vector_free (hoist_vbein);
  sbitmap_vector_free (hoist_vbeout);
  sbitmap_vector_free (hoist_exprs);
  sbitmap_vector_free (transpout);

  free_dominance_info (CDI_DOMINATORS);
}

/* Compute the very busy expressions at entry/exit from each block.

   An expression is very busy if all paths from a given point
   compute the expression.  */
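
/* A sketch of the fixed point the loop below computes, bit-wise per
   expression:

	VBEIN(bb)  = ANTLOC(bb) | (VBEOUT(bb) & TRANSP(bb))
	VBEOUT(bb) = intersection of VBEIN(S) over all successors S of bb

   (VBEOUT is left empty where there is no successor set to meet).
   The blocks are scanned in reverse order until no bit changes.  */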

static void
compute_code_hoist_vbeinout (void)
{
  int changed, passes;
  basic_block bb;

  sbitmap_vector_zero (hoist_vbeout, last_basic_block);
  sbitmap_vector_zero (hoist_vbein, last_basic_block);

  passes = 0;
  changed = 1;

  while (changed)
    {
      changed = 0;

      /* We scan the blocks in the reverse order to speed up
	 the convergence.  */
      FOR_EACH_BB_REVERSE (bb)
	{
	  changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
					      hoist_vbeout[bb->index], transp[bb->index]);
	  if (bb->next_bb != EXIT_BLOCK_PTR)
	    sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
	}

      passes++;
    }

  if (gcse_file)
    fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
}

/* Top level routine to do the dataflow analysis needed by code hoisting.  */

static void
compute_code_hoist_data (void)
{
  compute_local_properties (transp, comp, antloc, &expr_hash_table);
  compute_transpout ();
  compute_code_hoist_vbeinout ();
  calculate_dominance_info (CDI_DOMINATORS);
  if (gcse_file)
    fprintf (gcse_file, "\n");
}

/* Determine if the expression identified by EXPR_INDEX would
   reach BB unimpared if it was placed at the end of EXPR_BB.

   It's unclear exactly what Muchnick meant by "unimpared".  It seems
   to me that the expression must either be computed or transparent in
   *every* block in the path(s) from EXPR_BB to BB.  Any other definition
   would allow the expression to be hoisted out of loops, even if
   the expression wasn't a loop invariant.

   Contrast this to reachability for PRE where an expression is
   considered reachable if *any* path reaches instead of *all*
   paths.  */
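
/* For instance (a sketch): if EXPR_BB branches to B1 and B2, both of
   which fall into BB, and B1 assigns to one of the expression's
   operands without recomputing the expression, the backward walk below
   stops at B1 (the expression is neither computed nor transparent
   there), the expression does not reach BB "unimpared", and it will
   not be counted as hoistable into EXPR_BB from BB.  */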
6310
6311static int
1d088dee 6312hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited)
bb457bd9
JL
6313{
6314 edge pred;
283a2545 6315 int visited_allocated_locally = 0;
589005ff 6316
bb457bd9
JL
6317
6318 if (visited == NULL)
6319 {
8e42ace1 6320 visited_allocated_locally = 1;
d55bc081 6321 visited = xcalloc (last_basic_block, 1);
bb457bd9
JL
6322 }
6323
e2d2ed72 6324 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
bb457bd9 6325 {
e2d2ed72 6326 basic_block pred_bb = pred->src;
bb457bd9
JL
6327
6328 if (pred->src == ENTRY_BLOCK_PTR)
6329 break;
f305679f
JH
6330 else if (pred_bb == expr_bb)
6331 continue;
0b17ab2f 6332 else if (visited[pred_bb->index])
bb457bd9 6333 continue;
c4c81601 6334
bb457bd9 6335 /* Does this predecessor generate this expression? */
0b17ab2f 6336 else if (TEST_BIT (comp[pred_bb->index], expr_index))
bb457bd9 6337 break;
0b17ab2f 6338 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
bb457bd9 6339 break;
c4c81601 6340
bb457bd9
JL
6341 /* Not killed. */
6342 else
6343 {
0b17ab2f 6344 visited[pred_bb->index] = 1;
bb457bd9
JL
6345 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
6346 pred_bb, visited))
6347 break;
6348 }
6349 }
589005ff 6350 if (visited_allocated_locally)
283a2545 6351 free (visited);
c4c81601 6352
bb457bd9
JL
6353 return (pred == NULL);
6354}
6355\f
6356/* Actually perform code hoisting. */
c4c81601 6357
bb457bd9 6358static void
1d088dee 6359hoist_code (void)
bb457bd9 6360{
e0082a72 6361 basic_block bb, dominated;
c635a1ec
DB
6362 basic_block *domby;
6363 unsigned int domby_len;
6364 unsigned int i,j;
bb457bd9 6365 struct expr **index_map;
c4c81601 6366 struct expr *expr;
bb457bd9 6367
d55bc081 6368 sbitmap_vector_zero (hoist_exprs, last_basic_block);
bb457bd9
JL
6369
6370 /* Compute a mapping from expression number (`bitmap_index') to
6371 hash table entry. */
6372
703ad42b 6373 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
02280659
ZD
6374 for (i = 0; i < expr_hash_table.size; i++)
6375 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601 6376 index_map[expr->bitmap_index] = expr;
bb457bd9
JL
6377
6378 /* Walk over each basic block looking for potentially hoistable
6379 expressions, nothing gets hoisted from the entry block. */
e0082a72 6380 FOR_EACH_BB (bb)
bb457bd9
JL
6381 {
6382 int found = 0;
6383 int insn_inserted_p;
6384
d47cc544 6385 domby_len = get_dominated_by (CDI_DOMINATORS, bb, &domby);
bb457bd9
JL
6386 /* Examine each expression that is very busy at the exit of this
6387 block. These are the potentially hoistable expressions. */
e0082a72 6388 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
bb457bd9
JL
6389 {
6390 int hoistable = 0;
c4c81601 6391
c635a1ec
DB
6392 if (TEST_BIT (hoist_vbeout[bb->index], i)
6393 && TEST_BIT (transpout[bb->index], i))
bb457bd9
JL
6394 {
6395 /* We've found a potentially hoistable expression, now
6396 we look at every block BB dominates to see if it
6397 computes the expression. */
c635a1ec 6398 for (j = 0; j < domby_len; j++)
bb457bd9 6399 {
c635a1ec 6400 dominated = domby[j];
bb457bd9 6401 /* Ignore self dominance. */
c635a1ec 6402 if (bb == dominated)
bb457bd9 6403 continue;
bb457bd9
JL
6404 /* We've found a dominated block, now see if it computes
6405 the busy expression and whether or not moving that
6406 expression to the "beginning" of that block is safe. */
e0082a72 6407 if (!TEST_BIT (antloc[dominated->index], i))
bb457bd9
JL
6408 continue;
6409
6410 /* Note if the expression would reach the dominated block
589005ff 6411		     unimpaired if it was placed at the end of BB.
bb457bd9
JL
6412
6413 Keep track of how many times this expression is hoistable
6414 from a dominated block into BB. */
e0082a72 6415 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
bb457bd9
JL
6416 hoistable++;
6417 }
6418
ff7cc307 6419 /* If we found more than one hoistable occurrence of this
bb457bd9
JL
6420 expression, then note it in the bitmap of expressions to
6421 hoist. It makes no sense to hoist things which are computed
6422 in only one BB, and doing so tends to pessimize register
6423 allocation. One could increase this value to try harder
6424 to avoid any possible code expansion due to register
6425 allocation issues; however experiments have shown that
6426 the vast majority of hoistable expressions are only movable
e0bb17a8 6427 from two successors, so raising this threshold is likely
bb457bd9
JL
6428 to nullify any benefit we get from code hoisting. */
6429 if (hoistable > 1)
6430 {
e0082a72 6431 SET_BIT (hoist_exprs[bb->index], i);
bb457bd9
JL
6432 found = 1;
6433 }
6434 }
6435 }
bb457bd9
JL
6436 /* If we found nothing to hoist, then quit now. */
6437 if (! found)
c635a1ec 6438 {
1d088dee 6439 free (domby);
bb457bd9 6440 continue;
c635a1ec 6441 }
bb457bd9
JL
6442
6443 /* Loop over all the hoistable expressions. */
e0082a72 6444 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
bb457bd9
JL
6445 {
6446 /* We want to insert the expression into BB only once, so
6447 note when we've inserted it. */
6448 insn_inserted_p = 0;
6449
6450 /* These tests should be the same as the tests above. */
e0082a72 6451 if (TEST_BIT (hoist_vbeout[bb->index], i))
bb457bd9
JL
6452 {
6453 /* We've found a potentially hoistable expression, now
6454 we look at every block BB dominates to see if it
6455 computes the expression. */
c635a1ec 6456 for (j = 0; j < domby_len; j++)
bb457bd9 6457 {
c635a1ec 6458 dominated = domby[j];
bb457bd9 6459 /* Ignore self dominance. */
c635a1ec 6460 if (bb == dominated)
bb457bd9
JL
6461 continue;
6462
6463 /* We've found a dominated block, now see if it computes
6464 the busy expression and whether or not moving that
6465 expression to the "beginning" of that block is safe. */
e0082a72 6466 if (!TEST_BIT (antloc[dominated->index], i))
bb457bd9
JL
6467 continue;
6468
6469 /* The expression is computed in the dominated block and
6470 it would be safe to compute it at the start of the
6471 dominated block. Now we have to determine if the
ff7cc307 6472 expression would reach the dominated block if it was
bb457bd9 6473 placed at the end of BB. */
e0082a72 6474 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
bb457bd9
JL
6475 {
6476 struct expr *expr = index_map[i];
6477 struct occr *occr = expr->antic_occr;
6478 rtx insn;
6479 rtx set;
6480
ff7cc307 6481 /* Find the right occurrence of this expression. */
e0082a72 6482		      while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
bb457bd9
JL
6483 occr = occr->next;
6484
6485 /* Should never happen. */
6486 if (!occr)
6487 abort ();
6488
6489 insn = occr->insn;
589005ff 6490
bb457bd9
JL
6491 set = single_set (insn);
6492 if (! set)
6493 abort ();
6494
6495 /* Create a pseudo-reg to store the result of reaching
6496 expressions into. Get the mode for the new pseudo
6497 from the mode of the original destination pseudo. */
6498 if (expr->reaching_reg == NULL)
6499 expr->reaching_reg
6500 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
6501
10d1bb36
JH
6502 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
6503 delete_insn (insn);
6504 occr->deleted_p = 1;
6505 if (!insn_inserted_p)
bb457bd9 6506 {
10d1bb36
JH
6507 insert_insn_end_bb (index_map[i], bb, 0);
6508 insn_inserted_p = 1;
bb457bd9
JL
6509 }
6510 }
6511 }
6512 }
6513 }
c635a1ec 6514 free (domby);
bb457bd9 6515 }
c4c81601 6516
8e42ace1 6517 free (index_map);
bb457bd9
JL
6518}
6519
6520/* Top level routine to perform one code hoisting (aka unification) pass
6521
cc2902df 6522 Return nonzero if a change was made. */
bb457bd9
JL
6523
6524static int
1d088dee 6525one_code_hoisting_pass (void)
bb457bd9
JL
6526{
6527 int changed = 0;
6528
02280659
ZD
6529 alloc_hash_table (max_cuid, &expr_hash_table, 0);
6530 compute_hash_table (&expr_hash_table);
bb457bd9 6531 if (gcse_file)
02280659 6532    dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);
c4c81601 6533
02280659 6534 if (expr_hash_table.n_elems > 0)
bb457bd9 6535 {
02280659 6536 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
bb457bd9
JL
6537 compute_code_hoist_data ();
6538 hoist_code ();
6539 free_code_hoist_mem ();
6540 }
c4c81601 6541
02280659 6542 free_hash_table (&expr_hash_table);
bb457bd9
JL
6543
6544 return changed;
6545}
a13d4ebf
AM
6546\f
6547/* Here we provide the things required to do store motion towards
6548 the exit. In order for this to be effective, gcse also needed to
 6549   be taught how to move a load when it is killed only by a store to itself.
6550
6551 int i;
6552 float a[10];
6553
6554 void foo(float scale)
6555 {
6556 for (i=0; i<10; i++)
6557 a[i] *= scale;
6558 }
6559
6560 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
589005ff
KH
 6561   the load out since it's live around the loop, and stored at the bottom
6562 of the loop.
a13d4ebf 6563
589005ff 6564 The 'Load Motion' referred to and implemented in this file is
a13d4ebf
AM
6565 an enhancement to gcse which when using edge based lcm, recognizes
6566 this situation and allows gcse to move the load out of the loop.
6567
6568 Once gcse has hoisted the load, store motion can then push this
6569 load towards the exit, and we end up with no loads or stores of 'i'
6570 in the loop. */
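/* A sketch of the intended effect on the example above (illustrative
   only):

     before:                    after load motion + store motion:
       loop:                      r = i;
         load  r <- i             loop:
         ... use/set r ...          ... use/set r ...
         store i <- r             end loop
       end loop                   store i <- r

   The load is hoisted before the loop; store motion then sinks the
   store past the loop exit, so 'i' is neither loaded nor stored
   inside the loop.  */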
6571
ff7cc307 6572/* This will search the ldst list for a matching expression. If it
a13d4ebf
AM
6573 doesn't find one, we create one and initialize it. */
6574
6575static struct ls_expr *
1d088dee 6576ldst_entry (rtx x)
a13d4ebf 6577{
b58b21d5 6578 int do_not_record_p = 0;
a13d4ebf 6579 struct ls_expr * ptr;
b58b21d5 6580 unsigned int hash;
a13d4ebf 6581
b58b21d5 6582 hash = hash_expr_1 (x, GET_MODE (x), & do_not_record_p);
a13d4ebf 6583
b58b21d5
RS
6584 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6585 if (ptr->hash_index == hash && expr_equiv_p (ptr->pattern, x))
6586 return ptr;
6587
6588 ptr = xmalloc (sizeof (struct ls_expr));
6589
6590 ptr->next = pre_ldst_mems;
6591 ptr->expr = NULL;
6592 ptr->pattern = x;
6593 ptr->pattern_regs = NULL_RTX;
6594 ptr->loads = NULL_RTX;
6595 ptr->stores = NULL_RTX;
6596 ptr->reaching_reg = NULL_RTX;
6597 ptr->invalid = 0;
6598 ptr->index = 0;
6599 ptr->hash_index = hash;
6600 pre_ldst_mems = ptr;
589005ff 6601
a13d4ebf
AM
6602 return ptr;
6603}
6604
6605/* Free up an individual ldst entry. */
6606
589005ff 6607static void
1d088dee 6608free_ldst_entry (struct ls_expr * ptr)
a13d4ebf 6609{
aaa4ca30
AJ
6610 free_INSN_LIST_list (& ptr->loads);
6611 free_INSN_LIST_list (& ptr->stores);
a13d4ebf
AM
6612
6613 free (ptr);
6614}
6615
6616/* Free up all memory associated with the ldst list. */
6617
6618static void
1d088dee 6619free_ldst_mems (void)
a13d4ebf 6620{
589005ff 6621 while (pre_ldst_mems)
a13d4ebf
AM
6622 {
6623 struct ls_expr * tmp = pre_ldst_mems;
6624
6625 pre_ldst_mems = pre_ldst_mems->next;
6626
6627 free_ldst_entry (tmp);
6628 }
6629
6630 pre_ldst_mems = NULL;
6631}
6632
6633/* Dump debugging info about the ldst list. */
6634
6635static void
1d088dee 6636print_ldst_list (FILE * file)
a13d4ebf
AM
6637{
6638 struct ls_expr * ptr;
6639
6640 fprintf (file, "LDST list: \n");
6641
6642 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6643 {
6644 fprintf (file, " Pattern (%3d): ", ptr->index);
6645
6646 print_rtl (file, ptr->pattern);
6647
6648 fprintf (file, "\n Loads : ");
6649
6650 if (ptr->loads)
6651 print_rtl (file, ptr->loads);
6652 else
6653 fprintf (file, "(nil)");
6654
6655 fprintf (file, "\n Stores : ");
6656
6657 if (ptr->stores)
6658 print_rtl (file, ptr->stores);
6659 else
6660 fprintf (file, "(nil)");
6661
6662 fprintf (file, "\n\n");
6663 }
6664
6665 fprintf (file, "\n");
6666}
6667
 6668/* Return X's entry in the list of ldst only expressions, or NULL if it
 6669   is not in the list or has been invalidated.  */
6669
6670static struct ls_expr *
1d088dee 6671find_rtx_in_ldst (rtx x)
a13d4ebf
AM
6672{
6673 struct ls_expr * ptr;
589005ff 6674
a13d4ebf
AM
6675 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6676 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
6677 return ptr;
6678
6679 return NULL;
6680}
6681
6682/* Assign each element of the list of mems a monotonically increasing value. */
6683
6684static int
1d088dee 6685enumerate_ldsts (void)
a13d4ebf
AM
6686{
6687 struct ls_expr * ptr;
6688 int n = 0;
6689
6690 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6691 ptr->index = n++;
6692
6693 return n;
6694}
6695
6696/* Return first item in the list. */
6697
6698static inline struct ls_expr *
1d088dee 6699first_ls_expr (void)
a13d4ebf
AM
6700{
6701 return pre_ldst_mems;
6702}
6703
0e8a66de 6704/* Return the next item in the list after the specified one. */
a13d4ebf
AM
6705
6706static inline struct ls_expr *
1d088dee 6707next_ls_expr (struct ls_expr * ptr)
a13d4ebf
AM
6708{
6709 return ptr->next;
6710}
6711\f
6712/* Load Motion for loads which only kill themselves. */
6713
6714/* Return true if x is a simple MEM operation, with no registers or
6715 side effects. These are the types of loads we consider for the
6716 ld_motion list, otherwise we let the usual aliasing take care of it. */
6717
589005ff 6718static int
1d088dee 6719simple_mem (rtx x)
a13d4ebf
AM
6720{
6721 if (GET_CODE (x) != MEM)
6722 return 0;
589005ff 6723
a13d4ebf
AM
6724 if (MEM_VOLATILE_P (x))
6725 return 0;
589005ff 6726
a13d4ebf
AM
6727 if (GET_MODE (x) == BLKmode)
6728 return 0;
aaa4ca30 6729
47a3dae1
ZD
6730 /* If we are handling exceptions, we must be careful with memory references
6731 that may trap. If we are not, the behavior is undefined, so we may just
6732 continue. */
6733 if (flag_non_call_exceptions && may_trap_p (x))
98d3d336
RS
6734 return 0;
6735
47a3dae1
ZD
6736 if (side_effects_p (x))
6737 return 0;
589005ff 6738
47a3dae1
ZD
6739 /* Do not consider function arguments passed on stack. */
6740 if (reg_mentioned_p (stack_pointer_rtx, x))
6741 return 0;
6742
6743 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
6744 return 0;
6745
6746 return 1;
a13d4ebf
AM
6747}
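/* For example (illustrative only): given "static float a[10];", a load
   from (mem:SF (symbol_ref "a")) is a simple MEM, whereas a volatile
   MEM, a BLKmode MEM, or a MEM whose address mentions the stack
   pointer is rejected by the tests above.  */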
6748
589005ff
KH
6749/* Make sure there isn't a buried reference in this pattern anywhere.
6750 If there is, invalidate the entry for it since we're not capable
 6751   of fixing it up just yet.  We have to be sure we know about ALL
a13d4ebf
AM
6752 loads since the aliasing code will allow all entries in the
 6753   ld_motion list to not alias one another.  If we miss a load, we will get
589005ff 6754 the wrong value since gcse might common it and we won't know to
a13d4ebf
AM
6755 fix it up. */
6756
6757static void
1d088dee 6758invalidate_any_buried_refs (rtx x)
a13d4ebf
AM
6759{
6760 const char * fmt;
8e42ace1 6761 int i, j;
a13d4ebf
AM
6762 struct ls_expr * ptr;
6763
6764 /* Invalidate it in the list. */
6765 if (GET_CODE (x) == MEM && simple_mem (x))
6766 {
6767 ptr = ldst_entry (x);
6768 ptr->invalid = 1;
6769 }
6770
6771 /* Recursively process the insn. */
6772 fmt = GET_RTX_FORMAT (GET_CODE (x));
589005ff 6773
a13d4ebf
AM
6774 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6775 {
6776 if (fmt[i] == 'e')
6777 invalidate_any_buried_refs (XEXP (x, i));
6778 else if (fmt[i] == 'E')
6779 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6780 invalidate_any_buried_refs (XVECEXP (x, i, j));
6781 }
6782}
6783
4d3eb89a
HPN
6784/* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
6785 being defined as MEM loads and stores to symbols, with no side effects
6786 and no registers in the expression. For a MEM destination, we also
6787 check that the insn is still valid if we replace the destination with a
6788 REG, as is done in update_ld_motion_stores. If there are any uses/defs
6789 which don't match this criteria, they are invalidated and trimmed out
6790 later. */
a13d4ebf 6791
589005ff 6792static void
1d088dee 6793compute_ld_motion_mems (void)
a13d4ebf
AM
6794{
6795 struct ls_expr * ptr;
e0082a72 6796 basic_block bb;
a13d4ebf 6797 rtx insn;
589005ff 6798
a13d4ebf
AM
6799 pre_ldst_mems = NULL;
6800
e0082a72 6801 FOR_EACH_BB (bb)
a13d4ebf 6802 {
a813c111
SB
6803 for (insn = BB_HEAD (bb);
6804 insn && insn != NEXT_INSN (BB_END (bb));
a13d4ebf
AM
6805 insn = NEXT_INSN (insn))
6806 {
735e8085 6807 if (INSN_P (insn))
a13d4ebf
AM
6808 {
6809 if (GET_CODE (PATTERN (insn)) == SET)
6810 {
6811 rtx src = SET_SRC (PATTERN (insn));
6812 rtx dest = SET_DEST (PATTERN (insn));
6813
6814 /* Check for a simple LOAD... */
6815 if (GET_CODE (src) == MEM && simple_mem (src))
6816 {
6817 ptr = ldst_entry (src);
6818 if (GET_CODE (dest) == REG)
6819 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
6820 else
6821 ptr->invalid = 1;
6822 }
6823 else
6824 {
6825 /* Make sure there isn't a buried load somewhere. */
6826 invalidate_any_buried_refs (src);
6827 }
589005ff 6828
a13d4ebf
AM
6829 /* Check for stores. Don't worry about aliased ones, they
6830 will block any movement we might do later. We only care
6831 about this exact pattern since those are the only
6832 circumstance that we will ignore the aliasing info. */
6833 if (GET_CODE (dest) == MEM && simple_mem (dest))
6834 {
6835 ptr = ldst_entry (dest);
589005ff 6836
f54104df 6837 if (GET_CODE (src) != MEM
4d3eb89a
HPN
6838 && GET_CODE (src) != ASM_OPERANDS
6839 /* Check for REG manually since want_to_gcse_p
6840 returns 0 for all REGs. */
6841 && (REG_P (src) || want_to_gcse_p (src)))
a13d4ebf
AM
6842 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6843 else
6844 ptr->invalid = 1;
6845 }
6846 }
6847 else
6848 invalidate_any_buried_refs (PATTERN (insn));
6849 }
6850 }
6851 }
6852}
6853
589005ff 6854/* Remove any references that have been either invalidated or are not in the
a13d4ebf
AM
6855 expression list for pre gcse. */
6856
6857static void
1d088dee 6858trim_ld_motion_mems (void)
a13d4ebf 6859{
b58b21d5
RS
6860 struct ls_expr * * last = & pre_ldst_mems;
6861 struct ls_expr * ptr = pre_ldst_mems;
a13d4ebf
AM
6862
6863 while (ptr != NULL)
6864 {
b58b21d5 6865 struct expr * expr;
589005ff 6866
a13d4ebf 6867 /* Delete if entry has been made invalid. */
b58b21d5 6868 if (! ptr->invalid)
a13d4ebf 6869 {
a13d4ebf 6870 /* Delete if we cannot find this mem in the expression list. */
b58b21d5 6871 unsigned int hash = ptr->hash_index % expr_hash_table.size;
589005ff 6872
b58b21d5
RS
6873 for (expr = expr_hash_table.table[hash];
6874 expr != NULL;
6875 expr = expr->next_same_hash)
6876 if (expr_equiv_p (expr->expr, ptr->pattern))
6877 break;
a13d4ebf
AM
6878 }
6879 else
b58b21d5
RS
6880 expr = (struct expr *) 0;
6881
6882 if (expr)
a13d4ebf
AM
6883 {
6884 /* Set the expression field if we are keeping it. */
a13d4ebf 6885 ptr->expr = expr;
b58b21d5 6886 last = & ptr->next;
a13d4ebf
AM
6887 ptr = ptr->next;
6888 }
b58b21d5
RS
6889 else
6890 {
6891 *last = ptr->next;
6892 free_ldst_entry (ptr);
6893 ptr = * last;
6894 }
a13d4ebf
AM
6895 }
6896
6897 /* Show the world what we've found. */
6898 if (gcse_file && pre_ldst_mems != NULL)
6899 print_ldst_list (gcse_file);
6900}
6901
6902/* This routine will take an expression which we are replacing with
6903 a reaching register, and update any stores that are needed if
6904 that expression is in the ld_motion list. Stores are updated by
a98ebe2e 6905 copying their SRC to the reaching register, and then storing
a13d4ebf
AM
 6906   the reaching register into the store location.  This keeps the
6907 correct value in the reaching register for the loads. */
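/* For example (RTL shapes are illustrative):

     (set (mem:SI X) (plus:SI (reg A) (reg B)))

   becomes

     (set (reg R) (plus:SI (reg A) (reg B)))
     (set (mem:SI X) (reg R))

   where R is expr->reaching_reg, so subsequent loads from X can be
   satisfied out of R.  */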
6908
6909static void
1d088dee 6910update_ld_motion_stores (struct expr * expr)
a13d4ebf
AM
6911{
6912 struct ls_expr * mem_ptr;
6913
6914 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
6915 {
589005ff
KH
 6916      /* We can try to find just the REACHED stores, but it shouldn't
6917 matter to set the reaching reg everywhere... some might be
a13d4ebf
AM
6918 dead and should be eliminated later. */
6919
4d3eb89a
HPN
6920 /* We replace (set mem expr) with (set reg expr) (set mem reg)
6921 where reg is the reaching reg used in the load. We checked in
6922 compute_ld_motion_mems that we can replace (set mem expr) with
6923 (set reg expr) in that insn. */
a13d4ebf 6924 rtx list = mem_ptr->stores;
589005ff 6925
a13d4ebf
AM
6926 for ( ; list != NULL_RTX; list = XEXP (list, 1))
6927 {
6928 rtx insn = XEXP (list, 0);
6929 rtx pat = PATTERN (insn);
6930 rtx src = SET_SRC (pat);
6931 rtx reg = expr->reaching_reg;
c57718d3 6932 rtx copy, new;
a13d4ebf
AM
6933
6934 /* If we've already copied it, continue. */
6935 if (expr->reaching_reg == src)
6936 continue;
589005ff 6937
a13d4ebf
AM
6938 if (gcse_file)
6939 {
6940 fprintf (gcse_file, "PRE: store updated with reaching reg ");
6941 print_rtl (gcse_file, expr->reaching_reg);
6942 fprintf (gcse_file, ":\n ");
6943 print_inline_rtx (gcse_file, insn, 8);
6944 fprintf (gcse_file, "\n");
6945 }
589005ff 6946
47a3dae1 6947 copy = gen_move_insn ( reg, copy_rtx (SET_SRC (pat)));
c57718d3
RK
6948 new = emit_insn_before (copy, insn);
6949 record_one_set (REGNO (reg), new);
a13d4ebf
AM
6950 SET_SRC (pat) = reg;
6951
 6952	  /* Un-recognize this pattern since it's probably different now.  */
6953 INSN_CODE (insn) = -1;
6954 gcse_create_count++;
6955 }
6956 }
6957}
6958\f
6959/* Store motion code. */
6960
47a3dae1
ZD
6961#define ANTIC_STORE_LIST(x) ((x)->loads)
6962#define AVAIL_STORE_LIST(x) ((x)->stores)
6963#define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
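/* Note that store motion reuses three ls_expr fields that load motion
   uses for other purposes; the macros above only give them names that
   are meaningful in this context.  */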
6964
589005ff 6965/* This is used to communicate the target bitvector we want to use in the
aaa4ca30 6966 reg_set_info routine when called via the note_stores mechanism. */
47a3dae1
ZD
6967static int * regvec;
6968
6969/* And current insn, for the same routine. */
6970static rtx compute_store_table_current_insn;
aaa4ca30 6971
a13d4ebf
AM
6972/* Used in computing the reverse edge graph bit vectors. */
6973static sbitmap * st_antloc;
6974
6975/* Global holding the number of store expressions we are dealing with. */
6976static int num_stores;
6977
01c43039
RE
6978/* Checks to set if we need to mark a register set. Called from
6979 note_stores. */
a13d4ebf 6980
aaa4ca30 6981static void
1d088dee 6982reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED,
01c43039 6983 void *data)
a13d4ebf 6984{
01c43039
RE
6985 sbitmap bb_reg = data;
6986
aaa4ca30
AJ
6987 if (GET_CODE (dest) == SUBREG)
6988 dest = SUBREG_REG (dest);
adfcce61 6989
aaa4ca30 6990 if (GET_CODE (dest) == REG)
01c43039
RE
6991 {
6992 regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
6993 if (bb_reg)
6994 SET_BIT (bb_reg, REGNO (dest));
6995 }
6996}
6997
6998/* Clear any mark that says that this insn sets dest. Called from
6999 note_stores. */
7000
7001static void
7002reg_clear_last_set (rtx dest, rtx setter ATTRIBUTE_UNUSED,
7003 void *data)
7004{
7005 int *dead_vec = data;
7006
7007 if (GET_CODE (dest) == SUBREG)
7008 dest = SUBREG_REG (dest);
7009
7010 if (GET_CODE (dest) == REG &&
7011 dead_vec[REGNO (dest)] == INSN_UID (compute_store_table_current_insn))
7012 dead_vec[REGNO (dest)] = 0;
a13d4ebf
AM
7013}
7014
47a3dae1
ZD
 7015/* Return false if some of the registers in list X are killed
 7016   because they are set in the array REGS_SET.  */
1d088dee 7017
47a3dae1 7018static bool
1d088dee 7019store_ops_ok (rtx x, int *regs_set)
47a3dae1
ZD
7020{
7021 rtx reg;
7022
7023 for (; x; x = XEXP (x, 1))
7024 {
7025 reg = XEXP (x, 0);
7026 if (regs_set[REGNO(reg)])
1d088dee 7027 return false;
47a3dae1 7028 }
a13d4ebf 7029
47a3dae1
ZD
7030 return true;
7031}
7032
7033/* Returns a list of registers mentioned in X. */
7034static rtx
1d088dee 7035extract_mentioned_regs (rtx x)
47a3dae1
ZD
7036{
7037 return extract_mentioned_regs_helper (x, NULL_RTX);
7038}
7039
7040/* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
7041 registers. */
7042static rtx
1d088dee 7043extract_mentioned_regs_helper (rtx x, rtx accum)
a13d4ebf
AM
7044{
7045 int i;
7046 enum rtx_code code;
7047 const char * fmt;
7048
7049 /* Repeat is used to turn tail-recursion into iteration. */
7050 repeat:
7051
7052 if (x == 0)
47a3dae1 7053 return accum;
a13d4ebf
AM
7054
7055 code = GET_CODE (x);
7056 switch (code)
7057 {
7058 case REG:
47a3dae1 7059 return alloc_EXPR_LIST (0, x, accum);
a13d4ebf
AM
7060
7061 case MEM:
7062 x = XEXP (x, 0);
7063 goto repeat;
7064
7065 case PRE_DEC:
7066 case PRE_INC:
7067 case POST_DEC:
7068 case POST_INC:
47a3dae1
ZD
7069 /* We do not run this function with arguments having side effects. */
7070 abort ();
a13d4ebf
AM
7071
7072 case PC:
7073 case CC0: /*FIXME*/
7074 case CONST:
7075 case CONST_INT:
7076 case CONST_DOUBLE:
69ef87e2 7077 case CONST_VECTOR:
a13d4ebf
AM
7078 case SYMBOL_REF:
7079 case LABEL_REF:
7080 case ADDR_VEC:
7081 case ADDR_DIFF_VEC:
47a3dae1 7082 return accum;
a13d4ebf
AM
7083
7084 default:
7085 break;
7086 }
7087
7088 i = GET_RTX_LENGTH (code) - 1;
7089 fmt = GET_RTX_FORMAT (code);
589005ff 7090
a13d4ebf
AM
7091 for (; i >= 0; i--)
7092 {
7093 if (fmt[i] == 'e')
7094 {
7095 rtx tem = XEXP (x, i);
7096
7097 /* If we are about to do the last recursive call
47a3dae1 7098 needed at this level, change it into iteration. */
a13d4ebf
AM
7099 if (i == 0)
7100 {
7101 x = tem;
7102 goto repeat;
7103 }
589005ff 7104
47a3dae1 7105 accum = extract_mentioned_regs_helper (tem, accum);
a13d4ebf
AM
7106 }
7107 else if (fmt[i] == 'E')
7108 {
7109 int j;
589005ff 7110
a13d4ebf 7111 for (j = 0; j < XVECLEN (x, i); j++)
47a3dae1 7112 accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
a13d4ebf
AM
7113 }
7114 }
7115
47a3dae1 7116 return accum;
a13d4ebf
AM
7117}
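/* For example (illustrative): for X = (mem:SI (plus:SI (reg A)
   (reg B))), extract_mentioned_regs returns an EXPR_LIST containing
   (reg A) and (reg B).  */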
7118
47a3dae1
ZD
 7119/* Determine whether INSN is a MEM store pattern that we will consider moving.
7120 REGS_SET_BEFORE is bitmap of registers set before (and including) the
7121 current insn, REGS_SET_AFTER is bitmap of registers set after (and
7122 including) the insn in this basic block. We must be passing through BB from
7123 head to end, as we are using this fact to speed things up.
1d088dee 7124
47a3dae1
ZD
7125 The results are stored this way:
7126
7127 -- the first anticipatable expression is added into ANTIC_STORE_LIST
7128 -- if the processed expression is not anticipatable, NULL_RTX is added
7129 there instead, so that we can use it as indicator that no further
7130 expression of this type may be anticipatable
7131 -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
7132 consequently, all of them but this head are dead and may be deleted.
7133 -- if the expression is not available, the insn due to that it fails to be
7134 available is stored in reaching_reg.
7135
 7136   Things are complicated a bit by the fact that there may already be stores
 7137   to the same MEM from other blocks; also, the caller must take care of the
e0bb17a8 7138   necessary cleanup of the temporary markers after the end of the basic block.
47a3dae1 7139 */
a13d4ebf
AM
7140
7141static void
1d088dee 7142find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
a13d4ebf
AM
7143{
7144 struct ls_expr * ptr;
47a3dae1
ZD
7145 rtx dest, set, tmp;
7146 int check_anticipatable, check_available;
7147 basic_block bb = BLOCK_FOR_INSN (insn);
a13d4ebf 7148
47a3dae1
ZD
7149 set = single_set (insn);
7150 if (!set)
a13d4ebf
AM
7151 return;
7152
47a3dae1 7153 dest = SET_DEST (set);
589005ff 7154
a13d4ebf
AM
7155 if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
7156 || GET_MODE (dest) == BLKmode)
aaa4ca30
AJ
7157 return;
7158
47a3dae1
ZD
7159 if (side_effects_p (dest))
7160 return;
aaa4ca30 7161
47a3dae1
ZD
7162 /* If we are handling exceptions, we must be careful with memory references
7163 that may trap. If we are not, the behavior is undefined, so we may just
7164 continue. */
94f24ddc 7165 if (flag_non_call_exceptions && may_trap_p (dest))
47a3dae1 7166 return;
1d088dee 7167
a13d4ebf 7168 ptr = ldst_entry (dest);
47a3dae1
ZD
7169 if (!ptr->pattern_regs)
7170 ptr->pattern_regs = extract_mentioned_regs (dest);
7171
7172 /* Do not check for anticipatability if we either found one anticipatable
7173 store already, or tested for one and found out that it was killed. */
7174 check_anticipatable = 0;
7175 if (!ANTIC_STORE_LIST (ptr))
7176 check_anticipatable = 1;
7177 else
7178 {
7179 tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
7180 if (tmp != NULL_RTX
7181 && BLOCK_FOR_INSN (tmp) != bb)
7182 check_anticipatable = 1;
7183 }
7184 if (check_anticipatable)
7185 {
7186 if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
7187 tmp = NULL_RTX;
7188 else
7189 tmp = insn;
7190 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
7191 ANTIC_STORE_LIST (ptr));
7192 }
a13d4ebf 7193
e0bb17a8 7194  /* It is not necessary to check whether the store is available if we did
47a3dae1
ZD
7195 it successfully before; if we failed before, do not bother to check
7196 until we reach the insn that caused us to fail. */
7197 check_available = 0;
7198 if (!AVAIL_STORE_LIST (ptr))
7199 check_available = 1;
7200 else
7201 {
7202 tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
7203 if (BLOCK_FOR_INSN (tmp) != bb)
7204 check_available = 1;
7205 }
7206 if (check_available)
7207 {
 7208      /* Check that we have already reached the insn at which the check
7209 failed last time. */
7210 if (LAST_AVAIL_CHECK_FAILURE (ptr))
7211 {
a813c111 7212 for (tmp = BB_END (bb);
47a3dae1
ZD
7213 tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
7214 tmp = PREV_INSN (tmp))
7215 continue;
7216 if (tmp == insn)
7217 check_available = 0;
7218 }
7219 else
7220 check_available = store_killed_after (dest, ptr->pattern_regs, insn,
7221 bb, regs_set_after,
7222 &LAST_AVAIL_CHECK_FAILURE (ptr));
7223 }
7224 if (!check_available)
7225 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
7226}
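/* A small example (hypothetical block, illustrative only):

     bb:  (set (mem M) e1)    <- anticipatable: recorded first in
          ...                    ANTIC_STORE_LIST if M is not killed
          (set (mem M) e2)    <- last store: head of AVAIL_STORE_LIST;
                                 the earlier available store to M in
                                 this block is dead.  */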
1d088dee 7227
47a3dae1 7228/* Find available and anticipatable stores. */
a13d4ebf
AM
7229
7230static int
1d088dee 7231compute_store_table (void)
a13d4ebf 7232{
e0082a72
ZD
7233 int ret;
7234 basic_block bb;
aaa4ca30 7235 unsigned regno;
47a3dae1
ZD
7236 rtx insn, pat, tmp;
7237 int *last_set_in, *already_set;
7238 struct ls_expr * ptr, **prev_next_ptr_ptr;
aaa4ca30 7239
a13d4ebf
AM
7240 max_gcse_regno = max_reg_num ();
7241
703ad42b 7242 reg_set_in_block = sbitmap_vector_alloc (last_basic_block,
aaa4ca30 7243 max_gcse_regno);
d55bc081 7244 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
a13d4ebf 7245 pre_ldst_mems = 0;
01c43039 7246 last_set_in = xcalloc (max_gcse_regno, sizeof (int));
47a3dae1 7247 already_set = xmalloc (sizeof (int) * max_gcse_regno);
aaa4ca30 7248
a13d4ebf 7249 /* Find all the stores we care about. */
e0082a72 7250 FOR_EACH_BB (bb)
a13d4ebf 7251 {
47a3dae1 7252 /* First compute the registers set in this block. */
47a3dae1
ZD
7253 regvec = last_set_in;
7254
a813c111
SB
7255 for (insn = BB_HEAD (bb);
7256 insn != NEXT_INSN (BB_END (bb));
47a3dae1
ZD
7257 insn = NEXT_INSN (insn))
7258 {
7259 if (! INSN_P (insn))
7260 continue;
7261
7262 if (GET_CODE (insn) == CALL_INSN)
7263 {
7264 bool clobbers_all = false;
7265#ifdef NON_SAVING_SETJMP
7266 if (NON_SAVING_SETJMP
7267 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7268 clobbers_all = true;
7269#endif
7270
7271 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7272 if (clobbers_all
7273 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
01c43039
RE
7274 {
7275 last_set_in[regno] = INSN_UID (insn);
7276 SET_BIT (reg_set_in_block[bb->index], regno);
7277 }
47a3dae1
ZD
7278 }
7279
7280 pat = PATTERN (insn);
7281 compute_store_table_current_insn = insn;
01c43039 7282 note_stores (pat, reg_set_info, reg_set_in_block[bb->index]);
47a3dae1
ZD
7283 }
7284
47a3dae1
ZD
7285 /* Now find the stores. */
7286 memset (already_set, 0, sizeof (int) * max_gcse_regno);
7287 regvec = already_set;
a813c111
SB
7288 for (insn = BB_HEAD (bb);
7289 insn != NEXT_INSN (BB_END (bb));
47a3dae1 7290 insn = NEXT_INSN (insn))
a13d4ebf 7291 {
19652adf 7292 if (! INSN_P (insn))
a13d4ebf
AM
7293 continue;
7294
aaa4ca30
AJ
7295 if (GET_CODE (insn) == CALL_INSN)
7296 {
19652adf 7297 bool clobbers_all = false;
589005ff 7298#ifdef NON_SAVING_SETJMP
19652adf
ZW
7299 if (NON_SAVING_SETJMP
7300 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7301 clobbers_all = true;
7302#endif
7303
aaa4ca30 7304 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
19652adf
ZW
7305 if (clobbers_all
7306 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
47a3dae1 7307 already_set[regno] = 1;
aaa4ca30 7308 }
589005ff 7309
a13d4ebf 7310 pat = PATTERN (insn);
aaa4ca30 7311 note_stores (pat, reg_set_info, NULL);
589005ff 7312
a13d4ebf 7313 /* Now that we've marked regs, look for stores. */
47a3dae1
ZD
7314 find_moveable_store (insn, already_set, last_set_in);
7315
7316 /* Unmark regs that are no longer set. */
01c43039
RE
7317 compute_store_table_current_insn = insn;
7318 note_stores (pat, reg_clear_last_set, last_set_in);
7319 if (GET_CODE (insn) == CALL_INSN)
7320 {
7321 bool clobbers_all = false;
7322#ifdef NON_SAVING_SETJMP
7323 if (NON_SAVING_SETJMP
7324 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7325 clobbers_all = true;
7326#endif
7327
7328 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7329 if ((clobbers_all
7330 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7331 && last_set_in[regno] == INSN_UID (insn))
7332 last_set_in[regno] = 0;
7333 }
47a3dae1
ZD
7334 }
7335
01c43039
RE
7336#ifdef ENABLE_CHECKING
7337 /* last_set_in should now be all-zero. */
7338 for (regno = 0; regno < max_gcse_regno; regno++)
7339 if (last_set_in[regno] != 0)
7340 abort ();
7341#endif
7342
47a3dae1
ZD
7343 /* Clear temporary marks. */
7344 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7345 {
7346 LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX;
7347 if (ANTIC_STORE_LIST (ptr)
7348 && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
7349 ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
7350 }
7351 }
7352
7353 /* Remove the stores that are not available anywhere, as there will
7354 be no opportunity to optimize them. */
7355 for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
7356 ptr != NULL;
7357 ptr = *prev_next_ptr_ptr)
7358 {
7359 if (!AVAIL_STORE_LIST (ptr))
7360 {
7361 *prev_next_ptr_ptr = ptr->next;
7362 free_ldst_entry (ptr);
a13d4ebf 7363 }
47a3dae1
ZD
7364 else
7365 prev_next_ptr_ptr = &ptr->next;
a13d4ebf
AM
7366 }
7367
7368 ret = enumerate_ldsts ();
589005ff 7369
a13d4ebf
AM
7370 if (gcse_file)
7371 {
47a3dae1 7372 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
a13d4ebf
AM
7373 print_ldst_list (gcse_file);
7374 }
589005ff 7375
47a3dae1
ZD
7376 free (last_set_in);
7377 free (already_set);
a13d4ebf
AM
7378 return ret;
7379}
7380
3b14e3af
ZD
7381/* Check to see if the load X is aliased with STORE_PATTERN.
7382 AFTER is true if we are checking the case when STORE_PATTERN occurs
 7383   after X.  */
a13d4ebf 7384
47a3dae1 7385static bool
3b14e3af 7386load_kills_store (rtx x, rtx store_pattern, int after)
a13d4ebf 7387{
3b14e3af
ZD
7388 if (after)
7389 return anti_dependence (x, store_pattern);
7390 else
7391 return true_dependence (store_pattern, GET_MODE (store_pattern), x,
7392 rtx_addr_varies_p);
a13d4ebf
AM
7393}
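/* E.g. for a store to M and a load from M' that may alias it: if the
   store occurs after the load, the conflict is an anti dependence
   (write after read); if the store occurs first, it is a true
   dependence (read after write), which is what the two calls above
   distinguish.  */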
7394
589005ff 7395/* Go through the entire insn X, looking for any loads which might alias
3b14e3af
ZD
7396 STORE_PATTERN. Return true if found.
7397 AFTER is true if we are checking the case when STORE_PATTERN occurs
7398 after the insn X. */
a13d4ebf 7399
47a3dae1 7400static bool
3b14e3af 7401find_loads (rtx x, rtx store_pattern, int after)
a13d4ebf
AM
7402{
7403 const char * fmt;
8e42ace1 7404 int i, j;
47a3dae1 7405 int ret = false;
a13d4ebf 7406
24a28584 7407 if (!x)
47a3dae1 7408 return false;
24a28584 7409
589005ff 7410 if (GET_CODE (x) == SET)
a13d4ebf
AM
7411 x = SET_SRC (x);
7412
7413 if (GET_CODE (x) == MEM)
7414 {
3b14e3af 7415 if (load_kills_store (x, store_pattern, after))
47a3dae1 7416 return true;
a13d4ebf
AM
7417 }
7418
7419 /* Recursively process the insn. */
7420 fmt = GET_RTX_FORMAT (GET_CODE (x));
589005ff 7421
a13d4ebf
AM
7422 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
7423 {
7424 if (fmt[i] == 'e')
3b14e3af 7425 ret |= find_loads (XEXP (x, i), store_pattern, after);
a13d4ebf
AM
7426 else if (fmt[i] == 'E')
7427 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3b14e3af 7428 ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
a13d4ebf
AM
7429 }
7430 return ret;
7431}
7432
589005ff 7433/* Check if INSN kills the store pattern X (is aliased with it).
3b14e3af
ZD
7434 AFTER is true if we are checking the case when store X occurs
 7435   after the insn.  Return true if it does.  */
a13d4ebf 7436
47a3dae1 7437static bool
3b14e3af 7438store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
a13d4ebf 7439{
d088acea 7440 rtx reg, base, note;
94f24ddc 7441
735e8085 7442 if (!INSN_P (insn))
47a3dae1 7443 return false;
589005ff 7444
a13d4ebf
AM
7445 if (GET_CODE (insn) == CALL_INSN)
7446 {
1218665b
JJ
7447 /* A normal or pure call might read from pattern,
7448 but a const call will not. */
47a3dae1
ZD
7449 if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
7450 return true;
7451
94f24ddc
ZD
7452 /* But even a const call reads its parameters. Check whether the
 7453	 base of some of the registers used in the mem is the stack pointer.  */
7454 for (reg = x_regs; reg; reg = XEXP (reg, 1))
7455 {
bc083e18 7456 base = find_base_term (XEXP (reg, 0));
94f24ddc
ZD
7457 if (!base
7458 || (GET_CODE (base) == ADDRESS
7459 && GET_MODE (base) == Pmode
7460 && XEXP (base, 0) == stack_pointer_rtx))
7461 return true;
7462 }
47a3dae1
ZD
7463
7464 return false;
a13d4ebf 7465 }
589005ff 7466
a13d4ebf
AM
7467 if (GET_CODE (PATTERN (insn)) == SET)
7468 {
7469 rtx pat = PATTERN (insn);
3b14e3af
ZD
7470 rtx dest = SET_DEST (pat);
7471
7472 if (GET_CODE (dest) == SIGN_EXTRACT
7473 || GET_CODE (dest) == ZERO_EXTRACT)
7474 dest = XEXP (dest, 0);
7475
a13d4ebf 7476 /* Check for memory stores to aliased objects. */
3b14e3af
ZD
7477 if (GET_CODE (dest) == MEM
7478 && !expr_equiv_p (dest, x))
7479 {
7480 if (after)
7481 {
7482 if (output_dependence (dest, x))
7483 return true;
7484 }
7485 else
7486 {
7487 if (output_dependence (x, dest))
7488 return true;
7489 }
7490 }
d088acea
ZD
7491 if (find_loads (SET_SRC (pat), x, after))
7492 return true;
a13d4ebf 7493 }
d088acea
ZD
7494 else if (find_loads (PATTERN (insn), x, after))
7495 return true;
7496
7497 /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
7498 location aliased with X, then this insn kills X. */
7499 note = find_reg_equal_equiv_note (insn);
7500 if (! note)
7501 return false;
7502 note = XEXP (note, 0);
7503
7504 /* However, if the note represents a must alias rather than a may
7505 alias relationship, then it does not kill X. */
7506 if (expr_equiv_p (note, x))
7507 return false;
7508
7509 /* See if there are any aliased loads in the note. */
7510 return find_loads (note, x, after);
a13d4ebf
AM
7511}
7512
47a3dae1
ZD
7513/* Returns true if the expression X is loaded or clobbered on or after INSN
7514 within basic block BB. REGS_SET_AFTER is bitmap of registers set in
7515 or after the insn. X_REGS is list of registers mentioned in X. If the store
 7516   is killed, return in FAIL_INSN the last insn in which that occurs.  */
a13d4ebf 7517
47a3dae1 7518static bool
1d088dee
AJ
7519store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
7520 int *regs_set_after, rtx *fail_insn)
a13d4ebf 7521{
a813c111 7522 rtx last = BB_END (bb), act;
aaa4ca30 7523
47a3dae1 7524 if (!store_ops_ok (x_regs, regs_set_after))
1d088dee 7525 {
47a3dae1
ZD
7526 /* We do not know where it will happen. */
7527 if (fail_insn)
7528 *fail_insn = NULL_RTX;
7529 return true;
7530 }
a13d4ebf 7531
47a3dae1
ZD
7532 /* Scan from the end, so that fail_insn is determined correctly. */
7533 for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
3b14e3af 7534 if (store_killed_in_insn (x, x_regs, act, false))
47a3dae1
ZD
7535 {
7536 if (fail_insn)
7537 *fail_insn = act;
7538 return true;
7539 }
589005ff 7540
47a3dae1 7541 return false;
a13d4ebf 7542}
1d088dee 7543
47a3dae1
ZD
7544/* Returns true if the expression X is loaded or clobbered on or before INSN
7545 within basic block BB. X_REGS is list of registers mentioned in X.
7546 REGS_SET_BEFORE is bitmap of registers set before or in this insn. */
7547static bool
1d088dee
AJ
7548store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb,
7549 int *regs_set_before)
a13d4ebf 7550{
a813c111 7551 rtx first = BB_HEAD (bb);
a13d4ebf 7552
47a3dae1
ZD
7553 if (!store_ops_ok (x_regs, regs_set_before))
7554 return true;
a13d4ebf 7555
47a3dae1 7556 for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
3b14e3af 7557 if (store_killed_in_insn (x, x_regs, insn, true))
47a3dae1 7558 return true;
589005ff 7559
47a3dae1 7560 return false;
a13d4ebf 7561}
1d088dee 7562
47a3dae1
ZD
7563/* Fill in available, anticipatable, transparent and kill vectors in
7564 STORE_DATA, based on lists of available and anticipatable stores. */
a13d4ebf 7565static void
1d088dee 7566build_store_vectors (void)
a13d4ebf 7567{
47a3dae1
ZD
7568 basic_block bb;
7569 int *regs_set_in_block;
a13d4ebf
AM
7570 rtx insn, st;
7571 struct ls_expr * ptr;
47a3dae1 7572 unsigned regno;
a13d4ebf
AM
7573
7574 /* Build the gen_vector. This is any store in the table which is not killed
7575 by aliasing later in its block. */
703ad42b 7576 ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores);
d55bc081 7577 sbitmap_vector_zero (ae_gen, last_basic_block);
a13d4ebf 7578
703ad42b 7579 st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
d55bc081 7580 sbitmap_vector_zero (st_antloc, last_basic_block);
aaa4ca30 7581
a13d4ebf 7582 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
589005ff 7583 {
47a3dae1 7584 for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
a13d4ebf
AM
7585 {
7586 insn = XEXP (st, 0);
e2d2ed72 7587 bb = BLOCK_FOR_INSN (insn);
589005ff 7588
47a3dae1
ZD
7589 /* If we've already seen an available expression in this block,
 7590	     we can delete this one (it occurs earlier in the block).  We'll
7591 copy the SRC expression to an unused register in case there
7592 are any side effects. */
7593 if (TEST_BIT (ae_gen[bb->index], ptr->index))
a13d4ebf 7594 {
47a3dae1
ZD
7595 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
7596 if (gcse_file)
7597 fprintf (gcse_file, "Removing redundant store:\n");
d088acea 7598 replace_store_insn (r, XEXP (st, 0), bb, ptr);
47a3dae1 7599 continue;
a13d4ebf 7600 }
47a3dae1 7601 SET_BIT (ae_gen[bb->index], ptr->index);
a13d4ebf 7602 }
589005ff 7603
47a3dae1
ZD
7604 for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
7605 {
7606 insn = XEXP (st, 0);
7607 bb = BLOCK_FOR_INSN (insn);
7608 SET_BIT (st_antloc[bb->index], ptr->index);
7609 }
a13d4ebf 7610 }
589005ff 7611
703ad42b 7612 ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
d55bc081 7613 sbitmap_vector_zero (ae_kill, last_basic_block);
a13d4ebf 7614
703ad42b 7615 transp = sbitmap_vector_alloc (last_basic_block, num_stores);
d55bc081 7616 sbitmap_vector_zero (transp, last_basic_block);
47a3dae1 7617 regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);
a13d4ebf 7618
47a3dae1
ZD
7619 FOR_EACH_BB (bb)
7620 {
7621 for (regno = 0; regno < max_gcse_regno; regno++)
7622 regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);
7623
7624 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7625 {
a813c111 7626 if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
47a3dae1
ZD
7627 bb, regs_set_in_block, NULL))
7628 {
e0bb17a8 7629 /* It should not be necessary to consider the expression
47a3dae1
ZD
7630 killed if it is both anticipatable and available. */
7631 if (!TEST_BIT (st_antloc[bb->index], ptr->index)
7632 || !TEST_BIT (ae_gen[bb->index], ptr->index))
7633 SET_BIT (ae_kill[bb->index], ptr->index);
1d088dee
AJ
7634 }
7635 else
7636 SET_BIT (transp[bb->index], ptr->index);
7637 }
47a3dae1
ZD
7638 }
7639
7640 free (regs_set_in_block);
aaa4ca30 7641
589005ff 7642 if (gcse_file)
aaa4ca30 7643 {
d55bc081
ZD
7644 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
7645 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
7646 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
7647 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
a13d4ebf
AM
7648 }
7649}
7650
fbe5a4a6 7651/* Insert an instruction at the beginning of a basic block, and update
a813c111 7652 the BB_HEAD if needed. */
a13d4ebf 7653
589005ff 7654static void
1d088dee 7655insert_insn_start_bb (rtx insn, basic_block bb)
a13d4ebf
AM
7656{
7657 /* Insert at start of successor block. */
a813c111
SB
7658 rtx prev = PREV_INSN (BB_HEAD (bb));
7659 rtx before = BB_HEAD (bb);
a13d4ebf
AM
7660 while (before != 0)
7661 {
7662 if (GET_CODE (before) != CODE_LABEL
7663 && (GET_CODE (before) != NOTE
7664 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
7665 break;
7666 prev = before;
a813c111 7667 if (prev == BB_END (bb))
a13d4ebf
AM
7668 break;
7669 before = NEXT_INSN (before);
7670 }
7671
7672 insn = emit_insn_after (insn, prev);
7673
a13d4ebf
AM
7674 if (gcse_file)
7675 {
7676 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
0b17ab2f 7677 bb->index);
a13d4ebf
AM
7678 print_inline_rtx (gcse_file, insn, 6);
7679 fprintf (gcse_file, "\n");
7680 }
7681}
7682
7683/* This routine will insert a store on an edge. EXPR is the ldst entry for
cc2902df 7684 the memory reference, and E is the edge to insert it on. Returns nonzero
a13d4ebf
AM
7685 if an edge insertion was performed. */
7686
7687static int
1d088dee 7688insert_store (struct ls_expr * expr, edge e)
a13d4ebf
AM
7689{
7690 rtx reg, insn;
e2d2ed72 7691 basic_block bb;
a13d4ebf
AM
7692 edge tmp;
7693
 7694  /* We did all the deletions before this insert, so if we didn't delete a
7695 store, then we haven't set the reaching reg yet either. */
7696 if (expr->reaching_reg == NULL_RTX)
7697 return 0;
7698
a0c8285b
JH
7699 if (e->flags & EDGE_FAKE)
7700 return 0;
7701
a13d4ebf 7702 reg = expr->reaching_reg;
47a3dae1 7703 insn = gen_move_insn (copy_rtx (expr->pattern), reg);
589005ff 7704
a13d4ebf
AM
7705 /* If we are inserting this expression on ALL predecessor edges of a BB,
7706 insert it at the start of the BB, and reset the insert bits on the other
ff7cc307 7707 edges so we don't try to insert it on the other edges. */
e2d2ed72 7708 bb = e->dest;
a13d4ebf 7709 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
3f2eae23 7710 if (!(tmp->flags & EDGE_FAKE))
a0c8285b
JH
7711 {
7712 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7713 if (index == EDGE_INDEX_NO_EDGE)
7714 abort ();
7715 if (! TEST_BIT (pre_insert_map[index], expr->index))
7716 break;
7717 }
a13d4ebf
AM
7718
7719 /* If tmp is NULL, we found an insertion on every edge, blank the
7720 insertion vector for these edges, and insert at the start of the BB. */
e2d2ed72 7721 if (!tmp && bb != EXIT_BLOCK_PTR)
a13d4ebf
AM
7722 {
7723 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7724 {
7725 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7726 RESET_BIT (pre_insert_map[index], expr->index);
7727 }
7728 insert_insn_start_bb (insn, bb);
7729 return 0;
7730 }
589005ff 7731
a13d4ebf
AM
7732 /* We can't insert on this edge, so we'll insert at the head of the
7733 successors block. See Morgan, sec 10.5. */
7734 if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
7735 {
7736 insert_insn_start_bb (insn, bb);
7737 return 0;
7738 }
7739
7740 insert_insn_on_edge (insn, e);
589005ff 7741
a13d4ebf
AM
7742 if (gcse_file)
7743 {
7744 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
0b17ab2f 7745 e->src->index, e->dest->index);
a13d4ebf
AM
7746 print_inline_rtx (gcse_file, insn, 6);
7747 fprintf (gcse_file, "\n");
7748 }
589005ff 7749
a13d4ebf
AM
7750 return 1;
7751}
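/* Design note: when every non-fake incoming edge of the destination
   block wants the store, or when the edge is abnormal, inserting at
   the head of the block avoids splitting edges; see the reference to
   Morgan, sec 10.5 above.  */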
7752
d088acea
ZD
7753/* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
7754 memory location in SMEXPR set in basic block BB.
7755
7756 This could be rather expensive. */
7757
7758static void
7759remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
7760{
7761 edge *stack = xmalloc (sizeof (edge) * n_basic_blocks), act;
7762 sbitmap visited = sbitmap_alloc (last_basic_block);
7763 int stack_top = 0;
7764 rtx last, insn, note;
7765 rtx mem = smexpr->pattern;
7766
7767 sbitmap_zero (visited);
7768 act = bb->succ;
7769
7770 while (1)
7771 {
7772 if (!act)
7773 {
7774 if (!stack_top)
7775 {
7776 free (stack);
7777 sbitmap_free (visited);
7778 return;
7779 }
7780 act = stack[--stack_top];
7781 }
7782 bb = act->dest;
7783
7784 if (bb == EXIT_BLOCK_PTR
7785 || TEST_BIT (visited, bb->index)
7786 || TEST_BIT (ae_kill[bb->index], smexpr->index))
7787 {
7788 act = act->succ_next;
7789 continue;
7790 }
7791 SET_BIT (visited, bb->index);
7792
7793 if (TEST_BIT (st_antloc[bb->index], smexpr->index))
7794 {
7795 for (last = ANTIC_STORE_LIST (smexpr);
7796 BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
7797 last = XEXP (last, 1))
7798 continue;
7799 last = XEXP (last, 0);
7800 }
7801 else
a813c111 7802 last = NEXT_INSN (BB_END (bb));
d088acea 7803
a813c111 7804 for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
d088acea
ZD
7805 if (INSN_P (insn))
7806 {
7807 note = find_reg_equal_equiv_note (insn);
7808 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
7809 continue;
7810
7811 if (gcse_file)
7812 fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
7813 INSN_UID (insn));
7814 remove_note (insn, note);
7815 }
7816 act = act->succ_next;
7817 if (bb->succ)
7818 {
7819 if (act)
7820 stack[stack_top++] = act;
7821 act = bb->succ;
7822 }
7823 }
7824}
7825
a13d4ebf
AM
7826/* This routine will replace a store with a SET to a specified register. */
7827
7828static void
d088acea 7829replace_store_insn (rtx reg, rtx del, basic_block bb, struct ls_expr *smexpr)
a13d4ebf 7830{
d088acea 7831 rtx insn, mem, note, set, ptr;
589005ff 7832
d088acea 7833 mem = smexpr->pattern;
9a318d30 7834 insn = gen_move_insn (reg, SET_SRC (single_set (del)));
a13d4ebf 7835 insn = emit_insn_after (insn, del);
589005ff 7836
a13d4ebf
AM
7837 if (gcse_file)
7838 {
589005ff 7839 fprintf (gcse_file,
0b17ab2f 7840 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
a13d4ebf 7841 print_inline_rtx (gcse_file, del, 6);
8e42ace1 7842 fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
a13d4ebf 7843 print_inline_rtx (gcse_file, insn, 6);
8e42ace1 7844 fprintf (gcse_file, "\n");
a13d4ebf 7845 }
589005ff 7846
d088acea
ZD
7847 for (ptr = ANTIC_STORE_LIST (smexpr); ptr; ptr = XEXP (ptr, 1))
7848 if (XEXP (ptr, 0) == del)
7849 {
7850 XEXP (ptr, 0) = insn;
7851 break;
7852 }
49ce134f 7853 delete_insn (del);
d088acea
ZD
7854
7855 /* Now we must handle REG_EQUAL notes whose contents is equal to the mem;
7856 they are no longer accurate provided that they are reached by this
7857 definition, so drop them. */
a813c111 7858 for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
d088acea
ZD
7859 if (INSN_P (insn))
7860 {
7861 set = single_set (insn);
7862 if (!set)
7863 continue;
7864 if (expr_equiv_p (SET_DEST (set), mem))
7865 return;
7866 note = find_reg_equal_equiv_note (insn);
7867 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
7868 continue;
7869
7870 if (gcse_file)
7871 fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
7872 INSN_UID (insn));
7873 remove_note (insn, note);
7874 }
7875 remove_reachable_equiv_notes (bb, smexpr);
a13d4ebf
AM
7876}
7877
7878
7879/* Delete a store, but copy the value that would have been stored into
7880 the reaching_reg for later storing. */
7881
7882static void
1d088dee 7883delete_store (struct ls_expr * expr, basic_block bb)
a13d4ebf
AM
7884{
7885 rtx reg, i, del;
7886
7887 if (expr->reaching_reg == NULL_RTX)
7888 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
a13d4ebf 7889
a13d4ebf 7890 reg = expr->reaching_reg;
589005ff 7891
a13d4ebf
AM
7892 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
7893 {
7894 del = XEXP (i, 0);
e2d2ed72 7895 if (BLOCK_FOR_INSN (del) == bb)
a13d4ebf 7896 {
589005ff 7897 /* We know there is only one since we deleted redundant
a13d4ebf 7898 ones during the available computation. */
d088acea 7899 replace_store_insn (reg, del, bb, expr);
a13d4ebf
AM
7900 break;
7901 }
7902 }
7903}
7904
7905/* Free memory used by store motion. */
7906
589005ff 7907static void
1d088dee 7908free_store_memory (void)
a13d4ebf
AM
7909{
7910 free_ldst_mems ();
589005ff 7911
a13d4ebf 7912 if (ae_gen)
5a660bff 7913 sbitmap_vector_free (ae_gen);
a13d4ebf 7914 if (ae_kill)
5a660bff 7915 sbitmap_vector_free (ae_kill);
a13d4ebf 7916 if (transp)
5a660bff 7917 sbitmap_vector_free (transp);
a13d4ebf 7918 if (st_antloc)
5a660bff 7919 sbitmap_vector_free (st_antloc);
a13d4ebf 7920 if (pre_insert_map)
5a660bff 7921 sbitmap_vector_free (pre_insert_map);
a13d4ebf 7922 if (pre_delete_map)
5a660bff 7923 sbitmap_vector_free (pre_delete_map);
aaa4ca30
AJ
7924 if (reg_set_in_block)
7925 sbitmap_vector_free (reg_set_in_block);
589005ff 7926
a13d4ebf
AM
7927 ae_gen = ae_kill = transp = st_antloc = NULL;
7928 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
7929}
7930
7931/* Perform store motion. Much like gcse, except we move expressions the
7932 other way by looking at the flowgraph in reverse. */
7933
7934static void
1d088dee 7935store_motion (void)
a13d4ebf 7936{
e0082a72 7937 basic_block bb;
0b17ab2f 7938 int x;
a13d4ebf 7939 struct ls_expr * ptr;
adfcce61 7940 int update_flow = 0;
aaa4ca30 7941
a13d4ebf
AM
7942 if (gcse_file)
7943 {
7944 fprintf (gcse_file, "before store motion\n");
7945 print_rtl (gcse_file, get_insns ());
7946 }
7947
a13d4ebf 7948 init_alias_analysis ();
aaa4ca30 7949
47a3dae1 7950 /* Find all the available and anticipatable stores. */
a13d4ebf
AM
7951 num_stores = compute_store_table ();
7952 if (num_stores == 0)
7953 {
aaa4ca30 7954 sbitmap_vector_free (reg_set_in_block);
a13d4ebf
AM
7955 end_alias_analysis ();
7956 return;
7957 }
7958
47a3dae1 7959 /* Now compute kill & transp vectors. */
a13d4ebf 7960 build_store_vectors ();
47a3dae1 7961 add_noreturn_fake_exit_edges ();
2a868ea4 7962 connect_infinite_loops_to_exit ();
a13d4ebf 7963
589005ff
KH
7964 edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
7965 st_antloc, ae_kill, &pre_insert_map,
a13d4ebf
AM
7966 &pre_delete_map);
7967
7968 /* Now we want to insert the new stores which are going to be needed. */
7969 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7970 {
e0082a72
ZD
7971 FOR_EACH_BB (bb)
7972 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
7973 delete_store (ptr, bb);
a13d4ebf 7974
0b17ab2f
RH
7975 for (x = 0; x < NUM_EDGES (edge_list); x++)
7976 if (TEST_BIT (pre_insert_map[x], ptr->index))
7977 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
a13d4ebf
AM
7978 }
7979
7980 if (update_flow)
7981 commit_edge_insertions ();
aaa4ca30 7982
a13d4ebf
AM
7983 free_store_memory ();
7984 free_edge_list (edge_list);
7985 remove_fake_edges ();
7986 end_alias_analysis ();
7987}
e2500fed 7988
a0134312
RS
7989\f
7990/* Entry point for jump bypassing optimization pass. */
7991
7992int
1d088dee 7993bypass_jumps (FILE *file)
a0134312
RS
7994{
7995 int changed;
7996
7997 /* We do not construct an accurate cfg in functions which call
7998 setjmp, so just punt to be safe. */
7999 if (current_function_calls_setjmp)
8000 return 0;
8001
8002 /* For calling dump_foo fns from gdb. */
8003 debug_stderr = stderr;
8004 gcse_file = file;
8005
8006 /* Identify the basic block information for this function, including
8007 successors and predecessors. */
8008 max_gcse_regno = max_reg_num ();
8009
8010 if (file)
8011 dump_flow_info (file);
8012
6614fd40 8013 /* Return if there's nothing to do, or it is too expensive. */
d128effb 8014  if (n_basic_blocks <= 1 || is_too_expensive (_("jump bypassing disabled")))
a0134312
RS
8015 return 0;
8016
a0134312
RS
8017 gcc_obstack_init (&gcse_obstack);
8018 bytes_used = 0;
8019
8020 /* We need alias. */
8021 init_alias_analysis ();
8022
8023 /* Record where pseudo-registers are set. This data is kept accurate
8024 during each pass. ??? We could also record hard-reg information here
8025 [since it's unchanging], however it is currently done during hash table
8026 computation.
8027
8028 It may be tempting to compute MEM set information here too, but MEM sets
8029 will be subject to code motion one day and thus we need to compute
8030 information about memory sets when we build the hash tables. */
8031
8032 alloc_reg_set_mem (max_gcse_regno);
8033 compute_sets (get_insns ());
8034
8035 max_gcse_regno = max_reg_num ();
8036 alloc_gcse_mem (get_insns ());
8037 changed = one_cprop_pass (1, 1, 1);
8038 free_gcse_mem ();
8039
8040 if (file)
8041 {
8042 fprintf (file, "BYPASS of %s: %d basic blocks, ",
faed5cc3 8043 current_function_name (), n_basic_blocks);
a0134312
RS
8044 fprintf (file, "%d bytes\n\n", bytes_used);
8045 }
8046
8047 obstack_free (&gcse_obstack, NULL);
8048 free_reg_set_mem ();
8049
8050 /* We are finished with alias. */
8051 end_alias_analysis ();
8052 allocate_reg_info (max_reg_num (), FALSE, FALSE);
8053
8054 return changed;
8055}
8056
d128effb
NS
8057/* Return true if the graph is too expensive to optimize. PASS is the
8058 optimization about to be performed. */
8059
8060static bool
8061is_too_expensive (const char *pass)
8062{
8063 /* Trying to perform global optimizations on flow graphs which have
8064 a high connectivity will take a long time and is unlikely to be
8065 particularly useful.
8066
8067 In normal circumstances a cfg should have about twice as many
8068 edges as blocks. But we do not want to punish small functions
8069 which have a couple switch statements. Rather than simply
8070 threshold the number of blocks, uses something with a more
8071 graceful degradation. */
8072 if (n_edges > 20000 + n_basic_blocks * 4)
8073 {
8074 if (warn_disabled_optimization)
8075 warning ("%s: %d basic blocks and %d edges/basic block",
8076 pass, n_basic_blocks, n_edges / n_basic_blocks);
8077
8078 return true;
8079 }
8080
8081 /* If allocating memory for the cprop bitmap would take up too much
8082 storage it's better just to disable the optimization. */
8083 if ((n_basic_blocks
8084 * SBITMAP_SET_SIZE (max_reg_num ())
8085 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
8086 {
8087 if (warn_disabled_optimization)
8088 warning ("%s: %d basic blocks and %d registers",
8089 pass, n_basic_blocks, max_reg_num ());
8090
8091 return true;
8092 }
8093
8094 return false;
8095}
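/* For instance, under the 20000 + n_basic_blocks * 4 rule a function
   with 1000 basic blocks is only rejected once it has more than 24000
   edges, i.e. an average of 24 edges per block, far above the usual
   ratio of about two mentioned above.  */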
8096
f9957958
MH
8097/* The following code implements gcse after reload, the purpose of this
8098 pass is to cleanup redundant loads generated by reload and other
8099 optimizations that come after gcse. It searches for simple inter-block
8100 redundancies and tries to eliminate them by adding moves and loads
8101 in cold places. */
8102
8103/* The following structure holds the information about the occurrences of
8104 the redundant instructions. */
8105struct unoccr
8106{
8107 struct unoccr *next;
8108 edge pred;
8109 rtx insn;
8110};
8111
8112static bool reg_used_on_edge (rtx, edge);
8113static rtx reg_set_between_after_reload_p (rtx, rtx, rtx);
8114static rtx reg_used_between_after_reload_p (rtx, rtx, rtx);
8115static rtx get_avail_load_store_reg (rtx);
8116static bool is_jump_table_basic_block (basic_block);
8117static bool bb_has_well_behaved_predecessors (basic_block);
8118static struct occr* get_bb_avail_insn (basic_block, struct occr *);
8119static void hash_scan_set_after_reload (rtx, rtx, struct hash_table *);
8120static void compute_hash_table_after_reload (struct hash_table *);
8121static void eliminate_partially_redundant_loads (basic_block,
8122 rtx,
8123 struct expr *);
8124static void gcse_after_reload (void);
8126void gcse_after_reload_main (rtx, FILE *);
8127

/* Check if register REG is used in any insn waiting to be inserted on
   edge E.  Assumes no such insn can be a CALL_INSN; if that assumption
   ever fails, call reg_used_between_p with PREV (insn) and NEXT (insn)
   instead of calling reg_overlap_mentioned_p.  */

static bool
reg_used_on_edge (rtx reg, edge e)
{
  rtx insn;

  for (insn = e->insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
      return true;

  return false;
}

/* Return the insn that sets register REG or clobbers it in between
   FROM_INSN and TO_INSN (exclusive of those two).
   Just like reg_set_between_p but for hard registers, not pseudos.  */

static rtx
reg_set_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;
  int regno;

  if (GET_CODE (reg) != REG)
    abort ();
  regno = REGNO (reg);

  /* We are called after register allocation.  */
  if (regno >= FIRST_PSEUDO_REGISTER)
    abort ();

  if (from_insn == to_insn)
    return NULL_RTX;

  for (insn = NEXT_INSN (from_insn);
       insn != to_insn;
       insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        {
          if (FIND_REG_INC_NOTE (insn, reg)
              || (GET_CODE (insn) == CALL_INSN
                  && call_used_regs[regno])
              || find_reg_fusage (insn, CLOBBER, reg))
            return insn;
        }
      if (set_of (reg, insn) != NULL_RTX)
        return insn;
    }
  return NULL_RTX;
}

/* Return the insn that uses register REG in between FROM_INSN and TO_INSN
   (exclusive of those two).  Similar to reg_used_between_p but for hard
   registers, not pseudos.  */

static rtx
reg_used_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;
  int regno;

  if (GET_CODE (reg) != REG)
    return to_insn;
  regno = REGNO (reg);

  /* We are called after register allocation.  */
  if (regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (from_insn == to_insn)
    return NULL_RTX;

  for (insn = NEXT_INSN (from_insn);
       insn != to_insn;
       insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (GET_CODE (insn) == CALL_INSN
                && call_used_regs[regno])
            || find_reg_fusage (insn, USE, reg)
            || find_reg_fusage (insn, CLOBBER, reg)))
      return insn;
  return NULL_RTX;
}

/* Return the loaded/stored register of a load/store instruction.  */

static rtx
get_avail_load_store_reg (rtx insn)
{
  if (GET_CODE (SET_DEST (PATTERN (insn))) == REG)  /* A load.  */
    return SET_DEST (PATTERN (insn));
  if (GET_CODE (SET_SRC (PATTERN (insn))) == REG)   /* A store.  */
    return SET_SRC (PATTERN (insn));
  abort ();
}

/* Return true if basic block BB ends in a jump-table dispatch (an
   ADDR_VEC or ADDR_DIFF_VEC pattern).  Such blocks, like ABNORMAL
   edges, are not handled by this pass.  */

static bool
is_jump_table_basic_block (basic_block bb)
{
  rtx insn = BB_END (bb);

  if (GET_CODE (insn) == JUMP_INSN
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
          || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    return true;
  return false;
}

/* Return nonzero if the predecessors of BB are "well behaved".  */

static bool
bb_has_well_behaved_predecessors (basic_block bb)
{
  edge pred;

  if (! bb->pred)
    return false;
  for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
    if (((pred->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (pred))
        || is_jump_table_basic_block (pred->src))
      return false;
  return true;
}


/* Search the occurrence chain OCCR for the first occurrence whose insn
   lies in basic block BB; return NULL if there is none.  */

static struct occr*
get_bb_avail_insn (basic_block bb, struct occr *occr)
{
  for (; occr != NULL; occr = occr->next)
    if (BLOCK_FOR_INSN (occr->insn)->index == bb->index)
      return occr;
  return NULL;
}

/* Perform a partial GCSE pass after reload; try to eliminate redundant
   loads created by the reload pass.  We look for fully or partially
   redundant loads fed by one or more loads/stores in predecessor BBs,
   and try adding loads to make them fully redundant.  We also check if
   it's worth adding loads to be able to delete the redundant load.

   Algorithm:
   1. Build available expressions hash table:
      For each load/store instruction, if the loaded/stored memory didn't
      change until the end of the basic block, add this memory expression
      to the hash table.
   2. Perform redundancy elimination:
      For each load instruction do the following:
        perform partial redundancy elimination, check if it's worth adding
        loads to make the load fully redundant.  If so, add loads and
        register copies and delete the load.

   Future enhancement:
     if the loaded register is used/defined between the load and some
     store, look for some other free register between the load and all
     its stores, and replace the load with a copy from this register to
     the loaded register.  */
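
/* A small illustration of the transformation above (hypothetical,
   simplified RTL; the register numbers are invented):

     bb 2:  (set (mem:SI (reg:SI 6)) (reg:SI 3))  ; store, mem available
     bb 3:  ...                                   ; mem not available here
     bb 4:  (set (reg:SI 5) (mem:SI (reg:SI 6)))  ; partially redundant load

   If bb 2 and bb 3 are the predecessors of bb 4, the pass inserts the
   copy (set (reg:SI 5) (reg:SI 3)) on the edge bb 2 -> bb 4 and a copy
   of the load itself on the edge bb 3 -> bb 4; the load in bb 4 is then
   fully redundant and can be deleted.  */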


/* This handles the case where several stores feed a partially redundant
   load.  It checks if the redundancy elimination is possible and if it's
   worth it.  */

static void
eliminate_partially_redundant_loads (basic_block bb, rtx insn,
                                     struct expr *expr)
{
  edge pred;
  rtx avail_insn = NULL_RTX;
  rtx avail_reg;
  rtx dest, pat;
  struct occr *a_occr;
  struct unoccr *occr, *avail_occrs = NULL;
  struct unoccr *unoccr, *unavail_occrs = NULL;
  int npred_ok = 0;
  gcov_type ok_count = 0;       /* Redundant load execution count.  */
  gcov_type critical_count = 0; /* Execution count of critical edges.  */

  /* The execution count of the loads to be added to make the
     load fully redundant.  */
  gcov_type not_ok_count = 0;
  basic_block pred_bb;

  pat = PATTERN (insn);
  dest = SET_DEST (pat);
  /* Check that the loaded register is neither used nor killed from the
     beginning of the block.  */
  if (reg_used_between_after_reload_p (dest,
                                       PREV_INSN (BB_HEAD (bb)), insn))
    return;

  /* Check potential for replacing load with copy for predecessors.  */
  for (pred = bb->pred; pred; pred = pred->pred_next)
    {
      rtx next_pred_bb_end;

      avail_insn = NULL_RTX;
      pred_bb = pred->src;
      next_pred_bb_end = NEXT_INSN (BB_END (pred_bb));
      for (a_occr = get_bb_avail_insn (pred_bb, expr->avail_occr); a_occr;
           a_occr = get_bb_avail_insn (pred_bb, a_occr->next))
        {
          /* Check if the loaded register is not used.  */
          avail_insn = a_occr->insn;
          if (! (avail_reg = get_avail_load_store_reg (avail_insn)))
            abort ();
          /* Make sure we can generate a move from register avail_reg to
             dest.  */
          extract_insn (gen_move_insn (copy_rtx (dest),
                                       copy_rtx (avail_reg)));
          if (! constrain_operands (1)
              || reg_killed_on_edge (avail_reg, pred)
              || reg_used_on_edge (dest, pred))
            {
              avail_insn = NULL;
              continue;
            }
          if (! reg_set_between_after_reload_p (avail_reg, avail_insn,
                                                next_pred_bb_end))
            /* AVAIL_INSN remains non-null.  */
            break;
          else
            avail_insn = NULL;
        }
      if (avail_insn != NULL_RTX)
        {
          npred_ok++;
          ok_count += pred->count;
          if (EDGE_CRITICAL_P (pred))
            critical_count += pred->count;
          occr = (struct unoccr *) gmalloc (sizeof (struct unoccr));
          occr->insn = avail_insn;
          occr->pred = pred;
          occr->next = avail_occrs;
          avail_occrs = occr;
        }
      else
        {
          not_ok_count += pred->count;
          if (EDGE_CRITICAL_P (pred))
            critical_count += pred->count;
          unoccr = (struct unoccr *) gmalloc (sizeof (struct unoccr));
          unoccr->insn = NULL_RTX;
          unoccr->pred = pred;
          unoccr->next = unavail_occrs;
          unavail_occrs = unoccr;
        }
    }

  if (npred_ok == 0    /* No load can be replaced by copy.  */
      || (optimize_size && npred_ok > 1)) /* Prevent exploding the code.  */
    return;

  /* Check if it's worth applying the partial redundancy elimination.  */
  if (ok_count < GCSE_AFTER_RELOAD_PARTIAL_FRACTION * not_ok_count)
    return;

  if (ok_count < GCSE_AFTER_RELOAD_CRITICAL_FRACTION * critical_count)
    return;

  /* Generate moves to the loaded register from where
     the memory is available.  */
  for (occr = avail_occrs; occr; occr = occr->next)
    {
      avail_insn = occr->insn;
      pred = occr->pred;
      /* Set avail_reg to be the register having the value of the
         memory.  */
      avail_reg = get_avail_load_store_reg (avail_insn);
      if (! avail_reg)
        abort ();

      insert_insn_on_edge (gen_move_insn (copy_rtx (dest),
                                          copy_rtx (avail_reg)),
                           pred);

      if (gcse_file)
        fprintf (gcse_file,
                 "GCSE AFTER reload generating move from %d to %d on "
                 "edge from %d to %d\n",
                 REGNO (avail_reg),
                 REGNO (dest),
                 pred->src->index,
                 pred->dest->index);
    }

  /* Regenerate loads where the memory is unavailable.  */
  for (unoccr = unavail_occrs; unoccr; unoccr = unoccr->next)
    {
      pred = unoccr->pred;
      insert_insn_on_edge (copy_insn (PATTERN (insn)), pred);

      if (gcse_file)
        fprintf (gcse_file,
                 "GCSE AFTER reload: generating on edge from %d to %d "
                 "a copy of load:\n",
                 pred->src->index,
                 pred->dest->index);
    }

  /* Delete the insn if it is not available in this block and mark it
     for deletion if it is available.  If the insn is available it may
     help discover additional redundancies, so mark it for later
     deletion.  */
  for (a_occr = get_bb_avail_insn (bb, expr->avail_occr);
       a_occr && (a_occr->insn != insn);
       a_occr = get_bb_avail_insn (bb, a_occr->next));

  if (!a_occr)
    delete_insn (insn);
  else
    a_occr->deleted_p = 1;
}
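
/* Worked example for the profitability tests above (the execution
   counts are invented, and the default value of the fraction is an
   assumption; see params.def): suppose the load could be replaced by a
   copy on edges executed ok_count = 900 times, while making it fully
   redundant requires new loads on edges executed not_ok_count = 200
   times.  With a GCSE_AFTER_RELOAD_PARTIAL_FRACTION of 3, the test
   900 >= 3 * 200 passes and the transformation is applied; with
   not_ok_count = 400 it would be rejected as not worth the added
   loads.  */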

/* Perform the redundancy elimination described above.  */

static void
gcse_after_reload (void)
{
  unsigned int i;
  rtx insn;
  basic_block bb;
  struct expr *expr;
  struct occr *occr;

  /* Note we start at block 1.  */

  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    return;

  FOR_BB_BETWEEN (bb,
                  ENTRY_BLOCK_PTR->next_bb->next_bb,
                  EXIT_BLOCK_PTR,
                  next_bb)
    {
      if (! bb_has_well_behaved_predecessors (bb))
        continue;

      /* Do not try this optimization on cold basic blocks.  */
      if (probably_cold_bb_p (bb))
        continue;

      reset_opr_set_tables ();

      for (insn = BB_HEAD (bb);
           insn != NULL
           && insn != NEXT_INSN (BB_END (bb));
           insn = NEXT_INSN (insn))
        {
          /* Is it a load - of the form (set (reg) (mem))?  */
          if (GET_CODE (insn) == INSN
              && GET_CODE (PATTERN (insn)) == SET
              && GET_CODE (SET_DEST (PATTERN (insn))) == REG
              && GET_CODE (SET_SRC (PATTERN (insn))) == MEM)
            {
              rtx pat = PATTERN (insn);
              rtx src = SET_SRC (pat);
              struct expr *expr;

              if (general_operand (src, GET_MODE (src))
                  /* Is the expression recorded?  */
                  && (expr = lookup_expr (src, &expr_hash_table)) != NULL
                  /* Are the operands unchanged since the start of the
                     block?  */
                  && oprs_not_set_p (src, insn)
                  && ! MEM_VOLATILE_P (src)
                  && GET_MODE (src) != BLKmode
                  && !(flag_non_call_exceptions && may_trap_p (src))
                  && !side_effects_p (src))
                {
                  /* We now have a load (insn) and an available memory at
                     its BB start (expr).  Try to remove the load if it
                     is redundant.  */
                  eliminate_partially_redundant_loads (bb, insn, expr);
                }
            }

          /* Keep track of everything modified by this insn.  */
          if (INSN_P (insn))
            mark_oprs_set (insn);
        }
    }

  commit_edge_insertions ();

  /* Go over the expression hash table and delete insns that were
     marked for later deletion.  */
  for (i = 0; i < expr_hash_table.size; i++)
    {
      for (expr = expr_hash_table.table[i];
           expr != NULL;
           expr = expr->next_same_hash)
        for (occr = expr->avail_occr; occr; occr = occr->next)
          if (occr->deleted_p)
            delete_insn (occr->insn);
    }
}
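
/* Examples of insns the candidate filter in gcse_after_reload accepts
   or rejects (hypothetical RTL, for illustration only):

     (set (reg:SI 0) (mem:SI (reg:SI 6)))    ; accepted: a simple load
     (set (reg:SI 0) (mem/v:SI (reg:SI 6)))  ; rejected: volatile memory
     (set (mem:SI (reg:SI 6)) (reg:SI 0))    ; not a load; only used when
                                             ; building the hash table
     (parallel [...])                        ; rejected: PATTERN not a SET  */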

/* Scan pattern PAT of INSN and add an entry to the hash TABLE.
   After reload we are interested in loads/stores only.  */

static void
hash_scan_set_after_reload (rtx pat, rtx insn, struct hash_table *table)
{
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);

  if (GET_CODE (src) != MEM && GET_CODE (dest) != MEM)
    return;

  if (GET_CODE (dest) == REG)
    {
      if (/* Don't GCSE something if we can't do a reg/reg copy.  */
          can_copy_p (GET_MODE (dest))
          /* GCSE commonly inserts instructions after the insn.  We can't
             do that easily for EH_REGION notes so disable GCSE on these
             for now.  */
          && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
          /* Is SET_SRC something we want to gcse?  */
          && general_operand (src, GET_MODE (src))
          /* Don't CSE a nop.  */
          && ! set_noop_p (pat)
          && ! JUMP_P (insn))
        {
          /* An expression is not available if its operands are
             subsequently modified, including this insn.  */
          if (oprs_available_p (src, insn))
            insert_expr_in_table (src, GET_MODE (dest), insn, 0, 1, table);
        }
    }
  else if (GET_CODE (src) == REG)
    {
      /* A store: record the memory destination if it stays valid.  */
      if (/* Don't GCSE something if we can't do a reg/reg copy.  */
          can_copy_p (GET_MODE (src))
          /* GCSE commonly inserts instructions after the insn.  We can't
             do that easily for EH_REGION notes so disable GCSE on these
             for now.  */
          && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
          /* Is SET_DEST something we want to gcse?  */
          && general_operand (dest, GET_MODE (dest))
          /* Don't CSE a nop.  */
          && ! set_noop_p (pat)
          && ! JUMP_P (insn)
          && ! (flag_float_store && FLOAT_MODE_P (GET_MODE (dest)))
          /* Check if the memory expression is killed after insn.  */
          && ! load_killed_in_block_p (BLOCK_FOR_INSN (insn),
                                       INSN_CUID (insn) + 1,
                                       dest,
                                       1)
          && oprs_unchanged_p (XEXP (dest, 0), insn, 1))
        {
          insert_expr_in_table (dest, GET_MODE (dest), insn, 0, 1, table);
        }
    }
}
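
/* To make the two cases above concrete (hypothetical RTL): for a load
   (set (reg:SI 3) (mem:SI (reg:SI 6))) the memory source is entered in
   the table, so later loads of the same address can find it; for a
   store (set (mem:SI (reg:SI 6)) (reg:SI 3)) the memory destination is
   entered, provided nothing kills (mem:SI (reg:SI 6)) between the insn
   and the end of its block.  In both cases the register side of the
   SET is what get_avail_load_store_reg later retrieves.  */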


/* Create hash table of memory expressions available at end of basic
   blocks.  */

static void
compute_hash_table_after_reload (struct hash_table *table)
{
  unsigned int i;

  table->set_p = 0;

  /* Initialize count of number of entries in hash table.  */
  table->n_elems = 0;
  memset ((char *) table->table, 0,
          table->size * sizeof (struct expr *));

  /* While we compute the hash table we also compute a bit array of which
     registers are set in which blocks.  */
  sbitmap_vector_zero (reg_set_in_block, last_basic_block);

  /* Re-cache any INSN_LIST nodes we have allocated.  */
  clear_modify_mem_tables ();

  /* Some working arrays used to track first and last set in each block.  */
  reg_avail_info = (struct reg_avail_info *)
    gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));

  for (i = 0; i < max_gcse_regno; ++i)
    reg_avail_info[i].last_bb = NULL;

  FOR_EACH_BB (current_bb)
    {
      rtx insn;
      unsigned int regno;

      /* First pass over the instructions records information used to
         determine when registers and memory are first and last set.  */
      for (insn = BB_HEAD (current_bb);
           insn && insn != NEXT_INSN (BB_END (current_bb));
           insn = NEXT_INSN (insn))
        {
          if (! INSN_P (insn))
            continue;

          if (GET_CODE (insn) == CALL_INSN)
            {
              bool clobbers_all = false;

#ifdef NON_SAVING_SETJMP
              if (NON_SAVING_SETJMP
                  && find_reg_note (insn, REG_SETJMP, NULL_RTX))
                clobbers_all = true;
#endif

              for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
                if (clobbers_all
                    || TEST_HARD_REG_BIT (regs_invalidated_by_call,
                                          regno))
                  record_last_reg_set_info (insn, regno);

              mark_call (insn);
            }

          note_stores (PATTERN (insn), record_last_set_info, insn);

          if (GET_CODE (PATTERN (insn)) == SET)
            {
              rtx src, dest;

              src = SET_SRC (PATTERN (insn));
              dest = SET_DEST (PATTERN (insn));
              if (GET_CODE (src) == MEM && auto_inc_p (XEXP (src, 0)))
                {
                  regno = REGNO (XEXP (XEXP (src, 0), 0));
                  record_last_reg_set_info (insn, regno);
                }
              if (GET_CODE (dest) == MEM && auto_inc_p (XEXP (dest, 0)))
                {
                  regno = REGNO (XEXP (XEXP (dest, 0), 0));
                  record_last_reg_set_info (insn, regno);
                }
            }
        }

      /* The next pass builds the hash table.  */
      for (insn = BB_HEAD (current_bb);
           insn && insn != NEXT_INSN (BB_END (current_bb));
           insn = NEXT_INSN (insn))
        if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SET)
          if (! find_reg_note (insn, REG_LIBCALL, NULL_RTX))
            hash_scan_set_after_reload (PATTERN (insn), insn, table);
    }

  free (reg_avail_info);
  reg_avail_info = NULL;
}


/* Main entry point of the GCSE after reload - clean up some redundant
   loads due to spilling.  */

void
gcse_after_reload_main (rtx f, FILE* file)
{
  gcse_subst_count = 0;
  gcse_create_count = 0;

  gcse_file = file;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();

  max_gcse_regno = max_reg_num ();

  alloc_reg_set_mem (max_gcse_regno);
  alloc_gcse_mem (f);
  alloc_hash_table (max_cuid, &expr_hash_table, 0);
  compute_hash_table_after_reload (&expr_hash_table);

  if (gcse_file)
    dump_hash_table (gcse_file, "Expression", &expr_hash_table);

  if (expr_hash_table.n_elems > 0)
    gcse_after_reload ();

  free_hash_table (&expr_hash_table);

  free_gcse_mem ();
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();

  obstack_free (&gcse_obstack, NULL);
}
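
/* A minimal sketch of how this entry point is driven (an assumption
   about the caller, not code from this file): after register
   allocation the compiler would do something like

     if (optimize > 0 && flag_gcse_after_reload)
       gcse_after_reload_main (get_insns (), dump_file);

   where flag_gcse_after_reload corresponds to -fgcse-after-reload and
   the FILE argument may be NULL when no dump was requested.  */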

#include "gt-gcse.h"