/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass

*/

/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region? Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The Power of Assignment Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global Code Motion / Global Value Numbering
   C. Click
   ACM SIGPLAN Notices, Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices, Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/

146#include "config.h"
50b2596f 147#include "system.h"
4977bab6
ZW
148#include "coretypes.h"
149#include "tm.h"
01198c2f 150#include "toplev.h"
7506f491
DE
151
152#include "rtl.h"
b0656d8b 153#include "tree.h"
6baf1cc8 154#include "tm_p.h"
7506f491
DE
155#include "regs.h"
156#include "hard-reg-set.h"
157#include "flags.h"
158#include "real.h"
159#include "insn-config.h"
160#include "recog.h"
161#include "basic-block.h"
50b2596f 162#include "output.h"
49ad7cfa 163#include "function.h"
589005ff 164#include "expr.h"
e7d482b9 165#include "except.h"
fb0c0a12 166#include "ggc.h"
f1fa37ff 167#include "params.h"
ae860ff7 168#include "cselib.h"
d128effb 169#include "intl.h"
7506f491 170#include "obstack.h"
4fa31c2a 171
/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */

/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is created by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle things can
   be rearranged.

   Help stamp out big monolithic functions!  */
\f
/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

struct reg_use {rtx reg_rtx; };

/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};

/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Whether the table is the expression table or the copy propagation
     table.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
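
/* A minimal sketch (hypothetical helper, not part of the pass itself) of
   the uid -> cuid -> insn round trip.  CUIDs are dense and monotonic, so
   they can directly index per-insn tables and bitmaps, unlike UIDs, which
   have gaps.  */

static inline rtx
example_cuid_round_trip (rtx insn)
{
  int cuid = INSN_CUID (insn);  /* Dense index assigned to INSN.  */
  return CUID_INSN (cuid);      /* Recover INSN; valid for real insns only.  */
}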

/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
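
/* Illustrative sketch (hypothetical helper): walk every recorded setter of
   pseudo REGNO.  This is the iteration pattern the comment above describes:
   the work is proportional to the number of sets of REGNO, not to the
   number of basic blocks.  */

static int
example_count_sets_of_reg (int regno)
{
  struct reg_set *r;
  int n = 0;

  if (regno < reg_set_table_size)
    for (r = reg_set_table[regno]; r != NULL; r = r->next)
      n++;

  return n;
}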

/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by anything except itself,
   i.e., loads and stores to a single location.
   We can then allow movement of these MEM refs with a little special
   allowance (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be movable towards the exit block.  */

struct ls_expr
{
  struct expr * expr;           /* Gcse expression reference for LM.  */
  rtx pattern;                  /* Pattern of this mem.  */
  rtx pattern_regs;             /* List of registers mentioned by the mem.  */
  rtx loads;                    /* INSN list of loads seen.  */
  rtx stores;                   /* INSN list of stores seen.  */
  struct ls_expr * next;        /* Next in the list.  */
  int invalid;                  /* Invalid for some reason.  */
  int index;                    /* If it maps to a bitmap index.  */
  unsigned int hash_index;      /* Index when in a hash table.  */
  rtx reaching_reg;             /* Register to use when re-writing.  */
};

/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number for a list of insns which modify
   memory within that block.  */
static rtx * modify_mem_list;
bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;
bitmap canon_modify_mem_list_set;
/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;
\f
/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays, i.e.
   rd_kill[block_num][cuid_num]
   ae_kill[block_num][expr_num]  */

/* For reaching defs */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* for available exprs */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;

/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  basic_block current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};
\f
static void compute_can_copy (void);
static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
static void *grealloc (void *, size_t);
static void *gcse_alloc (unsigned long);
static void alloc_gcse_mem (rtx);
static void free_gcse_mem (void);
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static int get_bitmap_width (int, int, int);
static void record_one_set (int, rtx);
static void replace_one_set (int, rtx, rtx);
static void record_set_info (rtx, rtx, void *);
static void compute_sets (rtx);
static void hash_scan_insn (rtx, struct hash_table *, int);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool gcse_constant_p (rtx);
static int oprs_unchanged_p (rtx, rtx, int);
static int oprs_anticipatable_p (rtx, rtx);
static int oprs_available_p (rtx, rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
				  struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
static unsigned int hash_expr_1 (rtx, enum machine_mode, int *);
static unsigned int hash_string_1 (const char *);
static unsigned int hash_set (int, int);
static int expr_equiv_p (rtx, rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, rtx, void *);
static void compute_hash_table (struct hash_table *);
static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static void compute_hash_table_work (struct hash_table *);
static void dump_hash_table (FILE *, const char *, struct hash_table *);
static struct expr *lookup_expr (rtx, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
static int oprs_not_set_p (rtx, rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
static void compute_transp (rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
				      struct hash_table *);
static void compute_cprop_data (void);
static void find_used_regs (rtx *, void *);
static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
static void mems_conflict_for_gcse_p (rtx, rtx, void *);
static int load_killed_in_block_p (basic_block, int, rtx, int);
static void canon_list_insert (rtx, rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
static int one_cprop_pass (int, int, int);
static bool constprop_register (rtx, rtx, rtx, int);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (rtx, edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static void compute_pre_data (void);
static int pre_expr_reaches_here_p (basic_block, struct expr *,
				    basic_block);
static void insert_insn_end_bb (struct expr *, basic_block, int);
static void pre_insert_copy_insn (struct expr *, rtx);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (void);
static int one_pre_gcse_pass (int);
static void add_label_notes (rtx, rtx);
static void alloc_code_hoist_mem (int, int);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
static void hoist_code (void);
static int one_code_hoisting_pass (void);
static void alloc_rd_mem (int, int);
static void free_rd_mem (void);
static void handle_rd_kill_set (rtx, int, basic_block);
static void compute_kill_rd (void);
static void compute_rd (void);
static void alloc_avail_expr_mem (int, int);
static void free_avail_expr_mem (void);
static void compute_ae_gen (struct hash_table *);
static int expr_killed_p (rtx, basic_block);
static void compute_ae_kill (sbitmap *, sbitmap *, struct hash_table *);
static int expr_reaches_here_p (struct occr *, struct expr *, basic_block,
				int);
static rtx computing_insn (struct expr *, rtx);
static int def_reaches_here_p (rtx, rtx);
static int can_disregard_other_sets (struct reg_set **, rtx, int);
static int handle_avail_expr (rtx, struct expr *);
static int classic_gcse (void);
static int one_classic_gcse_pass (int);
static void invalidate_nonnull_info (rtx, rtx, void *);
static int delete_null_pointer_checks_1 (unsigned int *, sbitmap *, sbitmap *,
					 struct null_pointer_info *);
static rtx process_insert_insn (struct expr *);
static int pre_edge_insert (struct edge_list *, struct expr **);
static int expr_reaches_here_p_work (struct occr *, struct expr *,
				     basic_block, int, char *);
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
					 basic_block, char *);
static struct ls_expr * ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void free_ldst_mems (void);
static void print_ldst_list (FILE *);
static struct ls_expr * find_rtx_in_ldst (rtx);
static int enumerate_ldsts (void);
static inline struct ls_expr * first_ls_expr (void);
static inline struct ls_expr * next_ls_expr (struct ls_expr *);
static int simple_mem (rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void reg_set_info (rtx, rtx, void *);
static void reg_clear_last_set (rtx, rtx, void *);
static bool store_ops_ok (rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
static bool load_kills_store (rtx, rtx, int);
static bool find_loads (rtx, rtx, int);
static bool store_killed_in_insn (rtx, rtx, rtx, int);
static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_bb (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
static void remove_reachable_equiv_notes (basic_block, struct ls_expr *);
static void replace_store_insn (rtx, rtx, basic_block, struct ls_expr *);
static void delete_store (struct ls_expr *, basic_block);
static void free_store_memory (void);
static void store_motion (void);
static void free_insn_expr_list_list (rtx *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
static bool do_local_cprop (rtx, rtx, int, rtx*);
static bool adjust_libcall_notes (rtx, rtx, rtx, rtx*);
static void local_cprop_pass (int);
static bool is_too_expensive (const char *);
\f

/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (rtx f, FILE *file)
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= 1 || is_too_expensive (_("GCSE disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();
  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
	fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
	 and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
	 during this pass.  */
      changed = one_cprop_pass (pass + 1, 0, 0);

      if (optimize_size)
	changed |= one_classic_gcse_pass (pass + 1);
      else
	{
	  changed |= one_pre_gcse_pass (pass + 1);
	  /* We may have just created new basic blocks.  Release and
	     recompute various things which are sized on the number of
	     basic blocks.  */
	  if (changed)
	    {
	      free_modify_mem_tables ();
	      modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
	      canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
	    }
	  free_reg_set_mem ();
	  alloc_reg_set_mem (max_reg_num ());
	  compute_sets (f);
	  run_jump_opt_after_gcse = 1;
	}

      if (max_pass_bytes < bytes_used)
	max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We cannot
	 re-use the existing allocated memory because the tables
	 will not have info for the insns or registers created by
	 partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
	 for code size -- it rarely makes programs faster, and can make
	 them bigger if we did partial redundancy elimination (when optimizing
	 for space, we use a classic gcse algorithm instead of partial
	 redundancy algorithms).  */
      if (optimize_size)
	{
	  max_gcse_regno = max_reg_num ();
	  alloc_gcse_mem (f);
	  changed |= one_code_hoisting_pass ();
	  free_gcse_mem ();

	  if (max_pass_bytes < bytes_used)
	    max_pass_bytes = bytes_used;
	}

      if (file)
	{
	  fprintf (file, "\n");
	  fflush (file);
	}

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1, 0);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
	       current_function_name (), n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
	       pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();
  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  if (!optimize_size && flag_gcse_sm)
    store_motion ();

  /* Record where pseudo-registers are set.  */
  return run_jump_opt_after_gcse;
}
\f
/* Misc. utilities.  */

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy[(int) NUM_MACHINE_MODES];

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy (void)
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
	can_copy[i] = 0;
#else
	reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
	insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
	if (recog (PATTERN (insn), insn, NULL) >= 0)
	  can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;

  end_sequence ();
}

/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (enum machine_mode mode)
{
  static bool can_copy_init_p = false;

  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}
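
/* Usage sketch (hypothetical caller, for illustration only): a
   transformation that wants to carry a value of some mode through a new
   pseudo should first ask can_copy_p, since reg/reg copies may be invalid
   for some modes (e.g. condition codes when AVOID_CCMODE_COPIES is
   defined).  */

static rtx
example_new_copy_reg (rtx src)
{
  if (! can_copy_p (GET_MODE (src)))
    return NULL_RTX;                    /* Mode cannot be copied reg/reg.  */

  return gen_reg_rtx (GET_MODE (src));  /* Pseudo to hold the copy.  */
}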
\f
/* Cover function to xmalloc to record bytes allocated.  */

static void *
gmalloc (size_t size)
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xcalloc to record bytes allocated.  */

static void *
gcalloc (size_t nelem, size_t elsize)
{
  bytes_used += nelem * elsize;
  return xcalloc (nelem, elsize);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static void *
grealloc (void *ptr, size_t size)
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static void *
gcse_alloc (unsigned long size)
{
  bytes_used += size;
  return obstack_alloc (&gcse_obstack, size);
}

/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (rtx f)
{
  int i;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  uid_cuid = gcalloc (max_uid + 1, sizeof (int));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	uid_cuid[INSN_UID (insn)] = i++;
      else
	uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  cuid_insn = gcalloc (max_cuid + 1, sizeof (rtx));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_XMALLOC ();

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  modify_mem_list_set = BITMAP_XMALLOC ();
  canon_modify_mem_list_set = BITMAP_XMALLOC ();
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem (void)
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_XFREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_XFREE (modify_mem_list_set);
  BITMAP_XFREE (canon_modify_mem_list_set);
}

/* Many of the global optimization algorithms work by solving dataflow
   equations for various expressions.  Initially, some local value is
   computed for each expression in each block.  Then, the values across the
   various blocks are combined (by following flow graph edges) to arrive at
   global values.  Conceptually, each set of equations is independent.  We
   may therefore solve all the equations in parallel, solve them one at a
   time, or pick any intermediate approach.

   When you're going to need N two-dimensional bitmaps, each X (say, the
   number of blocks) by Y (say, the number of expressions), call this
   function.  It's not important what X and Y represent; only that Y
   corresponds to the things that can be done in parallel.  This function
   will return an appropriate chunking factor C; you should solve C sets of
   equations in parallel.  By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less space.  */

static int
get_bitmap_width (int n, int x, int y)
{
  /* It's not really worth figuring out *exactly* how much memory will
     be used by a particular choice.  The important thing is to get
     something approximately right.  */
  size_t max_bitmap_memory = 10 * 1024 * 1024;

  /* The number of bytes we'd use for a single column of minimum
     width.  */
  size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);

  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;

  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
			     / column_size);
}
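
/* Usage sketch (hypothetical, for illustration only): solve the dataflow
   equations for N_EXPRS expressions over N_BLOCKS blocks in chunks of at
   most C columns, where C comes from get_bitmap_width for N_BITMAPS
   parallel bitmaps.  */

static void
example_solve_in_chunks (int n_bitmaps, int n_blocks, int n_exprs)
{
  int chunk = get_bitmap_width (n_bitmaps, n_blocks, n_exprs);
  int start;

  for (start = 0; start < n_exprs; start += chunk)
    {
      /* Allocate n_bitmaps sbitmap vectors of n_blocks rows by
	 MIN (chunk, n_exprs - start) columns, then solve that slice of
	 the equations before moving on to the next chunk.  */
    }
}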
\f
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and expression would contain the same value if the computation
   was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
			  struct hash_table *table)
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
	sbitmap_vector_zero (transp, last_basic_block);
      else
	sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  int indx = expr->bitmap_index;
	  struct occr *occr;

	  /* The expression is transparent in this block if it is not killed.
	     We start by assuming all are transparent [none are killed], and
	     then reset the bits for those that are.  */
	  if (transp)
	    compute_transp (expr->expr, indx, transp, table->set_p);

	  /* The occurrences recorded in antic_occr are exactly those that
	     we want to set to nonzero in ANTLOC.  */
	  if (antloc)
	    for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->deleted_p = 0;
	      }

	  /* The occurrences recorded in avail_occr are exactly those that
	     we want to set to nonzero in COMP.  */
	  if (comp)
	    for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->copied_p = 0;
	      }

	  /* While we're scanning the table, this is a good place to
	     initialize this.  */
	  expr->reaching_reg = 0;
	}
    }
}
\f
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (int n_regs)
{
  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *));

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem (void)
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}

/* An OLD_INSN that used to set REGNO was replaced by NEW_INSN.
   Update the corresponding `reg_set_table' entry accordingly.
   We assume that NEW_INSN is not already recorded in reg_set_table[regno].  */

static void
replace_one_set (int regno, rtx old_insn, rtx new_insn)
{
  struct reg_set *reg_info;
  if (regno >= reg_set_table_size)
    return;
  for (reg_info = reg_set_table[regno]; reg_info; reg_info = reg_info->next)
    if (reg_info->insn == old_insn)
      {
	reg_info->insn = new_insn;
	break;
      }
}

/* Record REGNO in the reg_set table.  */

static void
record_one_set (int regno, rtx insn)
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table = grealloc (reg_set_table,
				new_size * sizeof (struct reg_set *));
      memset (reg_set_table + reg_set_table_size, 0,
	      (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}

/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx record_set_insn = (rtx) data;

  if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (rtx f)
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), record_set_info, insn);
}
\f
/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;


/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static GTY(()) rtx test_insn;
static int
want_to_gcse_p (rtx x)
{
  int num_clobbers = 0;
  int icode;

  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CALL:
    case CONSTANT_P_RTX:
      return 0;

    default:
      break;
    }

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
	= make_insn_raw (gen_rtx_SET (VOIDmode,
				      gen_rtx_REG (word_mode,
						   FIRST_PSEUDO_REGISTER * 2),
				      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
	  && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}

/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (rtx x, rtx insn, int avail_p)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

	if (info->last_bb != current_bb)
	  return 1;
	if (avail_p)
	  return info->last_set < INSN_CUID (insn);
	else
	  return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
				  x, avail_p))
	return 0;
      else
	return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call needed at this
	     level, change it into iteration.  This function is called enough
	     to be worth it.  */
	  if (i == 0)
	    return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

	  else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
	    return 0;
    }

  return 1;
}
1401
a13d4ebf
AM
1402/* Used for communication between mems_conflict_for_gcse_p and
1403 load_killed_in_block_p. Nonzero if mems_conflict_for_gcse_p finds a
1404 conflict between two memory references. */
1405static int gcse_mems_conflict_p;
1406
1407/* Used for communication between mems_conflict_for_gcse_p and
1408 load_killed_in_block_p. A memory reference for a load instruction,
1409 mems_conflict_for_gcse_p will see if a memory store conflicts with
1410 this memory load. */
1411static rtx gcse_mem_operand;
1412
1413/* DEST is the output of an instruction. If it is a memory reference, and
1414 possibly conflicts with the load found in gcse_mem_operand, then set
1415 gcse_mems_conflict_p to a nonzero value. */
1416
1417static void
1d088dee
AJ
1418mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
1419 void *data ATTRIBUTE_UNUSED)
a13d4ebf
AM
1420{
1421 while (GET_CODE (dest) == SUBREG
1422 || GET_CODE (dest) == ZERO_EXTRACT
1423 || GET_CODE (dest) == SIGN_EXTRACT
1424 || GET_CODE (dest) == STRICT_LOW_PART)
1425 dest = XEXP (dest, 0);
1426
1427 /* If DEST is not a MEM, then it will not conflict with the load. Note
1428 that function calls are assumed to clobber memory, but are handled
1429 elsewhere. */
1430 if (GET_CODE (dest) != MEM)
1431 return;
aaa4ca30 1432
a13d4ebf 1433 /* If we are setting a MEM in our list of specially recognized MEMs,
589005ff
KH
1434 don't mark as killed this time. */
1435
47a3dae1 1436 if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
a13d4ebf
AM
1437 {
1438 if (!find_rtx_in_ldst (dest))
1439 gcse_mems_conflict_p = 1;
1440 return;
1441 }
aaa4ca30 1442
a13d4ebf
AM
1443 if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
1444 rtx_addr_varies_p))
1445 gcse_mems_conflict_p = 1;
1446}
1447
1448/* Return nonzero if the expression in X (a memory reference) is killed
1449 in block BB before or after the insn with the CUID in UID_LIMIT.
1450 AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
1451 before UID_LIMIT.
1452
1453 To check the entire block, set UID_LIMIT to max_uid + 1 and
1454 AVAIL_P to 0. */
1455
1456static int
1d088dee 1457load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
a13d4ebf 1458{
0b17ab2f 1459 rtx list_entry = modify_mem_list[bb->index];
a13d4ebf
AM
1460 while (list_entry)
1461 {
1462 rtx setter;
1463 /* Ignore entries in the list that do not apply. */
1464 if ((avail_p
1465 && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
1466 || (! avail_p
1467 && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
1468 {
1469 list_entry = XEXP (list_entry, 1);
1470 continue;
1471 }
1472
1473 setter = XEXP (list_entry, 0);
1474
1475 /* If SETTER is a call everything is clobbered. Note that calls
1476 to pure functions are never put on the list, so we need not
1477 worry about them. */
1478 if (GET_CODE (setter) == CALL_INSN)
1479 return 1;
1480
1481 /* SETTER must be an INSN of some kind that sets memory. Call
589005ff 1482 note_stores to examine each hunk of memory that is modified.
a13d4ebf
AM
1483
1484 The note_stores interface is pretty limited, so we have to
1485 communicate via global variables. Yuk. */
1486 gcse_mem_operand = x;
1487 gcse_mems_conflict_p = 0;
1488 note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
1489 if (gcse_mems_conflict_p)
1490 return 1;
1491 list_entry = XEXP (list_entry, 1);
1492 }
1493 return 0;
1494}
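
[Editor's note: the note_stores dance above is worth spelling out. Because the callback gets no return channel, load_killed_in_block_p stashes its question in the two file-scope variables and reads the answer back after the walk. A minimal standalone C sketch of the same pattern, with hypothetical names rather than GCC's API:]

#include <string.h>

/* Globals standing in for gcse_mem_operand / gcse_mems_conflict_p.  */
static const char *query_addr;
static int conflict_found;

/* Per-store callback, in the role of mems_conflict_for_gcse_p.  */
static void
check_store (const char *store_addr)
{
  if (strcmp (store_addr, query_addr) == 0)
    conflict_found = 1;
}

/* Walker in the style of note_stores: calls FN once per store.  */
static void
for_each_store (const char *const *stores, int n, void (*fn) (const char *))
{
  int i;

  for (i = 0; i < n; i++)
    fn (stores[i]);
}

/* Usage mirroring the loop body above: set the globals, walk, read back.  */
static int
load_killed_by (const char *const *stores, int n, const char *load_addr)
{
  query_addr = load_addr;
  conflict_found = 0;
  for_each_store (stores, n, check_store);
  return conflict_found;
}
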
1495
cc2902df 1496/* Return nonzero if the operands of expression X are unchanged from
7506f491
DE
1497 the start of INSN's basic block up to but not including INSN. */
1498
1499static int
1d088dee 1500oprs_anticipatable_p (rtx x, rtx insn)
7506f491
DE
1501{
1502 return oprs_unchanged_p (x, insn, 0);
1503}
1504
cc2902df 1505/* Return nonzero if the operands of expression X are unchanged from
7506f491
DE
1506 INSN to the end of INSN's basic block. */
1507
1508static int
1d088dee 1509oprs_available_p (rtx x, rtx insn)
7506f491
DE
1510{
1511 return oprs_unchanged_p (x, insn, 1);
1512}
1513
1514/* Hash expression X.
c4c81601
RK
1515
1516 MODE is only used if X is a CONST_INT. DO_NOT_RECORD_P is a boolean
1517 indicating if a volatile operand is found or if the expression contains
b58b21d5
RS
1518 something we don't want to insert in the table. HASH_TABLE_SIZE is
1519 the current size of the hash table to be probed.
7506f491
DE
1520
1521 ??? One might want to merge this with canon_hash. Later. */
1522
1523static unsigned int
b58b21d5
RS
1524hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p,
1525 int hash_table_size)
7506f491
DE
1526{
1527 unsigned int hash;
1528
1529 *do_not_record_p = 0;
1530
1531 hash = hash_expr_1 (x, mode, do_not_record_p);
1532 return hash % hash_table_size;
1533}
172890a2 1534
6462bb43 1535/* Hash a string. Just add its bytes up. */
172890a2 1536
6462bb43 1537static inline unsigned
1d088dee 1538hash_string_1 (const char *ps)
6462bb43
AO
1539{
1540 unsigned hash = 0;
8e42ace1 1541 const unsigned char *p = (const unsigned char *) ps;
589005ff 1542
6462bb43
AO
1543 if (p)
1544 while (*p)
1545 hash += *p++;
1546
1547 return hash;
1548}
7506f491
DE
1549
1550/* Subroutine of hash_expr to do the actual work. */
1551
1552static unsigned int
1d088dee 1553hash_expr_1 (rtx x, enum machine_mode mode, int *do_not_record_p)
7506f491
DE
1554{
1555 int i, j;
1556 unsigned hash = 0;
1557 enum rtx_code code;
6f7d635c 1558 const char *fmt;
7506f491 1559
c4c81601 1560 /* Used to turn recursion into iteration. We can't rely on GCC's
fbe5a4a6 1561 tail-recursion elimination since we need to keep accumulating values
c4c81601 1562 in HASH. */
7506f491
DE
1563
1564 if (x == 0)
1565 return hash;
1566
c4c81601 1567 repeat:
7506f491
DE
1568 code = GET_CODE (x);
1569 switch (code)
1570 {
1571 case REG:
c4c81601
RK
1572 hash += ((unsigned int) REG << 7) + REGNO (x);
1573 return hash;
7506f491
DE
1574
1575 case CONST_INT:
c4c81601
RK
1576 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
1577 + (unsigned int) INTVAL (x));
1578 return hash;
7506f491
DE
1579
1580 case CONST_DOUBLE:
1581 /* This is like the general case, except that it only counts
1582 the integers representing the constant. */
c4c81601 1583 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
7506f491
DE
1584 if (GET_MODE (x) != VOIDmode)
1585 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
c4c81601 1586 hash += (unsigned int) XWINT (x, i);
7506f491 1587 else
c4c81601
RK
1588 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
1589 + (unsigned int) CONST_DOUBLE_HIGH (x));
7506f491
DE
1590 return hash;
1591
69ef87e2
AH
1592 case CONST_VECTOR:
1593 {
1594 int units;
1595 rtx elt;
1596
1597 units = CONST_VECTOR_NUNITS (x);
1598
1599 for (i = 0; i < units; ++i)
1600 {
1601 elt = CONST_VECTOR_ELT (x, i);
1602 hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
1603 }
1604
1605 return hash;
1606 }
1607
7506f491
DE
1608 /* Assume there is only one rtx object for any given label. */
1609 case LABEL_REF:
1610 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1611 differences and differences between each stage's debugging dumps. */
c4c81601
RK
1612 hash += (((unsigned int) LABEL_REF << 7)
1613 + CODE_LABEL_NUMBER (XEXP (x, 0)));
7506f491
DE
1614 return hash;
1615
1616 case SYMBOL_REF:
1617 {
1618 /* Don't hash on the symbol's address to avoid bootstrap differences.
1619 Different hash values may cause expressions to be recorded in
1620 different orders and thus different registers to be used in the
1621 final assembler. This also avoids differences in the dump files
1622 between various stages. */
1623 unsigned int h = 0;
3cce094d 1624 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
c4c81601 1625
7506f491
DE
1626 while (*p)
1627 h += (h << 7) + *p++; /* ??? revisit */
c4c81601
RK
1628
1629 hash += ((unsigned int) SYMBOL_REF << 7) + h;
7506f491
DE
1630 return hash;
1631 }
1632
1633 case MEM:
1634 if (MEM_VOLATILE_P (x))
1635 {
1636 *do_not_record_p = 1;
1637 return 0;
1638 }
c4c81601
RK
1639
1640 hash += (unsigned int) MEM;
d51f3632
JH
 1641   /* We used to hash on the alias set, but this is not good, since the alias
 1642      set may differ between -fprofile-arcs and -fbranch-probabilities
 1643      compilations, causing the profiles to fail to match.  */
7506f491
DE
1644 x = XEXP (x, 0);
1645 goto repeat;
1646
1647 case PRE_DEC:
1648 case PRE_INC:
1649 case POST_DEC:
1650 case POST_INC:
1651 case PC:
1652 case CC0:
1653 case CALL:
1654 case UNSPEC_VOLATILE:
1655 *do_not_record_p = 1;
1656 return 0;
1657
1658 case ASM_OPERANDS:
1659 if (MEM_VOLATILE_P (x))
1660 {
1661 *do_not_record_p = 1;
1662 return 0;
1663 }
6462bb43
AO
1664 else
1665 {
1666 /* We don't want to take the filename and line into account. */
1667 hash += (unsigned) code + (unsigned) GET_MODE (x)
1668 + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
1669 + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
1670 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
1671
1672 if (ASM_OPERANDS_INPUT_LENGTH (x))
1673 {
1674 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
1675 {
1676 hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
1677 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
1678 do_not_record_p)
1679 + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
1680 (x, i)));
1681 }
1682
1683 hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
1684 x = ASM_OPERANDS_INPUT (x, 0);
1685 mode = GET_MODE (x);
1686 goto repeat;
1687 }
1688 return hash;
1689 }
7506f491
DE
1690
1691 default:
1692 break;
1693 }
1694
7506f491 1695 hash += (unsigned) code + (unsigned) GET_MODE (x);
c4c81601 1696 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
1697 {
1698 if (fmt[i] == 'e')
1699 {
7506f491
DE
1700 /* If we are about to do the last recursive call
1701 needed at this level, change it into iteration.
1702 This function is called enough to be worth it. */
1703 if (i == 0)
1704 {
c4c81601 1705 x = XEXP (x, i);
7506f491
DE
1706 goto repeat;
1707 }
c4c81601
RK
1708
1709 hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
7506f491
DE
1710 if (*do_not_record_p)
1711 return 0;
1712 }
c4c81601 1713
7506f491
DE
1714 else if (fmt[i] == 'E')
1715 for (j = 0; j < XVECLEN (x, i); j++)
1716 {
1717 hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
1718 if (*do_not_record_p)
1719 return 0;
1720 }
c4c81601 1721
7506f491 1722 else if (fmt[i] == 's')
6462bb43 1723 hash += hash_string_1 (XSTR (x, i));
7506f491 1724 else if (fmt[i] == 'i')
c4c81601 1725 hash += (unsigned int) XINT (x, i);
7506f491
DE
1726 else
1727 abort ();
1728 }
1729
1730 return hash;
1731}
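
[Editor's note: the `goto repeat' trick used twice above (for MEM operands and for the last 'e' slot) is manual tail-call elimination. Because the running HASH must keep accumulating across calls, the compiler's own tail-recursion optimization cannot be relied on, so the final recursive call is rewritten as a jump. A freestanding C illustration of the same transformation, using a hypothetical tree type rather than rtx:]

struct node { int value; struct node *left, *right; };

/* Sum every value in the tree.  The rightmost descent is turned into
   iteration exactly the way hash_expr_1 turns its last recursion into
   a jump back to `repeat'.  */
static unsigned
tree_sum (const struct node *n)
{
  unsigned sum = 0;

 repeat:
  if (n == 0)
    return sum;

  sum += (unsigned) n->value;
  if (n->left)
    sum += tree_sum (n->left);  /* Genuine recursion for the inner child.  */

  n = n->right;                 /* The last call becomes iteration.  */
  goto repeat;
}
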
1732
1733/* Hash a set of register REGNO.
1734
c4c81601
RK
1735 Sets are hashed on the register that is set. This simplifies the PRE copy
1736 propagation code.
7506f491
DE
1737
1738 ??? May need to make things more elaborate. Later, as necessary. */
1739
1740static unsigned int
1d088dee 1741hash_set (int regno, int hash_table_size)
7506f491
DE
1742{
1743 unsigned int hash;
1744
1745 hash = regno;
1746 return hash % hash_table_size;
1747}
1748
cc2902df 1749/* Return nonzero if exp1 is equivalent to exp2.
7506f491
DE
1750 ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */
1751
1752static int
1d088dee 1753expr_equiv_p (rtx x, rtx y)
7506f491 1754{
b3694847
SS
1755 int i, j;
1756 enum rtx_code code;
1757 const char *fmt;
7506f491
DE
1758
1759 if (x == y)
1760 return 1;
c4c81601 1761
7506f491 1762 if (x == 0 || y == 0)
ebd7a7af 1763 return 0;
7506f491
DE
1764
1765 code = GET_CODE (x);
1766 if (code != GET_CODE (y))
1767 return 0;
1768
1769 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1770 if (GET_MODE (x) != GET_MODE (y))
1771 return 0;
1772
1773 switch (code)
1774 {
1775 case PC:
1776 case CC0:
7506f491 1777 case CONST_INT:
ebd7a7af 1778 return 0;
7506f491
DE
1779
1780 case LABEL_REF:
1781 return XEXP (x, 0) == XEXP (y, 0);
1782
1783 case SYMBOL_REF:
1784 return XSTR (x, 0) == XSTR (y, 0);
1785
1786 case REG:
1787 return REGNO (x) == REGNO (y);
1788
297c3335
RH
1789 case MEM:
1790 /* Can't merge two expressions in different alias sets, since we can
1791 decide that the expression is transparent in a block when it isn't,
1792 due to it being set with the different alias set. */
1793 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
1794 return 0;
bad998e0
ZD
1795
1796 /* A volatile mem should not be considered equivalent to any other. */
1797 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1798 return 0;
297c3335
RH
1799 break;
1800
7506f491
DE
1801 /* For commutative operations, check both orders. */
1802 case PLUS:
1803 case MULT:
1804 case AND:
1805 case IOR:
1806 case XOR:
1807 case NE:
1808 case EQ:
1809 return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
1810 && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
1811 || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
1812 && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));
1813
6462bb43
AO
1814 case ASM_OPERANDS:
1815 /* We don't use the generic code below because we want to
1816 disregard filename and line numbers. */
1817
1818 /* A volatile asm isn't equivalent to any other. */
1819 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1820 return 0;
1821
1822 if (GET_MODE (x) != GET_MODE (y)
1823 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
1824 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
1825 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
1826 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
1827 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
1828 return 0;
1829
1830 if (ASM_OPERANDS_INPUT_LENGTH (x))
1831 {
1832 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
1833 if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
1834 ASM_OPERANDS_INPUT (y, i))
1835 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
1836 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
1837 return 0;
1838 }
1839
1840 return 1;
1841
7506f491
DE
1842 default:
1843 break;
1844 }
1845
1846 /* Compare the elements. If any pair of corresponding elements
1847 fail to match, return 0 for the whole thing. */
1848
1849 fmt = GET_RTX_FORMAT (code);
1850 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1851 {
1852 switch (fmt[i])
1853 {
1854 case 'e':
1855 if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
1856 return 0;
1857 break;
1858
1859 case 'E':
1860 if (XVECLEN (x, i) != XVECLEN (y, i))
1861 return 0;
1862 for (j = 0; j < XVECLEN (x, i); j++)
1863 if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1864 return 0;
1865 break;
1866
1867 case 's':
1868 if (strcmp (XSTR (x, i), XSTR (y, i)))
1869 return 0;
1870 break;
1871
1872 case 'i':
1873 if (XINT (x, i) != XINT (y, i))
1874 return 0;
1875 break;
1876
1877 case 'w':
1878 if (XWINT (x, i) != XWINT (y, i))
1879 return 0;
1880 break;
1881
1882 case '0':
1883 break;
aaa4ca30 1884
7506f491
DE
1885 default:
1886 abort ();
1887 }
8e42ace1 1888 }
7506f491
DE
1889
1890 return 1;
1891}
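
[Editor's note: like hash_expr_1, the fallback loop here is driven by the per-code format string from GET_RTX_FORMAT, each character of which says how to handle one operand slot ('e' subexpression, 'E' vector, 's' string, 'i' int, 'w' wide int, '0' ignored). A self-contained sketch of that dispatch style, with an invented two-code format table standing in for the RTL one:]

enum code { PAIR, LEAF };

/* Per-code operand formats, in the spirit of GET_RTX_FORMAT:
   'e' = subexpression operand, 'i' = integer operand.  */
static const char *const fmt_table[] = { "ee", "i" };

struct expr2
{
  enum code code;
  int ival;                 /* Used when the format char is 'i'.  */
  struct expr2 *op[2];      /* Used when the format char is 'e'.  */
};

static int
equiv_p (const struct expr2 *x, const struct expr2 *y)
{
  const char *fmt;
  int i;

  if (x == y)
    return 1;
  if (x == 0 || y == 0 || x->code != y->code)
    return 0;

  fmt = fmt_table[x->code];
  for (i = 0; fmt[i]; i++)
    if (fmt[i] == 'e')
      {
        if (! equiv_p (x->op[i], y->op[i]))
          return 0;
      }
    else if (fmt[i] == 'i')
      {
        if (x->ival != y->ival)
          return 0;
      }

  return 1;
}
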
1892
02280659 1893/* Insert expression X in INSN in the hash TABLE.
7506f491
DE
1894 If it is already present, record it as the last occurrence in INSN's
1895 basic block.
1896
1897 MODE is the mode of the value X is being stored into.
1898 It is only used if X is a CONST_INT.
1899
cc2902df
KH
1900 ANTIC_P is nonzero if X is an anticipatable expression.
1901 AVAIL_P is nonzero if X is an available expression. */
7506f491
DE
1902
1903static void
1d088dee
AJ
1904insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
1905 int avail_p, struct hash_table *table)
7506f491
DE
1906{
1907 int found, do_not_record_p;
1908 unsigned int hash;
1909 struct expr *cur_expr, *last_expr = NULL;
1910 struct occr *antic_occr, *avail_occr;
1911 struct occr *last_occr = NULL;
1912
02280659 1913 hash = hash_expr (x, mode, &do_not_record_p, table->size);
7506f491
DE
1914
1915 /* Do not insert expression in table if it contains volatile operands,
1916 or if hash_expr determines the expression is something we don't want
1917 to or can't handle. */
1918 if (do_not_record_p)
1919 return;
1920
02280659 1921 cur_expr = table->table[hash];
7506f491
DE
1922 found = 0;
1923
c4c81601 1924 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
7506f491
DE
1925 {
1926 /* If the expression isn't found, save a pointer to the end of
1927 the list. */
1928 last_expr = cur_expr;
1929 cur_expr = cur_expr->next_same_hash;
1930 }
1931
1932 if (! found)
1933 {
703ad42b 1934 cur_expr = gcse_alloc (sizeof (struct expr));
7506f491 1935 bytes_used += sizeof (struct expr);
02280659 1936 if (table->table[hash] == NULL)
c4c81601 1937 /* This is the first pattern that hashed to this index. */
02280659 1938 table->table[hash] = cur_expr;
7506f491 1939 else
c4c81601
RK
1940 /* Add EXPR to end of this hash chain. */
1941 last_expr->next_same_hash = cur_expr;
1942
589005ff 1943 /* Set the fields of the expr element. */
7506f491 1944 cur_expr->expr = x;
02280659 1945 cur_expr->bitmap_index = table->n_elems++;
7506f491
DE
1946 cur_expr->next_same_hash = NULL;
1947 cur_expr->antic_occr = NULL;
1948 cur_expr->avail_occr = NULL;
1949 }
1950
1951 /* Now record the occurrence(s). */
7506f491
DE
1952 if (antic_p)
1953 {
1954 antic_occr = cur_expr->antic_occr;
1955
1956 /* Search for another occurrence in the same basic block. */
1957 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
1958 {
1959 /* If an occurrence isn't found, save a pointer to the end of
1960 the list. */
1961 last_occr = antic_occr;
1962 antic_occr = antic_occr->next;
1963 }
1964
1965 if (antic_occr)
c4c81601
RK
1966 /* Found another instance of the expression in the same basic block.
1967 Prefer the currently recorded one. We want the first one in the
1968 block and the block is scanned from start to end. */
1969 ; /* nothing to do */
7506f491
DE
1970 else
1971 {
1972 /* First occurrence of this expression in this basic block. */
703ad42b 1973 antic_occr = gcse_alloc (sizeof (struct occr));
7506f491
DE
1974 bytes_used += sizeof (struct occr);
1975 /* First occurrence of this expression in any block? */
1976 if (cur_expr->antic_occr == NULL)
1977 cur_expr->antic_occr = antic_occr;
1978 else
1979 last_occr->next = antic_occr;
c4c81601 1980
7506f491
DE
1981 antic_occr->insn = insn;
1982 antic_occr->next = NULL;
1983 }
1984 }
1985
1986 if (avail_p)
1987 {
1988 avail_occr = cur_expr->avail_occr;
1989
1990 /* Search for another occurrence in the same basic block. */
1991 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
1992 {
1993 /* If an occurrence isn't found, save a pointer to the end of
1994 the list. */
1995 last_occr = avail_occr;
1996 avail_occr = avail_occr->next;
1997 }
1998
1999 if (avail_occr)
c4c81601
RK
2000 /* Found another instance of the expression in the same basic block.
2001 Prefer this occurrence to the currently recorded one. We want
2002 the last one in the block and the block is scanned from start
2003 to end. */
2004 avail_occr->insn = insn;
7506f491
DE
2005 else
2006 {
2007 /* First occurrence of this expression in this basic block. */
703ad42b 2008 avail_occr = gcse_alloc (sizeof (struct occr));
7506f491 2009 bytes_used += sizeof (struct occr);
c4c81601 2010
7506f491
DE
2011 /* First occurrence of this expression in any block? */
2012 if (cur_expr->avail_occr == NULL)
2013 cur_expr->avail_occr = avail_occr;
2014 else
2015 last_occr->next = avail_occr;
c4c81601 2016
7506f491
DE
2017 avail_occr->insn = insn;
2018 avail_occr->next = NULL;
2019 }
2020 }
2021}
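
[Editor's note: the asymmetry between the two occurrence lists is easiest to see on a concrete block. Suppose a block computes the same expression at insns with CUIDs 10 and 30. On the anticipatable list the recorded insn stays 10: anticipatability asks whether the expression is computed before anything in the block can change it, and the earliest computation is the strongest witness. On the available list insn 30 overwrites 10: availability asks whether the value still holds at the block's end, and only the latest computation can answer that.]
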
2022
2023/* Insert pattern X in INSN in the hash table.
2024 X is a SET of a reg to either another reg or a constant.
2025 If it is already present, record it as the last occurrence in INSN's
2026 basic block. */
2027
2028static void
1d088dee 2029insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
7506f491
DE
2030{
2031 int found;
2032 unsigned int hash;
2033 struct expr *cur_expr, *last_expr = NULL;
2034 struct occr *cur_occr, *last_occr = NULL;
2035
2036 if (GET_CODE (x) != SET
2037 || GET_CODE (SET_DEST (x)) != REG)
2038 abort ();
2039
02280659 2040 hash = hash_set (REGNO (SET_DEST (x)), table->size);
7506f491 2041
02280659 2042 cur_expr = table->table[hash];
7506f491
DE
2043 found = 0;
2044
c4c81601 2045 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
7506f491
DE
2046 {
2047 /* If the expression isn't found, save a pointer to the end of
2048 the list. */
2049 last_expr = cur_expr;
2050 cur_expr = cur_expr->next_same_hash;
2051 }
2052
2053 if (! found)
2054 {
703ad42b 2055 cur_expr = gcse_alloc (sizeof (struct expr));
7506f491 2056 bytes_used += sizeof (struct expr);
02280659 2057 if (table->table[hash] == NULL)
c4c81601 2058 /* This is the first pattern that hashed to this index. */
02280659 2059 table->table[hash] = cur_expr;
7506f491 2060 else
c4c81601
RK
2061 /* Add EXPR to end of this hash chain. */
2062 last_expr->next_same_hash = cur_expr;
2063
7506f491
DE
2064 /* Set the fields of the expr element.
2065 We must copy X because it can be modified when copy propagation is
2066 performed on its operands. */
7506f491 2067 cur_expr->expr = copy_rtx (x);
02280659 2068 cur_expr->bitmap_index = table->n_elems++;
7506f491
DE
2069 cur_expr->next_same_hash = NULL;
2070 cur_expr->antic_occr = NULL;
2071 cur_expr->avail_occr = NULL;
2072 }
2073
2074 /* Now record the occurrence. */
7506f491
DE
2075 cur_occr = cur_expr->avail_occr;
2076
2077 /* Search for another occurrence in the same basic block. */
2078 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2079 {
2080 /* If an occurrence isn't found, save a pointer to the end of
2081 the list. */
2082 last_occr = cur_occr;
2083 cur_occr = cur_occr->next;
2084 }
2085
2086 if (cur_occr)
c4c81601
RK
2087 /* Found another instance of the expression in the same basic block.
2088 Prefer this occurrence to the currently recorded one. We want the
2089 last one in the block and the block is scanned from start to end. */
2090 cur_occr->insn = insn;
7506f491
DE
2091 else
2092 {
2093 /* First occurrence of this expression in this basic block. */
703ad42b 2094 cur_occr = gcse_alloc (sizeof (struct occr));
7506f491 2095 bytes_used += sizeof (struct occr);
c4c81601 2096
7506f491
DE
2097 /* First occurrence of this expression in any block? */
2098 if (cur_expr->avail_occr == NULL)
2099 cur_expr->avail_occr = cur_occr;
2100 else
2101 last_occr->next = cur_occr;
c4c81601 2102
7506f491
DE
2103 cur_occr->insn = insn;
2104 cur_occr->next = NULL;
2105 }
2106}
2107
6b2d1c9e
RS
2108/* Determine whether the rtx X should be treated as a constant for
2109 the purposes of GCSE's constant propagation. */
2110
2111static bool
1d088dee 2112gcse_constant_p (rtx x)
6b2d1c9e
RS
2113{
2114 /* Consider a COMPARE of two integers constant. */
2115 if (GET_CODE (x) == COMPARE
2116 && GET_CODE (XEXP (x, 0)) == CONST_INT
2117 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2118 return true;
2119
db2f435b
AP
2120
 2121   /* Consider a COMPARE of the same registers a constant
938d968e 2122      if they are not floating point registers.  */
db2f435b
AP
2123 if (GET_CODE(x) == COMPARE
2124 && GET_CODE (XEXP (x, 0)) == REG
2125 && GET_CODE (XEXP (x, 1)) == REG
2126 && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
2127 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
2128 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
2129 return true;
2130
6b2d1c9e
RS
2131 if (GET_CODE (x) == CONSTANT_P_RTX)
2132 return false;
2133
2134 return CONSTANT_P (x);
2135}
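
[Editor's note: a concrete case for the second test: (compare:CC (reg:SI 58) (reg:SI 58)) always evaluates the same way, so its result may be propagated like any constant. The FLOAT_MODE_P guard is there because under IEEE arithmetic a NaN compares unequal to itself, so the same pattern in SFmode or DFmode is not a compile-time constant.]
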
2136
02280659
ZD
2137/* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
2138 expression one). */
7506f491
DE
2139
2140static void
1d088dee 2141hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
7506f491
DE
2142{
2143 rtx src = SET_SRC (pat);
2144 rtx dest = SET_DEST (pat);
172890a2 2145 rtx note;
7506f491
DE
2146
2147 if (GET_CODE (src) == CALL)
02280659 2148 hash_scan_call (src, insn, table);
7506f491 2149
172890a2 2150 else if (GET_CODE (dest) == REG)
7506f491 2151 {
172890a2 2152 unsigned int regno = REGNO (dest);
7506f491
DE
2153 rtx tmp;
2154
172890a2
RK
2155 /* If this is a single set and we are doing constant propagation,
2156 see if a REG_NOTE shows this equivalent to a constant. */
02280659 2157 if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
6b2d1c9e 2158 && gcse_constant_p (XEXP (note, 0)))
172890a2
RK
2159 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2160
7506f491 2161 /* Only record sets of pseudo-regs in the hash table. */
02280659 2162 if (! table->set_p
7506f491
DE
2163 && regno >= FIRST_PSEUDO_REGISTER
2164 /* Don't GCSE something if we can't do a reg/reg copy. */
773eae39 2165 && can_copy_p (GET_MODE (dest))
068473ec
JH
 2166	   /* GCSE commonly inserts instructions after the insn.  We can't
 2167	      do that easily for EH_REGION notes, so disable GCSE on these
2168 for now. */
2169 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
7506f491 2170 /* Is SET_SRC something we want to gcse? */
172890a2
RK
2171 && want_to_gcse_p (src)
2172 /* Don't CSE a nop. */
43e72072
JJ
2173 && ! set_noop_p (pat)
2174 /* Don't GCSE if it has attached REG_EQUIV note.
 2175	      At this point only function parameters should have
 2176	      REG_EQUIV notes, and if the argument slot is used somewhere
a1f300c0 2177	      explicitly, it means the address of the parameter has been taken,
43e72072
JJ
2178 so we should not extend the lifetime of the pseudo. */
2179 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2180 || GET_CODE (XEXP (note, 0)) != MEM))
7506f491
DE
2181 {
2182 /* An expression is not anticipatable if its operands are
52d76e11
RK
2183 modified before this insn or if this is not the only SET in
2184 this insn. */
2185 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
7506f491 2186 /* An expression is not available if its operands are
eb296bd9
GK
2187 subsequently modified, including this insn. It's also not
2188 available if this is a branch, because we can't insert
2189 a set after the branch. */
2190 int avail_p = (oprs_available_p (src, insn)
2191 && ! JUMP_P (insn));
c4c81601 2192
02280659 2193 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
7506f491 2194 }
c4c81601 2195
7506f491 2196 /* Record sets for constant/copy propagation. */
02280659 2197 else if (table->set_p
7506f491
DE
2198 && regno >= FIRST_PSEUDO_REGISTER
2199 && ((GET_CODE (src) == REG
2200 && REGNO (src) >= FIRST_PSEUDO_REGISTER
773eae39 2201 && can_copy_p (GET_MODE (dest))
172890a2 2202 && REGNO (src) != regno)
6b2d1c9e 2203 || gcse_constant_p (src))
7506f491
DE
2204 /* A copy is not available if its src or dest is subsequently
2205 modified. Here we want to search from INSN+1 on, but
2206 oprs_available_p searches from INSN on. */
a813c111 2207 && (insn == BB_END (BLOCK_FOR_INSN (insn))
7506f491
DE
2208 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2209 && oprs_available_p (pat, tmp))))
02280659 2210 insert_set_in_table (pat, insn, table);
7506f491 2211 }
d91edf86 2212   /* In the case of a store we want to consider the memory value as
f5f2e3cd
MH
 2213      available in the REG stored in that memory.  This makes it possible to
 2214      remove redundant loads due to stores to the same location.  */
2215 else if (flag_gcse_las && GET_CODE (src) == REG && GET_CODE (dest) == MEM)
2216 {
2217 unsigned int regno = REGNO (src);
2218
2219 /* Do not do this for constant/copy propagation. */
2220 if (! table->set_p
2221 /* Only record sets of pseudo-regs in the hash table. */
2222 && regno >= FIRST_PSEUDO_REGISTER
2223 /* Don't GCSE something if we can't do a reg/reg copy. */
2224 && can_copy_p (GET_MODE (src))
 2225	  /* GCSE commonly inserts instructions after the insn.  We can't
 2226	     do that easily for EH_REGION notes, so disable GCSE on these
2227 for now. */
2228 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2229 /* Is SET_DEST something we want to gcse? */
2230 && want_to_gcse_p (dest)
2231 /* Don't CSE a nop. */
2232 && ! set_noop_p (pat)
2233 /* Don't GCSE if it has attached REG_EQUIV note.
 2234	     At this point only function parameters should have
 2235	     REG_EQUIV notes, and if the argument slot is used somewhere
 2236	     explicitly, it means the address of the parameter has been taken,
2237 so we should not extend the lifetime of the pseudo. */
2238 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2239 || GET_CODE (XEXP (note, 0)) != MEM))
2240 {
2241 /* Stores are never anticipatable. */
2242 int antic_p = 0;
2243 /* An expression is not available if its operands are
2244 subsequently modified, including this insn. It's also not
2245 available if this is a branch, because we can't insert
2246 a set after the branch. */
2247 int avail_p = oprs_available_p (dest, insn)
2248 && ! JUMP_P (insn);
2249
2250 /* Record the memory expression (DEST) in the hash table. */
2251 insert_expr_in_table (dest, GET_MODE (dest), insn,
2252 antic_p, avail_p, table);
2253 }
2254 }
7506f491
DE
2255}
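
[Editor's note: the flag_gcse_las path above is easiest to see at the source level. In a fragment like the following, recording DEST in the expression table lets a later load from the same location be satisfied from the stored register. A sketch of the intended effect, not of the exact RTL:]

void
store_then_load (int *p, int x, int *out)
{
  *p = x;     /* Store: the MEM is recorded as holding X's value.  */
  *out = *p;  /* Redundant load: with -fgcse-las this can become
                 *out = x, eliminating the memory read.  */
}
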
2256
2257static void
1d088dee
AJ
2258hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
2259 struct hash_table *table ATTRIBUTE_UNUSED)
7506f491
DE
2260{
2261 /* Currently nothing to do. */
2262}
2263
2264static void
1d088dee
AJ
2265hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
2266 struct hash_table *table ATTRIBUTE_UNUSED)
7506f491
DE
2267{
2268 /* Currently nothing to do. */
2269}
2270
2271/* Process INSN and add hash table entries as appropriate.
2272
2273 Only available expressions that set a single pseudo-reg are recorded.
2274
2275 Single sets in a PARALLEL could be handled, but it's an extra complication
2276 that isn't dealt with right now. The trick is handling the CLOBBERs that
2277 are also in the PARALLEL. Later.
2278
cc2902df 2279 If SET_P is nonzero, this is for the assignment hash table,
ed79bb3d
R
2280 otherwise it is for the expression hash table.
 2281    If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should
2282 not record any expressions. */
7506f491
DE
2283
2284static void
1d088dee 2285hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
7506f491
DE
2286{
2287 rtx pat = PATTERN (insn);
c4c81601 2288 int i;
7506f491 2289
172890a2
RK
2290 if (in_libcall_block)
2291 return;
2292
7506f491
DE
2293 /* Pick out the sets of INSN and for other forms of instructions record
2294 what's been modified. */
2295
172890a2 2296 if (GET_CODE (pat) == SET)
02280659 2297 hash_scan_set (pat, insn, table);
7506f491 2298 else if (GET_CODE (pat) == PARALLEL)
c4c81601
RK
2299 for (i = 0; i < XVECLEN (pat, 0); i++)
2300 {
2301 rtx x = XVECEXP (pat, 0, i);
7506f491 2302
c4c81601 2303 if (GET_CODE (x) == SET)
02280659 2304 hash_scan_set (x, insn, table);
c4c81601 2305 else if (GET_CODE (x) == CLOBBER)
02280659 2306 hash_scan_clobber (x, insn, table);
c4c81601 2307 else if (GET_CODE (x) == CALL)
02280659 2308 hash_scan_call (x, insn, table);
c4c81601 2309 }
7506f491 2310
7506f491 2311 else if (GET_CODE (pat) == CLOBBER)
02280659 2312 hash_scan_clobber (pat, insn, table);
7506f491 2313 else if (GET_CODE (pat) == CALL)
02280659 2314 hash_scan_call (pat, insn, table);
7506f491
DE
2315}
2316
2317static void
1d088dee 2318dump_hash_table (FILE *file, const char *name, struct hash_table *table)
7506f491
DE
2319{
2320 int i;
2321 /* Flattened out table, so it's printed in proper order. */
4da896b2
MM
2322 struct expr **flat_table;
2323 unsigned int *hash_val;
c4c81601 2324 struct expr *expr;
4da896b2 2325
703ad42b
KG
2326 flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
2327 hash_val = xmalloc (table->n_elems * sizeof (unsigned int));
7506f491 2328
02280659
ZD
2329 for (i = 0; i < (int) table->size; i++)
2330 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601
RK
2331 {
2332 flat_table[expr->bitmap_index] = expr;
2333 hash_val[expr->bitmap_index] = i;
2334 }
7506f491
DE
2335
2336 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
02280659 2337 name, table->size, table->n_elems);
7506f491 2338
02280659 2339 for (i = 0; i < (int) table->n_elems; i++)
21318741
RK
2340 if (flat_table[i] != 0)
2341 {
a0ac9e5a 2342 expr = flat_table[i];
21318741
RK
2343 fprintf (file, "Index %d (hash value %d)\n ",
2344 expr->bitmap_index, hash_val[i]);
a0ac9e5a 2345 print_rtl (file, expr->expr);
21318741
RK
2346 fprintf (file, "\n");
2347 }
7506f491
DE
2348
2349 fprintf (file, "\n");
4da896b2 2350
4da896b2
MM
2351 free (flat_table);
2352 free (hash_val);
7506f491
DE
2353}
2354
2355/* Record register first/last/block set information for REGNO in INSN.
c4c81601 2356
80c29cc4 2357 first_set records the first place in the block where the register
7506f491 2358 is set and is used to compute "anticipatability".
c4c81601 2359
80c29cc4 2360 last_set records the last place in the block where the register
7506f491 2361 is set and is used to compute "availability".
c4c81601 2362
80c29cc4
RZ
2363 last_bb records the block for which first_set and last_set are
2364 valid, as a quick test to invalidate them.
2365
7506f491
DE
2366 reg_set_in_block records whether the register is set in the block
2367 and is used to compute "transparency". */
2368
2369static void
1d088dee 2370record_last_reg_set_info (rtx insn, int regno)
7506f491 2371{
80c29cc4
RZ
2372 struct reg_avail_info *info = &reg_avail_info[regno];
2373 int cuid = INSN_CUID (insn);
c4c81601 2374
80c29cc4
RZ
2375 info->last_set = cuid;
2376 if (info->last_bb != current_bb)
2377 {
2378 info->last_bb = current_bb;
2379 info->first_set = cuid;
e0082a72 2380 SET_BIT (reg_set_in_block[current_bb->index], regno);
80c29cc4 2381 }
7506f491
DE
2382}
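
[Editor's note: the payoff of keeping both CUIDs is that the REG case of oprs_unchanged_p, earlier in this file, can answer both query directions with a single comparison. Schematically, as a paraphrase rather than a verbatim quote of that code:]

  struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

  if (info->last_bb != current_bb)
    return 1;                                    /* Never set in this block.  */
  if (avail_p)
    return info->last_set < INSN_CUID (insn);    /* Unchanged from INSN on.  */
  else
    return info->first_set >= INSN_CUID (insn);  /* Unchanged before INSN.  */
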
2383
a13d4ebf
AM
2384
2385/* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2386 Note we store a pair of elements in the list, so they have to be
2387 taken off pairwise. */
2388
589005ff 2389static void
1d088dee
AJ
2390canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
2391 void * v_insn)
a13d4ebf
AM
2392{
2393 rtx dest_addr, insn;
0fe854a7 2394 int bb;
a13d4ebf
AM
2395
2396 while (GET_CODE (dest) == SUBREG
2397 || GET_CODE (dest) == ZERO_EXTRACT
2398 || GET_CODE (dest) == SIGN_EXTRACT
2399 || GET_CODE (dest) == STRICT_LOW_PART)
2400 dest = XEXP (dest, 0);
2401
2402 /* If DEST is not a MEM, then it will not conflict with a load. Note
2403 that function calls are assumed to clobber memory, but are handled
2404 elsewhere. */
2405
2406 if (GET_CODE (dest) != MEM)
2407 return;
2408
2409 dest_addr = get_addr (XEXP (dest, 0));
2410 dest_addr = canon_rtx (dest_addr);
589005ff 2411 insn = (rtx) v_insn;
0fe854a7 2412 bb = BLOCK_NUM (insn);
a13d4ebf 2413
589005ff 2414 canon_modify_mem_list[bb] =
0fe854a7 2415 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
589005ff 2416 canon_modify_mem_list[bb] =
0fe854a7
RH
2417 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
2418 bitmap_set_bit (canon_modify_mem_list_set, bb);
a13d4ebf
AM
2419}
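
[Editor's note: because the address is pushed first and the MEM second, the head of each pair is the MEM itself, with its canonicalized address in the following node, so consumers must walk the list two nodes at a stride. Roughly, in the shape the transparency computation uses later in the file (a sketch; real traversals also stop early at an unpaired CALL_INSN node):]

  rtx list_entry, dest, dest_addr;

  for (list_entry = canon_modify_mem_list[bb];
       list_entry != NULL_RTX;
       list_entry = XEXP (list_entry, 1))
    {
      dest = XEXP (list_entry, 0);        /* The stored MEM.  */
      list_entry = XEXP (list_entry, 1);  /* Step to its partner.  */
      dest_addr = XEXP (list_entry, 0);   /* Its canonicalized address.  */
      /* ... test dependence of DEST/DEST_ADDR against a queried load ...  */
    }
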
2420
a13d4ebf
AM
2421/* Record memory modification information for INSN. We do not actually care
2422 about the memory location(s) that are set, or even how they are set (consider
2423 a CALL_INSN). We merely need to record which insns modify memory. */
7506f491
DE
2424
2425static void
1d088dee 2426record_last_mem_set_info (rtx insn)
7506f491 2427{
0fe854a7
RH
2428 int bb = BLOCK_NUM (insn);
2429
ccef9ef5 2430 /* load_killed_in_block_p will handle the case of calls clobbering
dc297297 2431 everything. */
0fe854a7
RH
2432 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
2433 bitmap_set_bit (modify_mem_list_set, bb);
a13d4ebf
AM
2434
2435 if (GET_CODE (insn) == CALL_INSN)
2436 {
2437 /* Note that traversals of this loop (other than for free-ing)
2438 will break after encountering a CALL_INSN. So, there's no
dc297297 2439 need to insert a pair of items, as canon_list_insert does. */
589005ff
KH
2440 canon_modify_mem_list[bb] =
2441 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
0fe854a7 2442 bitmap_set_bit (canon_modify_mem_list_set, bb);
a13d4ebf
AM
2443 }
2444 else
0fe854a7 2445 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
7506f491
DE
2446}
2447
7506f491 2448/* Called from compute_hash_table via note_stores to handle one
84832317
MM
2449 SET or CLOBBER in an insn. DATA is really the instruction in which
2450 the SET is taking place. */
7506f491
DE
2451
2452static void
1d088dee 2453record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
7506f491 2454{
84832317
MM
2455 rtx last_set_insn = (rtx) data;
2456
7506f491
DE
2457 if (GET_CODE (dest) == SUBREG)
2458 dest = SUBREG_REG (dest);
2459
2460 if (GET_CODE (dest) == REG)
2461 record_last_reg_set_info (last_set_insn, REGNO (dest));
2462 else if (GET_CODE (dest) == MEM
2463 /* Ignore pushes, they clobber nothing. */
2464 && ! push_operand (dest, GET_MODE (dest)))
2465 record_last_mem_set_info (last_set_insn);
2466}
2467
2468/* Top level function to create an expression or assignment hash table.
2469
2470 Expression entries are placed in the hash table if
2471 - they are of the form (set (pseudo-reg) src),
2472 - src is something we want to perform GCSE on,
2473 - none of the operands are subsequently modified in the block
2474
2475 Assignment entries are placed in the hash table if
2476 - they are of the form (set (pseudo-reg) src),
2477 - src is something we want to perform const/copy propagation on,
2478 - none of the operands or target are subsequently modified in the block
c4c81601 2479
7506f491
DE
2480 Currently src must be a pseudo-reg or a const_int.
2481
02280659 2482 TABLE is the table computed. */
7506f491
DE
2483
2484static void
1d088dee 2485compute_hash_table_work (struct hash_table *table)
7506f491 2486{
80c29cc4 2487 unsigned int i;
7506f491
DE
2488
2489 /* While we compute the hash table we also compute a bit array of which
2490 registers are set in which blocks.
7506f491
DE
2491 ??? This isn't needed during const/copy propagation, but it's cheap to
2492 compute. Later. */
d55bc081 2493 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
7506f491 2494
a13d4ebf 2495   /* Re-cache any INSN_LIST nodes we have allocated.  */
73991d6a 2496 clear_modify_mem_tables ();
7506f491 2497 /* Some working arrays used to track first and last set in each block. */
703ad42b 2498 reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
80c29cc4
RZ
2499
2500 for (i = 0; i < max_gcse_regno; ++i)
e0082a72 2501 reg_avail_info[i].last_bb = NULL;
7506f491 2502
e0082a72 2503 FOR_EACH_BB (current_bb)
7506f491
DE
2504 {
2505 rtx insn;
770ae6cc 2506 unsigned int regno;
ed79bb3d 2507 int in_libcall_block;
7506f491
DE
2508
2509 /* First pass over the instructions records information used to
2510 determine when registers and memory are first and last set.
ccef9ef5 2511 ??? hard-reg reg_set_in_block computation
7506f491
DE
2512 could be moved to compute_sets since they currently don't change. */
2513
a813c111
SB
2514 for (insn = BB_HEAD (current_bb);
2515 insn && insn != NEXT_INSN (BB_END (current_bb));
7506f491
DE
2516 insn = NEXT_INSN (insn))
2517 {
2c3c49de 2518 if (! INSN_P (insn))
7506f491
DE
2519 continue;
2520
2521 if (GET_CODE (insn) == CALL_INSN)
2522 {
19652adf 2523 bool clobbers_all = false;
589005ff 2524#ifdef NON_SAVING_SETJMP
19652adf
ZW
2525 if (NON_SAVING_SETJMP
2526 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
2527 clobbers_all = true;
2528#endif
2529
7506f491 2530 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
19652adf
ZW
2531 if (clobbers_all
2532 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7506f491 2533 record_last_reg_set_info (insn, regno);
c4c81601 2534
24a28584 2535 mark_call (insn);
7506f491
DE
2536 }
2537
84832317 2538 note_stores (PATTERN (insn), record_last_set_info, insn);
7506f491
DE
2539 }
2540
fbef91d8
RS
2541 /* Insert implicit sets in the hash table. */
2542 if (table->set_p
2543 && implicit_sets[current_bb->index] != NULL_RTX)
2544 hash_scan_set (implicit_sets[current_bb->index],
a813c111 2545 BB_HEAD (current_bb), table);
fbef91d8 2546
7506f491
DE
2547 /* The next pass builds the hash table. */
2548
a813c111
SB
2549 for (insn = BB_HEAD (current_bb), in_libcall_block = 0;
2550 insn && insn != NEXT_INSN (BB_END (current_bb));
7506f491 2551 insn = NEXT_INSN (insn))
2c3c49de 2552 if (INSN_P (insn))
c4c81601
RK
2553 {
2554 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
589005ff 2555 in_libcall_block = 1;
02280659 2556 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
589005ff 2557 in_libcall_block = 0;
02280659
ZD
2558 hash_scan_insn (insn, table, in_libcall_block);
2559 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
589005ff 2560 in_libcall_block = 0;
8e42ace1 2561 }
7506f491
DE
2562 }
2563
80c29cc4
RZ
2564 free (reg_avail_info);
2565 reg_avail_info = NULL;
7506f491
DE
2566}
2567
02280659 2568/* Allocate space for the set/expr hash TABLE.
7506f491 2569 N_INSNS is the number of instructions in the function.
02280659
ZD
2570 It is used to determine the number of buckets to use.
2571 SET_P determines whether set or expression table will
2572 be created. */
7506f491
DE
2573
2574static void
1d088dee 2575alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
7506f491
DE
2576{
2577 int n;
2578
02280659
ZD
2579 table->size = n_insns / 4;
2580 if (table->size < 11)
2581 table->size = 11;
c4c81601 2582
7506f491
DE
2583 /* Attempt to maintain efficient use of hash table.
2584 Making it an odd number is simplest for now.
2585 ??? Later take some measurements. */
02280659
ZD
2586 table->size |= 1;
2587 n = table->size * sizeof (struct expr *);
703ad42b 2588 table->table = gmalloc (n);
02280659 2589 table->set_p = set_p;
7506f491
DE
2590}
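
[Editor's note: worked through, a function of 1000 insns gets 1000 / 4 = 250 buckets, bumped to 251 by the |= 1; anything under 48 insns ends up at the 11-bucket floor (11 is already odd, so the |= 1 leaves it alone). An odd size is a cheap way to keep `hash % table->size' from discarding low-order hash bits when many inputs share a power-of-two stride.]
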
2591
02280659 2592/* Free things allocated by alloc_hash_table. */
7506f491
DE
2593
2594static void
1d088dee 2595free_hash_table (struct hash_table *table)
7506f491 2596{
02280659 2597 free (table->table);
7506f491
DE
2598}
2599
02280659
ZD
2600/* Compute the hash TABLE for doing copy/const propagation or
2601 expression hash table. */
7506f491
DE
2602
2603static void
1d088dee 2604compute_hash_table (struct hash_table *table)
7506f491
DE
2605{
2606 /* Initialize count of number of entries in hash table. */
02280659 2607 table->n_elems = 0;
703ad42b 2608 memset (table->table, 0, table->size * sizeof (struct expr *));
7506f491 2609
02280659 2610 compute_hash_table_work (table);
7506f491
DE
2611}
2612\f
2613/* Expression tracking support. */
2614
02280659 2615/* Lookup pattern PAT in the expression TABLE.
7506f491
DE
2616 The result is a pointer to the table entry, or NULL if not found. */
2617
2618static struct expr *
1d088dee 2619lookup_expr (rtx pat, struct hash_table *table)
7506f491
DE
2620{
2621 int do_not_record_p;
2622 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
02280659 2623 table->size);
7506f491
DE
2624 struct expr *expr;
2625
2626 if (do_not_record_p)
2627 return NULL;
2628
02280659 2629 expr = table->table[hash];
7506f491
DE
2630
2631 while (expr && ! expr_equiv_p (expr->expr, pat))
2632 expr = expr->next_same_hash;
2633
2634 return expr;
2635}
2636
ceda50e9
RH
2637/* Lookup REGNO in the set TABLE. The result is a pointer to the
2638 table entry, or NULL if not found. */
7506f491
DE
2639
2640static struct expr *
1d088dee 2641lookup_set (unsigned int regno, struct hash_table *table)
7506f491 2642{
02280659 2643 unsigned int hash = hash_set (regno, table->size);
7506f491
DE
2644 struct expr *expr;
2645
02280659 2646 expr = table->table[hash];
7506f491 2647
ceda50e9
RH
2648 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2649 expr = expr->next_same_hash;
7506f491
DE
2650
2651 return expr;
2652}
2653
2654/* Return the next entry for REGNO in list EXPR. */
2655
2656static struct expr *
1d088dee 2657next_set (unsigned int regno, struct expr *expr)
7506f491
DE
2658{
2659 do
2660 expr = expr->next_same_hash;
2661 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
c4c81601 2662
7506f491
DE
2663 return expr;
2664}
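
[Editor's note: together, lookup_set and next_set form the iteration idiom the copy/const propagation code elsewhere in this file uses to visit every recorded SET of a register. A sketch, with the loop body elided:]

  struct expr *expr;

  for (expr = lookup_set (regno, &set_hash_table);
       expr != NULL;
       expr = next_set (regno, expr))
    {
      /* ... examine SET_SRC (expr->expr) for a constant or copy ...  */
    }
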
2665
0fe854a7
RH
2666/* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2667 types may be mixed. */
2668
2669static void
1d088dee 2670free_insn_expr_list_list (rtx *listp)
0fe854a7
RH
2671{
2672 rtx list, next;
2673
2674 for (list = *listp; list ; list = next)
2675 {
2676 next = XEXP (list, 1);
2677 if (GET_CODE (list) == EXPR_LIST)
2678 free_EXPR_LIST_node (list);
2679 else
2680 free_INSN_LIST_node (list);
2681 }
2682
2683 *listp = NULL;
2684}
2685
73991d6a
JH
2686/* Clear canon_modify_mem_list and modify_mem_list tables. */
2687static void
1d088dee 2688clear_modify_mem_tables (void)
73991d6a
JH
2689{
2690 int i;
2691
2692 EXECUTE_IF_SET_IN_BITMAP
0fe854a7
RH
2693 (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
2694 bitmap_clear (modify_mem_list_set);
73991d6a
JH
2695
2696 EXECUTE_IF_SET_IN_BITMAP
2697 (canon_modify_mem_list_set, 0, i,
0fe854a7
RH
2698 free_insn_expr_list_list (canon_modify_mem_list + i));
2699 bitmap_clear (canon_modify_mem_list_set);
73991d6a
JH
2700}
2701
2702/* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
2703
2704static void
1d088dee 2705free_modify_mem_tables (void)
73991d6a
JH
2706{
2707 clear_modify_mem_tables ();
2708 free (modify_mem_list);
2709 free (canon_modify_mem_list);
2710 modify_mem_list = 0;
2711 canon_modify_mem_list = 0;
2712}
2713
7506f491
DE
2714/* Reset tables used to keep track of what's still available [since the
2715 start of the block]. */
2716
2717static void
1d088dee 2718reset_opr_set_tables (void)
7506f491
DE
2719{
2720 /* Maintain a bitmap of which regs have been set since beginning of
2721 the block. */
73991d6a 2722 CLEAR_REG_SET (reg_set_bitmap);
c4c81601 2723
7506f491
DE
2724 /* Also keep a record of the last instruction to modify memory.
2725 For now this is very trivial, we only record whether any memory
2726 location has been modified. */
73991d6a 2727 clear_modify_mem_tables ();
7506f491
DE
2728}
2729
cc2902df 2730/* Return nonzero if the operands of X are not set before INSN in
7506f491
DE
2731 INSN's basic block. */
2732
2733static int
1d088dee 2734oprs_not_set_p (rtx x, rtx insn)
7506f491 2735{
c4c81601 2736 int i, j;
7506f491 2737 enum rtx_code code;
6f7d635c 2738 const char *fmt;
7506f491 2739
7506f491
DE
2740 if (x == 0)
2741 return 1;
2742
2743 code = GET_CODE (x);
2744 switch (code)
2745 {
2746 case PC:
2747 case CC0:
2748 case CONST:
2749 case CONST_INT:
2750 case CONST_DOUBLE:
69ef87e2 2751 case CONST_VECTOR:
7506f491
DE
2752 case SYMBOL_REF:
2753 case LABEL_REF:
2754 case ADDR_VEC:
2755 case ADDR_DIFF_VEC:
2756 return 1;
2757
2758 case MEM:
589005ff 2759 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
e2d2ed72 2760 INSN_CUID (insn), x, 0))
a13d4ebf 2761 return 0;
c4c81601
RK
2762 else
2763 return oprs_not_set_p (XEXP (x, 0), insn);
7506f491
DE
2764
2765 case REG:
73991d6a 2766 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
7506f491
DE
2767
2768 default:
2769 break;
2770 }
2771
c4c81601 2772 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
2773 {
2774 if (fmt[i] == 'e')
2775 {
7506f491
DE
2776 /* If we are about to do the last recursive call
2777 needed at this level, change it into iteration.
2778 This function is called enough to be worth it. */
2779 if (i == 0)
c4c81601
RK
2780 return oprs_not_set_p (XEXP (x, i), insn);
2781
2782 if (! oprs_not_set_p (XEXP (x, i), insn))
7506f491
DE
2783 return 0;
2784 }
2785 else if (fmt[i] == 'E')
c4c81601
RK
2786 for (j = 0; j < XVECLEN (x, i); j++)
2787 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2788 return 0;
7506f491
DE
2789 }
2790
2791 return 1;
2792}
2793
2794/* Mark things set by a CALL. */
2795
2796static void
1d088dee 2797mark_call (rtx insn)
7506f491 2798{
24a28584 2799 if (! CONST_OR_PURE_CALL_P (insn))
a13d4ebf 2800 record_last_mem_set_info (insn);
7506f491
DE
2801}
2802
2803/* Mark things set by a SET. */
2804
2805static void
1d088dee 2806mark_set (rtx pat, rtx insn)
7506f491
DE
2807{
2808 rtx dest = SET_DEST (pat);
2809
2810 while (GET_CODE (dest) == SUBREG
2811 || GET_CODE (dest) == ZERO_EXTRACT
2812 || GET_CODE (dest) == SIGN_EXTRACT
2813 || GET_CODE (dest) == STRICT_LOW_PART)
2814 dest = XEXP (dest, 0);
2815
a13d4ebf 2816 if (GET_CODE (dest) == REG)
73991d6a 2817 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
a13d4ebf
AM
2818 else if (GET_CODE (dest) == MEM)
2819 record_last_mem_set_info (insn);
2820
7506f491 2821 if (GET_CODE (SET_SRC (pat)) == CALL)
b5ce41ff 2822 mark_call (insn);
7506f491
DE
2823}
2824
2825/* Record things set by a CLOBBER. */
2826
2827static void
1d088dee 2828mark_clobber (rtx pat, rtx insn)
7506f491
DE
2829{
2830 rtx clob = XEXP (pat, 0);
2831
2832 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2833 clob = XEXP (clob, 0);
2834
a13d4ebf 2835 if (GET_CODE (clob) == REG)
73991d6a 2836 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
a13d4ebf
AM
2837 else
2838 record_last_mem_set_info (insn);
7506f491
DE
2839}
2840
2841/* Record things set by INSN.
2842 This data is used by oprs_not_set_p. */
2843
2844static void
1d088dee 2845mark_oprs_set (rtx insn)
7506f491
DE
2846{
2847 rtx pat = PATTERN (insn);
c4c81601 2848 int i;
7506f491
DE
2849
2850 if (GET_CODE (pat) == SET)
2851 mark_set (pat, insn);
2852 else if (GET_CODE (pat) == PARALLEL)
c4c81601
RK
2853 for (i = 0; i < XVECLEN (pat, 0); i++)
2854 {
2855 rtx x = XVECEXP (pat, 0, i);
2856
2857 if (GET_CODE (x) == SET)
2858 mark_set (x, insn);
2859 else if (GET_CODE (x) == CLOBBER)
2860 mark_clobber (x, insn);
2861 else if (GET_CODE (x) == CALL)
2862 mark_call (insn);
2863 }
7506f491 2864
7506f491
DE
2865 else if (GET_CODE (pat) == CLOBBER)
2866 mark_clobber (pat, insn);
2867 else if (GET_CODE (pat) == CALL)
b5ce41ff 2868 mark_call (insn);
7506f491 2869}
b5ce41ff 2870
7506f491
DE
2871\f
2872/* Classic GCSE reaching definition support. */
2873
2874/* Allocate reaching def variables. */
2875
2876static void
1d088dee 2877alloc_rd_mem (int n_blocks, int n_insns)
7506f491 2878{
703ad42b 2879 rd_kill = sbitmap_vector_alloc (n_blocks, n_insns);
d55bc081 2880 sbitmap_vector_zero (rd_kill, n_blocks);
7506f491 2881
703ad42b 2882 rd_gen = sbitmap_vector_alloc (n_blocks, n_insns);
d55bc081 2883 sbitmap_vector_zero (rd_gen, n_blocks);
7506f491 2884
703ad42b 2885 reaching_defs = sbitmap_vector_alloc (n_blocks, n_insns);
d55bc081 2886 sbitmap_vector_zero (reaching_defs, n_blocks);
7506f491 2887
703ad42b 2888 rd_out = sbitmap_vector_alloc (n_blocks, n_insns);
d55bc081 2889 sbitmap_vector_zero (rd_out, n_blocks);
7506f491
DE
2890}
2891
2892/* Free reaching def variables. */
2893
2894static void
1d088dee 2895free_rd_mem (void)
7506f491 2896{
5a660bff
DB
2897 sbitmap_vector_free (rd_kill);
2898 sbitmap_vector_free (rd_gen);
2899 sbitmap_vector_free (reaching_defs);
2900 sbitmap_vector_free (rd_out);
7506f491
DE
2901}
2902
c4c81601 2903/* Add INSN to the kills of BB. REGNO, set in BB, is killed by INSN. */
7506f491
DE
2904
2905static void
1d088dee 2906handle_rd_kill_set (rtx insn, int regno, basic_block bb)
7506f491 2907{
c4c81601 2908 struct reg_set *this_reg;
7506f491 2909
c4c81601
RK
2910 for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg ->next)
2911 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
0b17ab2f 2912 SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
7506f491
DE
2913}
2914
7506f491
DE
2915/* Compute the set of kill's for reaching definitions. */
2916
2917static void
1d088dee 2918compute_kill_rd (void)
7506f491 2919{
e0082a72 2920 int cuid;
172890a2
RK
2921 unsigned int regno;
2922 int i;
e0082a72 2923 basic_block bb;
7506f491
DE
2924
2925 /* For each block
2926 For each set bit in `gen' of the block (i.e each insn which
ac7c5af5
JL
2927 generates a definition in the block)
2928 Call the reg set by the insn corresponding to that bit regx
2929 Look at the linked list starting at reg_set_table[regx]
2930 For each setting of regx in the linked list, which is not in
2931 this block
6d2f8887 2932 Set the bit in `kill' corresponding to that insn. */
e0082a72 2933 FOR_EACH_BB (bb)
c4c81601 2934 for (cuid = 0; cuid < max_cuid; cuid++)
e0082a72 2935 if (TEST_BIT (rd_gen[bb->index], cuid))
7506f491 2936 {
c4c81601
RK
2937 rtx insn = CUID_INSN (cuid);
2938 rtx pat = PATTERN (insn);
7506f491 2939
c4c81601
RK
2940 if (GET_CODE (insn) == CALL_INSN)
2941 {
2942 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4e2db584 2943 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
e0082a72 2944 handle_rd_kill_set (insn, regno, bb);
c4c81601 2945 }
7506f491 2946
c4c81601
RK
2947 if (GET_CODE (pat) == PARALLEL)
2948 {
2949 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7506f491 2950 {
c4c81601 2951 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
7506f491 2952
c4c81601
RK
2953 if ((code == SET || code == CLOBBER)
2954 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
2955 handle_rd_kill_set (insn,
2956 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
e0082a72 2957 bb);
ac7c5af5 2958 }
ac7c5af5 2959 }
c4c81601
RK
2960 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
2961 /* Each setting of this register outside of this block
2962 must be marked in the set of kills in this block. */
e0082a72 2963 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
7506f491 2964 }
7506f491
DE
2965}
2966
589005ff 2967/* Compute the reaching definitions as in
7506f491
DE
2968 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
2969 Chapter 10. It is the same algorithm as used for computing available
2970 expressions but applied to the gens and kills of reaching definitions. */
2971
2972static void
1d088dee 2973compute_rd (void)
7506f491 2974{
e0082a72
ZD
2975 int changed, passes;
2976 basic_block bb;
7506f491 2977
e0082a72
ZD
2978 FOR_EACH_BB (bb)
2979 sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);
7506f491
DE
2980
2981 passes = 0;
2982 changed = 1;
2983 while (changed)
2984 {
2985 changed = 0;
e0082a72 2986 FOR_EACH_BB (bb)
ac7c5af5 2987 {
e0082a72
ZD
2988 sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
2989 changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
2990 reaching_defs[bb->index], rd_kill[bb->index]);
ac7c5af5 2991 }
7506f491
DE
2992 passes++;
2993 }
2994
2995 if (gcse_file)
2996 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
2997}
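
[Editor's note: in equation form, the loop iterates the classic forward may-analysis to a fixed point:

  reaching_defs[b] = union over each predecessor p of b of rd_out[p]
  rd_out[b]        = rd_gen[b] union (reaching_defs[b] - rd_kill[b])

with rd_out seeded from rd_gen. Since sbitmap_union_of_diff_cg reports whether any bit of rd_out changed and the sets only ever grow, the while loop must terminate.]
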
2998\f
2999/* Classic GCSE available expression support. */
3000
3001/* Allocate memory for available expression computation. */
3002
3003static void
1d088dee 3004alloc_avail_expr_mem (int n_blocks, int n_exprs)
7506f491 3005{
703ad42b 3006 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
d55bc081 3007 sbitmap_vector_zero (ae_kill, n_blocks);
7506f491 3008
703ad42b 3009 ae_gen = sbitmap_vector_alloc (n_blocks, n_exprs);
d55bc081 3010 sbitmap_vector_zero (ae_gen, n_blocks);
7506f491 3011
703ad42b 3012 ae_in = sbitmap_vector_alloc (n_blocks, n_exprs);
d55bc081 3013 sbitmap_vector_zero (ae_in, n_blocks);
7506f491 3014
703ad42b 3015 ae_out = sbitmap_vector_alloc (n_blocks, n_exprs);
d55bc081 3016 sbitmap_vector_zero (ae_out, n_blocks);
7506f491
DE
3017}
3018
3019static void
1d088dee 3020free_avail_expr_mem (void)
7506f491 3021{
5a660bff
DB
3022 sbitmap_vector_free (ae_kill);
3023 sbitmap_vector_free (ae_gen);
3024 sbitmap_vector_free (ae_in);
3025 sbitmap_vector_free (ae_out);
7506f491
DE
3026}
3027
3028/* Compute the set of available expressions generated in each basic block. */
3029
3030static void
1d088dee 3031compute_ae_gen (struct hash_table *expr_hash_table)
7506f491 3032{
2e653e39 3033 unsigned int i;
c4c81601
RK
3034 struct expr *expr;
3035 struct occr *occr;
7506f491
DE
3036
3037 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
3038 This is all we have to do because an expression is not recorded if it
3039 is not available, and the only expressions we want to work with are the
3040 ones that are recorded. */
02280659
ZD
3041 for (i = 0; i < expr_hash_table->size; i++)
3042 for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
c4c81601
RK
3043 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
3044 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
7506f491
DE
3045}
3046
cc2902df 3047/* Return nonzero if expression X is killed in BB. */
7506f491
DE
3048
3049static int
1d088dee 3050expr_killed_p (rtx x, basic_block bb)
7506f491 3051{
c4c81601 3052 int i, j;
7506f491 3053 enum rtx_code code;
6f7d635c 3054 const char *fmt;
7506f491 3055
7506f491
DE
3056 if (x == 0)
3057 return 1;
3058
3059 code = GET_CODE (x);
3060 switch (code)
3061 {
3062 case REG:
0b17ab2f 3063 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
7506f491
DE
3064
3065 case MEM:
a13d4ebf
AM
3066 if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
3067 return 1;
c4c81601
RK
3068 else
3069 return expr_killed_p (XEXP (x, 0), bb);
7506f491
DE
3070
3071 case PC:
3072 case CC0: /*FIXME*/
3073 case CONST:
3074 case CONST_INT:
3075 case CONST_DOUBLE:
69ef87e2 3076 case CONST_VECTOR:
7506f491
DE
3077 case SYMBOL_REF:
3078 case LABEL_REF:
3079 case ADDR_VEC:
3080 case ADDR_DIFF_VEC:
3081 return 0;
3082
3083 default:
3084 break;
3085 }
3086
c4c81601 3087 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
3088 {
3089 if (fmt[i] == 'e')
3090 {
7506f491
DE
3091 /* If we are about to do the last recursive call
3092 needed at this level, change it into iteration.
3093 This function is called enough to be worth it. */
3094 if (i == 0)
c4c81601
RK
3095 return expr_killed_p (XEXP (x, i), bb);
3096 else if (expr_killed_p (XEXP (x, i), bb))
7506f491
DE
3097 return 1;
3098 }
3099 else if (fmt[i] == 'E')
c4c81601
RK
3100 for (j = 0; j < XVECLEN (x, i); j++)
3101 if (expr_killed_p (XVECEXP (x, i, j), bb))
3102 return 1;
7506f491
DE
3103 }
3104
3105 return 0;
3106}
3107
3108/* Compute the set of available expressions killed in each basic block. */
3109
3110static void
1d088dee
AJ
3111compute_ae_kill (sbitmap *ae_gen, sbitmap *ae_kill,
3112 struct hash_table *expr_hash_table)
7506f491 3113{
e0082a72 3114 basic_block bb;
2e653e39 3115 unsigned int i;
c4c81601 3116 struct expr *expr;
7506f491 3117
e0082a72 3118 FOR_EACH_BB (bb)
02280659
ZD
3119 for (i = 0; i < expr_hash_table->size; i++)
3120 for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash)
7506f491 3121 {
c4c81601 3122 /* Skip EXPR if generated in this block. */
e0082a72 3123 if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
c4c81601 3124 continue;
7506f491 3125
e0082a72
ZD
3126 if (expr_killed_p (expr->expr, bb))
3127 SET_BIT (ae_kill[bb->index], expr->bitmap_index);
7506f491 3128 }
7506f491 3129}
7506f491
DE
3130\f
3131/* Actually perform the Classic GCSE optimizations. */
3132
cc2902df 3133/* Return nonzero if occurrence OCCR of expression EXPR reaches block BB.
7506f491 3134
cc2902df 3135 CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself
7506f491
DE
3136 as a positive reach. We want to do this when there are two computations
3137 of the expression in the block.
3138
3139 VISITED is a pointer to a working buffer for tracking which BB's have
3140 been visited. It is NULL for the top-level call.
3141
3142 We treat reaching expressions that go through blocks containing the same
3143 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3144 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3145 2 as not reaching. The intent is to improve the probability of finding
3146 only one reaching expression and to reduce register lifetimes by picking
3147 the closest such expression. */
3148
3149static int
1d088dee
AJ
3150expr_reaches_here_p_work (struct occr *occr, struct expr *expr,
3151 basic_block bb, int check_self_loop, char *visited)
7506f491 3152{
36349f8b 3153 edge pred;
7506f491 3154
e2d2ed72 3155 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
7506f491 3156 {
e2d2ed72 3157 basic_block pred_bb = pred->src;
7506f491 3158
0b17ab2f 3159 if (visited[pred_bb->index])
c4c81601 3160 /* This predecessor has already been visited. Nothing to do. */
7506f491 3161 ;
7506f491 3162 else if (pred_bb == bb)
ac7c5af5 3163 {
7506f491
DE
3164 /* BB loops on itself. */
3165 if (check_self_loop
0b17ab2f
RH
3166 && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
3167 && BLOCK_NUM (occr->insn) == pred_bb->index)
7506f491 3168 return 1;
c4c81601 3169
0b17ab2f 3170 visited[pred_bb->index] = 1;
ac7c5af5 3171 }
c4c81601 3172
7506f491 3173 /* Ignore this predecessor if it kills the expression. */
0b17ab2f
RH
3174 else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
3175 visited[pred_bb->index] = 1;
c4c81601 3176
7506f491 3177 /* Does this predecessor generate this expression? */
0b17ab2f 3178 else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
7506f491
DE
3179 {
3180 /* Is this the occurrence we're looking for?
3181 Note that there's only one generating occurrence per block
3182 so we just need to check the block number. */
0b17ab2f 3183 if (BLOCK_NUM (occr->insn) == pred_bb->index)
7506f491 3184 return 1;
c4c81601 3185
0b17ab2f 3186 visited[pred_bb->index] = 1;
7506f491 3187 }
c4c81601 3188
7506f491
DE
3189 /* Neither gen nor kill. */
3190 else
ac7c5af5 3191 {
0b17ab2f 3192 visited[pred_bb->index] = 1;
589005ff 3193 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
283a2545 3194 visited))
c4c81601 3195
7506f491 3196 return 1;
ac7c5af5 3197 }
7506f491
DE
3198 }
3199
3200 /* All paths have been checked. */
3201 return 0;
3202}
3203
283a2545 3204/* This wrapper for expr_reaches_here_p_work() is to ensure that any
dc297297 3205 memory allocated for that function is freed. */
283a2545
RL
3206
3207static int
1d088dee
AJ
3208expr_reaches_here_p (struct occr *occr, struct expr *expr, basic_block bb,
3209 int check_self_loop)
283a2545
RL
3210{
3211 int rval;
703ad42b 3212 char *visited = xcalloc (last_basic_block, 1);
283a2545 3213
c4c81601 3214 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
589005ff 3215
283a2545 3216 free (visited);
c4c81601 3217 return rval;
283a2545
RL
3218}
3219
7506f491
DE
3220/* Return the instruction computing EXPR that reaches INSN's basic block.
3221 If there is more than one such instruction, return NULL.
3222
3223 Called only by handle_avail_expr. */
3224
3225static rtx
1d088dee 3226computing_insn (struct expr *expr, rtx insn)
7506f491 3227{
e2d2ed72 3228 basic_block bb = BLOCK_FOR_INSN (insn);
7506f491
DE
3229
3230 if (expr->avail_occr->next == NULL)
589005ff 3231 {
e2d2ed72 3232 if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
c4c81601
RK
3233 /* The only available occurrence of the expression is this insn itself
3234 (i.e. a loop in the flow graph), so do nothing. */
3235 return NULL;
3236
7506f491
DE
3237 /* (FIXME) Case where we found a pattern that was created by
3238 an earlier substitution. */
3239 return expr->avail_occr->insn;
3240 }
3241 else
3242 {
3243 /* Pattern is computed more than once.
589005ff 3244 Search backwards from this insn to see how many of these
7506f491
DE
3245 computations actually reach this insn. */
3246 struct occr *occr;
3247 rtx insn_computes_expr = NULL;
3248 int can_reach = 0;
3249
3250 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
3251 {
e2d2ed72 3252 if (BLOCK_FOR_INSN (occr->insn) == bb)
7506f491
DE
3253 {
3254 /* The expression is generated in this block.
3255 The only time we care about this is when the expression
3256 is generated later in the block [and thus there's a loop].
3257 We let the normal cse pass handle the other cases. */
c4c81601
RK
3258 if (INSN_CUID (insn) < INSN_CUID (occr->insn)
3259 && expr_reaches_here_p (occr, expr, bb, 1))
7506f491
DE
3260 {
3261 can_reach++;
3262 if (can_reach > 1)
3263 return NULL;
c4c81601 3264
7506f491
DE
3265 insn_computes_expr = occr->insn;
3266 }
3267 }
c4c81601
RK
3268 else if (expr_reaches_here_p (occr, expr, bb, 0))
3269 {
3270 can_reach++;
3271 if (can_reach > 1)
3272 return NULL;
3273
3274 insn_computes_expr = occr->insn;
3275 }
7506f491
DE
3276 }
3277
3278 if (insn_computes_expr == NULL)
3279 abort ();
c4c81601 3280
7506f491
DE
3281 return insn_computes_expr;
3282 }
3283}
3284
cc2902df 3285/* Return nonzero if the definition in DEF_INSN can reach INSN.
7506f491
DE
3286 Only called by can_disregard_other_sets. */
3287
3288static int
1d088dee 3289def_reaches_here_p (rtx insn, rtx def_insn)
7506f491
DE
3290{
3291 rtx reg;
3292
3293 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3294 return 1;
3295
3296 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3297 {
3298 if (INSN_CUID (def_insn) < INSN_CUID (insn))
ac7c5af5 3299 {
7506f491
DE
3300 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3301 return 1;
c4c81601 3302 else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
7506f491
DE
3303 reg = XEXP (PATTERN (def_insn), 0);
3304 else if (GET_CODE (PATTERN (def_insn)) == SET)
3305 reg = SET_DEST (PATTERN (def_insn));
3306 else
3307 abort ();
c4c81601 3308
7506f491
DE
3309 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3310 }
3311 else
3312 return 0;
3313 }
3314
3315 return 0;
3316}
3317
cc2902df 3318/* Return nonzero if *ADDR_THIS_REG can only have one value at INSN. The
c4c81601
RK
3319 value returned is the number of definitions that reach INSN. Returning a
3320 value of zero means that [maybe] more than one definition reaches INSN and
3321 the caller can't perform whatever optimization it is attempting. I.e. it is
3322 always safe to return zero. */
7506f491
DE
3323
3324static int
1d088dee 3325can_disregard_other_sets (struct reg_set **addr_this_reg, rtx insn, int for_combine)
7506f491
DE
3326{
3327 int number_of_reaching_defs = 0;
c4c81601 3328 struct reg_set *this_reg;
7506f491 3329
c4c81601
RK
3330 for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
3331 if (def_reaches_here_p (insn, this_reg->insn))
3332 {
3333 number_of_reaching_defs++;
3334 /* Ignore parallels for now. */
3335 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3336 return 0;
3337
3338 if (!for_combine
3339 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3340 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3341 SET_SRC (PATTERN (insn)))))
3342 /* A setting of the reg to a different value reaches INSN. */
3343 return 0;
3344
3345 if (number_of_reaching_defs > 1)
3346 {
3347 /* If in this setting the value the register is being set to is
3348 equal to the previous value the register was set to, and this
3349 setting reaches the insn we are trying to do the substitution
3350 on, then we are OK. */
3351 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
7506f491 3352 return 0;
c4c81601
RK
3353 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3354 SET_SRC (PATTERN (insn))))
3355 return 0;
3356 }
7506f491 3357
589005ff 3358 *addr_this_reg = this_reg;
c4c81601 3359 }
7506f491
DE
3360
3361 return number_of_reaching_defs;
3362}
3363
3364/* Expression computed by insn is available and the substitution is legal,
3365 so try to perform the substitution.
3366
cc2902df 3367 The result is nonzero if any changes were made. */
7506f491
DE
3368
3369static int
1d088dee 3370handle_avail_expr (rtx insn, struct expr *expr)
7506f491 3371{
0631e0bf 3372 rtx pat, insn_computes_expr, expr_set;
7506f491
DE
3373 rtx to;
3374 struct reg_set *this_reg;
3375 int found_setting, use_src;
3376 int changed = 0;
3377
3378 /* We only handle the case where one computation of the expression
3379 reaches this instruction. */
3380 insn_computes_expr = computing_insn (expr, insn);
3381 if (insn_computes_expr == NULL)
3382 return 0;
0631e0bf 3383 expr_set = single_set (insn_computes_expr);
e5396f90
RE
3384 /* The set might be in a parallel with multiple sets; we could
3385 probably handle that, but there's currently no easy way to find
3386 the relevant sub-expression. */
0631e0bf 3387 if (!expr_set)
e5396f90 3388 return 0;
7506f491
DE
3389
3390 found_setting = 0;
3391 use_src = 0;
3392
3393 /* At this point we know only one computation of EXPR outside of this
3394 block reaches this insn. Now try to find a register that the
3395 expression is computed into. */
0631e0bf 3396 if (GET_CODE (SET_SRC (expr_set)) == REG)
7506f491
DE
3397 {
3398 /* This is the case when the available expression that reaches
3399 here has already been handled as an available expression. */
770ae6cc 3400 unsigned int regnum_for_replacing
0631e0bf 3401 = REGNO (SET_SRC (expr_set));
c4c81601 3402
7506f491
DE
3403 /* If the register was created by GCSE we can't use `reg_set_table';
3404 however, we know it's set only once. */
3405 if (regnum_for_replacing >= max_gcse_regno
3406 /* If the register the expression is computed into is set only once,
3407 or only one set reaches this insn, we can use it. */
3408 || (((this_reg = reg_set_table[regnum_for_replacing]),
3409 this_reg->next == NULL)
3410 || can_disregard_other_sets (&this_reg, insn, 0)))
8e42ace1
KH
3411 {
3412 use_src = 1;
3413 found_setting = 1;
3414 }
7506f491
DE
3415 }
3416
3417 if (!found_setting)
3418 {
770ae6cc 3419 unsigned int regnum_for_replacing
0631e0bf 3420 = REGNO (SET_DEST (expr_set));
c4c81601 3421
7506f491
DE
3422 /* This shouldn't happen. */
3423 if (regnum_for_replacing >= max_gcse_regno)
3424 abort ();
c4c81601 3425
7506f491 3426 this_reg = reg_set_table[regnum_for_replacing];
c4c81601 3427
7506f491
DE
3428 /* If the register the expression is computed into is set only once,
3429 or only one set reaches this insn, use it. */
3430 if (this_reg->next == NULL
3431 || can_disregard_other_sets (&this_reg, insn, 0))
3432 found_setting = 1;
3433 }
3434
3435 if (found_setting)
3436 {
3437 pat = PATTERN (insn);
3438 if (use_src)
0631e0bf 3439 to = SET_SRC (expr_set);
7506f491 3440 else
0631e0bf 3441 to = SET_DEST (expr_set);
7506f491
DE
3442 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3443
3444 /* We should be able to ignore the return code from validate_change but
3445 to play it safe we check. */
3446 if (changed)
3447 {
3448 gcse_subst_count++;
3449 if (gcse_file != NULL)
3450 {
c4c81601
RK
3451 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
3452 INSN_UID (insn));
3453 fprintf (gcse_file, " reg %d %s insn %d\n",
3454 REGNO (to), use_src ? "from" : "set in",
7506f491
DE
3455 INSN_UID (insn_computes_expr));
3456 }
7506f491
DE
3457 }
3458 }
c4c81601 3459
7506f491
DE
3460 /* The register that the expr is computed into is set more than once. */
3461 else if (1 /*expensive_op(this_pattern->op) && do_expensive_gcse)*/)
3462 {
3463 /* Insert an insn after INSNX that copies the reg REGB set in INSNX
3464 into a new pseudo register; call this new register REGN.
3465 From the insn after INSNX until the end of the basic block, or
3466 until REGB is set again, replace all uses of REGB with REGN. */
3467 rtx new_insn;
3468
0631e0bf 3469 to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));
7506f491
DE
3470
3471 /* Generate the new insn. */
3472 /* ??? If the change fails, we return 0, even though we created
3473 an insn. I think this is ok. */
9e6a5703
JC
3474 new_insn
3475 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
0631e0bf 3476 SET_DEST (expr_set)),
c4c81601
RK
3477 insn_computes_expr);
3478
7506f491
DE
3479 /* Keep register set table up to date. */
3480 record_one_set (REGNO (to), new_insn);
3481
3482 gcse_create_count++;
3483 if (gcse_file != NULL)
ac7c5af5 3484 {
c4c81601 3485 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
7506f491 3486 INSN_UID (NEXT_INSN (insn_computes_expr)),
c4c81601
RK
3487 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
3488 fprintf (gcse_file, ", computed in insn %d,\n",
7506f491 3489 INSN_UID (insn_computes_expr));
c4c81601
RK
3490 fprintf (gcse_file, " into newly allocated reg %d\n",
3491 REGNO (to));
ac7c5af5 3492 }
7506f491
DE
3493
3494 pat = PATTERN (insn);
3495
3496 /* Do register replacement for INSN. */
3497 changed = validate_change (insn, &SET_SRC (pat),
c4c81601
RK
3498 SET_DEST (PATTERN
3499 (NEXT_INSN (insn_computes_expr))),
7506f491
DE
3500 0);
3501
3502 /* We should be able to ignore the return code from validate_change but
3503 to play it safe we check. */
3504 if (changed)
3505 {
3506 gcse_subst_count++;
3507 if (gcse_file != NULL)
3508 {
c4c81601
RK
3509 fprintf (gcse_file,
3510 "GCSE: Replacing the source in insn %d with reg %d ",
7506f491 3511 INSN_UID (insn),
c4c81601
RK
3512 REGNO (SET_DEST (PATTERN (NEXT_INSN
3513 (insn_computes_expr)))));
3514 fprintf (gcse_file, "set in insn %d\n",
589005ff 3515 INSN_UID (insn_computes_expr));
7506f491 3516 }
7506f491
DE
3517 }
3518 }
3519
3520 return changed;
3521}
3522
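/* Illustrative aside, not part of the original gcse.c: the two substitution
   cases handle_avail_expr above distinguishes, sketched at the source level
   with hypothetical variables.  */
#if 0
  /* Case 1: the register holding the expression is set only once (or only
     one set reaches the use), so the redundant computation is rewritten
     to read it directly:  */
  x = a + b;                  /* insn_computes_expr           */
  y = a + b;                  /* becomes  y = x               */

  /* Case 2: that register is set again before the redundant computation,
     so a copy into a fresh pseudo N is emitted right after the computing
     insn and the use is rewritten to read N:  */
  x = a + b;                  /* insn_computes_expr           */
  n = x;                      /* new insn created by GCSE     */
  x = 0;                      /* x no longer holds a + b      */
  y = a + b;                  /* becomes  y = n               */
#endif
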
c4c81601
RK
3523/* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3524 the dataflow analysis has been done.
7506f491 3525
cc2902df 3526 The result is nonzero if a change was made. */
7506f491
DE
3527
3528static int
1d088dee 3529classic_gcse (void)
7506f491 3530{
e0082a72 3531 int changed;
7506f491 3532 rtx insn;
e0082a72 3533 basic_block bb;
7506f491
DE
3534
3535 /* Note we start at block 1. */
3536
e0082a72
ZD
3537 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3538 return 0;
3539
7506f491 3540 changed = 0;
e0082a72 3541 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
7506f491
DE
3542 {
3543 /* Reset tables used to keep track of what's still valid [since the
3544 start of the block]. */
3545 reset_opr_set_tables ();
3546
a813c111
SB
3547 for (insn = BB_HEAD (bb);
3548 insn != NULL && insn != NEXT_INSN (BB_END (bb));
7506f491
DE
3549 insn = NEXT_INSN (insn))
3550 {
3551 /* Is insn of form (set (pseudo-reg) ...)? */
7506f491
DE
3552 if (GET_CODE (insn) == INSN
3553 && GET_CODE (PATTERN (insn)) == SET
3554 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3555 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3556 {
3557 rtx pat = PATTERN (insn);
3558 rtx src = SET_SRC (pat);
3559 struct expr *expr;
3560
3561 if (want_to_gcse_p (src)
3562 /* Is the expression recorded? */
02280659 3563 && ((expr = lookup_expr (src, &expr_hash_table)) != NULL)
7506f491
DE
3564 /* Is the expression available [at the start of the
3565 block]? */
e0082a72 3566 && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
7506f491
DE
3567 /* Are the operands unchanged since the start of the
3568 block? */
3569 && oprs_not_set_p (src, insn))
3570 changed |= handle_avail_expr (insn, expr);
3571 }
3572
3573 /* Keep track of everything modified by this insn. */
3574 /* ??? Need to be careful w.r.t. mods done to INSN. */
2c3c49de 3575 if (INSN_P (insn))
7506f491 3576 mark_oprs_set (insn);
ac7c5af5 3577 }
7506f491
DE
3578 }
3579
3580 return changed;
3581}
3582
3583/* Top level routine to perform one classic GCSE pass.
3584
cc2902df 3585 Return nonzero if a change was made. */
7506f491
DE
3586
3587static int
1d088dee 3588one_classic_gcse_pass (int pass)
7506f491
DE
3589{
3590 int changed = 0;
3591
3592 gcse_subst_count = 0;
3593 gcse_create_count = 0;
3594
02280659 3595 alloc_hash_table (max_cuid, &expr_hash_table, 0);
d55bc081 3596 alloc_rd_mem (last_basic_block, max_cuid);
02280659 3597 compute_hash_table (&expr_hash_table);
7506f491 3598 if (gcse_file)
02280659 3599 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
c4c81601 3600
02280659 3601 if (expr_hash_table.n_elems > 0)
7506f491
DE
3602 {
3603 compute_kill_rd ();
3604 compute_rd ();
02280659
ZD
3605 alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems);
3606 compute_ae_gen (&expr_hash_table);
3607 compute_ae_kill (ae_gen, ae_kill, &expr_hash_table);
bd0eaec2 3608 compute_available (ae_gen, ae_kill, ae_out, ae_in);
7506f491
DE
3609 changed = classic_gcse ();
3610 free_avail_expr_mem ();
3611 }
c4c81601 3612
7506f491 3613 free_rd_mem ();
02280659 3614 free_hash_table (&expr_hash_table);
7506f491
DE
3615
3616 if (gcse_file)
3617 {
3618 fprintf (gcse_file, "\n");
c4c81601 3619 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
faed5cc3 3620 current_function_name (), pass, bytes_used, gcse_subst_count);
c4c81601 3621 fprintf (gcse_file, "%d insns created\n", gcse_create_count);
7506f491
DE
3622 }
3623
3624 return changed;
3625}
3626\f
3627/* Compute copy/constant propagation working variables. */
3628
3629/* Local properties of assignments. */
7506f491
DE
3630static sbitmap *cprop_pavloc;
3631static sbitmap *cprop_absaltered;
3632
3633/* Global properties of assignments (computed from the local properties). */
7506f491
DE
3634static sbitmap *cprop_avin;
3635static sbitmap *cprop_avout;
3636
c4c81601
RK
3637/* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
3638 basic blocks. N_SETS is the number of sets. */
7506f491
DE
3639
3640static void
1d088dee 3641alloc_cprop_mem (int n_blocks, int n_sets)
7506f491
DE
3642{
3643 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3644 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3645
3646 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3647 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3648}
3649
3650/* Free vars used by copy/const propagation. */
3651
3652static void
1d088dee 3653free_cprop_mem (void)
7506f491 3654{
5a660bff
DB
3655 sbitmap_vector_free (cprop_pavloc);
3656 sbitmap_vector_free (cprop_absaltered);
3657 sbitmap_vector_free (cprop_avin);
3658 sbitmap_vector_free (cprop_avout);
7506f491
DE
3659}
3660
c4c81601
RK
3661/* For each block, compute whether X is transparent. X is either an
3662 expression or an assignment [though we don't care which, for this context
3663 an assignment is treated as an expression]. For each block where an
3664 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
3665 bit in BMAP. */
7506f491
DE
3666
3667static void
1d088dee 3668compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
7506f491 3669{
e0082a72
ZD
3670 int i, j;
3671 basic_block bb;
7506f491 3672 enum rtx_code code;
c4c81601 3673 reg_set *r;
6f7d635c 3674 const char *fmt;
7506f491 3675
c4c81601
RK
3676 /* repeat is used to turn tail-recursion into iteration since GCC
3677 can't do it when there's no return value. */
7506f491
DE
3678 repeat:
3679
3680 if (x == 0)
3681 return;
3682
3683 code = GET_CODE (x);
3684 switch (code)
3685 {
3686 case REG:
c4c81601
RK
3687 if (set_p)
3688 {
3689 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3690 {
e0082a72
ZD
3691 FOR_EACH_BB (bb)
3692 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3693 SET_BIT (bmap[bb->index], indx);
c4c81601
RK
3694 }
3695 else
3696 {
3697 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3698 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3699 }
3700 }
3701 else
3702 {
3703 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3704 {
e0082a72
ZD
3705 FOR_EACH_BB (bb)
3706 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3707 RESET_BIT (bmap[bb->index], indx);
c4c81601
RK
3708 }
3709 else
3710 {
3711 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3712 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3713 }
3714 }
7506f491 3715
c4c81601 3716 return;
7506f491
DE
3717
3718 case MEM:
e0082a72 3719 FOR_EACH_BB (bb)
a13d4ebf 3720 {
e0082a72 3721 rtx list_entry = canon_modify_mem_list[bb->index];
a13d4ebf
AM
3722
3723 while (list_entry)
3724 {
3725 rtx dest, dest_addr;
3726
3727 if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
3728 {
3729 if (set_p)
e0082a72 3730 SET_BIT (bmap[bb->index], indx);
a13d4ebf 3731 else
e0082a72 3732 RESET_BIT (bmap[bb->index], indx);
a13d4ebf
AM
3733 break;
3734 }
3735 /* LIST_ENTRY must be an INSN of some kind that sets memory.
3736 Examine each hunk of memory that is modified. */
3737
3738 dest = XEXP (list_entry, 0);
3739 list_entry = XEXP (list_entry, 1);
3740 dest_addr = XEXP (list_entry, 0);
589005ff 3741
a13d4ebf
AM
3742 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
3743 x, rtx_addr_varies_p))
3744 {
3745 if (set_p)
e0082a72 3746 SET_BIT (bmap[bb->index], indx);
a13d4ebf 3747 else
e0082a72 3748 RESET_BIT (bmap[bb->index], indx);
a13d4ebf
AM
3749 break;
3750 }
3751 list_entry = XEXP (list_entry, 1);
3752 }
3753 }
c4c81601 3754
7506f491
DE
3755 x = XEXP (x, 0);
3756 goto repeat;
3757
3758 case PC:
3759 case CC0: /*FIXME*/
3760 case CONST:
3761 case CONST_INT:
3762 case CONST_DOUBLE:
69ef87e2 3763 case CONST_VECTOR:
7506f491
DE
3764 case SYMBOL_REF:
3765 case LABEL_REF:
3766 case ADDR_VEC:
3767 case ADDR_DIFF_VEC:
3768 return;
3769
3770 default:
3771 break;
3772 }
3773
c4c81601 3774 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
3775 {
3776 if (fmt[i] == 'e')
3777 {
7506f491
DE
3778 /* If we are about to do the last recursive call
3779 needed at this level, change it into iteration.
3780 This function is called enough to be worth it. */
3781 if (i == 0)
3782 {
c4c81601 3783 x = XEXP (x, i);
7506f491
DE
3784 goto repeat;
3785 }
c4c81601
RK
3786
3787 compute_transp (XEXP (x, i), indx, bmap, set_p);
7506f491
DE
3788 }
3789 else if (fmt[i] == 'E')
c4c81601
RK
3790 for (j = 0; j < XVECLEN (x, i); j++)
3791 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
7506f491
DE
3792 }
3793}
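/* Illustrative aside, not part of the original gcse.c: a minimal,
   self-contained sketch of the `repeat:' pattern used by compute_transp
   above (and by find_used_regs below) -- recurse on every operand except
   the last, then iterate on the last one.  The toy tree type here is
   hypothetical.  */
#if 0
struct toy_node { int marked; struct toy_node *left, *right; };

static void
toy_mark (struct toy_node *n)
{
 repeat:
  if (n == 0)
    return;
  n->marked = 1;
  toy_mark (n->left);    /* genuine recursion for all but the last child */
  n = n->right;          /* last child: iterate instead of recursing     */
  goto repeat;
}
#endif
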
3794
7506f491
DE
3795/* Top level routine to do the dataflow analysis needed by copy/const
3796 propagation. */
3797
3798static void
1d088dee 3799compute_cprop_data (void)
7506f491 3800{
02280659 3801 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
ce724250
JL
3802 compute_available (cprop_pavloc, cprop_absaltered,
3803 cprop_avout, cprop_avin);
7506f491
DE
3804}
3805\f
3806/* Copy/constant propagation. */
3807
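/* Illustrative aside, not part of the original gcse.c: the effect of the
   copy/constant propagation code below, sketched at the source level with
   hypothetical variables.  */
#if 0
  a = 5;          /* the set recorded in the SET hash table             */
  b = a;          /* constant propagation:  b = 5                       */
  c = b + 1;      /* becomes  c = 5 + 1, which simplify_rtx folds to 6  */
#endif
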
7506f491
DE
3808/* Maximum number of register uses in an insn that we handle. */
3809#define MAX_USES 8
3810
3811/* Table of uses found in an insn.
3812 Allocated statically to avoid alloc/free complexity and overhead. */
3813static struct reg_use reg_use_table[MAX_USES];
3814
3815/* Index into `reg_use_table' while building it. */
3816static int reg_use_count;
3817
c4c81601
RK
3818/* Set up a list of register numbers used in INSN. The found uses are stored
3819 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
3820 and contains the number of uses in the table upon exit.
7506f491 3821
c4c81601
RK
3822 ??? If a register appears multiple times we will record it multiple times.
3823 This doesn't hurt anything but it will slow things down. */
7506f491
DE
3824
3825static void
1d088dee 3826find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
7506f491 3827{
c4c81601 3828 int i, j;
7506f491 3829 enum rtx_code code;
6f7d635c 3830 const char *fmt;
9e71c818 3831 rtx x = *xptr;
7506f491 3832
c4c81601
RK
3833 /* repeat is used to turn tail-recursion into iteration since GCC
3834 can't do it when there's no return value. */
7506f491 3835 repeat:
7506f491
DE
3836 if (x == 0)
3837 return;
3838
3839 code = GET_CODE (x);
9e71c818 3840 if (REG_P (x))
7506f491 3841 {
7506f491
DE
3842 if (reg_use_count == MAX_USES)
3843 return;
c4c81601 3844
7506f491
DE
3845 reg_use_table[reg_use_count].reg_rtx = x;
3846 reg_use_count++;
7506f491
DE
3847 }
3848
3849 /* Recursively scan the operands of this expression. */
3850
c4c81601 3851 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
3852 {
3853 if (fmt[i] == 'e')
3854 {
3855 /* If we are about to do the last recursive call
3856 needed at this level, change it into iteration.
3857 This function is called enough to be worth it. */
3858 if (i == 0)
3859 {
3860 x = XEXP (x, 0);
3861 goto repeat;
3862 }
c4c81601 3863
9e71c818 3864 find_used_regs (&XEXP (x, i), data);
7506f491
DE
3865 }
3866 else if (fmt[i] == 'E')
c4c81601 3867 for (j = 0; j < XVECLEN (x, i); j++)
9e71c818 3868 find_used_regs (&XVECEXP (x, i, j), data);
7506f491
DE
3869 }
3870}
3871
3872/* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
cc2902df 3873 Returns nonzero if successful. */
7506f491
DE
3874
3875static int
1d088dee 3876try_replace_reg (rtx from, rtx to, rtx insn)
7506f491 3877{
172890a2 3878 rtx note = find_reg_equal_equiv_note (insn);
fb0c0a12 3879 rtx src = 0;
172890a2
RK
3880 int success = 0;
3881 rtx set = single_set (insn);
833fc3ad 3882
2b773ee2
JH
3883 validate_replace_src_group (from, to, insn);
3884 if (num_changes_pending () && apply_change_group ())
3885 success = 1;
9e71c818 3886
9feff114
JDA
3887 /* Try to simplify SET_SRC if we have substituted a constant. */
3888 if (success && set && CONSTANT_P (to))
3889 {
3890 src = simplify_rtx (SET_SRC (set));
3891
3892 if (src)
3893 validate_change (insn, &SET_SRC (set), src, 0);
3894 }
3895
ed8395a0
JZ
3896 /* If there is already a NOTE, update the expression in it with our
3897 replacement. */
3898 if (note != 0)
3899 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
3900
f305679f 3901 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
833fc3ad 3902 {
f305679f
JH
3903 /* If above failed and this is a single set, try to simplify the source of
3904 the set given our substitution. We could perhaps try this for multiple
3905 SETs, but it probably won't buy us anything. */
172890a2
RK
3906 src = simplify_replace_rtx (SET_SRC (set), from, to);
3907
9e71c818
JH
3908 if (!rtx_equal_p (src, SET_SRC (set))
3909 && validate_change (insn, &SET_SRC (set), src, 0))
172890a2 3910 success = 1;
833fc3ad 3911
bbd288a4
FS
3912 /* If we've failed to do replacement, have a single SET, don't already
3913 have a note, and have no special SET, add a REG_EQUAL note to not
3914 lose information. */
3915 if (!success && note == 0 && set != 0
3916 && GET_CODE (XEXP (set, 0)) != ZERO_EXTRACT
3917 && GET_CODE (XEXP (set, 0)) != SIGN_EXTRACT)
f305679f
JH
3918 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
3919 }
e251e2a2 3920
172890a2
RK
3921 /* A REG_EQUAL note may get simplified into a register.
3922 We don't allow that; remove such a note. This ought
fbe5a4a6 3923 not to happen, because previous code ought to have synthesized a
172890a2
RK
3924 reg-reg move, but be on the safe side. */
3925 if (note && REG_P (XEXP (note, 0)))
3926 remove_note (insn, note);
833fc3ad 3927
833fc3ad
JH
3928 return success;
3929}
c4c81601
RK
3930
3931/* Find a set of register REGNO that is available on entry to INSN's block.
3932 Returns NULL if no such set is found. */
7506f491
DE
3933
3934static struct expr *
1d088dee 3935find_avail_set (int regno, rtx insn)
7506f491 3936{
cafba495
BS
3937 /* SET1 contains the last set found that can be returned to the caller for
3938 use in a substitution. */
3939 struct expr *set1 = 0;
589005ff 3940
cafba495
BS
3941 /* Loops are not possible here. To get a loop we would need two sets
3942 available at the start of the block containing INSN. I.e. we would
3943 need two sets like this available at the start of the block:
3944
3945 (set (reg X) (reg Y))
3946 (set (reg Y) (reg X))
3947
3948 This cannot happen since the set of (reg Y) would have killed the
3949 set of (reg X) making it unavailable at the start of this block. */
3950 while (1)
8e42ace1 3951 {
cafba495 3952 rtx src;
ceda50e9 3953 struct expr *set = lookup_set (regno, &set_hash_table);
cafba495
BS
3954
3955 /* Find a set that is available at the start of the block
3956 which contains INSN. */
3957 while (set)
3958 {
3959 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
3960 break;
3961 set = next_set (regno, set);
3962 }
7506f491 3963
cafba495
BS
3964 /* If no available set was found we've reached the end of the
3965 (possibly empty) copy chain. */
3966 if (set == 0)
589005ff 3967 break;
cafba495
BS
3968
3969 if (GET_CODE (set->expr) != SET)
3970 abort ();
3971
3972 src = SET_SRC (set->expr);
3973
3974 /* We know the set is available.
3975 Now check that SRC is ANTLOC (i.e. none of the source operands
589005ff 3976 have changed since the start of the block).
cafba495
BS
3977
3978 If the source operand changed, we may still use it for the next
3979 iteration of this loop, but we may not use it for substitutions. */
c4c81601 3980
6b2d1c9e 3981 if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
cafba495
BS
3982 set1 = set;
3983
3984 /* If the source of the set is anything except a register, then
3985 we have reached the end of the copy chain. */
3986 if (GET_CODE (src) != REG)
7506f491 3987 break;
7506f491 3988
cafba495
BS
3989 /* Follow the copy chain, i.e. start another iteration of the loop
3990 and see if we have an available copy into SRC. */
3991 regno = REGNO (src);
8e42ace1 3992 }
cafba495
BS
3993
3994 /* SET1 holds the last set that was available and anticipatable at
3995 INSN. */
3996 return set1;
7506f491
DE
3997}
3998
abd535b6 3999/* Subroutine of cprop_insn that tries to propagate constants into
0e3f0221 4000 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
fbe5a4a6 4001 it is the instruction that immediately precedes JUMP, and must be a
818b6b7f 4002 single SET of a register. FROM is what we will try to replace,
0e3f0221 4003 SRC is the constant we will try to substitute for it. Returns nonzero
589005ff 4004 if a change was made. */
c4c81601 4005
abd535b6 4006static int
1d088dee 4007cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
abd535b6 4008{
bc6688b4 4009 rtx new, set_src, note_src;
0e3f0221 4010 rtx set = pc_set (jump);
bc6688b4 4011 rtx note = find_reg_equal_equiv_note (jump);
0e3f0221 4012
bc6688b4
RS
4013 if (note)
4014 {
4015 note_src = XEXP (note, 0);
4016 if (GET_CODE (note_src) == EXPR_LIST)
4017 note_src = NULL_RTX;
4018 }
4019 else
    note_src = NULL_RTX;
4020
4021 /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
4022 set_src = note_src ? note_src : SET_SRC (set);
4023
4024 /* First substitute the SETCC condition into the JUMP instruction,
4025 then substitute that given values into this expanded JUMP. */
4026 if (setcc != NULL_RTX
48ddd46c
JH
4027 && !modified_between_p (from, setcc, jump)
4028 && !modified_between_p (src, setcc, jump))
b2f02503 4029 {
bc6688b4 4030 rtx setcc_src;
b2f02503 4031 rtx setcc_set = single_set (setcc);
bc6688b4
RS
4032 rtx setcc_note = find_reg_equal_equiv_note (setcc);
4033 setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
4034 ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
4035 set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
4036 setcc_src);
b2f02503 4037 }
0e3f0221 4038 else
bc6688b4 4039 setcc = NULL_RTX;
0e3f0221 4040
bc6688b4 4041 new = simplify_replace_rtx (set_src, from, src);
abd535b6 4042
bc6688b4
RS
4043 /* If no simplification can be made, then try the next register. */
4044 if (rtx_equal_p (new, SET_SRC (set)))
9e48c409 4045 return 0;
589005ff 4046
7d5ab30e 4047 /* If this is now a no-op, delete it; otherwise this must be a valid insn. */
172890a2 4048 if (new == pc_rtx)
0e3f0221 4049 delete_insn (jump);
7d5ab30e 4050 else
abd535b6 4051 {
48ddd46c
JH
4052 /* Ensure that the value computed inside the jump insn is equivalent
4053 to the one computed by SETCC. */
bc6688b4 4054 if (setcc && modified_in_p (new, setcc))
48ddd46c 4055 return 0;
0e3f0221 4056 if (! validate_change (jump, &SET_SRC (set), new, 0))
bc6688b4
RS
4057 {
4058 /* When (some) constants are not valid in a comparison, and there
4059 are two registers to be replaced by constants before the entire
4060 comparison can be folded into a constant, we need to keep
4061 intermediate information in REG_EQUAL notes. For targets with
4062 separate compare insns, such notes are added by try_replace_reg.
4063 When we have a combined compare-and-branch instruction, however,
4064 we need to attach a note to the branch itself to make this
4065 optimization work. */
4066
4067 if (!rtx_equal_p (new, note_src))
4068 set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
4069 return 0;
4070 }
4071
4072 /* Remove REG_EQUAL note after simplification. */
4073 if (note_src)
4074 remove_note (jump, note);
abd535b6 4075
7d5ab30e
JH
4076 /* If this has turned into an unconditional jump,
4077 then put a barrier after it so that the unreachable
4078 code will be deleted. */
4079 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
0e3f0221 4080 emit_barrier_after (jump);
7d5ab30e 4081 }
abd535b6 4082
0e3f0221
RS
4083#ifdef HAVE_cc0
4084 /* Delete the cc0 setter. */
818b6b7f 4085 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
0e3f0221
RS
4086 delete_insn (setcc);
4087#endif
4088
172890a2 4089 run_jump_opt_after_gcse = 1;
c4c81601 4090
172890a2
RK
4091 const_prop_count++;
4092 if (gcse_file != NULL)
4093 {
4094 fprintf (gcse_file,
818b6b7f 4095 "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
0e3f0221 4096 REGNO (from), INSN_UID (jump));
172890a2
RK
4097 print_rtl (gcse_file, src);
4098 fprintf (gcse_file, "\n");
abd535b6 4099 }
0005550b 4100 purge_dead_edges (bb);
172890a2
RK
4101
4102 return 1;
abd535b6
BS
4103}
4104
ae860ff7 4105static bool
1d088dee 4106constprop_register (rtx insn, rtx from, rtx to, int alter_jumps)
ae860ff7
JH
4107{
4108 rtx sset;
4109
4110 /* Check for reg or cc0 setting instructions followed by
4111 conditional branch instructions first. */
4112 if (alter_jumps
4113 && (sset = single_set (insn)) != NULL
244d05fb 4114 && NEXT_INSN (insn)
ae860ff7
JH
4115 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
4116 {
4117 rtx dest = SET_DEST (sset);
4118 if ((REG_P (dest) || CC0_P (dest))
4119 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
4120 return 1;
4121 }
4122
4123 /* Handle normal insns next. */
4124 if (GET_CODE (insn) == INSN
4125 && try_replace_reg (from, to, insn))
4126 return 1;
4127
4128 /* Try to propagate a CONST_INT into a conditional jump.
4129 We're pretty specific about what we will handle in this
4130 code, we can extend this as necessary over time.
4131
4132 Right now the insn in question must look like
4133 (set (pc) (if_then_else ...)) */
4134 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
4135 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
4136 return 0;
4137}
4138
7506f491 4139/* Perform constant and copy propagation on INSN.
cc2902df 4140 The result is nonzero if a change was made. */
7506f491
DE
4141
4142static int
1d088dee 4143cprop_insn (rtx insn, int alter_jumps)
7506f491
DE
4144{
4145 struct reg_use *reg_used;
4146 int changed = 0;
833fc3ad 4147 rtx note;
7506f491 4148
9e71c818 4149 if (!INSN_P (insn))
7506f491
DE
4150 return 0;
4151
4152 reg_use_count = 0;
9e71c818 4153 note_uses (&PATTERN (insn), find_used_regs, NULL);
589005ff 4154
172890a2 4155 note = find_reg_equal_equiv_note (insn);
833fc3ad 4156
dc297297 4157 /* We may win even when propagating constants into notes. */
833fc3ad 4158 if (note)
9e71c818 4159 find_used_regs (&XEXP (note, 0), NULL);
7506f491 4160
c4c81601
RK
4161 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4162 reg_used++, reg_use_count--)
7506f491 4163 {
770ae6cc 4164 unsigned int regno = REGNO (reg_used->reg_rtx);
7506f491
DE
4165 rtx pat, src;
4166 struct expr *set;
7506f491
DE
4167
4168 /* Ignore registers created by GCSE.
dc297297 4169 We do this because ... */
7506f491
DE
4170 if (regno >= max_gcse_regno)
4171 continue;
4172
4173 /* If the register has already been set in this block, there's
4174 nothing we can do. */
4175 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
4176 continue;
4177
4178 /* Find an assignment that sets reg_used and is available
4179 at the start of the block. */
4180 set = find_avail_set (regno, insn);
4181 if (! set)
4182 continue;
589005ff 4183
7506f491
DE
4184 pat = set->expr;
4185 /* ??? We might be able to handle PARALLELs. Later. */
4186 if (GET_CODE (pat) != SET)
4187 abort ();
c4c81601 4188
7506f491
DE
4189 src = SET_SRC (pat);
4190
e78d9500 4191 /* Constant propagation. */
6b2d1c9e 4192 if (gcse_constant_p (src))
7506f491 4193 {
ae860ff7 4194 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
7506f491
DE
4195 {
4196 changed = 1;
4197 const_prop_count++;
4198 if (gcse_file != NULL)
4199 {
ae860ff7
JH
4200 fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
4201 fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
e78d9500 4202 print_rtl (gcse_file, src);
7506f491
DE
4203 fprintf (gcse_file, "\n");
4204 }
bc6688b4
RS
4205 if (INSN_DELETED_P (insn))
4206 return 1;
7506f491
DE
4207 }
4208 }
4209 else if (GET_CODE (src) == REG
4210 && REGNO (src) >= FIRST_PSEUDO_REGISTER
4211 && REGNO (src) != regno)
4212 {
cafba495 4213 if (try_replace_reg (reg_used->reg_rtx, src, insn))
7506f491 4214 {
cafba495
BS
4215 changed = 1;
4216 copy_prop_count++;
4217 if (gcse_file != NULL)
7506f491 4218 {
ae860ff7 4219 fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
c4c81601
RK
4220 regno, INSN_UID (insn));
4221 fprintf (gcse_file, " with reg %d\n", REGNO (src));
7506f491 4222 }
cafba495
BS
4223
4224 /* The original insn setting reg_used may or may not now be
4225 deletable. We leave the deletion to flow. */
4226 /* FIXME: If it turns out that the insn isn't deletable,
4227 then we may have unnecessarily extended register lifetimes
4228 and made things worse. */
7506f491
DE
4229 }
4230 }
4231 }
4232
4233 return changed;
4234}
4235
710ee3ed
RH
4236/* Like find_used_regs, but avoid recording uses that appear in
4237 input-output contexts such as zero_extract or pre_dec. This
4238 restricts the cases we consider to those for which local cprop
4239 can legitimately make replacements. */
4240
4241static void
1d088dee 4242local_cprop_find_used_regs (rtx *xptr, void *data)
710ee3ed
RH
4243{
4244 rtx x = *xptr;
4245
4246 if (x == 0)
4247 return;
4248
4249 switch (GET_CODE (x))
4250 {
4251 case ZERO_EXTRACT:
4252 case SIGN_EXTRACT:
4253 case STRICT_LOW_PART:
4254 return;
4255
4256 case PRE_DEC:
4257 case PRE_INC:
4258 case POST_DEC:
4259 case POST_INC:
4260 case PRE_MODIFY:
4261 case POST_MODIFY:
4262 /* Can only legitimately appear this early in the context of
4263 stack pushes for function arguments, but handle all of the
4264 codes nonetheless. */
4265 return;
4266
4267 case SUBREG:
4268 /* Setting a subreg of a register larger than word_mode leaves
4269 the non-written words unchanged. */
4270 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
4271 return;
4272 break;
4273
4274 default:
4275 break;
4276 }
4277
4278 find_used_regs (xptr, data);
4279}
1d088dee 4280
8ba46434
R
4281/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4282 their REG_EQUAL notes need updating. */
e197b6fc 4283
ae860ff7 4284static bool
1d088dee 4285do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp)
ae860ff7
JH
4286{
4287 rtx newreg = NULL, newcnst = NULL;
4288
e197b6fc
RH
4289 /* Rule out USE instructions and ASM statements as we don't want to
4290 change the hard registers mentioned. */
ae860ff7
JH
4291 if (GET_CODE (x) == REG
4292 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
e197b6fc
RH
4293 || (GET_CODE (PATTERN (insn)) != USE
4294 && asm_noperands (PATTERN (insn)) < 0)))
ae860ff7
JH
4295 {
4296 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
4297 struct elt_loc_list *l;
4298
4299 if (!val)
4300 return false;
4301 for (l = val->locs; l; l = l->next)
4302 {
4303 rtx this_rtx = l->loc;
46690369
JH
4304 rtx note;
4305
9635cfad
JH
4306 if (l->in_libcall)
4307 continue;
4308
6b2d1c9e 4309 if (gcse_constant_p (this_rtx))
ae860ff7 4310 newcnst = this_rtx;
46690369
JH
4311 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
4312 /* Don't copy propagate if it has an attached REG_EQUIV note.
4313 At this point only function parameters should have
4314 REG_EQUIV notes, and if the argument slot is used somewhere
4315 explicitly, it means the address of the parameter has been taken,
4316 so we should not extend the lifetime of the pseudo. */
4317 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
4318 || GET_CODE (XEXP (note, 0)) != MEM))
ae860ff7
JH
4319 newreg = this_rtx;
4320 }
4321 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
4322 {
8ba46434 4323 /* If we find a case where we can't fix the retval REG_EQUAL notes to
fbe5a4a6 4324 match the new register, we either have to abandon this replacement,
8ba46434
R
4325 fix delete_trivially_dead_insns to preserve the setting insn,
4326 or make it delete the REG_EQUAL note, and fix up all passes that
4327 require the REG_EQUAL note there. */
4328 if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
4329 abort ();
ae860ff7
JH
4330 if (gcse_file != NULL)
4331 {
4332 fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
4333 REGNO (x));
4334 fprintf (gcse_file, "insn %d with constant ",
4335 INSN_UID (insn));
4336 print_rtl (gcse_file, newcnst);
4337 fprintf (gcse_file, "\n");
4338 }
4339 const_prop_count++;
4340 return true;
4341 }
4342 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
4343 {
8ba46434 4344 adjust_libcall_notes (x, newreg, insn, libcall_sp);
ae860ff7
JH
4345 if (gcse_file != NULL)
4346 {
4347 fprintf (gcse_file,
4348 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
4349 REGNO (x), INSN_UID (insn));
4350 fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
4351 }
4352 copy_prop_count++;
4353 return true;
4354 }
4355 }
4356 return false;
4357}
4358
8ba46434
R
4359/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4360 their REG_EQUAL notes need updating to reflect that OLDREG has been
f4e3e618
RH
4361 replaced with NEWVAL in INSN. Return true if all substitutions could
4362 be made. */
8ba46434 4363static bool
1d088dee 4364adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
8ba46434 4365{
f4e3e618 4366 rtx end;
8ba46434
R
4367
4368 while ((end = *libcall_sp++))
4369 {
f4e3e618 4370 rtx note = find_reg_equal_equiv_note (end);
8ba46434
R
4371
4372 if (! note)
4373 continue;
4374
4375 if (REG_P (newval))
4376 {
4377 if (reg_set_between_p (newval, PREV_INSN (insn), end))
4378 {
4379 do
4380 {
4381 note = find_reg_equal_equiv_note (end);
4382 if (! note)
4383 continue;
4384 if (reg_mentioned_p (newval, XEXP (note, 0)))
4385 return false;
4386 }
4387 while ((end = *libcall_sp++));
4388 return true;
4389 }
4390 }
4391 XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
4392 insn = end;
4393 }
4394 return true;
4395}
4396
4397#define MAX_NESTED_LIBCALLS 9
4398
ae860ff7 4399static void
1d088dee 4400local_cprop_pass (int alter_jumps)
ae860ff7
JH
4401{
4402 rtx insn;
4403 struct reg_use *reg_used;
8ba46434 4404 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
1649d92f 4405 bool changed = false;
ae860ff7
JH
4406
4407 cselib_init ();
8ba46434
R
4408 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
4409 *libcall_sp = 0;
ae860ff7
JH
4410 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4411 {
4412 if (INSN_P (insn))
4413 {
8ba46434 4414 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
ae860ff7 4415
8ba46434
R
4416 if (note)
4417 {
4418 if (libcall_sp == libcall_stack)
4419 abort ();
4420 *--libcall_sp = XEXP (note, 0);
4421 }
4422 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
4423 if (note)
4424 libcall_sp++;
4425 note = find_reg_equal_equiv_note (insn);
ae860ff7
JH
4426 do
4427 {
4428 reg_use_count = 0;
710ee3ed 4429 note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
ae860ff7 4430 if (note)
710ee3ed 4431 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
ae860ff7
JH
4432
4433 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4434 reg_used++, reg_use_count--)
8ba46434
R
4435 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
4436 libcall_sp))
1649d92f
JH
4437 {
4438 changed = true;
4439 break;
4440 }
bc6688b4
RS
4441 if (INSN_DELETED_P (insn))
4442 break;
ae860ff7
JH
4443 }
4444 while (reg_use_count);
4445 }
4446 cselib_process_insn (insn);
4447 }
4448 cselib_finish ();
1649d92f
JH
4449 /* Global analysis may get into infinite loops for unreachable blocks. */
4450 if (changed && alter_jumps)
5f0bea72
JH
4451 {
4452 delete_unreachable_blocks ();
4453 free_reg_set_mem ();
4454 alloc_reg_set_mem (max_reg_num ());
4455 compute_sets (get_insns ());
4456 }
ae860ff7
JH
4457}
4458
c4c81601 4459/* Forward propagate copies. This includes copies and constants. Return
cc2902df 4460 nonzero if a change was made. */
7506f491
DE
4461
4462static int
1d088dee 4463cprop (int alter_jumps)
7506f491 4464{
e0082a72
ZD
4465 int changed;
4466 basic_block bb;
7506f491
DE
4467 rtx insn;
4468
4469 /* Note we start at block 1. */
e0082a72
ZD
4470 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4471 {
4472 if (gcse_file != NULL)
4473 fprintf (gcse_file, "\n");
4474 return 0;
4475 }
7506f491
DE
4476
4477 changed = 0;
e0082a72 4478 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
7506f491
DE
4479 {
4480 /* Reset tables used to keep track of what's still valid [since the
4481 start of the block]. */
4482 reset_opr_set_tables ();
4483
a813c111
SB
4484 for (insn = BB_HEAD (bb);
4485 insn != NULL && insn != NEXT_INSN (BB_END (bb));
7506f491 4486 insn = NEXT_INSN (insn))
172890a2
RK
4487 if (INSN_P (insn))
4488 {
ae860ff7 4489 changed |= cprop_insn (insn, alter_jumps);
7506f491 4490
172890a2
RK
4491 /* Keep track of everything modified by this insn. */
4492 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4493 call mark_oprs_set if we turned the insn into a NOTE. */
4494 if (GET_CODE (insn) != NOTE)
4495 mark_oprs_set (insn);
8e42ace1 4496 }
7506f491
DE
4497 }
4498
4499 if (gcse_file != NULL)
4500 fprintf (gcse_file, "\n");
4501
4502 return changed;
4503}
4504
fbef91d8
RS
4505/* Similar to get_condition, only the resulting condition must be
4506 valid at JUMP, instead of at EARLIEST.
4507
4508 This differs from noce_get_condition in ifcvt.c in that we prefer not to
4509 settle for the condition variable in the jump instruction being integral.
4510 We prefer to be able to record the value of a user variable, rather than
4511 the value of a temporary used in a condition. This could be solved by
4512 recording the value of *every* register scanned by canonicalize_condition,
4513 but this would require some code reorganization. */
4514
2fa4a849 4515rtx
1d088dee 4516fis_get_condition (rtx jump)
fbef91d8
RS
4517{
4518 rtx cond, set, tmp, insn, earliest;
4519 bool reverse;
4520
4521 if (! any_condjump_p (jump))
4522 return NULL_RTX;
4523
4524 set = pc_set (jump);
4525 cond = XEXP (SET_SRC (set), 0);
4526
4527 /* If this branches to JUMP_LABEL when the condition is false,
4528 reverse the condition. */
4529 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4530 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
4531
4532 /* Use canonicalize_condition to do the dirty work of manipulating
4533 MODE_CC values and COMPARE rtx codes. */
ec6ec6aa
ZD
4534 tmp = canonicalize_condition (jump, cond, reverse, &earliest, NULL_RTX,
4535 false);
fbef91d8
RS
4536 if (!tmp)
4537 return NULL_RTX;
4538
4539 /* Verify that the given condition is valid at JUMP by virtue of not
4540 having been modified since EARLIEST. */
4541 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4542 if (INSN_P (insn) && modified_in_p (tmp, insn))
4543 break;
4544 if (insn == jump)
4545 return tmp;
4546
4547 /* The condition was modified. See if we can get a partial result
4548 that doesn't follow all the reversals. Perhaps combine can fold
4549 them together later. */
4550 tmp = XEXP (tmp, 0);
4551 if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
4552 return NULL_RTX;
ec6ec6aa
ZD
4553 tmp = canonicalize_condition (jump, cond, reverse, &earliest, tmp,
4554 false);
fbef91d8
RS
4555 if (!tmp)
4556 return NULL_RTX;
4557
4558 /* For sanity's sake, re-validate the new result. */
4559 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4560 if (INSN_P (insn) && modified_in_p (tmp, insn))
4561 return NULL_RTX;
4562
4563 return tmp;
4564}
4565
b0656d8b
JW
4566/* Check the comparison COND to see if we can safely form an implicit set from
4567 it. COND is either an EQ or NE comparison. */
4568
4569static bool
4570implicit_set_cond_p (rtx cond)
4571{
4572 enum machine_mode mode = GET_MODE (XEXP (cond, 0));
4573 rtx cst = XEXP (cond, 1);
4574
4575 /* We can't perform this optimization if either operand might be or might
4576 contain a signed zero. */
4577 if (HONOR_SIGNED_ZEROS (mode))
4578 {
4579 /* It is sufficient to check if CST is or contains a zero. We must
4580 handle float, complex, and vector. If any subpart is a zero, then
4581 the optimization can't be performed. */
4582 /* ??? The complex and vector checks are not implemented yet. We just
4583 always return zero for them. */
4584 if (GET_CODE (cst) == CONST_DOUBLE)
4585 {
4586 REAL_VALUE_TYPE d;
4587 REAL_VALUE_FROM_CONST_DOUBLE (d, cst);
4588 if (REAL_VALUES_EQUAL (d, dconst0))
4589 return 0;
4590 }
4591 else
4592 return 0;
4593 }
4594
4595 return gcse_constant_p (cst);
4596}
4597
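/* Illustrative aside, not part of the original gcse.c: why
   implicit_set_cond_p above rejects comparisons involving signed zeros.
   Under IEEE 754, -0.0 == 0.0 holds, so "x == 0.0" does not determine
   the sign bit of x, and substituting +0.0 for x would change the
   result below from -inf to +inf.  */
#if 0
static double
signed_zero_example (void)
{
  double x = -0.0;
  if (x == 0.0)        /* true: -0.0 compares equal to +0.0    */
    return 1.0 / x;    /* -inf; with x replaced by +0.0, +inf  */
  return 0.0;
}
#endif
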
fbef91d8
RS
4598/* Find the implicit sets of a function. An "implicit set" is a constraint
4599 on the value of a variable, implied by a conditional jump. For example,
4600 following "if (x == 2)", the then branch may be optimized as though the
4601 conditional performed an "explicit set", in this example, "x = 2". This
4602 function records the set patterns that are implicit at the start of each
4603 basic block. */
4604
4605static void
1d088dee 4606find_implicit_sets (void)
fbef91d8
RS
4607{
4608 basic_block bb, dest;
4609 unsigned int count;
4610 rtx cond, new;
4611
4612 count = 0;
4613 FOR_EACH_BB (bb)
a98ebe2e 4614 /* Check for more than one successor. */
fbef91d8
RS
4615 if (bb->succ && bb->succ->succ_next)
4616 {
a813c111 4617 cond = fis_get_condition (BB_END (bb));
fbef91d8
RS
4618
4619 if (cond
4620 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
4621 && GET_CODE (XEXP (cond, 0)) == REG
4622 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
b0656d8b 4623 && implicit_set_cond_p (cond))
fbef91d8
RS
4624 {
4625 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
4626 : FALLTHRU_EDGE (bb)->dest;
4627
4628 if (dest && ! dest->pred->pred_next
4629 && dest != EXIT_BLOCK_PTR)
4630 {
4631 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
4632 XEXP (cond, 1));
4633 implicit_sets[dest->index] = new;
4634 if (gcse_file)
4635 {
4636 fprintf (gcse_file, "Implicit set of reg %d in ",
4637 REGNO (XEXP (cond, 0)));
4638 fprintf (gcse_file, "basic block %d\n", dest->index);
4639 }
4640 count++;
4641 }
4642 }
4643 }
4644
4645 if (gcse_file)
4646 fprintf (gcse_file, "Found %d implicit sets\n", count);
4647}
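/* Illustrative aside, not part of the original gcse.c: the effect of an
   implicit set recorded by find_implicit_sets above, with hypothetical
   variables.  The single-predecessor destination block of the EQ edge
   behaves as though it began with the explicit set "x = 2".  */
#if 0
static int
implicit_set_example (int x)
{
  int y = 0;
  if (x == 2)
    y = x + 1;     /* cprop may rewrite this to  y = 3  */
  return y;
}
#endif
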
4648
7506f491 4649/* Perform one copy/constant propagation pass.
a0134312
RS
4650 PASS is the pass count. If CPROP_JUMPS is true, perform constant
4651 propagation into conditional jumps. If BYPASS_JUMPS is true,
4652 perform conditional jump bypassing optimizations. */
7506f491
DE
4653
4654static int
1d088dee 4655one_cprop_pass (int pass, int cprop_jumps, int bypass_jumps)
7506f491
DE
4656{
4657 int changed = 0;
4658
4659 const_prop_count = 0;
4660 copy_prop_count = 0;
4661
a0134312 4662 local_cprop_pass (cprop_jumps);
ae860ff7 4663
fbef91d8 4664 /* Determine implicit sets. */
703ad42b 4665 implicit_sets = xcalloc (last_basic_block, sizeof (rtx));
fbef91d8
RS
4666 find_implicit_sets ();
4667
02280659
ZD
4668 alloc_hash_table (max_cuid, &set_hash_table, 1);
4669 compute_hash_table (&set_hash_table);
fbef91d8
RS
4670
4671 /* Free implicit_sets before peak usage. */
4672 free (implicit_sets);
4673 implicit_sets = NULL;
4674
7506f491 4675 if (gcse_file)
02280659
ZD
4676 dump_hash_table (gcse_file, "SET", &set_hash_table);
4677 if (set_hash_table.n_elems > 0)
7506f491 4678 {
02280659 4679 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
7506f491 4680 compute_cprop_data ();
a0134312
RS
4681 changed = cprop (cprop_jumps);
4682 if (bypass_jumps)
0e3f0221 4683 changed |= bypass_conditional_jumps ();
7506f491
DE
4684 free_cprop_mem ();
4685 }
c4c81601 4686
02280659 4687 free_hash_table (&set_hash_table);
7506f491
DE
4688
4689 if (gcse_file)
4690 {
c4c81601 4691 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
faed5cc3 4692 current_function_name (), pass, bytes_used);
c4c81601
RK
4693 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4694 const_prop_count, copy_prop_count);
7506f491 4695 }
1649d92f
JH
4696 /* Global analysis may get into infinite loops for unreachable blocks. */
4697 if (changed && cprop_jumps)
4698 delete_unreachable_blocks ();
7506f491
DE
4699
4700 return changed;
4701}
4702\f
0e3f0221
RS
4703/* Bypass conditional jumps. */
4704
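/* Illustrative aside, not part of the original gcse.c: the transformation
   performed by the jump bypassing code below, with a hypothetical helper
   compute ().  When an incoming edge proves the condition tested by a
   block containing only a conditional jump, that edge is redirected past
   the jump.  */
#if 0
static int
bypass_example (int p)
{
  int x, y;
  if (p)
    x = 0;              /* this predecessor proves x == 0, so its edge  */
  else                  /* into the comparison block is redirected      */
    x = compute ();     /* straight to the  y = 1  arm below            */
  if (x == 0)
    y = 1;
  else
    y = 2;
  return y;
}
#endif
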
7821bfc7
RS
4705/* The value of last_basic_block at the beginning of the jump_bypass
4706 pass. The use of redirect_edge_and_branch_force may introduce new
4707 basic blocks, but the data flow analysis is only valid for basic
4708 block indices less than bypass_last_basic_block. */
4709
4710static int bypass_last_basic_block;
4711
0e3f0221
RS
4712/* Find a set of REGNO to a constant that is available at the end of basic
4713 block BB. Returns NULL if no such set is found. Based heavily upon
4714 find_avail_set. */
4715
4716static struct expr *
1d088dee 4717find_bypass_set (int regno, int bb)
0e3f0221
RS
4718{
4719 struct expr *result = 0;
4720
4721 for (;;)
4722 {
4723 rtx src;
ceda50e9 4724 struct expr *set = lookup_set (regno, &set_hash_table);
0e3f0221
RS
4725
4726 while (set)
4727 {
4728 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
4729 break;
4730 set = next_set (regno, set);
4731 }
4732
4733 if (set == 0)
4734 break;
4735
4736 if (GET_CODE (set->expr) != SET)
4737 abort ();
4738
4739 src = SET_SRC (set->expr);
6b2d1c9e 4740 if (gcse_constant_p (src))
0e3f0221
RS
4741 result = set;
4742
4743 if (GET_CODE (src) != REG)
4744 break;
4745
4746 regno = REGNO (src);
4747 }
4748 return result;
4749}
4750
4751
e129b3f9
RS
4752/* Subroutine of bypass_block that checks whether a pseudo is killed by
4753 any of the instructions inserted on an edge. Jump bypassing places
4754 condition code setters on CFG edges using insert_insn_on_edge. This
4755 function is required to check that our data flow analysis is still
4756 valid prior to commit_edge_insertions. */
4757
4758static bool
1d088dee 4759reg_killed_on_edge (rtx reg, edge e)
e129b3f9
RS
4760{
4761 rtx insn;
4762
4763 for (insn = e->insns; insn; insn = NEXT_INSN (insn))
4764 if (INSN_P (insn) && reg_set_p (reg, insn))
4765 return true;
4766
4767 return false;
4768}
4769
0e3f0221
RS
4770/* Subroutine of bypass_conditional_jumps that attempts to bypass the given
4771 basic block BB which has more than one predecessor. If not NULL, SETCC
4772 is the first instruction of BB, which is immediately followed by JUMP_INSN
4773 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
e129b3f9
RS
4774 Returns nonzero if a change was made.
4775
e0bb17a8 4776 During the jump bypassing pass, we may place copies of SETCC instructions
e129b3f9
RS
4777 on CFG edges. The following routine must be careful to pay attention to
4778 these inserted insns when performing its transformations. */
0e3f0221
RS
4779
4780static int
1d088dee 4781bypass_block (basic_block bb, rtx setcc, rtx jump)
0e3f0221
RS
4782{
4783 rtx insn, note;
e129b3f9 4784 edge e, enext, edest;
818b6b7f 4785 int i, change;
72b8d451 4786 int may_be_loop_header;
0e3f0221
RS
4787
4788 insn = (setcc != NULL) ? setcc : jump;
4789
4790 /* Determine set of register uses in INSN. */
4791 reg_use_count = 0;
4792 note_uses (&PATTERN (insn), find_used_regs, NULL);
4793 note = find_reg_equal_equiv_note (insn);
4794 if (note)
4795 find_used_regs (&XEXP (note, 0), NULL);
4796
72b8d451
ZD
4797 may_be_loop_header = false;
4798 for (e = bb->pred; e; e = e->pred_next)
4799 if (e->flags & EDGE_DFS_BACK)
4800 {
4801 may_be_loop_header = true;
4802 break;
4803 }
4804
0e3f0221
RS
4805 change = 0;
4806 for (e = bb->pred; e; e = enext)
4807 {
4808 enext = e->pred_next;
7821bfc7
RS
4809 if (e->flags & EDGE_COMPLEX)
4810 continue;
4811
4812 /* We can't redirect edges from new basic blocks. */
4813 if (e->src->index >= bypass_last_basic_block)
4814 continue;
4815
72b8d451 4816 /* The irreducible loops created by redirecting edges that enter the
e0bb17a8
KH
4817 loop from outside would decrease the effectiveness of some of the
4818 following optimizations, so prevent this. */
72b8d451
ZD
4819 if (may_be_loop_header
4820 && !(e->flags & EDGE_DFS_BACK))
4821 continue;
4822
0e3f0221
RS
4823 for (i = 0; i < reg_use_count; i++)
4824 {
4825 struct reg_use *reg_used = &reg_use_table[i];
589005ff 4826 unsigned int regno = REGNO (reg_used->reg_rtx);
818b6b7f 4827 basic_block dest, old_dest;
589005ff
KH
4828 struct expr *set;
4829 rtx src, new;
0e3f0221 4830
589005ff
KH
4831 if (regno >= max_gcse_regno)
4832 continue;
0e3f0221 4833
589005ff 4834 set = find_bypass_set (regno, e->src->index);
0e3f0221
RS
4835
4836 if (! set)
4837 continue;
4838
e129b3f9
RS
4839 /* Check the data flow is valid after edge insertions. */
4840 if (e->insns && reg_killed_on_edge (reg_used->reg_rtx, e))
4841 continue;
4842
589005ff 4843 src = SET_SRC (pc_set (jump));
0e3f0221
RS
4844
4845 if (setcc != NULL)
4846 src = simplify_replace_rtx (src,
589005ff
KH
4847 SET_DEST (PATTERN (setcc)),
4848 SET_SRC (PATTERN (setcc)));
0e3f0221
RS
4849
4850 new = simplify_replace_rtx (src, reg_used->reg_rtx,
589005ff 4851 SET_SRC (set->expr));
0e3f0221 4852
1d088dee 4853 /* Jump bypassing may have already placed instructions on
e129b3f9
RS
4854 edges of the CFG. We can't bypass an outgoing edge that
4855 has instructions associated with it, as these insns won't
4856 get executed if the incoming edge is redirected. */
4857
589005ff 4858 if (new == pc_rtx)
e129b3f9
RS
4859 {
4860 edest = FALLTHRU_EDGE (bb);
4861 dest = edest->insns ? NULL : edest->dest;
4862 }
0e3f0221 4863 else if (GET_CODE (new) == LABEL_REF)
e129b3f9
RS
4864 {
4865 dest = BLOCK_FOR_INSN (XEXP (new, 0));
4866 /* Don't bypass edges containing instructions. */
4867 for (edest = bb->succ; edest; edest = edest->succ_next)
4868 if (edest->dest == dest && edest->insns)
4869 {
4870 dest = NULL;
4871 break;
4872 }
4873 }
0e3f0221
RS
4874 else
4875 dest = NULL;
4876
a544524a
JH
4877	      /* Avoid unification of the edge with other edges from the original
4878 branch. We would end up emitting the instruction on "both"
4879 edges. */
4880
f0cad2d5 4881 if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc))))
a544524a
JH
4882 {
4883 edge e2;
4884 for (e2 = e->src->succ; e2; e2 = e2->succ_next)
4885 if (e2->dest == dest)
4886 {
4887 dest = NULL;
4888 break;
4889 }
4890 }
4891
818b6b7f 4892 old_dest = e->dest;
7821bfc7
RS
4893 if (dest != NULL
4894 && dest != old_dest
4895 && dest != EXIT_BLOCK_PTR)
4896 {
4897 redirect_edge_and_branch_force (e, dest);
4898
818b6b7f 4899 /* Copy the register setter to the redirected edge.
0e3f0221
RS
4900	     Don't copy CC0 setters, as CC0 is dead after the jump.  */
4901 if (setcc)
4902 {
4903 rtx pat = PATTERN (setcc);
818b6b7f 4904 if (!CC0_P (SET_DEST (pat)))
0e3f0221
RS
4905 insert_insn_on_edge (copy_insn (pat), e);
4906 }
4907
4908 if (gcse_file != NULL)
4909 {
818b6b7f
RH
4910 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
4911 regno, INSN_UID (jump));
0e3f0221
RS
4912 print_rtl (gcse_file, SET_SRC (set->expr));
4913 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
818b6b7f 4914 e->src->index, old_dest->index, dest->index);
0e3f0221
RS
4915 }
4916 change = 1;
4917 break;
4918 }
4919 }
4920 }
4921 return change;
4922}
4923
4924/* Find basic blocks with more than one predecessor that only contain a
4925 single conditional jump. If the result of the comparison is known at
4926 compile-time from any incoming edge, redirect that edge to the
9a71ece1
RH
4927 appropriate target. Returns nonzero if a change was made.
4928
4929 This function is now mis-named, because we also handle indirect jumps. */
0e3f0221
RS
4930
4931static int
1d088dee 4932bypass_conditional_jumps (void)
0e3f0221
RS
4933{
4934 basic_block bb;
4935 int changed;
4936 rtx setcc;
4937 rtx insn;
4938 rtx dest;
4939
4940 /* Note we start at block 1. */
4941 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4942 return 0;
4943
7821bfc7 4944 bypass_last_basic_block = last_basic_block;
72b8d451 4945 mark_dfs_back_edges ();
7821bfc7 4946
0e3f0221
RS
4947 changed = 0;
4948 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
589005ff 4949 EXIT_BLOCK_PTR, next_bb)
0e3f0221
RS
4950 {
4951 /* Check for more than one predecessor. */
4952 if (bb->pred && bb->pred->pred_next)
4953 {
4954 setcc = NULL_RTX;
a813c111
SB
4955 for (insn = BB_HEAD (bb);
4956 insn != NULL && insn != NEXT_INSN (BB_END (bb));
0e3f0221
RS
4957 insn = NEXT_INSN (insn))
4958 if (GET_CODE (insn) == INSN)
4959 {
9543a9d2 4960 if (setcc)
0e3f0221 4961 break;
ba4f7968 4962 if (GET_CODE (PATTERN (insn)) != SET)
0e3f0221
RS
4963 break;
4964
ba4f7968 4965 dest = SET_DEST (PATTERN (insn));
818b6b7f 4966 if (REG_P (dest) || CC0_P (dest))
0e3f0221 4967 setcc = insn;
0e3f0221
RS
4968 else
4969 break;
4970 }
4971 else if (GET_CODE (insn) == JUMP_INSN)
4972 {
9a71ece1
RH
4973 if ((any_condjump_p (insn) || computed_jump_p (insn))
4974 && onlyjump_p (insn))
0e3f0221
RS
4975 changed |= bypass_block (bb, setcc, insn);
4976 break;
4977 }
4978 else if (INSN_P (insn))
4979 break;
4980 }
4981 }
4982
818b6b7f 4983	  /* If we bypassed any register-setting insns, we inserted a
fbe5a4a6 4984 copy on the redirected edge. These need to be committed. */
0e3f0221
RS
4985 if (changed)
4986 commit_edge_insertions();
4987
4988 return changed;
4989}
4990\f
a65f3558 4991/* Compute PRE+LCM working variables. */
7506f491
DE
4992
4993/* Local properties of expressions. */
4994/* Nonzero for expressions that are transparent in the block. */
a65f3558 4995static sbitmap *transp;
7506f491 4996
5c35539b
RH
4997/* Nonzero for expressions that are transparent at the end of the block.
4998	   This is only zero for expressions killed by an abnormal critical edge
4999	   created by a call.  */
a65f3558 5000static sbitmap *transpout;
5c35539b 5001
a65f3558
JL
5002/* Nonzero for expressions that are computed (available) in the block. */
5003static sbitmap *comp;
7506f491 5004
a65f3558
JL
5005/* Nonzero for expressions that are locally anticipatable in the block. */
5006static sbitmap *antloc;
7506f491 5007
a65f3558
JL
5008/* Nonzero for expressions where this block is an optimal computation
5009 point. */
5010static sbitmap *pre_optimal;
5c35539b 5011
a65f3558
JL
5012/* Nonzero for expressions which are redundant in a particular block. */
5013static sbitmap *pre_redundant;
7506f491 5014
a42cd965
AM
5015/* Nonzero for expressions which should be inserted on a specific edge. */
5016static sbitmap *pre_insert_map;
5017
5018/* Nonzero for expressions which should be deleted in a specific block. */
5019static sbitmap *pre_delete_map;
5020
5021/* Contains the edge_list returned by pre_edge_lcm. */
5022static struct edge_list *edge_list;
5023
a65f3558
JL
5024/* Redundant insns. */
5025static sbitmap pre_redundant_insns;
7506f491 5026
a65f3558 5027/* Allocate vars used for PRE analysis. */
7506f491
DE
5028
5029static void
1d088dee 5030alloc_pre_mem (int n_blocks, int n_exprs)
7506f491 5031{
a65f3558
JL
5032 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
5033 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
5034 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5faf03ae 5035
a42cd965
AM
5036 pre_optimal = NULL;
5037 pre_redundant = NULL;
5038 pre_insert_map = NULL;
5039 pre_delete_map = NULL;
5040 ae_in = NULL;
5041 ae_out = NULL;
a42cd965 5042 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
c4c81601 5043
a42cd965 5044 /* pre_insert and pre_delete are allocated later. */
7506f491
DE
5045}
5046
a65f3558 5047/* Free vars used for PRE analysis. */
7506f491
DE
5048
5049static void
1d088dee 5050free_pre_mem (void)
7506f491 5051{
5a660bff
DB
5052 sbitmap_vector_free (transp);
5053 sbitmap_vector_free (comp);
bd3675fc
JL
5054
5055 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
7506f491 5056
a42cd965 5057 if (pre_optimal)
5a660bff 5058 sbitmap_vector_free (pre_optimal);
a42cd965 5059 if (pre_redundant)
5a660bff 5060 sbitmap_vector_free (pre_redundant);
a42cd965 5061 if (pre_insert_map)
5a660bff 5062 sbitmap_vector_free (pre_insert_map);
a42cd965 5063 if (pre_delete_map)
5a660bff 5064 sbitmap_vector_free (pre_delete_map);
a42cd965 5065 if (ae_in)
5a660bff 5066 sbitmap_vector_free (ae_in);
a42cd965 5067 if (ae_out)
5a660bff 5068 sbitmap_vector_free (ae_out);
a42cd965 5069
bd3675fc 5070 transp = comp = NULL;
a42cd965 5071 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
55d3f917 5072 ae_in = ae_out = NULL;
7506f491
DE
5073}
5074
5075/* Top level routine to do the dataflow analysis needed by PRE. */
5076
5077static void
1d088dee 5078compute_pre_data (void)
7506f491 5079{
b614171e 5080 sbitmap trapping_expr;
e0082a72 5081 basic_block bb;
b614171e 5082 unsigned int ui;
c66e8ae9 5083
02280659 5084 compute_local_properties (transp, comp, antloc, &expr_hash_table);
d55bc081 5085 sbitmap_vector_zero (ae_kill, last_basic_block);
c66e8ae9 5086
b614171e 5087 /* Collect expressions which might trap. */
02280659 5088 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
b614171e 5089 sbitmap_zero (trapping_expr);
02280659 5090 for (ui = 0; ui < expr_hash_table.size; ui++)
b614171e
MM
5091 {
5092 struct expr *e;
02280659 5093 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
b614171e
MM
5094 if (may_trap_p (e->expr))
5095 SET_BIT (trapping_expr, e->bitmap_index);
5096 }
5097
c66e8ae9
JL
5098 /* Compute ae_kill for each basic block using:
5099
5100 ~(TRANSP | COMP)
5101
a2e90653 5102 This is significantly faster than compute_ae_kill. */
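  /* Editorial note on why this is safe: AVOUT = COMP | (AVIN & ~KILL),
     so an expression computed in the block (COMP) is available at its
     end regardless of KILL, and one that is neither computed nor
     transparent never survives the block.  Hence ~(TRANSP | COMP) can
     serve as the kill set without changing the solution.  */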
c66e8ae9 5103
e0082a72 5104 FOR_EACH_BB (bb)
c66e8ae9 5105 {
b614171e
MM
5106 edge e;
5107
5108 /* If the current block is the destination of an abnormal edge, we
5109 kill all trapping expressions because we won't be able to properly
5110 place the instruction on the edge. So make them neither
5111 anticipatable nor transparent. This is fairly conservative. */
e0082a72 5112 for (e = bb->pred; e ; e = e->pred_next)
b614171e
MM
5113 if (e->flags & EDGE_ABNORMAL)
5114 {
e0082a72
ZD
5115 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
5116 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
b614171e
MM
5117 break;
5118 }
5119
e0082a72
ZD
5120 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
5121 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
c66e8ae9
JL
5122 }
5123
02280659 5124 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
a42cd965 5125 ae_kill, &pre_insert_map, &pre_delete_map);
5a660bff 5126 sbitmap_vector_free (antloc);
bd3675fc 5127 antloc = NULL;
5a660bff 5128 sbitmap_vector_free (ae_kill);
589005ff 5129 ae_kill = NULL;
76ac938b 5130 sbitmap_free (trapping_expr);
7506f491
DE
5131}
5132\f
5133/* PRE utilities */
5134
cc2902df 5135/* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
a65f3558 5136 block BB.
7506f491
DE
5137
5138 VISITED is a pointer to a working buffer for tracking which BB's have
5139 been visited. It is NULL for the top-level call.
5140
5141 We treat reaching expressions that go through blocks containing the same
5142 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
5143 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
5144 2 as not reaching. The intent is to improve the probability of finding
5145 only one reaching expression and to reduce register lifetimes by picking
5146 the closest such expression. */
5147
5148static int
1d088dee 5149pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited)
7506f491 5150{
36349f8b 5151 edge pred;
7506f491 5152
e2d2ed72 5153 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
7506f491 5154 {
e2d2ed72 5155 basic_block pred_bb = pred->src;
7506f491 5156
36349f8b 5157 if (pred->src == ENTRY_BLOCK_PTR
7506f491 5158	  /* Has this predecessor already been visited?  */
0b17ab2f 5159 || visited[pred_bb->index])
c4c81601
RK
5160 ;/* Nothing to do. */
5161
7506f491 5162 /* Does this predecessor generate this expression? */
0b17ab2f 5163 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
7506f491
DE
5164 {
5165 /* Is this the occurrence we're looking for?
5166 Note that there's only one generating occurrence per block
5167 so we just need to check the block number. */
a65f3558 5168 if (occr_bb == pred_bb)
7506f491 5169 return 1;
c4c81601 5170
0b17ab2f 5171 visited[pred_bb->index] = 1;
7506f491
DE
5172 }
5173 /* Ignore this predecessor if it kills the expression. */
0b17ab2f
RH
5174 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
5175 visited[pred_bb->index] = 1;
c4c81601 5176
7506f491
DE
5177 /* Neither gen nor kill. */
5178 else
ac7c5af5 5179 {
0b17ab2f 5180 visited[pred_bb->index] = 1;
89e606c9 5181 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
7506f491 5182 return 1;
ac7c5af5 5183 }
7506f491
DE
5184 }
5185
5186 /* All paths have been checked. */
5187 return 0;
5188}
283a2545
RL
5189
5190/* The wrapper for pre_expr_reaches_here_p_work that ensures that any
dc297297 5191	 memory allocated by that function is freed.  */
283a2545
RL
5192
5193static int
1d088dee 5194pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
283a2545
RL
5195{
5196 int rval;
703ad42b 5197 char *visited = xcalloc (last_basic_block, 1);
283a2545 5198
8e42ace1 5199 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
283a2545
RL
5200
5201 free (visited);
c4c81601 5202 return rval;
283a2545 5203}
7506f491 5204\f
a42cd965
AM
5205
5206/* Given an expr, generate RTL which we can insert at the end of a BB,
589005ff 5207 or on an edge. Set the block number of any insns generated to
a42cd965
AM
5208 the value of BB. */
5209
5210static rtx
1d088dee 5211process_insert_insn (struct expr *expr)
a42cd965
AM
5212{
5213 rtx reg = expr->reaching_reg;
fb0c0a12
RK
5214 rtx exp = copy_rtx (expr->expr);
5215 rtx pat;
a42cd965
AM
5216
5217 start_sequence ();
fb0c0a12
RK
5218
5219 /* If the expression is something that's an operand, like a constant,
5220 just copy it to a register. */
5221 if (general_operand (exp, GET_MODE (reg)))
5222 emit_move_insn (reg, exp);
5223
5224 /* Otherwise, make a new insn to compute this expression and make sure the
5225 insn will be recognized (this also adds any needed CLOBBERs). Copy the
5226 expression to make sure we don't have any sharing issues. */
8d444206 5227 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
fb0c0a12 5228 abort ();
589005ff 5229
2f937369 5230 pat = get_insns ();
a42cd965
AM
5231 end_sequence ();
5232
5233 return pat;
5234}
589005ff 5235
a65f3558
JL
5236/* Add EXPR to the end of basic block BB.
5237
5238	   This is used by both PRE and code hoisting.
5239
5240 For PRE, we want to verify that the expr is either transparent
5241 or locally anticipatable in the target block. This check makes
5242 no sense for code hoisting. */
7506f491
DE
5243
5244static void
1d088dee 5245insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
7506f491 5246{
a813c111 5247 rtx insn = BB_END (bb);
7506f491
DE
5248 rtx new_insn;
5249 rtx reg = expr->reaching_reg;
5250 int regno = REGNO (reg);
2f937369 5251 rtx pat, pat_end;
7506f491 5252
a42cd965 5253 pat = process_insert_insn (expr);
2f937369
DM
5254 if (pat == NULL_RTX || ! INSN_P (pat))
5255 abort ();
5256
5257 pat_end = pat;
5258 while (NEXT_INSN (pat_end) != NULL_RTX)
5259 pat_end = NEXT_INSN (pat_end);
7506f491
DE
5260
5261 /* If the last insn is a jump, insert EXPR in front [taking care to
4d6922ee 5262	     handle cc0, etc. properly].  Similarly we need to take care of trapping
068473ec 5263	     instructions in the presence of non-call exceptions.  */
7506f491 5264
068473ec
JH
5265 if (GET_CODE (insn) == JUMP_INSN
5266 || (GET_CODE (insn) == INSN
5267 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
7506f491 5268 {
50b2596f 5269#ifdef HAVE_cc0
7506f491 5270 rtx note;
50b2596f 5271#endif
068473ec
JH
5272 /* It should always be the case that we can put these instructions
5273	 anywhere in the basic block when performing PRE optimizations.
5274 Check this. */
3b25fbfe 5275 if (GET_CODE (insn) == INSN && pre
0b17ab2f 5276 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
589005ff 5277 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
068473ec 5278 abort ();
7506f491
DE
5279
5280 /* If this is a jump table, then we can't insert stuff here. Since
5281 we know the previous real insn must be the tablejump, we insert
5282 the new instruction just before the tablejump. */
5283 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
5284 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
5285 insn = prev_real_insn (insn);
5286
5287#ifdef HAVE_cc0
5288 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
5289 if cc0 isn't set. */
5290 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
5291 if (note)
5292 insn = XEXP (note, 0);
5293 else
5294 {
5295 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
5296 if (maybe_cc0_setter
2c3c49de 5297 && INSN_P (maybe_cc0_setter)
7506f491
DE
5298 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
5299 insn = maybe_cc0_setter;
5300 }
5301#endif
5302 /* FIXME: What if something in cc0/jump uses value set in new insn? */
3c030e88 5303 new_insn = emit_insn_before (pat, insn);
3947e2f9 5304 }
c4c81601 5305
3947e2f9
RH
5306 /* Likewise if the last insn is a call, as will happen in the presence
5307 of exception handling. */
068473ec
JH
5308 else if (GET_CODE (insn) == CALL_INSN
5309 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
3947e2f9 5310 {
3947e2f9
RH
5311 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
5312 we search backward and place the instructions before the first
5313	 parameter is loaded.  Do this for everyone for consistency and on the
fbe5a4a6 5314	 presumption that we'll get better code elsewhere as well.
3947e2f9 5315
c4c81601 5316 It should always be the case that we can put these instructions
a65f3558
JL
5317	 anywhere in the basic block when performing PRE optimizations.
5318 Check this. */
c4c81601 5319
a65f3558 5320 if (pre
0b17ab2f 5321 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
589005ff 5322 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
3947e2f9
RH
5323 abort ();
5324
5325 /* Since different machines initialize their parameter registers
5326 in different orders, assume nothing. Collect the set of all
5327 parameter registers. */
a813c111 5328 insn = find_first_parameter_load (insn, BB_HEAD (bb));
3947e2f9 5329
b1d26727
JL
5330 /* If we found all the parameter loads, then we want to insert
5331 before the first parameter load.
5332
5333 If we did not find all the parameter loads, then we might have
5334 stopped on the head of the block, which could be a CODE_LABEL.
5335 If we inserted before the CODE_LABEL, then we would be putting
5336 the insn in the wrong basic block. In that case, put the insn
b5229628 5337 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
0a377997 5338 while (GET_CODE (insn) == CODE_LABEL
589ca5cb 5339 || NOTE_INSN_BASIC_BLOCK_P (insn))
b5229628 5340 insn = NEXT_INSN (insn);
c4c81601 5341
3c030e88 5342 new_insn = emit_insn_before (pat, insn);
7506f491
DE
5343 }
5344 else
3c030e88 5345 new_insn = emit_insn_after (pat, insn);
7506f491 5346
2f937369 5347 while (1)
a65f3558 5348 {
2f937369 5349 if (INSN_P (pat))
a65f3558 5350 {
2f937369
DM
5351 add_label_notes (PATTERN (pat), new_insn);
5352 note_stores (PATTERN (pat), record_set_info, pat);
a65f3558 5353 }
2f937369
DM
5354 if (pat == pat_end)
5355 break;
5356 pat = NEXT_INSN (pat);
a65f3558 5357 }
3947e2f9 5358
7506f491
DE
5359 gcse_create_count++;
5360
5361 if (gcse_file)
5362 {
c4c81601 5363 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
0b17ab2f 5364 bb->index, INSN_UID (new_insn));
c4c81601
RK
5365 fprintf (gcse_file, "copying expression %d to reg %d\n",
5366 expr->bitmap_index, regno);
7506f491
DE
5367 }
5368}
5369
a42cd965
AM
5370/* Insert partially redundant expressions on edges in the CFG to make
5371 the expressions fully redundant. */
7506f491 5372
a42cd965 5373static int
1d088dee 5374pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
7506f491 5375{
c4c81601 5376 int e, i, j, num_edges, set_size, did_insert = 0;
a65f3558
JL
5377 sbitmap *inserted;
5378
a42cd965
AM
5379 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
5380 if it reaches any of the deleted expressions. */
7506f491 5381
a42cd965
AM
5382 set_size = pre_insert_map[0]->size;
5383 num_edges = NUM_EDGES (edge_list);
02280659 5384 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
a42cd965 5385 sbitmap_vector_zero (inserted, num_edges);
7506f491 5386
a42cd965 5387 for (e = 0; e < num_edges; e++)
7506f491
DE
5388 {
5389 int indx;
e2d2ed72 5390 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
a65f3558 5391
a65f3558 5392 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
7506f491 5393 {
a42cd965 5394 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
7506f491 5395
02280659 5396 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
c4c81601
RK
5397 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
5398 {
5399 struct expr *expr = index_map[j];
5400 struct occr *occr;
a65f3558 5401
ff7cc307 5402 /* Now look at each deleted occurrence of this expression. */
c4c81601
RK
5403 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5404 {
5405 if (! occr->deleted_p)
5406 continue;
5407
5408	 /* Insert this expression on this edge if it would
ff7cc307 5409 reach the deleted occurrence in BB. */
c4c81601
RK
5410 if (!TEST_BIT (inserted[e], j))
5411 {
5412 rtx insn;
5413 edge eg = INDEX_EDGE (edge_list, e);
5414
5415 /* We can't insert anything on an abnormal and
5416 critical edge, so we insert the insn at the end of
5417 the previous block. There are several alternatives
5418	 detailed in Morgan's book P277 (sec 10.5) for
5419 handling this situation. This one is easiest for
5420 now. */
5421
5422 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
5423 insert_insn_end_bb (index_map[j], bb, 0);
5424 else
5425 {
5426 insn = process_insert_insn (index_map[j]);
5427 insert_insn_on_edge (insn, eg);
5428 }
5429
5430 if (gcse_file)
5431 {
5432 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
0b17ab2f
RH
5433 bb->index,
5434 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
c4c81601
RK
5435 fprintf (gcse_file, "copy expression %d\n",
5436 expr->bitmap_index);
5437 }
5438
a13d4ebf 5439 update_ld_motion_stores (expr);
c4c81601
RK
5440 SET_BIT (inserted[e], j);
5441 did_insert = 1;
5442 gcse_create_count++;
5443 }
5444 }
5445 }
7506f491
DE
5446 }
5447 }
5faf03ae 5448
5a660bff 5449 sbitmap_vector_free (inserted);
a42cd965 5450 return did_insert;
7506f491
DE
5451}
5452
073089a7 5453/* Copy the result of EXPR->EXPR generated by INSN to EXPR->REACHING_REG.
b885908b
MH
5454 Given "old_reg <- expr" (INSN), instead of adding after it
5455 reaching_reg <- old_reg
5456 it's better to do the following:
5457 reaching_reg <- expr
5458 old_reg <- reaching_reg
5459 because this way copy propagation can discover additional PRE
f5f2e3cd
MH
5460 opportunities. But if this fails, we try the old way.
5461 When "expr" is a store, i.e.
5462 given "MEM <- old_reg", instead of adding after it
5463 reaching_reg <- old_reg
5464 it's better to add it before as follows:
5465 reaching_reg <- old_reg
5466 MEM <- reaching_reg. */
7506f491
DE
5467
5468static void
1d088dee 5469pre_insert_copy_insn (struct expr *expr, rtx insn)
7506f491
DE
5470{
5471 rtx reg = expr->reaching_reg;
5472 int regno = REGNO (reg);
5473 int indx = expr->bitmap_index;
073089a7
RS
5474 rtx pat = PATTERN (insn);
5475 rtx set, new_insn;
b885908b 5476 rtx old_reg;
073089a7 5477 int i;
7506f491 5478
073089a7
RS
5479 /* This block matches the logic in hash_scan_insn. */
5480 if (GET_CODE (pat) == SET)
5481 set = pat;
5482 else if (GET_CODE (pat) == PARALLEL)
5483 {
5484 /* Search through the parallel looking for the set whose
5485 source was the expression that we're interested in. */
5486 set = NULL_RTX;
5487 for (i = 0; i < XVECLEN (pat, 0); i++)
5488 {
5489 rtx x = XVECEXP (pat, 0, i);
5490 if (GET_CODE (x) == SET
5491 && expr_equiv_p (SET_SRC (x), expr->expr))
5492 {
5493 set = x;
5494 break;
5495 }
5496 }
5497 }
5498 else
7506f491 5499 abort ();
c4c81601 5500
f5f2e3cd 5501 if (GET_CODE (SET_DEST (set)) == REG)
073089a7 5502 {
f5f2e3cd
MH
5503 old_reg = SET_DEST (set);
5504 /* Check if we can modify the set destination in the original insn. */
5505 if (validate_change (insn, &SET_DEST (set), reg, 0))
5506 {
5507 new_insn = gen_move_insn (old_reg, reg);
5508 new_insn = emit_insn_after (new_insn, insn);
5509
5510 /* Keep register set table up to date. */
5511 replace_one_set (REGNO (old_reg), insn, new_insn);
5512 record_one_set (regno, insn);
5513 }
5514 else
5515 {
5516 new_insn = gen_move_insn (reg, old_reg);
5517 new_insn = emit_insn_after (new_insn, insn);
073089a7 5518
f5f2e3cd
MH
5519 /* Keep register set table up to date. */
5520 record_one_set (regno, new_insn);
5521 }
073089a7 5522 }
f5f2e3cd 5523	  else /* This is possible only in the case of a store to memory.  */
073089a7 5524 {
f5f2e3cd 5525 old_reg = SET_SRC (set);
073089a7 5526 new_insn = gen_move_insn (reg, old_reg);
f5f2e3cd
MH
5527
5528 /* Check if we can modify the set source in the original insn. */
5529 if (validate_change (insn, &SET_SRC (set), reg, 0))
5530 new_insn = emit_insn_before (new_insn, insn);
5531 else
5532 new_insn = emit_insn_after (new_insn, insn);
c4c81601 5533
073089a7
RS
5534 /* Keep register set table up to date. */
5535 record_one_set (regno, new_insn);
5536 }
7506f491
DE
5537
5538 gcse_create_count++;
5539
5540 if (gcse_file)
a42cd965
AM
5541 fprintf (gcse_file,
5542 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
5543 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
5544 INSN_UID (insn), regno);
7506f491
DE
5545}
5546
5547/* Copy available expressions that reach the redundant expression
5548 to `reaching_reg'. */
5549
5550static void
1d088dee 5551pre_insert_copies (void)
7506f491 5552{
f5f2e3cd 5553 unsigned int i, added_copy;
c4c81601
RK
5554 struct expr *expr;
5555 struct occr *occr;
5556 struct occr *avail;
a65f3558 5557
7506f491
DE
5558 /* For each available expression in the table, copy the result to
5559 `reaching_reg' if the expression reaches a deleted one.
5560
5561 ??? The current algorithm is rather brute force.
5562 Need to do some profiling. */
5563
02280659
ZD
5564 for (i = 0; i < expr_hash_table.size; i++)
5565 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601
RK
5566 {
5567 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
5568 we don't want to insert a copy here because the expression may not
5569 really be redundant. So only insert an insn if the expression was
5570 deleted. This test also avoids further processing if the
5571 expression wasn't deleted anywhere. */
5572 if (expr->reaching_reg == NULL)
5573 continue;
f5f2e3cd
MH
5574
5575 /* Set when we add a copy for that expression. */
5576 added_copy = 0;
c4c81601
RK
5577
5578 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5579 {
5580 if (! occr->deleted_p)
5581 continue;
7506f491 5582
c4c81601
RK
5583 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
5584 {
5585 rtx insn = avail->insn;
7506f491 5586
c4c81601
RK
5587 /* No need to handle this one if handled already. */
5588 if (avail->copied_p)
5589 continue;
7506f491 5590
c4c81601
RK
5591 /* Don't handle this one if it's a redundant one. */
5592 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
5593 continue;
7506f491 5594
c4c81601 5595 /* Or if the expression doesn't reach the deleted one. */
589005ff 5596 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
e2d2ed72
AM
5597 expr,
5598 BLOCK_FOR_INSN (occr->insn)))
c4c81601 5599 continue;
7506f491 5600
f5f2e3cd
MH
5601 added_copy = 1;
5602
c4c81601
RK
5603 /* Copy the result of avail to reaching_reg. */
5604 pre_insert_copy_insn (expr, insn);
5605 avail->copied_p = 1;
5606 }
5607 }
f5f2e3cd
MH
5608
5609 if (added_copy)
5610 update_ld_motion_stores (expr);
c4c81601 5611 }
7506f491
DE
5612}
5613
10d1bb36
JH
5614/* Emit a move from SRC to DEST, noting the equivalence with the expression
5615   computed in INSN.  */
5616static rtx
1d088dee 5617gcse_emit_move_after (rtx src, rtx dest, rtx insn)
10d1bb36
JH
5618{
5619 rtx new;
6bdb8dd6 5620 rtx set = single_set (insn), set2;
10d1bb36
JH
5621 rtx note;
5622 rtx eqv;
5623
5624 /* This should never fail since we're creating a reg->reg copy
5625 we've verified to be valid. */
5626
6bdb8dd6 5627 new = emit_insn_after (gen_move_insn (dest, src), insn);
285464d0 5628
10d1bb36 5629 /* Note the equivalence for local CSE pass. */
6bdb8dd6
JH
5630 set2 = single_set (new);
5631 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
5632 return new;
10d1bb36
JH
5633 if ((note = find_reg_equal_equiv_note (insn)))
5634 eqv = XEXP (note, 0);
5635 else
5636 eqv = SET_SRC (set);
5637
a500466b 5638 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
10d1bb36
JH
5639
5640 return new;
5641}
5642
7506f491 5643/* Delete redundant computations.
7506f491
DE
5644 Deletion is done by changing the insn to copy the `reaching_reg' of
5645 the expression into the result of the SET. It is left to later passes
5646 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
5647
cc2902df 5648 Returns nonzero if a change is made. */
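/* Editorial before/after sketch (the RTL here is illustrative, not from
   the original source):

      before:  (set (reg 10) (plus (reg a) (reg b)))
      after:   (set (reg 10) (reg 99))

   where (reg 99) is EXPR->reaching_reg, holding "a + b" computed on
   every path into this block.  */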
7506f491
DE
5649
5650static int
1d088dee 5651pre_delete (void)
7506f491 5652{
2e653e39 5653 unsigned int i;
63bc1d05 5654 int changed;
c4c81601
RK
5655 struct expr *expr;
5656 struct occr *occr;
a65f3558 5657
7506f491 5658 changed = 0;
02280659 5659 for (i = 0; i < expr_hash_table.size; i++)
073089a7
RS
5660 for (expr = expr_hash_table.table[i];
5661 expr != NULL;
5662 expr = expr->next_same_hash)
c4c81601
RK
5663 {
5664 int indx = expr->bitmap_index;
7506f491 5665
c4c81601
RK
5666 /* We only need to search antic_occr since we require
5667 ANTLOC != 0. */
7506f491 5668
c4c81601
RK
5669 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5670 {
5671 rtx insn = occr->insn;
5672 rtx set;
e2d2ed72 5673 basic_block bb = BLOCK_FOR_INSN (insn);
7506f491 5674
073089a7
RS
5675 /* We only delete insns that have a single_set. */
5676 if (TEST_BIT (pre_delete_map[bb->index], indx)
5677 && (set = single_set (insn)) != 0)
c4c81601 5678 {
c4c81601
RK
5679 /* Create a pseudo-reg to store the result of reaching
5680 expressions into. Get the mode for the new pseudo from
5681 the mode of the original destination pseudo. */
5682 if (expr->reaching_reg == NULL)
5683 expr->reaching_reg
5684 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5685
10d1bb36
JH
5686 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5687 delete_insn (insn);
5688 occr->deleted_p = 1;
5689 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
5690 changed = 1;
5691 gcse_subst_count++;
7506f491 5692
c4c81601
RK
5693 if (gcse_file)
5694 {
5695 fprintf (gcse_file,
5696 "PRE: redundant insn %d (expression %d) in ",
5697 INSN_UID (insn), indx);
5698 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
0b17ab2f 5699 bb->index, REGNO (expr->reaching_reg));
c4c81601
RK
5700 }
5701 }
5702 }
5703 }
7506f491
DE
5704
5705 return changed;
5706}
5707
5708/* Perform GCSE optimizations using PRE.
5709 This is called by one_pre_gcse_pass after all the dataflow analysis
5710 has been done.
5711
c4c81601
RK
5712	   This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
5713 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
5714 Compiler Design and Implementation.
7506f491 5715
c4c81601
RK
5716 ??? A new pseudo reg is created to hold the reaching expression. The nice
5717 thing about the classical approach is that it would try to use an existing
5718 reg. If the register can't be adequately optimized [i.e. we introduce
5719 reload problems], one could add a pass here to propagate the new register
5720 through the block.
7506f491 5721
c4c81601
RK
5722 ??? We don't handle single sets in PARALLELs because we're [currently] not
5723 able to copy the rest of the parallel when we insert copies to create full
5724 redundancies from partial redundancies. However, there's no reason why we
5725 can't handle PARALLELs in the cases where there are no partial
7506f491
DE
5726 redundancies. */
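/* Editorial example of a partial redundancy (block numbers are
   hypothetical):

        bb1: x = a + b     bb2: (no computation of a + b)
              \             /
          bb3: y = a + b    <- redundant along the bb1 path only

   pre_edge_insert places "a + b" on the edge bb2->bb3, making the
   computation in bb3 fully redundant; pre_delete then replaces it with
   a copy from the reaching register.  */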
5727
5728static int
1d088dee 5729pre_gcse (void)
7506f491 5730{
2e653e39
RK
5731 unsigned int i;
5732 int did_insert, changed;
7506f491 5733 struct expr **index_map;
c4c81601 5734 struct expr *expr;
7506f491
DE
5735
5736 /* Compute a mapping from expression number (`bitmap_index') to
5737 hash table entry. */
5738
703ad42b 5739 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
02280659
ZD
5740 for (i = 0; i < expr_hash_table.size; i++)
5741 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601 5742 index_map[expr->bitmap_index] = expr;
7506f491
DE
5743
5744 /* Reset bitmap used to track which insns are redundant. */
a65f3558
JL
5745 pre_redundant_insns = sbitmap_alloc (max_cuid);
5746 sbitmap_zero (pre_redundant_insns);
7506f491
DE
5747
5748 /* Delete the redundant insns first so that
5749 - we know what register to use for the new insns and for the other
5750 ones with reaching expressions
5751 - we know which insns are redundant when we go to create copies */
c4c81601 5752
7506f491
DE
5753 changed = pre_delete ();
5754
a42cd965 5755 did_insert = pre_edge_insert (edge_list, index_map);
c4c81601 5756
7506f491 5757 /* In other places with reaching expressions, copy the expression to the
a42cd965 5758 specially allocated pseudo-reg that reaches the redundant expr. */
7506f491 5759 pre_insert_copies ();
a42cd965
AM
5760 if (did_insert)
5761 {
5762 commit_edge_insertions ();
5763 changed = 1;
5764 }
7506f491 5765
283a2545 5766 free (index_map);
76ac938b 5767 sbitmap_free (pre_redundant_insns);
7506f491
DE
5768 return changed;
5769}
5770
5771/* Top level routine to perform one PRE GCSE pass.
5772
cc2902df 5773 Return nonzero if a change was made. */
7506f491
DE
5774
5775static int
1d088dee 5776one_pre_gcse_pass (int pass)
7506f491
DE
5777{
5778 int changed = 0;
5779
5780 gcse_subst_count = 0;
5781 gcse_create_count = 0;
5782
02280659 5783 alloc_hash_table (max_cuid, &expr_hash_table, 0);
a42cd965 5784 add_noreturn_fake_exit_edges ();
a13d4ebf
AM
5785 if (flag_gcse_lm)
5786 compute_ld_motion_mems ();
5787
02280659 5788 compute_hash_table (&expr_hash_table);
a13d4ebf 5789 trim_ld_motion_mems ();
7506f491 5790 if (gcse_file)
02280659 5791 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
c4c81601 5792
02280659 5793 if (expr_hash_table.n_elems > 0)
7506f491 5794 {
02280659 5795 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
7506f491
DE
5796 compute_pre_data ();
5797 changed |= pre_gcse ();
a42cd965 5798 free_edge_list (edge_list);
7506f491
DE
5799 free_pre_mem ();
5800 }
c4c81601 5801
a13d4ebf 5802 free_ldst_mems ();
a42cd965 5803 remove_fake_edges ();
02280659 5804 free_hash_table (&expr_hash_table);
7506f491
DE
5805
5806 if (gcse_file)
5807 {
c4c81601 5808 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
faed5cc3 5809 current_function_name (), pass, bytes_used);
c4c81601
RK
5810 fprintf (gcse_file, "%d substs, %d insns created\n",
5811 gcse_subst_count, gcse_create_count);
7506f491
DE
5812 }
5813
5814 return changed;
5815}
aeb2f500
JW
5816\f
5817/* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5b1ef594
JDA
5818 If notes are added to an insn which references a CODE_LABEL, the
5819 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5820 because the following loop optimization pass requires them. */
aeb2f500
JW
5821
5822/* ??? This is very similar to the loop.c add_label_notes function. We
5823 could probably share code here. */
5824
5825/* ??? If there was a jump optimization pass after gcse and before loop,
5826 then we would not need to do this here, because jump would add the
5827 necessary REG_LABEL notes. */
5828
5829static void
1d088dee 5830add_label_notes (rtx x, rtx insn)
aeb2f500
JW
5831{
5832 enum rtx_code code = GET_CODE (x);
5833 int i, j;
6f7d635c 5834 const char *fmt;
aeb2f500
JW
5835
5836 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
5837 {
6b3603c2 5838 /* This code used to ignore labels that referred to dispatch tables to
e0bb17a8 5839 avoid flow generating (slightly) worse code.
6b3603c2 5840
ac7c5af5
JL
5841 We no longer ignore such label references (see LABEL_REF handling in
5842 mark_jump_label for additional information). */
c4c81601 5843
6b8c9327 5844 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
6b3603c2 5845 REG_NOTES (insn));
5b1ef594 5846 if (LABEL_P (XEXP (x, 0)))
589005ff 5847 LABEL_NUSES (XEXP (x, 0))++;
aeb2f500
JW
5848 return;
5849 }
5850
c4c81601 5851 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
aeb2f500
JW
5852 {
5853 if (fmt[i] == 'e')
5854 add_label_notes (XEXP (x, i), insn);
5855 else if (fmt[i] == 'E')
5856 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5857 add_label_notes (XVECEXP (x, i, j), insn);
5858 }
5859}
a65f3558
JL
5860
5861/* Compute transparent outgoing information for each block.
5862
5863 An expression is transparent to an edge unless it is killed by
5864 the edge itself. This can only happen with abnormal control flow,
5865 when the edge is traversed through a call. This happens with
5866 non-local labels and exceptions.
5867
5868 This would not be necessary if we split the edge. While this is
5869 normally impossible for abnormal critical edges, with some effort
5870 it should be possible with exception handling, since we still have
5871 control over which handler should be invoked. But due to increased
5872 EH table sizes, this may not be worthwhile. */
5873
5874static void
1d088dee 5875compute_transpout (void)
a65f3558 5876{
e0082a72 5877 basic_block bb;
2e653e39 5878 unsigned int i;
c4c81601 5879 struct expr *expr;
a65f3558 5880
d55bc081 5881 sbitmap_vector_ones (transpout, last_basic_block);
a65f3558 5882
e0082a72 5883 FOR_EACH_BB (bb)
a65f3558 5884 {
a65f3558
JL
5885	      /* Note that flow inserted a nop at the end of basic blocks that
5886 end in call instructions for reasons other than abnormal
5887 control flow. */
a813c111 5888 if (GET_CODE (BB_END (bb)) != CALL_INSN)
a65f3558
JL
5889 continue;
5890
02280659
ZD
5891 for (i = 0; i < expr_hash_table.size; i++)
5892 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
c4c81601
RK
5893 if (GET_CODE (expr->expr) == MEM)
5894 {
5895 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5896 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
5897 continue;
589005ff 5898
c4c81601
RK
5899 /* ??? Optimally, we would use interprocedural alias
5900 analysis to determine if this mem is actually killed
5901 by this call. */
e0082a72 5902 RESET_BIT (transpout[bb->index], expr->bitmap_index);
c4c81601 5903 }
a65f3558
JL
5904 }
5905}
dfdb644f
JL
5906
5907/* Removal of useless null pointer checks */
5908
dfdb644f 5909/* Called via note_stores. X is set by SETTER. If X is a register we must
0511851c
MM
5910 invalidate nonnull_local and set nonnull_killed. DATA is really a
5911 `null_pointer_info *'.
dfdb644f
JL
5912
5913 We ignore hard registers. */
c4c81601 5914
dfdb644f 5915static void
1d088dee 5916invalidate_nonnull_info (rtx x, rtx setter ATTRIBUTE_UNUSED, void *data)
dfdb644f 5917{
770ae6cc
RK
5918 unsigned int regno;
5919 struct null_pointer_info *npi = (struct null_pointer_info *) data;
c4c81601 5920
dfdb644f
JL
5921 while (GET_CODE (x) == SUBREG)
5922 x = SUBREG_REG (x);
5923
5924 /* Ignore anything that is not a register or is a hard register. */
5925 if (GET_CODE (x) != REG
0511851c
MM
5926 || REGNO (x) < npi->min_reg
5927 || REGNO (x) >= npi->max_reg)
dfdb644f
JL
5928 return;
5929
0511851c 5930 regno = REGNO (x) - npi->min_reg;
dfdb644f 5931
e0082a72
ZD
5932 RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
5933 SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
dfdb644f
JL
5934}
5935
0511851c
MM
5936/* Do null-pointer check elimination for the registers indicated in
5937 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5938 they are not our responsibility to free. */
dfdb644f 5939
99a15921 5940static int
1d088dee
AJ
5941delete_null_pointer_checks_1 (unsigned int *block_reg, sbitmap *nonnull_avin,
5942 sbitmap *nonnull_avout,
5943 struct null_pointer_info *npi)
dfdb644f 5944{
e0082a72 5945 basic_block bb, current_block;
0511851c
MM
5946 sbitmap *nonnull_local = npi->nonnull_local;
5947 sbitmap *nonnull_killed = npi->nonnull_killed;
99a15921 5948 int something_changed = 0;
589005ff 5949
dfdb644f
JL
5950 /* Compute local properties, nonnull and killed. A register will have
5951 the nonnull property if at the end of the current block its value is
5952 known to be nonnull. The killed property indicates that somewhere in
5953 the block any information we had about the register is killed.
5954
5955 Note that a register can have both properties in a single block. That
5956 indicates that it's killed, then later in the block a new value is
5957 computed. */
d55bc081
ZD
5958 sbitmap_vector_zero (nonnull_local, last_basic_block);
5959 sbitmap_vector_zero (nonnull_killed, last_basic_block);
c4c81601 5960
e0082a72 5961 FOR_EACH_BB (current_block)
dfdb644f
JL
5962 {
5963 rtx insn, stop_insn;
5964
0511851c
MM
5965 /* Set the current block for invalidate_nonnull_info. */
5966 npi->current_block = current_block;
5967
dfdb644f
JL
5968 /* Scan each insn in the basic block looking for memory references and
5969 register sets. */
cb7c3c3f 5970 stop_insn = NEXT_INSN (BB_END (current_block));
a813c111 5971 for (insn = BB_HEAD (current_block);
dfdb644f
JL
5972 insn != stop_insn;
5973 insn = NEXT_INSN (insn))
5974 {
5975 rtx set;
0511851c 5976 rtx reg;
dfdb644f
JL
5977
5978 /* Ignore anything that is not a normal insn. */
2c3c49de 5979 if (! INSN_P (insn))
dfdb644f
JL
5980 continue;
5981
5982 /* Basically ignore anything that is not a simple SET. We do have
5983 to make sure to invalidate nonnull_local and set nonnull_killed
5984 for such insns though. */
5985 set = single_set (insn);
5986 if (!set)
5987 {
0511851c 5988 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
dfdb644f
JL
5989 continue;
5990 }
5991
f63d1bf7 5992 /* See if we've got a usable memory load. We handle it first
dfdb644f
JL
5993 in case it uses its address register as a dest (which kills
5994 the nonnull property). */
5995 if (GET_CODE (SET_SRC (set)) == MEM
0511851c
MM
5996 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
5997 && REGNO (reg) >= npi->min_reg
5998 && REGNO (reg) < npi->max_reg)
e0082a72 5999 SET_BIT (nonnull_local[current_block->index],
0511851c 6000 REGNO (reg) - npi->min_reg);
dfdb644f
JL
6001
6002 /* Now invalidate stuff clobbered by this insn. */
0511851c 6003 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
dfdb644f
JL
6004
6005	  /* And handle stores; we do these last since any sets in INSN
6006	     cannot kill the nonnull property if it is derived from a MEM
6007 appearing in a SET_DEST. */
6008 if (GET_CODE (SET_DEST (set)) == MEM
0511851c
MM
6009 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
6010 && REGNO (reg) >= npi->min_reg
6011 && REGNO (reg) < npi->max_reg)
e0082a72 6012 SET_BIT (nonnull_local[current_block->index],
0511851c 6013 REGNO (reg) - npi->min_reg);
dfdb644f
JL
6014 }
6015 }
6016
6017 /* Now compute global properties based on the local properties. This
fbe5a4a6 6018 is a classic global availability algorithm. */
ce724250
JL
6019 compute_available (nonnull_local, nonnull_killed,
6020 nonnull_avout, nonnull_avin);
dfdb644f
JL
6021
6022 /* Now look at each bb and see if it ends with a compare of a value
6023 against zero. */
e0082a72 6024 FOR_EACH_BB (bb)
dfdb644f 6025 {
a813c111 6026 rtx last_insn = BB_END (bb);
0511851c 6027 rtx condition, earliest;
dfdb644f
JL
6028 int compare_and_branch;
6029
0511851c
MM
6030 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
6031 since BLOCK_REG[BB] is zero if this block did not end with a
6032 comparison against zero, this condition works. */
e0082a72
ZD
6033 if (block_reg[bb->index] < npi->min_reg
6034 || block_reg[bb->index] >= npi->max_reg)
dfdb644f
JL
6035 continue;
6036
6037 /* LAST_INSN is a conditional jump. Get its condition. */
ec6ec6aa 6038 condition = get_condition (last_insn, &earliest, false);
dfdb644f 6039
40d7a3fe
NB
6040 /* If we can't determine the condition then skip. */
6041 if (! condition)
6042 continue;
6043
dfdb644f 6044 /* Is the register known to have a nonzero value? */
e0082a72 6045 if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
dfdb644f
JL
6046 continue;
6047
6048 /* Try to compute whether the compare/branch at the loop end is one or
6049 two instructions. */
6050 if (earliest == last_insn)
6051 compare_and_branch = 1;
6052 else if (earliest == prev_nonnote_insn (last_insn))
6053 compare_and_branch = 2;
6054 else
6055 continue;
6056
6057 /* We know the register in this comparison is nonnull at exit from
6058 this block. We can optimize this comparison. */
6059 if (GET_CODE (condition) == NE)
6060 {
6061 rtx new_jump;
6062
38c1593d
JH
6063 new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
6064 last_insn);
dfdb644f
JL
6065 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
6066 LABEL_NUSES (JUMP_LABEL (new_jump))++;
6067 emit_barrier_after (new_jump);
6068 }
8e184d9c 6069
99a15921 6070 something_changed = 1;
9cd56be1 6071 delete_insn (last_insn);
fb643f64 6072#ifdef HAVE_cc0
dfdb644f 6073 if (compare_and_branch == 2)
589005ff 6074 delete_insn (earliest);
fb643f64 6075#endif
e0082a72 6076 purge_dead_edges (bb);
0511851c 6077
a813c111 6078 /* Don't check this block again. (Note that BB_END is
589005ff 6079 invalid here; we deleted the last instruction in the
0511851c 6080 block.) */
e0082a72 6081 block_reg[bb->index] = 0;
0511851c 6082 }
99a15921
JL
6083
6084 return something_changed;
0511851c
MM
6085}
6086
6087/* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
6088 at compile time.
6089
6090 This is conceptually similar to global constant/copy propagation and
6091 classic global CSE (it even uses the same dataflow equations as cprop).
6092
6093	   If a register is used as a memory address with the form (mem (reg)), then we
6094	   know that REG cannot be zero at that point in the program.  Any instruction
6095	   which sets REG "kills" this property.
6096
6097 So, if every path leading to a conditional branch has an available memory
6098	   reference of that form, then we know the register cannot have the value
589005ff 6099 zero at the conditional branch.
0511851c 6100
fbe5a4a6 6101 So we merely need to compute the local properties and propagate that data
0511851c
MM
6102 around the cfg, then optimize where possible.
6103
6104 We run this pass two times. Once before CSE, then again after CSE. This
6105 has proven to be the most profitable approach. It is rare for new
6106 optimization opportunities of this nature to appear after the first CSE
6107 pass.
6108
6109 This could probably be integrated with global cprop with a little work. */
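/* Editorial sketch of the transformation (the RTL is hypothetical):

      (set (reg 7) (mem (reg 5)))   ; dereference: reg 5 != 0 here
      ...                           ; no intervening set of reg 5
      (set (pc) (if_then_else (eq (reg 5) (const_int 0)) ...))

   If every path to the branch contains such a dereference, the EQ test
   can never be true and the check is removed.  */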
6110
99a15921 6111int
1d088dee 6112delete_null_pointer_checks (rtx f ATTRIBUTE_UNUSED)
0511851c 6113{
0511851c 6114 sbitmap *nonnull_avin, *nonnull_avout;
770ae6cc 6115 unsigned int *block_reg;
e0082a72 6116 basic_block bb;
0511851c
MM
6117 int reg;
6118 int regs_per_pass;
d128effb 6119 int max_reg = max_reg_num ();
0511851c 6120 struct null_pointer_info npi;
99a15921 6121 int something_changed = 0;
0511851c 6122
d128effb
NS
6123 /* If we have only a single block, or it is too expensive, give up. */
6124 if (n_basic_blocks <= 1
6125 || is_too_expensive (_ ("NULL pointer checks disabled")))
99a15921 6126 return 0;
0511851c 6127
0511851c
MM
6128 /* We need four bitmaps, each with a bit for each register in each
6129 basic block. */
d55bc081 6130 regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);
0511851c
MM
6131
6132 /* Allocate bitmaps to hold local and global properties. */
d55bc081
ZD
6133 npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6134 npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6135 nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6136 nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
0511851c
MM
6137
6138 /* Go through the basic blocks, seeing whether or not each block
6139 ends with a conditional branch whose condition is a comparison
6140 against zero. Record the register compared in BLOCK_REG. */
703ad42b 6141 block_reg = xcalloc (last_basic_block, sizeof (int));
e0082a72 6142 FOR_EACH_BB (bb)
0511851c 6143 {
a813c111 6144 rtx last_insn = BB_END (bb);
0511851c
MM
6145 rtx condition, earliest, reg;
6146
6147 /* We only want conditional branches. */
6148 if (GET_CODE (last_insn) != JUMP_INSN
7f1c097d
JH
6149 || !any_condjump_p (last_insn)
6150 || !onlyjump_p (last_insn))
0511851c
MM
6151 continue;
6152
6153 /* LAST_INSN is a conditional jump. Get its condition. */
ec6ec6aa 6154 condition = get_condition (last_insn, &earliest, false);
0511851c 6155
4fe9b91c 6156 /* If we were unable to get the condition, or it is not an equality
0511851c
MM
6157 comparison against zero then there's nothing we can do. */
6158 if (!condition
6159 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
6160 || GET_CODE (XEXP (condition, 1)) != CONST_INT
589005ff 6161 || (XEXP (condition, 1)
0511851c
MM
6162 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
6163 continue;
6164
6165 /* We must be checking a register against zero. */
6166 reg = XEXP (condition, 0);
6167 if (GET_CODE (reg) != REG)
6168 continue;
6169
e0082a72 6170 block_reg[bb->index] = REGNO (reg);
0511851c
MM
6171 }
6172
6173 /* Go through the algorithm for each block of registers. */
6174 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
6175 {
6176 npi.min_reg = reg;
6177 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
99a15921
JL
6178 something_changed |= delete_null_pointer_checks_1 (block_reg,
6179 nonnull_avin,
6180 nonnull_avout,
6181 &npi);
dfdb644f
JL
6182 }
6183
0511851c
MM
6184 /* Free the table of registers compared at the end of every block. */
6185 free (block_reg);
6186
dfdb644f 6187 /* Free bitmaps. */
5a660bff
DB
6188 sbitmap_vector_free (npi.nonnull_local);
6189 sbitmap_vector_free (npi.nonnull_killed);
6190 sbitmap_vector_free (nonnull_avin);
6191 sbitmap_vector_free (nonnull_avout);
99a15921
JL
6192
6193 return something_changed;
dfdb644f 6194}
bb457bd9
JL
6195
6196/* Code Hoisting variables and subroutines. */
6197
6198/* Very busy expressions. */
6199static sbitmap *hoist_vbein;
6200static sbitmap *hoist_vbeout;
6201
6202/* Hoistable expressions. */
6203static sbitmap *hoist_exprs;
6204
bb457bd9 6205/* ??? We could compute post dominators and run this algorithm in
68e82b83 6206	   reverse to perform tail merging; doing so would probably be
bb457bd9
JL
6207 more effective than the tail merging code in jump.c.
6208
6209 It's unclear if tail merging could be run in parallel with
6210 code hoisting. It would be nice. */
6211
6212/* Allocate vars used for code hoisting analysis. */
6213
6214static void
1d088dee 6215alloc_code_hoist_mem (int n_blocks, int n_exprs)
bb457bd9
JL
6216{
6217 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
6218 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
6219 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
6220
6221 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
6222 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
6223 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
6224 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
bb457bd9
JL
6225}
6226
6227/* Free vars used for code hoisting analysis. */
6228
6229static void
1d088dee 6230free_code_hoist_mem (void)
bb457bd9 6231{
5a660bff
DB
6232 sbitmap_vector_free (antloc);
6233 sbitmap_vector_free (transp);
6234 sbitmap_vector_free (comp);
bb457bd9 6235
5a660bff
DB
6236 sbitmap_vector_free (hoist_vbein);
6237 sbitmap_vector_free (hoist_vbeout);
6238 sbitmap_vector_free (hoist_exprs);
6239 sbitmap_vector_free (transpout);
bb457bd9 6240
d47cc544 6241 free_dominance_info (CDI_DOMINATORS);
bb457bd9
JL
6242}
6243
6244/* Compute the very busy expressions at entry/exit from each block.
6245
6246 An expression is very busy if all paths from a given point
6247 compute the expression. */
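/* Editorial note: the fixpoint computed below solves

      VBEIN[bb]  = ANTLOC[bb] | (VBEOUT[bb] & TRANSP[bb])
      VBEOUT[bb] = intersection of VBEIN over the successors of bb
                   (VBEOUT is left zero for the block laid out just
                   before the exit block)

   which is the usual backward "very busy expressions" system,
   restricted to the local properties computed earlier.  */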
6248
6249static void
1d088dee 6250compute_code_hoist_vbeinout (void)
bb457bd9 6251{
e0082a72
ZD
6252 int changed, passes;
6253 basic_block bb;
bb457bd9 6254
d55bc081
ZD
6255 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
6256 sbitmap_vector_zero (hoist_vbein, last_basic_block);
bb457bd9
JL
6257
6258 passes = 0;
6259 changed = 1;
c4c81601 6260
bb457bd9
JL
6261 while (changed)
6262 {
6263 changed = 0;
c4c81601 6264
bb457bd9
JL
6265 /* We scan the blocks in the reverse order to speed up
6266 the convergence. */
e0082a72 6267 FOR_EACH_BB_REVERSE (bb)
bb457bd9 6268 {
e0082a72
ZD
6269 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
6270 hoist_vbeout[bb->index], transp[bb->index]);
6271 if (bb->next_bb != EXIT_BLOCK_PTR)
6272 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
bb457bd9 6273 }
c4c81601 6274
bb457bd9
JL
6275 passes++;
6276 }
6277
6278 if (gcse_file)
6279 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
6280}
6281
6282/* Top level routine to do the dataflow analysis needed by code hoisting. */
6283
6284static void
1d088dee 6285compute_code_hoist_data (void)
bb457bd9 6286{
02280659 6287 compute_local_properties (transp, comp, antloc, &expr_hash_table);
bb457bd9
JL
6288 compute_transpout ();
6289 compute_code_hoist_vbeinout ();
d47cc544 6290 calculate_dominance_info (CDI_DOMINATORS);
bb457bd9
JL
6291 if (gcse_file)
6292 fprintf (gcse_file, "\n");
6293}
6294
6295/* Determine if the expression identified by EXPR_INDEX would
6296   reach BB unimpaired if it was placed at the end of EXPR_BB.
6297
6298   It's unclear exactly what Muchnick meant by "unimpaired".  It seems
6299 to me that the expression must either be computed or transparent in
6300 *every* block in the path(s) from EXPR_BB to BB. Any other definition
6301 would allow the expression to be hoisted out of loops, even if
6302 the expression wasn't a loop invariant.
6303
6304 Contrast this to reachability for PRE where an expression is
6305 considered reachable if *any* path reaches instead of *all*
6306 paths. */
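/* Editorial example (hypothetical diamond CFG):

        bb0 -> bb1, bb0 -> bb2;  bb1 -> bb3, bb2 -> bb3

   If "a + b" is computed in both bb1 and bb2, it is very busy at the
   end of bb0 and may be hoisted there, but only because *every* path
   from bb0 computes it; under PRE's any-path notion a computation in
   bb1 alone would already "reach".  */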
6307
6308static int
1d088dee 6309hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited)
bb457bd9
JL
6310{
6311 edge pred;
283a2545 6312 int visited_allocated_locally = 0;
589005ff 6313
bb457bd9
JL
6314
6315 if (visited == NULL)
6316 {
8e42ace1 6317 visited_allocated_locally = 1;
d55bc081 6318 visited = xcalloc (last_basic_block, 1);
bb457bd9
JL
6319 }
6320
e2d2ed72 6321 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
bb457bd9 6322 {
e2d2ed72 6323 basic_block pred_bb = pred->src;
bb457bd9
JL
6324
6325 if (pred->src == ENTRY_BLOCK_PTR)
6326 break;
f305679f
JH
6327 else if (pred_bb == expr_bb)
6328 continue;
0b17ab2f 6329 else if (visited[pred_bb->index])
bb457bd9 6330 continue;
c4c81601 6331
bb457bd9 6332 /* Does this predecessor generate this expression? */
0b17ab2f 6333 else if (TEST_BIT (comp[pred_bb->index], expr_index))
bb457bd9 6334 break;
0b17ab2f 6335 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
bb457bd9 6336 break;
c4c81601 6337
bb457bd9
JL
6338 /* Not killed. */
6339 else
6340 {
0b17ab2f 6341 visited[pred_bb->index] = 1;
bb457bd9
JL
6342 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
6343 pred_bb, visited))
6344 break;
6345 }
6346 }
589005ff 6347 if (visited_allocated_locally)
283a2545 6348 free (visited);
c4c81601 6349
bb457bd9
JL
6350 return (pred == NULL);
6351}
6352\f
6353/* Actually perform code hoisting. */
c4c81601 6354
bb457bd9 6355static void
1d088dee 6356hoist_code (void)
bb457bd9 6357{
e0082a72 6358 basic_block bb, dominated;
c635a1ec
DB
6359 basic_block *domby;
6360 unsigned int domby_len;
6361 unsigned int i,j;
bb457bd9 6362 struct expr **index_map;
c4c81601 6363 struct expr *expr;
bb457bd9 6364
d55bc081 6365 sbitmap_vector_zero (hoist_exprs, last_basic_block);
bb457bd9
JL
6366
6367 /* Compute a mapping from expression number (`bitmap_index') to
6368 hash table entry. */
6369
703ad42b 6370 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
02280659
ZD
6371 for (i = 0; i < expr_hash_table.size; i++)
6372 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601 6373 index_map[expr->bitmap_index] = expr;
bb457bd9
JL
6374
6375 /* Walk over each basic block looking for potentially hoistable
6376 expressions, nothing gets hoisted from the entry block. */
e0082a72 6377 FOR_EACH_BB (bb)
bb457bd9
JL
6378 {
6379 int found = 0;
6380 int insn_inserted_p;
6381
d47cc544 6382 domby_len = get_dominated_by (CDI_DOMINATORS, bb, &domby);
bb457bd9
JL
6383 /* Examine each expression that is very busy at the exit of this
6384 block. These are the potentially hoistable expressions. */
e0082a72 6385 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
bb457bd9
JL
6386 {
6387 int hoistable = 0;
c4c81601 6388
c635a1ec
DB
6389 if (TEST_BIT (hoist_vbeout[bb->index], i)
6390 && TEST_BIT (transpout[bb->index], i))
bb457bd9
JL
6391 {
6392 /* We've found a potentially hoistable expression, now
6393 we look at every block BB dominates to see if it
6394 computes the expression. */
c635a1ec 6395 for (j = 0; j < domby_len; j++)
bb457bd9 6396 {
c635a1ec 6397 dominated = domby[j];
bb457bd9 6398 /* Ignore self dominance. */
c635a1ec 6399 if (bb == dominated)
bb457bd9 6400 continue;
bb457bd9
JL
6401 /* We've found a dominated block, now see if it computes
6402 the busy expression and whether or not moving that
6403 expression to the "beginning" of that block is safe. */
e0082a72 6404 if (!TEST_BIT (antloc[dominated->index], i))
bb457bd9
JL
6405 continue;
6406
6407 /* Note if the expression would reach the dominated block
589005ff 6408 unimpared if it was placed at the end of BB.
bb457bd9
JL
6409
6410 Keep track of how many times this expression is hoistable
6411 from a dominated block into BB. */
e0082a72 6412 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
bb457bd9
JL
6413 hoistable++;
6414 }
6415
ff7cc307 6416 /* If we found more than one hoistable occurrence of this
bb457bd9
JL
6417 expression, then note it in the bitmap of expressions to
6418 hoist. It makes no sense to hoist things which are computed
6419 in only one BB, and doing so tends to pessimize register
6420 allocation. One could increase this value to try harder
6421 to avoid any possible code expansion due to register
 6422		 allocation issues; however, experiments have shown that
6423 the vast majority of hoistable expressions are only movable
e0bb17a8 6424 from two successors, so raising this threshold is likely
bb457bd9
JL
6425 to nullify any benefit we get from code hoisting. */
6426 if (hoistable > 1)
6427 {
e0082a72 6428 SET_BIT (hoist_exprs[bb->index], i);
bb457bd9
JL
6429 found = 1;
6430 }
6431 }
6432 }
bb457bd9
JL
6433 /* If we found nothing to hoist, then quit now. */
6434 if (! found)
c635a1ec 6435 {
1d088dee 6436 free (domby);
bb457bd9 6437 continue;
c635a1ec 6438 }
bb457bd9
JL
6439
6440 /* Loop over all the hoistable expressions. */
e0082a72 6441 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
bb457bd9
JL
6442 {
6443 /* We want to insert the expression into BB only once, so
6444 note when we've inserted it. */
6445 insn_inserted_p = 0;
6446
6447 /* These tests should be the same as the tests above. */
e0082a72 6448 if (TEST_BIT (hoist_vbeout[bb->index], i))
bb457bd9
JL
6449 {
6450 /* We've found a potentially hoistable expression, now
6451 we look at every block BB dominates to see if it
6452 computes the expression. */
c635a1ec 6453 for (j = 0; j < domby_len; j++)
bb457bd9 6454 {
c635a1ec 6455 dominated = domby[j];
bb457bd9 6456 /* Ignore self dominance. */
c635a1ec 6457 if (bb == dominated)
bb457bd9
JL
6458 continue;
6459
6460 /* We've found a dominated block, now see if it computes
6461 the busy expression and whether or not moving that
6462 expression to the "beginning" of that block is safe. */
e0082a72 6463 if (!TEST_BIT (antloc[dominated->index], i))
bb457bd9
JL
6464 continue;
6465
6466 /* The expression is computed in the dominated block and
6467 it would be safe to compute it at the start of the
6468 dominated block. Now we have to determine if the
ff7cc307 6469 expression would reach the dominated block if it was
bb457bd9 6470 placed at the end of BB. */
e0082a72 6471 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
bb457bd9
JL
6472 {
6473 struct expr *expr = index_map[i];
6474 struct occr *occr = expr->antic_occr;
6475 rtx insn;
6476 rtx set;
6477
ff7cc307 6478 /* Find the right occurrence of this expression. */
e0082a72 6479		      while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
bb457bd9
JL
6480 occr = occr->next;
6481
6482 /* Should never happen. */
6483 if (!occr)
6484 abort ();
6485
6486 insn = occr->insn;
589005ff 6487
bb457bd9
JL
6488 set = single_set (insn);
6489 if (! set)
6490 abort ();
6491
6492 /* Create a pseudo-reg to store the result of reaching
6493 expressions into. Get the mode for the new pseudo
6494 from the mode of the original destination pseudo. */
6495 if (expr->reaching_reg == NULL)
6496 expr->reaching_reg
6497 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
6498
10d1bb36
JH
6499 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
6500 delete_insn (insn);
6501 occr->deleted_p = 1;
6502 if (!insn_inserted_p)
bb457bd9 6503 {
10d1bb36
JH
6504 insert_insn_end_bb (index_map[i], bb, 0);
6505 insn_inserted_p = 1;
bb457bd9
JL
6506 }
6507 }
6508 }
6509 }
6510 }
c635a1ec 6511 free (domby);
bb457bd9 6512 }
c4c81601 6513
8e42ace1 6514 free (index_map);
bb457bd9
JL
6515}
6516
6517/* Top level routine to perform one code hoisting (aka unification) pass
6518
cc2902df 6519 Return nonzero if a change was made. */
bb457bd9
JL
6520
6521static int
1d088dee 6522one_code_hoisting_pass (void)
bb457bd9
JL
6523{
6524 int changed = 0;
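  /* Note: hoist_code () does not currently report whether it changed
     anything, so CHANGED remains zero here.  */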
6525
02280659
ZD
6526 alloc_hash_table (max_cuid, &expr_hash_table, 0);
6527 compute_hash_table (&expr_hash_table);
bb457bd9 6528 if (gcse_file)
02280659 6529    dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);
c4c81601 6530
02280659 6531 if (expr_hash_table.n_elems > 0)
bb457bd9 6532 {
02280659 6533 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
bb457bd9
JL
6534 compute_code_hoist_data ();
6535 hoist_code ();
6536 free_code_hoist_mem ();
6537 }
c4c81601 6538
02280659 6539 free_hash_table (&expr_hash_table);
bb457bd9
JL
6540
6541 return changed;
6542}
a13d4ebf
AM
6543\f
6544/* Here we provide the things required to do store motion towards
6545 the exit. In order for this to be effective, gcse also needed to
 6546   be taught how to move a load when it is killed only by a store to itself.
6547
6548 int i;
6549 float a[10];
6550
6551 void foo(float scale)
6552 {
6553 for (i=0; i<10; i++)
6554 a[i] *= scale;
6555 }
6556
6557 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
589005ff
KH
 6558   the load out since it's live around the loop and stored at the bottom
6559 of the loop.
a13d4ebf 6560
589005ff 6561 The 'Load Motion' referred to and implemented in this file is
a13d4ebf
AM
6562 an enhancement to gcse which when using edge based lcm, recognizes
6563 this situation and allows gcse to move the load out of the loop.
6564
6565 Once gcse has hoisted the load, store motion can then push this
6566 load towards the exit, and we end up with no loads or stores of 'i'
6567 in the loop. */
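/* In effect (illustrative sketch, with `reg' standing for the
   reaching register), the function above becomes:

   void foo(float scale)
   {
     int reg = i;
     for (reg = 0; reg < 10; reg++)
       a[reg] *= scale;
     i = reg;
   }

   leaving the load of 'i' hoisted before the loop (and then dead)
   and the single store of 'i' pushed after it.  */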
6568
ff7cc307 6569/* This will search the ldst list for a matching expression. If it
a13d4ebf
AM
6570 doesn't find one, we create one and initialize it. */
6571
6572static struct ls_expr *
1d088dee 6573ldst_entry (rtx x)
a13d4ebf 6574{
b58b21d5 6575 int do_not_record_p = 0;
a13d4ebf 6576 struct ls_expr * ptr;
b58b21d5 6577 unsigned int hash;
a13d4ebf 6578
b58b21d5 6579 hash = hash_expr_1 (x, GET_MODE (x), & do_not_record_p);
a13d4ebf 6580
b58b21d5
RS
6581 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6582 if (ptr->hash_index == hash && expr_equiv_p (ptr->pattern, x))
6583 return ptr;
6584
6585 ptr = xmalloc (sizeof (struct ls_expr));
6586
6587 ptr->next = pre_ldst_mems;
6588 ptr->expr = NULL;
6589 ptr->pattern = x;
6590 ptr->pattern_regs = NULL_RTX;
6591 ptr->loads = NULL_RTX;
6592 ptr->stores = NULL_RTX;
6593 ptr->reaching_reg = NULL_RTX;
6594 ptr->invalid = 0;
6595 ptr->index = 0;
6596 ptr->hash_index = hash;
6597 pre_ldst_mems = ptr;
589005ff 6598
a13d4ebf
AM
6599 return ptr;
6600}
6601
6602/* Free up an individual ldst entry. */
6603
589005ff 6604static void
1d088dee 6605free_ldst_entry (struct ls_expr * ptr)
a13d4ebf 6606{
aaa4ca30
AJ
6607 free_INSN_LIST_list (& ptr->loads);
6608 free_INSN_LIST_list (& ptr->stores);
a13d4ebf
AM
6609
6610 free (ptr);
6611}
6612
6613/* Free up all memory associated with the ldst list. */
6614
6615static void
1d088dee 6616free_ldst_mems (void)
a13d4ebf 6617{
589005ff 6618 while (pre_ldst_mems)
a13d4ebf
AM
6619 {
6620 struct ls_expr * tmp = pre_ldst_mems;
6621
6622 pre_ldst_mems = pre_ldst_mems->next;
6623
6624 free_ldst_entry (tmp);
6625 }
6626
6627 pre_ldst_mems = NULL;
6628}
6629
6630/* Dump debugging info about the ldst list. */
6631
6632static void
1d088dee 6633print_ldst_list (FILE * file)
a13d4ebf
AM
6634{
6635 struct ls_expr * ptr;
6636
6637 fprintf (file, "LDST list: \n");
6638
6639 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6640 {
6641 fprintf (file, " Pattern (%3d): ", ptr->index);
6642
6643 print_rtl (file, ptr->pattern);
6644
6645 fprintf (file, "\n Loads : ");
6646
6647 if (ptr->loads)
6648 print_rtl (file, ptr->loads);
6649 else
6650 fprintf (file, "(nil)");
6651
6652 fprintf (file, "\n Stores : ");
6653
6654 if (ptr->stores)
6655 print_rtl (file, ptr->stores);
6656 else
6657 fprintf (file, "(nil)");
6658
6659 fprintf (file, "\n\n");
6660 }
6661
6662 fprintf (file, "\n");
6663}
6664
 6665/* Return the ls_expr entry for X in the list of ldst-only expressions,
      or NULL if there is no valid entry.  */
6666
6667static struct ls_expr *
1d088dee 6668find_rtx_in_ldst (rtx x)
a13d4ebf
AM
6669{
6670 struct ls_expr * ptr;
589005ff 6671
a13d4ebf
AM
6672 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6673 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
6674 return ptr;
6675
6676 return NULL;
6677}
6678
6679/* Assign each element of the list of mems a monotonically increasing value. */
6680
6681static int
1d088dee 6682enumerate_ldsts (void)
a13d4ebf
AM
6683{
6684 struct ls_expr * ptr;
6685 int n = 0;
6686
6687 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6688 ptr->index = n++;
6689
6690 return n;
6691}
6692
6693/* Return first item in the list. */
6694
6695static inline struct ls_expr *
1d088dee 6696first_ls_expr (void)
a13d4ebf
AM
6697{
6698 return pre_ldst_mems;
6699}
6700
0e8a66de 6701/* Return the next item in the list after the specified one. */
a13d4ebf
AM
6702
6703static inline struct ls_expr *
1d088dee 6704next_ls_expr (struct ls_expr * ptr)
a13d4ebf
AM
6705{
6706 return ptr->next;
6707}
6708\f
6709/* Load Motion for loads which only kill themselves. */
6710
 6711/* Return true if X is a simple MEM operation, with no registers or
6712 side effects. These are the types of loads we consider for the
6713 ld_motion list, otherwise we let the usual aliasing take care of it. */
6714
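/* For instance (illustrative RTL), a reference such as

     (mem:SI (symbol_ref:SI ("x")))

   passes the checks below, while a volatile or BLKmode MEM, one that
   may trap under -fnon-call-exceptions, one with side effects, or one
   mentioning the stack pointer does not.  */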
589005ff 6715static int
1d088dee 6716simple_mem (rtx x)
a13d4ebf
AM
6717{
6718 if (GET_CODE (x) != MEM)
6719 return 0;
589005ff 6720
a13d4ebf
AM
6721 if (MEM_VOLATILE_P (x))
6722 return 0;
589005ff 6723
a13d4ebf
AM
6724 if (GET_MODE (x) == BLKmode)
6725 return 0;
aaa4ca30 6726
47a3dae1
ZD
6727 /* If we are handling exceptions, we must be careful with memory references
6728 that may trap. If we are not, the behavior is undefined, so we may just
6729 continue. */
6730 if (flag_non_call_exceptions && may_trap_p (x))
98d3d336
RS
6731 return 0;
6732
47a3dae1
ZD
6733 if (side_effects_p (x))
6734 return 0;
589005ff 6735
47a3dae1
ZD
6736 /* Do not consider function arguments passed on stack. */
6737 if (reg_mentioned_p (stack_pointer_rtx, x))
6738 return 0;
6739
6740 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
6741 return 0;
6742
6743 return 1;
a13d4ebf
AM
6744}
6745
589005ff
KH
6746/* Make sure there isn't a buried reference in this pattern anywhere.
6747 If there is, invalidate the entry for it since we're not capable
 6748   of fixing it up just yet.  We have to be sure we know about ALL
a13d4ebf
AM
6749 loads since the aliasing code will allow all entries in the
 6750   ld_motion list to not-alias one another.  If we miss a load, we will get
589005ff 6751 the wrong value since gcse might common it and we won't know to
a13d4ebf
AM
6752 fix it up. */
6753
6754static void
1d088dee 6755invalidate_any_buried_refs (rtx x)
a13d4ebf
AM
6756{
6757 const char * fmt;
8e42ace1 6758 int i, j;
a13d4ebf
AM
6759 struct ls_expr * ptr;
6760
6761 /* Invalidate it in the list. */
6762 if (GET_CODE (x) == MEM && simple_mem (x))
6763 {
6764 ptr = ldst_entry (x);
6765 ptr->invalid = 1;
6766 }
6767
6768 /* Recursively process the insn. */
6769 fmt = GET_RTX_FORMAT (GET_CODE (x));
589005ff 6770
a13d4ebf
AM
6771 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6772 {
6773 if (fmt[i] == 'e')
6774 invalidate_any_buried_refs (XEXP (x, i));
6775 else if (fmt[i] == 'E')
6776 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6777 invalidate_any_buried_refs (XVECEXP (x, i, j));
6778 }
6779}
6780
4d3eb89a
HPN
6781/* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
6782 being defined as MEM loads and stores to symbols, with no side effects
6783 and no registers in the expression. For a MEM destination, we also
6784 check that the insn is still valid if we replace the destination with a
6785 REG, as is done in update_ld_motion_stores. If there are any uses/defs
6786 which don't match this criteria, they are invalidated and trimmed out
6787 later. */
a13d4ebf 6788
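/* For example (illustrative patterns), in

     (set (reg:SI 100) (mem:SI (symbol_ref:SI ("x"))))	; simple load
     (set (mem:SI (symbol_ref:SI ("x"))) (reg:SI 101))	; simple store

   both insns are recorded in the ls_expr entry for the MEM, whereas
   any other use of that MEM buried inside an insn invalidates it.  */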
589005ff 6789static void
1d088dee 6790compute_ld_motion_mems (void)
a13d4ebf
AM
6791{
6792 struct ls_expr * ptr;
e0082a72 6793 basic_block bb;
a13d4ebf 6794 rtx insn;
589005ff 6795
a13d4ebf
AM
6796 pre_ldst_mems = NULL;
6797
e0082a72 6798 FOR_EACH_BB (bb)
a13d4ebf 6799 {
a813c111
SB
6800 for (insn = BB_HEAD (bb);
6801 insn && insn != NEXT_INSN (BB_END (bb));
a13d4ebf
AM
6802 insn = NEXT_INSN (insn))
6803 {
735e8085 6804 if (INSN_P (insn))
a13d4ebf
AM
6805 {
6806 if (GET_CODE (PATTERN (insn)) == SET)
6807 {
6808 rtx src = SET_SRC (PATTERN (insn));
6809 rtx dest = SET_DEST (PATTERN (insn));
6810
6811 /* Check for a simple LOAD... */
6812 if (GET_CODE (src) == MEM && simple_mem (src))
6813 {
6814 ptr = ldst_entry (src);
6815 if (GET_CODE (dest) == REG)
6816 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
6817 else
6818 ptr->invalid = 1;
6819 }
6820 else
6821 {
6822 /* Make sure there isn't a buried load somewhere. */
6823 invalidate_any_buried_refs (src);
6824 }
589005ff 6825
a13d4ebf
AM
6826 /* Check for stores. Don't worry about aliased ones, they
6827 will block any movement we might do later. We only care
 6828		     about this exact pattern since this is the only
 6829		     circumstance in which we will ignore the aliasing info.  */
6830 if (GET_CODE (dest) == MEM && simple_mem (dest))
6831 {
6832 ptr = ldst_entry (dest);
589005ff 6833
f54104df 6834 if (GET_CODE (src) != MEM
4d3eb89a
HPN
6835 && GET_CODE (src) != ASM_OPERANDS
6836 /* Check for REG manually since want_to_gcse_p
6837 returns 0 for all REGs. */
6838 && (REG_P (src) || want_to_gcse_p (src)))
a13d4ebf
AM
6839 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6840 else
6841 ptr->invalid = 1;
6842 }
6843 }
6844 else
6845 invalidate_any_buried_refs (PATTERN (insn));
6846 }
6847 }
6848 }
6849}
6850
589005ff 6851/* Remove any references that have been either invalidated or are not in the
a13d4ebf
AM
6852 expression list for pre gcse. */
6853
6854static void
1d088dee 6855trim_ld_motion_mems (void)
a13d4ebf 6856{
b58b21d5
RS
6857 struct ls_expr * * last = & pre_ldst_mems;
6858 struct ls_expr * ptr = pre_ldst_mems;
a13d4ebf
AM
6859
6860 while (ptr != NULL)
6861 {
b58b21d5 6862 struct expr * expr;
589005ff 6863
a13d4ebf 6864 /* Delete if entry has been made invalid. */
b58b21d5 6865 if (! ptr->invalid)
a13d4ebf 6866 {
a13d4ebf 6867 /* Delete if we cannot find this mem in the expression list. */
b58b21d5 6868 unsigned int hash = ptr->hash_index % expr_hash_table.size;
589005ff 6869
b58b21d5
RS
6870 for (expr = expr_hash_table.table[hash];
6871 expr != NULL;
6872 expr = expr->next_same_hash)
6873 if (expr_equiv_p (expr->expr, ptr->pattern))
6874 break;
a13d4ebf
AM
6875 }
6876 else
b58b21d5
RS
6877 expr = (struct expr *) 0;
6878
6879 if (expr)
a13d4ebf
AM
6880 {
6881 /* Set the expression field if we are keeping it. */
a13d4ebf 6882 ptr->expr = expr;
b58b21d5 6883 last = & ptr->next;
a13d4ebf
AM
6884 ptr = ptr->next;
6885 }
b58b21d5
RS
6886 else
6887 {
6888 *last = ptr->next;
6889 free_ldst_entry (ptr);
6890 ptr = * last;
6891 }
a13d4ebf
AM
6892 }
6893
6894 /* Show the world what we've found. */
6895 if (gcse_file && pre_ldst_mems != NULL)
6896 print_ldst_list (gcse_file);
6897}
6898
6899/* This routine will take an expression which we are replacing with
6900 a reaching register, and update any stores that are needed if
6901 that expression is in the ld_motion list. Stores are updated by
a98ebe2e 6902 copying their SRC to the reaching register, and then storing
a13d4ebf
AM
 6903   the reaching register into the store location.  This keeps the
6904 correct value in the reaching register for the loads. */
6905
6906static void
1d088dee 6907update_ld_motion_stores (struct expr * expr)
a13d4ebf
AM
6908{
6909 struct ls_expr * mem_ptr;
6910
6911 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
6912 {
589005ff
KH
 6913      /* We can try to find just the REACHED stores, but it shouldn't
 6914	  matter if we set the reaching reg everywhere...  some might be
a13d4ebf
AM
6915 dead and should be eliminated later. */
6916
4d3eb89a
HPN
6917 /* We replace (set mem expr) with (set reg expr) (set mem reg)
6918 where reg is the reaching reg used in the load. We checked in
6919 compute_ld_motion_mems that we can replace (set mem expr) with
6920 (set reg expr) in that insn. */
a13d4ebf 6921 rtx list = mem_ptr->stores;
589005ff 6922
a13d4ebf
AM
6923 for ( ; list != NULL_RTX; list = XEXP (list, 1))
6924 {
6925 rtx insn = XEXP (list, 0);
6926 rtx pat = PATTERN (insn);
6927 rtx src = SET_SRC (pat);
6928 rtx reg = expr->reaching_reg;
c57718d3 6929 rtx copy, new;
a13d4ebf
AM
6930
6931 /* If we've already copied it, continue. */
6932 if (expr->reaching_reg == src)
6933 continue;
589005ff 6934
a13d4ebf
AM
6935 if (gcse_file)
6936 {
6937 fprintf (gcse_file, "PRE: store updated with reaching reg ");
6938 print_rtl (gcse_file, expr->reaching_reg);
6939 fprintf (gcse_file, ":\n ");
6940 print_inline_rtx (gcse_file, insn, 8);
6941 fprintf (gcse_file, "\n");
6942 }
589005ff 6943
47a3dae1 6944	  copy = gen_move_insn (reg, copy_rtx (SET_SRC (pat)));
c57718d3
RK
6945 new = emit_insn_before (copy, insn);
6946 record_one_set (REGNO (reg), new);
a13d4ebf
AM
6947 SET_SRC (pat) = reg;
6948
 6949	  /* Un-recognize this pattern since it's probably different now.  */
6950 INSN_CODE (insn) = -1;
6951 gcse_create_count++;
6952 }
6953 }
6954}
6955\f
6956/* Store motion code. */
6957
47a3dae1
ZD
6958#define ANTIC_STORE_LIST(x) ((x)->loads)
6959#define AVAIL_STORE_LIST(x) ((x)->stores)
6960#define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
6961
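/* Note that store motion reuses the `loads', `stores' and
   `reaching_reg' fields of struct ls_expr for its own bookkeeping,
   through the three macros above.  */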
589005ff 6962/* This is used to communicate the target bitvector we want to use in the
aaa4ca30 6963 reg_set_info routine when called via the note_stores mechanism. */
47a3dae1
ZD
6964static int * regvec;
6965
6966/* And current insn, for the same routine. */
6967static rtx compute_store_table_current_insn;
aaa4ca30 6968
a13d4ebf
AM
6969/* Used in computing the reverse edge graph bit vectors. */
6970static sbitmap * st_antloc;
6971
6972/* Global holding the number of store expressions we are dealing with. */
6973static int num_stores;
6974
01c43039
RE
6975/* Checks to set if we need to mark a register set. Called from
6976 note_stores. */
a13d4ebf 6977
aaa4ca30 6978static void
1d088dee 6979reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED,
01c43039 6980 void *data)
a13d4ebf 6981{
01c43039
RE
6982 sbitmap bb_reg = data;
6983
aaa4ca30
AJ
6984 if (GET_CODE (dest) == SUBREG)
6985 dest = SUBREG_REG (dest);
adfcce61 6986
aaa4ca30 6987 if (GET_CODE (dest) == REG)
01c43039
RE
6988 {
6989 regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
6990 if (bb_reg)
6991 SET_BIT (bb_reg, REGNO (dest));
6992 }
6993}
6994
6995/* Clear any mark that says that this insn sets dest. Called from
6996 note_stores. */
6997
6998static void
6999reg_clear_last_set (rtx dest, rtx setter ATTRIBUTE_UNUSED,
7000 void *data)
7001{
7002 int *dead_vec = data;
7003
7004 if (GET_CODE (dest) == SUBREG)
7005 dest = SUBREG_REG (dest);
7006
 7007  if (GET_CODE (dest) == REG
 7008      && dead_vec[REGNO (dest)] == INSN_UID (compute_store_table_current_insn))
7009 dead_vec[REGNO (dest)] = 0;
a13d4ebf
AM
7010}
7011
47a3dae1
ZD
 7012/* Return false if any of the registers in list X are killed, i.e.
 7013   marked as set, in the array REGS_SET.  */
1d088dee 7014
47a3dae1 7015static bool
1d088dee 7016store_ops_ok (rtx x, int *regs_set)
47a3dae1
ZD
7017{
7018 rtx reg;
7019
7020 for (; x; x = XEXP (x, 1))
7021 {
7022 reg = XEXP (x, 0);
 7023      if (regs_set[REGNO (reg)])
1d088dee 7024 return false;
47a3dae1 7025 }
a13d4ebf 7026
47a3dae1
ZD
7027 return true;
7028}
7029
7030/* Returns a list of registers mentioned in X. */
7031static rtx
1d088dee 7032extract_mentioned_regs (rtx x)
47a3dae1
ZD
7033{
7034 return extract_mentioned_regs_helper (x, NULL_RTX);
7035}
7036
7037/* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
7038 registers. */
7039static rtx
1d088dee 7040extract_mentioned_regs_helper (rtx x, rtx accum)
a13d4ebf
AM
7041{
7042 int i;
7043 enum rtx_code code;
7044 const char * fmt;
7045
7046 /* Repeat is used to turn tail-recursion into iteration. */
7047 repeat:
7048
7049 if (x == 0)
47a3dae1 7050 return accum;
a13d4ebf
AM
7051
7052 code = GET_CODE (x);
7053 switch (code)
7054 {
7055 case REG:
47a3dae1 7056 return alloc_EXPR_LIST (0, x, accum);
a13d4ebf
AM
7057
7058 case MEM:
7059 x = XEXP (x, 0);
7060 goto repeat;
7061
7062 case PRE_DEC:
7063 case PRE_INC:
7064 case POST_DEC:
7065 case POST_INC:
47a3dae1
ZD
7066 /* We do not run this function with arguments having side effects. */
7067 abort ();
a13d4ebf
AM
7068
7069 case PC:
7070 case CC0: /*FIXME*/
7071 case CONST:
7072 case CONST_INT:
7073 case CONST_DOUBLE:
69ef87e2 7074 case CONST_VECTOR:
a13d4ebf
AM
7075 case SYMBOL_REF:
7076 case LABEL_REF:
7077 case ADDR_VEC:
7078 case ADDR_DIFF_VEC:
47a3dae1 7079 return accum;
a13d4ebf
AM
7080
7081 default:
7082 break;
7083 }
7084
7085 i = GET_RTX_LENGTH (code) - 1;
7086 fmt = GET_RTX_FORMAT (code);
589005ff 7087
a13d4ebf
AM
7088 for (; i >= 0; i--)
7089 {
7090 if (fmt[i] == 'e')
7091 {
7092 rtx tem = XEXP (x, i);
7093
7094 /* If we are about to do the last recursive call
47a3dae1 7095 needed at this level, change it into iteration. */
a13d4ebf
AM
7096 if (i == 0)
7097 {
7098 x = tem;
7099 goto repeat;
7100 }
589005ff 7101
47a3dae1 7102 accum = extract_mentioned_regs_helper (tem, accum);
a13d4ebf
AM
7103 }
7104 else if (fmt[i] == 'E')
7105 {
7106 int j;
589005ff 7107
a13d4ebf 7108 for (j = 0; j < XVECLEN (x, i); j++)
47a3dae1 7109 accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
a13d4ebf
AM
7110 }
7111 }
7112
47a3dae1 7113 return accum;
a13d4ebf
AM
7114}
7115
47a3dae1
ZD
 7116/* Determine whether INSN is a MEM store pattern that we will consider moving.
7117 REGS_SET_BEFORE is bitmap of registers set before (and including) the
7118 current insn, REGS_SET_AFTER is bitmap of registers set after (and
7119 including) the insn in this basic block. We must be passing through BB from
7120 head to end, as we are using this fact to speed things up.
1d088dee 7121
47a3dae1
ZD
7122 The results are stored this way:
7123
7124 -- the first anticipatable expression is added into ANTIC_STORE_LIST
7125 -- if the processed expression is not anticipatable, NULL_RTX is added
7126 there instead, so that we can use it as indicator that no further
7127 expression of this type may be anticipatable
7128 -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
7129 consequently, all of them but this head are dead and may be deleted.
7130 -- if the expression is not available, the insn due to that it fails to be
7131 available is stored in reaching_reg.
7132
 7133   Things are complicated a bit by the fact that there may already be stores
 7134   to the same MEM from other blocks; also, the caller must take care of the
e0bb17a8 7135   necessary cleanup of the temporary markers after the end of the basic block.
47a3dae1 7136 */
a13d4ebf
AM
7137
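/* For example (illustrative), if a basic block contains two stores to
   the same MEM,

     insn1: (set (mem:SI (symbol_ref:SI ("x"))) (reg:SI 100))
     insn2: (set (mem:SI (symbol_ref:SI ("x"))) (reg:SI 101))

   insn1 heads ANTIC_STORE_LIST for the entry (the first anticipatable
   store) and insn2 heads AVAIL_STORE_LIST (the store reaching the end
   of the block); the earlier available store is dead, as noted above.  */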
7138static void
1d088dee 7139find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
a13d4ebf
AM
7140{
7141 struct ls_expr * ptr;
47a3dae1
ZD
7142 rtx dest, set, tmp;
7143 int check_anticipatable, check_available;
7144 basic_block bb = BLOCK_FOR_INSN (insn);
a13d4ebf 7145
47a3dae1
ZD
7146 set = single_set (insn);
7147 if (!set)
a13d4ebf
AM
7148 return;
7149
47a3dae1 7150 dest = SET_DEST (set);
589005ff 7151
a13d4ebf
AM
7152 if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
7153 || GET_MODE (dest) == BLKmode)
aaa4ca30
AJ
7154 return;
7155
47a3dae1
ZD
7156 if (side_effects_p (dest))
7157 return;
aaa4ca30 7158
47a3dae1
ZD
7159 /* If we are handling exceptions, we must be careful with memory references
7160 that may trap. If we are not, the behavior is undefined, so we may just
7161 continue. */
94f24ddc 7162 if (flag_non_call_exceptions && may_trap_p (dest))
47a3dae1 7163 return;
1d088dee 7164
a13d4ebf 7165 ptr = ldst_entry (dest);
47a3dae1
ZD
7166 if (!ptr->pattern_regs)
7167 ptr->pattern_regs = extract_mentioned_regs (dest);
7168
7169 /* Do not check for anticipatability if we either found one anticipatable
7170 store already, or tested for one and found out that it was killed. */
7171 check_anticipatable = 0;
7172 if (!ANTIC_STORE_LIST (ptr))
7173 check_anticipatable = 1;
7174 else
7175 {
7176 tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
7177 if (tmp != NULL_RTX
7178 && BLOCK_FOR_INSN (tmp) != bb)
7179 check_anticipatable = 1;
7180 }
7181 if (check_anticipatable)
7182 {
7183 if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
7184 tmp = NULL_RTX;
7185 else
7186 tmp = insn;
7187 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
7188 ANTIC_STORE_LIST (ptr));
7189 }
a13d4ebf 7190
e0bb17a8 7191  /* It is not necessary to check whether the store is available if we did
47a3dae1
ZD
7192 it successfully before; if we failed before, do not bother to check
7193 until we reach the insn that caused us to fail. */
7194 check_available = 0;
7195 if (!AVAIL_STORE_LIST (ptr))
7196 check_available = 1;
7197 else
7198 {
7199 tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
7200 if (BLOCK_FOR_INSN (tmp) != bb)
7201 check_available = 1;
7202 }
7203 if (check_available)
7204 {
 7205      /* Check that we have already reached the insn at which the check
 7206	  failed last time.  */
7207 if (LAST_AVAIL_CHECK_FAILURE (ptr))
7208 {
a813c111 7209 for (tmp = BB_END (bb);
47a3dae1
ZD
7210 tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
7211 tmp = PREV_INSN (tmp))
7212 continue;
7213 if (tmp == insn)
7214 check_available = 0;
7215 }
7216 else
7217 check_available = store_killed_after (dest, ptr->pattern_regs, insn,
7218 bb, regs_set_after,
7219 &LAST_AVAIL_CHECK_FAILURE (ptr));
7220 }
7221 if (!check_available)
7222 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
7223}
1d088dee 7224
47a3dae1 7225/* Find available and anticipatable stores. */
a13d4ebf
AM
7226
7227static int
1d088dee 7228compute_store_table (void)
a13d4ebf 7229{
e0082a72
ZD
7230 int ret;
7231 basic_block bb;
aaa4ca30 7232 unsigned regno;
47a3dae1
ZD
7233 rtx insn, pat, tmp;
7234 int *last_set_in, *already_set;
7235 struct ls_expr * ptr, **prev_next_ptr_ptr;
aaa4ca30 7236
a13d4ebf
AM
7237 max_gcse_regno = max_reg_num ();
7238
703ad42b 7239 reg_set_in_block = sbitmap_vector_alloc (last_basic_block,
aaa4ca30 7240 max_gcse_regno);
d55bc081 7241 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
a13d4ebf 7242 pre_ldst_mems = 0;
01c43039 7243 last_set_in = xcalloc (max_gcse_regno, sizeof (int));
47a3dae1 7244 already_set = xmalloc (sizeof (int) * max_gcse_regno);
aaa4ca30 7245
a13d4ebf 7246 /* Find all the stores we care about. */
e0082a72 7247 FOR_EACH_BB (bb)
a13d4ebf 7248 {
47a3dae1 7249 /* First compute the registers set in this block. */
47a3dae1
ZD
7250 regvec = last_set_in;
7251
a813c111
SB
7252 for (insn = BB_HEAD (bb);
7253 insn != NEXT_INSN (BB_END (bb));
47a3dae1
ZD
7254 insn = NEXT_INSN (insn))
7255 {
7256 if (! INSN_P (insn))
7257 continue;
7258
7259 if (GET_CODE (insn) == CALL_INSN)
7260 {
7261 bool clobbers_all = false;
7262#ifdef NON_SAVING_SETJMP
7263 if (NON_SAVING_SETJMP
7264 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7265 clobbers_all = true;
7266#endif
7267
7268 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7269 if (clobbers_all
7270 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
01c43039
RE
7271 {
7272 last_set_in[regno] = INSN_UID (insn);
7273 SET_BIT (reg_set_in_block[bb->index], regno);
7274 }
47a3dae1
ZD
7275 }
7276
7277 pat = PATTERN (insn);
7278 compute_store_table_current_insn = insn;
01c43039 7279 note_stores (pat, reg_set_info, reg_set_in_block[bb->index]);
47a3dae1
ZD
7280 }
7281
47a3dae1
ZD
7282 /* Now find the stores. */
7283 memset (already_set, 0, sizeof (int) * max_gcse_regno);
7284 regvec = already_set;
a813c111
SB
7285 for (insn = BB_HEAD (bb);
7286 insn != NEXT_INSN (BB_END (bb));
47a3dae1 7287 insn = NEXT_INSN (insn))
a13d4ebf 7288 {
19652adf 7289 if (! INSN_P (insn))
a13d4ebf
AM
7290 continue;
7291
aaa4ca30
AJ
7292 if (GET_CODE (insn) == CALL_INSN)
7293 {
19652adf 7294 bool clobbers_all = false;
589005ff 7295#ifdef NON_SAVING_SETJMP
19652adf
ZW
7296 if (NON_SAVING_SETJMP
7297 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7298 clobbers_all = true;
7299#endif
7300
aaa4ca30 7301 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
19652adf
ZW
7302 if (clobbers_all
7303 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
47a3dae1 7304 already_set[regno] = 1;
aaa4ca30 7305 }
589005ff 7306
a13d4ebf 7307 pat = PATTERN (insn);
aaa4ca30 7308 note_stores (pat, reg_set_info, NULL);
589005ff 7309
a13d4ebf 7310 /* Now that we've marked regs, look for stores. */
47a3dae1
ZD
7311 find_moveable_store (insn, already_set, last_set_in);
7312
7313 /* Unmark regs that are no longer set. */
01c43039
RE
7314 compute_store_table_current_insn = insn;
7315 note_stores (pat, reg_clear_last_set, last_set_in);
7316 if (GET_CODE (insn) == CALL_INSN)
7317 {
7318 bool clobbers_all = false;
7319#ifdef NON_SAVING_SETJMP
7320 if (NON_SAVING_SETJMP
7321 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7322 clobbers_all = true;
7323#endif
7324
7325 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7326 if ((clobbers_all
7327 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7328 && last_set_in[regno] == INSN_UID (insn))
7329 last_set_in[regno] = 0;
7330 }
47a3dae1
ZD
7331 }
7332
01c43039
RE
7333#ifdef ENABLE_CHECKING
7334 /* last_set_in should now be all-zero. */
7335 for (regno = 0; regno < max_gcse_regno; regno++)
7336 if (last_set_in[regno] != 0)
7337 abort ();
7338#endif
7339
47a3dae1
ZD
7340 /* Clear temporary marks. */
7341 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7342 {
 7343	  LAST_AVAIL_CHECK_FAILURE (ptr) = NULL_RTX;
7344 if (ANTIC_STORE_LIST (ptr)
7345 && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
7346 ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
7347 }
7348 }
7349
7350 /* Remove the stores that are not available anywhere, as there will
7351 be no opportunity to optimize them. */
7352 for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
7353 ptr != NULL;
7354 ptr = *prev_next_ptr_ptr)
7355 {
7356 if (!AVAIL_STORE_LIST (ptr))
7357 {
7358 *prev_next_ptr_ptr = ptr->next;
7359 free_ldst_entry (ptr);
a13d4ebf 7360 }
47a3dae1
ZD
7361 else
7362 prev_next_ptr_ptr = &ptr->next;
a13d4ebf
AM
7363 }
7364
7365 ret = enumerate_ldsts ();
589005ff 7366
a13d4ebf
AM
7367 if (gcse_file)
7368 {
47a3dae1 7369 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
a13d4ebf
AM
7370 print_ldst_list (gcse_file);
7371 }
589005ff 7372
47a3dae1
ZD
7373 free (last_set_in);
7374 free (already_set);
a13d4ebf
AM
7375 return ret;
7376}
7377
3b14e3af
ZD
7378/* Check to see if the load X is aliased with STORE_PATTERN.
7379 AFTER is true if we are checking the case when STORE_PATTERN occurs
 7380   after X.  */
a13d4ebf 7381
47a3dae1 7382static bool
3b14e3af 7383load_kills_store (rtx x, rtx store_pattern, int after)
a13d4ebf 7384{
3b14e3af
ZD
7385 if (after)
7386 return anti_dependence (x, store_pattern);
7387 else
7388 return true_dependence (store_pattern, GET_MODE (store_pattern), x,
7389 rtx_addr_varies_p);
a13d4ebf
AM
7390}
7391
589005ff 7392/* Go through the entire insn X, looking for any loads which might alias
3b14e3af
ZD
7393 STORE_PATTERN. Return true if found.
7394 AFTER is true if we are checking the case when STORE_PATTERN occurs
7395 after the insn X. */
a13d4ebf 7396
47a3dae1 7397static bool
3b14e3af 7398find_loads (rtx x, rtx store_pattern, int after)
a13d4ebf
AM
7399{
7400 const char * fmt;
8e42ace1 7401 int i, j;
47a3dae1 7402 int ret = false;
a13d4ebf 7403
24a28584 7404 if (!x)
47a3dae1 7405 return false;
24a28584 7406
589005ff 7407 if (GET_CODE (x) == SET)
a13d4ebf
AM
7408 x = SET_SRC (x);
7409
7410 if (GET_CODE (x) == MEM)
7411 {
3b14e3af 7412 if (load_kills_store (x, store_pattern, after))
47a3dae1 7413 return true;
a13d4ebf
AM
7414 }
7415
7416 /* Recursively process the insn. */
7417 fmt = GET_RTX_FORMAT (GET_CODE (x));
589005ff 7418
a13d4ebf
AM
7419 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
7420 {
7421 if (fmt[i] == 'e')
3b14e3af 7422 ret |= find_loads (XEXP (x, i), store_pattern, after);
a13d4ebf
AM
7423 else if (fmt[i] == 'E')
7424 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3b14e3af 7425 ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
a13d4ebf
AM
7426 }
7427 return ret;
7428}
7429
589005ff 7430/* Check if INSN kills the store pattern X (is aliased with it).
3b14e3af
ZD
7431 AFTER is true if we are checking the case when store X occurs
 7432   after the insn.  Return true if it does.  */
a13d4ebf 7433
47a3dae1 7434static bool
3b14e3af 7435store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
a13d4ebf 7436{
d088acea 7437 rtx reg, base, note;
94f24ddc 7438
735e8085 7439 if (!INSN_P (insn))
47a3dae1 7440 return false;
589005ff 7441
a13d4ebf
AM
7442 if (GET_CODE (insn) == CALL_INSN)
7443 {
1218665b
JJ
7444 /* A normal or pure call might read from pattern,
7445 but a const call will not. */
47a3dae1
ZD
7446 if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
7447 return true;
7448
94f24ddc
ZD
7449 /* But even a const call reads its parameters. Check whether the
7450 base of some of registers used in mem is stack pointer. */
7451 for (reg = x_regs; reg; reg = XEXP (reg, 1))
7452 {
bc083e18 7453 base = find_base_term (XEXP (reg, 0));
94f24ddc
ZD
7454 if (!base
7455 || (GET_CODE (base) == ADDRESS
7456 && GET_MODE (base) == Pmode
7457 && XEXP (base, 0) == stack_pointer_rtx))
7458 return true;
7459 }
47a3dae1
ZD
7460
7461 return false;
a13d4ebf 7462 }
589005ff 7463
a13d4ebf
AM
7464 if (GET_CODE (PATTERN (insn)) == SET)
7465 {
7466 rtx pat = PATTERN (insn);
3b14e3af
ZD
7467 rtx dest = SET_DEST (pat);
7468
7469 if (GET_CODE (dest) == SIGN_EXTRACT
7470 || GET_CODE (dest) == ZERO_EXTRACT)
7471 dest = XEXP (dest, 0);
7472
a13d4ebf 7473 /* Check for memory stores to aliased objects. */
3b14e3af
ZD
7474 if (GET_CODE (dest) == MEM
7475 && !expr_equiv_p (dest, x))
7476 {
7477 if (after)
7478 {
7479 if (output_dependence (dest, x))
7480 return true;
7481 }
7482 else
7483 {
7484 if (output_dependence (x, dest))
7485 return true;
7486 }
7487 }
d088acea
ZD
7488 if (find_loads (SET_SRC (pat), x, after))
7489 return true;
a13d4ebf 7490 }
d088acea
ZD
7491 else if (find_loads (PATTERN (insn), x, after))
7492 return true;
7493
7494 /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
7495 location aliased with X, then this insn kills X. */
7496 note = find_reg_equal_equiv_note (insn);
7497 if (! note)
7498 return false;
7499 note = XEXP (note, 0);
7500
7501 /* However, if the note represents a must alias rather than a may
7502 alias relationship, then it does not kill X. */
7503 if (expr_equiv_p (note, x))
7504 return false;
7505
7506 /* See if there are any aliased loads in the note. */
7507 return find_loads (note, x, after);
a13d4ebf
AM
7508}
7509
47a3dae1
ZD
7510/* Returns true if the expression X is loaded or clobbered on or after INSN
7511 within basic block BB. REGS_SET_AFTER is bitmap of registers set in
7512 or after the insn. X_REGS is list of registers mentioned in X. If the store
 7513   is killed, return in FAIL_INSN the last insn in which that happens.  */
a13d4ebf 7514
47a3dae1 7515static bool
1d088dee
AJ
7516store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
7517 int *regs_set_after, rtx *fail_insn)
a13d4ebf 7518{
a813c111 7519 rtx last = BB_END (bb), act;
aaa4ca30 7520
47a3dae1 7521 if (!store_ops_ok (x_regs, regs_set_after))
1d088dee 7522 {
47a3dae1
ZD
7523 /* We do not know where it will happen. */
7524 if (fail_insn)
7525 *fail_insn = NULL_RTX;
7526 return true;
7527 }
a13d4ebf 7528
47a3dae1
ZD
7529 /* Scan from the end, so that fail_insn is determined correctly. */
7530 for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
3b14e3af 7531 if (store_killed_in_insn (x, x_regs, act, false))
47a3dae1
ZD
7532 {
7533 if (fail_insn)
7534 *fail_insn = act;
7535 return true;
7536 }
589005ff 7537
47a3dae1 7538 return false;
a13d4ebf 7539}
1d088dee 7540
47a3dae1
ZD
7541/* Returns true if the expression X is loaded or clobbered on or before INSN
7542 within basic block BB. X_REGS is list of registers mentioned in X.
7543 REGS_SET_BEFORE is bitmap of registers set before or in this insn. */
7544static bool
1d088dee
AJ
7545store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb,
7546 int *regs_set_before)
a13d4ebf 7547{
a813c111 7548 rtx first = BB_HEAD (bb);
a13d4ebf 7549
47a3dae1
ZD
7550 if (!store_ops_ok (x_regs, regs_set_before))
7551 return true;
a13d4ebf 7552
47a3dae1 7553 for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
3b14e3af 7554 if (store_killed_in_insn (x, x_regs, insn, true))
47a3dae1 7555 return true;
589005ff 7556
47a3dae1 7557 return false;
a13d4ebf 7558}
1d088dee 7559
47a3dae1
ZD
 7560/* Fill in the available, anticipatable, transparent and kill vectors,
 7561   based on the lists of available and anticipatable stores.  */
a13d4ebf 7562static void
1d088dee 7563build_store_vectors (void)
a13d4ebf 7564{
47a3dae1
ZD
7565 basic_block bb;
7566 int *regs_set_in_block;
a13d4ebf
AM
7567 rtx insn, st;
7568 struct ls_expr * ptr;
47a3dae1 7569 unsigned regno;
a13d4ebf
AM
7570
7571 /* Build the gen_vector. This is any store in the table which is not killed
7572 by aliasing later in its block. */
703ad42b 7573 ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores);
d55bc081 7574 sbitmap_vector_zero (ae_gen, last_basic_block);
a13d4ebf 7575
703ad42b 7576 st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
d55bc081 7577 sbitmap_vector_zero (st_antloc, last_basic_block);
aaa4ca30 7578
a13d4ebf 7579 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
589005ff 7580 {
47a3dae1 7581 for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
a13d4ebf
AM
7582 {
7583 insn = XEXP (st, 0);
e2d2ed72 7584 bb = BLOCK_FOR_INSN (insn);
589005ff 7585
47a3dae1
ZD
7586 /* If we've already seen an available expression in this block,
 7587	     we can delete this one (it occurs earlier in the block).  We'll
7588 copy the SRC expression to an unused register in case there
7589 are any side effects. */
7590 if (TEST_BIT (ae_gen[bb->index], ptr->index))
a13d4ebf 7591 {
47a3dae1
ZD
7592 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
7593 if (gcse_file)
7594 fprintf (gcse_file, "Removing redundant store:\n");
d088acea 7595 replace_store_insn (r, XEXP (st, 0), bb, ptr);
47a3dae1 7596 continue;
a13d4ebf 7597 }
47a3dae1 7598 SET_BIT (ae_gen[bb->index], ptr->index);
a13d4ebf 7599 }
589005ff 7600
47a3dae1
ZD
7601 for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
7602 {
7603 insn = XEXP (st, 0);
7604 bb = BLOCK_FOR_INSN (insn);
7605 SET_BIT (st_antloc[bb->index], ptr->index);
7606 }
a13d4ebf 7607 }
589005ff 7608
703ad42b 7609 ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
d55bc081 7610 sbitmap_vector_zero (ae_kill, last_basic_block);
a13d4ebf 7611
703ad42b 7612 transp = sbitmap_vector_alloc (last_basic_block, num_stores);
d55bc081 7613 sbitmap_vector_zero (transp, last_basic_block);
47a3dae1 7614 regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);
a13d4ebf 7615
47a3dae1
ZD
7616 FOR_EACH_BB (bb)
7617 {
7618 for (regno = 0; regno < max_gcse_regno; regno++)
7619 regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);
7620
7621 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7622 {
a813c111 7623 if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
47a3dae1
ZD
7624 bb, regs_set_in_block, NULL))
7625 {
e0bb17a8 7626 /* It should not be necessary to consider the expression
47a3dae1
ZD
7627 killed if it is both anticipatable and available. */
7628 if (!TEST_BIT (st_antloc[bb->index], ptr->index)
7629 || !TEST_BIT (ae_gen[bb->index], ptr->index))
7630 SET_BIT (ae_kill[bb->index], ptr->index);
1d088dee
AJ
7631 }
7632 else
7633 SET_BIT (transp[bb->index], ptr->index);
7634 }
47a3dae1
ZD
7635 }
7636
7637 free (regs_set_in_block);
aaa4ca30 7638
589005ff 7639 if (gcse_file)
aaa4ca30 7640 {
d55bc081
ZD
7641 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
7642 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
7643 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
7644 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
a13d4ebf
AM
7645 }
7646}
7647
fbe5a4a6 7648/* Insert an instruction at the beginning of a basic block, and update
a813c111 7649 the BB_HEAD if needed. */
a13d4ebf 7650
589005ff 7651static void
1d088dee 7652insert_insn_start_bb (rtx insn, basic_block bb)
a13d4ebf
AM
7653{
7654 /* Insert at start of successor block. */
a813c111
SB
7655 rtx prev = PREV_INSN (BB_HEAD (bb));
7656 rtx before = BB_HEAD (bb);
a13d4ebf
AM
7657 while (before != 0)
7658 {
7659 if (GET_CODE (before) != CODE_LABEL
7660 && (GET_CODE (before) != NOTE
7661 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
7662 break;
7663 prev = before;
a813c111 7664 if (prev == BB_END (bb))
a13d4ebf
AM
7665 break;
7666 before = NEXT_INSN (before);
7667 }
7668
7669 insn = emit_insn_after (insn, prev);
7670
a13d4ebf
AM
7671 if (gcse_file)
7672 {
7673 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
0b17ab2f 7674 bb->index);
a13d4ebf
AM
7675 print_inline_rtx (gcse_file, insn, 6);
7676 fprintf (gcse_file, "\n");
7677 }
7678}
7679
7680/* This routine will insert a store on an edge. EXPR is the ldst entry for
cc2902df 7681 the memory reference, and E is the edge to insert it on. Returns nonzero
a13d4ebf
AM
7682 if an edge insertion was performed. */
7683
7684static int
1d088dee 7685insert_store (struct ls_expr * expr, edge e)
a13d4ebf
AM
7686{
7687 rtx reg, insn;
e2d2ed72 7688 basic_block bb;
a13d4ebf
AM
7689 edge tmp;
7690
 7691  /* We did all the deletes before this insert, so if we didn't delete a
7692 store, then we haven't set the reaching reg yet either. */
7693 if (expr->reaching_reg == NULL_RTX)
7694 return 0;
7695
a0c8285b
JH
7696 if (e->flags & EDGE_FAKE)
7697 return 0;
7698
a13d4ebf 7699 reg = expr->reaching_reg;
47a3dae1 7700 insn = gen_move_insn (copy_rtx (expr->pattern), reg);
589005ff 7701
a13d4ebf
AM
7702 /* If we are inserting this expression on ALL predecessor edges of a BB,
7703 insert it at the start of the BB, and reset the insert bits on the other
ff7cc307 7704     edges so we don't try to insert it there as well.  */
e2d2ed72 7705 bb = e->dest;
a13d4ebf 7706 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
3f2eae23 7707 if (!(tmp->flags & EDGE_FAKE))
a0c8285b
JH
7708 {
7709 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7710 if (index == EDGE_INDEX_NO_EDGE)
7711 abort ();
7712 if (! TEST_BIT (pre_insert_map[index], expr->index))
7713 break;
7714 }
a13d4ebf
AM
7715
7716 /* If tmp is NULL, we found an insertion on every edge, blank the
7717 insertion vector for these edges, and insert at the start of the BB. */
e2d2ed72 7718 if (!tmp && bb != EXIT_BLOCK_PTR)
a13d4ebf
AM
7719 {
7720 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7721 {
7722 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7723 RESET_BIT (pre_insert_map[index], expr->index);
7724 }
7725 insert_insn_start_bb (insn, bb);
7726 return 0;
7727 }
589005ff 7728
a13d4ebf
AM
7729 /* We can't insert on this edge, so we'll insert at the head of the
 7730     successor block.  See Morgan, sec 10.5.  */
7731 if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
7732 {
7733 insert_insn_start_bb (insn, bb);
7734 return 0;
7735 }
7736
7737 insert_insn_on_edge (insn, e);
589005ff 7738
a13d4ebf
AM
7739 if (gcse_file)
7740 {
7741 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
0b17ab2f 7742 e->src->index, e->dest->index);
a13d4ebf
AM
7743 print_inline_rtx (gcse_file, insn, 6);
7744 fprintf (gcse_file, "\n");
7745 }
589005ff 7746
a13d4ebf
AM
7747 return 1;
7748}
7749
d088acea
ZD
7750/* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
7751 memory location in SMEXPR set in basic block BB.
7752
7753 This could be rather expensive. */
7754
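/* (The walk below is an iterative depth-first search over successor
   edges, using an explicit stack and a visited bitmap; it stops at the
   exit block, at already-visited blocks, and at blocks where the store
   is killed.)  */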
7755static void
7756remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
7757{
7758 edge *stack = xmalloc (sizeof (edge) * n_basic_blocks), act;
7759 sbitmap visited = sbitmap_alloc (last_basic_block);
7760 int stack_top = 0;
7761 rtx last, insn, note;
7762 rtx mem = smexpr->pattern;
7763
7764 sbitmap_zero (visited);
7765 act = bb->succ;
7766
7767 while (1)
7768 {
7769 if (!act)
7770 {
7771 if (!stack_top)
7772 {
7773 free (stack);
7774 sbitmap_free (visited);
7775 return;
7776 }
7777 act = stack[--stack_top];
7778 }
7779 bb = act->dest;
7780
7781 if (bb == EXIT_BLOCK_PTR
7782 || TEST_BIT (visited, bb->index)
7783 || TEST_BIT (ae_kill[bb->index], smexpr->index))
7784 {
7785 act = act->succ_next;
7786 continue;
7787 }
7788 SET_BIT (visited, bb->index);
7789
7790 if (TEST_BIT (st_antloc[bb->index], smexpr->index))
7791 {
7792 for (last = ANTIC_STORE_LIST (smexpr);
7793 BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
7794 last = XEXP (last, 1))
7795 continue;
7796 last = XEXP (last, 0);
7797 }
7798 else
a813c111 7799 last = NEXT_INSN (BB_END (bb));
d088acea 7800
a813c111 7801 for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
d088acea
ZD
7802 if (INSN_P (insn))
7803 {
7804 note = find_reg_equal_equiv_note (insn);
7805 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
7806 continue;
7807
7808 if (gcse_file)
7809 fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
7810 INSN_UID (insn));
7811 remove_note (insn, note);
7812 }
7813 act = act->succ_next;
7814 if (bb->succ)
7815 {
7816 if (act)
7817 stack[stack_top++] = act;
7818 act = bb->succ;
7819 }
7820 }
7821}
7822
a13d4ebf
AM
7823/* This routine will replace a store with a SET to a specified register. */
7824
7825static void
d088acea 7826replace_store_insn (rtx reg, rtx del, basic_block bb, struct ls_expr *smexpr)
a13d4ebf 7827{
d088acea 7828 rtx insn, mem, note, set, ptr;
589005ff 7829
d088acea 7830 mem = smexpr->pattern;
9a318d30 7831 insn = gen_move_insn (reg, SET_SRC (single_set (del)));
a13d4ebf 7832 insn = emit_insn_after (insn, del);
589005ff 7833
a13d4ebf
AM
7834 if (gcse_file)
7835 {
589005ff 7836 fprintf (gcse_file,
0b17ab2f 7837 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
a13d4ebf 7838 print_inline_rtx (gcse_file, del, 6);
8e42ace1 7839 fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
a13d4ebf 7840 print_inline_rtx (gcse_file, insn, 6);
8e42ace1 7841 fprintf (gcse_file, "\n");
a13d4ebf 7842 }
589005ff 7843
d088acea
ZD
7844 for (ptr = ANTIC_STORE_LIST (smexpr); ptr; ptr = XEXP (ptr, 1))
7845 if (XEXP (ptr, 0) == del)
7846 {
7847 XEXP (ptr, 0) = insn;
7848 break;
7849 }
49ce134f 7850 delete_insn (del);
d088acea
ZD
7851
7852 /* Now we must handle REG_EQUAL notes whose contents is equal to the mem;
7853 they are no longer accurate provided that they are reached by this
7854 definition, so drop them. */
a813c111 7855 for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
d088acea
ZD
7856 if (INSN_P (insn))
7857 {
7858 set = single_set (insn);
7859 if (!set)
7860 continue;
7861 if (expr_equiv_p (SET_DEST (set), mem))
7862 return;
7863 note = find_reg_equal_equiv_note (insn);
7864 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
7865 continue;
7866
7867 if (gcse_file)
7868 fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
7869 INSN_UID (insn));
7870 remove_note (insn, note);
7871 }
7872 remove_reachable_equiv_notes (bb, smexpr);
a13d4ebf
AM
7873}
7874
7875
7876/* Delete a store, but copy the value that would have been stored into
7877 the reaching_reg for later storing. */
7878
7879static void
1d088dee 7880delete_store (struct ls_expr * expr, basic_block bb)
a13d4ebf
AM
7881{
7882 rtx reg, i, del;
7883
7884 if (expr->reaching_reg == NULL_RTX)
7885 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
a13d4ebf 7886
a13d4ebf 7887 reg = expr->reaching_reg;
589005ff 7888
a13d4ebf
AM
7889 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
7890 {
7891 del = XEXP (i, 0);
e2d2ed72 7892 if (BLOCK_FOR_INSN (del) == bb)
a13d4ebf 7893 {
589005ff 7894 /* We know there is only one since we deleted redundant
a13d4ebf 7895 ones during the available computation. */
d088acea 7896 replace_store_insn (reg, del, bb, expr);
a13d4ebf
AM
7897 break;
7898 }
7899 }
7900}
7901
7902/* Free memory used by store motion. */
7903
589005ff 7904static void
1d088dee 7905free_store_memory (void)
a13d4ebf
AM
7906{
7907 free_ldst_mems ();
589005ff 7908
a13d4ebf 7909 if (ae_gen)
5a660bff 7910 sbitmap_vector_free (ae_gen);
a13d4ebf 7911 if (ae_kill)
5a660bff 7912 sbitmap_vector_free (ae_kill);
a13d4ebf 7913 if (transp)
5a660bff 7914 sbitmap_vector_free (transp);
a13d4ebf 7915 if (st_antloc)
5a660bff 7916 sbitmap_vector_free (st_antloc);
a13d4ebf 7917 if (pre_insert_map)
5a660bff 7918 sbitmap_vector_free (pre_insert_map);
a13d4ebf 7919 if (pre_delete_map)
5a660bff 7920 sbitmap_vector_free (pre_delete_map);
aaa4ca30
AJ
7921 if (reg_set_in_block)
7922 sbitmap_vector_free (reg_set_in_block);
589005ff 7923
a13d4ebf
AM
7924 ae_gen = ae_kill = transp = st_antloc = NULL;
7925 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
7926}
7927
7928/* Perform store motion. Much like gcse, except we move expressions the
7929 other way by looking at the flowgraph in reverse. */
7930
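/* The overall shape of the pass, in terms of the helpers above
   (sketch only; see the body below for the exact sequence):

     num_stores = compute_store_table ();	-- candidate stores
     build_store_vectors ();			-- antic/avail/transp/kill
     edge_list = pre_edge_rev_lcm (...);	-- LCM on the reverse CFG
     delete_store () and insert_store ()	-- apply the LCM maps  */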
7931static void
1d088dee 7932store_motion (void)
a13d4ebf 7933{
e0082a72 7934 basic_block bb;
0b17ab2f 7935 int x;
a13d4ebf 7936 struct ls_expr * ptr;
adfcce61 7937 int update_flow = 0;
aaa4ca30 7938
a13d4ebf
AM
7939 if (gcse_file)
7940 {
7941 fprintf (gcse_file, "before store motion\n");
7942 print_rtl (gcse_file, get_insns ());
7943 }
7944
a13d4ebf 7945 init_alias_analysis ();
aaa4ca30 7946
47a3dae1 7947 /* Find all the available and anticipatable stores. */
a13d4ebf
AM
7948 num_stores = compute_store_table ();
7949 if (num_stores == 0)
7950 {
aaa4ca30 7951 sbitmap_vector_free (reg_set_in_block);
a13d4ebf
AM
7952 end_alias_analysis ();
7953 return;
7954 }
7955
47a3dae1 7956 /* Now compute kill & transp vectors. */
a13d4ebf 7957 build_store_vectors ();
47a3dae1 7958 add_noreturn_fake_exit_edges ();
2a868ea4 7959 connect_infinite_loops_to_exit ();
a13d4ebf 7960
589005ff
KH
7961 edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
7962 st_antloc, ae_kill, &pre_insert_map,
a13d4ebf
AM
7963 &pre_delete_map);
7964
7965 /* Now we want to insert the new stores which are going to be needed. */
7966 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7967 {
e0082a72
ZD
7968 FOR_EACH_BB (bb)
7969 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
7970 delete_store (ptr, bb);
a13d4ebf 7971
0b17ab2f
RH
7972 for (x = 0; x < NUM_EDGES (edge_list); x++)
7973 if (TEST_BIT (pre_insert_map[x], ptr->index))
7974 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
a13d4ebf
AM
7975 }
7976
7977 if (update_flow)
7978 commit_edge_insertions ();
aaa4ca30 7979
a13d4ebf
AM
7980 free_store_memory ();
7981 free_edge_list (edge_list);
7982 remove_fake_edges ();
7983 end_alias_analysis ();
7984}
e2500fed 7985
a0134312
RS
7986\f
7987/* Entry point for jump bypassing optimization pass. */
7988
7989int
1d088dee 7990bypass_jumps (FILE *file)
a0134312
RS
7991{
7992 int changed;
7993
7994 /* We do not construct an accurate cfg in functions which call
7995 setjmp, so just punt to be safe. */
7996 if (current_function_calls_setjmp)
7997 return 0;
7998
7999 /* For calling dump_foo fns from gdb. */
8000 debug_stderr = stderr;
8001 gcse_file = file;
8002
8003 /* Identify the basic block information for this function, including
8004 successors and predecessors. */
8005 max_gcse_regno = max_reg_num ();
8006
8007 if (file)
8008 dump_flow_info (file);
8009
6614fd40 8010 /* Return if there's nothing to do, or it is too expensive. */
d128effb 8011  if (n_basic_blocks <= 1 || is_too_expensive (_("jump bypassing disabled")))
a0134312
RS
8012 return 0;
8013
a0134312
RS
8014 gcc_obstack_init (&gcse_obstack);
8015 bytes_used = 0;
8016
8017 /* We need alias. */
8018 init_alias_analysis ();
8019
8020 /* Record where pseudo-registers are set. This data is kept accurate
8021 during each pass. ??? We could also record hard-reg information here
8022 [since it's unchanging], however it is currently done during hash table
8023 computation.
8024
8025 It may be tempting to compute MEM set information here too, but MEM sets
8026 will be subject to code motion one day and thus we need to compute
8027 information about memory sets when we build the hash tables. */
8028
8029 alloc_reg_set_mem (max_gcse_regno);
8030 compute_sets (get_insns ());
8031
8032 max_gcse_regno = max_reg_num ();
8033 alloc_gcse_mem (get_insns ());
8034 changed = one_cprop_pass (1, 1, 1);
8035 free_gcse_mem ();
8036
8037 if (file)
8038 {
8039 fprintf (file, "BYPASS of %s: %d basic blocks, ",
faed5cc3 8040 current_function_name (), n_basic_blocks);
a0134312
RS
8041 fprintf (file, "%d bytes\n\n", bytes_used);
8042 }
8043
8044 obstack_free (&gcse_obstack, NULL);
8045 free_reg_set_mem ();
8046
8047 /* We are finished with alias. */
8048 end_alias_analysis ();
8049 allocate_reg_info (max_reg_num (), FALSE, FALSE);
8050
8051 return changed;
8052}
8053
d128effb
NS
8054/* Return true if the graph is too expensive to optimize. PASS is the
8055 optimization about to be performed. */
8056
8057static bool
8058is_too_expensive (const char *pass)
8059{
8060 /* Trying to perform global optimizations on flow graphs which have
8061 a high connectivity will take a long time and is unlikely to be
8062 particularly useful.
8063
8064 In normal circumstances a cfg should have about twice as many
8065 edges as blocks. But we do not want to punish small functions
 8066     which have a couple of switch statements.  Rather than simply
 8067     thresholding the number of blocks, we use something with more
 8068     graceful degradation.  */
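  /* For example, with these constants a function of 1000 basic blocks
     is rejected only once it has more than 20000 + 1000 * 4 = 24000
     edges, i.e. an average of 24 edges per block.  */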
8069 if (n_edges > 20000 + n_basic_blocks * 4)
8070 {
8071 if (warn_disabled_optimization)
8072 warning ("%s: %d basic blocks and %d edges/basic block",
8073 pass, n_basic_blocks, n_edges / n_basic_blocks);
8074
8075 return true;
8076 }
8077
8078 /* If allocating memory for the cprop bitmap would take up too much
8079 storage it's better just to disable the optimization. */
8080 if ((n_basic_blocks
8081 * SBITMAP_SET_SIZE (max_reg_num ())
8082 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
8083 {
8084 if (warn_disabled_optimization)
8085 warning ("%s: %d basic blocks and %d registers",
8086 pass, n_basic_blocks, max_reg_num ());
8087
8088 return true;
8089 }
8090
8091 return false;
8092}
8093
e2500fed 8094#include "gt-gcse.h"