/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass

*/

/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "params.h"
#include "cselib.h"
#include "intl.h"
#include "obstack.h"

/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */

/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.
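
   For example, a typical candidate (purely illustrative) is
   (set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 102))),
   whereas sets whose source is a plain register, subreg, constant or
   call are rejected by want_to_gcse_p.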

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.
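
   For illustration, a hypothetical example (not from any particular
   test case) of a partially redundant expression:

       if (cond)
	 x = a + b;           <- a + b available on this path only
       y = a + b;             <- partially redundant

   PRE deletes the redundant computation and inserts a copy on the path
   where the expression was unavailable, so both uses read one pseudo:

       if (cond)
	 t = a + b, x = t;
       else
	 t = a + b;           <- inserted copy (step 4)
       y = t;                 <- redundant computation deleted (step 3)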

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is gained by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speedup,
   if any, can be achieved, and maybe later, when things settle, things can
   be rearranged.

   Help stamp out big monolithic functions!  */
\f
/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

struct reg_use {rtx reg_rtx; };

/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};

/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Whether the table is for expressions or for copy propagation.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;
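
/* For illustration, a minimal sketch of scanning one hash bucket the
   way the lookup routines below do (HASH is assumed to have been
   computed by hash_expr for the expression X being sought):

     struct expr *e;
     for (e = table->table[hash]; e != NULL; e = e->next_same_hash)
       if (expr_equiv_p (e->expr, x))
	 break;
*/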

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])

/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;
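
/* For illustration, a minimal sketch of walking the sets recorded for a
   pseudo-reg REGNO (record_one_set below shows how entries are linked;
   `use' stands for whatever a caller does with each setting insn):

     struct reg_set *r;
     for (r = reg_set_table[REGNO]; r != NULL; r = r->next)
       use (r->insn);
*/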

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100

/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by anything except
   themselves, i.e. loads and stores to a single location.
   We can then allow movement of these MEM refs with a little special
   allowance (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store motion uses this structure as an expression table to track stores
   which look interesting, and might be moveable towards the exit block.  */

struct ls_expr
{
  struct expr * expr;		/* Gcse expression reference for LM.  */
  rtx pattern;			/* Pattern of this mem.  */
  rtx pattern_regs;		/* List of registers mentioned by the mem.  */
  rtx loads;			/* INSN list of loads seen.  */
  rtx stores;			/* INSN list of stores seen.  */
  struct ls_expr * next;	/* Next in the list.  */
  int invalid;			/* Invalid for some reason.  */
  int index;			/* If it maps to a bitmap index.  */
  int hash_index;		/* Index when in a hash table.  */
  rtx reaching_reg;		/* Register to use when re-writing.  */
};
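
/* The LOADS and STORES fields above are INSN_LIST chains.  A minimal
   sketch of walking one, as print_ldst_list does for real (`use' stands
   for whatever a caller does with each insn):

     rtx list;
     for (list = ptr->loads; list != NULL; list = XEXP (list, 1))
       use (XEXP (list, 0));
*/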

/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number, of lists of insns which modify
   memory within the block.  */
static rtx * modify_mem_list;
bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;
bitmap canon_modify_mem_list_set;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;
\f
/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays, i.e.,
   rd_kill[block_num][cuid_num]
   ae_kill[block_num][expr_num]  */

/* For reaching defs */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* for available exprs */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;

/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  basic_block current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};
\f
static void compute_can_copy (void);
static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
static void *grealloc (void *, size_t);
static void *gcse_alloc (unsigned long);
static void alloc_gcse_mem (rtx);
static void free_gcse_mem (void);
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static int get_bitmap_width (int, int, int);
static void record_one_set (int, rtx);
static void replace_one_set (int, rtx, rtx);
static void record_set_info (rtx, rtx, void *);
static void compute_sets (rtx);
static void hash_scan_insn (rtx, struct hash_table *, int);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool gcse_constant_p (rtx);
static int oprs_unchanged_p (rtx, rtx, int);
static int oprs_anticipatable_p (rtx, rtx);
static int oprs_available_p (rtx, rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
				  struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
static unsigned int hash_expr_1 (rtx, enum machine_mode, int *);
static unsigned int hash_string_1 (const char *);
static unsigned int hash_set (int, int);
static int expr_equiv_p (rtx, rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, rtx, void *);
static void compute_hash_table (struct hash_table *);
static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static void compute_hash_table_work (struct hash_table *);
static void dump_hash_table (FILE *, const char *, struct hash_table *);
static struct expr *lookup_expr (rtx, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
static int oprs_not_set_p (rtx, rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
static void compute_transp (rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
				      struct hash_table *);
static void compute_cprop_data (void);
static void find_used_regs (rtx *, void *);
static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
static void mems_conflict_for_gcse_p (rtx, rtx, void *);
static int load_killed_in_block_p (basic_block, int, rtx, int);
static void canon_list_insert (rtx, rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
static int one_cprop_pass (int, int, int);
static bool constprop_register (rtx, rtx, rtx, int);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (rtx, edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static void compute_pre_data (void);
static int pre_expr_reaches_here_p (basic_block, struct expr *,
				    basic_block);
static void insert_insn_end_bb (struct expr *, basic_block, int);
static void pre_insert_copy_insn (struct expr *, rtx);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (void);
static int one_pre_gcse_pass (int);
static void add_label_notes (rtx, rtx);
static void alloc_code_hoist_mem (int, int);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
static void hoist_code (void);
static int one_code_hoisting_pass (void);
static void alloc_rd_mem (int, int);
static void free_rd_mem (void);
static void handle_rd_kill_set (rtx, int, basic_block);
static void compute_kill_rd (void);
static void compute_rd (void);
static void alloc_avail_expr_mem (int, int);
static void free_avail_expr_mem (void);
static void compute_ae_gen (struct hash_table *);
static int expr_killed_p (rtx, basic_block);
static void compute_ae_kill (sbitmap *, sbitmap *, struct hash_table *);
static int expr_reaches_here_p (struct occr *, struct expr *, basic_block,
				int);
static rtx computing_insn (struct expr *, rtx);
static int def_reaches_here_p (rtx, rtx);
static int can_disregard_other_sets (struct reg_set **, rtx, int);
static int handle_avail_expr (rtx, struct expr *);
static int classic_gcse (void);
static int one_classic_gcse_pass (int);
static void invalidate_nonnull_info (rtx, rtx, void *);
static int delete_null_pointer_checks_1 (unsigned int *, sbitmap *, sbitmap *,
					 struct null_pointer_info *);
static rtx process_insert_insn (struct expr *);
static int pre_edge_insert (struct edge_list *, struct expr **);
static int expr_reaches_here_p_work (struct occr *, struct expr *,
				     basic_block, int, char *);
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
					 basic_block, char *);
static struct ls_expr * ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void free_ldst_mems (void);
static void print_ldst_list (FILE *);
static struct ls_expr * find_rtx_in_ldst (rtx);
static int enumerate_ldsts (void);
static inline struct ls_expr * first_ls_expr (void);
static inline struct ls_expr * next_ls_expr (struct ls_expr *);
static int simple_mem (rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void reg_set_info (rtx, rtx, void *);
static bool store_ops_ok (rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
static bool load_kills_store (rtx, rtx, int);
static bool find_loads (rtx, rtx, int);
static bool store_killed_in_insn (rtx, rtx, rtx, int);
static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_bb (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
static void remove_reachable_equiv_notes (basic_block, struct ls_expr *);
static void replace_store_insn (rtx, rtx, basic_block, struct ls_expr *);
static void delete_store (struct ls_expr *, basic_block);
static void free_store_memory (void);
static void store_motion (void);
static void free_insn_expr_list_list (rtx *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
static bool do_local_cprop (rtx, rtx, int, rtx*);
static bool adjust_libcall_notes (rtx, rtx, rtx, rtx*);
static void local_cprop_pass (int);
static bool is_too_expensive (const char *);
\f

/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (rtx f, FILE *file)
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= 1 || is_too_expensive (_("GCSE disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();
  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
	fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
	 and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
	 during this pass.  */
      changed = one_cprop_pass (pass + 1, 0, 0);

      if (optimize_size)
	changed |= one_classic_gcse_pass (pass + 1);
      else
	{
	  changed |= one_pre_gcse_pass (pass + 1);
	  /* We may have just created new basic blocks.  Release and
	     recompute various things which are sized on the number of
	     basic blocks.  */
	  if (changed)
	    {
	      free_modify_mem_tables ();
	      modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
	      canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
	    }
	  free_reg_set_mem ();
	  alloc_reg_set_mem (max_reg_num ());
	  compute_sets (f);
	  run_jump_opt_after_gcse = 1;
	}

      if (max_pass_bytes < bytes_used)
	max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
	 not re-use the existing allocated memory because the tables
	 will not have info for the insns or registers created by
	 partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
	 for code size -- it rarely makes programs faster, and can make
	 them bigger if we did partial redundancy elimination (when optimizing
	 for space, we use a classic gcse algorithm instead of partial
	 redundancy algorithms).  */
      if (optimize_size)
	{
	  max_gcse_regno = max_reg_num ();
	  alloc_gcse_mem (f);
	  changed |= one_code_hoisting_pass ();
	  free_gcse_mem ();

	  if (max_pass_bytes < bytes_used)
	    max_pass_bytes = bytes_used;
	}

      if (file)
	{
	  fprintf (file, "\n");
	  fflush (file);
	}

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1, 0);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
	       current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
	       pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();
  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  if (!optimize_size && flag_gcse_sm)
    store_motion ();

  return run_jump_opt_after_gcse;
}
\f
/* Misc. utilities.  */

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy[(int) NUM_MACHINE_MODES];

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy (void)
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
	can_copy[i] = 0;
#else
	reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
	insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
	if (recog (PATTERN (insn), insn, NULL) >= 0)
	  can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;

  end_sequence ();
}

/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (enum machine_mode mode)
{
  static bool can_copy_init_p = false;

  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}
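
/* For example, a caller deciding whether a (set (reg) (reg)) copy can be
   recorded or created would guard it along these lines (a sketch, not a
   quote of any particular caller):

     if (can_copy_p (GET_MODE (dest)))
       ...record or emit the reg/reg copy...
*/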
\f
/* Cover function to xmalloc to record bytes allocated.  */

static void *
gmalloc (size_t size)
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xcalloc to record bytes allocated.  */

static void *
gcalloc (size_t nelem, size_t elsize)
{
  bytes_used += nelem * elsize;
  return xcalloc (nelem, elsize);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static void *
grealloc (void *ptr, size_t size)
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static void *
gcse_alloc (unsigned long size)
{
  bytes_used += size;
  return obstack_alloc (&gcse_obstack, size);
}

/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (rtx f)
{
  int i;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  uid_cuid = gcalloc (max_uid + 1, sizeof (int));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	uid_cuid[INSN_UID (insn)] = i++;
      else
	uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  cuid_insn = gcalloc (max_cuid + 1, sizeof (rtx));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_XMALLOC ();

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  modify_mem_list_set = BITMAP_XMALLOC ();
  canon_modify_mem_list_set = BITMAP_XMALLOC ();
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem (void)
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_XFREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_XFREE (modify_mem_list_set);
  BITMAP_XFREE (canon_modify_mem_list_set);
}

/* Many of the global optimization algorithms work by solving dataflow
   equations for various expressions.  Initially, some local value is
   computed for each expression in each block.  Then, the values across the
   various blocks are combined (by following flow graph edges) to arrive at
   global values.  Conceptually, each set of equations is independent.  We
   may therefore solve all the equations in parallel, solve them one at a
   time, or pick any intermediate approach.

   When you're going to need N two-dimensional bitmaps, each X (say, the
   number of blocks) by Y (say, the number of expressions), call this
   function.  It's not important what X and Y represent; only that Y
   correspond to the things that can be done in parallel.  This function will
   return an appropriate chunking factor C; you should solve C sets of
   equations in parallel.  By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less space.  */

static int
get_bitmap_width (int n, int x, int y)
{
  /* It's not really worth figuring out *exactly* how much memory will
     be used by a particular choice.  The important thing is to get
     something approximately right.  */
  size_t max_bitmap_memory = 10 * 1024 * 1024;

  /* The number of bytes we'd use for a single column of minimum
     width.  */
  size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);

  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;

  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
			     / column_size);
}
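
/* A worked example with made-up numbers: for N = 4 bitmaps of X = 1000
   blocks each, assuming 8-byte (64-bit) sbitmap elements, column_size is
   4 * 1000 * 8 = 32000 bytes.  If Y is large enough to blow the 10MB
   budget, the chunk returned is 64 * ((10485760 + 31999) / 32000)
   = 64 * 328 = 20992, i.e. solve at most 20992 equations in parallel.  */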
\f
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   additionally TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
			  struct hash_table *table)
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
	sbitmap_vector_zero (transp, last_basic_block);
      else
	sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  int indx = expr->bitmap_index;
	  struct occr *occr;

	  /* The expression is transparent in this block if it is not killed.
	     We start by assuming all are transparent [none are killed], and
	     then reset the bits for those that are.  */
	  if (transp)
	    compute_transp (expr->expr, indx, transp, table->set_p);

	  /* The occurrences recorded in antic_occr are exactly those that
	     we want to set to nonzero in ANTLOC.  */
	  if (antloc)
	    for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->deleted_p = 0;
	      }

	  /* The occurrences recorded in avail_occr are exactly those that
	     we want to set to nonzero in COMP.  */
	  if (comp)
	    for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->copied_p = 0;
	      }

	  /* While we're scanning the table, this is a good place to
	     initialize this.  */
	  expr->reaching_reg = 0;
	}
    }
}
\f
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (int n_regs)
{
  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *));

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem (void)
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}

/* An OLD_INSN that used to set REGNO was replaced by NEW_INSN.
   Update the corresponding `reg_set_table' entry accordingly.
   We assume that NEW_INSN is not already recorded in reg_set_table[regno].  */

static void
replace_one_set (int regno, rtx old_insn, rtx new_insn)
{
  struct reg_set *reg_info;
  if (regno >= reg_set_table_size)
    return;
  for (reg_info = reg_set_table[regno]; reg_info; reg_info = reg_info->next)
    if (reg_info->insn == old_insn)
      {
	reg_info->insn = new_insn;
	break;
      }
}

/* Record REGNO in the reg_set table.  */

static void
record_one_set (int regno, rtx insn)
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table = grealloc (reg_set_table,
				new_size * sizeof (struct reg_set *));
      memset (reg_set_table + reg_set_table_size, 0,
	      (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}

/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx record_set_insn = (rtx) data;

  if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (rtx f)
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), record_set_info, insn);
}
\f
/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;

/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static GTY(()) rtx test_insn;
static int
want_to_gcse_p (rtx x)
{
  int num_clobbers = 0;
  int icode;

  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CALL:
    case CONSTANT_P_RTX:
      return 0;

    default:
      break;
    }

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
	= make_insn_raw (gen_rtx_SET (VOIDmode,
				      gen_rtx_REG (word_mode,
						   FIRST_PSEUDO_REGISTER * 2),
				      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
	  && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}

/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (rtx x, rtx insn, int avail_p)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

	if (info->last_bb != current_bb)
	  return 1;
	if (avail_p)
	  return info->last_set < INSN_CUID (insn);
	else
	  return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
				  x, avail_p))
	return 0;
      else
	return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call needed at this
	     level, change it into iteration.  This function is called enough
	     to be worth it.  */
	  if (i == 0)
	    return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

	  else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
	    return 0;
    }

  return 1;
}
1399
a13d4ebf
AM
1400/* Used for communication between mems_conflict_for_gcse_p and
1401 load_killed_in_block_p. Nonzero if mems_conflict_for_gcse_p finds a
1402 conflict between two memory references. */
1403static int gcse_mems_conflict_p;
1404
1405/* Used for communication between mems_conflict_for_gcse_p and
1406 load_killed_in_block_p. A memory reference for a load instruction,
1407 mems_conflict_for_gcse_p will see if a memory store conflicts with
1408 this memory load. */
1409static rtx gcse_mem_operand;
1410
1411/* DEST is the output of an instruction. If it is a memory reference, and
1412 possibly conflicts with the load found in gcse_mem_operand, then set
1413 gcse_mems_conflict_p to a nonzero value. */
1414
1415static void
1d088dee
AJ
1416mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
1417 void *data ATTRIBUTE_UNUSED)
a13d4ebf
AM
1418{
1419 while (GET_CODE (dest) == SUBREG
1420 || GET_CODE (dest) == ZERO_EXTRACT
1421 || GET_CODE (dest) == SIGN_EXTRACT
1422 || GET_CODE (dest) == STRICT_LOW_PART)
1423 dest = XEXP (dest, 0);
1424
1425 /* If DEST is not a MEM, then it will not conflict with the load. Note
1426 that function calls are assumed to clobber memory, but are handled
1427 elsewhere. */
1428 if (GET_CODE (dest) != MEM)
1429 return;
aaa4ca30 1430
a13d4ebf 1431 /* If we are setting a MEM in our list of specially recognized MEMs,
589005ff
KH
1432 don't mark as killed this time. */
1433
47a3dae1 1434 if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
a13d4ebf
AM
1435 {
1436 if (!find_rtx_in_ldst (dest))
1437 gcse_mems_conflict_p = 1;
1438 return;
1439 }
aaa4ca30 1440
a13d4ebf
AM
1441 if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
1442 rtx_addr_varies_p))
1443 gcse_mems_conflict_p = 1;
1444}
1445
1446/* Return nonzero if the expression in X (a memory reference) is killed
1447 in block BB before or after the insn with the CUID in UID_LIMIT.
1448 AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
1449 before UID_LIMIT.
1450
1451 To check the entire block, set UID_LIMIT to max_uid + 1 and
1452 AVAIL_P to 0. */
1453
1454static int
1d088dee 1455load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
a13d4ebf 1456{
0b17ab2f 1457 rtx list_entry = modify_mem_list[bb->index];
a13d4ebf
AM
1458 while (list_entry)
1459 {
1460 rtx setter;
1461 /* Ignore entries in the list that do not apply. */
1462 if ((avail_p
1463 && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
1464 || (! avail_p
1465 && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
1466 {
1467 list_entry = XEXP (list_entry, 1);
1468 continue;
1469 }
1470
1471 setter = XEXP (list_entry, 0);
1472
1473 /* If SETTER is a call everything is clobbered. Note that calls
1474 to pure functions are never put on the list, so we need not
1475 worry about them. */
1476 if (GET_CODE (setter) == CALL_INSN)
1477 return 1;
1478
1479 /* SETTER must be an INSN of some kind that sets memory. Call
589005ff 1480 note_stores to examine each hunk of memory that is modified.
a13d4ebf
AM
1481
1482 The note_stores interface is pretty limited, so we have to
1483 communicate via global variables. Yuk. */
1484 gcse_mem_operand = x;
1485 gcse_mems_conflict_p = 0;
1486 note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
1487 if (gcse_mems_conflict_p)
1488 return 1;
1489 list_entry = XEXP (list_entry, 1);
1490 }
1491 return 0;
1492}
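
/* Aside: a standalone sketch of the CUID-window filter above (made-up
   types; compile separately).  Entries on the wrong side of UID_LIMIT are
   skipped, with AVAIL_P selecting which side counts, and a call entry
   kills everything, just as in load_killed_in_block_p.  */

#include <stdio.h>

struct demo_mod { int cuid; int is_call; struct demo_mod *next; };

static int
demo_killed_p (struct demo_mod *list, int uid_limit, int avail_p)
{
  for (; list; list = list->next)
    {
      /* Ignore entries in the list that do not apply.  */
      if ((avail_p && list->cuid < uid_limit)
	  || (! avail_p && list->cuid > uid_limit))
	continue;

      if (list->is_call)
	return 1;
      /* The real code would now run the conflict test on the stores.  */
    }
  return 0;
}

int
main (void)
{
  struct demo_mod call_entry = { 7, 1, NULL };
  struct demo_mod set_entry = { 3, 0, &call_entry };

  /* Availability check: the call at cuid 7 >= 5 kills the load.  */
  printf ("%d\n", demo_killed_p (&set_entry, 5, 1));	/* 1 */
  /* Anticipatability check: cuid 7 > 5 is ignored, nothing kills.  */
  printf ("%d\n", demo_killed_p (&set_entry, 5, 0));	/* 0 */
  return 0;
}
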
1493
cc2902df 1494/* Return nonzero if the operands of expression X are unchanged from
7506f491
DE
1495 the start of INSN's basic block up to but not including INSN. */
1496
1497static int
1d088dee 1498oprs_anticipatable_p (rtx x, rtx insn)
7506f491
DE
1499{
1500 return oprs_unchanged_p (x, insn, 0);
1501}
1502
cc2902df 1503/* Return nonzero if the operands of expression X are unchanged from
7506f491
DE
1504 INSN to the end of INSN's basic block. */
1505
1506static int
1d088dee 1507oprs_available_p (rtx x, rtx insn)
7506f491
DE
1508{
1509 return oprs_unchanged_p (x, insn, 1);
1510}
1511
1512/* Hash expression X.
c4c81601
RK
1513
1514 MODE is only used if X is a CONST_INT. DO_NOT_RECORD_P is a boolean
1515 indicating if a volatile operand is found or if the expression contains
1516 something we don't want to insert in the table.
7506f491
DE
1517
1518 ??? One might want to merge this with canon_hash. Later. */
1519
1520static unsigned int
1d088dee 1521hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p, int hash_table_size)
7506f491
DE
1522{
1523 unsigned int hash;
1524
1525 *do_not_record_p = 0;
1526
1527 hash = hash_expr_1 (x, mode, do_not_record_p);
1528 return hash % hash_table_size;
1529}
172890a2 1530
6462bb43 1531/* Hash a string. Just add its bytes up. */
172890a2 1532
6462bb43 1533static inline unsigned
1d088dee 1534hash_string_1 (const char *ps)
6462bb43
AO
1535{
1536 unsigned hash = 0;
8e42ace1 1537 const unsigned char *p = (const unsigned char *) ps;
589005ff 1538
6462bb43
AO
1539 if (p)
1540 while (*p)
1541 hash += *p++;
1542
1543 return hash;
1544}
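
/* Aside: the byte-sum hash above as a standalone sketch (compile
   separately).  It is deliberately weak -- permutations of the same bytes
   collide -- which is acceptable here because the hash only selects a
   chain and a full equality test follows.  */

#include <stdio.h>

static unsigned
demo_hash_string (const char *ps)
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}

int
main (void)
{
  /* "ab" and "ba" hash identically: 'a' + 'b' either way.  */
  printf ("%u %u\n", demo_hash_string ("ab"), demo_hash_string ("ba"));
  return 0;
}
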
7506f491
DE
1545
1546/* Subroutine of hash_expr to do the actual work. */
1547
1548static unsigned int
1d088dee 1549hash_expr_1 (rtx x, enum machine_mode mode, int *do_not_record_p)
7506f491
DE
1550{
1551 int i, j;
1552 unsigned hash = 0;
1553 enum rtx_code code;
6f7d635c 1554 const char *fmt;
7506f491 1555
c4c81601 1556 /* Used to turn recursion into iteration. We can't rely on GCC's
fbe5a4a6 1557 tail-recursion elimination since we need to keep accumulating values
c4c81601 1558 in HASH. */
7506f491
DE
1559
1560 if (x == 0)
1561 return hash;
1562
c4c81601 1563 repeat:
7506f491
DE
1564 code = GET_CODE (x);
1565 switch (code)
1566 {
1567 case REG:
c4c81601
RK
1568 hash += ((unsigned int) REG << 7) + REGNO (x);
1569 return hash;
7506f491
DE
1570
1571 case CONST_INT:
c4c81601
RK
1572 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
1573 + (unsigned int) INTVAL (x));
1574 return hash;
7506f491
DE
1575
1576 case CONST_DOUBLE:
1577 /* This is like the general case, except that it only counts
1578 the integers representing the constant. */
c4c81601 1579 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
7506f491
DE
1580 if (GET_MODE (x) != VOIDmode)
1581 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
c4c81601 1582 hash += (unsigned int) XWINT (x, i);
7506f491 1583 else
c4c81601
RK
1584 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
1585 + (unsigned int) CONST_DOUBLE_HIGH (x));
7506f491
DE
1586 return hash;
1587
69ef87e2
AH
1588 case CONST_VECTOR:
1589 {
1590 int units;
1591 rtx elt;
1592
1593 units = CONST_VECTOR_NUNITS (x);
1594
1595 for (i = 0; i < units; ++i)
1596 {
1597 elt = CONST_VECTOR_ELT (x, i);
1598 hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
1599 }
1600
1601 return hash;
1602 }
1603
7506f491
DE
1604 /* Assume there is only one rtx object for any given label. */
1605 case LABEL_REF:
1606 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1607 differences and differences between each stage's debugging dumps. */
c4c81601
RK
1608 hash += (((unsigned int) LABEL_REF << 7)
1609 + CODE_LABEL_NUMBER (XEXP (x, 0)));
7506f491
DE
1610 return hash;
1611
1612 case SYMBOL_REF:
1613 {
1614 /* Don't hash on the symbol's address to avoid bootstrap differences.
1615 Different hash values may cause expressions to be recorded in
1616 different orders and thus different registers to be used in the
1617 final assembler. This also avoids differences in the dump files
1618 between various stages. */
1619 unsigned int h = 0;
3cce094d 1620 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
c4c81601 1621
7506f491
DE
1622 while (*p)
1623 h += (h << 7) + *p++; /* ??? revisit */
c4c81601
RK
1624
1625 hash += ((unsigned int) SYMBOL_REF << 7) + h;
7506f491
DE
1626 return hash;
1627 }
1628
1629 case MEM:
1630 if (MEM_VOLATILE_P (x))
1631 {
1632 *do_not_record_p = 1;
1633 return 0;
1634 }
c4c81601
RK
1635
1636 hash += (unsigned int) MEM;
d51f3632
JH
1637  /* We used the alias set for hashing, but this is not good, since the
 1638     alias set may differ between -fprofile-arcs and -fbranch-probabilities
 1639     compilations, causing the profiles to fail to match.  */
7506f491
DE
1640 x = XEXP (x, 0);
1641 goto repeat;
1642
1643 case PRE_DEC:
1644 case PRE_INC:
1645 case POST_DEC:
1646 case POST_INC:
1647 case PC:
1648 case CC0:
1649 case CALL:
1650 case UNSPEC_VOLATILE:
1651 *do_not_record_p = 1;
1652 return 0;
1653
1654 case ASM_OPERANDS:
1655 if (MEM_VOLATILE_P (x))
1656 {
1657 *do_not_record_p = 1;
1658 return 0;
1659 }
6462bb43
AO
1660 else
1661 {
1662 /* We don't want to take the filename and line into account. */
1663 hash += (unsigned) code + (unsigned) GET_MODE (x)
1664 + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
1665 + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
1666 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
1667
1668 if (ASM_OPERANDS_INPUT_LENGTH (x))
1669 {
1670 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
1671 {
1672 hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
1673 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
1674 do_not_record_p)
1675 + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
1676 (x, i)));
1677 }
1678
1679 hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
1680 x = ASM_OPERANDS_INPUT (x, 0);
1681 mode = GET_MODE (x);
1682 goto repeat;
1683 }
1684 return hash;
1685 }
7506f491
DE
1686
1687 default:
1688 break;
1689 }
1690
7506f491 1691 hash += (unsigned) code + (unsigned) GET_MODE (x);
c4c81601 1692 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
1693 {
1694 if (fmt[i] == 'e')
1695 {
7506f491
DE
1696 /* If we are about to do the last recursive call
1697 needed at this level, change it into iteration.
1698 This function is called enough to be worth it. */
1699 if (i == 0)
1700 {
c4c81601 1701 x = XEXP (x, i);
7506f491
DE
1702 goto repeat;
1703 }
c4c81601
RK
1704
1705 hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
7506f491
DE
1706 if (*do_not_record_p)
1707 return 0;
1708 }
c4c81601 1709
7506f491
DE
1710 else if (fmt[i] == 'E')
1711 for (j = 0; j < XVECLEN (x, i); j++)
1712 {
1713 hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
1714 if (*do_not_record_p)
1715 return 0;
1716 }
c4c81601 1717
7506f491 1718 else if (fmt[i] == 's')
6462bb43 1719 hash += hash_string_1 (XSTR (x, i));
7506f491 1720 else if (fmt[i] == 'i')
c4c81601 1721 hash += (unsigned int) XINT (x, i);
7506f491
DE
1722 else
1723 abort ();
1724 }
1725
1726 return hash;
1727}
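
/* Aside: a standalone sketch (made-up tree type; compile separately) of
   the "turn the last recursive call into iteration" idiom that
   hash_expr_1 implements via its `repeat' label: recurse for every
   operand but the last, then overwrite the working pointer and jump back
   to the top, so a long right-leaning operand chain costs no stack.  */

#include <stdio.h>
#include <stddef.h>

struct demo_node { int value; struct demo_node *kid0, *kid1; };

static int
demo_tree_sum (struct demo_node *n)
{
  int sum = 0;

 repeat:
  if (n == NULL)
    return sum;

  sum += n->value;

  /* Recurse for the first child ...  */
  if (n->kid0)
    sum += demo_tree_sum (n->kid0);

  /* ... but iterate for the last one.  */
  n = n->kid1;
  goto repeat;
}

int
main (void)
{
  struct demo_node c = { 3, NULL, NULL };
  struct demo_node b = { 2, NULL, NULL };
  struct demo_node a = { 1, &b, &c };

  printf ("%d\n", demo_tree_sum (&a));	/* 6 */
  return 0;
}
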
1728
1729/* Hash a set of register REGNO.
1730
c4c81601
RK
1731 Sets are hashed on the register that is set. This simplifies the PRE copy
1732 propagation code.
7506f491
DE
1733
1734 ??? May need to make things more elaborate. Later, as necessary. */
1735
1736static unsigned int
1d088dee 1737hash_set (int regno, int hash_table_size)
7506f491
DE
1738{
1739 unsigned int hash;
1740
1741 hash = regno;
1742 return hash % hash_table_size;
1743}
1744
cc2902df 1745/* Return nonzero if exp1 is equivalent to exp2.
7506f491
DE
1746 ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */
1747
1748static int
1d088dee 1749expr_equiv_p (rtx x, rtx y)
7506f491 1750{
b3694847
SS
1751 int i, j;
1752 enum rtx_code code;
1753 const char *fmt;
7506f491
DE
1754
1755 if (x == y)
1756 return 1;
c4c81601 1757
7506f491 1758 if (x == 0 || y == 0)
ebd7a7af 1759 return 0;
7506f491
DE
1760
1761 code = GET_CODE (x);
1762 if (code != GET_CODE (y))
1763 return 0;
1764
1765 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1766 if (GET_MODE (x) != GET_MODE (y))
1767 return 0;
1768
1769 switch (code)
1770 {
1771 case PC:
1772 case CC0:
7506f491 1773 case CONST_INT:
ebd7a7af 1774 return 0;
7506f491
DE
1775
1776 case LABEL_REF:
1777 return XEXP (x, 0) == XEXP (y, 0);
1778
1779 case SYMBOL_REF:
1780 return XSTR (x, 0) == XSTR (y, 0);
1781
1782 case REG:
1783 return REGNO (x) == REGNO (y);
1784
297c3335
RH
1785 case MEM:
1786 /* Can't merge two expressions in different alias sets, since we can
1787 decide that the expression is transparent in a block when it isn't,
1788	 due to it being set with a different alias set.  */
1789 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
1790 return 0;
bad998e0
ZD
1791
1792 /* A volatile mem should not be considered equivalent to any other. */
1793 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1794 return 0;
297c3335
RH
1795 break;
1796
7506f491
DE
1797 /* For commutative operations, check both orders. */
1798 case PLUS:
1799 case MULT:
1800 case AND:
1801 case IOR:
1802 case XOR:
1803 case NE:
1804 case EQ:
1805 return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
1806 && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
1807 || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
1808 && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));
1809
6462bb43
AO
1810 case ASM_OPERANDS:
1811 /* We don't use the generic code below because we want to
1812 disregard filename and line numbers. */
1813
1814 /* A volatile asm isn't equivalent to any other. */
1815 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1816 return 0;
1817
1818 if (GET_MODE (x) != GET_MODE (y)
1819 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
1820 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
1821 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
1822 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
1823 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
1824 return 0;
1825
1826 if (ASM_OPERANDS_INPUT_LENGTH (x))
1827 {
1828 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
1829 if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
1830 ASM_OPERANDS_INPUT (y, i))
1831 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
1832 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
1833 return 0;
1834 }
1835
1836 return 1;
1837
7506f491
DE
1838 default:
1839 break;
1840 }
1841
1842 /* Compare the elements. If any pair of corresponding elements
1843 fail to match, return 0 for the whole thing. */
1844
1845 fmt = GET_RTX_FORMAT (code);
1846 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1847 {
1848 switch (fmt[i])
1849 {
1850 case 'e':
1851 if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
1852 return 0;
1853 break;
1854
1855 case 'E':
1856 if (XVECLEN (x, i) != XVECLEN (y, i))
1857 return 0;
1858 for (j = 0; j < XVECLEN (x, i); j++)
1859 if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1860 return 0;
1861 break;
1862
1863 case 's':
1864 if (strcmp (XSTR (x, i), XSTR (y, i)))
1865 return 0;
1866 break;
1867
1868 case 'i':
1869 if (XINT (x, i) != XINT (y, i))
1870 return 0;
1871 break;
1872
1873 case 'w':
1874 if (XWINT (x, i) != XWINT (y, i))
1875 return 0;
1876 break;
1877
1878 case '0':
1879 break;
aaa4ca30 1880
7506f491
DE
1881 default:
1882 abort ();
1883 }
8e42ace1 1884 }
7506f491
DE
1885
1886 return 1;
1887}
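
/* Aside: a standalone sketch of the commutative case in expr_equiv_p
   (made-up expression type; compile separately): for an operator such as
   PLUS, (a + b) must compare equal to (b + a), so both operand orders are
   tried before giving up.  */

#include <stdio.h>
#include <string.h>

struct demo_ex { const char *op; struct demo_ex *l, *r; const char *leaf; };

static int
demo_equiv_p (struct demo_ex *x, struct demo_ex *y)
{
  if (x == y)
    return 1;
  if (x == NULL || y == NULL || strcmp (x->op, y->op) != 0)
    return 0;
  if (strcmp (x->op, "leaf") == 0)
    return strcmp (x->leaf, y->leaf) == 0;

  /* Commutative operator: check both orders.  */
  if (strcmp (x->op, "plus") == 0)
    return ((demo_equiv_p (x->l, y->l) && demo_equiv_p (x->r, y->r))
	    || (demo_equiv_p (x->l, y->r) && demo_equiv_p (x->r, y->l)));

  return demo_equiv_p (x->l, y->l) && demo_equiv_p (x->r, y->r);
}

int
main (void)
{
  struct demo_ex a = { "leaf", NULL, NULL, "a" };
  struct demo_ex b = { "leaf", NULL, NULL, "b" };
  struct demo_ex ab = { "plus", &a, &b, NULL };
  struct demo_ex ba = { "plus", &b, &a, NULL };

  printf ("%d\n", demo_equiv_p (&ab, &ba));	/* 1 */
  return 0;
}
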
1888
02280659 1889/* Insert expression X in INSN in the hash TABLE.
7506f491
DE
1890 If it is already present, record it as the last occurrence in INSN's
1891 basic block.
1892
1893 MODE is the mode of the value X is being stored into.
1894 It is only used if X is a CONST_INT.
1895
cc2902df
KH
1896 ANTIC_P is nonzero if X is an anticipatable expression.
1897 AVAIL_P is nonzero if X is an available expression. */
7506f491
DE
1898
1899static void
1d088dee
AJ
1900insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
1901 int avail_p, struct hash_table *table)
7506f491
DE
1902{
1903 int found, do_not_record_p;
1904 unsigned int hash;
1905 struct expr *cur_expr, *last_expr = NULL;
1906 struct occr *antic_occr, *avail_occr;
1907 struct occr *last_occr = NULL;
1908
02280659 1909 hash = hash_expr (x, mode, &do_not_record_p, table->size);
7506f491
DE
1910
1911 /* Do not insert expression in table if it contains volatile operands,
1912 or if hash_expr determines the expression is something we don't want
1913 to or can't handle. */
1914 if (do_not_record_p)
1915 return;
1916
02280659 1917 cur_expr = table->table[hash];
7506f491
DE
1918 found = 0;
1919
c4c81601 1920 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
7506f491
DE
1921 {
1922 /* If the expression isn't found, save a pointer to the end of
1923 the list. */
1924 last_expr = cur_expr;
1925 cur_expr = cur_expr->next_same_hash;
1926 }
1927
1928 if (! found)
1929 {
703ad42b 1930 cur_expr = gcse_alloc (sizeof (struct expr));
7506f491 1931 bytes_used += sizeof (struct expr);
02280659 1932 if (table->table[hash] == NULL)
c4c81601 1933 /* This is the first pattern that hashed to this index. */
02280659 1934 table->table[hash] = cur_expr;
7506f491 1935 else
c4c81601
RK
1936 /* Add EXPR to end of this hash chain. */
1937 last_expr->next_same_hash = cur_expr;
1938
589005ff 1939 /* Set the fields of the expr element. */
7506f491 1940 cur_expr->expr = x;
02280659 1941 cur_expr->bitmap_index = table->n_elems++;
7506f491
DE
1942 cur_expr->next_same_hash = NULL;
1943 cur_expr->antic_occr = NULL;
1944 cur_expr->avail_occr = NULL;
1945 }
1946
1947 /* Now record the occurrence(s). */
7506f491
DE
1948 if (antic_p)
1949 {
1950 antic_occr = cur_expr->antic_occr;
1951
1952 /* Search for another occurrence in the same basic block. */
1953 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
1954 {
1955 /* If an occurrence isn't found, save a pointer to the end of
1956 the list. */
1957 last_occr = antic_occr;
1958 antic_occr = antic_occr->next;
1959 }
1960
1961 if (antic_occr)
c4c81601
RK
1962 /* Found another instance of the expression in the same basic block.
1963 Prefer the currently recorded one. We want the first one in the
1964 block and the block is scanned from start to end. */
1965 ; /* nothing to do */
7506f491
DE
1966 else
1967 {
1968 /* First occurrence of this expression in this basic block. */
703ad42b 1969 antic_occr = gcse_alloc (sizeof (struct occr));
7506f491
DE
1970 bytes_used += sizeof (struct occr);
1971 /* First occurrence of this expression in any block? */
1972 if (cur_expr->antic_occr == NULL)
1973 cur_expr->antic_occr = antic_occr;
1974 else
1975 last_occr->next = antic_occr;
c4c81601 1976
7506f491
DE
1977 antic_occr->insn = insn;
1978 antic_occr->next = NULL;
1979 }
1980 }
1981
1982 if (avail_p)
1983 {
1984 avail_occr = cur_expr->avail_occr;
1985
1986 /* Search for another occurrence in the same basic block. */
1987 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
1988 {
1989 /* If an occurrence isn't found, save a pointer to the end of
1990 the list. */
1991 last_occr = avail_occr;
1992 avail_occr = avail_occr->next;
1993 }
1994
1995 if (avail_occr)
c4c81601
RK
1996 /* Found another instance of the expression in the same basic block.
1997 Prefer this occurrence to the currently recorded one. We want
1998 the last one in the block and the block is scanned from start
1999 to end. */
2000 avail_occr->insn = insn;
7506f491
DE
2001 else
2002 {
2003 /* First occurrence of this expression in this basic block. */
703ad42b 2004 avail_occr = gcse_alloc (sizeof (struct occr));
7506f491 2005 bytes_used += sizeof (struct occr);
c4c81601 2006
7506f491
DE
2007 /* First occurrence of this expression in any block? */
2008 if (cur_expr->avail_occr == NULL)
2009 cur_expr->avail_occr = avail_occr;
2010 else
2011 last_occr->next = avail_occr;
c4c81601 2012
7506f491
DE
2013 avail_occr->insn = insn;
2014 avail_occr->next = NULL;
2015 }
2016 }
2017}
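
/* Aside: a standalone sketch of the chained-hash insertion above
   (simplified types; compile separately).  Look the key up in its bucket;
   if absent, append a fresh node at the end of the chain, mirroring the
   last_expr->next_same_hash bookkeeping.  */

#include <stdio.h>
#include <stdlib.h>

#define DEMO_TABLE_SIZE 11

struct demo_entry { int key; struct demo_entry *next_same_hash; };
static struct demo_entry *demo_table[DEMO_TABLE_SIZE];

static struct demo_entry *
demo_insert (int key)
{
  unsigned hash = (unsigned) key % DEMO_TABLE_SIZE;
  struct demo_entry *cur = demo_table[hash], *last = NULL;

  while (cur && cur->key != key)
    {
      /* Key not found yet; remember the end of the chain.  */
      last = cur;
      cur = cur->next_same_hash;
    }

  if (cur)
    return cur;			/* already present */

  cur = calloc (1, sizeof (struct demo_entry));
  cur->key = key;
  if (last)
    last->next_same_hash = cur;	/* append to this hash chain */
  else
    demo_table[hash] = cur;	/* first entry for this bucket */
  return cur;
}

int
main (void)
{
  demo_insert (3);
  demo_insert (14);		/* 14 % 11 == 3: same bucket */
  printf ("%d\n", demo_table[3]->next_same_hash->key);	/* 14 */
  return 0;
}
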
2018
2019/* Insert pattern X in INSN in the hash table.
2020 X is a SET of a reg to either another reg or a constant.
2021 If it is already present, record it as the last occurrence in INSN's
2022 basic block. */
2023
2024static void
1d088dee 2025insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
7506f491
DE
2026{
2027 int found;
2028 unsigned int hash;
2029 struct expr *cur_expr, *last_expr = NULL;
2030 struct occr *cur_occr, *last_occr = NULL;
2031
2032 if (GET_CODE (x) != SET
2033 || GET_CODE (SET_DEST (x)) != REG)
2034 abort ();
2035
02280659 2036 hash = hash_set (REGNO (SET_DEST (x)), table->size);
7506f491 2037
02280659 2038 cur_expr = table->table[hash];
7506f491
DE
2039 found = 0;
2040
c4c81601 2041 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
7506f491
DE
2042 {
2043 /* If the expression isn't found, save a pointer to the end of
2044 the list. */
2045 last_expr = cur_expr;
2046 cur_expr = cur_expr->next_same_hash;
2047 }
2048
2049 if (! found)
2050 {
703ad42b 2051 cur_expr = gcse_alloc (sizeof (struct expr));
7506f491 2052 bytes_used += sizeof (struct expr);
02280659 2053 if (table->table[hash] == NULL)
c4c81601 2054 /* This is the first pattern that hashed to this index. */
02280659 2055 table->table[hash] = cur_expr;
7506f491 2056 else
c4c81601
RK
2057 /* Add EXPR to end of this hash chain. */
2058 last_expr->next_same_hash = cur_expr;
2059
7506f491
DE
2060 /* Set the fields of the expr element.
2061 We must copy X because it can be modified when copy propagation is
2062 performed on its operands. */
7506f491 2063 cur_expr->expr = copy_rtx (x);
02280659 2064 cur_expr->bitmap_index = table->n_elems++;
7506f491
DE
2065 cur_expr->next_same_hash = NULL;
2066 cur_expr->antic_occr = NULL;
2067 cur_expr->avail_occr = NULL;
2068 }
2069
2070 /* Now record the occurrence. */
7506f491
DE
2071 cur_occr = cur_expr->avail_occr;
2072
2073 /* Search for another occurrence in the same basic block. */
2074 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2075 {
2076 /* If an occurrence isn't found, save a pointer to the end of
2077 the list. */
2078 last_occr = cur_occr;
2079 cur_occr = cur_occr->next;
2080 }
2081
2082 if (cur_occr)
c4c81601
RK
2083 /* Found another instance of the expression in the same basic block.
2084 Prefer this occurrence to the currently recorded one. We want the
2085 last one in the block and the block is scanned from start to end. */
2086 cur_occr->insn = insn;
7506f491
DE
2087 else
2088 {
2089 /* First occurrence of this expression in this basic block. */
703ad42b 2090 cur_occr = gcse_alloc (sizeof (struct occr));
7506f491 2091 bytes_used += sizeof (struct occr);
c4c81601 2092
7506f491
DE
2093 /* First occurrence of this expression in any block? */
2094 if (cur_expr->avail_occr == NULL)
2095 cur_expr->avail_occr = cur_occr;
2096 else
2097 last_occr->next = cur_occr;
c4c81601 2098
7506f491
DE
2099 cur_occr->insn = insn;
2100 cur_occr->next = NULL;
2101 }
2102}
2103
6b2d1c9e
RS
2104/* Determine whether the rtx X should be treated as a constant for
2105 the purposes of GCSE's constant propagation. */
2106
2107static bool
1d088dee 2108gcse_constant_p (rtx x)
6b2d1c9e
RS
2109{
2110 /* Consider a COMPARE of two integers constant. */
2111 if (GET_CODE (x) == COMPARE
2112 && GET_CODE (XEXP (x, 0)) == CONST_INT
2113 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2114 return true;
2115
db2f435b
AP
2116
2117  /* Consider a COMPARE of the same registers to be a constant
938d968e 2118 if they are not floating point registers. */
db2f435b
AP
2119 if (GET_CODE(x) == COMPARE
2120 && GET_CODE (XEXP (x, 0)) == REG
2121 && GET_CODE (XEXP (x, 1)) == REG
2122 && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
2123 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
2124 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
2125 return true;
2126
6b2d1c9e
RS
2127 if (GET_CODE (x) == CONSTANT_P_RTX)
2128 return false;
2129
2130 return CONSTANT_P (x);
2131}
2132
02280659
ZD
2133/* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
2134 expression one). */
7506f491
DE
2135
2136static void
1d088dee 2137hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
7506f491
DE
2138{
2139 rtx src = SET_SRC (pat);
2140 rtx dest = SET_DEST (pat);
172890a2 2141 rtx note;
7506f491
DE
2142
2143 if (GET_CODE (src) == CALL)
02280659 2144 hash_scan_call (src, insn, table);
7506f491 2145
172890a2 2146 else if (GET_CODE (dest) == REG)
7506f491 2147 {
172890a2 2148 unsigned int regno = REGNO (dest);
7506f491
DE
2149 rtx tmp;
2150
172890a2
RK
2151 /* If this is a single set and we are doing constant propagation,
2152 see if a REG_NOTE shows this equivalent to a constant. */
02280659 2153 if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
6b2d1c9e 2154 && gcse_constant_p (XEXP (note, 0)))
172890a2
RK
2155 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2156
7506f491 2157 /* Only record sets of pseudo-regs in the hash table. */
02280659 2158 if (! table->set_p
7506f491
DE
2159 && regno >= FIRST_PSEUDO_REGISTER
2160 /* Don't GCSE something if we can't do a reg/reg copy. */
773eae39 2161 && can_copy_p (GET_MODE (dest))
068473ec
JH
2162 /* GCSE commonly inserts instruction after the insn. We can't
2163 do that easily for EH_REGION notes so disable GCSE on these
2164 for now. */
2165 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
7506f491 2166 /* Is SET_SRC something we want to gcse? */
172890a2
RK
2167 && want_to_gcse_p (src)
2168 /* Don't CSE a nop. */
43e72072
JJ
2169 && ! set_noop_p (pat)
2170 /* Don't GCSE if it has attached REG_EQUIV note.
2171	     At this point only function parameters should have
 2172	     REG_EQUIV notes and if the argument slot is used somewhere
a1f300c0 2173	     explicitly, it means the address of the parameter has been taken,
43e72072
JJ
2174 so we should not extend the lifetime of the pseudo. */
2175 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2176 || GET_CODE (XEXP (note, 0)) != MEM))
7506f491
DE
2177 {
2178 /* An expression is not anticipatable if its operands are
52d76e11
RK
2179 modified before this insn or if this is not the only SET in
2180 this insn. */
2181 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
7506f491 2182 /* An expression is not available if its operands are
eb296bd9
GK
2183 subsequently modified, including this insn. It's also not
2184 available if this is a branch, because we can't insert
2185 a set after the branch. */
2186 int avail_p = (oprs_available_p (src, insn)
2187 && ! JUMP_P (insn));
c4c81601 2188
02280659 2189 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
7506f491 2190 }
c4c81601 2191
7506f491 2192 /* Record sets for constant/copy propagation. */
02280659 2193 else if (table->set_p
7506f491
DE
2194 && regno >= FIRST_PSEUDO_REGISTER
2195 && ((GET_CODE (src) == REG
2196 && REGNO (src) >= FIRST_PSEUDO_REGISTER
773eae39 2197 && can_copy_p (GET_MODE (dest))
172890a2 2198 && REGNO (src) != regno)
6b2d1c9e 2199 || gcse_constant_p (src))
7506f491
DE
2200 /* A copy is not available if its src or dest is subsequently
2201 modified. Here we want to search from INSN+1 on, but
2202 oprs_available_p searches from INSN on. */
2203 && (insn == BLOCK_END (BLOCK_NUM (insn))
2204 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2205 && oprs_available_p (pat, tmp))))
02280659 2206 insert_set_in_table (pat, insn, table);
7506f491 2207 }
7506f491
DE
2208}
2209
2210static void
1d088dee
AJ
2211hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
2212 struct hash_table *table ATTRIBUTE_UNUSED)
7506f491
DE
2213{
2214 /* Currently nothing to do. */
2215}
2216
2217static void
1d088dee
AJ
2218hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
2219 struct hash_table *table ATTRIBUTE_UNUSED)
7506f491
DE
2220{
2221 /* Currently nothing to do. */
2222}
2223
2224/* Process INSN and add hash table entries as appropriate.
2225
2226 Only available expressions that set a single pseudo-reg are recorded.
2227
2228 Single sets in a PARALLEL could be handled, but it's an extra complication
2229 that isn't dealt with right now. The trick is handling the CLOBBERs that
2230 are also in the PARALLEL. Later.
2231
cc2902df 2232 If SET_P is nonzero, this is for the assignment hash table,
ed79bb3d
R
2233 otherwise it is for the expression hash table.
2234   If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should
2235 not record any expressions. */
7506f491
DE
2236
2237static void
1d088dee 2238hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
7506f491
DE
2239{
2240 rtx pat = PATTERN (insn);
c4c81601 2241 int i;
7506f491 2242
172890a2
RK
2243 if (in_libcall_block)
2244 return;
2245
7506f491
DE
2246 /* Pick out the sets of INSN and for other forms of instructions record
2247 what's been modified. */
2248
172890a2 2249 if (GET_CODE (pat) == SET)
02280659 2250 hash_scan_set (pat, insn, table);
7506f491 2251 else if (GET_CODE (pat) == PARALLEL)
c4c81601
RK
2252 for (i = 0; i < XVECLEN (pat, 0); i++)
2253 {
2254 rtx x = XVECEXP (pat, 0, i);
7506f491 2255
c4c81601 2256 if (GET_CODE (x) == SET)
02280659 2257 hash_scan_set (x, insn, table);
c4c81601 2258 else if (GET_CODE (x) == CLOBBER)
02280659 2259 hash_scan_clobber (x, insn, table);
c4c81601 2260 else if (GET_CODE (x) == CALL)
02280659 2261 hash_scan_call (x, insn, table);
c4c81601 2262 }
7506f491 2263
7506f491 2264 else if (GET_CODE (pat) == CLOBBER)
02280659 2265 hash_scan_clobber (pat, insn, table);
7506f491 2266 else if (GET_CODE (pat) == CALL)
02280659 2267 hash_scan_call (pat, insn, table);
7506f491
DE
2268}
2269
2270static void
1d088dee 2271dump_hash_table (FILE *file, const char *name, struct hash_table *table)
7506f491
DE
2272{
2273 int i;
2274 /* Flattened out table, so it's printed in proper order. */
4da896b2
MM
2275 struct expr **flat_table;
2276 unsigned int *hash_val;
c4c81601 2277 struct expr *expr;
4da896b2 2278
703ad42b
KG
2279 flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
2280 hash_val = xmalloc (table->n_elems * sizeof (unsigned int));
7506f491 2281
02280659
ZD
2282 for (i = 0; i < (int) table->size; i++)
2283 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
c4c81601
RK
2284 {
2285 flat_table[expr->bitmap_index] = expr;
2286 hash_val[expr->bitmap_index] = i;
2287 }
7506f491
DE
2288
2289 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
02280659 2290 name, table->size, table->n_elems);
7506f491 2291
02280659 2292 for (i = 0; i < (int) table->n_elems; i++)
21318741
RK
2293 if (flat_table[i] != 0)
2294 {
a0ac9e5a 2295 expr = flat_table[i];
21318741
RK
2296 fprintf (file, "Index %d (hash value %d)\n ",
2297 expr->bitmap_index, hash_val[i]);
a0ac9e5a 2298 print_rtl (file, expr->expr);
21318741
RK
2299 fprintf (file, "\n");
2300 }
7506f491
DE
2301
2302 fprintf (file, "\n");
4da896b2 2303
4da896b2
MM
2304 free (flat_table);
2305 free (hash_val);
7506f491
DE
2306}
2307
2308/* Record register first/last/block set information for REGNO in INSN.
c4c81601 2309
80c29cc4 2310 first_set records the first place in the block where the register
7506f491 2311 is set and is used to compute "anticipatability".
c4c81601 2312
80c29cc4 2313 last_set records the last place in the block where the register
7506f491 2314 is set and is used to compute "availability".
c4c81601 2315
80c29cc4
RZ
2316 last_bb records the block for which first_set and last_set are
2317 valid, as a quick test to invalidate them.
2318
7506f491
DE
2319 reg_set_in_block records whether the register is set in the block
2320 and is used to compute "transparency". */
2321
2322static void
1d088dee 2323record_last_reg_set_info (rtx insn, int regno)
7506f491 2324{
80c29cc4
RZ
2325 struct reg_avail_info *info = &reg_avail_info[regno];
2326 int cuid = INSN_CUID (insn);
c4c81601 2327
80c29cc4
RZ
2328 info->last_set = cuid;
2329 if (info->last_bb != current_bb)
2330 {
2331 info->last_bb = current_bb;
2332 info->first_set = cuid;
e0082a72 2333 SET_BIT (reg_set_in_block[current_bb->index], regno);
80c29cc4 2334 }
7506f491
DE
2335}
2336
a13d4ebf
AM
2337
2338/* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2339 Note we store a pair of elements in the list, so they have to be
2340 taken off pairwise. */
2341
589005ff 2342static void
1d088dee
AJ
2343canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
2344 void * v_insn)
a13d4ebf
AM
2345{
2346 rtx dest_addr, insn;
0fe854a7 2347 int bb;
a13d4ebf
AM
2348
2349 while (GET_CODE (dest) == SUBREG
2350 || GET_CODE (dest) == ZERO_EXTRACT
2351 || GET_CODE (dest) == SIGN_EXTRACT
2352 || GET_CODE (dest) == STRICT_LOW_PART)
2353 dest = XEXP (dest, 0);
2354
2355 /* If DEST is not a MEM, then it will not conflict with a load. Note
2356 that function calls are assumed to clobber memory, but are handled
2357 elsewhere. */
2358
2359 if (GET_CODE (dest) != MEM)
2360 return;
2361
2362 dest_addr = get_addr (XEXP (dest, 0));
2363 dest_addr = canon_rtx (dest_addr);
589005ff 2364 insn = (rtx) v_insn;
0fe854a7 2365 bb = BLOCK_NUM (insn);
a13d4ebf 2366
589005ff 2367 canon_modify_mem_list[bb] =
0fe854a7 2368 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
589005ff 2369 canon_modify_mem_list[bb] =
0fe854a7
RH
2370 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
2371 bitmap_set_bit (canon_modify_mem_list_set, bb);
a13d4ebf
AM
2372}
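
/* Aside: a standalone sketch of the pairwise list layout above (made-up
   cell type; compile separately).  Two related items are pushed per
   record, so the most recent push sits on top of its partner and a walker
   must consume the list two cells at a time.  */

#include <stdio.h>
#include <stdlib.h>

struct demo_cell { int value; struct demo_cell *next; };

static struct demo_cell *
demo_push (int value, struct demo_cell *rest)
{
  struct demo_cell *c = malloc (sizeof (struct demo_cell));

  c->value = value;
  c->next = rest;
  return c;
}

int
main (void)
{
  struct demo_cell *list = NULL, *p;

  /* Store (address, mem) pairs, address first so the mem ends up on top.  */
  list = demo_push (100, list);	/* canonical address of first store */
  list = demo_push (1, list);	/* the mem itself */
  list = demo_push (200, list);
  list = demo_push (2, list);

  /* Entries come off pairwise; the list length must stay even.  */
  for (p = list; p; p = p->next->next)
    printf ("mem %d at addr %d\n", p->value, p->next->value);
  return 0;
}
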
2373
a13d4ebf
AM
2374/* Record memory modification information for INSN. We do not actually care
2375 about the memory location(s) that are set, or even how they are set (consider
2376 a CALL_INSN). We merely need to record which insns modify memory. */
7506f491
DE
2377
2378static void
1d088dee 2379record_last_mem_set_info (rtx insn)
7506f491 2380{
0fe854a7
RH
2381 int bb = BLOCK_NUM (insn);
2382
ccef9ef5 2383 /* load_killed_in_block_p will handle the case of calls clobbering
dc297297 2384 everything. */
0fe854a7
RH
2385 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
2386 bitmap_set_bit (modify_mem_list_set, bb);
a13d4ebf
AM
2387
2388 if (GET_CODE (insn) == CALL_INSN)
2389 {
2390 /* Note that traversals of this loop (other than for free-ing)
2391 will break after encountering a CALL_INSN. So, there's no
dc297297 2392 need to insert a pair of items, as canon_list_insert does. */
589005ff
KH
2393 canon_modify_mem_list[bb] =
2394 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
0fe854a7 2395 bitmap_set_bit (canon_modify_mem_list_set, bb);
a13d4ebf
AM
2396 }
2397 else
0fe854a7 2398 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
7506f491
DE
2399}
2400
7506f491 2401/* Called from compute_hash_table via note_stores to handle one
84832317
MM
2402 SET or CLOBBER in an insn. DATA is really the instruction in which
2403 the SET is taking place. */
7506f491
DE
2404
2405static void
1d088dee 2406record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
7506f491 2407{
84832317
MM
2408 rtx last_set_insn = (rtx) data;
2409
7506f491
DE
2410 if (GET_CODE (dest) == SUBREG)
2411 dest = SUBREG_REG (dest);
2412
2413 if (GET_CODE (dest) == REG)
2414 record_last_reg_set_info (last_set_insn, REGNO (dest));
2415 else if (GET_CODE (dest) == MEM
2416 /* Ignore pushes, they clobber nothing. */
2417 && ! push_operand (dest, GET_MODE (dest)))
2418 record_last_mem_set_info (last_set_insn);
2419}
2420
2421/* Top level function to create an expression or assignment hash table.
2422
2423 Expression entries are placed in the hash table if
2424 - they are of the form (set (pseudo-reg) src),
2425 - src is something we want to perform GCSE on,
2426 - none of the operands are subsequently modified in the block
2427
2428 Assignment entries are placed in the hash table if
2429 - they are of the form (set (pseudo-reg) src),
2430 - src is something we want to perform const/copy propagation on,
2431 - none of the operands or target are subsequently modified in the block
c4c81601 2432
7506f491
DE
2433 Currently src must be a pseudo-reg or a const_int.
2434
02280659 2435 TABLE is the table computed. */
7506f491
DE
2436
2437static void
1d088dee 2438compute_hash_table_work (struct hash_table *table)
7506f491 2439{
80c29cc4 2440 unsigned int i;
7506f491
DE
2441
2442 /* While we compute the hash table we also compute a bit array of which
2443 registers are set in which blocks.
7506f491
DE
2444 ??? This isn't needed during const/copy propagation, but it's cheap to
2445 compute. Later. */
d55bc081 2446 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
7506f491 2447
a13d4ebf 2448  /* Re-cache any INSN_LIST nodes we have allocated.  */
73991d6a 2449 clear_modify_mem_tables ();
7506f491 2450 /* Some working arrays used to track first and last set in each block. */
703ad42b 2451 reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
80c29cc4
RZ
2452
2453 for (i = 0; i < max_gcse_regno; ++i)
e0082a72 2454 reg_avail_info[i].last_bb = NULL;
7506f491 2455
e0082a72 2456 FOR_EACH_BB (current_bb)
7506f491
DE
2457 {
2458 rtx insn;
770ae6cc 2459 unsigned int regno;
ed79bb3d 2460 int in_libcall_block;
7506f491
DE
2461
2462 /* First pass over the instructions records information used to
2463 determine when registers and memory are first and last set.
ccef9ef5 2464 ??? hard-reg reg_set_in_block computation
7506f491
DE
2465 could be moved to compute_sets since they currently don't change. */
2466
e0082a72
ZD
2467 for (insn = current_bb->head;
2468 insn && insn != NEXT_INSN (current_bb->end);
7506f491
DE
2469 insn = NEXT_INSN (insn))
2470 {
2c3c49de 2471 if (! INSN_P (insn))
7506f491
DE
2472 continue;
2473
2474 if (GET_CODE (insn) == CALL_INSN)
2475 {
19652adf 2476 bool clobbers_all = false;
589005ff 2477#ifdef NON_SAVING_SETJMP
19652adf
ZW
2478 if (NON_SAVING_SETJMP
2479 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
2480 clobbers_all = true;
2481#endif
2482
7506f491 2483 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
19652adf
ZW
2484 if (clobbers_all
2485 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7506f491 2486 record_last_reg_set_info (insn, regno);
c4c81601 2487
24a28584 2488 mark_call (insn);
7506f491
DE
2489 }
2490
84832317 2491 note_stores (PATTERN (insn), record_last_set_info, insn);
7506f491
DE
2492 }
2493
fbef91d8
RS
2494 /* Insert implicit sets in the hash table. */
2495 if (table->set_p
2496 && implicit_sets[current_bb->index] != NULL_RTX)
2497 hash_scan_set (implicit_sets[current_bb->index],
2498 current_bb->head, table);
2499
7506f491
DE
2500 /* The next pass builds the hash table. */
2501
e0082a72
ZD
2502 for (insn = current_bb->head, in_libcall_block = 0;
2503 insn && insn != NEXT_INSN (current_bb->end);
7506f491 2504 insn = NEXT_INSN (insn))
2c3c49de 2505 if (INSN_P (insn))
c4c81601
RK
2506 {
2507 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
589005ff 2508 in_libcall_block = 1;
02280659 2509 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
589005ff 2510 in_libcall_block = 0;
02280659
ZD
2511 hash_scan_insn (insn, table, in_libcall_block);
2512 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
589005ff 2513 in_libcall_block = 0;
8e42ace1 2514 }
7506f491
DE
2515 }
2516
80c29cc4
RZ
2517 free (reg_avail_info);
2518 reg_avail_info = NULL;
7506f491
DE
2519}
2520
02280659 2521/* Allocate space for the set/expr hash TABLE.
7506f491 2522 N_INSNS is the number of instructions in the function.
02280659
ZD
2523 It is used to determine the number of buckets to use.
2524 SET_P determines whether set or expression table will
2525 be created. */
7506f491
DE
2526
2527static void
1d088dee 2528alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
7506f491
DE
2529{
2530 int n;
2531
02280659
ZD
2532 table->size = n_insns / 4;
2533 if (table->size < 11)
2534 table->size = 11;
c4c81601 2535
7506f491
DE
2536 /* Attempt to maintain efficient use of hash table.
2537 Making it an odd number is simplest for now.
2538 ??? Later take some measurements. */
02280659
ZD
2539 table->size |= 1;
2540 n = table->size * sizeof (struct expr *);
703ad42b 2541 table->table = gmalloc (n);
02280659 2542 table->set_p = set_p;
7506f491
DE
2543}
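
/* Aside: the sizing rule above as a standalone sketch (compile
   separately): roughly one bucket per four insns, at least 11, and forced
   odd by setting the low bit.  */

#include <stdio.h>

static int
demo_table_size (int n_insns)
{
  int size = n_insns / 4;

  if (size < 11)
    size = 11;
  return size | 1;		/* make it odd */
}

int
main (void)
{
  printf ("%d %d %d\n",
	  demo_table_size (10),	    /* 11 */
	  demo_table_size (100),    /* 25 */
	  demo_table_size (4000));  /* 1001 */
  return 0;
}
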
2544
02280659 2545/* Free things allocated by alloc_hash_table. */
7506f491
DE
2546
2547static void
1d088dee 2548free_hash_table (struct hash_table *table)
7506f491 2549{
02280659 2550 free (table->table);
7506f491
DE
2551}
2552
02280659
ZD
2553/* Compute the hash TABLE for doing copy/const propagation or
2554 expression hash table. */
7506f491
DE
2555
2556static void
1d088dee 2557compute_hash_table (struct hash_table *table)
7506f491
DE
2558{
2559 /* Initialize count of number of entries in hash table. */
02280659 2560 table->n_elems = 0;
703ad42b 2561 memset (table->table, 0, table->size * sizeof (struct expr *));
7506f491 2562
02280659 2563 compute_hash_table_work (table);
7506f491
DE
2564}
2565\f
2566/* Expression tracking support. */
2567
02280659 2568/* Lookup pattern PAT in the expression TABLE.
7506f491
DE
2569 The result is a pointer to the table entry, or NULL if not found. */
2570
2571static struct expr *
1d088dee 2572lookup_expr (rtx pat, struct hash_table *table)
7506f491
DE
2573{
2574 int do_not_record_p;
2575 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
02280659 2576 table->size);
7506f491
DE
2577 struct expr *expr;
2578
2579 if (do_not_record_p)
2580 return NULL;
2581
02280659 2582 expr = table->table[hash];
7506f491
DE
2583
2584 while (expr && ! expr_equiv_p (expr->expr, pat))
2585 expr = expr->next_same_hash;
2586
2587 return expr;
2588}
2589
ceda50e9
RH
2590/* Lookup REGNO in the set TABLE. The result is a pointer to the
2591 table entry, or NULL if not found. */
7506f491
DE
2592
2593static struct expr *
1d088dee 2594lookup_set (unsigned int regno, struct hash_table *table)
7506f491 2595{
02280659 2596 unsigned int hash = hash_set (regno, table->size);
7506f491
DE
2597 struct expr *expr;
2598
02280659 2599 expr = table->table[hash];
7506f491 2600
ceda50e9
RH
2601 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2602 expr = expr->next_same_hash;
7506f491
DE
2603
2604 return expr;
2605}
2606
2607/* Return the next entry for REGNO in list EXPR. */
2608
2609static struct expr *
1d088dee 2610next_set (unsigned int regno, struct expr *expr)
7506f491
DE
2611{
2612 do
2613 expr = expr->next_same_hash;
2614 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
c4c81601 2615
7506f491
DE
2616 return expr;
2617}
2618
0fe854a7
RH
2619/* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2620 types may be mixed. */
2621
2622static void
1d088dee 2623free_insn_expr_list_list (rtx *listp)
0fe854a7
RH
2624{
2625 rtx list, next;
2626
2627 for (list = *listp; list ; list = next)
2628 {
2629 next = XEXP (list, 1);
2630 if (GET_CODE (list) == EXPR_LIST)
2631 free_EXPR_LIST_node (list);
2632 else
2633 free_INSN_LIST_node (list);
2634 }
2635
2636 *listp = NULL;
2637}
2638
73991d6a
JH
2639/* Clear canon_modify_mem_list and modify_mem_list tables. */
2640static void
1d088dee 2641clear_modify_mem_tables (void)
73991d6a
JH
2642{
2643 int i;
2644
2645 EXECUTE_IF_SET_IN_BITMAP
0fe854a7
RH
2646 (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
2647 bitmap_clear (modify_mem_list_set);
73991d6a
JH
2648
2649 EXECUTE_IF_SET_IN_BITMAP
2650 (canon_modify_mem_list_set, 0, i,
0fe854a7
RH
2651 free_insn_expr_list_list (canon_modify_mem_list + i));
2652 bitmap_clear (canon_modify_mem_list_set);
73991d6a
JH
2653}
2654
2655/* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
2656
2657static void
1d088dee 2658free_modify_mem_tables (void)
73991d6a
JH
2659{
2660 clear_modify_mem_tables ();
2661 free (modify_mem_list);
2662 free (canon_modify_mem_list);
2663 modify_mem_list = 0;
2664 canon_modify_mem_list = 0;
2665}
2666
7506f491
DE
2667/* Reset tables used to keep track of what's still available [since the
2668 start of the block]. */
2669
2670static void
1d088dee 2671reset_opr_set_tables (void)
7506f491
DE
2672{
2673 /* Maintain a bitmap of which regs have been set since beginning of
2674 the block. */
73991d6a 2675 CLEAR_REG_SET (reg_set_bitmap);
c4c81601 2676
7506f491
DE
2677 /* Also keep a record of the last instruction to modify memory.
2678 For now this is very trivial, we only record whether any memory
2679 location has been modified. */
73991d6a 2680 clear_modify_mem_tables ();
7506f491
DE
2681}
2682
cc2902df 2683/* Return nonzero if the operands of X are not set before INSN in
7506f491
DE
2684 INSN's basic block. */
2685
2686static int
1d088dee 2687oprs_not_set_p (rtx x, rtx insn)
7506f491 2688{
c4c81601 2689 int i, j;
7506f491 2690 enum rtx_code code;
6f7d635c 2691 const char *fmt;
7506f491 2692
7506f491
DE
2693 if (x == 0)
2694 return 1;
2695
2696 code = GET_CODE (x);
2697 switch (code)
2698 {
2699 case PC:
2700 case CC0:
2701 case CONST:
2702 case CONST_INT:
2703 case CONST_DOUBLE:
69ef87e2 2704 case CONST_VECTOR:
7506f491
DE
2705 case SYMBOL_REF:
2706 case LABEL_REF:
2707 case ADDR_VEC:
2708 case ADDR_DIFF_VEC:
2709 return 1;
2710
2711 case MEM:
589005ff 2712 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
e2d2ed72 2713 INSN_CUID (insn), x, 0))
a13d4ebf 2714 return 0;
c4c81601
RK
2715 else
2716 return oprs_not_set_p (XEXP (x, 0), insn);
7506f491
DE
2717
2718 case REG:
73991d6a 2719 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
7506f491
DE
2720
2721 default:
2722 break;
2723 }
2724
c4c81601 2725 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
2726 {
2727 if (fmt[i] == 'e')
2728 {
7506f491
DE
2729 /* If we are about to do the last recursive call
2730 needed at this level, change it into iteration.
2731 This function is called enough to be worth it. */
2732 if (i == 0)
c4c81601
RK
2733 return oprs_not_set_p (XEXP (x, i), insn);
2734
2735 if (! oprs_not_set_p (XEXP (x, i), insn))
7506f491
DE
2736 return 0;
2737 }
2738 else if (fmt[i] == 'E')
c4c81601
RK
2739 for (j = 0; j < XVECLEN (x, i); j++)
2740 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2741 return 0;
7506f491
DE
2742 }
2743
2744 return 1;
2745}
2746
2747/* Mark things set by a CALL. */
2748
2749static void
1d088dee 2750mark_call (rtx insn)
7506f491 2751{
24a28584 2752 if (! CONST_OR_PURE_CALL_P (insn))
a13d4ebf 2753 record_last_mem_set_info (insn);
7506f491
DE
2754}
2755
2756/* Mark things set by a SET. */
2757
2758static void
1d088dee 2759mark_set (rtx pat, rtx insn)
7506f491
DE
2760{
2761 rtx dest = SET_DEST (pat);
2762
2763 while (GET_CODE (dest) == SUBREG
2764 || GET_CODE (dest) == ZERO_EXTRACT
2765 || GET_CODE (dest) == SIGN_EXTRACT
2766 || GET_CODE (dest) == STRICT_LOW_PART)
2767 dest = XEXP (dest, 0);
2768
a13d4ebf 2769 if (GET_CODE (dest) == REG)
73991d6a 2770 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
a13d4ebf
AM
2771 else if (GET_CODE (dest) == MEM)
2772 record_last_mem_set_info (insn);
2773
7506f491 2774 if (GET_CODE (SET_SRC (pat)) == CALL)
b5ce41ff 2775 mark_call (insn);
7506f491
DE
2776}
2777
2778/* Record things set by a CLOBBER. */
2779
2780static void
1d088dee 2781mark_clobber (rtx pat, rtx insn)
7506f491
DE
2782{
2783 rtx clob = XEXP (pat, 0);
2784
2785 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2786 clob = XEXP (clob, 0);
2787
a13d4ebf 2788 if (GET_CODE (clob) == REG)
73991d6a 2789 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
a13d4ebf
AM
2790 else
2791 record_last_mem_set_info (insn);
7506f491
DE
2792}
2793
2794/* Record things set by INSN.
2795 This data is used by oprs_not_set_p. */
2796
2797static void
1d088dee 2798mark_oprs_set (rtx insn)
7506f491
DE
2799{
2800 rtx pat = PATTERN (insn);
c4c81601 2801 int i;
7506f491
DE
2802
2803 if (GET_CODE (pat) == SET)
2804 mark_set (pat, insn);
2805 else if (GET_CODE (pat) == PARALLEL)
c4c81601
RK
2806 for (i = 0; i < XVECLEN (pat, 0); i++)
2807 {
2808 rtx x = XVECEXP (pat, 0, i);
2809
2810 if (GET_CODE (x) == SET)
2811 mark_set (x, insn);
2812 else if (GET_CODE (x) == CLOBBER)
2813 mark_clobber (x, insn);
2814 else if (GET_CODE (x) == CALL)
2815 mark_call (insn);
2816 }
7506f491 2817
7506f491
DE
2818 else if (GET_CODE (pat) == CLOBBER)
2819 mark_clobber (pat, insn);
2820 else if (GET_CODE (pat) == CALL)
b5ce41ff 2821 mark_call (insn);
7506f491 2822}
b5ce41ff 2823
7506f491
DE
2824\f
2825/* Classic GCSE reaching definition support. */
2826
2827/* Allocate reaching def variables. */
2828
2829static void
1d088dee 2830alloc_rd_mem (int n_blocks, int n_insns)
7506f491 2831{
703ad42b 2832 rd_kill = sbitmap_vector_alloc (n_blocks, n_insns);
d55bc081 2833 sbitmap_vector_zero (rd_kill, n_blocks);
7506f491 2834
703ad42b 2835 rd_gen = sbitmap_vector_alloc (n_blocks, n_insns);
d55bc081 2836 sbitmap_vector_zero (rd_gen, n_blocks);
7506f491 2837
703ad42b 2838 reaching_defs = sbitmap_vector_alloc (n_blocks, n_insns);
d55bc081 2839 sbitmap_vector_zero (reaching_defs, n_blocks);
7506f491 2840
703ad42b 2841 rd_out = sbitmap_vector_alloc (n_blocks, n_insns);
d55bc081 2842 sbitmap_vector_zero (rd_out, n_blocks);
7506f491
DE
2843}
2844
2845/* Free reaching def variables. */
2846
2847static void
1d088dee 2848free_rd_mem (void)
7506f491 2849{
5a660bff
DB
2850 sbitmap_vector_free (rd_kill);
2851 sbitmap_vector_free (rd_gen);
2852 sbitmap_vector_free (reaching_defs);
2853 sbitmap_vector_free (rd_out);
7506f491
DE
2854}
2855
c4c81601 2856/* Add INSN to the kills of BB. REGNO, set in BB, is killed by INSN. */
7506f491
DE
2857
2858static void
1d088dee 2859handle_rd_kill_set (rtx insn, int regno, basic_block bb)
7506f491 2860{
c4c81601 2861 struct reg_set *this_reg;
7506f491 2862
c4c81601
RK
2863 for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg ->next)
2864 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
0b17ab2f 2865 SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
7506f491
DE
2866}
2867
7506f491
DE
2868/* Compute the set of kill's for reaching definitions. */
2869
2870static void
1d088dee 2871compute_kill_rd (void)
7506f491 2872{
e0082a72 2873 int cuid;
172890a2
RK
2874 unsigned int regno;
2875 int i;
e0082a72 2876 basic_block bb;
7506f491
DE
2877
2878 /* For each block
2879 For each set bit in `gen' of the block (i.e each insn which
ac7c5af5
JL
2880 generates a definition in the block)
2881 Call the reg set by the insn corresponding to that bit regx
2882 Look at the linked list starting at reg_set_table[regx]
2883 For each setting of regx in the linked list, which is not in
2884 this block
6d2f8887 2885 Set the bit in `kill' corresponding to that insn. */
e0082a72 2886 FOR_EACH_BB (bb)
c4c81601 2887 for (cuid = 0; cuid < max_cuid; cuid++)
e0082a72 2888 if (TEST_BIT (rd_gen[bb->index], cuid))
7506f491 2889 {
c4c81601
RK
2890 rtx insn = CUID_INSN (cuid);
2891 rtx pat = PATTERN (insn);
7506f491 2892
c4c81601
RK
2893 if (GET_CODE (insn) == CALL_INSN)
2894 {
2895 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4e2db584 2896 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
e0082a72 2897 handle_rd_kill_set (insn, regno, bb);
c4c81601 2898 }
7506f491 2899
c4c81601
RK
2900 if (GET_CODE (pat) == PARALLEL)
2901 {
2902 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7506f491 2903 {
c4c81601 2904 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
7506f491 2905
c4c81601
RK
2906 if ((code == SET || code == CLOBBER)
2907 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
2908 handle_rd_kill_set (insn,
2909 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
e0082a72 2910 bb);
ac7c5af5 2911 }
ac7c5af5 2912 }
c4c81601
RK
2913 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
2914 /* Each setting of this register outside of this block
2915 must be marked in the set of kills in this block. */
e0082a72 2916 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
7506f491 2917 }
7506f491
DE
2918}
2919
589005ff 2920/* Compute the reaching definitions as in
7506f491
DE
2921 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
2922 Chapter 10. It is the same algorithm as used for computing available
2923 expressions but applied to the gens and kills of reaching definitions. */
2924
2925static void
1d088dee 2926compute_rd (void)
7506f491 2927{
e0082a72
ZD
2928 int changed, passes;
2929 basic_block bb;
7506f491 2930
e0082a72
ZD
2931 FOR_EACH_BB (bb)
2932 sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);
7506f491
DE
2933
2934 passes = 0;
2935 changed = 1;
2936 while (changed)
2937 {
2938 changed = 0;
e0082a72 2939 FOR_EACH_BB (bb)
ac7c5af5 2940 {
e0082a72
ZD
2941 sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
2942 changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
2943 reaching_defs[bb->index], rd_kill[bb->index]);
ac7c5af5 2944 }
7506f491
DE
2945 passes++;
2946 }
2947
2948 if (gcse_file)
2949 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
2950}
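
/* Aside: a standalone sketch of the fixed-point iteration above, with
   word-wide bit masks standing in for sbitmaps and a hard-wired two-block
   cycle (0 -> 1 -> 0); compile separately.  Each pass recomputes
   out[b] = gen[b] | (in[b] & ~kill[b]) until no out set changes.  */

#include <stdio.h>

#define DEMO_NBLOCKS 2

int
main (void)
{
  unsigned gen[DEMO_NBLOCKS] = { 0x1, 0x2 };
  unsigned kills[DEMO_NBLOCKS] = { 0x2, 0x0 };
  unsigned in[DEMO_NBLOCKS] = { 0, 0 }, out[DEMO_NBLOCKS] = { 0, 0 };
  int pred[DEMO_NBLOCKS] = { 1, 0 };	/* one predecessor per block */
  int changed = 1, passes = 0, b;

  while (changed)
    {
      changed = 0;
      for (b = 0; b < DEMO_NBLOCKS; b++)
	{
	  unsigned new_out;

	  in[b] = out[pred[b]];	/* union over the (single) predecessor */
	  new_out = gen[b] | (in[b] & ~kills[b]);
	  if (new_out != out[b])
	    {
	      out[b] = new_out;
	      changed = 1;
	    }
	}
      passes++;
    }

  /* Prints "out0=1 out1=3 passes=2".  */
  printf ("out0=%x out1=%x passes=%d\n", out[0], out[1], passes);
  return 0;
}
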
2951\f
2952/* Classic GCSE available expression support. */
2953
2954/* Allocate memory for available expression computation. */
2955
2956static void
1d088dee 2957alloc_avail_expr_mem (int n_blocks, int n_exprs)
7506f491 2958{
703ad42b 2959 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
d55bc081 2960 sbitmap_vector_zero (ae_kill, n_blocks);
7506f491 2961
703ad42b 2962 ae_gen = sbitmap_vector_alloc (n_blocks, n_exprs);
d55bc081 2963 sbitmap_vector_zero (ae_gen, n_blocks);
7506f491 2964
703ad42b 2965 ae_in = sbitmap_vector_alloc (n_blocks, n_exprs);
d55bc081 2966 sbitmap_vector_zero (ae_in, n_blocks);
7506f491 2967
703ad42b 2968 ae_out = sbitmap_vector_alloc (n_blocks, n_exprs);
d55bc081 2969 sbitmap_vector_zero (ae_out, n_blocks);
7506f491
DE
2970}
2971
2972static void
1d088dee 2973free_avail_expr_mem (void)
7506f491 2974{
5a660bff
DB
2975 sbitmap_vector_free (ae_kill);
2976 sbitmap_vector_free (ae_gen);
2977 sbitmap_vector_free (ae_in);
2978 sbitmap_vector_free (ae_out);
7506f491
DE
2979}
2980
2981/* Compute the set of available expressions generated in each basic block. */
2982
2983static void
1d088dee 2984compute_ae_gen (struct hash_table *expr_hash_table)
7506f491 2985{
2e653e39 2986 unsigned int i;
c4c81601
RK
2987 struct expr *expr;
2988 struct occr *occr;
7506f491
DE
2989
2990 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
2991 This is all we have to do because an expression is not recorded if it
2992 is not available, and the only expressions we want to work with are the
2993 ones that are recorded. */
02280659
ZD
2994 for (i = 0; i < expr_hash_table->size; i++)
2995 for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
c4c81601
RK
2996 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
2997 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
7506f491
DE
2998}
2999
cc2902df 3000/* Return nonzero if expression X is killed in BB. */
7506f491
DE
3001
3002static int
1d088dee 3003expr_killed_p (rtx x, basic_block bb)
7506f491 3004{
c4c81601 3005 int i, j;
7506f491 3006 enum rtx_code code;
6f7d635c 3007 const char *fmt;
7506f491 3008
7506f491
DE
3009 if (x == 0)
3010 return 1;
3011
3012 code = GET_CODE (x);
3013 switch (code)
3014 {
3015 case REG:
0b17ab2f 3016 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
7506f491
DE
3017
3018 case MEM:
a13d4ebf
AM
3019 if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
3020 return 1;
c4c81601
RK
3021 else
3022 return expr_killed_p (XEXP (x, 0), bb);
7506f491
DE
3023
3024 case PC:
3025 case CC0: /*FIXME*/
3026 case CONST:
3027 case CONST_INT:
3028 case CONST_DOUBLE:
69ef87e2 3029 case CONST_VECTOR:
7506f491
DE
3030 case SYMBOL_REF:
3031 case LABEL_REF:
3032 case ADDR_VEC:
3033 case ADDR_DIFF_VEC:
3034 return 0;
3035
3036 default:
3037 break;
3038 }
3039
c4c81601 3040 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
7506f491
DE
3041 {
3042 if (fmt[i] == 'e')
3043 {
7506f491
DE
3044 /* If we are about to do the last recursive call
3045 needed at this level, change it into iteration.
3046 This function is called enough to be worth it. */
3047 if (i == 0)
c4c81601
RK
3048 return expr_killed_p (XEXP (x, i), bb);
3049 else if (expr_killed_p (XEXP (x, i), bb))
7506f491
DE
3050 return 1;
3051 }
3052 else if (fmt[i] == 'E')
c4c81601
RK
3053 for (j = 0; j < XVECLEN (x, i); j++)
3054 if (expr_killed_p (XVECEXP (x, i, j), bb))
3055 return 1;
7506f491
DE
3056 }
3057
3058 return 0;
3059}
3060
3061/* Compute the set of available expressions killed in each basic block. */
3062
3063static void
1d088dee
AJ
3064compute_ae_kill (sbitmap *ae_gen, sbitmap *ae_kill,
3065 struct hash_table *expr_hash_table)
7506f491 3066{
e0082a72 3067 basic_block bb;
2e653e39 3068 unsigned int i;
c4c81601 3069 struct expr *expr;
7506f491 3070
e0082a72 3071 FOR_EACH_BB (bb)
02280659
ZD
3072 for (i = 0; i < expr_hash_table->size; i++)
3073 for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash)
7506f491 3074 {
c4c81601 3075 /* Skip EXPR if generated in this block. */
e0082a72 3076 if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
c4c81601 3077 continue;
7506f491 3078
e0082a72
ZD
3079 if (expr_killed_p (expr->expr, bb))
3080 SET_BIT (ae_kill[bb->index], expr->bitmap_index);
7506f491 3081 }
7506f491 3082}
7506f491
DE
3083\f
3084/* Actually perform the Classic GCSE optimizations. */
3085
cc2902df 3086/* Return nonzero if occurrence OCCR of expression EXPR reaches block BB.
7506f491 3087
cc2902df 3088 CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself
7506f491
DE
3089 as a positive reach. We want to do this when there are two computations
3090 of the expression in the block.
3091
3092 VISITED is a pointer to a working buffer for tracking which BB's have
3093 been visited. It is NULL for the top-level call.
3094
3095 We treat reaching expressions that go through blocks containing the same
3096 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3097 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3098 2 as not reaching. The intent is to improve the probability of finding
3099 only one reaching expression and to reduce register lifetimes by picking
3100 the closest such expression. */
3101
3102static int
1d088dee
AJ
3103expr_reaches_here_p_work (struct occr *occr, struct expr *expr,
3104 basic_block bb, int check_self_loop, char *visited)
7506f491 3105{
36349f8b 3106 edge pred;
7506f491 3107
e2d2ed72 3108 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
7506f491 3109 {
e2d2ed72 3110 basic_block pred_bb = pred->src;
7506f491 3111
0b17ab2f 3112 if (visited[pred_bb->index])
c4c81601 3113 /* This predecessor has already been visited. Nothing to do. */
7506f491 3114 ;
7506f491 3115 else if (pred_bb == bb)
ac7c5af5 3116 {
7506f491
DE
3117 /* BB loops on itself. */
3118 if (check_self_loop
0b17ab2f
RH
3119 && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
3120 && BLOCK_NUM (occr->insn) == pred_bb->index)
7506f491 3121 return 1;
c4c81601 3122
0b17ab2f 3123 visited[pred_bb->index] = 1;
ac7c5af5 3124 }
c4c81601 3125
7506f491 3126 /* Ignore this predecessor if it kills the expression. */
0b17ab2f
RH
3127 else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
3128 visited[pred_bb->index] = 1;
c4c81601 3129
7506f491 3130 /* Does this predecessor generate this expression? */
0b17ab2f 3131 else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
7506f491
DE
3132 {
3133 /* Is this the occurrence we're looking for?
3134 Note that there's only one generating occurrence per block
3135 so we just need to check the block number. */
0b17ab2f 3136 if (BLOCK_NUM (occr->insn) == pred_bb->index)
7506f491 3137 return 1;
c4c81601 3138
0b17ab2f 3139 visited[pred_bb->index] = 1;
7506f491 3140 }
c4c81601 3141
7506f491
DE
3142 /* Neither gen nor kill. */
3143 else
ac7c5af5 3144 {
0b17ab2f 3145 visited[pred_bb->index] = 1;
589005ff 3146 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
283a2545 3147 visited))
c4c81601 3148
7506f491 3149 return 1;
ac7c5af5 3150 }
7506f491
DE
3151 }
3152
3153 /* All paths have been checked. */
3154 return 0;
3155}
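
/* Aside: a standalone sketch of the backward walk above (hard-wired
   diamond CFG, made-up arrays; compile separately).  The search runs
   depth-first over predecessors, a visited[] byte per block cuts off
   repeat work, a block that kills the expression ends its path, and a
   generating block succeeds only if it holds the occurrence we want.  */

#include <stdio.h>
#include <string.h>

#define DEMO_NBLOCKS 4

/* demo_preds[b] lists b's predecessors, terminated by -1.
   Shape: 0 -> 1 -> 3 and 0 -> 2 -> 3.  */
static const int demo_preds[DEMO_NBLOCKS][3] = {
  { -1 }, { 0, -1 }, { 0, -1 }, { 1, 2, -1 }
};
static const int demo_gen[DEMO_NBLOCKS] = { 1, 0, 0, 0 };
static const int demo_kill[DEMO_NBLOCKS] = { 0, 0, 1, 0 };

static int
demo_reaches_p (int bb, int gen_bb, char *visited)
{
  int i;

  for (i = 0; demo_preds[bb][i] != -1; i++)
    {
      int p = demo_preds[bb][i];

      if (visited[p])
	continue;
      visited[p] = 1;

      if (demo_kill[p])
	continue;		/* this path is killed */
      if (demo_gen[p])
	{
	  if (p == gen_bb)
	    return 1;		/* found the wanted occurrence */
	  continue;		/* a closer occurrence shadows ours */
	}
      if (demo_reaches_p (p, gen_bb, visited))
	return 1;
    }
  return 0;
}

int
main (void)
{
  char visited[DEMO_NBLOCKS];

  memset (visited, 0, sizeof visited);
  /* The expression generated in block 0 reaches block 3 via
     0 -> 1 -> 3; the 0 -> 2 -> 3 path is killed in block 2.  */
  printf ("%d\n", demo_reaches_p (3, 0, visited));	/* 1 */
  return 0;
}
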
3156
283a2545 3157/* This wrapper for expr_reaches_here_p_work() is to ensure that any
dc297297 3158 memory allocated for that function is returned. */
283a2545
RL
3159
3160static int
1d088dee
AJ
3161expr_reaches_here_p (struct occr *occr, struct expr *expr, basic_block bb,
3162 int check_self_loop)
283a2545
RL
3163{
3164 int rval;
703ad42b 3165 char *visited = xcalloc (last_basic_block, 1);
283a2545 3166
c4c81601 3167 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
589005ff 3168
283a2545 3169 free (visited);
c4c81601 3170 return rval;
283a2545
RL
3171}

/* Return the instruction that computes EXPR that reaches INSN's basic block.
   If there is more than one such instruction, return NULL.

   Called only by handle_avail_expr.  */

static rtx
computing_insn (struct expr *expr, rtx insn)
{
  basic_block bb = BLOCK_FOR_INSN (insn);

  if (expr->avail_occr->next == NULL)
    {
      if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
        /* The available expression is actually itself
           (i.e. a loop in the flow graph) so do nothing.  */
        return NULL;

      /* (FIXME) Case that we found a pattern that was created by
         a substitution that took place.  */
      return expr->avail_occr->insn;
    }
  else
    {
      /* Pattern is computed more than once.
         Search backwards from this insn to see how many of these
         computations actually reach this insn.  */
      struct occr *occr;
      rtx insn_computes_expr = NULL;
      int can_reach = 0;

      for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
        {
          if (BLOCK_FOR_INSN (occr->insn) == bb)
            {
              /* The expression is generated in this block.
                 The only time we care about this is when the expression
                 is generated later in the block [and thus there's a loop].
                 We let the normal cse pass handle the other cases.  */
              if (INSN_CUID (insn) < INSN_CUID (occr->insn)
                  && expr_reaches_here_p (occr, expr, bb, 1))
                {
                  can_reach++;
                  if (can_reach > 1)
                    return NULL;

                  insn_computes_expr = occr->insn;
                }
            }
          else if (expr_reaches_here_p (occr, expr, bb, 0))
            {
              can_reach++;
              if (can_reach > 1)
                return NULL;

              insn_computes_expr = occr->insn;
            }
        }

      if (insn_computes_expr == NULL)
        abort ();

      return insn_computes_expr;
    }
}

/* Return nonzero if the definition in DEF_INSN can reach INSN.
   Only called by can_disregard_other_sets.  */

static int
def_reaches_here_p (rtx insn, rtx def_insn)
{
  rtx reg;

  if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
    return 1;

  if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
    {
      if (INSN_CUID (def_insn) < INSN_CUID (insn))
        {
          if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
            return 1;
          else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
            reg = XEXP (PATTERN (def_insn), 0);
          else if (GET_CODE (PATTERN (def_insn)) == SET)
            reg = SET_DEST (PATTERN (def_insn));
          else
            abort ();

          return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
        }
      else
        return 0;
    }

  return 0;
}

/* Return nonzero if *ADDR_THIS_REG can only have one value at INSN.  The
   value returned is the number of definitions that reach INSN.  Returning a
   value of zero means that [maybe] more than one definition reaches INSN and
   the caller can't perform whatever optimization it is trying.  I.e. it is
   always safe to return zero.  */

static int
can_disregard_other_sets (struct reg_set **addr_this_reg, rtx insn, int for_combine)
{
  int number_of_reaching_defs = 0;
  struct reg_set *this_reg;

  for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
    if (def_reaches_here_p (insn, this_reg->insn))
      {
        number_of_reaching_defs++;
        /* Ignore parallels for now.  */
        if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
          return 0;

        if (!for_combine
            && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
                || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
                                  SET_SRC (PATTERN (insn)))))
          /* A setting of the reg to a different value reaches INSN.  */
          return 0;

        if (number_of_reaching_defs > 1)
          {
            /* If in this setting the value the register is being set to is
               equal to the previous value the register was set to and this
               setting reaches the insn we are trying to do the substitution
               on then we are ok.  */
            if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
              return 0;
            else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
                                    SET_SRC (PATTERN (insn))))
              return 0;
          }

        *addr_this_reg = this_reg;
      }

  return number_of_reaching_defs;
}

/* Expression computed by insn is available and the substitution is legal,
   so try to perform the substitution.

   The result is nonzero if any changes were made.  */

static int
handle_avail_expr (rtx insn, struct expr *expr)
{
  rtx pat, insn_computes_expr, expr_set;
  rtx to;
  struct reg_set *this_reg;
  int found_setting, use_src;
  int changed = 0;

  /* We only handle the case where one computation of the expression
     reaches this instruction.  */
  insn_computes_expr = computing_insn (expr, insn);
  if (insn_computes_expr == NULL)
    return 0;
  expr_set = single_set (insn_computes_expr);
  if (!expr_set)
    abort ();

  found_setting = 0;
  use_src = 0;

  /* At this point we know only one computation of EXPR outside of this
     block reaches this insn.  Now try to find a register that the
     expression is computed into.  */
  if (GET_CODE (SET_SRC (expr_set)) == REG)
    {
      /* This is the case when the available expression that reaches
         here has already been handled as an available expression.  */
      unsigned int regnum_for_replacing
        = REGNO (SET_SRC (expr_set));

      /* If the register was created by GCSE we can't use `reg_set_table',
         however we know it's set only once.  */
      if (regnum_for_replacing >= max_gcse_regno
          /* If the register the expression is computed into is set only once,
             or only one set reaches this insn, we can use it.  */
          || (((this_reg = reg_set_table[regnum_for_replacing]),
               this_reg->next == NULL)
              || can_disregard_other_sets (&this_reg, insn, 0)))
        {
          use_src = 1;
          found_setting = 1;
        }
    }

  if (!found_setting)
    {
      unsigned int regnum_for_replacing
        = REGNO (SET_DEST (expr_set));

      /* This shouldn't happen.  */
      if (regnum_for_replacing >= max_gcse_regno)
        abort ();

      this_reg = reg_set_table[regnum_for_replacing];

      /* If the register the expression is computed into is set only once,
         or only one set reaches this insn, use it.  */
      if (this_reg->next == NULL
          || can_disregard_other_sets (&this_reg, insn, 0))
        found_setting = 1;
    }

  if (found_setting)
    {
      pat = PATTERN (insn);
      if (use_src)
        to = SET_SRC (expr_set);
      else
        to = SET_DEST (expr_set);
      changed = validate_change (insn, &SET_SRC (pat), to, 0);

      /* We should be able to ignore the return code from validate_change but
         to play it safe we check.  */
      if (changed)
        {
          gcse_subst_count++;
          if (gcse_file != NULL)
            {
              fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
                       INSN_UID (insn));
              fprintf (gcse_file, " reg %d %s insn %d\n",
                       REGNO (to), use_src ? "from" : "set in",
                       INSN_UID (insn_computes_expr));
            }
        }
    }

  /* The register that the expr is computed into is set more than once.  */
  else if (1 /*expensive_op(this_pattern->op) && do_expensive_gcse)*/)
    {
      /* Insert an insn after the computing insn that copies the register
         set there into a new pseudo register (call this new register REGN).
         From that insn until the end of the basic block, or until the old
         register is set again, replace all uses of the old register with
         REGN.  */
      rtx new_insn;

      to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));

      /* Generate the new insn.  */
      /* ??? If the change fails, we return 0, even though we created
         an insn.  I think this is ok.  */
      new_insn
        = emit_insn_after (gen_rtx_SET (VOIDmode, to,
                                        SET_DEST (expr_set)),
                           insn_computes_expr);

      /* Keep register set table up to date.  */
      record_one_set (REGNO (to), new_insn);

      gcse_create_count++;
      if (gcse_file != NULL)
        {
          fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
                   INSN_UID (NEXT_INSN (insn_computes_expr)),
                   REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
          fprintf (gcse_file, ", computed in insn %d,\n",
                   INSN_UID (insn_computes_expr));
          fprintf (gcse_file, " into newly allocated reg %d\n",
                   REGNO (to));
        }

      pat = PATTERN (insn);

      /* Do register replacement for INSN.  */
      changed = validate_change (insn, &SET_SRC (pat),
                                 SET_DEST (PATTERN
                                           (NEXT_INSN (insn_computes_expr))),
                                 0);

      /* We should be able to ignore the return code from validate_change but
         to play it safe we check.  */
      if (changed)
        {
          gcse_subst_count++;
          if (gcse_file != NULL)
            {
              fprintf (gcse_file,
                       "GCSE: Replacing the source in insn %d with reg %d ",
                       INSN_UID (insn),
                       REGNO (SET_DEST (PATTERN (NEXT_INSN
                                                 (insn_computes_expr)))));
              fprintf (gcse_file, "set in insn %d\n",
                       INSN_UID (insn_computes_expr));
            }
        }
    }

  return changed;
}

/* Perform classic GCSE.  This is called by one_classic_gcse_pass after all
   the dataflow analysis has been done.

   The result is nonzero if a change was made.  */

static int
classic_gcse (void)
{
  int changed;
  rtx insn;
  basic_block bb;

  /* Note we start at block 1.  */

  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    return 0;

  changed = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      /* Reset tables used to keep track of what's still valid [since the
         start of the block].  */
      reset_opr_set_tables ();

      for (insn = bb->head;
           insn != NULL && insn != NEXT_INSN (bb->end);
           insn = NEXT_INSN (insn))
        {
          /* Is insn of form (set (pseudo-reg) ...)?  */
          if (GET_CODE (insn) == INSN
              && GET_CODE (PATTERN (insn)) == SET
              && GET_CODE (SET_DEST (PATTERN (insn))) == REG
              && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
            {
              rtx pat = PATTERN (insn);
              rtx src = SET_SRC (pat);
              struct expr *expr;

              if (want_to_gcse_p (src)
                  /* Is the expression recorded?  */
                  && ((expr = lookup_expr (src, &expr_hash_table)) != NULL)
                  /* Is the expression available [at the start of the
                     block]?  */
                  && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
                  /* Are the operands unchanged since the start of the
                     block?  */
                  && oprs_not_set_p (src, insn))
                changed |= handle_avail_expr (insn, expr);
            }

          /* Keep track of everything modified by this insn.  */
          /* ??? Need to be careful w.r.t. mods done to INSN.  */
          if (INSN_P (insn))
            mark_oprs_set (insn);
        }
    }

  return changed;
}
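
/* Illustration (not part of gcse.c): the shape of code classic GCSE
   improves.  A hypothetical source-level example; the pass itself works
   on RTL insns of the form (set (pseudo-reg) ...).  */
#if 0
int
example (int b, int c)
{
  int a = b + c;   /* first computation; recorded in expr_hash_table */
  int d = b + c;   /* fully redundant: b+c is available here and b, c are
                      unchanged since block entry, so handle_avail_expr
                      rewrites this to d = a (or first copies the value
                      into a fresh pseudo if a is set more than once) */
  return a + d;
}
#endif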

/* Top level routine to perform one classic GCSE pass.

   Return nonzero if a change was made.  */

static int
one_classic_gcse_pass (int pass)
{
  int changed = 0;

  gcse_subst_count = 0;
  gcse_create_count = 0;

  alloc_hash_table (max_cuid, &expr_hash_table, 0);
  alloc_rd_mem (last_basic_block, max_cuid);
  compute_hash_table (&expr_hash_table);
  if (gcse_file)
    dump_hash_table (gcse_file, "Expression", &expr_hash_table);

  if (expr_hash_table.n_elems > 0)
    {
      compute_kill_rd ();
      compute_rd ();
      alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems);
      compute_ae_gen (&expr_hash_table);
      compute_ae_kill (ae_gen, ae_kill, &expr_hash_table);
      compute_available (ae_gen, ae_kill, ae_out, ae_in);
      changed = classic_gcse ();
      free_avail_expr_mem ();
    }

  free_rd_mem ();
  free_hash_table (&expr_hash_table);

  if (gcse_file)
    {
      fprintf (gcse_file, "\n");
      fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
               current_function_name, pass, bytes_used, gcse_subst_count);
      fprintf (gcse_file, "%d insns created\n", gcse_create_count);
    }

  return changed;
}

/* Compute copy/constant propagation working variables.  */

/* Local properties of assignments.  */
static sbitmap *cprop_pavloc;
static sbitmap *cprop_absaltered;

/* Global properties of assignments (computed from the local properties).  */
static sbitmap *cprop_avin;
static sbitmap *cprop_avout;

/* Allocate vars used for copy/const propagation.  N_BLOCKS is the number of
   basic blocks.  N_SETS is the number of sets.  */

static void
alloc_cprop_mem (int n_blocks, int n_sets)
{
  cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);

  cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
}

/* Free vars used by copy/const propagation.  */

static void
free_cprop_mem (void)
{
  sbitmap_vector_free (cprop_pavloc);
  sbitmap_vector_free (cprop_absaltered);
  sbitmap_vector_free (cprop_avin);
  sbitmap_vector_free (cprop_avout);
}

/* For each block, compute whether X is transparent.  X is either an
   expression or an assignment [though we don't care which, for this context
   an assignment is treated as an expression].  For each block where an
   element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
   bit in BMAP.  */

static void
compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
{
  int i, j;
  basic_block bb;
  enum rtx_code code;
  reg_set *r;
  const char *fmt;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:

  if (x == 0)
    return;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      if (set_p)
        {
          if (REGNO (x) < FIRST_PSEUDO_REGISTER)
            {
              FOR_EACH_BB (bb)
                if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
                  SET_BIT (bmap[bb->index], indx);
            }
          else
            {
              for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
                SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
            }
        }
      else
        {
          if (REGNO (x) < FIRST_PSEUDO_REGISTER)
            {
              FOR_EACH_BB (bb)
                if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
                  RESET_BIT (bmap[bb->index], indx);
            }
          else
            {
              for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
                RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
            }
        }

      return;

    case MEM:
      FOR_EACH_BB (bb)
        {
          rtx list_entry = canon_modify_mem_list[bb->index];

          while (list_entry)
            {
              rtx dest, dest_addr;

              if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
                {
                  if (set_p)
                    SET_BIT (bmap[bb->index], indx);
                  else
                    RESET_BIT (bmap[bb->index], indx);
                  break;
                }
              /* LIST_ENTRY must be an INSN of some kind that sets memory.
                 Examine each hunk of memory that is modified.  */

              dest = XEXP (list_entry, 0);
              list_entry = XEXP (list_entry, 1);
              dest_addr = XEXP (list_entry, 0);

              if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
                                         x, rtx_addr_varies_p))
                {
                  if (set_p)
                    SET_BIT (bmap[bb->index], indx);
                  else
                    RESET_BIT (bmap[bb->index], indx);
                  break;
                }
              list_entry = XEXP (list_entry, 1);
            }
        }

      x = XEXP (x, 0);
      goto repeat;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = XEXP (x, i);
              goto repeat;
            }

          compute_transp (XEXP (x, i), indx, bmap, set_p);
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
    }
}
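
/* Illustration (not part of gcse.c): what "transparent" means.  An
   expression is transparent in a block when the block modifies none of
   its operands, so a value computed before the block is still valid
   after it.  A hypothetical source-level example:  */
#if 0
int
example (int *p, int b, int c)
{
  int x = b + c;  /* block 1: computes b+c */
  b = *p;         /* block 2: modifies operand b, so b+c is NOT
                     transparent here; compute_transp records that by
                     setting (or resetting, per SET_P) the bit for b+c
                     in this block's row of the bitmap */
  return b + c;   /* block 3: cannot reuse block 1's b+c, since block 2
                     killed it */
}
#endif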

/* Top level routine to do the dataflow analysis needed by copy/const
   propagation.  */

static void
compute_cprop_data (void)
{
  compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
  compute_available (cprop_pavloc, cprop_absaltered,
                     cprop_avout, cprop_avin);
}

/* Copy/constant propagation.  */

/* Maximum number of register uses in an insn that we handle.  */
#define MAX_USES 8

/* Table of uses found in an insn.
   Allocated statically to avoid alloc/free complexity and overhead.  */
static struct reg_use reg_use_table[MAX_USES];

/* Index into `reg_use_table' while building it.  */
static int reg_use_count;

/* Set up a list of register numbers used in INSN.  The found uses are stored
   in `reg_use_table'.  `reg_use_count' is initialized to zero before entry,
   and contains the number of uses in the table upon exit.

   ??? If a register appears multiple times we will record it multiple times.
   This doesn't hurt anything but it will slow things down.  */

static void
find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  rtx x = *xptr;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);
  if (REG_P (x))
    {
      if (reg_use_count == MAX_USES)
        return;

      reg_use_table[reg_use_count].reg_rtx = x;
      reg_use_count++;
    }

  /* Recursively scan the operands of this expression.  */

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }

          find_used_regs (&XEXP (x, i), data);
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          find_used_regs (&XVECEXP (x, i, j), data);
    }
}

/* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
   Returns nonzero if successful.  */

static int
try_replace_reg (rtx from, rtx to, rtx insn)
{
  rtx note = find_reg_equal_equiv_note (insn);
  rtx src = 0;
  int success = 0;
  rtx set = single_set (insn);

  validate_replace_src_group (from, to, insn);
  if (num_changes_pending () && apply_change_group ())
    success = 1;

  /* Try to simplify SET_SRC if we have substituted a constant.  */
  if (success && set && CONSTANT_P (to))
    {
      src = simplify_rtx (SET_SRC (set));

      if (src)
        validate_change (insn, &SET_SRC (set), src, 0);
    }

  /* If there is already a NOTE, update the expression in it with our
     replacement.  */
  if (note != 0)
    XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);

  if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
    {
      /* If the above failed and this is a single set, try to simplify the
         source of the set given our substitution.  We could perhaps try
         this for multiple SETs, but it probably won't buy us anything.  */
      src = simplify_replace_rtx (SET_SRC (set), from, to);

      if (!rtx_equal_p (src, SET_SRC (set))
          && validate_change (insn, &SET_SRC (set), src, 0))
        success = 1;

      /* If we've failed to do replacement, have a single SET, don't already
         have a note, and have no special SET, add a REG_EQUAL note to not
         lose information.  */
      if (!success && note == 0 && set != 0
          && GET_CODE (XEXP (set, 0)) != ZERO_EXTRACT
          && GET_CODE (XEXP (set, 0)) != SIGN_EXTRACT)
        note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
    }

  /* REG_EQUAL may get simplified into a register.  We don't allow that;
     remove such a note.  This ought not to happen, because previous code
     ought to have synthesized a reg-reg move, but be on the safe side.  */
  if (note && REG_P (XEXP (note, 0)))
    remove_note (insn, note);

  return success;
}

/* Find a set of REGNOs that are available on entry to INSN's block.
   Returns NULL if no such set is found.  */

static struct expr *
find_avail_set (int regno, rtx insn)
{
  /* SET1 contains the last set found that can be returned to the caller for
     use in a substitution.  */
  struct expr *set1 = 0;

  /* Loops are not possible here.  To get a loop we would need two sets
     available at the start of the block containing INSN.  I.e. we would
     need two sets like this available at the start of the block:

       (set (reg X) (reg Y))
       (set (reg Y) (reg X))

     This cannot happen since the set of (reg Y) would have killed the
     set of (reg X), making it unavailable at the start of this block.  */
  while (1)
    {
      rtx src;
      struct expr *set = lookup_set (regno, &set_hash_table);

      /* Find a set that is available at the start of the block
         which contains INSN.  */
      while (set)
        {
          if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
            break;
          set = next_set (regno, set);
        }

      /* If no available set was found we've reached the end of the
         (possibly empty) copy chain.  */
      if (set == 0)
        break;

      if (GET_CODE (set->expr) != SET)
        abort ();

      src = SET_SRC (set->expr);

      /* We know the set is available.
         Now check that SRC is ANTLOC (i.e. none of the source operands
         have changed since the start of the block).

         If the source operand changed, we may still use it for the next
         iteration of this loop, but we may not use it for substitutions.  */

      if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
        set1 = set;

      /* If the source of the set is anything except a register, then
         we have reached the end of the copy chain.  */
      if (GET_CODE (src) != REG)
        break;

      /* Follow the copy chain, i.e. start another iteration of the loop
         and see if we have an available copy into SRC.  */
      regno = REGNO (src);
    }

  /* SET1 holds the last set that was available and anticipatable at
     INSN.  */
  return set1;
}
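
/* Illustration (not part of gcse.c): the copy chain find_avail_set
   follows.  A hypothetical source-level example:  */
#if 0
int
example (void)
{
  int y = 1;   /* (set (reg Y) (const_int 1)) */
  int x = y;   /* (set (reg X) (reg Y)) -- the set found for REGNO (X) */
  return x;    /* find_avail_set starts at REGNO (X), records the copy
                  x = y, then iterates with REGNO (Y) and finds y = 1;
                  SET1 ends up being the constant set, so this use of x
                  can be replaced by the constant 1 */
}
#endif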

/* Subroutine of cprop_insn that tries to propagate constants into
   JUMP_INSNS.  JUMP must be a conditional jump.  If SETCC is non-NULL
   it is the instruction that immediately precedes JUMP, and must be a
   single SET of a register.  FROM is what we will try to replace,
   SRC is the constant we will try to substitute for it.  Returns nonzero
   if a change was made.  */

static int
cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
{
  rtx new, set_src, note_src;
  rtx set = pc_set (jump);
  rtx note = find_reg_equal_equiv_note (jump);

  if (note)
    {
      note_src = XEXP (note, 0);
      if (GET_CODE (note_src) == EXPR_LIST)
        note_src = NULL_RTX;
    }
  else
    note_src = NULL_RTX;

  /* Prefer REG_EQUAL notes except those containing EXPR_LISTs.  */
  set_src = note_src ? note_src : SET_SRC (set);

  /* First substitute the SETCC condition into the JUMP instruction,
     then substitute the given values into this expanded JUMP.  */
  if (setcc != NULL_RTX
      && !modified_between_p (from, setcc, jump)
      && !modified_between_p (src, setcc, jump))
    {
      rtx setcc_src;
      rtx setcc_set = single_set (setcc);
      rtx setcc_note = find_reg_equal_equiv_note (setcc);
      setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
                  ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
      set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
                                      setcc_src);
    }
  else
    setcc = NULL_RTX;

  new = simplify_replace_rtx (set_src, from, src);

  /* If no simplification can be made, then try the next register.  */
  if (rtx_equal_p (new, SET_SRC (set)))
    return 0;

  /* If this is now a no-op delete it, otherwise this must be a valid insn.  */
  if (new == pc_rtx)
    delete_insn (jump);
  else
    {
      /* Ensure the value computed inside the jump insn is equivalent
         to the one computed by setcc.  */
      if (setcc && modified_in_p (new, setcc))
        return 0;
      if (! validate_change (jump, &SET_SRC (set), new, 0))
        {
          /* When (some) constants are not valid in a comparison, and there
             are two registers to be replaced by constants before the entire
             comparison can be folded into a constant, we need to keep
             intermediate information in REG_EQUAL notes.  For targets with
             separate compare insns, such notes are added by try_replace_reg.
             When we have a combined compare-and-branch instruction, however,
             we need to attach a note to the branch itself to make this
             optimization work.  */

          if (!rtx_equal_p (new, note_src))
            set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
          return 0;
        }

      /* Remove REG_EQUAL note after simplification.  */
      if (note_src)
        remove_note (jump, note);

      /* If this has turned into an unconditional jump,
         then put a barrier after it so that the unreachable
         code will be deleted.  */
      if (GET_CODE (SET_SRC (set)) == LABEL_REF)
        emit_barrier_after (jump);
    }

#ifdef HAVE_cc0
  /* Delete the cc0 setter.  */
  if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
    delete_insn (setcc);
#endif

  run_jump_opt_after_gcse = 1;

  const_prop_count++;
  if (gcse_file != NULL)
    {
      fprintf (gcse_file,
               "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
               REGNO (from), INSN_UID (jump));
      print_rtl (gcse_file, src);
      fprintf (gcse_file, "\n");
    }
  purge_dead_edges (bb);

  return 1;
}

static bool
constprop_register (rtx insn, rtx from, rtx to, int alter_jumps)
{
  rtx sset;

  /* Check for reg or cc0 setting instructions followed by
     conditional branch instructions first.  */
  if (alter_jumps
      && (sset = single_set (insn)) != NULL
      && NEXT_INSN (insn)
      && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
    {
      rtx dest = SET_DEST (sset);
      if ((REG_P (dest) || CC0_P (dest))
          && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
        return 1;
    }

  /* Handle normal insns next.  */
  if (GET_CODE (insn) == INSN
      && try_replace_reg (from, to, insn))
    return 1;

  /* Try to propagate a CONST_INT into a conditional jump.
     We're pretty specific about what we will handle in this
     code, we can extend this as necessary over time.

     Right now the insn in question must look like
     (set (pc) (if_then_else ...))  */
  else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
    return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
  return 0;
}
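
/* Illustration (not part of gcse.c): the reg-setter-plus-condjump shape
   constprop_register looks for.  A hypothetical source-level example:  */
#if 0
int
example (void)
{
  int x = 0;   /* the single-set insn defining x */
  if (x)       /* substituting 0 for x folds the condition to false;
                  cprop_jump rewrites or deletes the jump and
                  purge_dead_edges then cleans up the CFG */
    return 1;
  return 2;
}
#endif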

/* Perform constant and copy propagation on INSN.
   The result is nonzero if a change was made.  */

static int
cprop_insn (rtx insn, int alter_jumps)
{
  struct reg_use *reg_used;
  int changed = 0;
  rtx note;

  if (!INSN_P (insn))
    return 0;

  reg_use_count = 0;
  note_uses (&PATTERN (insn), find_used_regs, NULL);

  note = find_reg_equal_equiv_note (insn);

  /* We may win even when propagating constants into notes.  */
  if (note)
    find_used_regs (&XEXP (note, 0), NULL);

  for (reg_used = &reg_use_table[0]; reg_use_count > 0;
       reg_used++, reg_use_count--)
    {
      unsigned int regno = REGNO (reg_used->reg_rtx);
      rtx pat, src;
      struct expr *set;

      /* Ignore registers created by GCSE.
         We do this because ...  */
      if (regno >= max_gcse_regno)
        continue;

      /* If the register has already been set in this block, there's
         nothing we can do.  */
      if (! oprs_not_set_p (reg_used->reg_rtx, insn))
        continue;

      /* Find an assignment that sets reg_used and is available
         at the start of the block.  */
      set = find_avail_set (regno, insn);
      if (! set)
        continue;

      pat = set->expr;
      /* ??? We might be able to handle PARALLELs.  Later.  */
      if (GET_CODE (pat) != SET)
        abort ();

      src = SET_SRC (pat);

      /* Constant propagation.  */
      if (gcse_constant_p (src))
        {
          if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
            {
              changed = 1;
              const_prop_count++;
              if (gcse_file != NULL)
                {
                  fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
                  fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
                  print_rtl (gcse_file, src);
                  fprintf (gcse_file, "\n");
                }
              if (INSN_DELETED_P (insn))
                return 1;
            }
        }
      else if (GET_CODE (src) == REG
               && REGNO (src) >= FIRST_PSEUDO_REGISTER
               && REGNO (src) != regno)
        {
          if (try_replace_reg (reg_used->reg_rtx, src, insn))
            {
              changed = 1;
              copy_prop_count++;
              if (gcse_file != NULL)
                {
                  fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
                           regno, INSN_UID (insn));
                  fprintf (gcse_file, " with reg %d\n", REGNO (src));
                }

              /* The original insn setting reg_used may or may not now be
                 deletable.  We leave the deletion to flow.  */
              /* FIXME: If it turns out that the insn isn't deletable,
                 then we may have unnecessarily extended register lifetimes
                 and made things worse.  */
            }
        }
    }

  return changed;
}

/* Like find_used_regs, but avoid recording uses that appear in
   input-output contexts such as zero_extract or pre_dec.  This
   restricts the cases we consider to those for which local cprop
   can legitimately make replacements.  */

static void
local_cprop_find_used_regs (rtx *xptr, void *data)
{
  rtx x = *xptr;

  if (x == 0)
    return;

  switch (GET_CODE (x))
    {
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
    case STRICT_LOW_PART:
      return;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* Can only legitimately appear this early in the context of
         stack pushes for function arguments, but handle all of the
         codes nonetheless.  */
      return;

    case SUBREG:
      /* Setting a subreg of a register larger than word_mode leaves
         the non-written words unchanged.  */
      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
        return;
      break;

    default:
      break;
    }

  find_used_regs (xptr, data);
}

/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
   their REG_EQUAL notes need updating.  */

static bool
do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp)
{
  rtx newreg = NULL, newcnst = NULL;

  /* Rule out USE instructions and ASM statements as we don't want to
     change the hard registers mentioned.  */
  if (GET_CODE (x) == REG
      && (REGNO (x) >= FIRST_PSEUDO_REGISTER
          || (GET_CODE (PATTERN (insn)) != USE
              && asm_noperands (PATTERN (insn)) < 0)))
    {
      cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
      struct elt_loc_list *l;

      if (!val)
        return false;
      for (l = val->locs; l; l = l->next)
        {
          rtx this_rtx = l->loc;
          rtx note;

          if (l->in_libcall)
            continue;

          if (gcse_constant_p (this_rtx))
            newcnst = this_rtx;
          if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
              /* Don't copy propagate if it has an attached REG_EQUIV note.
                 At this point only function parameters should have
                 REG_EQUIV notes, and if the argument slot is used somewhere
                 explicitly, the address of the parameter has been taken,
                 so we should not extend the lifetime of the pseudo.  */
              && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
                  || GET_CODE (XEXP (note, 0)) != MEM))
            newreg = this_rtx;
        }
      if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
        {
          /* If we find a case where we can't fix the retval REG_EQUAL notes
             to match the new register, we either have to abandon this
             replacement or fix delete_trivially_dead_insns to preserve the
             setting insn, or make it delete the REG_EQUAL note, and fix up
             all passes that require the REG_EQUAL note there.  */
          if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
            abort ();
          if (gcse_file != NULL)
            {
              fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
                       REGNO (x));
              fprintf (gcse_file, "insn %d with constant ",
                       INSN_UID (insn));
              print_rtl (gcse_file, newcnst);
              fprintf (gcse_file, "\n");
            }
          const_prop_count++;
          return true;
        }
      else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
        {
          adjust_libcall_notes (x, newreg, insn, libcall_sp);
          if (gcse_file != NULL)
            {
              fprintf (gcse_file,
                       "LOCAL COPY-PROP: Replacing reg %d in insn %d",
                       REGNO (x), INSN_UID (insn));
              fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
            }
          copy_prop_count++;
          return true;
        }
    }
  return false;
}

/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
   their REG_EQUAL notes need updating to reflect that OLDREG has been
   replaced with NEWVAL in INSN.  Return true if all substitutions could
   be made.  */

static bool
adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
{
  rtx end;

  while ((end = *libcall_sp++))
    {
      rtx note = find_reg_equal_equiv_note (end);

      if (! note)
        continue;

      if (REG_P (newval))
        {
          if (reg_set_between_p (newval, PREV_INSN (insn), end))
            {
              do
                {
                  note = find_reg_equal_equiv_note (end);
                  if (! note)
                    continue;
                  if (reg_mentioned_p (newval, XEXP (note, 0)))
                    return false;
                }
              while ((end = *libcall_sp++));
              return true;
            }
        }
      XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
      insn = end;
    }
  return true;
}

#define MAX_NESTED_LIBCALLS 9

static void
local_cprop_pass (int alter_jumps)
{
  rtx insn;
  struct reg_use *reg_used;
  rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
  bool changed = false;

  cselib_init ();
  libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
  *libcall_sp = 0;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        {
          rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);

          if (note)
            {
              if (libcall_sp == libcall_stack)
                abort ();
              *--libcall_sp = XEXP (note, 0);
            }
          note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
          if (note)
            libcall_sp++;
          note = find_reg_equal_equiv_note (insn);
          do
            {
              reg_use_count = 0;
              note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
              if (note)
                local_cprop_find_used_regs (&XEXP (note, 0), NULL);

              for (reg_used = &reg_use_table[0]; reg_use_count > 0;
                   reg_used++, reg_use_count--)
                if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
                                    libcall_sp))
                  {
                    changed = true;
                    break;
                  }
              if (INSN_DELETED_P (insn))
                break;
            }
          while (reg_use_count);
        }
      cselib_process_insn (insn);
    }
  cselib_finish ();
  /* Global analysis may get into infinite loops for unreachable blocks.  */
  if (changed && alter_jumps)
    {
      delete_unreachable_blocks ();
      free_reg_set_mem ();
      alloc_reg_set_mem (max_reg_num ());
      compute_sets (get_insns ());
    }
}

/* Forward propagate copies.  This includes copies and constants.  Return
   nonzero if a change was made.  */

static int
cprop (int alter_jumps)
{
  int changed;
  basic_block bb;
  rtx insn;

  /* Note we start at block 1.  */
  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    {
      if (gcse_file != NULL)
        fprintf (gcse_file, "\n");
      return 0;
    }

  changed = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      /* Reset tables used to keep track of what's still valid [since the
         start of the block].  */
      reset_opr_set_tables ();

      for (insn = bb->head;
           insn != NULL && insn != NEXT_INSN (bb->end);
           insn = NEXT_INSN (insn))
        if (INSN_P (insn))
          {
            changed |= cprop_insn (insn, alter_jumps);

            /* Keep track of everything modified by this insn.  */
            /* ??? Need to be careful w.r.t. mods done to INSN.  Don't
               call mark_oprs_set if we turned the insn into a NOTE.  */
            if (GET_CODE (insn) != NOTE)
              mark_oprs_set (insn);
          }
    }

  if (gcse_file != NULL)
    fprintf (gcse_file, "\n");

  return changed;
}

/* Similar to get_condition, only the resulting condition must be
   valid at JUMP, instead of at EARLIEST.

   This differs from noce_get_condition in ifcvt.c in that we prefer not to
   settle for the condition variable in the jump instruction being integral.
   We prefer to be able to record the value of a user variable, rather than
   the value of a temporary used in a condition.  This could be solved by
   recording the value of *every* register scanned by canonicalize_condition,
   but this would require some code reorganization.  */

rtx
fis_get_condition (rtx jump)
{
  rtx cond, set, tmp, insn, earliest;
  bool reverse;

  if (! any_condjump_p (jump))
    return NULL_RTX;

  set = pc_set (jump);
  cond = XEXP (SET_SRC (set), 0);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
             && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));

  /* Use canonicalize_condition to do the dirty work of manipulating
     MODE_CC values and COMPARE rtx codes.  */
  tmp = canonicalize_condition (jump, cond, reverse, &earliest, NULL_RTX,
                                false);
  if (!tmp)
    return NULL_RTX;

  /* Verify that the given condition is valid at JUMP by virtue of not
     having been modified since EARLIEST.  */
  for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && modified_in_p (tmp, insn))
      break;
  if (insn == jump)
    return tmp;

  /* The condition was modified.  See if we can get a partial result
     that doesn't follow all the reversals.  Perhaps combine can fold
     them together later.  */
  tmp = XEXP (tmp, 0);
  if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
    return NULL_RTX;
  tmp = canonicalize_condition (jump, cond, reverse, &earliest, tmp,
                                false);
  if (!tmp)
    return NULL_RTX;

  /* For sanity's sake, re-validate the new result.  */
  for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && modified_in_p (tmp, insn))
      return NULL_RTX;

  return tmp;
}

/* Find the implicit sets of a function.  An "implicit set" is a constraint
   on the value of a variable, implied by a conditional jump.  For example,
   following "if (x == 2)", the then branch may be optimized as though the
   conditional performed an "explicit set", in this example, "x = 2".  This
   function records the set patterns that are implicit at the start of each
   basic block.  */

static void
find_implicit_sets (void)
{
  basic_block bb, dest;
  unsigned int count;
  rtx cond, new;

  count = 0;
  FOR_EACH_BB (bb)
    /* Check for more than one successor.  */
    if (bb->succ && bb->succ->succ_next)
      {
        cond = fis_get_condition (bb->end);

        if (cond
            && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
            && GET_CODE (XEXP (cond, 0)) == REG
            && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
            && gcse_constant_p (XEXP (cond, 1)))
          {
            dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
                                         : FALLTHRU_EDGE (bb)->dest;

            if (dest && ! dest->pred->pred_next
                && dest != EXIT_BLOCK_PTR)
              {
                new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
                                   XEXP (cond, 1));
                implicit_sets[dest->index] = new;
                if (gcse_file)
                  {
                    fprintf (gcse_file, "Implicit set of reg %d in ",
                             REGNO (XEXP (cond, 0)));
                    fprintf (gcse_file, "basic block %d\n", dest->index);
                  }
                count++;
              }
          }
      }

  if (gcse_file)
    fprintf (gcse_file, "Found %d implicit sets\n", count);
}
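
/* Illustration (not part of gcse.c): an implicit set.  A hypothetical
   source-level example:  */
#if 0
int
example (int x)
{
  if (x == 2)
    return x + 1;   /* this block is entered only when x == 2, so
                       find_implicit_sets records (set (reg X)
                       (const_int 2)) for it; constant propagation can
                       then fold this return value to the constant 3 */
  return 0;
}
#endif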

/* Perform one copy/constant propagation pass.
   PASS is the pass count.  If CPROP_JUMPS is true, perform constant
   propagation into conditional jumps.  If BYPASS_JUMPS is true,
   perform conditional jump bypassing optimizations.  */

static int
one_cprop_pass (int pass, int cprop_jumps, int bypass_jumps)
{
  int changed = 0;

  const_prop_count = 0;
  copy_prop_count = 0;

  local_cprop_pass (cprop_jumps);

  /* Determine implicit sets.  */
  implicit_sets = xcalloc (last_basic_block, sizeof (rtx));
  find_implicit_sets ();

  alloc_hash_table (max_cuid, &set_hash_table, 1);
  compute_hash_table (&set_hash_table);

  /* Free implicit_sets before peak usage.  */
  free (implicit_sets);
  implicit_sets = NULL;

  if (gcse_file)
    dump_hash_table (gcse_file, "SET", &set_hash_table);
  if (set_hash_table.n_elems > 0)
    {
      alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
      compute_cprop_data ();
      changed = cprop (cprop_jumps);
      if (bypass_jumps)
        changed |= bypass_conditional_jumps ();
      free_cprop_mem ();
    }

  free_hash_table (&set_hash_table);

  if (gcse_file)
    {
      fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
               current_function_name, pass, bytes_used);
      fprintf (gcse_file, "%d const props, %d copy props\n\n",
               const_prop_count, copy_prop_count);
    }
  /* Global analysis may get into infinite loops for unreachable blocks.  */
  if (changed && cprop_jumps)
    delete_unreachable_blocks ();

  return changed;
}

/* Bypass conditional jumps.  */

/* The value of last_basic_block at the beginning of the jump_bypass
   pass.  The use of redirect_edge_and_branch_force may introduce new
   basic blocks, but the data flow analysis is only valid for basic
   block indices less than bypass_last_basic_block.  */

static int bypass_last_basic_block;

/* Find a set of REGNO to a constant that is available at the end of basic
   block BB.  Returns NULL if no such set is found.  Based heavily upon
   find_avail_set.  */

static struct expr *
find_bypass_set (int regno, int bb)
{
  struct expr *result = 0;

  for (;;)
    {
      rtx src;
      struct expr *set = lookup_set (regno, &set_hash_table);

      while (set)
        {
          if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
            break;
          set = next_set (regno, set);
        }

      if (set == 0)
        break;

      if (GET_CODE (set->expr) != SET)
        abort ();

      src = SET_SRC (set->expr);
      if (gcse_constant_p (src))
        result = set;

      if (GET_CODE (src) != REG)
        break;

      regno = REGNO (src);
    }
  return result;
}

/* Subroutine of bypass_block that checks whether a pseudo is killed by
   any of the instructions inserted on an edge.  Jump bypassing places
   condition code setters on CFG edges using insert_insn_on_edge.  This
   function is required to check that our data flow analysis is still
   valid prior to commit_edge_insertions.  */

static bool
reg_killed_on_edge (rtx reg, edge e)
{
  rtx insn;

  for (insn = e->insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return true;

  return false;
}

/* Subroutine of bypass_conditional_jumps that attempts to bypass the given
   basic block BB which has more than one predecessor.  If not NULL, SETCC
   is the first instruction of BB, which is immediately followed by JUMP_INSN
   JUMP.  Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
   Returns nonzero if a change was made.

   During the jump bypassing pass, we may place copies of SETCC instructions
   on CFG edges.  The following routine must be careful to pay attention to
   these inserted insns when performing its transformations.  */

static int
bypass_block (basic_block bb, rtx setcc, rtx jump)
{
  rtx insn, note;
  edge e, enext, edest;
  int i, change;
  int may_be_loop_header;

  insn = (setcc != NULL) ? setcc : jump;

  /* Determine set of register uses in INSN.  */
  reg_use_count = 0;
  note_uses (&PATTERN (insn), find_used_regs, NULL);
  note = find_reg_equal_equiv_note (insn);
  if (note)
    find_used_regs (&XEXP (note, 0), NULL);

  may_be_loop_header = false;
  for (e = bb->pred; e; e = e->pred_next)
    if (e->flags & EDGE_DFS_BACK)
      {
        may_be_loop_header = true;
        break;
      }

  change = 0;
  for (e = bb->pred; e; e = enext)
    {
      enext = e->pred_next;
      if (e->flags & EDGE_COMPLEX)
        continue;

      /* We can't redirect edges from new basic blocks.  */
      if (e->src->index >= bypass_last_basic_block)
        continue;

      /* The irreducible loops created by redirecting edges entering the
         loop from outside would decrease the effectiveness of some of the
         following optimizations, so prevent this.  */
      if (may_be_loop_header
          && !(e->flags & EDGE_DFS_BACK))
        continue;

      for (i = 0; i < reg_use_count; i++)
        {
          struct reg_use *reg_used = &reg_use_table[i];
          unsigned int regno = REGNO (reg_used->reg_rtx);
          basic_block dest, old_dest;
          struct expr *set;
          rtx src, new;

          if (regno >= max_gcse_regno)
            continue;

          set = find_bypass_set (regno, e->src->index);

          if (! set)
            continue;

          /* Check the data flow is valid after edge insertions.  */
          if (e->insns && reg_killed_on_edge (reg_used->reg_rtx, e))
            continue;

          src = SET_SRC (pc_set (jump));

          if (setcc != NULL)
            src = simplify_replace_rtx (src,
                                        SET_DEST (PATTERN (setcc)),
                                        SET_SRC (PATTERN (setcc)));

          new = simplify_replace_rtx (src, reg_used->reg_rtx,
                                      SET_SRC (set->expr));

          /* Jump bypassing may have already placed instructions on
             edges of the CFG.  We can't bypass an outgoing edge that
             has instructions associated with it, as these insns won't
             get executed if the incoming edge is redirected.  */

          if (new == pc_rtx)
            {
              edest = FALLTHRU_EDGE (bb);
              dest = edest->insns ? NULL : edest->dest;
            }
          else if (GET_CODE (new) == LABEL_REF)
            {
              dest = BLOCK_FOR_INSN (XEXP (new, 0));
              /* Don't bypass edges containing instructions.  */
              for (edest = bb->succ; edest; edest = edest->succ_next)
                if (edest->dest == dest && edest->insns)
                  {
                    dest = NULL;
                    break;
                  }
            }
          else
            dest = NULL;

          old_dest = e->dest;
          if (dest != NULL
              && dest != old_dest
              && dest != EXIT_BLOCK_PTR)
            {
              redirect_edge_and_branch_force (e, dest);

              /* Copy the register setter to the redirected edge.
                 Don't copy CC0 setters, as CC0 is dead after jump.  */
              if (setcc)
                {
                  rtx pat = PATTERN (setcc);
                  if (!CC0_P (SET_DEST (pat)))
                    insert_insn_on_edge (copy_insn (pat), e);
                }

              if (gcse_file != NULL)
                {
                  fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
                           regno, INSN_UID (jump));
                  print_rtl (gcse_file, SET_SRC (set->expr));
                  fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
                           e->src->index, old_dest->index, dest->index);
                }
              change = 1;
              break;
            }
        }
    }
  return change;
}

/* Find basic blocks with more than one predecessor that only contain a
   single conditional jump.  If the result of the comparison is known at
   compile-time from any incoming edge, redirect that edge to the
   appropriate target.  Returns nonzero if a change was made.

   This function is now mis-named, because we also handle indirect jumps.  */

static int
bypass_conditional_jumps (void)
{
  basic_block bb;
  int changed;
  rtx setcc;
  rtx insn;
  rtx dest;

  /* Note we start at block 1.  */
  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    return 0;

  bypass_last_basic_block = last_basic_block;
  mark_dfs_back_edges ();

  changed = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
                  EXIT_BLOCK_PTR, next_bb)
    {
      /* Check for more than one predecessor.  */
      if (bb->pred && bb->pred->pred_next)
        {
          setcc = NULL_RTX;
          for (insn = bb->head;
               insn != NULL && insn != NEXT_INSN (bb->end);
               insn = NEXT_INSN (insn))
            if (GET_CODE (insn) == INSN)
              {
                if (setcc)
                  break;
                if (GET_CODE (PATTERN (insn)) != SET)
                  break;

                dest = SET_DEST (PATTERN (insn));
                if (REG_P (dest) || CC0_P (dest))
                  setcc = insn;
                else
                  break;
              }
            else if (GET_CODE (insn) == JUMP_INSN)
              {
                if ((any_condjump_p (insn) || computed_jump_p (insn))
                    && onlyjump_p (insn))
                  changed |= bypass_block (bb, setcc, insn);
                break;
              }
            else if (INSN_P (insn))
              break;
        }
    }

  /* If we bypassed any register setting insns, we inserted a
     copy on the redirected edge.  These need to be committed.  */
  if (changed)
    commit_edge_insertions ();

  return changed;
}
\f
/* Compute PRE+LCM working variables.  */

/* Local properties of expressions.  */
/* Nonzero for expressions that are transparent in the block.  */
static sbitmap *transp;

/* Nonzero for expressions that are transparent at the end of the block.
   This is only zero for expressions killed by an abnormal critical edge
   created by a call.  */
static sbitmap *transpout;

/* Nonzero for expressions that are computed (available) in the block.  */
static sbitmap *comp;

/* Nonzero for expressions that are locally anticipatable in the block.  */
static sbitmap *antloc;

/* Nonzero for expressions where this block is an optimal computation
   point.  */
static sbitmap *pre_optimal;

/* Nonzero for expressions which are redundant in a particular block.  */
static sbitmap *pre_redundant;

/* Nonzero for expressions which should be inserted on a specific edge.  */
static sbitmap *pre_insert_map;

/* Nonzero for expressions which should be deleted in a specific block.  */
static sbitmap *pre_delete_map;

/* Contains the edge_list returned by pre_edge_lcm.  */
static struct edge_list *edge_list;

/* Redundant insns.  */
static sbitmap pre_redundant_insns;

/* Allocate vars used for PRE analysis.  */

static void
alloc_pre_mem (int n_blocks, int n_exprs)
{
  transp = sbitmap_vector_alloc (n_blocks, n_exprs);
  comp = sbitmap_vector_alloc (n_blocks, n_exprs);
  antloc = sbitmap_vector_alloc (n_blocks, n_exprs);

  pre_optimal = NULL;
  pre_redundant = NULL;
  pre_insert_map = NULL;
  pre_delete_map = NULL;
  ae_in = NULL;
  ae_out = NULL;
  ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);

  /* pre_insert and pre_delete are allocated later.  */
}

/* Free vars used for PRE analysis.  */

static void
free_pre_mem (void)
{
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);

  /* ANTLOC and AE_KILL are freed just after pre_lcm finishes.  */

  if (pre_optimal)
    sbitmap_vector_free (pre_optimal);
  if (pre_redundant)
    sbitmap_vector_free (pre_redundant);
  if (pre_insert_map)
    sbitmap_vector_free (pre_insert_map);
  if (pre_delete_map)
    sbitmap_vector_free (pre_delete_map);
  if (ae_in)
    sbitmap_vector_free (ae_in);
  if (ae_out)
    sbitmap_vector_free (ae_out);

  transp = comp = NULL;
  pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
  ae_in = ae_out = NULL;
}

/* Top level routine to do the dataflow analysis needed by PRE.  */

static void
compute_pre_data (void)
{
  sbitmap trapping_expr;
  basic_block bb;
  unsigned int ui;

  compute_local_properties (transp, comp, antloc, &expr_hash_table);
  sbitmap_vector_zero (ae_kill, last_basic_block);

  /* Collect expressions which might trap.  */
  trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
  sbitmap_zero (trapping_expr);
  for (ui = 0; ui < expr_hash_table.size; ui++)
    {
      struct expr *e;
      for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
	if (may_trap_p (e->expr))
	  SET_BIT (trapping_expr, e->bitmap_index);
    }

  /* Compute ae_kill for each basic block using:

     ~(TRANSP | COMP)

     This is significantly faster than compute_ae_kill.  */
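  /* A sketch of why this identity works (added commentary, not in the
     original sources): an expression is killed in a block exactly when
     the block modifies one of the expression's operands without leaving
     a fresh computation of it available at the end.  E.g. for a+b, a
     block that assigns to `a' and never recomputes a+b afterwards has
     the expression in neither TRANSP nor COMP, so its bit ends up set
     in AE_KILL.  */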
  FOR_EACH_BB (bb)
    {
      edge e;

      /* If the current block is the destination of an abnormal edge, we
	 kill all trapping expressions because we won't be able to properly
	 place the instruction on the edge.  So make them neither
	 anticipatable nor transparent.  This is fairly conservative.  */
      for (e = bb->pred; e ; e = e->pred_next)
	if (e->flags & EDGE_ABNORMAL)
	  {
	    sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
	    sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
	    break;
	  }

      sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
      sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
    }

  edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
			    ae_kill, &pre_insert_map, &pre_delete_map);
  sbitmap_vector_free (antloc);
  antloc = NULL;
  sbitmap_vector_free (ae_kill);
  ae_kill = NULL;
  sbitmap_free (trapping_expr);
}
\f
/* PRE utilities */

/* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
   block BB.

   VISITED is a pointer to a working buffer for tracking which BB's have
   been visited.  It is NULL for the top-level call.

   We treat reaching expressions that go through blocks containing the same
   reaching expression as "not reaching".  E.g. if EXPR is generated in blocks
   2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
   2 as not reaching.  The intent is to improve the probability of finding
   only one reaching expression and to reduce register lifetimes by picking
   the closest such expression.  */

static int
pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited)
{
  edge pred;

  for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
    {
      basic_block pred_bb = pred->src;

      if (pred->src == ENTRY_BLOCK_PTR
	  /* Has this predecessor already been visited?  */
	  || visited[pred_bb->index])
	;/* Nothing to do.  */

      /* Does this predecessor generate this expression?  */
      else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
	{
	  /* Is this the occurrence we're looking for?
	     Note that there's only one generating occurrence per block
	     so we just need to check the block number.  */
	  if (occr_bb == pred_bb)
	    return 1;

	  visited[pred_bb->index] = 1;
	}
      /* Ignore this predecessor if it kills the expression.  */
      else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
	visited[pred_bb->index] = 1;

      /* Neither gen nor kill.  */
      else
	{
	  visited[pred_bb->index] = 1;
	  if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
	    return 1;
	}
    }

  /* All paths have been checked.  */
  return 0;
}

/* The wrapper for pre_expr_reaches_here_p_work that ensures that any
   memory allocated for that function is returned.  */

static int
pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
{
  int rval;
  char *visited = xcalloc (last_basic_block, 1);

  rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);

  free (visited);
  return rval;
}
\f

/* Given an expr, generate RTL which we can insert at the end of a BB,
   or on an edge.  Set the block number of any insns generated to
   the value of BB.  */
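/* An illustrative sketch (added commentary, not from the original
   sources): for an expression like (plus:SI (reg 58) (const_int 4))
   whose reaching_reg is (reg 100), the sequence produced below is

	(set (reg:SI 100) (plus:SI (reg:SI 58) (const_int 4)))

   plus whatever CLOBBERs recognition adds; a bare constant or register
   operand instead becomes a single move into (reg 100).  */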

static rtx
process_insert_insn (struct expr *expr)
{
  rtx reg = expr->reaching_reg;
  rtx exp = copy_rtx (expr->expr);
  rtx pat;

  start_sequence ();

  /* If the expression is something that's an operand, like a constant,
     just copy it to a register.  */
  if (general_operand (exp, GET_MODE (reg)))
    emit_move_insn (reg, exp);

  /* Otherwise, make a new insn to compute this expression and make sure the
     insn will be recognized (this also adds any needed CLOBBERs).  Copy the
     expression to make sure we don't have any sharing issues.  */
  else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
    abort ();

  pat = get_insns ();
  end_sequence ();

  return pat;
}

/* Add EXPR to the end of basic block BB.

   This is used by both PRE and code hoisting.

   For PRE, we want to verify that the expr is either transparent
   or locally anticipatable in the target block.  This check makes
   no sense for code hoisting.  */

static void
insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
{
  rtx insn = bb->end;
  rtx new_insn;
  rtx reg = expr->reaching_reg;
  int regno = REGNO (reg);
  rtx pat, pat_end;

  pat = process_insert_insn (expr);
  if (pat == NULL_RTX || ! INSN_P (pat))
    abort ();

  pat_end = pat;
  while (NEXT_INSN (pat_end) != NULL_RTX)
    pat_end = NEXT_INSN (pat_end);

  /* If the last insn is a jump, insert EXPR in front [taking care to
     handle cc0, etc. properly].  Similarly we need to take care of
     trapping instructions in the presence of non-call exceptions.  */

  if (GET_CODE (insn) == JUMP_INSN
      || (GET_CODE (insn) == INSN
	  && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
    {
#ifdef HAVE_cc0
      rtx note;
#endif
      /* It should always be the case that we can put these instructions
	 anywhere in the basic block when performing PRE optimizations.
	 Check this.  */
      if (GET_CODE (insn) == INSN && pre
	  && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
	  && !TEST_BIT (transp[bb->index], expr->bitmap_index))
	abort ();

      /* If this is a jump table, then we can't insert stuff here.  Since
	 we know the previous real insn must be the tablejump, we insert
	 the new instruction just before the tablejump.  */
      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	insn = prev_real_insn (insn);

#ifdef HAVE_cc0
      /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
	 if cc0 isn't set.  */
      note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
      if (note)
	insn = XEXP (note, 0);
      else
	{
	  rtx maybe_cc0_setter = prev_nonnote_insn (insn);
	  if (maybe_cc0_setter
	      && INSN_P (maybe_cc0_setter)
	      && sets_cc0_p (PATTERN (maybe_cc0_setter)))
	    insn = maybe_cc0_setter;
	}
#endif
      /* FIXME: What if something in cc0/jump uses value set in new insn?  */
      new_insn = emit_insn_before (pat, insn);
    }

  /* Likewise if the last insn is a call, as will happen in the presence
     of exception handling.  */
  else if (GET_CODE (insn) == CALL_INSN
	   && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
    {
      /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
	 we search backward and place the instructions before the first
	 parameter is loaded.  Do this for everyone for consistency and a
	 presumption that we'll get better code elsewhere as well.

	 It should always be the case that we can put these instructions
	 anywhere in the basic block when performing PRE optimizations.
	 Check this.  */

      if (pre
	  && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
	  && !TEST_BIT (transp[bb->index], expr->bitmap_index))
	abort ();

      /* Since different machines initialize their parameter registers
	 in different orders, assume nothing.  Collect the set of all
	 parameter registers.  */
      insn = find_first_parameter_load (insn, bb->head);

      /* If we found all the parameter loads, then we want to insert
	 before the first parameter load.

	 If we did not find all the parameter loads, then we might have
	 stopped on the head of the block, which could be a CODE_LABEL.
	 If we inserted before the CODE_LABEL, then we would be putting
	 the insn in the wrong basic block.  In that case, put the insn
	 after the CODE_LABEL.  Also, respect NOTE_INSN_BASIC_BLOCK.  */
      while (GET_CODE (insn) == CODE_LABEL
	     || NOTE_INSN_BASIC_BLOCK_P (insn))
	insn = NEXT_INSN (insn);

      new_insn = emit_insn_before (pat, insn);
    }
  else
    new_insn = emit_insn_after (pat, insn);

  while (1)
    {
      if (INSN_P (pat))
	{
	  add_label_notes (PATTERN (pat), new_insn);
	  note_stores (PATTERN (pat), record_set_info, pat);
	}
      if (pat == pat_end)
	break;
      pat = NEXT_INSN (pat);
    }

  gcse_create_count++;

  if (gcse_file)
    {
      fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
	       bb->index, INSN_UID (new_insn));
      fprintf (gcse_file, "copying expression %d to reg %d\n",
	       expr->bitmap_index, regno);
    }
}

/* Insert partially redundant expressions on edges in the CFG to make
   the expressions fully redundant.  */
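/* A worked picture (expository, not from the original sources): in the
   diamond

	   B1
	  /  \
	B2    B3	(B2 computes a+b, B3 does not)
	  \  /
	   B4		(a+b is partially redundant here)

   PRE_INSERT_MAP directs us to insert "reaching_reg = a + b" on the edge
   B3->B4, after which the evaluation of a+b in B4 is fully redundant and
   has been replaced by a copy from reaching_reg.  */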

static int
pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
{
  int e, i, j, num_edges, set_size, did_insert = 0;
  sbitmap *inserted;

  /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
     if it reaches any of the deleted expressions.  */

  set_size = pre_insert_map[0]->size;
  num_edges = NUM_EDGES (edge_list);
  inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
  sbitmap_vector_zero (inserted, num_edges);

  for (e = 0; e < num_edges; e++)
    {
      int indx;
      basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);

      for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
	{
	  SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];

	  for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
	    if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
	      {
		struct expr *expr = index_map[j];
		struct occr *occr;

		/* Now look at each deleted occurrence of this expression.  */
		for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
		  {
		    if (! occr->deleted_p)
		      continue;

		    /* Insert this expression on this edge if it would
		       reach the deleted occurrence in BB.  */
		    if (!TEST_BIT (inserted[e], j))
		      {
			rtx insn;
			edge eg = INDEX_EDGE (edge_list, e);

			/* We can't insert anything on an abnormal and
			   critical edge, so we insert the insn at the end of
			   the previous block.  There are several alternatives
			   detailed in Morgan's book, p. 277 (sec. 10.5), for
			   handling this situation.  This one is easiest for
			   now.  */

			if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
			  insert_insn_end_bb (index_map[j], bb, 0);
			else
			  {
			    insn = process_insert_insn (index_map[j]);
			    insert_insn_on_edge (insn, eg);
			  }

			if (gcse_file)
			  {
			    fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
				     bb->index,
				     INDEX_EDGE_SUCC_BB (edge_list, e)->index);
			    fprintf (gcse_file, "copy expression %d\n",
				     expr->bitmap_index);
			  }

			update_ld_motion_stores (expr);
			SET_BIT (inserted[e], j);
			did_insert = 1;
			gcse_create_count++;
		      }
		  }
	      }
	}
    }

  sbitmap_vector_free (inserted);
  return did_insert;
}

/* Copy the result of INSN to REG.  INDX is the expression number.
   Given "old_reg <- expr" (INSN), instead of adding after it
     reaching_reg <- old_reg
   it's better to do the following:
     reaching_reg <- expr
     old_reg <- reaching_reg
   because this way copy propagation can discover additional PRE
   opportunities.  */
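/* A before/after sketch (expository, not from the original sources):

	before:   r58 = r40 + r41	; INSN computes expr into old_reg
	after:    r100 = r40 + r41	; INSN retargeted to reaching_reg
		  r58 = r100		; new copy emitted after INSN

   where r100 is expr->reaching_reg.  A later cprop pass can then forward
   r100 into the uses of r58, exposing further redundancies.  */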

static void
pre_insert_copy_insn (struct expr *expr, rtx insn)
{
  rtx reg = expr->reaching_reg;
  int regno = REGNO (reg);
  int indx = expr->bitmap_index;
  rtx set = single_set (insn);
  rtx new_insn;
  rtx new_set;
  rtx old_reg;

  if (!set)
    abort ();

  old_reg = SET_DEST (set);
  new_insn = emit_insn_after (gen_move_insn (old_reg, reg), insn);
  new_set = single_set (new_insn);

  if (!new_set)
    abort ();
  SET_DEST (set) = reg;

  /* Keep register set table up to date.  */
  replace_one_set (REGNO (old_reg), insn, new_insn);
  record_one_set (regno, insn);

  gcse_create_count++;

  if (gcse_file)
    fprintf (gcse_file,
	     "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
	     BLOCK_NUM (insn), INSN_UID (new_insn), indx,
	     INSN_UID (insn), regno);
  update_ld_motion_stores (expr);
}

/* Copy available expressions that reach the redundant expression
   to `reaching_reg'.  */

static void
pre_insert_copies (void)
{
  unsigned int i;
  struct expr *expr;
  struct occr *occr;
  struct occr *avail;

  /* For each available expression in the table, copy the result to
     `reaching_reg' if the expression reaches a deleted one.

     ??? The current algorithm is rather brute force.
     Need to do some profiling.  */

  for (i = 0; i < expr_hash_table.size; i++)
    for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
      {
	/* If the basic block isn't reachable, PPOUT will be TRUE.  However,
	   we don't want to insert a copy here because the expression may not
	   really be redundant.  So only insert an insn if the expression was
	   deleted.  This test also avoids further processing if the
	   expression wasn't deleted anywhere.  */
	if (expr->reaching_reg == NULL)
	  continue;

	for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	  {
	    if (! occr->deleted_p)
	      continue;

	    for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
	      {
		rtx insn = avail->insn;

		/* No need to handle this one if handled already.  */
		if (avail->copied_p)
		  continue;

		/* Don't handle this one if it's a redundant one.  */
		if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
		  continue;

		/* Or if the expression doesn't reach the deleted one.  */
		if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
					       expr,
					       BLOCK_FOR_INSN (occr->insn)))
		  continue;

		/* Copy the result of avail to reaching_reg.  */
		pre_insert_copy_insn (expr, insn);
		avail->copied_p = 1;
	      }
	  }
      }
}

/* Emit move from SRC to DEST noting the equivalence with expression computed
   in INSN.  */
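/* For example (expository, not from the original sources): if INSN is
   "r58 = r40 + r41" and we emit the copy "r70 = r58" after it, the new
   insn gets a REG_EQUAL note of (plus r40 r41), so later CSE/cprop
   passes still know what value r70 carries, not merely that it copies
   r58.  */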
static rtx
gcse_emit_move_after (rtx src, rtx dest, rtx insn)
{
  rtx new;
  rtx set = single_set (insn), set2;
  rtx note;
  rtx eqv;

  /* This should never fail since we're creating a reg->reg copy
     we've verified to be valid.  */

  new = emit_insn_after (gen_move_insn (dest, src), insn);

  /* Note the equivalence for local CSE pass.  */
  set2 = single_set (new);
  if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
    return new;
  if ((note = find_reg_equal_equiv_note (insn)))
    eqv = XEXP (note, 0);
  else
    eqv = SET_SRC (set);

  set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));

  return new;
}

/* Delete redundant computations.
   Deletion is done by changing the insn to copy the `reaching_reg' of
   the expression into the result of the SET.  It is left to later passes
   (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.

   Returns nonzero if a change is made.  */
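/* Concretely (an expository sketch, not from the original sources): a
   fully redundant "r60 = r40 + r41" whose bit is set in PRE_DELETE_MAP
   becomes "r60 = r100", where r100 is the expression's reaching_reg;
   the arithmetic itself then lives only at the insertion points chosen
   by LCM.  */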

static int
pre_delete (void)
{
  unsigned int i;
  int changed;
  struct expr *expr;
  struct occr *occr;

  changed = 0;
  for (i = 0; i < expr_hash_table.size; i++)
    for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
      {
	int indx = expr->bitmap_index;

	/* We only need to search antic_occr since we require
	   ANTLOC != 0.  */

	for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	  {
	    rtx insn = occr->insn;
	    rtx set;
	    basic_block bb = BLOCK_FOR_INSN (insn);

	    if (TEST_BIT (pre_delete_map[bb->index], indx))
	      {
		set = single_set (insn);
		if (! set)
		  abort ();

		/* Create a pseudo-reg to store the result of reaching
		   expressions into.  Get the mode for the new pseudo from
		   the mode of the original destination pseudo.  */
		if (expr->reaching_reg == NULL)
		  expr->reaching_reg
		    = gen_reg_rtx (GET_MODE (SET_DEST (set)));

		gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
		delete_insn (insn);
		occr->deleted_p = 1;
		SET_BIT (pre_redundant_insns, INSN_CUID (insn));
		changed = 1;
		gcse_subst_count++;

		if (gcse_file)
		  {
		    fprintf (gcse_file,
			     "PRE: redundant insn %d (expression %d) in ",
			     INSN_UID (insn), indx);
		    fprintf (gcse_file, "bb %d, reaching reg is %d\n",
			     bb->index, REGNO (expr->reaching_reg));
		  }
	      }
	  }
      }

  return changed;
}

/* Perform GCSE optimizations using PRE.
   This is called by one_pre_gcse_pass after all the dataflow analysis
   has been done.

   This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
   lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
   Compiler Design and Implementation.

   ??? A new pseudo reg is created to hold the reaching expression.  The nice
   thing about the classical approach is that it would try to use an existing
   reg.  If the register can't be adequately optimized [i.e. we introduce
   reload problems], one could add a pass here to propagate the new register
   through the block.

   ??? We don't handle single sets in PARALLELs because we're [currently] not
   able to copy the rest of the parallel when we insert copies to create full
   redundancies from partial redundancies.  However, there's no reason why we
   can't handle PARALLELs in the cases where there are no partial
   redundancies.  */

static int
pre_gcse (void)
{
  unsigned int i;
  int did_insert, changed;
  struct expr **index_map;
  struct expr *expr;

  /* Compute a mapping from expression number (`bitmap_index') to
     hash table entry.  */

  index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
  for (i = 0; i < expr_hash_table.size; i++)
    for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
      index_map[expr->bitmap_index] = expr;

  /* Reset bitmap used to track which insns are redundant.  */
  pre_redundant_insns = sbitmap_alloc (max_cuid);
  sbitmap_zero (pre_redundant_insns);

  /* Delete the redundant insns first so that
     - we know what register to use for the new insns and for the other
       ones with reaching expressions
     - we know which insns are redundant when we go to create copies  */

  changed = pre_delete ();

  did_insert = pre_edge_insert (edge_list, index_map);

  /* In other places with reaching expressions, copy the expression to the
     specially allocated pseudo-reg that reaches the redundant expr.  */
  pre_insert_copies ();
  if (did_insert)
    {
      commit_edge_insertions ();
      changed = 1;
    }

  free (index_map);
  sbitmap_free (pre_redundant_insns);
  return changed;
}

/* Top level routine to perform one PRE GCSE pass.

   Return nonzero if a change was made.  */

static int
one_pre_gcse_pass (int pass)
{
  int changed = 0;

  gcse_subst_count = 0;
  gcse_create_count = 0;

  alloc_hash_table (max_cuid, &expr_hash_table, 0);
  add_noreturn_fake_exit_edges ();
  if (flag_gcse_lm)
    compute_ld_motion_mems ();

  compute_hash_table (&expr_hash_table);
  trim_ld_motion_mems ();
  if (gcse_file)
    dump_hash_table (gcse_file, "Expression", &expr_hash_table);

  if (expr_hash_table.n_elems > 0)
    {
      alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
      compute_pre_data ();
      changed |= pre_gcse ();
      free_edge_list (edge_list);
      free_pre_mem ();
    }

  free_ldst_mems ();
  remove_fake_edges ();
  free_hash_table (&expr_hash_table);

  if (gcse_file)
    {
      fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
	       current_function_name, pass, bytes_used);
      fprintf (gcse_file, "%d substs, %d insns created\n",
	       gcse_subst_count, gcse_create_count);
    }

  return changed;
}
\f
/* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
   If notes are added to an insn which references a CODE_LABEL, the
   LABEL_NUSES count is incremented.  We have to add REG_LABEL notes,
   because the following loop optimization pass requires them.  */

/* ??? This is very similar to the loop.c add_label_notes function.  We
   could probably share code here.  */

/* ??? If there was a jump optimization pass after gcse and before loop,
   then we would not need to do this here, because jump would add the
   necessary REG_LABEL notes.  */

static void
add_label_notes (rtx x, rtx insn)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  const char *fmt;

  if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
    {
      /* This code used to ignore labels that referred to dispatch tables to
	 avoid flow generating (slightly) worse code.

	 We no longer ignore such label references (see LABEL_REF handling in
	 mark_jump_label for additional information).  */

      REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
					    REG_NOTES (insn));
      if (LABEL_P (XEXP (x, 0)))
	LABEL_NUSES (XEXP (x, 0))++;
      return;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	add_label_notes (XEXP (x, i), insn);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  add_label_notes (XVECEXP (x, i, j), insn);
    }
}

/* Compute transparent outgoing information for each block.

   An expression is transparent to an edge unless it is killed by
   the edge itself.  This can only happen with abnormal control flow,
   when the edge is traversed through a call.  This happens with
   non-local labels and exceptions.

   This would not be necessary if we split the edge.  While this is
   normally impossible for abnormal critical edges, with some effort
   it should be possible with exception handling, since we still have
   control over which handler should be invoked.  But due to increased
   EH table sizes, this may not be worthwhile.  */
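/* Illustration (added commentary, not from the original sources): if a
   block ends in "call foo ()" and an expression such as
   (mem (symbol_ref "x")) would otherwise be transparent across the
   call's abnormal outgoing edge (say, to a non-local label), the call
   may clobber that memory before control reaches the edge's
   destination, so the expression's bit is cleared in TRANSPOUT below;
   constant-pool references are known read-only and stay set.  */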

static void
compute_transpout (void)
{
  basic_block bb;
  unsigned int i;
  struct expr *expr;

  sbitmap_vector_ones (transpout, last_basic_block);

  FOR_EACH_BB (bb)
    {
      /* Note that flow inserted a nop at the end of basic blocks that
	 end in call instructions for reasons other than abnormal
	 control flow.  */
      if (GET_CODE (bb->end) != CALL_INSN)
	continue;

      for (i = 0; i < expr_hash_table.size; i++)
	for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
	  if (GET_CODE (expr->expr) == MEM)
	    {
	      if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
		  && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
		continue;

	      /* ??? Optimally, we would use interprocedural alias
		 analysis to determine if this mem is actually killed
		 by this call.  */
	      RESET_BIT (transpout[bb->index], expr->bitmap_index);
	    }
    }
}

/* Removal of useless null pointer checks */

/* Called via note_stores.  X is set by SETTER.  If X is a register we must
   invalidate nonnull_local and set nonnull_killed.  DATA is really a
   `null_pointer_info *'.

   We ignore hard registers.  */

static void
invalidate_nonnull_info (rtx x, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  unsigned int regno;
  struct null_pointer_info *npi = (struct null_pointer_info *) data;

  while (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* Ignore anything that is not a register or is a hard register.  */
  if (GET_CODE (x) != REG
      || REGNO (x) < npi->min_reg
      || REGNO (x) >= npi->max_reg)
    return;

  regno = REGNO (x) - npi->min_reg;

  RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
  SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
}

/* Do null-pointer check elimination for the registers indicated in
   NPI.  NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
   they are not our responsibility to free.  */

static int
delete_null_pointer_checks_1 (unsigned int *block_reg, sbitmap *nonnull_avin,
			      sbitmap *nonnull_avout,
			      struct null_pointer_info *npi)
{
  basic_block bb, current_block;
  sbitmap *nonnull_local = npi->nonnull_local;
  sbitmap *nonnull_killed = npi->nonnull_killed;
  int something_changed = 0;

  /* Compute local properties, nonnull and killed.  A register will have
     the nonnull property if at the end of the current block its value is
     known to be nonnull.  The killed property indicates that somewhere in
     the block any information we had about the register is killed.

     Note that a register can have both properties in a single block.  That
     indicates that it's killed, then later in the block a new value is
     computed.  */
  sbitmap_vector_zero (nonnull_local, last_basic_block);
  sbitmap_vector_zero (nonnull_killed, last_basic_block);

  FOR_EACH_BB (current_block)
    {
      rtx insn, stop_insn;

      /* Set the current block for invalidate_nonnull_info.  */
      npi->current_block = current_block;

      /* Scan each insn in the basic block looking for memory references and
	 register sets.  */
      stop_insn = NEXT_INSN (current_block->end);
      for (insn = current_block->head;
	   insn != stop_insn;
	   insn = NEXT_INSN (insn))
	{
	  rtx set;
	  rtx reg;

	  /* Ignore anything that is not a normal insn.  */
	  if (! INSN_P (insn))
	    continue;

	  /* Basically ignore anything that is not a simple SET.  We do have
	     to make sure to invalidate nonnull_local and set nonnull_killed
	     for such insns though.  */
	  set = single_set (insn);
	  if (!set)
	    {
	      note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
	      continue;
	    }

	  /* See if we've got a usable memory load.  We handle it first
	     in case it uses its address register as a dest (which kills
	     the nonnull property).  */
	  if (GET_CODE (SET_SRC (set)) == MEM
	      && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
	      && REGNO (reg) >= npi->min_reg
	      && REGNO (reg) < npi->max_reg)
	    SET_BIT (nonnull_local[current_block->index],
		     REGNO (reg) - npi->min_reg);

	  /* Now invalidate stuff clobbered by this insn.  */
	  note_stores (PATTERN (insn), invalidate_nonnull_info, npi);

	  /* And handle stores, we do these last since any sets in INSN can
	     not kill the nonnull property if it is derived from a MEM
	     appearing in a SET_DEST.  */
	  if (GET_CODE (SET_DEST (set)) == MEM
	      && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
	      && REGNO (reg) >= npi->min_reg
	      && REGNO (reg) < npi->max_reg)
	    SET_BIT (nonnull_local[current_block->index],
		     REGNO (reg) - npi->min_reg);
	}
    }

  /* Now compute global properties based on the local properties.  This
     is a classic global availability algorithm.  */
  compute_available (nonnull_local, nonnull_killed,
		     nonnull_avout, nonnull_avin);

  /* Now look at each bb and see if it ends with a compare of a value
     against zero.  */
  FOR_EACH_BB (bb)
    {
      rtx last_insn = bb->end;
      rtx condition, earliest;
      int compare_and_branch;

      /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
	 since BLOCK_REG[BB] is zero if this block did not end with a
	 comparison against zero, this condition works.  */
      if (block_reg[bb->index] < npi->min_reg
	  || block_reg[bb->index] >= npi->max_reg)
	continue;

      /* LAST_INSN is a conditional jump.  Get its condition.  */
      condition = get_condition (last_insn, &earliest, false);

      /* If we can't determine the condition then skip.  */
      if (! condition)
	continue;

      /* Is the register known to have a nonzero value?  */
      if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
	continue;

      /* Try to compute whether the compare/branch at the loop end is one or
	 two instructions.  */
      if (earliest == last_insn)
	compare_and_branch = 1;
      else if (earliest == prev_nonnote_insn (last_insn))
	compare_and_branch = 2;
      else
	continue;

      /* We know the register in this comparison is nonnull at exit from
	 this block.  We can optimize this comparison.  */
      if (GET_CODE (condition) == NE)
	{
	  rtx new_jump;

	  new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
					   last_insn);
	  JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
	  LABEL_NUSES (JUMP_LABEL (new_jump))++;
	  emit_barrier_after (new_jump);
	}

      something_changed = 1;
      delete_insn (last_insn);
      if (compare_and_branch == 2)
	delete_insn (earliest);
      purge_dead_edges (bb);

      /* Don't check this block again.  (Note that BLOCK_END is
	 invalid here; we deleted the last instruction in the
	 block.)  */
      block_reg[bb->index] = 0;
    }

  return something_changed;
}

/* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
   at compile time.

   This is conceptually similar to global constant/copy propagation and
   classic global CSE (it even uses the same dataflow equations as cprop).

   If a register is used as a memory address in the form (mem (reg)), then we
   know that REG cannot be zero at that point in the program.  Any instruction
   which sets REG "kills" this property.

   So, if every path leading to a conditional branch has an available memory
   reference of that form, then we know the register cannot have the value
   zero at the conditional branch.

   So we merely need to compute the local properties and propagate that data
   around the cfg, then optimize where possible.

   We run this pass two times.  Once before CSE, then again after CSE.  This
   has proven to be the most profitable approach.  It is rare for new
   optimization opportunities of this nature to appear after the first CSE
   pass.

   This could probably be integrated with global cprop with a little work.  */
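/* A picture of what gets removed (an expository sketch, not from the
   original sources):

	r58 = *r40;		; (mem (reg 40)) proves r40 != 0
	...			; nothing sets r40 on this path
	if (r40 == 0) goto L;	; provably never taken

   An EQ test like this is simply deleted; an NE test is first replaced
   by an unconditional jump to its label.  In both cases the compare (if
   separate) is deleted too and the now-dead edge is purged.  */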

int
delete_null_pointer_checks (rtx f ATTRIBUTE_UNUSED)
{
  sbitmap *nonnull_avin, *nonnull_avout;
  unsigned int *block_reg;
  basic_block bb;
  int reg;
  int regs_per_pass;
  int max_reg = max_reg_num ();
  struct null_pointer_info npi;
  int something_changed = 0;

  /* If we have only a single block, or it is too expensive, give up.  */
  if (n_basic_blocks <= 1
      || is_too_expensive (_("NULL pointer checks disabled")))
    return 0;

  /* We need four bitmaps, each with a bit for each register in each
     basic block.  */
  regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);

  /* Allocate bitmaps to hold local and global properties.  */
  npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
  npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
  nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
  nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);

  /* Go through the basic blocks, seeing whether or not each block
     ends with a conditional branch whose condition is a comparison
     against zero.  Record the register compared in BLOCK_REG.  */
  block_reg = xcalloc (last_basic_block, sizeof (int));
  FOR_EACH_BB (bb)
    {
      rtx last_insn = bb->end;
      rtx condition, earliest, reg;

      /* We only want conditional branches.  */
      if (GET_CODE (last_insn) != JUMP_INSN
	  || !any_condjump_p (last_insn)
	  || !onlyjump_p (last_insn))
	continue;

      /* LAST_INSN is a conditional jump.  Get its condition.  */
      condition = get_condition (last_insn, &earliest, false);

      /* If we were unable to get the condition, or it is not an equality
	 comparison against zero then there's nothing we can do.  */
      if (!condition
	  || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
	  || GET_CODE (XEXP (condition, 1)) != CONST_INT
	  || (XEXP (condition, 1)
	      != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
	continue;

      /* We must be checking a register against zero.  */
      reg = XEXP (condition, 0);
      if (GET_CODE (reg) != REG)
	continue;

      block_reg[bb->index] = REGNO (reg);
    }

  /* Go through the algorithm for each block of registers.  */
  for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
    {
      npi.min_reg = reg;
      npi.max_reg = MIN (reg + regs_per_pass, max_reg);
      something_changed |= delete_null_pointer_checks_1 (block_reg,
							 nonnull_avin,
							 nonnull_avout,
							 &npi);
    }

  /* Free the table of registers compared at the end of every block.  */
  free (block_reg);

  /* Free bitmaps.  */
  sbitmap_vector_free (npi.nonnull_local);
  sbitmap_vector_free (npi.nonnull_killed);
  sbitmap_vector_free (nonnull_avin);
  sbitmap_vector_free (nonnull_avout);

  return something_changed;
}

/* Code Hoisting variables and subroutines.  */

/* Very busy expressions.  */
static sbitmap *hoist_vbein;
static sbitmap *hoist_vbeout;

/* Hoistable expressions.  */
static sbitmap *hoist_exprs;

/* Dominator bitmaps.  */
dominance_info dominators;

/* ??? We could compute post dominators and run this algorithm in
   reverse to perform tail merging; doing so would probably be
   more effective than the tail merging code in jump.c.

   It's unclear if tail merging could be run in parallel with
   code hoisting.  It would be nice.  */

/* Allocate vars used for code hoisting analysis.  */

static void
alloc_code_hoist_mem (int n_blocks, int n_exprs)
{
  antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
  transp = sbitmap_vector_alloc (n_blocks, n_exprs);
  comp = sbitmap_vector_alloc (n_blocks, n_exprs);

  hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
  hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
  hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
  transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
}

/* Free vars used for code hoisting analysis.  */

static void
free_code_hoist_mem (void)
{
  sbitmap_vector_free (antloc);
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);

  sbitmap_vector_free (hoist_vbein);
  sbitmap_vector_free (hoist_vbeout);
  sbitmap_vector_free (hoist_exprs);
  sbitmap_vector_free (transpout);

  free_dominance_info (dominators);
}

/* Compute the very busy expressions at entry/exit from each block.

   An expression is very busy if all paths from a given point
   compute the expression.  */
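/* The fixed point computed below solves the usual very-busy-expression
   equations; stated explicitly for exposition (this formulation is read
   off the code, it is not spelled out in the original sources):

	vbein[bb]  = antloc[bb] | (vbeout[bb] & transp[bb])
	vbeout[bb] = the intersection of vbein over bb's successors,
		     left at its initial all-zero value for the block
		     that immediately precedes the exit block.  */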

static void
compute_code_hoist_vbeinout (void)
{
  int changed, passes;
  basic_block bb;

  sbitmap_vector_zero (hoist_vbeout, last_basic_block);
  sbitmap_vector_zero (hoist_vbein, last_basic_block);

  passes = 0;
  changed = 1;

  while (changed)
    {
      changed = 0;

      /* We scan the blocks in the reverse order to speed up
	 the convergence.  */
      FOR_EACH_BB_REVERSE (bb)
	{
	  changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
					      hoist_vbeout[bb->index], transp[bb->index]);
	  if (bb->next_bb != EXIT_BLOCK_PTR)
	    sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
	}

      passes++;
    }

  if (gcse_file)
    fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
}

/* Top level routine to do the dataflow analysis needed by code hoisting.  */

static void
compute_code_hoist_data (void)
{
  compute_local_properties (transp, comp, antloc, &expr_hash_table);
  compute_transpout ();
  compute_code_hoist_vbeinout ();
  dominators = calculate_dominance_info (CDI_DOMINATORS);
  if (gcse_file)
    fprintf (gcse_file, "\n");
}

/* Determine if the expression identified by EXPR_INDEX would
   reach BB unimpaired if it was placed at the end of EXPR_BB.

   It's unclear exactly what Muchnick meant by "unimpaired".  It seems
   to me that the expression must either be computed or transparent in
   *every* block in the path(s) from EXPR_BB to BB.  Any other definition
   would allow the expression to be hoisted out of loops, even if
   the expression wasn't a loop invariant.

   Contrast this to reachability for PRE where an expression is
   considered reachable if *any* path reaches instead of *all*
   paths.  */

static int
hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited)
{
  edge pred;
  int visited_allocated_locally = 0;

  if (visited == NULL)
    {
      visited_allocated_locally = 1;
      visited = xcalloc (last_basic_block, 1);
    }

  for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
    {
      basic_block pred_bb = pred->src;

      if (pred->src == ENTRY_BLOCK_PTR)
	break;
      else if (pred_bb == expr_bb)
	continue;
      else if (visited[pred_bb->index])
	continue;

      /* Does this predecessor generate this expression?  */
      else if (TEST_BIT (comp[pred_bb->index], expr_index))
	break;
      else if (! TEST_BIT (transp[pred_bb->index], expr_index))
	break;

      /* Not killed.  */
      else
	{
	  visited[pred_bb->index] = 1;
	  if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
					   pred_bb, visited))
	    break;
	}
    }
  if (visited_allocated_locally)
    free (visited);

  return (pred == NULL);
}
\f
/* Actually perform code hoisting.  */
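/* The shape of the transformation (an expository sketch, not from the
   original sources): if both successors of BB evaluate a+b, and a+b is
   very busy and transparent at BB's exit, then

	 BB:  ...			 BB:  r100 = a + b;
	     /    \		=>	     /    \
	x = a + b;  y = a + b;		x = r100;   y = r100;

   the dominated computations are rewritten as copies from a new pseudo
   and a single computation is appended to BB.  */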

static void
hoist_code (void)
{
  basic_block bb, dominated;
  basic_block *domby;
  unsigned int domby_len;
  unsigned int i, j;
  struct expr **index_map;
  struct expr *expr;

  sbitmap_vector_zero (hoist_exprs, last_basic_block);

  /* Compute a mapping from expression number (`bitmap_index') to
     hash table entry.  */

  index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
  for (i = 0; i < expr_hash_table.size; i++)
    for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
      index_map[expr->bitmap_index] = expr;

  /* Walk over each basic block looking for potentially hoistable
     expressions; nothing gets hoisted from the entry block.  */
  FOR_EACH_BB (bb)
    {
      int found = 0;
      int insn_inserted_p;

      domby_len = get_dominated_by (dominators, bb, &domby);
      /* Examine each expression that is very busy at the exit of this
	 block.  These are the potentially hoistable expressions.  */
      for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
	{
	  int hoistable = 0;

	  if (TEST_BIT (hoist_vbeout[bb->index], i)
	      && TEST_BIT (transpout[bb->index], i))
	    {
	      /* We've found a potentially hoistable expression, now
		 we look at every block BB dominates to see if it
		 computes the expression.  */
	      for (j = 0; j < domby_len; j++)
		{
		  dominated = domby[j];
		  /* Ignore self dominance.  */
		  if (bb == dominated)
		    continue;
		  /* We've found a dominated block, now see if it computes
		     the busy expression and whether or not moving that
		     expression to the "beginning" of that block is safe.  */
		  if (!TEST_BIT (antloc[dominated->index], i))
		    continue;

		  /* Note if the expression would reach the dominated block
		     unimpaired if it was placed at the end of BB.

		     Keep track of how many times this expression is hoistable
		     from a dominated block into BB.  */
		  if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
		    hoistable++;
		}

	      /* If we found more than one hoistable occurrence of this
		 expression, then note it in the bitmap of expressions to
		 hoist.  It makes no sense to hoist things which are computed
		 in only one BB, and doing so tends to pessimize register
		 allocation.  One could increase this value to try harder
		 to avoid any possible code expansion due to register
		 allocation issues; however experiments have shown that
		 the vast majority of hoistable expressions are only movable
		 from two successors, so raising this threshold is likely
		 to nullify any benefit we get from code hoisting.  */
	      if (hoistable > 1)
		{
		  SET_BIT (hoist_exprs[bb->index], i);
		  found = 1;
		}
	    }
	}
      /* If we found nothing to hoist, then quit now.  */
      if (! found)
	{
	  free (domby);
	  continue;
	}

      /* Loop over all the hoistable expressions.  */
      for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
	{
	  /* We want to insert the expression into BB only once, so
	     note when we've inserted it.  */
	  insn_inserted_p = 0;

	  /* These tests should be the same as the tests above.  */
	  if (TEST_BIT (hoist_vbeout[bb->index], i))
	    {
	      /* We've found a potentially hoistable expression, now
		 we look at every block BB dominates to see if it
		 computes the expression.  */
	      for (j = 0; j < domby_len; j++)
		{
		  dominated = domby[j];
		  /* Ignore self dominance.  */
		  if (bb == dominated)
		    continue;

		  /* We've found a dominated block, now see if it computes
		     the busy expression and whether or not moving that
		     expression to the "beginning" of that block is safe.  */
		  if (!TEST_BIT (antloc[dominated->index], i))
		    continue;

		  /* The expression is computed in the dominated block and
		     it would be safe to compute it at the start of the
		     dominated block.  Now we have to determine if the
		     expression would reach the dominated block if it was
		     placed at the end of BB.  */
		  if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
		    {
		      struct expr *expr = index_map[i];
		      struct occr *occr = expr->antic_occr;
		      rtx insn;
		      rtx set;

		      /* Find the right occurrence of this expression.
			 (Test OCCR before dereferencing it, since the
			 list may run out first.)  */
		      while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
			occr = occr->next;

		      /* Should never happen.  */
		      if (!occr)
			abort ();

		      insn = occr->insn;

		      set = single_set (insn);
		      if (! set)
			abort ();

		      /* Create a pseudo-reg to store the result of reaching
			 expressions into.  Get the mode for the new pseudo
			 from the mode of the original destination pseudo.  */
		      if (expr->reaching_reg == NULL)
			expr->reaching_reg
			  = gen_reg_rtx (GET_MODE (SET_DEST (set)));

		      gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
		      delete_insn (insn);
		      occr->deleted_p = 1;
		      if (!insn_inserted_p)
			{
			  insert_insn_end_bb (index_map[i], bb, 0);
			  insn_inserted_p = 1;
			}
		    }
		}
	    }
	}
      free (domby);
    }

  free (index_map);
}

/* Top level routine to perform one code hoisting (aka unification) pass.

   Return nonzero if a change was made.  */

static int
one_code_hoisting_pass (void)
{
  int changed = 0;

  alloc_hash_table (max_cuid, &expr_hash_table, 0);
  compute_hash_table (&expr_hash_table);
  if (gcse_file)
    dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);

  if (expr_hash_table.n_elems > 0)
    {
      alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
      compute_code_hoist_data ();
      hoist_code ();
      free_code_hoist_mem ();
    }

  free_hash_table (&expr_hash_table);

  return changed;
}
\f
/* Here we provide the things required to do store motion towards
   the exit.  In order for this to be effective, gcse also needed to
   be taught how to move a load when it is killed only by a store to
   itself.

	    int i;
	    float a[10];

	    void foo(float scale)
	    {
	      for (i=0; i<10; i++)
		a[i] *= scale;
	    }

   'i' is both loaded and stored to in the loop.  Normally, gcse cannot move
   the load out since it's live around the loop, and stored at the bottom
   of the loop.

   The 'Load Motion' referred to and implemented in this file is
   an enhancement to gcse which, when using edge based LCM, recognizes
   this situation and allows gcse to move the load out of the loop.

   Once gcse has hoisted the load, store motion can then push this
   load towards the exit, and we end up with no loads or stores of 'i'
   in the loop.  */

/* This will search the ldst list for a matching expression.  If it
   doesn't find one, we create one and initialize it.  */

static struct ls_expr *
ldst_entry (rtx x)
{
  struct ls_expr * ptr;

  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    if (expr_equiv_p (ptr->pattern, x))
      break;

  if (!ptr)
    {
      ptr = xmalloc (sizeof (struct ls_expr));

      ptr->next = pre_ldst_mems;
      ptr->expr = NULL;
      ptr->pattern = x;
      ptr->pattern_regs = NULL_RTX;
      ptr->loads = NULL_RTX;
      ptr->stores = NULL_RTX;
      ptr->reaching_reg = NULL_RTX;
      ptr->invalid = 0;
      ptr->index = 0;
      ptr->hash_index = 0;
      pre_ldst_mems = ptr;
    }

  return ptr;
}

/* Free up an individual ldst entry.  */

static void
free_ldst_entry (struct ls_expr * ptr)
{
  free_INSN_LIST_list (& ptr->loads);
  free_INSN_LIST_list (& ptr->stores);

  free (ptr);
}

/* Free up all memory associated with the ldst list.  */

static void
free_ldst_mems (void)
{
  while (pre_ldst_mems)
    {
      struct ls_expr * tmp = pre_ldst_mems;

      pre_ldst_mems = pre_ldst_mems->next;

      free_ldst_entry (tmp);
    }

  pre_ldst_mems = NULL;
}

/* Dump debugging info about the ldst list.  */

static void
print_ldst_list (FILE * file)
{
  struct ls_expr * ptr;

  fprintf (file, "LDST list: \n");

  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      fprintf (file, "  Pattern (%3d): ", ptr->index);

      print_rtl (file, ptr->pattern);

      fprintf (file, "\n	 Loads : ");

      if (ptr->loads)
	print_rtl (file, ptr->loads);
      else
	fprintf (file, "(nil)");

      fprintf (file, "\n	Stores : ");

      if (ptr->stores)
	print_rtl (file, ptr->stores);
      else
	fprintf (file, "(nil)");

      fprintf (file, "\n\n");
    }

  fprintf (file, "\n");
}

/* Return the entry in the list of ldst only expressions which matches X
   and has not been invalidated, or NULL if there is none.  */

static struct ls_expr *
find_rtx_in_ldst (rtx x)
{
  struct ls_expr * ptr;

  for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
    if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
      return ptr;

  return NULL;
}

/* Assign each element of the list of mems a monotonically increasing value.  */

static int
enumerate_ldsts (void)
{
  struct ls_expr * ptr;
  int n = 0;

  for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
    ptr->index = n++;

  return n;
}

/* Return first item in the list.  */

static inline struct ls_expr *
first_ls_expr (void)
{
  return pre_ldst_mems;
}

/* Return the next item in the list after the specified one.  */

static inline struct ls_expr *
next_ls_expr (struct ls_expr * ptr)
{
  return ptr->next;
}

/* Load Motion for loads which only kill themselves.  */

/* Return true if x is a simple MEM operation, with no registers or
   side effects.  These are the types of loads we consider for the
   ld_motion list, otherwise we let the usual aliasing take care of it.  */

static int
simple_mem (rtx x)
{
  if (GET_CODE (x) != MEM)
    return 0;

  if (MEM_VOLATILE_P (x))
    return 0;

  if (GET_MODE (x) == BLKmode)
    return 0;

  /* If we are handling exceptions, we must be careful with memory references
     that may trap.  If we are not, the behavior is undefined, so we may just
     continue.  */
  if (flag_non_call_exceptions && may_trap_p (x))
    return 0;

  if (side_effects_p (x))
    return 0;

  /* Do not consider function arguments passed on stack.  */
  if (reg_mentioned_p (stack_pointer_rtx, x))
    return 0;

  if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
    return 0;

  return 1;
}

/* Make sure there isn't a buried reference in this pattern anywhere.
   If there is, invalidate the entry for it since we're not capable
   of fixing it up just yet.  We have to be sure we know about ALL
   loads since the aliasing code will allow all entries in the
   ld_motion list to not-alias themselves.  If we miss a load, we
   will get the wrong value since gcse might common it and we won't
   know to fix it up.  */

static void
invalidate_any_buried_refs (rtx x)
{
  const char * fmt;
  int i, j;
  struct ls_expr * ptr;

  /* Invalidate it in the list.  */
  if (GET_CODE (x) == MEM && simple_mem (x))
    {
      ptr = ldst_entry (x);
      ptr->invalid = 1;
    }

  /* Recursively process the insn.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	invalidate_any_buried_refs (XEXP (x, i));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  invalidate_any_buried_refs (XVECEXP (x, i, j));
    }
}

/* Find all the 'simple' MEMs which are used in LOADs and STORES.  Simple
   being defined as MEM loads and stores to symbols, with no side effects
   and no registers in the expression.  For a MEM destination, we also
   check that the insn is still valid if we replace the destination with a
   REG, as is done in update_ld_motion_stores.  If there are any uses/defs
   which don't match these criteria, they are invalidated and trimmed out
   later.  */
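
/* For illustration (sketched RTL with hypothetical pseudo-register
   numbers): a load of the form

	(set (reg:SI 100) (mem:SI (symbol_ref ("i"))))

   is recorded on the loads list of the corresponding ls_expr, and a
   store of the form

	(set (mem:SI (symbol_ref ("i"))) (reg:SI 100))

   is recorded on the stores list; a 'simple' MEM appearing in any other
   context invalidates its entry.  */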
static void
compute_ld_motion_mems (void)
{
  struct ls_expr * ptr;
  basic_block bb;
  rtx insn;

  pre_ldst_mems = NULL;

  FOR_EACH_BB (bb)
    {
      for (insn = bb->head;
	   insn && insn != NEXT_INSN (bb->end);
	   insn = NEXT_INSN (insn))
	{
	  if (INSN_P (insn))
	    {
	      if (GET_CODE (PATTERN (insn)) == SET)
		{
		  rtx src = SET_SRC (PATTERN (insn));
		  rtx dest = SET_DEST (PATTERN (insn));

		  /* Check for a simple LOAD...  */
		  if (GET_CODE (src) == MEM && simple_mem (src))
		    {
		      ptr = ldst_entry (src);
		      if (GET_CODE (dest) == REG)
			ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
		      else
			ptr->invalid = 1;
		    }
		  else
		    {
		      /* Make sure there isn't a buried load somewhere.  */
		      invalidate_any_buried_refs (src);
		    }

		  /* Check for stores.  Don't worry about aliased ones, they
		     will block any movement we might do later.  We only care
		     about this exact pattern since that is the only
		     circumstance in which we will ignore the aliasing info.  */
		  if (GET_CODE (dest) == MEM && simple_mem (dest))
		    {
		      ptr = ldst_entry (dest);

		      if (GET_CODE (src) != MEM
			  && GET_CODE (src) != ASM_OPERANDS
			  /* Check for REG manually since want_to_gcse_p
			     returns 0 for all REGs.  */
			  && (REG_P (src) || want_to_gcse_p (src)))
			ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
		      else
			ptr->invalid = 1;
		    }
		}
	      else
		invalidate_any_buried_refs (PATTERN (insn));
	    }
	}
    }
}

/* Remove any references that have either been invalidated or are not in
   the expression list for pre gcse.  */

static void
trim_ld_motion_mems (void)
{
  struct ls_expr * last = NULL;
  struct ls_expr * ptr = first_ls_expr ();

  while (ptr != NULL)
    {
      int del = ptr->invalid;
      struct expr * expr = NULL;

      /* Delete if entry has been made invalid.  */
      if (!del)
	{
	  unsigned int i;

	  del = 1;
	  /* Delete if we cannot find this mem in the expression list.  */
	  for (i = 0; i < expr_hash_table.size && del; i++)
	    {
	      for (expr = expr_hash_table.table[i];
		   expr != NULL;
		   expr = expr->next_same_hash)
		if (expr_equiv_p (expr->expr, ptr->pattern))
		  {
		    del = 0;
		    break;
		  }
	    }
	}

      if (del)
	{
	  if (last != NULL)
	    {
	      last->next = ptr->next;
	      free_ldst_entry (ptr);
	      ptr = last->next;
	    }
	  else
	    {
	      pre_ldst_mems = pre_ldst_mems->next;
	      free_ldst_entry (ptr);
	      ptr = pre_ldst_mems;
	    }
	}
      else
	{
	  /* Set the expression field if we are keeping it.  */
	  last = ptr;
	  ptr->expr = expr;
	  ptr = ptr->next;
	}
    }

  /* Show the world what we've found.  */
  if (gcse_file && pre_ldst_mems != NULL)
    print_ldst_list (gcse_file);
}

/* This routine will take an expression which we are replacing with
   a reaching register, and update any stores that are needed if
   that expression is in the ld_motion list.  Stores are updated by
   copying their SRC to the reaching register, and then storing
   the reaching register into the store location.  This keeps the
   correct value in the reaching register for the loads.  */

static void
update_ld_motion_stores (struct expr * expr)
{
  struct ls_expr * mem_ptr;

  if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
    {
      /* We could try to find just the REACHED stores, but it shouldn't
	 matter to set the reaching reg everywhere...  some might be
	 dead and should be eliminated later.  */

      /* We replace (set mem expr) with (set reg expr) (set mem reg)
	 where reg is the reaching reg used in the load.  We checked in
	 compute_ld_motion_mems that we can replace (set mem expr) with
	 (set reg expr) in that insn.  */
      rtx list = mem_ptr->stores;

      for ( ; list != NULL_RTX; list = XEXP (list, 1))
	{
	  rtx insn = XEXP (list, 0);
	  rtx pat = PATTERN (insn);
	  rtx src = SET_SRC (pat);
	  rtx reg = expr->reaching_reg;
	  rtx copy, new;

	  /* If we've already copied it, continue.  */
	  if (expr->reaching_reg == src)
	    continue;

	  if (gcse_file)
	    {
	      fprintf (gcse_file, "PRE:  store updated with reaching reg ");
	      print_rtl (gcse_file, expr->reaching_reg);
	      fprintf (gcse_file, ":\n	");
	      print_inline_rtx (gcse_file, insn, 8);
	      fprintf (gcse_file, "\n");
	    }

	  copy = gen_move_insn (reg, copy_rtx (SET_SRC (pat)));
	  new = emit_insn_before (copy, insn);
	  record_one_set (REGNO (reg), new);
	  SET_SRC (pat) = reg;

	  /* Un-recognize this pattern since it's probably different now.  */
	  INSN_CODE (insn) = -1;
	  gcse_create_count++;
	}
    }
}

/* Store motion code.  */

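/* Note that the fields of an ls_expr are reused here with different
   meanings: during store motion the 'loads' list holds anticipatable
   store insns, the 'stores' list holds available store insns, and
   'reaching_reg' temporarily records the insn that made the last
   availability check fail (see find_moveable_store; the temporary
   markers are cleared again in compute_store_table).  */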
#define ANTIC_STORE_LIST(x)		((x)->loads)
#define AVAIL_STORE_LIST(x)		((x)->stores)
#define LAST_AVAIL_CHECK_FAILURE(x)	((x)->reaching_reg)

/* This is used to communicate the target bitvector we want to use in the
   reg_set_info routine when called via the note_stores mechanism.  */
static int * regvec;

/* And current insn, for the same routine.  */
static rtx compute_store_table_current_insn;

/* Used in computing the reverse edge graph bit vectors.  */
static sbitmap * st_antloc;

/* Global holding the number of store expressions we are dealing with.  */
static int num_stores;

/* Mark, in REGVEC, that register DEST is set by the insn currently
   being processed.  Called from note_stores.  */

static void
reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED,
	      void *data ATTRIBUTE_UNUSED)
{
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
}

/* Return false if any of the registers in list X are set in the
   array REGS_SET, i.e. if the store would be killed; otherwise
   return true.  */

static bool
store_ops_ok (rtx x, int *regs_set)
{
  rtx reg;

  for (; x; x = XEXP (x, 1))
    {
      reg = XEXP (x, 0);
      if (regs_set[REGNO (reg)])
	return false;
    }

  return true;
}

/* Returns a list of registers mentioned in X.  */
static rtx
extract_mentioned_regs (rtx x)
{
  return extract_mentioned_regs_helper (x, NULL_RTX);
}

/* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
   registers.  */
static rtx
extract_mentioned_regs_helper (rtx x, rtx accum)
{
  int i;
  enum rtx_code code;
  const char * fmt;

  /* Repeat is used to turn tail-recursion into iteration.  */
 repeat:

  if (x == 0)
    return accum;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      return alloc_EXPR_LIST (0, x, accum);

    case MEM:
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
      /* We do not run this function with arguments having side effects.  */
      abort ();

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return accum;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);

  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx tem = XEXP (x, i);

	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.  */
	  if (i == 0)
	    {
	      x = tem;
	      goto repeat;
	    }

	  accum = extract_mentioned_regs_helper (tem, accum);
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = 0; j < XVECLEN (x, i); j++)
	    accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
	}
    }

  return accum;
}

/* Determine whether INSN is a MEM store pattern that we will consider moving.
   REGS_SET_BEFORE is an array recording the registers set before (and
   including) the current insn, REGS_SET_AFTER records the registers set
   after (and including) the insn in this basic block.  We must be passing
   through BB from head to end, as we are using this fact to speed things up.

   The results are stored this way:

   -- the first anticipatable expression is added into ANTIC_STORE_LIST
   -- if the processed expression is not anticipatable, NULL_RTX is added
      there instead, so that we can use it as an indicator that no further
      expression of this type may be anticipatable
   -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
      consequently, all of them but this head are dead and may be deleted.
   -- if the expression is not available, the insn that makes it unavailable
      is stored in reaching_reg.

   Things are complicated a bit by the fact that there may already be stores
   to the same MEM from other blocks; also, the caller must take care of the
   necessary cleanup of the temporary markers after the end of the basic
   block.  */
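
/* A sketch of the two properties, for a single block (the memory
   references are only illustrative):

	mem[sym] = r5;		(A)
	r7 = mem[sym2];		(a load that may alias mem[sym])
	mem[sym] = r8;		(B)

   (A) is anticipatable: nothing kills mem[sym] between the block head
   and (A).  It is not available, because the possibly aliasing load
   follows it.  (B) is available but not anticipatable.  */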

static void
find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
{
  struct ls_expr * ptr;
  rtx dest, set, tmp;
  int check_anticipatable, check_available;
  basic_block bb = BLOCK_FOR_INSN (insn);

  set = single_set (insn);
  if (!set)
    return;

  dest = SET_DEST (set);

  if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
      || GET_MODE (dest) == BLKmode)
    return;

  if (side_effects_p (dest))
    return;

  /* If we are handling exceptions, we must be careful with memory references
     that may trap.  If we are not, the behavior is undefined, so we may just
     continue.  */
  if (flag_non_call_exceptions && may_trap_p (dest))
    return;

  ptr = ldst_entry (dest);
  if (!ptr->pattern_regs)
    ptr->pattern_regs = extract_mentioned_regs (dest);

  /* Do not check for anticipatability if we either found one anticipatable
     store already, or tested for one and found out that it was killed.  */
  check_anticipatable = 0;
  if (!ANTIC_STORE_LIST (ptr))
    check_anticipatable = 1;
  else
    {
      tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
      if (tmp != NULL_RTX
	  && BLOCK_FOR_INSN (tmp) != bb)
	check_anticipatable = 1;
    }
  if (check_anticipatable)
    {
      if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
	tmp = NULL_RTX;
      else
	tmp = insn;
      ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
						ANTIC_STORE_LIST (ptr));
    }

  /* It is not necessary to check whether the store is available if we did
     it successfully before; if we failed before, do not bother to check
     until we reach the insn that caused us to fail.  */
  check_available = 0;
  if (!AVAIL_STORE_LIST (ptr))
    check_available = 1;
  else
    {
      tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
      if (BLOCK_FOR_INSN (tmp) != bb)
	check_available = 1;
    }
  if (check_available)
    {
      /* Check whether we have already reached the insn at which the
	 check failed last time.  */
      if (LAST_AVAIL_CHECK_FAILURE (ptr))
	{
	  for (tmp = bb->end;
	       tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
	       tmp = PREV_INSN (tmp))
	    continue;
	  if (tmp == insn)
	    check_available = 0;
	}
      else
	check_available = store_killed_after (dest, ptr->pattern_regs, insn,
					      bb, regs_set_after,
					      &LAST_AVAIL_CHECK_FAILURE (ptr));
    }
  if (!check_available)
    AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
}

/* Find available and anticipatable stores.  */
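
/* A note on the approach, summarizing the code below: every block is
   scanned twice.  The first scan fills LAST_SET_IN with, for each
   register, the UID of the last insn in the block setting it.  The
   second scan walks the insns in order, maintaining ALREADY_SET (the
   registers set up to and including the current insn) and trimming
   LAST_SET_IN down to the registers still set after the current insn,
   so that find_moveable_store can test anticipatability and
   availability cheaply.  */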

static int
compute_store_table (void)
{
  int ret;
  basic_block bb;
  unsigned regno;
  rtx insn, pat, tmp;
  int *last_set_in, *already_set;
  struct ls_expr * ptr, **prev_next_ptr_ptr;

  max_gcse_regno = max_reg_num ();

  reg_set_in_block = sbitmap_vector_alloc (last_basic_block,
					   max_gcse_regno);
  sbitmap_vector_zero (reg_set_in_block, last_basic_block);
  pre_ldst_mems = 0;
  last_set_in = xmalloc (sizeof (int) * max_gcse_regno);
  already_set = xmalloc (sizeof (int) * max_gcse_regno);

  /* Find all the stores we care about.  */
  FOR_EACH_BB (bb)
    {
      /* First compute the registers set in this block.  */
      memset (last_set_in, 0, sizeof (int) * max_gcse_regno);
      regvec = last_set_in;

      for (insn = bb->head;
	   insn != NEXT_INSN (bb->end);
	   insn = NEXT_INSN (insn))
	{
	  if (! INSN_P (insn))
	    continue;

	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP
	      if (NON_SAVING_SETJMP
		  && find_reg_note (insn, REG_SETJMP, NULL_RTX))
		clobbers_all = true;
#endif

	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		if (clobbers_all
		    || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
		  last_set_in[regno] = INSN_UID (insn);
	    }

	  pat = PATTERN (insn);
	  compute_store_table_current_insn = insn;
	  note_stores (pat, reg_set_info, NULL);
	}

      /* Record the set registers.  */
      for (regno = 0; regno < max_gcse_regno; regno++)
	if (last_set_in[regno])
	  SET_BIT (reg_set_in_block[bb->index], regno);

      /* Now find the stores.  */
      memset (already_set, 0, sizeof (int) * max_gcse_regno);
      regvec = already_set;
      for (insn = bb->head;
	   insn != NEXT_INSN (bb->end);
	   insn = NEXT_INSN (insn))
	{
	  if (! INSN_P (insn))
	    continue;

	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP
	      if (NON_SAVING_SETJMP
		  && find_reg_note (insn, REG_SETJMP, NULL_RTX))
		clobbers_all = true;
#endif

	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		if (clobbers_all
		    || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
		  already_set[regno] = 1;
	    }

	  pat = PATTERN (insn);
	  note_stores (pat, reg_set_info, NULL);

	  /* Now that we've marked regs, look for stores.  */
	  find_moveable_store (insn, already_set, last_set_in);

	  /* Unmark regs that are no longer set.  */
	  for (regno = 0; regno < max_gcse_regno; regno++)
	    if (last_set_in[regno] == INSN_UID (insn))
	      last_set_in[regno] = 0;
	}

      /* Clear temporary marks.  */
      for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
	{
	  LAST_AVAIL_CHECK_FAILURE (ptr) = NULL_RTX;
	  if (ANTIC_STORE_LIST (ptr)
	      && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
	    ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
	}
    }

  /* Remove the stores that are not available anywhere, as there will
     be no opportunity to optimize them.  */
  for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
       ptr != NULL;
       ptr = *prev_next_ptr_ptr)
    {
      if (!AVAIL_STORE_LIST (ptr))
	{
	  *prev_next_ptr_ptr = ptr->next;
	  free_ldst_entry (ptr);
	}
      else
	prev_next_ptr_ptr = &ptr->next;
    }

  ret = enumerate_ldsts ();

  if (gcse_file)
    {
      fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
      print_ldst_list (gcse_file);
    }

  free (last_set_in);
  free (already_set);
  return ret;
}

/* Check to see if the load X is aliased with STORE_PATTERN.
   AFTER is true if we are checking the case when STORE_PATTERN occurs
   after X.  */

static bool
load_kills_store (rtx x, rtx store_pattern, int after)
{
  if (after)
    return anti_dependence (x, store_pattern);
  else
    return true_dependence (store_pattern, GET_MODE (store_pattern), x,
			    rtx_addr_varies_p);
}

/* Go through the entire insn X, looking for any loads which might alias
   STORE_PATTERN.  Return true if found.
   AFTER is true if we are checking the case when STORE_PATTERN occurs
   after the insn X.  */

static bool
find_loads (rtx x, rtx store_pattern, int after)
{
  const char * fmt;
  int i, j;
  int ret = false;

  if (!x)
    return false;

  if (GET_CODE (x) == SET)
    x = SET_SRC (x);

  if (GET_CODE (x) == MEM)
    {
      if (load_kills_store (x, store_pattern, after))
	return true;
    }

  /* Recursively process the insn.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
    {
      if (fmt[i] == 'e')
	ret |= find_loads (XEXP (x, i), store_pattern, after);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
    }
  return ret;
}

/* Check if INSN kills the store pattern X (is aliased with it).
   AFTER is true if we are checking the case when store X occurs
   after the insn.  Return true if it does.  */

static bool
store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
{
  rtx reg, base, note;

  if (!INSN_P (insn))
    return false;

  if (GET_CODE (insn) == CALL_INSN)
    {
      /* A normal or pure call might read from pattern,
	 but a const call will not.  */
      if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
	return true;

      /* But even a const call reads its parameters.  Check whether the
	 base of some of the registers used in mem is the stack pointer.  */
      for (reg = x_regs; reg; reg = XEXP (reg, 1))
	{
	  base = find_base_term (XEXP (reg, 0));
	  if (!base
	      || (GET_CODE (base) == ADDRESS
		  && GET_MODE (base) == Pmode
		  && XEXP (base, 0) == stack_pointer_rtx))
	    return true;
	}

      return false;
    }

  if (GET_CODE (PATTERN (insn)) == SET)
    {
      rtx pat = PATTERN (insn);
      rtx dest = SET_DEST (pat);

      if (GET_CODE (dest) == SIGN_EXTRACT
	  || GET_CODE (dest) == ZERO_EXTRACT)
	dest = XEXP (dest, 0);

      /* Check for memory stores to aliased objects.  */
      if (GET_CODE (dest) == MEM
	  && !expr_equiv_p (dest, x))
	{
	  if (after)
	    {
	      if (output_dependence (dest, x))
		return true;
	    }
	  else
	    {
	      if (output_dependence (x, dest))
		return true;
	    }
	}
      if (find_loads (SET_SRC (pat), x, after))
	return true;
    }
  else if (find_loads (PATTERN (insn), x, after))
    return true;

  /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
     location aliased with X, then this insn kills X.  */
  note = find_reg_equal_equiv_note (insn);
  if (! note)
    return false;
  note = XEXP (note, 0);

  /* However, if the note represents a must alias rather than a may
     alias relationship, then it does not kill X.  */
  if (expr_equiv_p (note, x))
    return false;

  /* See if there are any aliased loads in the note.  */
  return find_loads (note, x, after);
}

/* Returns true if the expression X is loaded or clobbered on or after INSN
   within basic block BB.  REGS_SET_AFTER is an array recording the registers
   set in or after the insn.  X_REGS is the list of registers mentioned in X.
   If the store is killed, the last insn that kills it is returned in
   FAIL_INSN.  */

static bool
store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
		    int *regs_set_after, rtx *fail_insn)
{
  rtx last = bb->end, act;

  if (!store_ops_ok (x_regs, regs_set_after))
    {
      /* We do not know where it will happen.  */
      if (fail_insn)
	*fail_insn = NULL_RTX;
      return true;
    }

  /* Scan from the end, so that fail_insn is determined correctly.  */
  for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
    if (store_killed_in_insn (x, x_regs, act, false))
      {
	if (fail_insn)
	  *fail_insn = act;
	return true;
      }

  return false;
}

/* Returns true if the expression X is loaded or clobbered on or before INSN
   within basic block BB.  X_REGS is the list of registers mentioned in X.
   REGS_SET_BEFORE is an array recording the registers set before or in
   this insn.  */
static bool
store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb,
		     int *regs_set_before)
{
  rtx first = bb->head;

  if (!store_ops_ok (x_regs, regs_set_before))
    return true;

  for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
    if (store_killed_in_insn (x, x_regs, insn, true))
      return true;

  return false;
}

/* Fill in the available, anticipatable, transparent and kill vectors,
   based on the lists of available and anticipatable stores.  */
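
/* In summary (matching the code below): every insn on an expression's
   AVAIL_STORE_LIST sets that expression's bit in ae_gen for its block,
   every insn on its ANTIC_STORE_LIST sets the bit in st_antloc; the
   expression is transparent in a block when nothing in the block kills
   it, and otherwise it is marked in ae_kill unless it is both
   anticipatable and available there.  */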
static void
build_store_vectors (void)
{
  basic_block bb;
  int *regs_set_in_block;
  rtx insn, st;
  struct ls_expr * ptr;
  unsigned regno;

  /* Build the gen_vector.  This is any store in the table which is not killed
     by aliasing later in its block.  */
  ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (ae_gen, last_basic_block);

  st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (st_antloc, last_basic_block);

  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
	{
	  insn = XEXP (st, 0);
	  bb = BLOCK_FOR_INSN (insn);

	  /* If we've already seen an available expression in this block,
	     we can delete this one (it occurs earlier in the block).  We'll
	     copy the SRC expression to an unused register in case there
	     are any side effects.  */
	  if (TEST_BIT (ae_gen[bb->index], ptr->index))
	    {
	      rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
	      if (gcse_file)
		fprintf (gcse_file, "Removing redundant store:\n");
	      replace_store_insn (r, XEXP (st, 0), bb, ptr);
	      continue;
	    }
	  SET_BIT (ae_gen[bb->index], ptr->index);
	}

      for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
	{
	  insn = XEXP (st, 0);
	  bb = BLOCK_FOR_INSN (insn);
	  SET_BIT (st_antloc[bb->index], ptr->index);
	}
    }

  ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (ae_kill, last_basic_block);

  transp = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (transp, last_basic_block);
  regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);

  FOR_EACH_BB (bb)
    {
      for (regno = 0; regno < max_gcse_regno; regno++)
	regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);

      for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
	{
	  if (store_killed_after (ptr->pattern, ptr->pattern_regs, bb->head,
				  bb, regs_set_in_block, NULL))
	    {
	      /* It should not be necessary to consider the expression
		 killed if it is both anticipatable and available.  */
	      if (!TEST_BIT (st_antloc[bb->index], ptr->index)
		  || !TEST_BIT (ae_gen[bb->index], ptr->index))
		SET_BIT (ae_kill[bb->index], ptr->index);
	    }
	  else
	    SET_BIT (transp[bb->index], ptr->index);
	}
    }

  free (regs_set_in_block);

  if (gcse_file)
    {
      dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
      dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
      dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
      dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
    }
}

/* Insert an instruction at the beginning of a basic block, and update
   the BLOCK_HEAD if needed.  */

static void
insert_insn_start_bb (rtx insn, basic_block bb)
{
  /* Insert at start of successor block.  */
  rtx prev = PREV_INSN (bb->head);
  rtx before = bb->head;
  while (before != 0)
    {
      if (GET_CODE (before) != CODE_LABEL
	  && (GET_CODE (before) != NOTE
	      || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
	break;
      prev = before;
      if (prev == bb->end)
	break;
      before = NEXT_INSN (before);
    }

  insn = emit_insn_after (insn, prev);

  if (gcse_file)
    {
      fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
	       bb->index);
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }
}

/* This routine will insert a store on an edge.  EXPR is the ldst entry for
   the memory reference, and E is the edge to insert it on.  Returns nonzero
   if an edge insertion was performed.  */

static int
insert_store (struct ls_expr * expr, edge e)
{
  rtx reg, insn;
  basic_block bb;
  edge tmp;

  /* We did all the deletions before this insert, so if we didn't delete a
     store, then we haven't set the reaching reg yet either.  */
  if (expr->reaching_reg == NULL_RTX)
    return 0;

  if (e->flags & EDGE_FAKE)
    return 0;

  reg = expr->reaching_reg;
  insn = gen_move_insn (copy_rtx (expr->pattern), reg);

  /* If we are inserting this expression on ALL predecessor edges of a BB,
     insert it at the start of the BB, and reset the insert bits on the other
     edges so we don't try to insert it on the other edges.  */
  bb = e->dest;
  for (tmp = e->dest->pred; tmp; tmp = tmp->pred_next)
    if (!(tmp->flags & EDGE_FAKE))
      {
	int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
	if (index == EDGE_INDEX_NO_EDGE)
	  abort ();
	if (! TEST_BIT (pre_insert_map[index], expr->index))
	  break;
      }

  /* If tmp is NULL, we found an insertion on every edge, blank the
     insertion vector for these edges, and insert at the start of the BB.  */
  if (!tmp && bb != EXIT_BLOCK_PTR)
    {
      for (tmp = e->dest->pred; tmp; tmp = tmp->pred_next)
	{
	  int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
	  RESET_BIT (pre_insert_map[index], expr->index);
	}
      insert_insn_start_bb (insn, bb);
      return 0;
    }

  /* We can't insert on this edge, so we'll insert at the head of the
     successor's block.  See Morgan, sec 10.5.  */
  if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
    {
      insert_insn_start_bb (insn, bb);
      return 0;
    }

  insert_insn_on_edge (insn, e);

  if (gcse_file)
    {
      fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
	       e->src->index, e->dest->index);
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }

  return 1;
}

/* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
   memory location in SMEXPR set in basic block BB.

   This could be rather expensive.  */

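/* The walk below is a depth-first search over the CFG starting from
   BB's successors, using an explicit stack.  It does not continue into
   the exit block, already-visited blocks, or blocks that kill the
   store; in a block that anticipates the store, only the insns before
   the anticipatable store are scanned.  */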
static void
remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
{
  edge *stack = xmalloc (sizeof (edge) * n_basic_blocks), act;
  sbitmap visited = sbitmap_alloc (last_basic_block);
  int stack_top = 0;
  rtx last, insn, note;
  rtx mem = smexpr->pattern;

  sbitmap_zero (visited);
  act = bb->succ;

  while (1)
    {
      if (!act)
	{
	  if (!stack_top)
	    {
	      free (stack);
	      sbitmap_free (visited);
	      return;
	    }
	  act = stack[--stack_top];
	}
      bb = act->dest;

      if (bb == EXIT_BLOCK_PTR
	  || TEST_BIT (visited, bb->index)
	  || TEST_BIT (ae_kill[bb->index], smexpr->index))
	{
	  act = act->succ_next;
	  continue;
	}
      SET_BIT (visited, bb->index);

      if (TEST_BIT (st_antloc[bb->index], smexpr->index))
	{
	  for (last = ANTIC_STORE_LIST (smexpr);
	       BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
	       last = XEXP (last, 1))
	    continue;
	  last = XEXP (last, 0);
	}
      else
	last = NEXT_INSN (bb->end);

      for (insn = bb->head; insn != last; insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  {
	    note = find_reg_equal_equiv_note (insn);
	    if (!note || !expr_equiv_p (XEXP (note, 0), mem))
	      continue;

	    if (gcse_file)
	      fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
		       INSN_UID (insn));
	    remove_note (insn, note);
	  }
      act = act->succ_next;
      if (bb->succ)
	{
	  if (act)
	    stack[stack_top++] = act;
	  act = bb->succ;
	}
    }
}

/* This routine will replace a store with a SET to a specified register.  */

static void
replace_store_insn (rtx reg, rtx del, basic_block bb, struct ls_expr *smexpr)
{
  rtx insn, mem, note, set, ptr;

  mem = smexpr->pattern;
  insn = gen_move_insn (reg, SET_SRC (single_set (del)));
  insn = emit_insn_after (insn, del);

  if (gcse_file)
    {
      fprintf (gcse_file,
	       "STORE_MOTION delete insn in BB %d:\n ", bb->index);
      print_inline_rtx (gcse_file, del, 6);
      fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }

  for (ptr = ANTIC_STORE_LIST (smexpr); ptr; ptr = XEXP (ptr, 1))
    if (XEXP (ptr, 0) == del)
      {
	XEXP (ptr, 0) = insn;
	break;
      }
  delete_insn (del);

  /* Now we must handle REG_EQUAL notes whose contents are equal to the mem;
     they are no longer accurate if they are reached by this definition,
     so drop them.  */
  for (; insn != NEXT_INSN (bb->end); insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	set = single_set (insn);
	if (!set)
	  continue;
	if (expr_equiv_p (SET_DEST (set), mem))
	  return;
	note = find_reg_equal_equiv_note (insn);
	if (!note || !expr_equiv_p (XEXP (note, 0), mem))
	  continue;

	if (gcse_file)
	  fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
		   INSN_UID (insn));
	remove_note (insn, note);
      }
  remove_reachable_equiv_notes (bb, smexpr);
}


/* Delete a store, but copy the value that would have been stored into
   the reaching_reg for later storing.  */

static void
delete_store (struct ls_expr * expr, basic_block bb)
{
  rtx reg, i, del;

  if (expr->reaching_reg == NULL_RTX)
    expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));

  reg = expr->reaching_reg;

  for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
    {
      del = XEXP (i, 0);
      if (BLOCK_FOR_INSN (del) == bb)
	{
	  /* We know there is only one since we deleted redundant
	     ones during the available computation.  */
	  replace_store_insn (reg, del, bb, expr);
	  break;
	}
    }
}

/* Free memory used by store motion.  */

static void
free_store_memory (void)
{
  free_ldst_mems ();

  if (ae_gen)
    sbitmap_vector_free (ae_gen);
  if (ae_kill)
    sbitmap_vector_free (ae_kill);
  if (transp)
    sbitmap_vector_free (transp);
  if (st_antloc)
    sbitmap_vector_free (st_antloc);
  if (pre_insert_map)
    sbitmap_vector_free (pre_insert_map);
  if (pre_delete_map)
    sbitmap_vector_free (pre_delete_map);
  if (reg_set_in_block)
    sbitmap_vector_free (reg_set_in_block);

  ae_gen = ae_kill = transp = st_antloc = NULL;
  pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
}

/* Perform store motion.  Much like gcse, except we move expressions the
   other way by looking at the flowgraph in reverse.  */
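
/* In outline, summarizing the calls below: compute_store_table and
   build_store_vectors set up the local dataflow properties, then
   pre_edge_rev_lcm solves the lazy code motion problem on the reversed
   flowgraph; stores are deleted from the blocks chosen by the solution,
   re-inserted on the chosen edges, and finally commit_edge_insertions
   materializes the edge insertions.  */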

static void
store_motion (void)
{
  basic_block bb;
  int x;
  struct ls_expr * ptr;
  int update_flow = 0;

  if (gcse_file)
    {
      fprintf (gcse_file, "before store motion\n");
      print_rtl (gcse_file, get_insns ());
    }

  init_alias_analysis ();

  /* Find all the available and anticipatable stores.  */
  num_stores = compute_store_table ();
  if (num_stores == 0)
    {
      sbitmap_vector_free (reg_set_in_block);
      end_alias_analysis ();
      return;
    }

  /* Now compute kill & transp vectors.  */
  build_store_vectors ();
  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();

  edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
				st_antloc, ae_kill, &pre_insert_map,
				&pre_delete_map);

  /* Now we want to insert the new stores which are going to be needed.  */
  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      FOR_EACH_BB (bb)
	if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
	  delete_store (ptr, bb);

      for (x = 0; x < NUM_EDGES (edge_list); x++)
	if (TEST_BIT (pre_insert_map[x], ptr->index))
	  update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
    }

  if (update_flow)
    commit_edge_insertions ();

  free_store_memory ();
  free_edge_list (edge_list);
  remove_fake_edges ();
  end_alias_analysis ();
}

/* Entry point for jump bypassing optimization pass.  */

int
bypass_jumps (FILE *file)
{
  int changed;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= 1 || is_too_expensive (_ ("jump bypassing disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();

  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (get_insns ());

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (get_insns ());
  changed = one_cprop_pass (1, 1, 1);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "BYPASS of %s: %d basic blocks, ",
	       current_function_name, n_basic_blocks);
      fprintf (file, "%d bytes\n\n", bytes_used);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  return changed;
}

/* Return true if the graph is too expensive to optimize.  PASS is the
   optimization about to be performed.  */

static bool
is_too_expensive (const char *pass)
{
  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many
     edges as blocks.  But we do not want to punish small functions
     which have a couple of switch statements.  Rather than simply
     thresholding the number of blocks, use something with more
     graceful degradation.  */
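  /* As a worked example of the test below: a function with 1000 basic
     blocks is allowed up to 20000 + 1000 * 4 = 24000 edges before this
     check fires.  */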
  if (n_edges > 20000 + n_basic_blocks * 4)
    {
      if (warn_disabled_optimization)
	warning ("%s: %d basic blocks and %d edges/basic block",
		 pass, n_basic_blocks, n_edges / n_basic_blocks);

      return true;
    }

  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_reg_num ())
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      if (warn_disabled_optimization)
	warning ("%s: %d basic blocks and %d registers",
		 pass, n_basic_blocks, max_reg_num ());

      return true;
    }

  return false;
}

#include "gt-gcse.h"