/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass

*/

/* References searched while implementing this.

   Compilers: Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The Power of Assignment Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global Code Motion / Global Value Numbering
   C. Click
   ACM SIGPLAN Notices, Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices, Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "params.h"
#include "cselib.h"
#include "intl.h"
#include "obstack.h"

/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note that much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */

/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).
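
   For example (an illustrative sketch with made-up register numbers, not
   taken from an actual dump): if GCSE replaces a repeated computation with
   a copy from a new pseudo,

       r10 = r5 + r6;              r20 = r5 + r6; r10 = r20;
       ...                  =>     ...
       r11 = r5 + r6;              r11 = r20;

   the second copy/constant propagation pass can then replace later uses of
   r10 and r11 with r20, leaving the copies dead.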

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).
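
   As a hypothetical illustration, a computation of r5 + r6 inside a loop
   whose operands are not modified by the loop is partially redundant along
   the back edge; treating it that way makes PRE insert the computation in
   the preheader and reuse its result inside the loop.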

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found that doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.
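
   As an illustration (all block and register numbers here are made up):
   if "r11 = r5 + r6" in block B3 is partially redundant with
   "r10 = r5 + r6" in block B1, step 3 rewrites the B3 instruction as
   "r11 = r20", step 4 inserts "r20 = r5 + r6" on the paths that lacked
   the computation, and step 5 adds "r20 = r10" after the occurrence in B1
   so the value reaches B3 on every path.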

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is gained by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later, when things settle, things
   can be rearranged.

   Help stamp out big monolithic functions!  */
\f
/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

struct reg_use {rtx reg_rtx; };

/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};
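
/* For instance (hypothetical), if a block computes r2 + r3 three times with
   no intervening change to r2 or r3, only the first computation can be the
   block's entry on antic_occr and only the last its entry on avail_occr;
   the middle occurrence appears on neither list.  */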

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};

/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Whether the table is an expression table or a copy propagation one.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) \
  (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
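
/* For example (hypothetical numbers): if a function's real insns carry UIDs
   3, 7 and 8, with notes and barriers in the gaps, their CUIDs are 0, 1 and
   2, so arrays and bitmaps indexed by CUID have no wasted entries.  */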

/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with a global of the same name.  */
static unsigned int max_gcse_regno;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */
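
/* As a sketch of how the table is consulted (see record_one_set below for
   how entries are added), visiting every insn that sets pseudo REGNO is a
   simple list walk:

     struct reg_set *r;
     for (r = reg_set_table[regno]; r != NULL; r = r->next)
       note_insn (r->insn);

   where note_insn stands in for whatever processing the caller does.  */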

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100

/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by anything except itself
   (i.e., loads and stores to a single location).
   We can then allow movement of these MEM refs with a little special
   allowance (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be movable towards the exit block.  */
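
/* A hypothetical illustration: if mem[p] is read and written only through
   this one MEM, its loads can be satisfied from the reaching register, and
   a single store of that register can be sunk towards the exit block, since
   every recorded store writes the same side-effect-free value.  */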

struct ls_expr
{
  struct expr * expr;           /* Gcse expression reference for LM.  */
  rtx pattern;                  /* Pattern of this mem.  */
  rtx pattern_regs;             /* List of registers mentioned by the mem.  */
  rtx loads;                    /* INSN list of loads seen.  */
  rtx stores;                   /* INSN list of stores seen.  */
  struct ls_expr * next;        /* Next in the list.  */
  int invalid;                  /* Invalid for some reason.  */
  int index;                    /* If it maps to a bitmap index.  */
  unsigned int hash_index;      /* Index when in a hash table.  */
  rtx reaching_reg;             /* Register to use when re-writing.  */
};

/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number, for a list of insns which modify
   memory within that block.  */
static rtx * modify_mem_list;
bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;
bitmap canon_modify_mem_list_set;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;
\f
/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays, i.e.
   rd_kill[block_num][cuid_num]
   ae_kill[block_num][expr_num]  */

/* For reaching defs */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* for available exprs */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;

/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  basic_block current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};
\f
static void compute_can_copy (void);
static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
static void *grealloc (void *, size_t);
static void *gcse_alloc (unsigned long);
static void alloc_gcse_mem (rtx);
static void free_gcse_mem (void);
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static int get_bitmap_width (int, int, int);
static void record_one_set (int, rtx);
static void replace_one_set (int, rtx, rtx);
static void record_set_info (rtx, rtx, void *);
static void compute_sets (rtx);
static void hash_scan_insn (rtx, struct hash_table *, int);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool gcse_constant_p (rtx);
static int oprs_unchanged_p (rtx, rtx, int);
static int oprs_anticipatable_p (rtx, rtx);
static int oprs_available_p (rtx, rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
                                  struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
static unsigned int hash_expr_1 (rtx, enum machine_mode, int *);
static unsigned int hash_string_1 (const char *);
static unsigned int hash_set (int, int);
static int expr_equiv_p (rtx, rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, rtx, void *);
static void compute_hash_table (struct hash_table *);
static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static void compute_hash_table_work (struct hash_table *);
static void dump_hash_table (FILE *, const char *, struct hash_table *);
static struct expr *lookup_expr (rtx, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
static int oprs_not_set_p (rtx, rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
static void compute_transp (rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
                                      struct hash_table *);
static void compute_cprop_data (void);
static void find_used_regs (rtx *, void *);
static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
static void mems_conflict_for_gcse_p (rtx, rtx, void *);
static int load_killed_in_block_p (basic_block, int, rtx, int);
static void canon_list_insert (rtx, rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
static int one_cprop_pass (int, int, int);
static bool constprop_register (rtx, rtx, rtx, int);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (rtx, edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static void compute_pre_data (void);
static int pre_expr_reaches_here_p (basic_block, struct expr *,
                                    basic_block);
static void insert_insn_end_bb (struct expr *, basic_block, int);
static void pre_insert_copy_insn (struct expr *, rtx);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (void);
static int one_pre_gcse_pass (int);
static void add_label_notes (rtx, rtx);
static void alloc_code_hoist_mem (int, int);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
static void hoist_code (void);
static int one_code_hoisting_pass (void);
static void alloc_rd_mem (int, int);
static void free_rd_mem (void);
static void handle_rd_kill_set (rtx, int, basic_block);
static void compute_kill_rd (void);
static void compute_rd (void);
static void alloc_avail_expr_mem (int, int);
static void free_avail_expr_mem (void);
static void compute_ae_gen (struct hash_table *);
static int expr_killed_p (rtx, basic_block);
static void compute_ae_kill (sbitmap *, sbitmap *, struct hash_table *);
static int expr_reaches_here_p (struct occr *, struct expr *, basic_block,
                                int);
static rtx computing_insn (struct expr *, rtx);
static int def_reaches_here_p (rtx, rtx);
static int can_disregard_other_sets (struct reg_set **, rtx, int);
static int handle_avail_expr (rtx, struct expr *);
static int classic_gcse (void);
static int one_classic_gcse_pass (int);
static void invalidate_nonnull_info (rtx, rtx, void *);
static int delete_null_pointer_checks_1 (unsigned int *, sbitmap *, sbitmap *,
                                         struct null_pointer_info *);
static rtx process_insert_insn (struct expr *);
static int pre_edge_insert (struct edge_list *, struct expr **);
static int expr_reaches_here_p_work (struct occr *, struct expr *,
                                     basic_block, int, char *);
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
                                         basic_block, char *);
static struct ls_expr * ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void free_ldst_mems (void);
static void print_ldst_list (FILE *);
static struct ls_expr * find_rtx_in_ldst (rtx);
static int enumerate_ldsts (void);
static inline struct ls_expr * first_ls_expr (void);
static inline struct ls_expr * next_ls_expr (struct ls_expr *);
static int simple_mem (rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void reg_set_info (rtx, rtx, void *);
static void reg_clear_last_set (rtx, rtx, void *);
static bool store_ops_ok (rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
static bool load_kills_store (rtx, rtx, int);
static bool find_loads (rtx, rtx, int);
static bool store_killed_in_insn (rtx, rtx, rtx, int);
static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_bb (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
static void remove_reachable_equiv_notes (basic_block, struct ls_expr *);
static void replace_store_insn (rtx, rtx, basic_block, struct ls_expr *);
static void delete_store (struct ls_expr *, basic_block);
static void free_store_memory (void);
static void store_motion (void);
static void free_insn_expr_list_list (rtx *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
static bool do_local_cprop (rtx, rtx, int, rtx *);
static bool adjust_libcall_notes (rtx, rtx, rtx, rtx *);
static void local_cprop_pass (int);
static bool is_too_expensive (const char *);
\f

/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (rtx f, FILE *file)
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= 1 || is_too_expensive (_("GCSE disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();
  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
        fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
         and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
         during this pass.  */
      changed = one_cprop_pass (pass + 1, 0, 0);

      if (optimize_size)
        changed |= one_classic_gcse_pass (pass + 1);
      else
        {
          changed |= one_pre_gcse_pass (pass + 1);
          /* We may have just created new basic blocks.  Release and
             recompute various things which are sized on the number of
             basic blocks.  */
          if (changed)
            {
              free_modify_mem_tables ();
              modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
              canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
            }
          free_reg_set_mem ();
          alloc_reg_set_mem (max_reg_num ());
          compute_sets (f);
          run_jump_opt_after_gcse = 1;
        }

      if (max_pass_bytes < bytes_used)
        max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We cannot
         reuse the existing allocated memory because the tables will not
         have info for the insns or registers created by partial
         redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
         for code size -- it rarely makes programs faster, and can make
         them bigger if we did partial redundancy elimination (when optimizing
         for space, we use a classic gcse algorithm instead of partial
         redundancy algorithms).  */
      if (optimize_size)
        {
          max_gcse_regno = max_reg_num ();
          alloc_gcse_mem (f);
          changed |= one_code_hoisting_pass ();
          free_gcse_mem ();

          if (max_pass_bytes < bytes_used)
            max_pass_bytes = bytes_used;
        }

      if (file)
        {
          fprintf (file, "\n");
          fflush (file);
        }

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1, 0);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
               current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
               pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();
  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  if (!optimize_size && flag_gcse_sm)
    store_motion ();

  return run_jump_opt_after_gcse;
}
\f
/* Misc. utilities.  */

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy[(int) NUM_MACHINE_MODES];

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy (void)
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
        can_copy[i] = 0;
#else
        reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
        insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
        if (recog (PATTERN (insn), insn, NULL) >= 0)
          can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;

  end_sequence ();
}

/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (enum machine_mode mode)
{
  static bool can_copy_init_p = false;

  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}
\f
/* Cover function to xmalloc to record bytes allocated.  */

static void *
gmalloc (size_t size)
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xcalloc to record bytes allocated.  */

static void *
gcalloc (size_t nelem, size_t elsize)
{
  bytes_used += nelem * elsize;
  return xcalloc (nelem, elsize);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static void *
grealloc (void *ptr, size_t size)
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static void *
gcse_alloc (unsigned long size)
{
  bytes_used += size;
  return obstack_alloc (&gcse_obstack, size);
}

/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (rtx f)
{
  int i;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  uid_cuid = gcalloc (max_uid + 1, sizeof (int));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        uid_cuid[INSN_UID (insn)] = i++;
      else
        uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  cuid_insn = gcalloc (max_cuid + 1, sizeof (rtx));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_XMALLOC ();

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  modify_mem_list_set = BITMAP_XMALLOC ();
  canon_modify_mem_list_set = BITMAP_XMALLOC ();
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem (void)
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_XFREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_XFREE (modify_mem_list_set);
  BITMAP_XFREE (canon_modify_mem_list_set);
}

/* Many of the global optimization algorithms work by solving dataflow
   equations for various expressions.  Initially, some local value is
   computed for each expression in each block.  Then, the values across the
   various blocks are combined (by following flow graph edges) to arrive at
   global values.  Conceptually, each set of equations is independent.  We
   may therefore solve all the equations in parallel, solve them one at a
   time, or pick any intermediate approach.

   When you're going to need N two-dimensional bitmaps, each X (say, the
   number of blocks) by Y (say, the number of expressions), call this
   function.  It's not important what X and Y represent; only that Y
   correspond to the things that can be done in parallel.  This function will
   return an appropriate chunking factor C; you should solve C sets of
   equations in parallel.  By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less space.  */

static int
get_bitmap_width (int n, int x, int y)
{
  /* It's not really worth figuring out *exactly* how much memory will
     be used by a particular choice.  The important thing is to get
     something approximately right.  */
  size_t max_bitmap_memory = 10 * 1024 * 1024;

  /* The number of bytes we'd use for a single column of minimum
     width.  */
  size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);

  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;

  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
                             / column_size);
}
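
/* A worked example with made-up numbers: for N = 2 bitmaps over X = 4000
   blocks and 8-byte sbitmap elements, one column costs 2 * 4000 * 8 = 64000
   bytes.  The 10MB budget divided by that is about 164, so the function
   returns roughly 164 * SBITMAP_ELT_BITS equations to solve per chunk.  */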
\f
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.
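
   For example (hypothetical): in a block containing "r4 = r2 + r3" followed
   later by "r2 = r9", the expression r2 + r3 is locally anticipatable (it
   would compute the same value at the block's start) but not computed (r2
   changes before the block's end), and it is not transparent (the block
   kills it for downstream blocks).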
1082 | ||
c4c81601 RK |
1083 | We call this routine for cprop, pre and code hoisting. They all compute |
1084 | basically the same information and thus can easily share this code. | |
7506f491 | 1085 | |
c4c81601 RK |
1086 | TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local |
1087 | properties. If NULL, then it is not necessary to compute or record that | |
1088 | particular property. | |
b5ce41ff | 1089 | |
02280659 ZD |
1090 | TABLE controls which hash table to look at. If it is set hash table, |
1091 | additionally, TRANSP is computed as ~TRANSP, since this is really cprop's | |
c4c81601 | 1092 | ABSALTERED. */ |
589005ff | 1093 | |
b5ce41ff | 1094 | static void |
1d088dee | 1095 | compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc, struct hash_table *table) |
b5ce41ff | 1096 | { |
02280659 | 1097 | unsigned int i; |
589005ff | 1098 | |
b5ce41ff JL |
1099 | /* Initialize any bitmaps that were passed in. */ |
1100 | if (transp) | |
695ab36a | 1101 | { |
02280659 | 1102 | if (table->set_p) |
d55bc081 | 1103 | sbitmap_vector_zero (transp, last_basic_block); |
695ab36a | 1104 | else |
d55bc081 | 1105 | sbitmap_vector_ones (transp, last_basic_block); |
695ab36a | 1106 | } |
c4c81601 | 1107 | |
b5ce41ff | 1108 | if (comp) |
d55bc081 | 1109 | sbitmap_vector_zero (comp, last_basic_block); |
b5ce41ff | 1110 | if (antloc) |
d55bc081 | 1111 | sbitmap_vector_zero (antloc, last_basic_block); |
b5ce41ff | 1112 | |
02280659 | 1113 | for (i = 0; i < table->size; i++) |
7506f491 | 1114 | { |
b5ce41ff JL |
1115 | struct expr *expr; |
1116 | ||
02280659 | 1117 | for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash) |
b5ce41ff | 1118 | { |
b5ce41ff | 1119 | int indx = expr->bitmap_index; |
c4c81601 | 1120 | struct occr *occr; |
b5ce41ff JL |
1121 | |
1122 | /* The expression is transparent in this block if it is not killed. | |
1123 | We start by assuming all are transparent [none are killed], and | |
1124 | then reset the bits for those that are. */ | |
b5ce41ff | 1125 | if (transp) |
02280659 | 1126 | compute_transp (expr->expr, indx, transp, table->set_p); |
b5ce41ff JL |
1127 | |
1128 | /* The occurrences recorded in antic_occr are exactly those that | |
cc2902df | 1129 | we want to set to nonzero in ANTLOC. */ |
b5ce41ff | 1130 | if (antloc) |
c4c81601 RK |
1131 | for (occr = expr->antic_occr; occr != NULL; occr = occr->next) |
1132 | { | |
1133 | SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx); | |
b5ce41ff | 1134 | |
c4c81601 RK |
1135 | /* While we're scanning the table, this is a good place to |
1136 | initialize this. */ | |
1137 | occr->deleted_p = 0; | |
1138 | } | |
b5ce41ff JL |
1139 | |
1140 | /* The occurrences recorded in avail_occr are exactly those that | |
cc2902df | 1141 | we want to set to nonzero in COMP. */ |
b5ce41ff | 1142 | if (comp) |
c4c81601 RK |
1143 | for (occr = expr->avail_occr; occr != NULL; occr = occr->next) |
1144 | { | |
1145 | SET_BIT (comp[BLOCK_NUM (occr->insn)], indx); | |
b5ce41ff | 1146 | |
c4c81601 RK |
1147 | /* While we're scanning the table, this is a good place to |
1148 | initialize this. */ | |
1149 | occr->copied_p = 0; | |
1150 | } | |
b5ce41ff JL |
1151 | |
1152 | /* While we're scanning the table, this is a good place to | |
1153 | initialize this. */ | |
1154 | expr->reaching_reg = 0; | |
1155 | } | |
7506f491 | 1156 | } |
7506f491 DE |
1157 | } |
1158 | \f | |
1159 | /* Register set information. | |
1160 | ||
1161 | `reg_set_table' records where each register is set or otherwise | |
1162 | modified. */ | |
1163 | ||
1164 | static struct obstack reg_set_obstack; | |
1165 | ||
1166 | static void | |
1d088dee | 1167 | alloc_reg_set_mem (int n_regs) |
7506f491 | 1168 | { |
7506f491 | 1169 | reg_set_table_size = n_regs + REG_SET_TABLE_SLOP; |
9fe15a12 | 1170 | reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *)); |
7506f491 DE |
1171 | |
1172 | gcc_obstack_init (®_set_obstack); | |
1173 | } | |
1174 | ||
1175 | static void | |
1d088dee | 1176 | free_reg_set_mem (void) |
7506f491 DE |
1177 | { |
1178 | free (reg_set_table); | |
6496a589 | 1179 | obstack_free (®_set_obstack, NULL); |
7506f491 DE |
1180 | } |
1181 | ||
b885908b MH |
1182 | /* An OLD_INSN that used to set REGNO was replaced by NEW_INSN. |
1183 | Update the corresponding `reg_set_table' entry accordingly. | |
1184 | We assume that NEW_INSN is not already recorded in reg_set_table[regno]. */ | |
1185 | ||
1186 | static void | |
1187 | replace_one_set (int regno, rtx old_insn, rtx new_insn) | |
1188 | { | |
1189 | struct reg_set *reg_info; | |
1190 | if (regno >= reg_set_table_size) | |
1191 | return; | |
1192 | for (reg_info = reg_set_table[regno]; reg_info; reg_info = reg_info->next) | |
1193 | if (reg_info->insn == old_insn) | |
1194 | { | |
1195 | reg_info->insn = new_insn; | |
1196 | break; | |
1197 | } | |
1198 | } | |
1199 | ||
7506f491 DE |
1200 | /* Record REGNO in the reg_set table. */ |
1201 | ||
1202 | static void | |
1d088dee | 1203 | record_one_set (int regno, rtx insn) |
7506f491 | 1204 | { |
172890a2 | 1205 | /* Allocate a new reg_set element and link it onto the list. */ |
63bc1d05 | 1206 | struct reg_set *new_reg_info; |
7506f491 DE |
1207 | |
1208 | /* If the table isn't big enough, enlarge it. */ | |
1209 | if (regno >= reg_set_table_size) | |
1210 | { | |
1211 | int new_size = regno + REG_SET_TABLE_SLOP; | |
c4c81601 | 1212 | |
703ad42b KG |
1213 | reg_set_table = grealloc (reg_set_table, |
1214 | new_size * sizeof (struct reg_set *)); | |
1215 | memset (reg_set_table + reg_set_table_size, 0, | |
8e42ace1 | 1216 | (new_size - reg_set_table_size) * sizeof (struct reg_set *)); |
7506f491 DE |
1217 | reg_set_table_size = new_size; |
1218 | } | |
1219 | ||
703ad42b | 1220 | new_reg_info = obstack_alloc (®_set_obstack, sizeof (struct reg_set)); |
7506f491 DE |
1221 | bytes_used += sizeof (struct reg_set); |
1222 | new_reg_info->insn = insn; | |
274969ea MM |
1223 | new_reg_info->next = reg_set_table[regno]; |
1224 | reg_set_table[regno] = new_reg_info; | |
7506f491 DE |
1225 | } |
1226 | ||
c4c81601 RK |
1227 | /* Called from compute_sets via note_stores to handle one SET or CLOBBER in |
1228 | an insn. The DATA is really the instruction in which the SET is | |
1229 | occurring. */ | |
7506f491 DE |
1230 | |
1231 | static void | |
1d088dee | 1232 | record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data) |
7506f491 | 1233 | { |
84832317 MM |
1234 | rtx record_set_insn = (rtx) data; |
1235 | ||
c4c81601 RK |
1236 | if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER) |
1237 | record_one_set (REGNO (dest), record_set_insn); | |
7506f491 DE |
1238 | } |
1239 | ||
1240 | /* Scan the function and record each set of each pseudo-register. | |
1241 | ||
c4c81601 | 1242 | This is called once, at the start of the gcse pass. See the comments for |
fbe5a4a6 | 1243 | `reg_set_table' for further documentation. */ |
7506f491 DE |
1244 | |
1245 | static void | |
1d088dee | 1246 | compute_sets (rtx f) |
7506f491 | 1247 | { |
c4c81601 | 1248 | rtx insn; |
7506f491 | 1249 | |
c4c81601 | 1250 | for (insn = f; insn != 0; insn = NEXT_INSN (insn)) |
2c3c49de | 1251 | if (INSN_P (insn)) |
c4c81601 | 1252 | note_stores (PATTERN (insn), record_set_info, insn); |
7506f491 DE |
1253 | } |
1254 | \f | |
1255 | /* Hash table support. */ | |
1256 | ||
80c29cc4 RZ |
1257 | struct reg_avail_info |
1258 | { | |
e0082a72 | 1259 | basic_block last_bb; |
80c29cc4 RZ |
1260 | int first_set; |
1261 | int last_set; | |
1262 | }; | |
1263 | ||
1264 | static struct reg_avail_info *reg_avail_info; | |
e0082a72 | 1265 | static basic_block current_bb; |
7506f491 | 1266 | |
7506f491 | 1267 | |
fb0c0a12 RK |
1268 | /* See whether X, the source of a set, is something we want to consider for |
1269 | GCSE. */ | |
7506f491 | 1270 | |
e2500fed | 1271 | static GTY(()) rtx test_insn; |
7506f491 | 1272 | static int |
1d088dee | 1273 | want_to_gcse_p (rtx x) |
7506f491 | 1274 | { |
fb0c0a12 RK |
1275 | int num_clobbers = 0; |
1276 | int icode; | |
1277 | ||
c4c81601 | 1278 | switch (GET_CODE (x)) |
7506f491 DE |
1279 | { |
1280 | case REG: | |
1281 | case SUBREG: | |
1282 | case CONST_INT: | |
1283 | case CONST_DOUBLE: | |
69ef87e2 | 1284 | case CONST_VECTOR: |
7506f491 | 1285 | case CALL: |
34ee7f82 | 1286 | case CONSTANT_P_RTX: |
7506f491 DE |
1287 | return 0; |
1288 | ||
1289 | default: | |
1290 | break; | |
1291 | } | |
1292 | ||
fb0c0a12 RK |
1293 | /* If this is a valid operand, we are OK. If it's VOIDmode, we aren't. */ |
1294 | if (general_operand (x, GET_MODE (x))) | |
1295 | return 1; | |
1296 | else if (GET_MODE (x) == VOIDmode) | |
1297 | return 0; | |
1298 | ||
1299 | /* Otherwise, check if we can make a valid insn from it. First initialize | |
1300 | our test insn if we haven't already. */ | |
1301 | if (test_insn == 0) | |
1302 | { | |
1303 | test_insn | |
1304 | = make_insn_raw (gen_rtx_SET (VOIDmode, | |
1305 | gen_rtx_REG (word_mode, | |
1306 | FIRST_PSEUDO_REGISTER * 2), | |
1307 | const0_rtx)); | |
1308 | NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0; | |
fb0c0a12 RK |
1309 | } |
1310 | ||
1311 | /* Now make an insn like the one we would make when GCSE'ing and see if | |
1312 | valid. */ | |
1313 | PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x)); | |
1314 | SET_SRC (PATTERN (test_insn)) = x; | |
1315 | return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0 | |
1316 | && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode))); | |
7506f491 DE |
1317 | } |
1318 | ||
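/* Illustrative note on the test_insn trick above: a scratch insn
   (set (reg:word_mode N) (const_int 0)) is built once, with N chosen as
   FIRST_PSEUDO_REGISTER * 2 so it cannot denote a hard register, and is
   then retargeted for each query by changing its destination mode and
   SET_SRC.  recog () answers whether (set (reg) X) matches some insn
   pattern, and added_clobbers_hard_reg_p rejects patterns whose implicit
   clobbers would touch hard registers.  */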
cc2902df | 1319 | /* Return nonzero if the operands of expression X are unchanged from the |
7506f491 DE |
1320 | start of INSN's basic block up to but not including INSN (if AVAIL_P == 0), |
1321 | or from INSN to the end of INSN's basic block (if AVAIL_P != 0). */ | |
1322 | ||
1323 | static int | |
1d088dee | 1324 | oprs_unchanged_p (rtx x, rtx insn, int avail_p) |
7506f491 | 1325 | { |
c4c81601 | 1326 | int i, j; |
7506f491 | 1327 | enum rtx_code code; |
6f7d635c | 1328 | const char *fmt; |
7506f491 | 1329 | |
7506f491 DE |
1330 | if (x == 0) |
1331 | return 1; | |
1332 | ||
1333 | code = GET_CODE (x); | |
1334 | switch (code) | |
1335 | { | |
1336 | case REG: | |
80c29cc4 RZ |
1337 | { |
1338 | struct reg_avail_info *info = ®_avail_info[REGNO (x)]; | |
1339 | ||
1340 | if (info->last_bb != current_bb) | |
1341 | return 1; | |
589005ff | 1342 | if (avail_p) |
80c29cc4 RZ |
1343 | return info->last_set < INSN_CUID (insn); |
1344 | else | |
1345 | return info->first_set >= INSN_CUID (insn); | |
1346 | } | |
7506f491 DE |
1347 | |
1348 | case MEM: | |
e0082a72 | 1349 | if (load_killed_in_block_p (current_bb, INSN_CUID (insn), |
a13d4ebf AM |
1350 | x, avail_p)) |
1351 | return 0; | |
7506f491 | 1352 | else |
c4c81601 | 1353 | return oprs_unchanged_p (XEXP (x, 0), insn, avail_p); |
7506f491 DE |
1354 | |
1355 | case PRE_DEC: | |
1356 | case PRE_INC: | |
1357 | case POST_DEC: | |
1358 | case POST_INC: | |
4b983fdc RH |
1359 | case PRE_MODIFY: |
1360 | case POST_MODIFY: | |
7506f491 DE |
1361 | return 0; |
1362 | ||
1363 | case PC: | |
1364 | case CC0: /*FIXME*/ | |
1365 | case CONST: | |
1366 | case CONST_INT: | |
1367 | case CONST_DOUBLE: | |
69ef87e2 | 1368 | case CONST_VECTOR: |
7506f491 DE |
1369 | case SYMBOL_REF: |
1370 | case LABEL_REF: | |
1371 | case ADDR_VEC: | |
1372 | case ADDR_DIFF_VEC: | |
1373 | return 1; | |
1374 | ||
1375 | default: | |
1376 | break; | |
1377 | } | |
1378 | ||
c4c81601 | 1379 | for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--) |
7506f491 DE |
1380 | { |
1381 | if (fmt[i] == 'e') | |
1382 | { | |
c4c81601 RK |
1383 | /* If we are about to do the last recursive call needed at this |
1384 | level, change it into iteration. This function is called enough | |
1385 | to be worth it. */ | |
7506f491 | 1386 | if (i == 0) |
c4c81601 RK |
1387 | return oprs_unchanged_p (XEXP (x, i), insn, avail_p); |
1388 | ||
1389 | else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p)) | |
7506f491 DE |
1390 | return 0; |
1391 | } | |
1392 | else if (fmt[i] == 'E') | |
c4c81601 RK |
1393 | for (j = 0; j < XVECLEN (x, i); j++) |
1394 | if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p)) | |
1395 | return 0; | |
7506f491 DE |
1396 | } |
1397 | ||
1398 | return 1; | |
1399 | } | |
1400 | ||
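/* Worked example (illustrative): suppose reg 70 has first_set == 12 and
   last_set == 20 (CUIDs) in the current block, and INSN has CUID 15.
   For the anticipatable query (AVAIL_P == 0) we need the reg unset before
   INSN, so we test first_set >= 15, which fails: reg 70 was set at CUID
   12.  For the available query (AVAIL_P != 0) we need it unset after
   INSN, so we test last_set < 15, which also fails: it is set again at
   CUID 20.  Had INSN's CUID been 25, last_set < 25 would hold and the
   operand would be available.  */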
a13d4ebf AM |
1401 | /* Used for communication between mems_conflict_for_gcse_p and |
1402 | load_killed_in_block_p. Nonzero if mems_conflict_for_gcse_p finds a | |
1403 | conflict between two memory references. */ | |
1404 | static int gcse_mems_conflict_p; | |
1405 | ||
1406 | /* Used for communication between mems_conflict_for_gcse_p and | |
1407 | load_killed_in_block_p. The memory reference for a load instruction; |
1408 | mems_conflict_for_gcse_p will check whether a memory store conflicts |
1409 | with this memory load. */ |
1410 | static rtx gcse_mem_operand; | |
1411 | ||
1412 | /* DEST is the output of an instruction. If it is a memory reference, and | |
1413 | possibly conflicts with the load found in gcse_mem_operand, then set | |
1414 | gcse_mems_conflict_p to a nonzero value. */ | |
1415 | ||
1416 | static void | |
1d088dee AJ |
1417 | mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED, |
1418 | void *data ATTRIBUTE_UNUSED) | |
a13d4ebf AM |
1419 | { |
1420 | while (GET_CODE (dest) == SUBREG | |
1421 | || GET_CODE (dest) == ZERO_EXTRACT | |
1422 | || GET_CODE (dest) == SIGN_EXTRACT | |
1423 | || GET_CODE (dest) == STRICT_LOW_PART) | |
1424 | dest = XEXP (dest, 0); | |
1425 | ||
1426 | /* If DEST is not a MEM, then it will not conflict with the load. Note | |
1427 | that function calls are assumed to clobber memory, but are handled | |
1428 | elsewhere. */ | |
1429 | if (GET_CODE (dest) != MEM) | |
1430 | return; | |
aaa4ca30 | 1431 | |
a13d4ebf | 1432 | /* If we are setting a MEM in our list of specially recognized MEMs, |
589005ff KH |
1433 | don't mark it as killed this time. */ |
1434 | ||
47a3dae1 | 1435 | if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL) |
a13d4ebf AM |
1436 | { |
1437 | if (!find_rtx_in_ldst (dest)) | |
1438 | gcse_mems_conflict_p = 1; | |
1439 | return; | |
1440 | } | |
aaa4ca30 | 1441 | |
a13d4ebf AM |
1442 | if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand, |
1443 | rtx_addr_varies_p)) | |
1444 | gcse_mems_conflict_p = 1; | |
1445 | } | |
1446 | ||
1447 | /* Return nonzero if the expression in X (a memory reference) is killed | |
1448 | in block BB before or after the insn with the CUID in UID_LIMIT. | |
1449 | AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills | |
1450 | before UID_LIMIT. | |
1451 | ||
1452 | To check the entire block, set UID_LIMIT to max_uid + 1 and | |
1453 | AVAIL_P to 0. */ | |
1454 | ||
1455 | static int | |
1d088dee | 1456 | load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p) |
a13d4ebf | 1457 | { |
0b17ab2f | 1458 | rtx list_entry = modify_mem_list[bb->index]; |
a13d4ebf AM |
1459 | while (list_entry) |
1460 | { | |
1461 | rtx setter; | |
1462 | /* Ignore entries in the list that do not apply. */ | |
1463 | if ((avail_p | |
1464 | && INSN_CUID (XEXP (list_entry, 0)) < uid_limit) | |
1465 | || (! avail_p | |
1466 | && INSN_CUID (XEXP (list_entry, 0)) > uid_limit)) | |
1467 | { | |
1468 | list_entry = XEXP (list_entry, 1); | |
1469 | continue; | |
1470 | } | |
1471 | ||
1472 | setter = XEXP (list_entry, 0); | |
1473 | ||
1474 | /* If SETTER is a call everything is clobbered. Note that calls | |
1475 | to pure functions are never put on the list, so we need not | |
1476 | worry about them. */ | |
1477 | if (GET_CODE (setter) == CALL_INSN) | |
1478 | return 1; | |
1479 | ||
1480 | /* SETTER must be an INSN of some kind that sets memory. Call | |
589005ff | 1481 | note_stores to examine each hunk of memory that is modified. |
a13d4ebf AM |
1482 | |
1483 | The note_stores interface is pretty limited, so we have to | |
1484 | communicate via global variables. Yuk. */ | |
1485 | gcse_mem_operand = x; | |
1486 | gcse_mems_conflict_p = 0; | |
1487 | note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL); | |
1488 | if (gcse_mems_conflict_p) | |
1489 | return 1; | |
1490 | list_entry = XEXP (list_entry, 1); | |
1491 | } | |
1492 | return 0; | |
1493 | } | |
1494 | ||
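/* Worked example (illustrative): let modify_mem_list for BB hold insns
   with CUIDs 30, 22 and 10 (most recently recorded first).  With
   UID_LIMIT == 20 and AVAIL_P == 0 we ask about kills before CUID 20, so
   the entries at CUIDs 30 and 22 are skipped (their CUID > UID_LIMIT)
   and only the store at CUID 10 is checked against X.  With AVAIL_P != 0
   the filter inverts: CUID 10 is skipped and 30 and 22 are checked.  */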
cc2902df | 1495 | /* Return nonzero if the operands of expression X are unchanged from |
7506f491 DE |
1496 | the start of INSN's basic block up to but not including INSN. */ |
1497 | ||
1498 | static int | |
1d088dee | 1499 | oprs_anticipatable_p (rtx x, rtx insn) |
7506f491 DE |
1500 | { |
1501 | return oprs_unchanged_p (x, insn, 0); | |
1502 | } | |
1503 | ||
cc2902df | 1504 | /* Return nonzero if the operands of expression X are unchanged from |
7506f491 DE |
1505 | INSN to the end of INSN's basic block. */ |
1506 | ||
1507 | static int | |
1d088dee | 1508 | oprs_available_p (rtx x, rtx insn) |
7506f491 DE |
1509 | { |
1510 | return oprs_unchanged_p (x, insn, 1); | |
1511 | } | |
1512 | ||
1513 | /* Hash expression X. | |
c4c81601 RK |
1514 | |
1515 | MODE is only used if X is a CONST_INT. DO_NOT_RECORD_P is a boolean | |
1516 | indicating if a volatile operand is found or if the expression contains | |
b58b21d5 RS |
1517 | something we don't want to insert in the table. HASH_TABLE_SIZE is |
1518 | the current size of the hash table to be probed. | |
7506f491 DE |
1519 | |
1520 | ??? One might want to merge this with canon_hash. Later. */ | |
1521 | ||
1522 | static unsigned int | |
b58b21d5 RS |
1523 | hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p, |
1524 | int hash_table_size) | |
7506f491 DE |
1525 | { |
1526 | unsigned int hash; | |
1527 | ||
1528 | *do_not_record_p = 0; | |
1529 | ||
1530 | hash = hash_expr_1 (x, mode, do_not_record_p); | |
1531 | return hash % hash_table_size; | |
1532 | } | |
172890a2 | 1533 | |
6462bb43 | 1534 | /* Hash a string. Just add its bytes up. */ |
172890a2 | 1535 | |
6462bb43 | 1536 | static inline unsigned |
1d088dee | 1537 | hash_string_1 (const char *ps) |
6462bb43 AO |
1538 | { |
1539 | unsigned hash = 0; | |
8e42ace1 | 1540 | const unsigned char *p = (const unsigned char *) ps; |
589005ff | 1541 | |
6462bb43 AO |
1542 | if (p) |
1543 | while (*p) | |
1544 | hash += *p++; | |
1545 | ||
1546 | return hash; | |
1547 | } | |
7506f491 DE |
1548 | |
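/* Example (illustrative): hash_string_1 ("ab") sums the byte values and
   returns 'a' + 'b' == 97 + 98 == 195.  Collisions such as
   hash_string_1 ("ba") == 195 are harmless here, since the hash only
   spreads entries across buckets; expr_equiv_p still performs the exact
   comparison.  */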
1549 | /* Subroutine of hash_expr to do the actual work. */ | |
1550 | ||
1551 | static unsigned int | |
1d088dee | 1552 | hash_expr_1 (rtx x, enum machine_mode mode, int *do_not_record_p) |
7506f491 DE |
1553 | { |
1554 | int i, j; | |
1555 | unsigned hash = 0; | |
1556 | enum rtx_code code; | |
6f7d635c | 1557 | const char *fmt; |
7506f491 | 1558 | |
c4c81601 | 1559 | /* Used to turn recursion into iteration. We can't rely on GCC's |
fbe5a4a6 | 1560 | tail-recursion elimination since we need to keep accumulating values |
c4c81601 | 1561 | in HASH. */ |
7506f491 DE |
1562 | |
1563 | if (x == 0) | |
1564 | return hash; | |
1565 | ||
c4c81601 | 1566 | repeat: |
7506f491 DE |
1567 | code = GET_CODE (x); |
1568 | switch (code) | |
1569 | { | |
1570 | case REG: | |
c4c81601 RK |
1571 | hash += ((unsigned int) REG << 7) + REGNO (x); |
1572 | return hash; | |
7506f491 DE |
1573 | |
1574 | case CONST_INT: | |
c4c81601 RK |
1575 | hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode |
1576 | + (unsigned int) INTVAL (x)); | |
1577 | return hash; | |
7506f491 DE |
1578 | |
1579 | case CONST_DOUBLE: | |
1580 | /* This is like the general case, except that it only counts | |
1581 | the integers representing the constant. */ | |
c4c81601 | 1582 | hash += (unsigned int) code + (unsigned int) GET_MODE (x); |
7506f491 DE |
1583 | if (GET_MODE (x) != VOIDmode) |
1584 | for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++) | |
c4c81601 | 1585 | hash += (unsigned int) XWINT (x, i); |
7506f491 | 1586 | else |
c4c81601 RK |
1587 | hash += ((unsigned int) CONST_DOUBLE_LOW (x) |
1588 | + (unsigned int) CONST_DOUBLE_HIGH (x)); | |
7506f491 DE |
1589 | return hash; |
1590 | ||
69ef87e2 AH |
1591 | case CONST_VECTOR: |
1592 | { | |
1593 | int units; | |
1594 | rtx elt; | |
1595 | ||
1596 | units = CONST_VECTOR_NUNITS (x); | |
1597 | ||
1598 | for (i = 0; i < units; ++i) | |
1599 | { | |
1600 | elt = CONST_VECTOR_ELT (x, i); | |
1601 | hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p); | |
1602 | } | |
1603 | ||
1604 | return hash; | |
1605 | } | |
1606 | ||
7506f491 DE |
1607 | /* Assume there is only one rtx object for any given label. */ |
1608 | case LABEL_REF: | |
1609 | /* We don't hash on the address of the CODE_LABEL to avoid bootstrap | |
1610 | differences and differences between each stage's debugging dumps. */ | |
c4c81601 RK |
1611 | hash += (((unsigned int) LABEL_REF << 7) |
1612 | + CODE_LABEL_NUMBER (XEXP (x, 0))); | |
7506f491 DE |
1613 | return hash; |
1614 | ||
1615 | case SYMBOL_REF: | |
1616 | { | |
1617 | /* Don't hash on the symbol's address to avoid bootstrap differences. | |
1618 | Different hash values may cause expressions to be recorded in | |
1619 | different orders and thus different registers to be used in the | |
1620 | final assembler. This also avoids differences in the dump files | |
1621 | between various stages. */ | |
1622 | unsigned int h = 0; | |
3cce094d | 1623 | const unsigned char *p = (const unsigned char *) XSTR (x, 0); |
c4c81601 | 1624 | |
7506f491 DE |
1625 | while (*p) |
1626 | h += (h << 7) + *p++; /* ??? revisit */ | |
c4c81601 RK |
1627 | |
1628 | hash += ((unsigned int) SYMBOL_REF << 7) + h; | |
7506f491 DE |
1629 | return hash; |
1630 | } | |
1631 | ||
1632 | case MEM: | |
1633 | if (MEM_VOLATILE_P (x)) | |
1634 | { | |
1635 | *do_not_record_p = 1; | |
1636 | return 0; | |
1637 | } | |
c4c81601 RK |
1638 | |
1639 | hash += (unsigned int) MEM; | |
d51f3632 JH |
1640 | /* We used to use the alias set for hashing, but this is not good, since |
1641 | the alias set may differ between -fprofile-arcs and -fbranch-probabilities |
1642 | compilations, causing the profiles to fail to match. */ |
7506f491 DE |
1643 | x = XEXP (x, 0); |
1644 | goto repeat; | |
1645 | ||
1646 | case PRE_DEC: | |
1647 | case PRE_INC: | |
1648 | case POST_DEC: | |
1649 | case POST_INC: | |
1650 | case PC: | |
1651 | case CC0: | |
1652 | case CALL: | |
1653 | case UNSPEC_VOLATILE: | |
1654 | *do_not_record_p = 1; | |
1655 | return 0; | |
1656 | ||
1657 | case ASM_OPERANDS: | |
1658 | if (MEM_VOLATILE_P (x)) | |
1659 | { | |
1660 | *do_not_record_p = 1; | |
1661 | return 0; | |
1662 | } | |
6462bb43 AO |
1663 | else |
1664 | { | |
1665 | /* We don't want to take the filename and line into account. */ | |
1666 | hash += (unsigned) code + (unsigned) GET_MODE (x) | |
1667 | + hash_string_1 (ASM_OPERANDS_TEMPLATE (x)) | |
1668 | + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x)) | |
1669 | + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x); | |
1670 | ||
1671 | if (ASM_OPERANDS_INPUT_LENGTH (x)) | |
1672 | { | |
1673 | for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++) | |
1674 | { | |
1675 | hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i), | |
1676 | GET_MODE (ASM_OPERANDS_INPUT (x, i)), | |
1677 | do_not_record_p) | |
1678 | + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT | |
1679 | (x, i))); | |
1680 | } | |
1681 | ||
1682 | hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0)); | |
1683 | x = ASM_OPERANDS_INPUT (x, 0); | |
1684 | mode = GET_MODE (x); | |
1685 | goto repeat; | |
1686 | } | |
1687 | return hash; | |
1688 | } | |
7506f491 DE |
1689 | |
1690 | default: | |
1691 | break; | |
1692 | } | |
1693 | ||
7506f491 | 1694 | hash += (unsigned) code + (unsigned) GET_MODE (x); |
c4c81601 | 1695 | for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--) |
7506f491 DE |
1696 | { |
1697 | if (fmt[i] == 'e') | |
1698 | { | |
7506f491 DE |
1699 | /* If we are about to do the last recursive call |
1700 | needed at this level, change it into iteration. | |
1701 | This function is called enough to be worth it. */ | |
1702 | if (i == 0) | |
1703 | { | |
c4c81601 | 1704 | x = XEXP (x, i); |
7506f491 DE |
1705 | goto repeat; |
1706 | } | |
c4c81601 RK |
1707 | |
1708 | hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p); | |
7506f491 DE |
1709 | if (*do_not_record_p) |
1710 | return 0; | |
1711 | } | |
c4c81601 | 1712 | |
7506f491 DE |
1713 | else if (fmt[i] == 'E') |
1714 | for (j = 0; j < XVECLEN (x, i); j++) | |
1715 | { | |
1716 | hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p); | |
1717 | if (*do_not_record_p) | |
1718 | return 0; | |
1719 | } | |
c4c81601 | 1720 | |
7506f491 | 1721 | else if (fmt[i] == 's') |
6462bb43 | 1722 | hash += hash_string_1 (XSTR (x, i)); |
7506f491 | 1723 | else if (fmt[i] == 'i') |
c4c81601 | 1724 | hash += (unsigned int) XINT (x, i); |
7506f491 DE |
1725 | else |
1726 | abort (); | |
1727 | } | |
1728 | ||
1729 | return hash; | |
1730 | } | |
1731 | ||
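/* Worked example (illustrative): hashing (plus:SI (reg:SI 70)
   (const_int 4)).  The default case first adds
   (unsigned) PLUS + (unsigned) SImode.  The operand loop then runs from
   the last operand down: the CONST_INT is hashed by a recursive call
   (with MODE passed as 0), adding ((unsigned) CONST_INT << 7) + 0 + 4,
   and the i == 0 operand is turned into iteration by the `goto repeat',
   so the REG case adds ((unsigned) REG << 7) + 70 without another
   stack frame.  */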
1732 | /* Hash a set of register REGNO. | |
1733 | ||
c4c81601 RK |
1734 | Sets are hashed on the register that is set. This simplifies the PRE copy |
1735 | propagation code. | |
7506f491 DE |
1736 | |
1737 | ??? May need to make things more elaborate. Later, as necessary. */ | |
1738 | ||
1739 | static unsigned int | |
1d088dee | 1740 | hash_set (int regno, int hash_table_size) |
7506f491 DE |
1741 | { |
1742 | unsigned int hash; | |
1743 | ||
1744 | hash = regno; | |
1745 | return hash % hash_table_size; | |
1746 | } | |
1747 | ||
cc2902df | 1748 | /* Return nonzero if exp1 is equivalent to exp2. |
7506f491 DE |
1749 | ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */ |
1750 | ||
1751 | static int | |
1d088dee | 1752 | expr_equiv_p (rtx x, rtx y) |
7506f491 | 1753 | { |
b3694847 SS |
1754 | int i, j; |
1755 | enum rtx_code code; | |
1756 | const char *fmt; | |
7506f491 DE |
1757 | |
1758 | if (x == y) | |
1759 | return 1; | |
c4c81601 | 1760 | |
7506f491 | 1761 | if (x == 0 || y == 0) |
ebd7a7af | 1762 | return 0; |
7506f491 DE |
1763 | |
1764 | code = GET_CODE (x); | |
1765 | if (code != GET_CODE (y)) | |
1766 | return 0; | |
1767 | ||
1768 | /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */ | |
1769 | if (GET_MODE (x) != GET_MODE (y)) | |
1770 | return 0; | |
1771 | ||
1772 | switch (code) | |
1773 | { | |
1774 | case PC: | |
1775 | case CC0: | |
7506f491 | 1776 | case CONST_INT: |
ebd7a7af | 1777 | return 0; |
7506f491 DE |
1778 | |
1779 | case LABEL_REF: | |
1780 | return XEXP (x, 0) == XEXP (y, 0); | |
1781 | ||
1782 | case SYMBOL_REF: | |
1783 | return XSTR (x, 0) == XSTR (y, 0); | |
1784 | ||
1785 | case REG: | |
1786 | return REGNO (x) == REGNO (y); | |
1787 | ||
297c3335 RH |
1788 | case MEM: |
1789 | /* Can't merge two expressions in different alias sets, since we can |
1790 | wrongly decide that the expression is transparent in a block when it |
1791 | isn't, due to it being set with a different alias set. */ |
1792 | if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y)) | |
1793 | return 0; | |
bad998e0 ZD |
1794 | |
1795 | /* A volatile mem should not be considered equivalent to any other. */ | |
1796 | if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y)) | |
1797 | return 0; | |
297c3335 RH |
1798 | break; |
1799 | ||
7506f491 DE |
1800 | /* For commutative operations, check both orders. */ |
1801 | case PLUS: | |
1802 | case MULT: | |
1803 | case AND: | |
1804 | case IOR: | |
1805 | case XOR: | |
1806 | case NE: | |
1807 | case EQ: | |
1808 | return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0)) | |
1809 | && expr_equiv_p (XEXP (x, 1), XEXP (y, 1))) | |
1810 | || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1)) | |
1811 | && expr_equiv_p (XEXP (x, 1), XEXP (y, 0)))); | |
1812 | ||
6462bb43 AO |
1813 | case ASM_OPERANDS: |
1814 | /* We don't use the generic code below because we want to | |
1815 | disregard filename and line numbers. */ | |
1816 | ||
1817 | /* A volatile asm isn't equivalent to any other. */ | |
1818 | if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y)) | |
1819 | return 0; | |
1820 | ||
1821 | if (GET_MODE (x) != GET_MODE (y) | |
1822 | || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y)) | |
1823 | || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x), | |
1824 | ASM_OPERANDS_OUTPUT_CONSTRAINT (y)) | |
1825 | || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y) | |
1826 | || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y)) | |
1827 | return 0; | |
1828 | ||
1829 | if (ASM_OPERANDS_INPUT_LENGTH (x)) | |
1830 | { | |
1831 | for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--) | |
1832 | if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i), | |
1833 | ASM_OPERANDS_INPUT (y, i)) | |
1834 | || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i), | |
1835 | ASM_OPERANDS_INPUT_CONSTRAINT (y, i))) | |
1836 | return 0; | |
1837 | } | |
1838 | ||
1839 | return 1; | |
1840 | ||
7506f491 DE |
1841 | default: |
1842 | break; | |
1843 | } | |
1844 | ||
1845 | /* Compare the elements. If any pair of corresponding elements | |
1846 | fail to match, return 0 for the whole thing. */ | |
1847 | ||
1848 | fmt = GET_RTX_FORMAT (code); | |
1849 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
1850 | { | |
1851 | switch (fmt[i]) | |
1852 | { | |
1853 | case 'e': | |
1854 | if (! expr_equiv_p (XEXP (x, i), XEXP (y, i))) | |
1855 | return 0; | |
1856 | break; | |
1857 | ||
1858 | case 'E': | |
1859 | if (XVECLEN (x, i) != XVECLEN (y, i)) | |
1860 | return 0; | |
1861 | for (j = 0; j < XVECLEN (x, i); j++) | |
1862 | if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j))) | |
1863 | return 0; | |
1864 | break; | |
1865 | ||
1866 | case 's': | |
1867 | if (strcmp (XSTR (x, i), XSTR (y, i))) | |
1868 | return 0; | |
1869 | break; | |
1870 | ||
1871 | case 'i': | |
1872 | if (XINT (x, i) != XINT (y, i)) | |
1873 | return 0; | |
1874 | break; | |
1875 | ||
1876 | case 'w': | |
1877 | if (XWINT (x, i) != XWINT (y, i)) | |
1878 | return 0; | |
1879 | break; | |
1880 | ||
1881 | case '0': | |
1882 | break; | |
aaa4ca30 | 1883 | |
7506f491 DE |
1884 | default: |
1885 | abort (); | |
1886 | } | |
8e42ace1 | 1887 | } |
7506f491 DE |
1888 | |
1889 | return 1; | |
1890 | } | |
1891 | ||
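/* Example (illustrative): expr_equiv_p treats
   (plus:SI (reg 70) (const_int 4)) and (plus:SI (const_int 4) (reg 70))
   as equivalent, because PLUS takes the commutative path and both
   operand orders are tried.  It does not treat (mult:SI x y) and
   (mult:HI x y) as equivalent: modes are compared before the
   code-specific cases are reached.  */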
02280659 | 1892 | /* Insert expression X in INSN in the hash TABLE. |
7506f491 DE |
1893 | If it is already present, record it as the last occurrence in INSN's |
1894 | basic block. | |
1895 | ||
1896 | MODE is the mode of the value X is being stored into. | |
1897 | It is only used if X is a CONST_INT. | |
1898 | ||
cc2902df KH |
1899 | ANTIC_P is nonzero if X is an anticipatable expression. |
1900 | AVAIL_P is nonzero if X is an available expression. */ | |
7506f491 DE |
1901 | |
1902 | static void | |
1d088dee AJ |
1903 | insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p, |
1904 | int avail_p, struct hash_table *table) | |
7506f491 DE |
1905 | { |
1906 | int found, do_not_record_p; | |
1907 | unsigned int hash; | |
1908 | struct expr *cur_expr, *last_expr = NULL; | |
1909 | struct occr *antic_occr, *avail_occr; | |
1910 | struct occr *last_occr = NULL; | |
1911 | ||
02280659 | 1912 | hash = hash_expr (x, mode, &do_not_record_p, table->size); |
7506f491 DE |
1913 | |
1914 | /* Do not insert expression in table if it contains volatile operands, | |
1915 | or if hash_expr determines the expression is something we don't want | |
1916 | to or can't handle. */ | |
1917 | if (do_not_record_p) | |
1918 | return; | |
1919 | ||
02280659 | 1920 | cur_expr = table->table[hash]; |
7506f491 DE |
1921 | found = 0; |
1922 | ||
c4c81601 | 1923 | while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x))) |
7506f491 DE |
1924 | { |
1925 | /* If the expression isn't found, save a pointer to the end of | |
1926 | the list. */ | |
1927 | last_expr = cur_expr; | |
1928 | cur_expr = cur_expr->next_same_hash; | |
1929 | } | |
1930 | ||
1931 | if (! found) | |
1932 | { | |
703ad42b | 1933 | cur_expr = gcse_alloc (sizeof (struct expr)); |
7506f491 | 1934 | bytes_used += sizeof (struct expr); |
02280659 | 1935 | if (table->table[hash] == NULL) |
c4c81601 | 1936 | /* This is the first pattern that hashed to this index. */ |
02280659 | 1937 | table->table[hash] = cur_expr; |
7506f491 | 1938 | else |
c4c81601 RK |
1939 | /* Add EXPR to end of this hash chain. */ |
1940 | last_expr->next_same_hash = cur_expr; | |
1941 | ||
589005ff | 1942 | /* Set the fields of the expr element. */ |
7506f491 | 1943 | cur_expr->expr = x; |
02280659 | 1944 | cur_expr->bitmap_index = table->n_elems++; |
7506f491 DE |
1945 | cur_expr->next_same_hash = NULL; |
1946 | cur_expr->antic_occr = NULL; | |
1947 | cur_expr->avail_occr = NULL; | |
1948 | } | |
1949 | ||
1950 | /* Now record the occurrence(s). */ | |
7506f491 DE |
1951 | if (antic_p) |
1952 | { | |
1953 | antic_occr = cur_expr->antic_occr; | |
1954 | ||
1955 | /* Search for another occurrence in the same basic block. */ | |
1956 | while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn)) | |
1957 | { | |
1958 | /* If an occurrence isn't found, save a pointer to the end of | |
1959 | the list. */ | |
1960 | last_occr = antic_occr; | |
1961 | antic_occr = antic_occr->next; | |
1962 | } | |
1963 | ||
1964 | if (antic_occr) | |
c4c81601 RK |
1965 | /* Found another instance of the expression in the same basic block. |
1966 | Prefer the currently recorded one. We want the first one in the | |
1967 | block and the block is scanned from start to end. */ | |
1968 | ; /* nothing to do */ | |
7506f491 DE |
1969 | else |
1970 | { | |
1971 | /* First occurrence of this expression in this basic block. */ | |
703ad42b | 1972 | antic_occr = gcse_alloc (sizeof (struct occr)); |
7506f491 DE |
1973 | bytes_used += sizeof (struct occr); |
1974 | /* First occurrence of this expression in any block? */ | |
1975 | if (cur_expr->antic_occr == NULL) | |
1976 | cur_expr->antic_occr = antic_occr; | |
1977 | else | |
1978 | last_occr->next = antic_occr; | |
c4c81601 | 1979 | |
7506f491 DE |
1980 | antic_occr->insn = insn; |
1981 | antic_occr->next = NULL; | |
1982 | } | |
1983 | } | |
1984 | ||
1985 | if (avail_p) | |
1986 | { | |
1987 | avail_occr = cur_expr->avail_occr; | |
1988 | ||
1989 | /* Search for another occurrence in the same basic block. */ | |
1990 | while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn)) | |
1991 | { | |
1992 | /* If an occurrence isn't found, save a pointer to the end of | |
1993 | the list. */ | |
1994 | last_occr = avail_occr; | |
1995 | avail_occr = avail_occr->next; | |
1996 | } | |
1997 | ||
1998 | if (avail_occr) | |
c4c81601 RK |
1999 | /* Found another instance of the expression in the same basic block. |
2000 | Prefer this occurrence to the currently recorded one. We want | |
2001 | the last one in the block and the block is scanned from start | |
2002 | to end. */ | |
2003 | avail_occr->insn = insn; | |
7506f491 DE |
2004 | else |
2005 | { | |
2006 | /* First occurrence of this expression in this basic block. */ | |
703ad42b | 2007 | avail_occr = gcse_alloc (sizeof (struct occr)); |
7506f491 | 2008 | bytes_used += sizeof (struct occr); |
c4c81601 | 2009 | |
7506f491 DE |
2010 | /* First occurrence of this expression in any block? */ |
2011 | if (cur_expr->avail_occr == NULL) | |
2012 | cur_expr->avail_occr = avail_occr; | |
2013 | else | |
2014 | last_occr->next = avail_occr; | |
c4c81601 | 2015 | |
7506f491 DE |
2016 | avail_occr->insn = insn; |
2017 | avail_occr->next = NULL; | |
2018 | } | |
2019 | } | |
2020 | } | |
2021 | ||
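/* Illustrative recap of the asymmetry above: if a block contains the
   expression at insns I1 and then I2, the antic_occr chain keeps I1
   (anticipatability is a property of the block entry, and blocks are
   scanned from start to end), while avail_occr->insn is overwritten so
   that I2 wins (availability is a property of the block exit).  */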
2022 | /* Insert pattern X in INSN in the hash table. | |
2023 | X is a SET of a reg to either another reg or a constant. | |
2024 | If it is already present, record it as the last occurrence in INSN's | |
2025 | basic block. */ | |
2026 | ||
2027 | static void | |
1d088dee | 2028 | insert_set_in_table (rtx x, rtx insn, struct hash_table *table) |
7506f491 DE |
2029 | { |
2030 | int found; | |
2031 | unsigned int hash; | |
2032 | struct expr *cur_expr, *last_expr = NULL; | |
2033 | struct occr *cur_occr, *last_occr = NULL; | |
2034 | ||
2035 | if (GET_CODE (x) != SET | |
2036 | || GET_CODE (SET_DEST (x)) != REG) | |
2037 | abort (); | |
2038 | ||
02280659 | 2039 | hash = hash_set (REGNO (SET_DEST (x)), table->size); |
7506f491 | 2040 | |
02280659 | 2041 | cur_expr = table->table[hash]; |
7506f491 DE |
2042 | found = 0; |
2043 | ||
c4c81601 | 2044 | while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x))) |
7506f491 DE |
2045 | { |
2046 | /* If the expression isn't found, save a pointer to the end of | |
2047 | the list. */ | |
2048 | last_expr = cur_expr; | |
2049 | cur_expr = cur_expr->next_same_hash; | |
2050 | } | |
2051 | ||
2052 | if (! found) | |
2053 | { | |
703ad42b | 2054 | cur_expr = gcse_alloc (sizeof (struct expr)); |
7506f491 | 2055 | bytes_used += sizeof (struct expr); |
02280659 | 2056 | if (table->table[hash] == NULL) |
c4c81601 | 2057 | /* This is the first pattern that hashed to this index. */ |
02280659 | 2058 | table->table[hash] = cur_expr; |
7506f491 | 2059 | else |
c4c81601 RK |
2060 | /* Add EXPR to end of this hash chain. */ |
2061 | last_expr->next_same_hash = cur_expr; | |
2062 | ||
7506f491 DE |
2063 | /* Set the fields of the expr element. |
2064 | We must copy X because it can be modified when copy propagation is | |
2065 | performed on its operands. */ | |
7506f491 | 2066 | cur_expr->expr = copy_rtx (x); |
02280659 | 2067 | cur_expr->bitmap_index = table->n_elems++; |
7506f491 DE |
2068 | cur_expr->next_same_hash = NULL; |
2069 | cur_expr->antic_occr = NULL; | |
2070 | cur_expr->avail_occr = NULL; | |
2071 | } | |
2072 | ||
2073 | /* Now record the occurrence. */ | |
7506f491 DE |
2074 | cur_occr = cur_expr->avail_occr; |
2075 | ||
2076 | /* Search for another occurrence in the same basic block. */ | |
2077 | while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn)) | |
2078 | { | |
2079 | /* If an occurrence isn't found, save a pointer to the end of | |
2080 | the list. */ | |
2081 | last_occr = cur_occr; | |
2082 | cur_occr = cur_occr->next; | |
2083 | } | |
2084 | ||
2085 | if (cur_occr) | |
c4c81601 RK |
2086 | /* Found another instance of the expression in the same basic block. |
2087 | Prefer this occurrence to the currently recorded one. We want the | |
2088 | last one in the block and the block is scanned from start to end. */ | |
2089 | cur_occr->insn = insn; | |
7506f491 DE |
2090 | else |
2091 | { | |
2092 | /* First occurrence of this expression in this basic block. */ | |
703ad42b | 2093 | cur_occr = gcse_alloc (sizeof (struct occr)); |
7506f491 | 2094 | bytes_used += sizeof (struct occr); |
c4c81601 | 2095 | |
7506f491 DE |
2096 | /* First occurrence of this expression in any block? */ |
2097 | if (cur_expr->avail_occr == NULL) | |
2098 | cur_expr->avail_occr = cur_occr; | |
2099 | else | |
2100 | last_occr->next = cur_occr; | |
c4c81601 | 2101 | |
7506f491 DE |
2102 | cur_occr->insn = insn; |
2103 | cur_occr->next = NULL; | |
2104 | } | |
2105 | } | |
2106 | ||
6b2d1c9e RS |
2107 | /* Determine whether the rtx X should be treated as a constant for |
2108 | the purposes of GCSE's constant propagation. */ | |
2109 | ||
2110 | static bool | |
1d088dee | 2111 | gcse_constant_p (rtx x) |
6b2d1c9e RS |
2112 | { |
2113 | /* Consider a COMPARE of two integers constant. */ | |
2114 | if (GET_CODE (x) == COMPARE | |
2115 | && GET_CODE (XEXP (x, 0)) == CONST_INT | |
2116 | && GET_CODE (XEXP (x, 1)) == CONST_INT) | |
2117 | return true; | |
2118 | ||
db2f435b AP |
2119 | |
2120 | /* Consider a COMPARE of the same registers to be a constant |
938d968e | 2121 | if they are not floating point registers. */ |
db2f435b AP |
2122 | if (GET_CODE (x) == COMPARE |
2123 | && GET_CODE (XEXP (x, 0)) == REG | |
2124 | && GET_CODE (XEXP (x, 1)) == REG | |
2125 | && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1)) | |
2126 | && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))) | |
2127 | && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1)))) | |
2128 | return true; | |
2129 | ||
6b2d1c9e RS |
2130 | if (GET_CODE (x) == CONSTANT_P_RTX) |
2131 | return false; | |
2132 | ||
2133 | return CONSTANT_P (x); | |
2134 | } | |
2135 | ||
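/* Examples (illustrative): (compare (const_int 1) (const_int 2)) is
   treated as constant, as is (compare (reg:SI 70) (reg:SI 70)): a
   register in a non-floating mode always compares equal to itself.  The
   same COMPARE in SFmode is rejected, since x == x can be false for
   NaNs.  (compare (reg 70) (reg 71)) falls through to CONSTANT_P,
   which is false for a COMPARE.  */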
02280659 ZD |
2136 | /* Scan pattern PAT of INSN and add an entry to the hash TABLE (either |
2137 | the set table or the expression table). */ |
7506f491 DE |
2138 | |
2139 | static void | |
1d088dee | 2140 | hash_scan_set (rtx pat, rtx insn, struct hash_table *table) |
7506f491 DE |
2141 | { |
2142 | rtx src = SET_SRC (pat); | |
2143 | rtx dest = SET_DEST (pat); | |
172890a2 | 2144 | rtx note; |
7506f491 DE |
2145 | |
2146 | if (GET_CODE (src) == CALL) | |
02280659 | 2147 | hash_scan_call (src, insn, table); |
7506f491 | 2148 | |
172890a2 | 2149 | else if (GET_CODE (dest) == REG) |
7506f491 | 2150 | { |
172890a2 | 2151 | unsigned int regno = REGNO (dest); |
7506f491 DE |
2152 | rtx tmp; |
2153 | ||
172890a2 RK |
2154 | /* If this is a single set and we are doing constant propagation, |
2155 | see if a REG_NOTE shows this equivalent to a constant. */ | |
02280659 | 2156 | if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0 |
6b2d1c9e | 2157 | && gcse_constant_p (XEXP (note, 0))) |
172890a2 RK |
2158 | src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src); |
2159 | ||
7506f491 | 2160 | /* Only record sets of pseudo-regs in the hash table. */ |
02280659 | 2161 | if (! table->set_p |
7506f491 DE |
2162 | && regno >= FIRST_PSEUDO_REGISTER |
2163 | /* Don't GCSE something if we can't do a reg/reg copy. */ | |
773eae39 | 2164 | && can_copy_p (GET_MODE (dest)) |
068473ec JH |
2165 | /* GCSE commonly inserts instructions after the insn. We can't |
2166 | do that easily for EH_REGION notes, so disable GCSE on these |
2167 | for now. */ |
2168 | && !find_reg_note (insn, REG_EH_REGION, NULL_RTX) | |
7506f491 | 2169 | /* Is SET_SRC something we want to gcse? */ |
172890a2 RK |
2170 | && want_to_gcse_p (src) |
2171 | /* Don't CSE a nop. */ | |
43e72072 JJ |
2172 | && ! set_noop_p (pat) |
2173 | /* Don't GCSE if it has attached REG_EQUIV note. | |
2174 | At this point only function parameters should have |
2175 | REG_EQUIV notes and if the argument slot is used somewhere | |
a1f300c0 | 2176 | explicitly, it means the address of the parameter has been taken, |
43e72072 JJ |
2177 | so we should not extend the lifetime of the pseudo. */ |
2178 | && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0 | |
2179 | || GET_CODE (XEXP (note, 0)) != MEM)) | |
7506f491 DE |
2180 | { |
2181 | /* An expression is not anticipatable if its operands are | |
52d76e11 RK |
2182 | modified before this insn or if this is not the only SET in |
2183 | this insn. */ | |
2184 | int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn); | |
7506f491 | 2185 | /* An expression is not available if its operands are |
eb296bd9 GK |
2186 | subsequently modified, including this insn. It's also not |
2187 | available if this is a branch, because we can't insert | |
2188 | a set after the branch. */ | |
2189 | int avail_p = (oprs_available_p (src, insn) | |
2190 | && ! JUMP_P (insn)); | |
c4c81601 | 2191 | |
02280659 | 2192 | insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table); |
7506f491 | 2193 | } |
c4c81601 | 2194 | |
7506f491 | 2195 | /* Record sets for constant/copy propagation. */ |
02280659 | 2196 | else if (table->set_p |
7506f491 DE |
2197 | && regno >= FIRST_PSEUDO_REGISTER |
2198 | && ((GET_CODE (src) == REG | |
2199 | && REGNO (src) >= FIRST_PSEUDO_REGISTER | |
773eae39 | 2200 | && can_copy_p (GET_MODE (dest)) |
172890a2 | 2201 | && REGNO (src) != regno) |
6b2d1c9e | 2202 | || gcse_constant_p (src)) |
7506f491 DE |
2203 | /* A copy is not available if its src or dest is subsequently |
2204 | modified. Here we want to search from INSN+1 on, but | |
2205 | oprs_available_p searches from INSN on. */ | |
2206 | && (insn == BLOCK_END (BLOCK_NUM (insn)) | |
2207 | || ((tmp = next_nonnote_insn (insn)) != NULL_RTX | |
2208 | && oprs_available_p (pat, tmp)))) | |
02280659 | 2209 | insert_set_in_table (pat, insn, table); |
7506f491 | 2210 | } |
d91edf86 | 2211 | /* In the case of a store we want to consider the memory value as available in |
f5f2e3cd MH |
2212 | the REG stored in that memory. This makes it possible to remove |
2213 | redundant loads due to stores to the same location. */ |
2214 | else if (flag_gcse_las && GET_CODE (src) == REG && GET_CODE (dest) == MEM) | |
2215 | { | |
2216 | unsigned int regno = REGNO (src); | |
2217 | ||
2218 | /* Do not do this for constant/copy propagation. */ | |
2219 | if (! table->set_p | |
2220 | /* Only record sets of pseudo-regs in the hash table. */ | |
2221 | && regno >= FIRST_PSEUDO_REGISTER | |
2222 | /* Don't GCSE something if we can't do a reg/reg copy. */ | |
2223 | && can_copy_p (GET_MODE (src)) | |
2224 | /* GCSE commonly inserts instructions after the insn. We can't |
2225 | do that easily for EH_REGION notes, so disable GCSE on these |
2226 | for now. */ | |
2227 | && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX) | |
2228 | /* Is SET_DEST something we want to gcse? */ | |
2229 | && want_to_gcse_p (dest) | |
2230 | /* Don't CSE a nop. */ | |
2231 | && ! set_noop_p (pat) | |
2232 | /* Don't GCSE if it has attached REG_EQUIV note. | |
2233 | At this point only function parameters should have |
2234 | REG_EQUIV notes and if the argument slot is used somewhere | |
2235 | explicitly, it means the address of the parameter has been taken, |
2236 | so we should not extend the lifetime of the pseudo. */ | |
2237 | && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0 | |
2238 | || GET_CODE (XEXP (note, 0)) != MEM)) | |
2239 | { | |
2240 | /* Stores are never anticipatable. */ | |
2241 | int antic_p = 0; | |
2242 | /* An expression is not available if its operands are | |
2243 | subsequently modified, including this insn. It's also not | |
2244 | available if this is a branch, because we can't insert | |
2245 | a set after the branch. */ | |
2246 | int avail_p = oprs_available_p (dest, insn) | |
2247 | && ! JUMP_P (insn); | |
2248 | ||
2249 | /* Record the memory expression (DEST) in the hash table. */ | |
2250 | insert_expr_in_table (dest, GET_MODE (dest), insn, | |
2251 | antic_p, avail_p, table); | |
2252 | } | |
2253 | } | |
7506f491 DE |
2254 | } |
2255 | ||
2256 | static void | |
1d088dee AJ |
2257 | hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED, |
2258 | struct hash_table *table ATTRIBUTE_UNUSED) | |
7506f491 DE |
2259 | { |
2260 | /* Currently nothing to do. */ | |
2261 | } | |
2262 | ||
2263 | static void | |
1d088dee AJ |
2264 | hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED, |
2265 | struct hash_table *table ATTRIBUTE_UNUSED) | |
7506f491 DE |
2266 | { |
2267 | /* Currently nothing to do. */ | |
2268 | } | |
2269 | ||
2270 | /* Process INSN and add hash table entries as appropriate. | |
2271 | ||
2272 | Only available expressions that set a single pseudo-reg are recorded. | |
2273 | ||
2274 | Single sets in a PARALLEL could be handled, but it's an extra complication | |
2275 | that isn't dealt with right now. The trick is handling the CLOBBERs that | |
2276 | are also in the PARALLEL. Later. | |
2277 | ||
cc2902df | 2278 | If SET_P is nonzero, this is for the assignment hash table, |
ed79bb3d R |
2279 | otherwise it is for the expression hash table. |
2280 | If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should |
2281 | not record any expressions. */ | |
7506f491 DE |
2282 | |
2283 | static void | |
1d088dee | 2284 | hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block) |
7506f491 DE |
2285 | { |
2286 | rtx pat = PATTERN (insn); | |
c4c81601 | 2287 | int i; |
7506f491 | 2288 | |
172890a2 RK |
2289 | if (in_libcall_block) |
2290 | return; | |
2291 | ||
7506f491 DE |
2292 | /* Pick out the sets of INSN and for other forms of instructions record |
2293 | what's been modified. */ | |
2294 | ||
172890a2 | 2295 | if (GET_CODE (pat) == SET) |
02280659 | 2296 | hash_scan_set (pat, insn, table); |
7506f491 | 2297 | else if (GET_CODE (pat) == PARALLEL) |
c4c81601 RK |
2298 | for (i = 0; i < XVECLEN (pat, 0); i++) |
2299 | { | |
2300 | rtx x = XVECEXP (pat, 0, i); | |
7506f491 | 2301 | |
c4c81601 | 2302 | if (GET_CODE (x) == SET) |
02280659 | 2303 | hash_scan_set (x, insn, table); |
c4c81601 | 2304 | else if (GET_CODE (x) == CLOBBER) |
02280659 | 2305 | hash_scan_clobber (x, insn, table); |
c4c81601 | 2306 | else if (GET_CODE (x) == CALL) |
02280659 | 2307 | hash_scan_call (x, insn, table); |
c4c81601 | 2308 | } |
7506f491 | 2309 | |
7506f491 | 2310 | else if (GET_CODE (pat) == CLOBBER) |
02280659 | 2311 | hash_scan_clobber (pat, insn, table); |
7506f491 | 2312 | else if (GET_CODE (pat) == CALL) |
02280659 | 2313 | hash_scan_call (pat, insn, table); |
7506f491 DE |
2314 | } |
2315 | ||
2316 | static void | |
1d088dee | 2317 | dump_hash_table (FILE *file, const char *name, struct hash_table *table) |
7506f491 DE |
2318 | { |
2319 | int i; | |
2320 | /* Flattened out table, so it's printed in proper order. */ | |
4da896b2 MM |
2321 | struct expr **flat_table; |
2322 | unsigned int *hash_val; | |
c4c81601 | 2323 | struct expr *expr; |
4da896b2 | 2324 | |
703ad42b KG |
2325 | flat_table = xcalloc (table->n_elems, sizeof (struct expr *)); |
2326 | hash_val = xmalloc (table->n_elems * sizeof (unsigned int)); | |
7506f491 | 2327 | |
02280659 ZD |
2328 | for (i = 0; i < (int) table->size; i++) |
2329 | for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash) | |
c4c81601 RK |
2330 | { |
2331 | flat_table[expr->bitmap_index] = expr; | |
2332 | hash_val[expr->bitmap_index] = i; | |
2333 | } | |
7506f491 DE |
2334 | |
2335 | fprintf (file, "%s hash table (%d buckets, %d entries)\n", | |
02280659 | 2336 | name, table->size, table->n_elems); |
7506f491 | 2337 | |
02280659 | 2338 | for (i = 0; i < (int) table->n_elems; i++) |
21318741 RK |
2339 | if (flat_table[i] != 0) |
2340 | { | |
a0ac9e5a | 2341 | expr = flat_table[i]; |
21318741 RK |
2342 | fprintf (file, "Index %d (hash value %d)\n ", |
2343 | expr->bitmap_index, hash_val[i]); | |
a0ac9e5a | 2344 | print_rtl (file, expr->expr); |
21318741 RK |
2345 | fprintf (file, "\n"); |
2346 | } | |
7506f491 DE |
2347 | |
2348 | fprintf (file, "\n"); | |
4da896b2 | 2349 | |
4da896b2 MM |
2350 | free (flat_table); |
2351 | free (hash_val); | |
7506f491 DE |
2352 | } |
2353 | ||
2354 | /* Record register first/last/block set information for REGNO in INSN. | |
c4c81601 | 2355 | |
80c29cc4 | 2356 | first_set records the first place in the block where the register |
7506f491 | 2357 | is set and is used to compute "anticipatability". |
c4c81601 | 2358 | |
80c29cc4 | 2359 | last_set records the last place in the block where the register |
7506f491 | 2360 | is set and is used to compute "availability". |
c4c81601 | 2361 | |
80c29cc4 RZ |
2362 | last_bb records the block for which first_set and last_set are |
2363 | valid, as a quick test to invalidate them. | |
2364 | ||
7506f491 DE |
2365 | reg_set_in_block records whether the register is set in the block |
2366 | and is used to compute "transparency". */ | |
2367 | ||
2368 | static void | |
1d088dee | 2369 | record_last_reg_set_info (rtx insn, int regno) |
7506f491 | 2370 | { |
80c29cc4 RZ |
2371 | struct reg_avail_info *info = ®_avail_info[regno]; |
2372 | int cuid = INSN_CUID (insn); | |
c4c81601 | 2373 | |
80c29cc4 RZ |
2374 | info->last_set = cuid; |
2375 | if (info->last_bb != current_bb) | |
2376 | { | |
2377 | info->last_bb = current_bb; | |
2378 | info->first_set = cuid; | |
e0082a72 | 2379 | SET_BIT (reg_set_in_block[current_bb->index], regno); |
80c29cc4 | 2380 | } |
7506f491 DE |
2381 | } |
2382 | ||
a13d4ebf AM |
2383 | |
2384 | /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn. | |
2385 | Note we store a pair of elements in the list, so they have to be | |
2386 | taken off pairwise. */ | |
2387 | ||
589005ff | 2388 | static void |
1d088dee AJ |
2389 | canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED, |
2390 | void * v_insn) | |
a13d4ebf AM |
2391 | { |
2392 | rtx dest_addr, insn; | |
0fe854a7 | 2393 | int bb; |
a13d4ebf AM |
2394 | |
2395 | while (GET_CODE (dest) == SUBREG | |
2396 | || GET_CODE (dest) == ZERO_EXTRACT | |
2397 | || GET_CODE (dest) == SIGN_EXTRACT | |
2398 | || GET_CODE (dest) == STRICT_LOW_PART) | |
2399 | dest = XEXP (dest, 0); | |
2400 | ||
2401 | /* If DEST is not a MEM, then it will not conflict with a load. Note | |
2402 | that function calls are assumed to clobber memory, but are handled | |
2403 | elsewhere. */ | |
2404 | ||
2405 | if (GET_CODE (dest) != MEM) | |
2406 | return; | |
2407 | ||
2408 | dest_addr = get_addr (XEXP (dest, 0)); | |
2409 | dest_addr = canon_rtx (dest_addr); | |
589005ff | 2410 | insn = (rtx) v_insn; |
0fe854a7 | 2411 | bb = BLOCK_NUM (insn); |
a13d4ebf | 2412 | |
589005ff | 2413 | canon_modify_mem_list[bb] = |
0fe854a7 | 2414 | alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]); |
589005ff | 2415 | canon_modify_mem_list[bb] = |
0fe854a7 RH |
2416 | alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]); |
2417 | bitmap_set_bit (canon_modify_mem_list_set, bb); | |
a13d4ebf AM |
2418 | } |
2419 | ||
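/* Illustrative sketch (not part of gcse.c): canon_modify_mem_list stores
   two nodes per recorded store -- the MEM at the head, its canonicalized
   address immediately after -- so consumers must pop nodes in pairs.  A
   simplified standalone model of that pairwise walk, with strings
   standing in for rtx payloads:  */

#include <stdio.h>

struct model_node { const char *payload; struct model_node *next; };

static void
model_walk_pairs (struct model_node *list)
{
  /* Each iteration consumes one (mem, address) pair, mirroring how
     consumers of canon_modify_mem_list take elements off pairwise.  */
  while (list)
    {
      const char *mem = list->payload;
      const char *addr = list->next->payload;
      printf ("store: mem %s at canonical address %s\n", mem, addr);
      list = list->next->next;
    }
}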
a13d4ebf AM |
2420 | /* Record memory modification information for INSN. We do not actually care |
2421 | about the memory location(s) that are set, or even how they are set (consider | |
2422 | a CALL_INSN). We merely need to record which insns modify memory. */ | |
7506f491 DE |
2423 | |
2424 | static void | |
1d088dee | 2425 | record_last_mem_set_info (rtx insn) |
7506f491 | 2426 | { |
0fe854a7 RH |
2427 | int bb = BLOCK_NUM (insn); |
2428 | ||
ccef9ef5 | 2429 | /* load_killed_in_block_p will handle the case of calls clobbering |
dc297297 | 2430 | everything. */ |
0fe854a7 RH |
2431 | modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]); |
2432 | bitmap_set_bit (modify_mem_list_set, bb); | |
a13d4ebf AM |
2433 | |
2434 | if (GET_CODE (insn) == CALL_INSN) | |
2435 | { | |
2436 | /* Note that traversals of this loop (other than for free-ing) | |
2437 | will break after encountering a CALL_INSN. So, there's no | |
dc297297 | 2438 | need to insert a pair of items, as canon_list_insert does. */ |
589005ff KH |
2439 | canon_modify_mem_list[bb] = |
2440 | alloc_INSN_LIST (insn, canon_modify_mem_list[bb]); | |
0fe854a7 | 2441 | bitmap_set_bit (canon_modify_mem_list_set, bb); |
a13d4ebf AM |
2442 | } |
2443 | else | |
0fe854a7 | 2444 | note_stores (PATTERN (insn), canon_list_insert, (void*) insn); |
7506f491 DE |
2445 | } |
2446 | ||
7506f491 | 2447 | /* Called from compute_hash_table via note_stores to handle one |
84832317 MM |
2448 | SET or CLOBBER in an insn. DATA is really the instruction in which |
2449 | the SET is taking place. */ | |
7506f491 DE |
2450 | |
2451 | static void | |
1d088dee | 2452 | record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data) |
7506f491 | 2453 | { |
84832317 MM |
2454 | rtx last_set_insn = (rtx) data; |
2455 | ||
7506f491 DE |
2456 | if (GET_CODE (dest) == SUBREG) |
2457 | dest = SUBREG_REG (dest); | |
2458 | ||
2459 | if (GET_CODE (dest) == REG) | |
2460 | record_last_reg_set_info (last_set_insn, REGNO (dest)); | |
2461 | else if (GET_CODE (dest) == MEM | |
2462 | /* Ignore pushes, they clobber nothing. */ | |
2463 | && ! push_operand (dest, GET_MODE (dest))) | |
2464 | record_last_mem_set_info (last_set_insn); | |
2465 | } | |
2466 | ||
2467 | /* Top level function to create an expression or assignment hash table. | |
2468 | ||
2469 | Expression entries are placed in the hash table if | |
2470 | - they are of the form (set (pseudo-reg) src), | |
2471 | - src is something we want to perform GCSE on, | |
2472 | - none of the operands are subsequently modified in the block | |
2473 | ||
2474 | Assignment entries are placed in the hash table if | |
2475 | - they are of the form (set (pseudo-reg) src), | |
2476 | - src is something we want to perform const/copy propagation on, | |
2477 | - none of the operands or target are subsequently modified in the block | |
c4c81601 | 2478 | |
7506f491 DE |
2479 | Currently src must be a pseudo-reg or a const_int. |
2480 | ||
02280659 | 2481 | TABLE is the table computed. */ |
7506f491 DE |
2482 | |
2483 | static void | |
1d088dee | 2484 | compute_hash_table_work (struct hash_table *table) |
7506f491 | 2485 | { |
80c29cc4 | 2486 | unsigned int i; |
7506f491 DE |
2487 | |
2488 | /* While we compute the hash table we also compute a bit array of which | |
2489 | registers are set in which blocks. | |
7506f491 DE |
2490 | ??? This isn't needed during const/copy propagation, but it's cheap to |
2491 | compute. Later. */ | |
d55bc081 | 2492 | sbitmap_vector_zero (reg_set_in_block, last_basic_block); |
7506f491 | 2493 | |
a13d4ebf | 2494 | /* re-Cache any INSN_LIST nodes we have allocated. */ |
73991d6a | 2495 | clear_modify_mem_tables (); |
7506f491 | 2496 | /* Some working arrays used to track first and last set in each block. */ |
703ad42b | 2497 | reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info)); |
80c29cc4 RZ |
2498 | |
2499 | for (i = 0; i < max_gcse_regno; ++i) | |
e0082a72 | 2500 | reg_avail_info[i].last_bb = NULL; |
7506f491 | 2501 | |
e0082a72 | 2502 | FOR_EACH_BB (current_bb) |
7506f491 DE |
2503 | { |
2504 | rtx insn; | |
770ae6cc | 2505 | unsigned int regno; |
ed79bb3d | 2506 | int in_libcall_block; |
7506f491 DE |
2507 | |
2508 | /* First pass over the instructions records information used to | |
2509 | determine when registers and memory are first and last set. | |
ccef9ef5 | 2510 | ??? hard-reg reg_set_in_block computation |
7506f491 DE |
2511 | could be moved to compute_sets since they currently don't change. */ |
2512 | ||
e0082a72 ZD |
2513 | for (insn = current_bb->head; |
2514 | insn && insn != NEXT_INSN (current_bb->end); | |
7506f491 DE |
2515 | insn = NEXT_INSN (insn)) |
2516 | { | |
2c3c49de | 2517 | if (! INSN_P (insn)) |
7506f491 DE |
2518 | continue; |
2519 | ||
2520 | if (GET_CODE (insn) == CALL_INSN) | |
2521 | { | |
19652adf | 2522 | bool clobbers_all = false; |
589005ff | 2523 | #ifdef NON_SAVING_SETJMP |
19652adf ZW |
2524 | if (NON_SAVING_SETJMP |
2525 | && find_reg_note (insn, REG_SETJMP, NULL_RTX)) | |
2526 | clobbers_all = true; | |
2527 | #endif | |
2528 | ||
7506f491 | 2529 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) |
19652adf ZW |
2530 | if (clobbers_all |
2531 | || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)) | |
7506f491 | 2532 | record_last_reg_set_info (insn, regno); |
c4c81601 | 2533 | |
24a28584 | 2534 | mark_call (insn); |
7506f491 DE |
2535 | } |
2536 | ||
84832317 | 2537 | note_stores (PATTERN (insn), record_last_set_info, insn); |
7506f491 DE |
2538 | } |
2539 | ||
fbef91d8 RS |
2540 | /* Insert implicit sets in the hash table. */ |
2541 | if (table->set_p | |
2542 | && implicit_sets[current_bb->index] != NULL_RTX) | |
2543 | hash_scan_set (implicit_sets[current_bb->index], | |
2544 | current_bb->head, table); | |
2545 | ||
7506f491 DE |
2546 | /* The next pass builds the hash table. */ |
2547 | ||
e0082a72 ZD |
2548 | for (insn = current_bb->head, in_libcall_block = 0; |
2549 | insn && insn != NEXT_INSN (current_bb->end); | |
7506f491 | 2550 | insn = NEXT_INSN (insn)) |
2c3c49de | 2551 | if (INSN_P (insn)) |
c4c81601 RK |
2552 | { |
2553 | if (find_reg_note (insn, REG_LIBCALL, NULL_RTX)) | |
589005ff | 2554 | in_libcall_block = 1; |
02280659 | 2555 | else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX)) |
589005ff | 2556 | in_libcall_block = 0; |
02280659 ZD |
2557 | hash_scan_insn (insn, table, in_libcall_block); |
2558 | if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX)) | |
589005ff | 2559 | in_libcall_block = 0; |
8e42ace1 | 2560 | } |
7506f491 DE |
2561 | } |
2562 | ||
80c29cc4 RZ |
2563 | free (reg_avail_info); |
2564 | reg_avail_info = NULL; | |
7506f491 DE |
2565 | } |
2566 | ||
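/* Pass structure (illustrative recap): for each block the first loop
   only records set information -- hard registers clobbered by calls via
   record_last_reg_set_info, and every SET destination via note_stores --
   so that by the time the second loop runs, oprs_anticipatable_p and
   oprs_available_p can be answered from reg_avail_info and
   modify_mem_list.  The second loop then calls hash_scan_insn, which
   returns immediately for insns inside a REG_LIBCALL .. REG_RETVAL
   region.  */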
02280659 | 2567 | /* Allocate space for the set/expr hash TABLE. |
7506f491 | 2568 | N_INSNS is the number of instructions in the function. |
02280659 ZD |
2569 | It is used to determine the number of buckets to use. |
2570 | SET_P determines whether set or expression table will | |
2571 | be created. */ | |
7506f491 DE |
2572 | |
2573 | static void | |
1d088dee | 2574 | alloc_hash_table (int n_insns, struct hash_table *table, int set_p) |
7506f491 DE |
2575 | { |
2576 | int n; | |
2577 | ||
02280659 ZD |
2578 | table->size = n_insns / 4; |
2579 | if (table->size < 11) | |
2580 | table->size = 11; | |
c4c81601 | 2581 | |
7506f491 DE |
2582 | /* Attempt to maintain efficient use of hash table. |
2583 | Making it an odd number is simplest for now. | |
2584 | ??? Later take some measurements. */ | |
02280659 ZD |
2585 | table->size |= 1; |
2586 | n = table->size * sizeof (struct expr *); | |
703ad42b | 2587 | table->table = gmalloc (n); |
02280659 | 2588 | table->set_p = set_p; |
7506f491 DE |
2589 | } |
2590 | ||
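/* Example (illustrative): with n_insns == 100 the table gets
   100 / 4 == 25 buckets, and 25 | 1 == 25, so the size is already odd.
   With n_insns == 96, 96 / 4 == 24 and 24 | 1 == 25.  With
   n_insns == 40 the initial 10 falls below the minimum and is bumped to
   11.  An odd bucket count reduces clustering when many hash values
   share low-order zero bits.  */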
02280659 | 2591 | /* Free things allocated by alloc_hash_table. */ |
7506f491 DE |
2592 | |
2593 | static void | |
1d088dee | 2594 | free_hash_table (struct hash_table *table) |
7506f491 | 2595 | { |
02280659 | 2596 | free (table->table); |
7506f491 DE |
2597 | } |
2598 | ||
02280659 ZD |
2599 | /* Compute the hash TABLE for doing copy/const propagation or |
2600 | expression hash table. */ | |
7506f491 DE |
2601 | |
2602 | static void | |
1d088dee | 2603 | compute_hash_table (struct hash_table *table) |
7506f491 DE |
2604 | { |
2605 | /* Initialize count of number of entries in hash table. */ | |
02280659 | 2606 | table->n_elems = 0; |
703ad42b | 2607 | memset (table->table, 0, table->size * sizeof (struct expr *)); |
7506f491 | 2608 | |
02280659 | 2609 | compute_hash_table_work (table); |
7506f491 DE |
2610 | } |
2611 | \f | |
2612 | /* Expression tracking support. */ | |
2613 | ||
02280659 | 2614 | /* Lookup pattern PAT in the expression TABLE. |
7506f491 DE |
2615 | The result is a pointer to the table entry, or NULL if not found. */ |
2616 | ||
2617 | static struct expr * | |
1d088dee | 2618 | lookup_expr (rtx pat, struct hash_table *table) |
7506f491 DE |
2619 | { |
2620 | int do_not_record_p; | |
2621 | unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p, | |
02280659 | 2622 | table->size); |
7506f491 DE |
2623 | struct expr *expr; |
2624 | ||
2625 | if (do_not_record_p) | |
2626 | return NULL; | |
2627 | ||
02280659 | 2628 | expr = table->table[hash]; |
7506f491 DE |
2629 | |
2630 | while (expr && ! expr_equiv_p (expr->expr, pat)) | |
2631 | expr = expr->next_same_hash; | |
2632 | ||
2633 | return expr; | |
2634 | } | |
2635 | ||
ceda50e9 RH |
2636 | /* Lookup REGNO in the set TABLE. The result is a pointer to the |
2637 | table entry, or NULL if not found. */ | |
7506f491 DE |
2638 | |
2639 | static struct expr * | |
1d088dee | 2640 | lookup_set (unsigned int regno, struct hash_table *table) |
7506f491 | 2641 | { |
02280659 | 2642 | unsigned int hash = hash_set (regno, table->size); |
7506f491 DE |
2643 | struct expr *expr; |
2644 | ||
02280659 | 2645 | expr = table->table[hash]; |
7506f491 | 2646 | |
ceda50e9 RH |
2647 | while (expr && REGNO (SET_DEST (expr->expr)) != regno) |
2648 | expr = expr->next_same_hash; | |
7506f491 DE |
2649 | |
2650 | return expr; | |
2651 | } | |
2652 | ||
2653 | /* Return the next entry for REGNO in list EXPR. */ | |
2654 | ||
2655 | static struct expr * | |
1d088dee | 2656 | next_set (unsigned int regno, struct expr *expr) |
7506f491 DE |
2657 | { |
2658 | do | |
2659 | expr = expr->next_same_hash; | |
2660 | while (expr && REGNO (SET_DEST (expr->expr)) != regno); | |
c4c81601 | 2661 | |
7506f491 DE |
2662 | return expr; |
2663 | } | |
2664 | ||
0fe854a7 RH |
2665 | /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node |
2666 | types may be mixed. */ | |
2667 | ||
2668 | static void | |
1d088dee | 2669 | free_insn_expr_list_list (rtx *listp) |
0fe854a7 RH |
2670 | { |
2671 | rtx list, next; | |
2672 | ||
2673 | for (list = *listp; list ; list = next) | |
2674 | { | |
2675 | next = XEXP (list, 1); | |
2676 | if (GET_CODE (list) == EXPR_LIST) | |
2677 | free_EXPR_LIST_node (list); | |
2678 | else | |
2679 | free_INSN_LIST_node (list); | |
2680 | } | |
2681 | ||
2682 | *listp = NULL; | |
2683 | } | |
2684 | ||
73991d6a JH |
2685 | /* Clear canon_modify_mem_list and modify_mem_list tables. */ |
2686 | static void | |
1d088dee | 2687 | clear_modify_mem_tables (void) |
73991d6a JH |
2688 | { |
2689 | int i; | |
2690 | ||
2691 | EXECUTE_IF_SET_IN_BITMAP | |
0fe854a7 RH |
2692 | (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i)); |
2693 | bitmap_clear (modify_mem_list_set); | |
73991d6a JH |
2694 | |
2695 | EXECUTE_IF_SET_IN_BITMAP | |
2696 | (canon_modify_mem_list_set, 0, i, | |
0fe854a7 RH |
2697 | free_insn_expr_list_list (canon_modify_mem_list + i)); |
2698 | bitmap_clear (canon_modify_mem_list_set); | |
73991d6a JH |
2699 | } |
2700 | ||
2701 | /* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */ | |
2702 | ||
2703 | static void | |
1d088dee | 2704 | free_modify_mem_tables (void) |
73991d6a JH |
2705 | { |
2706 | clear_modify_mem_tables (); | |
2707 | free (modify_mem_list); | |
2708 | free (canon_modify_mem_list); | |
2709 | modify_mem_list = 0; | |
2710 | canon_modify_mem_list = 0; | |
2711 | } | |
2712 | ||
7506f491 DE |
2713 | /* Reset tables used to keep track of what's still available [since the |
2714 | start of the block]. */ | |
2715 | ||
2716 | static void | |
1d088dee | 2717 | reset_opr_set_tables (void) |
7506f491 DE |
2718 | { |
2719 | /* Maintain a bitmap of which regs have been set since beginning of | |
2720 | the block. */ | |
73991d6a | 2721 | CLEAR_REG_SET (reg_set_bitmap); |
c4c81601 | 2722 | |
7506f491 DE |
2723 | /* Also keep a record of the last instruction to modify memory. |
2724 | For now this is very trivial: we only record whether any memory
2725 | location has been modified. */ | |
73991d6a | 2726 | clear_modify_mem_tables (); |
7506f491 DE |
2727 | } |
2728 | ||
cc2902df | 2729 | /* Return nonzero if the operands of X are not set before INSN in |
7506f491 DE |
2730 | INSN's basic block. */ |
2731 | ||
2732 | static int | |
1d088dee | 2733 | oprs_not_set_p (rtx x, rtx insn) |
7506f491 | 2734 | { |
c4c81601 | 2735 | int i, j; |
7506f491 | 2736 | enum rtx_code code; |
6f7d635c | 2737 | const char *fmt; |
7506f491 | 2738 | |
7506f491 DE |
2739 | if (x == 0) |
2740 | return 1; | |
2741 | ||
2742 | code = GET_CODE (x); | |
2743 | switch (code) | |
2744 | { | |
2745 | case PC: | |
2746 | case CC0: | |
2747 | case CONST: | |
2748 | case CONST_INT: | |
2749 | case CONST_DOUBLE: | |
69ef87e2 | 2750 | case CONST_VECTOR: |
7506f491 DE |
2751 | case SYMBOL_REF: |
2752 | case LABEL_REF: | |
2753 | case ADDR_VEC: | |
2754 | case ADDR_DIFF_VEC: | |
2755 | return 1; | |
2756 | ||
2757 | case MEM: | |
589005ff | 2758 | if (load_killed_in_block_p (BLOCK_FOR_INSN (insn), |
e2d2ed72 | 2759 | INSN_CUID (insn), x, 0)) |
a13d4ebf | 2760 | return 0; |
c4c81601 RK |
2761 | else |
2762 | return oprs_not_set_p (XEXP (x, 0), insn); | |
7506f491 DE |
2763 | |
2764 | case REG: | |
73991d6a | 2765 | return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x)); |
7506f491 DE |
2766 | |
2767 | default: | |
2768 | break; | |
2769 | } | |
2770 | ||
c4c81601 | 2771 | for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--) |
7506f491 DE |
2772 | { |
2773 | if (fmt[i] == 'e') | |
2774 | { | |
7506f491 DE |
2775 | /* If we are about to do the last recursive call |
2776 | needed at this level, change it into iteration. | |
2777 | This function is called enough to be worth it. */ | |
2778 | if (i == 0) | |
c4c81601 RK |
2779 | return oprs_not_set_p (XEXP (x, i), insn); |
2780 | ||
2781 | if (! oprs_not_set_p (XEXP (x, i), insn)) | |
7506f491 DE |
2782 | return 0; |
2783 | } | |
2784 | else if (fmt[i] == 'E') | |
c4c81601 RK |
2785 | for (j = 0; j < XVECLEN (x, i); j++) |
2786 | if (! oprs_not_set_p (XVECEXP (x, i, j), insn)) | |
2787 | return 0; | |
7506f491 DE |
2788 | } |
2789 | ||
2790 | return 1; | |
2791 | } | |
2792 | ||
2793 | /* Mark things set by a CALL. */ | |
2794 | ||
2795 | static void | |
1d088dee | 2796 | mark_call (rtx insn) |
7506f491 | 2797 | { |
24a28584 | 2798 | if (! CONST_OR_PURE_CALL_P (insn)) |
a13d4ebf | 2799 | record_last_mem_set_info (insn); |
7506f491 DE |
2800 | } |
2801 | ||
2802 | /* Mark things set by a SET. */ | |
2803 | ||
2804 | static void | |
1d088dee | 2805 | mark_set (rtx pat, rtx insn) |
7506f491 DE |
2806 | { |
2807 | rtx dest = SET_DEST (pat); | |
2808 | ||
2809 | while (GET_CODE (dest) == SUBREG | |
2810 | || GET_CODE (dest) == ZERO_EXTRACT | |
2811 | || GET_CODE (dest) == SIGN_EXTRACT | |
2812 | || GET_CODE (dest) == STRICT_LOW_PART) | |
2813 | dest = XEXP (dest, 0); | |
2814 | ||
a13d4ebf | 2815 | if (GET_CODE (dest) == REG) |
73991d6a | 2816 | SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest)); |
a13d4ebf AM |
2817 | else if (GET_CODE (dest) == MEM) |
2818 | record_last_mem_set_info (insn); | |
2819 | ||
7506f491 | 2820 | if (GET_CODE (SET_SRC (pat)) == CALL) |
b5ce41ff | 2821 | mark_call (insn); |
7506f491 DE |
2822 | } |
2823 | ||
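As a concrete illustration of the stripping loop above (the RTL is invented for the example): given (set (strict_low_part (subreg:QI (reg:SI 70) 0)) (reg:QI 71)), the loop peels off STRICT_LOW_PART and SUBREG so that reg 70 itself is marked in reg_set_bitmap; had the destination reduced to a MEM instead, record_last_mem_set_info would have been called.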
2824 | /* Record things set by a CLOBBER. */ | |
2825 | ||
2826 | static void | |
1d088dee | 2827 | mark_clobber (rtx pat, rtx insn) |
7506f491 DE |
2828 | { |
2829 | rtx clob = XEXP (pat, 0); | |
2830 | ||
2831 | while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART) | |
2832 | clob = XEXP (clob, 0); | |
2833 | ||
a13d4ebf | 2834 | if (GET_CODE (clob) == REG) |
73991d6a | 2835 | SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob)); |
a13d4ebf AM |
2836 | else |
2837 | record_last_mem_set_info (insn); | |
7506f491 DE |
2838 | } |
2839 | ||
2840 | /* Record things set by INSN. | |
2841 | This data is used by oprs_not_set_p. */ | |
2842 | ||
2843 | static void | |
1d088dee | 2844 | mark_oprs_set (rtx insn) |
7506f491 DE |
2845 | { |
2846 | rtx pat = PATTERN (insn); | |
c4c81601 | 2847 | int i; |
7506f491 DE |
2848 | |
2849 | if (GET_CODE (pat) == SET) | |
2850 | mark_set (pat, insn); | |
2851 | else if (GET_CODE (pat) == PARALLEL) | |
c4c81601 RK |
2852 | for (i = 0; i < XVECLEN (pat, 0); i++) |
2853 | { | |
2854 | rtx x = XVECEXP (pat, 0, i); | |
2855 | ||
2856 | if (GET_CODE (x) == SET) | |
2857 | mark_set (x, insn); | |
2858 | else if (GET_CODE (x) == CLOBBER) | |
2859 | mark_clobber (x, insn); | |
2860 | else if (GET_CODE (x) == CALL) | |
2861 | mark_call (insn); | |
2862 | } | |
7506f491 | 2863 | |
7506f491 DE |
2864 | else if (GET_CODE (pat) == CLOBBER) |
2865 | mark_clobber (pat, insn); | |
2866 | else if (GET_CODE (pat) == CALL) | |
b5ce41ff | 2867 | mark_call (insn); |
7506f491 | 2868 | } |
b5ce41ff | 2869 | |
7506f491 DE |
2870 | \f |
2871 | /* Classic GCSE reaching definition support. */ | |
2872 | ||
2873 | /* Allocate reaching def variables. */ | |
2874 | ||
2875 | static void | |
1d088dee | 2876 | alloc_rd_mem (int n_blocks, int n_insns) |
7506f491 | 2877 | { |
703ad42b | 2878 | rd_kill = sbitmap_vector_alloc (n_blocks, n_insns); |
d55bc081 | 2879 | sbitmap_vector_zero (rd_kill, n_blocks); |
7506f491 | 2880 | |
703ad42b | 2881 | rd_gen = sbitmap_vector_alloc (n_blocks, n_insns); |
d55bc081 | 2882 | sbitmap_vector_zero (rd_gen, n_blocks); |
7506f491 | 2883 | |
703ad42b | 2884 | reaching_defs = sbitmap_vector_alloc (n_blocks, n_insns); |
d55bc081 | 2885 | sbitmap_vector_zero (reaching_defs, n_blocks); |
7506f491 | 2886 | |
703ad42b | 2887 | rd_out = sbitmap_vector_alloc (n_blocks, n_insns); |
d55bc081 | 2888 | sbitmap_vector_zero (rd_out, n_blocks); |
7506f491 DE |
2889 | } |
2890 | ||
2891 | /* Free reaching def variables. */ | |
2892 | ||
2893 | static void | |
1d088dee | 2894 | free_rd_mem (void) |
7506f491 | 2895 | { |
5a660bff DB |
2896 | sbitmap_vector_free (rd_kill); |
2897 | sbitmap_vector_free (rd_gen); | |
2898 | sbitmap_vector_free (reaching_defs); | |
2899 | sbitmap_vector_free (rd_out); | |
7506f491 DE |
2900 | } |
2901 | ||
c4c81601 | 2902 | /* Add INSN to the kills of BB. REGNO, set in BB, is killed by INSN. */ |
7506f491 DE |
2903 | |
2904 | static void | |
1d088dee | 2905 | handle_rd_kill_set (rtx insn, int regno, basic_block bb) |
7506f491 | 2906 | { |
c4c81601 | 2907 | struct reg_set *this_reg; |
7506f491 | 2908 | |
c4c81601 RK |
2909 | for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg ->next) |
2910 | if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn)) | |
0b17ab2f | 2911 | SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn)); |
7506f491 DE |
2912 | } |
2913 | ||
7506f491 DE |
2914 | /* Compute the set of kills for reaching definitions. */
2915 | ||
2916 | static void | |
1d088dee | 2917 | compute_kill_rd (void) |
7506f491 | 2918 | { |
e0082a72 | 2919 | int cuid; |
172890a2 RK |
2920 | unsigned int regno; |
2921 | int i; | |
e0082a72 | 2922 | basic_block bb; |
7506f491 DE |
2923 | |
2924 | /* For each block | |
2925 | For each set bit in `gen' of the block (i.e each insn which | |
ac7c5af5 JL |
2926 | generates a definition in the block) |
2927 | Call the reg set by the insn corresponding to that bit regx | |
2928 | Look at the linked list starting at reg_set_table[regx] | |
2929 | For each setting of regx in the linked list, which is not in | |
2930 | this block | |
6d2f8887 | 2931 | Set the bit in `kill' corresponding to that insn. */ |
e0082a72 | 2932 | FOR_EACH_BB (bb) |
c4c81601 | 2933 | for (cuid = 0; cuid < max_cuid; cuid++) |
e0082a72 | 2934 | if (TEST_BIT (rd_gen[bb->index], cuid)) |
7506f491 | 2935 | { |
c4c81601 RK |
2936 | rtx insn = CUID_INSN (cuid); |
2937 | rtx pat = PATTERN (insn); | |
7506f491 | 2938 | |
c4c81601 RK |
2939 | if (GET_CODE (insn) == CALL_INSN) |
2940 | { | |
2941 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) | |
4e2db584 | 2942 | if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)) |
e0082a72 | 2943 | handle_rd_kill_set (insn, regno, bb); |
c4c81601 | 2944 | } |
7506f491 | 2945 | |
c4c81601 RK |
2946 | if (GET_CODE (pat) == PARALLEL) |
2947 | { | |
2948 | for (i = XVECLEN (pat, 0) - 1; i >= 0; i--) | |
7506f491 | 2949 | { |
c4c81601 | 2950 | enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i)); |
7506f491 | 2951 | |
c4c81601 RK |
2952 | if ((code == SET || code == CLOBBER) |
2953 | && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG) | |
2954 | handle_rd_kill_set (insn, | |
2955 | REGNO (XEXP (XVECEXP (pat, 0, i), 0)), | |
e0082a72 | 2956 | bb); |
ac7c5af5 | 2957 | } |
ac7c5af5 | 2958 | } |
c4c81601 RK |
2959 | else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG) |
2960 | /* Each setting of this register outside of this block | |
2961 | must be marked in the set of kills in this block. */ | |
e0082a72 | 2962 | handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb); |
7506f491 | 2963 | } |
7506f491 DE |
2964 | } |
2965 | ||
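Worked example of the scheme above (the block names and cuids are invented): if pseudo-reg 70 is set by the insn with cuid 12 in block A and by the insn with cuid 30 in block B, the pass over block A finds cuid 12 in rd_gen[A], walks reg_set_table[70], and sets the cuid-30 bit in rd_kill[A]; symmetrically, rd_kill[B] gets the cuid-12 bit.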
589005ff | 2966 | /* Compute the reaching definitions as in |
7506f491 DE |
2967 | Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman, |
2968 | Chapter 10. It is the same algorithm as used for computing available | |
2969 | expressions but applied to the gens and kills of reaching definitions. */ | |
2970 | ||
2971 | static void | |
1d088dee | 2972 | compute_rd (void) |
7506f491 | 2973 | { |
e0082a72 ZD |
2974 | int changed, passes; |
2975 | basic_block bb; | |
7506f491 | 2976 | |
e0082a72 ZD |
2977 | FOR_EACH_BB (bb) |
2978 | sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/); | |
7506f491 DE |
2979 | |
2980 | passes = 0; | |
2981 | changed = 1; | |
2982 | while (changed) | |
2983 | { | |
2984 | changed = 0; | |
e0082a72 | 2985 | FOR_EACH_BB (bb) |
ac7c5af5 | 2986 | { |
e0082a72 ZD |
2987 | sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index); |
2988 | changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index], | |
2989 | reaching_defs[bb->index], rd_kill[bb->index]); | |
ac7c5af5 | 2990 | } |
7506f491 DE |
2991 | passes++; |
2992 | } | |
2993 | ||
2994 | if (gcse_file) | |
2995 | fprintf (gcse_file, "reaching def computation: %d passes\n", passes); | |
2996 | } | |
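The loop above iterates the textbook forward dataflow equations to a fixed point: OUT[b] = GEN[b] union (IN[b] minus KILL[b]), with IN[b] the union of the predecessors' OUT. A self-contained sketch with plain unsigned bitmasks standing in for sbitmaps (the three-block CFG and the gen/kill values are invented):

#include <stdio.h>

#define N_BLOCKS 3

int
main (void)
{
  /* CFG: block 0 feeds blocks 1 and 2; block 1 also feeds block 2.  */
  unsigned gen[N_BLOCKS]  = { 0x1, 0x2, 0x4 };
  unsigned kill[N_BLOCKS] = { 0x0, 0x1, 0x0 };
  unsigned in[N_BLOCKS] = { 0, 0, 0 };
  unsigned out[N_BLOCKS];
  int b, changed = 1;

  for (b = 0; b < N_BLOCKS; b++)
    out[b] = gen[b];                     /* as in the sbitmap_copy above */

  while (changed)
    {
      changed = 0;
      in[1] = out[0];                    /* union over predecessors */
      in[2] = out[0] | out[1];
      for (b = 0; b < N_BLOCKS; b++)
        {
          unsigned old = out[b];
          out[b] = gen[b] | (in[b] & ~kill[b]);
          if (out[b] != old)
            changed = 1;
        }
    }

  for (b = 0; b < N_BLOCKS; b++)
    printf ("out[%d] = 0x%x\n", b, out[b]);   /* 0x1, 0x2, 0x7 */
  return 0;
}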
2997 | \f | |
2998 | /* Classic GCSE available expression support. */ | |
2999 | ||
3000 | /* Allocate memory for available expression computation. */ | |
3001 | ||
3002 | static void | |
1d088dee | 3003 | alloc_avail_expr_mem (int n_blocks, int n_exprs) |
7506f491 | 3004 | { |
703ad42b | 3005 | ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs); |
d55bc081 | 3006 | sbitmap_vector_zero (ae_kill, n_blocks); |
7506f491 | 3007 | |
703ad42b | 3008 | ae_gen = sbitmap_vector_alloc (n_blocks, n_exprs); |
d55bc081 | 3009 | sbitmap_vector_zero (ae_gen, n_blocks); |
7506f491 | 3010 | |
703ad42b | 3011 | ae_in = sbitmap_vector_alloc (n_blocks, n_exprs); |
d55bc081 | 3012 | sbitmap_vector_zero (ae_in, n_blocks); |
7506f491 | 3013 | |
703ad42b | 3014 | ae_out = sbitmap_vector_alloc (n_blocks, n_exprs); |
d55bc081 | 3015 | sbitmap_vector_zero (ae_out, n_blocks); |
7506f491 DE |
3016 | } |
3017 | ||
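/* Free memory allocated by alloc_avail_expr_mem. */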
3018 | static void | |
1d088dee | 3019 | free_avail_expr_mem (void) |
7506f491 | 3020 | { |
5a660bff DB |
3021 | sbitmap_vector_free (ae_kill); |
3022 | sbitmap_vector_free (ae_gen); | |
3023 | sbitmap_vector_free (ae_in); | |
3024 | sbitmap_vector_free (ae_out); | |
7506f491 DE |
3025 | } |
3026 | ||
3027 | /* Compute the set of available expressions generated in each basic block. */ | |
3028 | ||
3029 | static void | |
1d088dee | 3030 | compute_ae_gen (struct hash_table *expr_hash_table) |
7506f491 | 3031 | { |
2e653e39 | 3032 | unsigned int i; |
c4c81601 RK |
3033 | struct expr *expr; |
3034 | struct occr *occr; | |
7506f491 DE |
3035 | |
3036 | /* For each recorded occurrence of each expression, set ae_gen[bb][expr]. | |
3037 | This is all we have to do because an expression is not recorded if it | |
3038 | is not available, and the only expressions we want to work with are the | |
3039 | ones that are recorded. */ | |
02280659 ZD |
3040 | for (i = 0; i < expr_hash_table->size; i++) |
3041 | for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash) | |
c4c81601 RK |
3042 | for (occr = expr->avail_occr; occr != 0; occr = occr->next) |
3043 | SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index); | |
7506f491 DE |
3044 | } |
3045 | ||
cc2902df | 3046 | /* Return nonzero if expression X is killed in BB. */ |
7506f491 DE |
3047 | |
3048 | static int | |
1d088dee | 3049 | expr_killed_p (rtx x, basic_block bb) |
7506f491 | 3050 | { |
c4c81601 | 3051 | int i, j; |
7506f491 | 3052 | enum rtx_code code; |
6f7d635c | 3053 | const char *fmt; |
7506f491 | 3054 | |
7506f491 DE |
3055 | if (x == 0) |
3056 | return 1; | |
3057 | ||
3058 | code = GET_CODE (x); | |
3059 | switch (code) | |
3060 | { | |
3061 | case REG: | |
0b17ab2f | 3062 | return TEST_BIT (reg_set_in_block[bb->index], REGNO (x)); |
7506f491 DE |
3063 | |
3064 | case MEM: | |
a13d4ebf AM |
3065 | if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0)) |
3066 | return 1; | |
c4c81601 RK |
3067 | else |
3068 | return expr_killed_p (XEXP (x, 0), bb); | |
7506f491 DE |
3069 | |
3070 | case PC: | |
3071 | case CC0: /*FIXME*/ | |
3072 | case CONST: | |
3073 | case CONST_INT: | |
3074 | case CONST_DOUBLE: | |
69ef87e2 | 3075 | case CONST_VECTOR: |
7506f491 DE |
3076 | case SYMBOL_REF: |
3077 | case LABEL_REF: | |
3078 | case ADDR_VEC: | |
3079 | case ADDR_DIFF_VEC: | |
3080 | return 0; | |
3081 | ||
3082 | default: | |
3083 | break; | |
3084 | } | |
3085 | ||
c4c81601 | 3086 | for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--) |
7506f491 DE |
3087 | { |
3088 | if (fmt[i] == 'e') | |
3089 | { | |
7506f491 DE |
3090 | /* If we are about to do the last recursive call |
3091 | needed at this level, change it into iteration. | |
3092 | This function is called enough to be worth it. */ | |
3093 | if (i == 0) | |
c4c81601 RK |
3094 | return expr_killed_p (XEXP (x, i), bb); |
3095 | else if (expr_killed_p (XEXP (x, i), bb)) | |
7506f491 DE |
3096 | return 1; |
3097 | } | |
3098 | else if (fmt[i] == 'E') | |
c4c81601 RK |
3099 | for (j = 0; j < XVECLEN (x, i); j++) |
3100 | if (expr_killed_p (XVECEXP (x, i, j), bb)) | |
3101 | return 1; | |
7506f491 DE |
3102 | } |
3103 | ||
3104 | return 0; | |
3105 | } | |
3106 | ||
3107 | /* Compute the set of available expressions killed in each basic block. */ | |
3108 | ||
3109 | static void | |
1d088dee AJ |
3110 | compute_ae_kill (sbitmap *ae_gen, sbitmap *ae_kill, |
3111 | struct hash_table *expr_hash_table) | |
7506f491 | 3112 | { |
e0082a72 | 3113 | basic_block bb; |
2e653e39 | 3114 | unsigned int i; |
c4c81601 | 3115 | struct expr *expr; |
7506f491 | 3116 | |
e0082a72 | 3117 | FOR_EACH_BB (bb) |
02280659 ZD |
3118 | for (i = 0; i < expr_hash_table->size; i++) |
3119 | for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash) | |
7506f491 | 3120 | { |
c4c81601 | 3121 | /* Skip EXPR if generated in this block. */ |
e0082a72 | 3122 | if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index)) |
c4c81601 | 3123 | continue; |
7506f491 | 3124 | |
e0082a72 ZD |
3125 | if (expr_killed_p (expr->expr, bb)) |
3126 | SET_BIT (ae_kill[bb->index], expr->bitmap_index); | |
7506f491 | 3127 | } |
7506f491 | 3128 | } |
7506f491 DE |
3129 | \f |
3130 | /* Actually perform the Classic GCSE optimizations. */ | |
3131 | ||
cc2902df | 3132 | /* Return nonzero if occurrence OCCR of expression EXPR reaches block BB. |
7506f491 | 3133 | |
cc2902df | 3134 | CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself |
7506f491 DE |
3135 | as a positive reach. We want to do this when there are two computations |
3136 | of the expression in the block. | |
3137 | ||
3138 | VISITED is a pointer to a working buffer for tracking which BB's have | |
3139 | been visited. It is NULL for the top-level call. | |
3140 | ||
3141 | We treat reaching expressions that go through blocks containing the same | |
3142 | reaching expression as "not reaching". E.g. if EXPR is generated in blocks | |
3143 | 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block | |
3144 | 2 as not reaching. The intent is to improve the probability of finding | |
3145 | only one reaching expression and to reduce register lifetimes by picking | |
3146 | the closest such expression. */ | |
3147 | ||
3148 | static int | |
1d088dee AJ |
3149 | expr_reaches_here_p_work (struct occr *occr, struct expr *expr, |
3150 | basic_block bb, int check_self_loop, char *visited) | |
7506f491 | 3151 | { |
36349f8b | 3152 | edge pred; |
7506f491 | 3153 | |
e2d2ed72 | 3154 | for (pred = bb->pred; pred != NULL; pred = pred->pred_next) |
7506f491 | 3155 | { |
e2d2ed72 | 3156 | basic_block pred_bb = pred->src; |
7506f491 | 3157 | |
0b17ab2f | 3158 | if (visited[pred_bb->index]) |
c4c81601 | 3159 | /* This predecessor has already been visited. Nothing to do. */ |
7506f491 | 3160 | ; |
7506f491 | 3161 | else if (pred_bb == bb) |
ac7c5af5 | 3162 | { |
7506f491 DE |
3163 | /* BB loops on itself. */ |
3164 | if (check_self_loop | |
0b17ab2f RH |
3165 | && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index) |
3166 | && BLOCK_NUM (occr->insn) == pred_bb->index) | |
7506f491 | 3167 | return 1; |
c4c81601 | 3168 | |
0b17ab2f | 3169 | visited[pred_bb->index] = 1; |
ac7c5af5 | 3170 | } |
c4c81601 | 3171 | |
7506f491 | 3172 | /* Ignore this predecessor if it kills the expression. */ |
0b17ab2f RH |
3173 | else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index)) |
3174 | visited[pred_bb->index] = 1; | |
c4c81601 | 3175 | |
7506f491 | 3176 | /* Does this predecessor generate this expression? */ |
0b17ab2f | 3177 | else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)) |
7506f491 DE |
3178 | { |
3179 | /* Is this the occurrence we're looking for? | |
3180 | Note that there's only one generating occurrence per block | |
3181 | so we just need to check the block number. */ | |
0b17ab2f | 3182 | if (BLOCK_NUM (occr->insn) == pred_bb->index) |
7506f491 | 3183 | return 1; |
c4c81601 | 3184 | |
0b17ab2f | 3185 | visited[pred_bb->index] = 1; |
7506f491 | 3186 | } |
c4c81601 | 3187 | |
7506f491 DE |
3188 | /* Neither gen nor kill. */ |
3189 | else | |
ac7c5af5 | 3190 | { |
0b17ab2f | 3191 | visited[pred_bb->index] = 1; |
589005ff | 3192 | if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop, |
283a2545 | 3193 | visited)) |
c4c81601 | 3194 | |
7506f491 | 3195 | return 1; |
ac7c5af5 | 3196 | } |
7506f491 DE |
3197 | } |
3198 | ||
3199 | /* All paths have been checked. */ | |
3200 | return 0; | |
3201 | } | |
3202 | ||
283a2545 | 3203 | /* This wrapper for expr_reaches_here_p_work() ensures that any
dc297297 | 3204 | memory allocated by that function is freed. */
283a2545 RL |
3205 | |
3206 | static int | |
1d088dee AJ |
3207 | expr_reaches_here_p (struct occr *occr, struct expr *expr, basic_block bb, |
3208 | int check_self_loop) | |
283a2545 RL |
3209 | { |
3210 | int rval; | |
703ad42b | 3211 | char *visited = xcalloc (last_basic_block, 1); |
283a2545 | 3212 | |
c4c81601 | 3213 | rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited); |
589005ff | 3214 | |
283a2545 | 3215 | free (visited); |
c4c81601 | 3216 | return rval; |
283a2545 RL |
3217 | } |
3218 | ||
7506f491 DE |
3219 | /* Return the instruction that computes EXPR that reaches INSN's basic block. |
3220 | If there is more than one such instruction, return NULL. | |
3221 | ||
3222 | Called only by handle_avail_expr. */ | |
3223 | ||
3224 | static rtx | |
1d088dee | 3225 | computing_insn (struct expr *expr, rtx insn) |
7506f491 | 3226 | { |
e2d2ed72 | 3227 | basic_block bb = BLOCK_FOR_INSN (insn); |
7506f491 DE |
3228 | |
3229 | if (expr->avail_occr->next == NULL) | |
589005ff | 3230 | { |
e2d2ed72 | 3231 | if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb) |
c4c81601 RK |
3232 | /* The available expression is actually itself |
3233 | (i.e. a loop in the flow graph) so do nothing. */ | |
3234 | return NULL; | |
3235 | ||
7506f491 DE |
3236 | /* (FIXME) Case that we found a pattern that was created by |
3237 | a substitution that took place. */ | |
3238 | return expr->avail_occr->insn; | |
3239 | } | |
3240 | else | |
3241 | { | |
3242 | /* Pattern is computed more than once. | |
589005ff | 3243 | Search backwards from this insn to see how many of these |
7506f491 DE |
3244 | computations actually reach this insn. */ |
3245 | struct occr *occr; | |
3246 | rtx insn_computes_expr = NULL; | |
3247 | int can_reach = 0; | |
3248 | ||
3249 | for (occr = expr->avail_occr; occr != NULL; occr = occr->next) | |
3250 | { | |
e2d2ed72 | 3251 | if (BLOCK_FOR_INSN (occr->insn) == bb) |
7506f491 DE |
3252 | { |
3253 | /* The expression is generated in this block. | |
3254 | The only time we care about this is when the expression | |
3255 | is generated later in the block [and thus there's a loop]. | |
3256 | We let the normal cse pass handle the other cases. */ | |
c4c81601 RK |
3257 | if (INSN_CUID (insn) < INSN_CUID (occr->insn) |
3258 | && expr_reaches_here_p (occr, expr, bb, 1)) | |
7506f491 DE |
3259 | { |
3260 | can_reach++; | |
3261 | if (can_reach > 1) | |
3262 | return NULL; | |
c4c81601 | 3263 | |
7506f491 DE |
3264 | insn_computes_expr = occr->insn; |
3265 | } | |
3266 | } | |
c4c81601 RK |
3267 | else if (expr_reaches_here_p (occr, expr, bb, 0)) |
3268 | { | |
3269 | can_reach++; | |
3270 | if (can_reach > 1) | |
3271 | return NULL; | |
3272 | ||
3273 | insn_computes_expr = occr->insn; | |
3274 | } | |
7506f491 DE |
3275 | } |
3276 | ||
3277 | if (insn_computes_expr == NULL) | |
3278 | abort (); | |
c4c81601 | 3279 | |
7506f491 DE |
3280 | return insn_computes_expr; |
3281 | } | |
3282 | } | |
3283 | ||
cc2902df | 3284 | /* Return nonzero if the definition in DEF_INSN can reach INSN. |
7506f491 DE |
3285 | Only called by can_disregard_other_sets. */ |
3286 | ||
3287 | static int | |
1d088dee | 3288 | def_reaches_here_p (rtx insn, rtx def_insn) |
7506f491 DE |
3289 | { |
3290 | rtx reg; | |
3291 | ||
3292 | if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn))) | |
3293 | return 1; | |
3294 | ||
3295 | if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn)) | |
3296 | { | |
3297 | if (INSN_CUID (def_insn) < INSN_CUID (insn)) | |
ac7c5af5 | 3298 | { |
7506f491 DE |
3299 | if (GET_CODE (PATTERN (def_insn)) == PARALLEL) |
3300 | return 1; | |
c4c81601 | 3301 | else if (GET_CODE (PATTERN (def_insn)) == CLOBBER) |
7506f491 DE |
3302 | reg = XEXP (PATTERN (def_insn), 0); |
3303 | else if (GET_CODE (PATTERN (def_insn)) == SET) | |
3304 | reg = SET_DEST (PATTERN (def_insn)); | |
3305 | else | |
3306 | abort (); | |
c4c81601 | 3307 | |
7506f491 DE |
3308 | return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn); |
3309 | } | |
3310 | else | |
3311 | return 0; | |
3312 | } | |
3313 | ||
3314 | return 0; | |
3315 | } | |
3316 | ||
cc2902df | 3317 | /* Return nonzero if *ADDR_THIS_REG can only have one value at INSN. The |
c4c81601 RK |
3318 | value returned is the number of definitions that reach INSN. Returning a |
3319 | value of zero means that [maybe] more than one definition reaches INSN and | |
3320 | the caller can't perform whatever optimization it is attempting; i.e. it
3321 | is always safe to return zero. */
7506f491 DE |
3322 | |
3323 | static int | |
1d088dee | 3324 | can_disregard_other_sets (struct reg_set **addr_this_reg, rtx insn, int for_combine) |
7506f491 DE |
3325 | { |
3326 | int number_of_reaching_defs = 0; | |
c4c81601 | 3327 | struct reg_set *this_reg; |
7506f491 | 3328 | |
c4c81601 RK |
3329 | for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next) |
3330 | if (def_reaches_here_p (insn, this_reg->insn)) | |
3331 | { | |
3332 | number_of_reaching_defs++; | |
3333 | /* Ignore parallels for now. */ | |
3334 | if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL) | |
3335 | return 0; | |
3336 | ||
3337 | if (!for_combine | |
3338 | && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER | |
3339 | || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)), | |
3340 | SET_SRC (PATTERN (insn))))) | |
3341 | /* A setting of the reg to a different value reaches INSN. */ | |
3342 | return 0; | |
3343 | ||
3344 | if (number_of_reaching_defs > 1) | |
3345 | { | |
3346 | /* If in this setting the value the register is being set to is | |
3347 | equal to the previous value the register was set to and this | |
3348 | setting reaches the insn we are trying to do the substitution | |
3349 | on then we are ok. */ | |
3350 | if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER) | |
7506f491 | 3351 | return 0; |
c4c81601 RK |
3352 | else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)), |
3353 | SET_SRC (PATTERN (insn)))) | |
3354 | return 0; | |
3355 | } | |
7506f491 | 3356 | |
589005ff | 3357 | *addr_this_reg = this_reg; |
c4c81601 | 3358 | } |
7506f491 DE |
3359 | |
3360 | return number_of_reaching_defs; | |
3361 | } | |
3362 | ||
3363 | /* Expression computed by insn is available and the substitution is legal, | |
3364 | so try to perform the substitution. | |
3365 | ||
cc2902df | 3366 | The result is nonzero if any changes were made. */ |
7506f491 DE |
3367 | |
3368 | static int | |
1d088dee | 3369 | handle_avail_expr (rtx insn, struct expr *expr) |
7506f491 | 3370 | { |
0631e0bf | 3371 | rtx pat, insn_computes_expr, expr_set; |
7506f491 DE |
3372 | rtx to; |
3373 | struct reg_set *this_reg; | |
3374 | int found_setting, use_src; | |
3375 | int changed = 0; | |
3376 | ||
3377 | /* We only handle the case where one computation of the expression | |
3378 | reaches this instruction. */ | |
3379 | insn_computes_expr = computing_insn (expr, insn); | |
3380 | if (insn_computes_expr == NULL) | |
3381 | return 0; | |
0631e0bf JH |
3382 | expr_set = single_set (insn_computes_expr); |
3383 | if (!expr_set) | |
3384 | abort (); | |
7506f491 DE |
3385 | |
3386 | found_setting = 0; | |
3387 | use_src = 0; | |
3388 | ||
3389 | /* At this point we know only one computation of EXPR outside of this | |
3390 | block reaches this insn. Now try to find a register that the | |
3391 | expression is computed into. */ | |
0631e0bf | 3392 | if (GET_CODE (SET_SRC (expr_set)) == REG) |
7506f491 DE |
3393 | { |
3394 | /* This is the case when the available expression that reaches | |
3395 | here has already been handled as an available expression. */ | |
770ae6cc | 3396 | unsigned int regnum_for_replacing |
0631e0bf | 3397 | = REGNO (SET_SRC (expr_set)); |
c4c81601 | 3398 | |
7506f491 DE |
3399 | /* If the register was created by GCSE we can't use `reg_set_table', |
3400 | however we know it's set only once. */ | |
3401 | if (regnum_for_replacing >= max_gcse_regno | |
3402 | /* If the register the expression is computed into is set only once, | |
3403 | or only one set reaches this insn, we can use it. */ | |
3404 | || (((this_reg = reg_set_table[regnum_for_replacing]), | |
3405 | this_reg->next == NULL) | |
3406 | || can_disregard_other_sets (&this_reg, insn, 0))) | |
8e42ace1 KH |
3407 | { |
3408 | use_src = 1; | |
3409 | found_setting = 1; | |
3410 | } | |
7506f491 DE |
3411 | } |
3412 | ||
3413 | if (!found_setting) | |
3414 | { | |
770ae6cc | 3415 | unsigned int regnum_for_replacing |
0631e0bf | 3416 | = REGNO (SET_DEST (expr_set)); |
c4c81601 | 3417 | |
7506f491 DE |
3418 | /* This shouldn't happen. */ |
3419 | if (regnum_for_replacing >= max_gcse_regno) | |
3420 | abort (); | |
c4c81601 | 3421 | |
7506f491 | 3422 | this_reg = reg_set_table[regnum_for_replacing]; |
c4c81601 | 3423 | |
7506f491 DE |
3424 | /* If the register the expression is computed into is set only once, |
3425 | or only one set reaches this insn, use it. */ | |
3426 | if (this_reg->next == NULL | |
3427 | || can_disregard_other_sets (&this_reg, insn, 0)) | |
3428 | found_setting = 1; | |
3429 | } | |
3430 | ||
3431 | if (found_setting) | |
3432 | { | |
3433 | pat = PATTERN (insn); | |
3434 | if (use_src) | |
0631e0bf | 3435 | to = SET_SRC (expr_set); |
7506f491 | 3436 | else |
0631e0bf | 3437 | to = SET_DEST (expr_set); |
7506f491 DE |
3438 | changed = validate_change (insn, &SET_SRC (pat), to, 0); |
3439 | ||
3440 | /* We should be able to ignore the return code from validate_change but | |
3441 | to play it safe we check. */ | |
3442 | if (changed) | |
3443 | { | |
3444 | gcse_subst_count++; | |
3445 | if (gcse_file != NULL) | |
3446 | { | |
c4c81601 RK |
3447 | fprintf (gcse_file, "GCSE: Replacing the source in insn %d with", |
3448 | INSN_UID (insn)); | |
3449 | fprintf (gcse_file, " reg %d %s insn %d\n", | |
3450 | REGNO (to), use_src ? "from" : "set in", | |
7506f491 DE |
3451 | INSN_UID (insn_computes_expr)); |
3452 | } | |
7506f491 DE |
3453 | } |
3454 | } | |
c4c81601 | 3455 | |
7506f491 DE |
3456 | /* The register that the expr is computed into is set more than once. */ |
3457 | else if (1 /*expensive_op(this_pattern->op) && do_expensive_gcse)*/)
3458 | { | |
3459 | /* Insert an insn after INSNX that copies the reg set in INSNX
3460 | into a new pseudo register; call this new register REGN.
3461 | From INSNB until the end of the basic block, or until REGB is
3462 | set, replace all uses of REGB with REGN. */
3463 | rtx new_insn; | |
3464 | ||
0631e0bf | 3465 | to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set))); |
7506f491 DE |
3466 | |
3467 | /* Generate the new insn. */ | |
3468 | /* ??? If the change fails, we return 0, even though we created | |
3469 | an insn. I think this is ok. */ | |
9e6a5703 JC |
3470 | new_insn |
3471 | = emit_insn_after (gen_rtx_SET (VOIDmode, to, | |
0631e0bf | 3472 | SET_DEST (expr_set)), |
c4c81601 RK |
3473 | insn_computes_expr); |
3474 | ||
7506f491 DE |
3475 | /* Keep register set table up to date. */ |
3476 | record_one_set (REGNO (to), new_insn); | |
3477 | ||
3478 | gcse_create_count++; | |
3479 | if (gcse_file != NULL) | |
ac7c5af5 | 3480 | { |
c4c81601 | 3481 | fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d", |
7506f491 | 3482 | INSN_UID (NEXT_INSN (insn_computes_expr)), |
c4c81601 RK |
3483 | REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr))))); |
3484 | fprintf (gcse_file, ", computed in insn %d,\n", | |
7506f491 | 3485 | INSN_UID (insn_computes_expr)); |
c4c81601 RK |
3486 | fprintf (gcse_file, " into newly allocated reg %d\n", |
3487 | REGNO (to)); | |
ac7c5af5 | 3488 | } |
7506f491 DE |
3489 | |
3490 | pat = PATTERN (insn); | |
3491 | ||
3492 | /* Do register replacement for INSN. */ | |
3493 | changed = validate_change (insn, &SET_SRC (pat), | |
c4c81601 RK |
3494 | SET_DEST (PATTERN |
3495 | (NEXT_INSN (insn_computes_expr))), | |
7506f491 DE |
3496 | 0); |
3497 | ||
3498 | /* We should be able to ignore the return code from validate_change but | |
3499 | to play it safe we check. */ | |
3500 | if (changed) | |
3501 | { | |
3502 | gcse_subst_count++; | |
3503 | if (gcse_file != NULL) | |
3504 | { | |
c4c81601 RK |
3505 | fprintf (gcse_file, |
3506 | "GCSE: Replacing the source in insn %d with reg %d ", | |
7506f491 | 3507 | INSN_UID (insn), |
c4c81601 RK |
3508 | REGNO (SET_DEST (PATTERN (NEXT_INSN |
3509 | (insn_computes_expr))))); | |
3510 | fprintf (gcse_file, "set in insn %d\n", | |
589005ff | 3511 | INSN_UID (insn_computes_expr)); |
7506f491 | 3512 | } |
7506f491 DE |
3513 | } |
3514 | } | |
3515 | ||
3516 | return changed; | |
3517 | } | |
3518 | ||
c4c81601 RK |
3519 | /* Perform classic GCSE. This is called by one_classic_gcse_pass after all |
3520 | the dataflow analysis has been done. | |
7506f491 | 3521 | |
cc2902df | 3522 | The result is nonzero if a change was made. */ |
7506f491 DE |
3523 | |
3524 | static int | |
1d088dee | 3525 | classic_gcse (void) |
7506f491 | 3526 | { |
e0082a72 | 3527 | int changed; |
7506f491 | 3528 | rtx insn; |
e0082a72 | 3529 | basic_block bb; |
7506f491 DE |
3530 | |
3531 | /* Note we start at block 1. */ | |
3532 | ||
e0082a72 ZD |
3533 | if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR) |
3534 | return 0; | |
3535 | ||
7506f491 | 3536 | changed = 0; |
e0082a72 | 3537 | FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb) |
7506f491 DE |
3538 | { |
3539 | /* Reset tables used to keep track of what's still valid [since the | |
3540 | start of the block]. */ | |
3541 | reset_opr_set_tables (); | |
3542 | ||
e0082a72 ZD |
3543 | for (insn = bb->head; |
3544 | insn != NULL && insn != NEXT_INSN (bb->end); | |
7506f491 DE |
3545 | insn = NEXT_INSN (insn)) |
3546 | { | |
3547 | /* Is insn of form (set (pseudo-reg) ...)? */ | |
7506f491 DE |
3548 | if (GET_CODE (insn) == INSN |
3549 | && GET_CODE (PATTERN (insn)) == SET | |
3550 | && GET_CODE (SET_DEST (PATTERN (insn))) == REG | |
3551 | && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER) | |
3552 | { | |
3553 | rtx pat = PATTERN (insn); | |
3554 | rtx src = SET_SRC (pat); | |
3555 | struct expr *expr; | |
3556 | ||
3557 | if (want_to_gcse_p (src) | |
3558 | /* Is the expression recorded? */ | |
02280659 | 3559 | && ((expr = lookup_expr (src, &expr_hash_table)) != NULL) |
7506f491 DE |
3560 | /* Is the expression available [at the start of the |
3561 | block]? */ | |
e0082a72 | 3562 | && TEST_BIT (ae_in[bb->index], expr->bitmap_index) |
7506f491 DE |
3563 | /* Are the operands unchanged since the start of the |
3564 | block? */ | |
3565 | && oprs_not_set_p (src, insn)) | |
3566 | changed |= handle_avail_expr (insn, expr); | |
3567 | } | |
3568 | ||
3569 | /* Keep track of everything modified by this insn. */ | |
3570 | /* ??? Need to be careful w.r.t. mods done to INSN. */ | |
2c3c49de | 3571 | if (INSN_P (insn)) |
7506f491 | 3572 | mark_oprs_set (insn); |
ac7c5af5 | 3573 | } |
7506f491 DE |
3574 | } |
3575 | ||
3576 | return changed; | |
3577 | } | |
3578 | ||
3579 | /* Top level routine to perform one classic GCSE pass. | |
3580 | ||
cc2902df | 3581 | Return nonzero if a change was made. */ |
7506f491 DE |
3582 | |
3583 | static int | |
1d088dee | 3584 | one_classic_gcse_pass (int pass) |
7506f491 DE |
3585 | { |
3586 | int changed = 0; | |
3587 | ||
3588 | gcse_subst_count = 0; | |
3589 | gcse_create_count = 0; | |
3590 | ||
02280659 | 3591 | alloc_hash_table (max_cuid, &expr_hash_table, 0); |
d55bc081 | 3592 | alloc_rd_mem (last_basic_block, max_cuid); |
02280659 | 3593 | compute_hash_table (&expr_hash_table); |
7506f491 | 3594 | if (gcse_file) |
02280659 | 3595 | dump_hash_table (gcse_file, "Expression", &expr_hash_table); |
c4c81601 | 3596 | |
02280659 | 3597 | if (expr_hash_table.n_elems > 0) |
7506f491 DE |
3598 | { |
3599 | compute_kill_rd (); | |
3600 | compute_rd (); | |
02280659 ZD |
3601 | alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems); |
3602 | compute_ae_gen (&expr_hash_table); | |
3603 | compute_ae_kill (ae_gen, ae_kill, &expr_hash_table); | |
bd0eaec2 | 3604 | compute_available (ae_gen, ae_kill, ae_out, ae_in); |
7506f491 DE |
3605 | changed = classic_gcse (); |
3606 | free_avail_expr_mem (); | |
3607 | } | |
c4c81601 | 3608 | |
7506f491 | 3609 | free_rd_mem (); |
02280659 | 3610 | free_hash_table (&expr_hash_table); |
7506f491 DE |
3611 | |
3612 | if (gcse_file) | |
3613 | { | |
3614 | fprintf (gcse_file, "\n"); | |
c4c81601 RK |
3615 | fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,", |
3616 | current_function_name, pass, bytes_used, gcse_subst_count); | |
3617 | fprintf (gcse_file, "%d insns created\n", gcse_create_count); | |
7506f491 DE |
3618 | } |
3619 | ||
3620 | return changed; | |
3621 | } | |
3622 | \f | |
3623 | /* Compute copy/constant propagation working variables. */ | |
3624 | ||
3625 | /* Local properties of assignments. */ | |
7506f491 DE |
3626 | static sbitmap *cprop_pavloc; |
3627 | static sbitmap *cprop_absaltered; | |
3628 | ||
3629 | /* Global properties of assignments (computed from the local properties). */ | |
7506f491 DE |
3630 | static sbitmap *cprop_avin; |
3631 | static sbitmap *cprop_avout; | |
3632 | ||
c4c81601 RK |
3633 | /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of |
3634 | basic blocks. N_SETS is the number of sets. */ | |
7506f491 DE |
3635 | |
3636 | static void | |
1d088dee | 3637 | alloc_cprop_mem (int n_blocks, int n_sets) |
7506f491 DE |
3638 | { |
3639 | cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets); | |
3640 | cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets); | |
3641 | ||
3642 | cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets); | |
3643 | cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets); | |
3644 | } | |
3645 | ||
3646 | /* Free vars used by copy/const propagation. */ | |
3647 | ||
3648 | static void | |
1d088dee | 3649 | free_cprop_mem (void) |
7506f491 | 3650 | { |
5a660bff DB |
3651 | sbitmap_vector_free (cprop_pavloc); |
3652 | sbitmap_vector_free (cprop_absaltered); | |
3653 | sbitmap_vector_free (cprop_avin); | |
3654 | sbitmap_vector_free (cprop_avout); | |
7506f491 DE |
3655 | } |
3656 | ||
c4c81601 RK |
3657 | /* For each block, compute whether X is transparent. X is either an |
3658 | expression or an assignment [though we don't care which, for this context | |
3659 | an assignment is treated as an expression]. For each block where an | |
3660 | element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX | |
3661 | bit in BMAP. */ | |
7506f491 DE |
3662 | |
3663 | static void | |
1d088dee | 3664 | compute_transp (rtx x, int indx, sbitmap *bmap, int set_p) |
7506f491 | 3665 | { |
e0082a72 ZD |
3666 | int i, j; |
3667 | basic_block bb; | |
7506f491 | 3668 | enum rtx_code code; |
c4c81601 | 3669 | reg_set *r; |
6f7d635c | 3670 | const char *fmt; |
7506f491 | 3671 | |
c4c81601 RK |
3672 | /* repeat is used to turn tail-recursion into iteration since GCC |
3673 | can't do it when there's no return value. */ | |
7506f491 DE |
3674 | repeat: |
3675 | ||
3676 | if (x == 0) | |
3677 | return; | |
3678 | ||
3679 | code = GET_CODE (x); | |
3680 | switch (code) | |
3681 | { | |
3682 | case REG: | |
c4c81601 RK |
3683 | if (set_p) |
3684 | { | |
3685 | if (REGNO (x) < FIRST_PSEUDO_REGISTER) | |
3686 | { | |
e0082a72 ZD |
3687 | FOR_EACH_BB (bb) |
3688 | if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x))) | |
3689 | SET_BIT (bmap[bb->index], indx); | |
c4c81601 RK |
3690 | } |
3691 | else | |
3692 | { | |
3693 | for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next) | |
3694 | SET_BIT (bmap[BLOCK_NUM (r->insn)], indx); | |
3695 | } | |
3696 | } | |
3697 | else | |
3698 | { | |
3699 | if (REGNO (x) < FIRST_PSEUDO_REGISTER) | |
3700 | { | |
e0082a72 ZD |
3701 | FOR_EACH_BB (bb) |
3702 | if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x))) | |
3703 | RESET_BIT (bmap[bb->index], indx); | |
c4c81601 RK |
3704 | } |
3705 | else | |
3706 | { | |
3707 | for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next) | |
3708 | RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx); | |
3709 | } | |
3710 | } | |
7506f491 | 3711 | |
c4c81601 | 3712 | return; |
7506f491 DE |
3713 | |
3714 | case MEM: | |
e0082a72 | 3715 | FOR_EACH_BB (bb) |
a13d4ebf | 3716 | { |
e0082a72 | 3717 | rtx list_entry = canon_modify_mem_list[bb->index]; |
a13d4ebf AM |
3718 | |
3719 | while (list_entry) | |
3720 | { | |
3721 | rtx dest, dest_addr; | |
3722 | ||
3723 | if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN) | |
3724 | { | |
3725 | if (set_p) | |
e0082a72 | 3726 | SET_BIT (bmap[bb->index], indx); |
a13d4ebf | 3727 | else |
e0082a72 | 3728 | RESET_BIT (bmap[bb->index], indx); |
a13d4ebf AM |
3729 | break; |
3730 | } | |
3731 | /* LIST_ENTRY must be an INSN of some kind that sets memory. | |
3732 | Examine each hunk of memory that is modified. */ | |
3733 | ||
3734 | dest = XEXP (list_entry, 0); | |
3735 | list_entry = XEXP (list_entry, 1); | |
3736 | dest_addr = XEXP (list_entry, 0); | |
589005ff | 3737 | |
a13d4ebf AM |
3738 | if (canon_true_dependence (dest, GET_MODE (dest), dest_addr, |
3739 | x, rtx_addr_varies_p)) | |
3740 | { | |
3741 | if (set_p) | |
e0082a72 | 3742 | SET_BIT (bmap[bb->index], indx); |
a13d4ebf | 3743 | else |
e0082a72 | 3744 | RESET_BIT (bmap[bb->index], indx); |
a13d4ebf AM |
3745 | break; |
3746 | } | |
3747 | list_entry = XEXP (list_entry, 1); | |
3748 | } | |
3749 | } | |
c4c81601 | 3750 | |
7506f491 DE |
3751 | x = XEXP (x, 0); |
3752 | goto repeat; | |
3753 | ||
3754 | case PC: | |
3755 | case CC0: /*FIXME*/ | |
3756 | case CONST: | |
3757 | case CONST_INT: | |
3758 | case CONST_DOUBLE: | |
69ef87e2 | 3759 | case CONST_VECTOR: |
7506f491 DE |
3760 | case SYMBOL_REF: |
3761 | case LABEL_REF: | |
3762 | case ADDR_VEC: | |
3763 | case ADDR_DIFF_VEC: | |
3764 | return; | |
3765 | ||
3766 | default: | |
3767 | break; | |
3768 | } | |
3769 | ||
c4c81601 | 3770 | for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--) |
7506f491 DE |
3771 | { |
3772 | if (fmt[i] == 'e') | |
3773 | { | |
7506f491 DE |
3774 | /* If we are about to do the last recursive call |
3775 | needed at this level, change it into iteration. | |
3776 | This function is called enough to be worth it. */ | |
3777 | if (i == 0) | |
3778 | { | |
c4c81601 | 3779 | x = XEXP (x, i); |
7506f491 DE |
3780 | goto repeat; |
3781 | } | |
c4c81601 RK |
3782 | |
3783 | compute_transp (XEXP (x, i), indx, bmap, set_p); | |
7506f491 DE |
3784 | } |
3785 | else if (fmt[i] == 'E') | |
c4c81601 RK |
3786 | for (j = 0; j < XVECLEN (x, i); j++) |
3787 | compute_transp (XVECEXP (x, i, j), indx, bmap, set_p); | |
7506f491 DE |
3788 | } |
3789 | } | |
3790 | ||
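The repeat:/goto shape above is the file's hand-rolled tail-call elimination, flagged in its comment as turning tail-recursion into iteration: the final operand is handled by rebinding X and jumping back to the top instead of recursing. A self-contained toy of the same shape (the list type and values are invented):

#include <stdio.h>

struct node { int value; struct node *next; };

static int
contains (struct node *n, int v)
{
 repeat:
  if (n == NULL)
    return 0;
  if (n->value == v)
    return 1;
  /* The trailing recursive call becomes iteration.  */
  n = n->next;
  goto repeat;
}

int
main (void)
{
  struct node c = { 3, NULL };
  struct node b = { 2, &c };
  struct node a = { 1, &b };

  printf ("%d\n", contains (&a, 3));   /* 1 */
  printf ("%d\n", contains (&a, 9));   /* 0 */
  return 0;
}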
7506f491 DE |
3791 | /* Top level routine to do the dataflow analysis needed by copy/const |
3792 | propagation. */ | |
3793 | ||
3794 | static void | |
1d088dee | 3795 | compute_cprop_data (void) |
7506f491 | 3796 | { |
02280659 | 3797 | compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table); |
ce724250 JL |
3798 | compute_available (cprop_pavloc, cprop_absaltered, |
3799 | cprop_avout, cprop_avin); | |
7506f491 DE |
3800 | } |
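Assuming the standard availability formulation that compute_available (defined elsewhere in this file) solves, the call above computes, per block b: cprop_avout[b] = cprop_pavloc[b] | (cprop_avin[b] & ~cprop_absaltered[b]), with cprop_avin[b] the intersection of cprop_avout over b's predecessors (empty for the entry block).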
3801 | \f | |
3802 | /* Copy/constant propagation. */ | |
3803 | ||
7506f491 DE |
3804 | /* Maximum number of register uses in an insn that we handle. */ |
3805 | #define MAX_USES 8 | |
3806 | ||
3807 | /* Table of uses found in an insn. | |
3808 | Allocated statically to avoid alloc/free complexity and overhead. */ | |
3809 | static struct reg_use reg_use_table[MAX_USES]; | |
3810 | ||
3811 | /* Index into `reg_use_table' while building it. */ | |
3812 | static int reg_use_count; | |
3813 | ||
c4c81601 RK |
3814 | /* Set up a list of register numbers used in INSN. The found uses are stored |
3815 | in `reg_use_table'. `reg_use_count' is initialized to zero before entry, | |
3816 | and contains the number of uses in the table upon exit. | |
7506f491 | 3817 | |
c4c81601 RK |
3818 | ??? If a register appears multiple times we will record it multiple times. |
3819 | This doesn't hurt anything but it will slow things down. */ | |
7506f491 DE |
3820 | |
3821 | static void | |
1d088dee | 3822 | find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED) |
7506f491 | 3823 | { |
c4c81601 | 3824 | int i, j; |
7506f491 | 3825 | enum rtx_code code; |
6f7d635c | 3826 | const char *fmt; |
9e71c818 | 3827 | rtx x = *xptr; |
7506f491 | 3828 | |
c4c81601 RK |
3829 | /* repeat is used to turn tail-recursion into iteration since GCC |
3830 | can't do it when there's no return value. */ | |
7506f491 | 3831 | repeat: |
7506f491 DE |
3832 | if (x == 0) |
3833 | return; | |
3834 | ||
3835 | code = GET_CODE (x); | |
9e71c818 | 3836 | if (REG_P (x)) |
7506f491 | 3837 | { |
7506f491 DE |
3838 | if (reg_use_count == MAX_USES) |
3839 | return; | |
c4c81601 | 3840 | |
7506f491 DE |
3841 | reg_use_table[reg_use_count].reg_rtx = x; |
3842 | reg_use_count++; | |
7506f491 DE |
3843 | } |
3844 | ||
3845 | /* Recursively scan the operands of this expression. */ | |
3846 | ||
c4c81601 | 3847 | for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--) |
7506f491 DE |
3848 | { |
3849 | if (fmt[i] == 'e') | |
3850 | { | |
3851 | /* If we are about to do the last recursive call | |
3852 | needed at this level, change it into iteration. | |
3853 | This function is called enough to be worth it. */ | |
3854 | if (i == 0) | |
3855 | { | |
3856 | x = XEXP (x, 0); | |
3857 | goto repeat; | |
3858 | } | |
c4c81601 | 3859 | |
9e71c818 | 3860 | find_used_regs (&XEXP (x, i), data); |
7506f491 DE |
3861 | } |
3862 | else if (fmt[i] == 'E') | |
c4c81601 | 3863 | for (j = 0; j < XVECLEN (x, i); j++) |
9e71c818 | 3864 | find_used_regs (&XVECEXP (x, i, j), data); |
7506f491 DE |
3865 | } |
3866 | } | |
3867 | ||
3868 | /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO. | |
cc2902df | 3869 | Returns nonzero if successful. */
7506f491 DE |
3870 | |
3871 | static int | |
1d088dee | 3872 | try_replace_reg (rtx from, rtx to, rtx insn) |
7506f491 | 3873 | { |
172890a2 | 3874 | rtx note = find_reg_equal_equiv_note (insn); |
fb0c0a12 | 3875 | rtx src = 0; |
172890a2 RK |
3876 | int success = 0; |
3877 | rtx set = single_set (insn); | |
833fc3ad | 3878 | |
2b773ee2 JH |
3879 | validate_replace_src_group (from, to, insn); |
3880 | if (num_changes_pending () && apply_change_group ()) | |
3881 | success = 1; | |
9e71c818 | 3882 | |
9feff114 JDA |
3883 | /* Try to simplify SET_SRC if we have substituted a constant. */ |
3884 | if (success && set && CONSTANT_P (to)) | |
3885 | { | |
3886 | src = simplify_rtx (SET_SRC (set)); | |
3887 | ||
3888 | if (src) | |
3889 | validate_change (insn, &SET_SRC (set), src, 0); | |
3890 | } | |
3891 | ||
ed8395a0 JZ |
3892 | /* If there is already a NOTE, update the expression in it with our |
3893 | replacement. */ | |
3894 | if (note != 0) | |
3895 | XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to); | |
3896 | ||
f305679f | 3897 | if (!success && set && reg_mentioned_p (from, SET_SRC (set))) |
833fc3ad | 3898 | { |
f305679f JH |
3899 | /* If above failed and this is a single set, try to simplify the source of |
3900 | the set given our substitution. We could perhaps try this for multiple | |
3901 | SETs, but it probably won't buy us anything. */ | |
172890a2 RK |
3902 | src = simplify_replace_rtx (SET_SRC (set), from, to); |
3903 | ||
9e71c818 JH |
3904 | if (!rtx_equal_p (src, SET_SRC (set)) |
3905 | && validate_change (insn, &SET_SRC (set), src, 0)) | |
172890a2 | 3906 | success = 1; |
833fc3ad | 3907 | |
bbd288a4 FS |
3908 | /* If we've failed to do replacement, have a single SET, don't already |
3909 | have a note, and have no special SET, add a REG_EQUAL note to not | |
3910 | lose information. */ | |
3911 | if (!success && note == 0 && set != 0 | |
3912 | && GET_CODE (XEXP (set, 0)) != ZERO_EXTRACT | |
3913 | && GET_CODE (XEXP (set, 0)) != SIGN_EXTRACT) | |
f305679f JH |
3914 | note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src)); |
3915 | } | |
e251e2a2 | 3916 | |
172890a2 RK |
3917 | /* The REG_EQUAL note may get simplified into a register.
3918 | We don't allow that; remove the note. This case ought
fbe5a4a6 | 3919 | not to happen, because previous code ought to have synthesized
172890a2 RK |
3920 | a reg-reg move, but be on the safe side. */
3921 | if (note && REG_P (XEXP (note, 0))) | |
3922 | remove_note (insn, note); | |
833fc3ad | 3923 | |
833fc3ad JH |
3924 | return success; |
3925 | } | |
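A worked example of the REG_EQUAL fallback above (register numbers and the constant are invented): propagating 4 for reg 70 into (set (reg 80) (plus (reg 70) (reg 71))) may yield an insn the target cannot match, so the change is rejected; rather than lose what we learned, a REG_EQUAL note (plus (const_int 4) (reg 71)) is attached so a later pass can still exploit it.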
c4c81601 RK |
3926 | |
3927 | /* Find a set of REGNOs that are available on entry to INSN's block. Returns
3928 | NULL if no such set is found. */
7506f491 DE |
3929 | |
3930 | static struct expr * | |
1d088dee | 3931 | find_avail_set (int regno, rtx insn) |
7506f491 | 3932 | { |
cafba495 BS |
3933 | /* SET1 contains the last set found that can be returned to the caller for |
3934 | use in a substitution. */ | |
3935 | struct expr *set1 = 0; | |
589005ff | 3936 | |
cafba495 BS |
3937 | /* Loops are not possible here. To get a loop we would need two sets |
3938 | available at the start of the block containing INSN. ie we would | |
3939 | need two sets like this available at the start of the block: | |
3940 | ||
3941 | (set (reg X) (reg Y)) | |
3942 | (set (reg Y) (reg X)) | |
3943 | ||
3944 | This cannot happen, since the set of (reg Y) would have killed the
3945 | set of (reg X), making it unavailable at the start of this block. */
3946 | while (1) | |
8e42ace1 | 3947 | { |
cafba495 | 3948 | rtx src; |
ceda50e9 | 3949 | struct expr *set = lookup_set (regno, &set_hash_table); |
cafba495 BS |
3950 | |
3951 | /* Find a set that is available at the start of the block | |
3952 | which contains INSN. */ | |
3953 | while (set) | |
3954 | { | |
3955 | if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index)) | |
3956 | break; | |
3957 | set = next_set (regno, set); | |
3958 | } | |
7506f491 | 3959 | |
cafba495 BS |
3960 | /* If no available set was found we've reached the end of the |
3961 | (possibly empty) copy chain. */ | |
3962 | if (set == 0) | |
589005ff | 3963 | break; |
cafba495 BS |
3964 | |
3965 | if (GET_CODE (set->expr) != SET) | |
3966 | abort (); | |
3967 | ||
3968 | src = SET_SRC (set->expr); | |
3969 | ||
3970 | /* We know the set is available. | |
3971 | Now check that SRC is ANTLOC (i.e. none of the source operands | |
589005ff | 3972 | have changed since the start of the block). |
cafba495 BS |
3973 | |
3974 | If the source operand changed, we may still use it for the next | |
3975 | iteration of this loop, but we may not use it for substitutions. */ | |
c4c81601 | 3976 | |
6b2d1c9e | 3977 | if (gcse_constant_p (src) || oprs_not_set_p (src, insn)) |
cafba495 BS |
3978 | set1 = set; |
3979 | ||
3980 | /* If the source of the set is anything except a register, then | |
3981 | we have reached the end of the copy chain. */ | |
3982 | if (GET_CODE (src) != REG) | |
7506f491 | 3983 | break; |
7506f491 | 3984 | |
cafba495 BS |
3985 | /* Follow the copy chain, ie start another iteration of the loop |
3986 | and see if we have an available copy into SRC. */ | |
3987 | regno = REGNO (src); | |
8e42ace1 | 3988 | } |
cafba495 BS |
3989 | |
3990 | /* SET1 holds the last set that was available and anticipatable at | |
3991 | INSN. */ | |
3992 | return set1; | |
7506f491 DE |
3993 | } |
3994 | ||
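A toy model of the copy-chain walk above (the array and register numbers are invented; these are not gcse.c data structures): each known copy maps a register to its source, and the loop keeps following sources, the way find_avail_set keeps reassigning REGNO, until the chain ends.

#include <stdio.h>

#define NO_COPY (-1)

int
main (void)
{
  int copy_src[16];         /* copy_src[r] = register that r was copied from */
  int r;

  for (r = 0; r < 16; r++)
    copy_src[r] = NO_COPY;
  copy_src[9] = 8;          /* models an available (set (reg 9) (reg 8)) */
  copy_src[10] = 9;         /* models an available (set (reg 10) (reg 9)) */

  /* Follow the chain from reg 10, as the while (1) loop above follows
     SET_SRC from set to set.  */
  for (r = 10; copy_src[r] != NO_COPY; r = copy_src[r])
    ;
  printf ("reg 10 ultimately copies reg %d\n", r);   /* prints 8 */
  return 0;
}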
abd535b6 | 3995 | /* Subroutine of cprop_insn that tries to propagate constants into |
0e3f0221 | 3996 | JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL |
fbe5a4a6 | 3997 | it is the instruction that immediately precedes JUMP, and must be a |
818b6b7f | 3998 | single SET of a register. FROM is what we will try to replace, |
0e3f0221 | 3999 | SRC is the constant we will try to substitute for it. Returns nonzero |
589005ff | 4000 | if a change was made. */ |
c4c81601 | 4001 | |
abd535b6 | 4002 | static int |
1d088dee | 4003 | cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src) |
abd535b6 | 4004 | { |
bc6688b4 | 4005 | rtx new, set_src, note_src; |
0e3f0221 | 4006 | rtx set = pc_set (jump); |
bc6688b4 | 4007 | rtx note = find_reg_equal_equiv_note (jump); |
0e3f0221 | 4008 | |
bc6688b4 RS |
4009 | if (note) |
4010 | { | |
4011 | note_src = XEXP (note, 0); | |
4012 | if (GET_CODE (note_src) == EXPR_LIST) | |
4013 | note_src = NULL_RTX; | |
4014 | } | |
4015 | else note_src = NULL_RTX; | |
4016 | ||
4017 | /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */ | |
4018 | set_src = note_src ? note_src : SET_SRC (set); | |
4019 | ||
4020 | /* First substitute the SETCC condition into the JUMP instruction, | |
4021 | then substitute that given values into this expanded JUMP. */ | |
4022 | if (setcc != NULL_RTX | |
48ddd46c JH |
4023 | && !modified_between_p (from, setcc, jump) |
4024 | && !modified_between_p (src, setcc, jump)) | |
b2f02503 | 4025 | { |
bc6688b4 | 4026 | rtx setcc_src; |
b2f02503 | 4027 | rtx setcc_set = single_set (setcc); |
bc6688b4 RS |
4028 | rtx setcc_note = find_reg_equal_equiv_note (setcc); |
4029 | setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST) | |
4030 | ? XEXP (setcc_note, 0) : SET_SRC (setcc_set); | |
4031 | set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set), | |
4032 | setcc_src); | |
b2f02503 | 4033 | } |
0e3f0221 | 4034 | else |
bc6688b4 | 4035 | setcc = NULL_RTX; |
0e3f0221 | 4036 | |
bc6688b4 | 4037 | new = simplify_replace_rtx (set_src, from, src); |
abd535b6 | 4038 | |
bc6688b4 RS |
4039 | /* If no simplification can be made, then try the next register. */ |
4040 | if (rtx_equal_p (new, SET_SRC (set))) | |
9e48c409 | 4041 | return 0; |
589005ff | 4042 | |
7d5ab30e | 4043 | /* If this is now a no-op delete it, otherwise this must be a valid insn. */ |
172890a2 | 4044 | if (new == pc_rtx) |
0e3f0221 | 4045 | delete_insn (jump); |
7d5ab30e | 4046 | else |
abd535b6 | 4047 | { |
48ddd46c JH |
4048 | /* Ensure the value computed inside the jump insn is equivalent
4049 | to the one computed by setcc. */
bc6688b4 | 4050 | if (setcc && modified_in_p (new, setcc)) |
48ddd46c | 4051 | return 0; |
0e3f0221 | 4052 | if (! validate_change (jump, &SET_SRC (set), new, 0)) |
bc6688b4 RS |
4053 | { |
4054 | /* When (some) constants are not valid in a comparison, and there | |
4055 | are two registers to be replaced by constants before the entire | |
4056 | comparison can be folded into a constant, we need to keep | |
4057 | intermediate information in REG_EQUAL notes. For targets with | |
4058 | separate compare insns, such notes are added by try_replace_reg. | |
4059 | When we have a combined compare-and-branch instruction, however, | |
4060 | we need to attach a note to the branch itself to make this | |
4061 | optimization work. */ | |
4062 | ||
4063 | if (!rtx_equal_p (new, note_src)) | |
4064 | set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new)); | |
4065 | return 0; | |
4066 | } | |
4067 | ||
4068 | /* Remove REG_EQUAL note after simplification. */ | |
4069 | if (note_src) | |
4070 | remove_note (jump, note); | |
abd535b6 | 4071 | |
7d5ab30e JH |
4072 | /* If this has turned into an unconditional jump, |
4073 | then put a barrier after it so that the unreachable | |
4074 | code will be deleted. */ | |
4075 | if (GET_CODE (SET_SRC (set)) == LABEL_REF) | |
0e3f0221 | 4076 | emit_barrier_after (jump); |
7d5ab30e | 4077 | } |
abd535b6 | 4078 | |
0e3f0221 RS |
4079 | #ifdef HAVE_cc0 |
4080 | /* Delete the cc0 setter. */ | |
818b6b7f | 4081 | if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc)))) |
0e3f0221 RS |
4082 | delete_insn (setcc); |
4083 | #endif | |
4084 | ||
172890a2 | 4085 | run_jump_opt_after_gcse = 1; |
c4c81601 | 4086 | |
172890a2 RK |
4087 | const_prop_count++; |
4088 | if (gcse_file != NULL) | |
4089 | { | |
4090 | fprintf (gcse_file, | |
818b6b7f | 4091 | "CONST-PROP: Replacing reg %d in jump_insn %d with constant ", |
0e3f0221 | 4092 | REGNO (from), INSN_UID (jump)); |
172890a2 RK |
4093 | print_rtl (gcse_file, src); |
4094 | fprintf (gcse_file, "\n"); | |
abd535b6 | 4095 | } |
0005550b | 4096 | purge_dead_edges (bb); |
172890a2 RK |
4097 | |
4098 | return 1; | |
abd535b6 BS |
4099 | } |
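A worked example of the transformation above (the RTL is invented): with 0 known for reg 70, (set (pc) (if_then_else (eq (reg 70) (const_int 0)) (label_ref 23) (pc))) simplifies to (set (pc) (label_ref 23)); validate_change installs the now-unconditional jump and the LABEL_REF case above adds a barrier so the dead fall-through code can be removed. Had the condition folded the other way, the whole SET would simplify to pc_rtx and the jump itself would be deleted.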
4100 | ||
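/* Subroutine of cprop_insn that tries to substitute TO for FROM in INSN.
   If ALTER_JUMPS is set, it also handles a reg or cc0 setter immediately
   followed by a conditional jump, and conditional jumps themselves.
   Returns nonzero if a change was made. */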
ae860ff7 | 4101 | static bool |
1d088dee | 4102 | constprop_register (rtx insn, rtx from, rtx to, int alter_jumps) |
ae860ff7 JH |
4103 | { |
4104 | rtx sset; | |
4105 | ||
4106 | /* Check for reg or cc0 setting instructions followed by | |
4107 | conditional branch instructions first. */ | |
4108 | if (alter_jumps | |
4109 | && (sset = single_set (insn)) != NULL | |
244d05fb | 4110 | && NEXT_INSN (insn) |
ae860ff7 JH |
4111 | && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn))) |
4112 | { | |
4113 | rtx dest = SET_DEST (sset); | |
4114 | if ((REG_P (dest) || CC0_P (dest)) | |
4115 | && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to)) | |
4116 | return 1; | |
4117 | } | |
4118 | ||
4119 | /* Handle normal insns next. */ | |
4120 | if (GET_CODE (insn) == INSN | |
4121 | && try_replace_reg (from, to, insn)) | |
4122 | return 1; | |
4123 | ||
4124 | /* Try to propagate a CONST_INT into a conditional jump. | |
4125 | We're pretty specific about what we will handle in this
4126 | code; we can extend it as necessary over time.
4127 | ||
4128 | Right now the insn in question must look like | |
4129 | (set (pc) (if_then_else ...)) */ | |
4130 | else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn)) | |
4131 | return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to); | |
4132 | return 0; | |
4133 | } | |
4134 | ||
7506f491 | 4135 | /* Perform constant and copy propagation on INSN. |
cc2902df | 4136 | The result is nonzero if a change was made. */ |
7506f491 DE |
4137 | |
4138 | static int | |
1d088dee | 4139 | cprop_insn (rtx insn, int alter_jumps) |
7506f491 DE |
4140 | { |
4141 | struct reg_use *reg_used; | |
4142 | int changed = 0; | |
833fc3ad | 4143 | rtx note; |
7506f491 | 4144 | |
9e71c818 | 4145 | if (!INSN_P (insn)) |
7506f491 DE |
4146 | return 0; |
4147 | ||
4148 | reg_use_count = 0; | |
9e71c818 | 4149 | note_uses (&PATTERN (insn), find_used_regs, NULL); |
589005ff | 4150 | |
172890a2 | 4151 | note = find_reg_equal_equiv_note (insn); |
833fc3ad | 4152 | |
dc297297 | 4153 | /* We may win even when propagating constants into notes. */ |
833fc3ad | 4154 | if (note) |
9e71c818 | 4155 | find_used_regs (&XEXP (note, 0), NULL); |
7506f491 | 4156 | |
c4c81601 RK |
4157 | for (reg_used = ®_use_table[0]; reg_use_count > 0; |
4158 | reg_used++, reg_use_count--) | |
7506f491 | 4159 | { |
770ae6cc | 4160 | unsigned int regno = REGNO (reg_used->reg_rtx); |
7506f491 DE |
4161 | rtx pat, src; |
4162 | struct expr *set; | |
7506f491 DE |
4163 | |
4164 | /* Ignore registers created by GCSE. | |
dc297297 | 4165 | We do this because ... */ |
7506f491 DE |
4166 | if (regno >= max_gcse_regno) |
4167 | continue; | |
4168 | ||
4169 | /* If the register has already been set in this block, there's | |
4170 | nothing we can do. */ | |
4171 | if (! oprs_not_set_p (reg_used->reg_rtx, insn)) | |
4172 | continue; | |
4173 | ||
4174 | /* Find an assignment that sets reg_used and is available | |
4175 | at the start of the block. */ | |
4176 | set = find_avail_set (regno, insn); | |
4177 | if (! set) | |
4178 | continue; | |
589005ff | 4179 | |
7506f491 DE |
4180 | pat = set->expr; |
4181 | /* ??? We might be able to handle PARALLELs. Later. */ | |
4182 | if (GET_CODE (pat) != SET) | |
4183 | abort (); | |
c4c81601 | 4184 | |
7506f491 DE |
4185 | src = SET_SRC (pat); |
4186 | ||
e78d9500 | 4187 | /* Constant propagation. */ |
6b2d1c9e | 4188 | if (gcse_constant_p (src)) |
7506f491 | 4189 | { |
ae860ff7 | 4190 | if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps)) |
7506f491 DE |
4191 | { |
4192 | changed = 1; | |
4193 | const_prop_count++; | |
4194 | if (gcse_file != NULL) | |
4195 | { | |
ae860ff7 JH |
4196 | fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno); |
4197 | fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn)); | |
e78d9500 | 4198 | print_rtl (gcse_file, src); |
7506f491 DE |
4199 | fprintf (gcse_file, "\n"); |
4200 | } | |
bc6688b4 RS |
4201 | if (INSN_DELETED_P (insn)) |
4202 | return 1; | |
7506f491 DE |
4203 | } |
4204 | } | |
4205 | else if (GET_CODE (src) == REG | |
4206 | && REGNO (src) >= FIRST_PSEUDO_REGISTER | |
4207 | && REGNO (src) != regno) | |
4208 | { | |
cafba495 | 4209 | if (try_replace_reg (reg_used->reg_rtx, src, insn)) |
7506f491 | 4210 | { |
cafba495 BS |
4211 | changed = 1; |
4212 | copy_prop_count++; | |
4213 | if (gcse_file != NULL) | |
7506f491 | 4214 | { |
ae860ff7 | 4215 | fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d", |
c4c81601 RK |
4216 | regno, INSN_UID (insn)); |
4217 | fprintf (gcse_file, " with reg %d\n", REGNO (src)); | |
7506f491 | 4218 | } |
cafba495 BS |
4219 | |
4220 | /* The original insn setting reg_used may or may not now be | |
4221 | deletable. We leave the deletion to flow. */ | |
4222 | /* FIXME: If it turns out that the insn isn't deletable, | |
4223 | then we may have unnecessarily extended register lifetimes | |
4224 | and made things worse. */ | |
7506f491 DE |
4225 | } |
4226 | } | |
4227 | } | |
4228 | ||
4229 | return changed; | |
4230 | } | |
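/* A worked example (register numbers hypothetical): if the set
   (set (reg 70) (const_int 4)) is available at the start of the block,
   then for the insn (set (reg 71) (plus (reg 70) (reg 72))) the call
   to constprop_register rewrites the use, yielding
   (set (reg 71) (plus (reg 72) (const_int 4))) after canonicalization
   and counting one constant propagation; a pseudo-register source
   would instead be handled as a copy propagation via try_replace_reg.  */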
4231 | ||
710ee3ed RH |
4232 | /* Like find_used_regs, but avoid recording uses that appear in |
4233 | input-output contexts such as zero_extract or pre_dec. This | |
4234 | restricts the cases we consider to those for which local cprop | |
4235 | can legitimately make replacements. */ | |
4236 | ||
4237 | static void | |
1d088dee | 4238 | local_cprop_find_used_regs (rtx *xptr, void *data) |
710ee3ed RH |
4239 | { |
4240 | rtx x = *xptr; | |
4241 | ||
4242 | if (x == 0) | |
4243 | return; | |
4244 | ||
4245 | switch (GET_CODE (x)) | |
4246 | { | |
4247 | case ZERO_EXTRACT: | |
4248 | case SIGN_EXTRACT: | |
4249 | case STRICT_LOW_PART: | |
4250 | return; | |
4251 | ||
4252 | case PRE_DEC: | |
4253 | case PRE_INC: | |
4254 | case POST_DEC: | |
4255 | case POST_INC: | |
4256 | case PRE_MODIFY: | |
4257 | case POST_MODIFY: | |
4258 | /* Can only legitimately appear this early in the context of | |
4259 | stack pushes for function arguments, but handle all of the | |
4260 | codes nonetheless. */ | |
4261 | return; | |
4262 | ||
4263 | case SUBREG: | |
4264 | /* Setting a subreg of a register larger than word_mode leaves | |
4265 | the non-written words unchanged. */ | |
4266 | if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD) | |
4267 | return; | |
4268 | break; | |
4269 | ||
4270 | default: | |
4271 | break; | |
4272 | } | |
4273 | ||
4274 | find_used_regs (xptr, data); | |
4275 | } | |
1d088dee | 4276 | |
8ba46434 R |
4277 | /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall; |
4278 | their REG_EQUAL notes need updating. */ | |
e197b6fc | 4279 | |
ae860ff7 | 4280 | static bool |
1d088dee | 4281 | do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp) |
ae860ff7 JH |
4282 | { |
4283 | rtx newreg = NULL, newcnst = NULL; | |
4284 | ||
e197b6fc RH |
4285 | /* Rule out USE instructions and ASM statements as we don't want to |
4286 | change the hard registers mentioned. */ | |
ae860ff7 JH |
4287 | if (GET_CODE (x) == REG |
4288 | && (REGNO (x) >= FIRST_PSEUDO_REGISTER | |
e197b6fc RH |
4289 | || (GET_CODE (PATTERN (insn)) != USE |
4290 | && asm_noperands (PATTERN (insn)) < 0))) | |
ae860ff7 JH |
4291 | { |
4292 | cselib_val *val = cselib_lookup (x, GET_MODE (x), 0); | |
4293 | struct elt_loc_list *l; | |
4294 | ||
4295 | if (!val) | |
4296 | return false; | |
4297 | for (l = val->locs; l; l = l->next) | |
4298 | { | |
4299 | rtx this_rtx = l->loc; | |
46690369 JH |
4300 | rtx note; |
4301 | ||
9635cfad JH |
4302 | if (l->in_libcall) |
4303 | continue; | |
4304 | ||
6b2d1c9e | 4305 | if (gcse_constant_p (this_rtx)) |
ae860ff7 | 4306 | newcnst = this_rtx; |
46690369 JH |
4307 | if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER |
4308 | /* Don't copy propagate if it has an attached REG_EQUIV note.
4309 | At this point only function parameters should have
4310 | REG_EQUIV notes, and if the argument slot is used somewhere
4311 | explicitly, it means the address of the parameter has been taken,
4312 | so we should not extend the lifetime of the pseudo. */
4313 | && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX)) | |
4314 | || GET_CODE (XEXP (note, 0)) != MEM)) | |
ae860ff7 JH |
4315 | newreg = this_rtx; |
4316 | } | |
4317 | if (newcnst && constprop_register (insn, x, newcnst, alter_jumps)) | |
4318 | { | |
8ba46434 | 4319 | /* If we find a case where we can't fix the retval REG_EQUAL notes
fbe5a4a6 | 4320 | to match the new register, we either have to abandon this replacement
8ba46434 R |
4321 | or fix delete_trivially_dead_insns to preserve the setting insn,
4322 | or make it delete the REG_EQUAL note, and fix up all passes that
4323 | require the REG_EQUAL note there. */
4324 | if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp)) | |
4325 | abort (); | |
ae860ff7 JH |
4326 | if (gcse_file != NULL) |
4327 | { | |
4328 | fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ", | |
4329 | REGNO (x)); | |
4330 | fprintf (gcse_file, "insn %d with constant ", | |
4331 | INSN_UID (insn)); | |
4332 | print_rtl (gcse_file, newcnst); | |
4333 | fprintf (gcse_file, "\n"); | |
4334 | } | |
4335 | const_prop_count++; | |
4336 | return true; | |
4337 | } | |
4338 | else if (newreg && newreg != x && try_replace_reg (x, newreg, insn)) | |
4339 | { | |
8ba46434 | 4340 | adjust_libcall_notes (x, newreg, insn, libcall_sp); |
ae860ff7 JH |
4341 | if (gcse_file != NULL) |
4342 | { | |
4343 | fprintf (gcse_file, | |
4344 | "LOCAL COPY-PROP: Replacing reg %d in insn %d", | |
4345 | REGNO (x), INSN_UID (insn)); | |
4346 | fprintf (gcse_file, " with reg %d\n", REGNO (newreg)); | |
4347 | } | |
4348 | copy_prop_count++; | |
4349 | return true; | |
4350 | } | |
4351 | } | |
4352 | return false; | |
4353 | } | |
4354 | ||
8ba46434 R |
4355 | /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall; |
4356 | their REG_EQUAL notes need updating to reflect that OLDREG has been | |
f4e3e618 RH |
4357 | replaced with NEWVAL in INSN. Return true if all substitutions could |
4358 | be made. */ | |
8ba46434 | 4359 | static bool |
1d088dee | 4360 | adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp) |
8ba46434 | 4361 | { |
f4e3e618 | 4362 | rtx end; |
8ba46434 R |
4363 | |
4364 | while ((end = *libcall_sp++)) | |
4365 | { | |
f4e3e618 | 4366 | rtx note = find_reg_equal_equiv_note (end); |
8ba46434 R |
4367 | |
4368 | if (! note) | |
4369 | continue; | |
4370 | ||
4371 | if (REG_P (newval)) | |
4372 | { | |
4373 | if (reg_set_between_p (newval, PREV_INSN (insn), end)) | |
4374 | { | |
4375 | do | |
4376 | { | |
4377 | note = find_reg_equal_equiv_note (end); | |
4378 | if (! note) | |
4379 | continue; | |
4380 | if (reg_mentioned_p (newval, XEXP (note, 0))) | |
4381 | return false; | |
4382 | } | |
4383 | while ((end = *libcall_sp++)); | |
4384 | return true; | |
4385 | } | |
4386 | } | |
4387 | XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval); | |
4388 | insn = end; | |
4389 | } | |
4390 | return true; | |
4391 | } | |
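/* Hypothetical illustration: suppose the REG_RETVAL insn of an
   enclosing libcall carries the note
   (expr_list:REG_EQUAL (mult:SI (reg 65) (const_int 3))) and local
   cprop replaces (reg 65) with (const_int 2).  replace_rtx then turns
   the note into (mult:SI (const_int 2) (const_int 3)), keeping the
   recorded equivalence consistent.  If instead NEWVAL were a register
   set again before the libcall end, a note still mentioning it would
   make us give up and return false.  */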
4392 | ||
4393 | #define MAX_NESTED_LIBCALLS 9 | |
4394 | ||
ae860ff7 | 4395 | static void |
1d088dee | 4396 | local_cprop_pass (int alter_jumps) |
ae860ff7 JH |
4397 | { |
4398 | rtx insn; | |
4399 | struct reg_use *reg_used; | |
8ba46434 | 4400 | rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp; |
1649d92f | 4401 | bool changed = false; |
ae860ff7 JH |
4402 | |
4403 | cselib_init (); | |
8ba46434 R |
4404 | libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS]; |
4405 | *libcall_sp = 0; | |
ae860ff7 JH |
4406 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) |
4407 | { | |
4408 | if (INSN_P (insn)) | |
4409 | { | |
8ba46434 | 4410 | rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX); |
ae860ff7 | 4411 | |
8ba46434 R |
4412 | if (note) |
4413 | { | |
4414 | if (libcall_sp == libcall_stack) | |
4415 | abort (); | |
4416 | *--libcall_sp = XEXP (note, 0); | |
4417 | } | |
4418 | note = find_reg_note (insn, REG_RETVAL, NULL_RTX); | |
4419 | if (note) | |
4420 | libcall_sp++; | |
4421 | note = find_reg_equal_equiv_note (insn); | |
ae860ff7 JH |
4422 | do |
4423 | { | |
4424 | reg_use_count = 0; | |
710ee3ed | 4425 | note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL); |
ae860ff7 | 4426 | if (note) |
710ee3ed | 4427 | local_cprop_find_used_regs (&XEXP (note, 0), NULL); |
ae860ff7 JH |
4428 | |
4429 | for (reg_used = ®_use_table[0]; reg_use_count > 0; | |
4430 | reg_used++, reg_use_count--) | |
8ba46434 R |
4431 | if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps, |
4432 | libcall_sp)) | |
1649d92f JH |
4433 | { |
4434 | changed = true; | |
4435 | break; | |
4436 | } | |
bc6688b4 RS |
4437 | if (INSN_DELETED_P (insn)) |
4438 | break; | |
ae860ff7 JH |
4439 | } |
4440 | while (reg_use_count); | |
4441 | } | |
4442 | cselib_process_insn (insn); | |
4443 | } | |
4444 | cselib_finish (); | |
1649d92f JH |
4445 | /* Global analysis may get into infinite loops for unreachable blocks. */ |
4446 | if (changed && alter_jumps) | |
5f0bea72 JH |
4447 | { |
4448 | delete_unreachable_blocks (); | |
4449 | free_reg_set_mem (); | |
4450 | alloc_reg_set_mem (max_reg_num ()); | |
4451 | compute_sets (get_insns ()); | |
4452 | } | |
ae860ff7 JH |
4453 | } |
4454 | ||
c4c81601 | 4455 | /* Forward propagate copies. This includes copies and constants. Return |
cc2902df | 4456 | nonzero if a change was made. */ |
7506f491 DE |
4457 | |
4458 | static int | |
1d088dee | 4459 | cprop (int alter_jumps) |
7506f491 | 4460 | { |
e0082a72 ZD |
4461 | int changed; |
4462 | basic_block bb; | |
7506f491 DE |
4463 | rtx insn; |
4464 | ||
4465 | /* Note we start at block 1. */ | |
e0082a72 ZD |
4466 | if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR) |
4467 | { | |
4468 | if (gcse_file != NULL) | |
4469 | fprintf (gcse_file, "\n"); | |
4470 | return 0; | |
4471 | } | |
7506f491 DE |
4472 | |
4473 | changed = 0; | |
e0082a72 | 4474 | FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb) |
7506f491 DE |
4475 | { |
4476 | /* Reset tables used to keep track of what's still valid [since the | |
4477 | start of the block]. */ | |
4478 | reset_opr_set_tables (); | |
4479 | ||
e0082a72 ZD |
4480 | for (insn = bb->head; |
4481 | insn != NULL && insn != NEXT_INSN (bb->end); | |
7506f491 | 4482 | insn = NEXT_INSN (insn)) |
172890a2 RK |
4483 | if (INSN_P (insn)) |
4484 | { | |
ae860ff7 | 4485 | changed |= cprop_insn (insn, alter_jumps); |
7506f491 | 4486 | |
172890a2 RK |
4487 | /* Keep track of everything modified by this insn. */ |
4488 | /* ??? Need to be careful w.r.t. mods done to INSN. Don't | |
4489 | call mark_oprs_set if we turned the insn into a NOTE. */ | |
4490 | if (GET_CODE (insn) != NOTE) | |
4491 | mark_oprs_set (insn); | |
8e42ace1 | 4492 | } |
7506f491 DE |
4493 | } |
4494 | ||
4495 | if (gcse_file != NULL) | |
4496 | fprintf (gcse_file, "\n"); | |
4497 | ||
4498 | return changed; | |
4499 | } | |
4500 | ||
fbef91d8 RS |
4501 | /* Similar to get_condition, only the resulting condition must be |
4502 | valid at JUMP, instead of at EARLIEST. | |
4503 | ||
4504 | This differs from noce_get_condition in ifcvt.c in that we prefer not to | |
4505 | settle for the condition variable in the jump instruction being integral. | |
4506 | We prefer to be able to record the value of a user variable, rather than | |
4507 | the value of a temporary used in a condition. This could be solved by | |
4508 | recording the value of *every* register scanned by canonicalize_condition,
4509 | but this would require some code reorganization. */ | |
4510 | ||
2fa4a849 | 4511 | rtx |
1d088dee | 4512 | fis_get_condition (rtx jump) |
fbef91d8 RS |
4513 | { |
4514 | rtx cond, set, tmp, insn, earliest; | |
4515 | bool reverse; | |
4516 | ||
4517 | if (! any_condjump_p (jump)) | |
4518 | return NULL_RTX; | |
4519 | ||
4520 | set = pc_set (jump); | |
4521 | cond = XEXP (SET_SRC (set), 0); | |
4522 | ||
4523 | /* If this branches to JUMP_LABEL when the condition is false, | |
4524 | reverse the condition. */ | |
4525 | reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF | |
4526 | && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump)); | |
4527 | ||
4528 | /* Use canonicalize_condition to do the dirty work of manipulating | |
4529 | MODE_CC values and COMPARE rtx codes. */ | |
ec6ec6aa ZD |
4530 | tmp = canonicalize_condition (jump, cond, reverse, &earliest, NULL_RTX, |
4531 | false); | |
fbef91d8 RS |
4532 | if (!tmp) |
4533 | return NULL_RTX; | |
4534 | ||
4535 | /* Verify that the given condition is valid at JUMP by virtue of not | |
4536 | having been modified since EARLIEST. */ | |
4537 | for (insn = earliest; insn != jump; insn = NEXT_INSN (insn)) | |
4538 | if (INSN_P (insn) && modified_in_p (tmp, insn)) | |
4539 | break; | |
4540 | if (insn == jump) | |
4541 | return tmp; | |
4542 | ||
4543 | /* The condition was modified. See if we can get a partial result | |
4544 | that doesn't follow all the reversals. Perhaps combine can fold | |
4545 | them together later. */ | |
4546 | tmp = XEXP (tmp, 0); | |
4547 | if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT) | |
4548 | return NULL_RTX; | |
ec6ec6aa ZD |
4549 | tmp = canonicalize_condition (jump, cond, reverse, &earliest, tmp, |
4550 | false); | |
fbef91d8 RS |
4551 | if (!tmp) |
4552 | return NULL_RTX; | |
4553 | ||
4554 | /* For sanity's sake, re-validate the new result. */ | |
4555 | for (insn = earliest; insn != jump; insn = NEXT_INSN (insn)) | |
4556 | if (INSN_P (insn) && modified_in_p (tmp, insn)) | |
4557 | return NULL_RTX; | |
4558 | ||
4559 | return tmp; | |
4560 | } | |
4561 | ||
4562 | /* Find the implicit sets of a function. An "implicit set" is a constraint | |
4563 | on the value of a variable, implied by a conditional jump. For example, | |
4564 | following "if (x == 2)", the then branch may be optimized as though the | |
4565 | conditional performed an "explicit set", in this example, "x = 2". This | |
4566 | function records the set patterns that are implicit at the start of each | |
4567 | basic block. */ | |
4568 | ||
4569 | static void | |
1d088dee | 4570 | find_implicit_sets (void) |
fbef91d8 RS |
4571 | { |
4572 | basic_block bb, dest; | |
4573 | unsigned int count; | |
4574 | rtx cond, new; | |
4575 | ||
4576 | count = 0; | |
4577 | FOR_EACH_BB (bb) | |
a98ebe2e | 4578 | /* Check for more than one successor. */ |
fbef91d8 RS |
4579 | if (bb->succ && bb->succ->succ_next) |
4580 | { | |
4581 | cond = fis_get_condition (bb->end); | |
4582 | ||
4583 | if (cond | |
4584 | && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE) | |
4585 | && GET_CODE (XEXP (cond, 0)) == REG | |
4586 | && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER | |
6b2d1c9e | 4587 | && gcse_constant_p (XEXP (cond, 1))) |
fbef91d8 RS |
4588 | { |
4589 | dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest | |
4590 | : FALLTHRU_EDGE (bb)->dest; | |
4591 | ||
4592 | if (dest && ! dest->pred->pred_next | |
4593 | && dest != EXIT_BLOCK_PTR) | |
4594 | { | |
4595 | new = gen_rtx_SET (VOIDmode, XEXP (cond, 0), | |
4596 | XEXP (cond, 1)); | |
4597 | implicit_sets[dest->index] = new; | |
4598 | if (gcse_file) | |
4599 | { | |
4600 | fprintf (gcse_file, "Implicit set of reg %d in ",
4601 | REGNO (XEXP (cond, 0)));
4602 | fprintf (gcse_file, "basic block %d\n", dest->index);
4603 | } | |
4604 | count++; | |
4605 | } | |
4606 | } | |
4607 | } | |
4608 | ||
4609 | if (gcse_file) | |
4610 | fprintf (gcse_file, "Found %d implicit sets\n", count); | |
4611 | } | |
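/* Sketch of the recorded pattern (register number invented): for a
   block ending in

     (set (pc) (if_then_else (eq (reg 58) (const_int 2))
                             (label_ref L) (pc)))

   the single-predecessor destination of the EQ branch gets
   implicit_sets[bb->index] = (set (reg 58) (const_int 2)), which the
   hash table construction then treats like an ordinary set reaching
   the top of that block.  */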
4612 | ||
7506f491 | 4613 | /* Perform one copy/constant propagation pass. |
a0134312 RS |
4614 | PASS is the pass count. If CPROP_JUMPS is true, perform constant |
4615 | propagation into conditional jumps. If BYPASS_JUMPS is true, | |
4616 | perform conditional jump bypassing optimizations. */ | |
7506f491 DE |
4617 | |
4618 | static int | |
1d088dee | 4619 | one_cprop_pass (int pass, int cprop_jumps, int bypass_jumps) |
7506f491 DE |
4620 | { |
4621 | int changed = 0; | |
4622 | ||
4623 | const_prop_count = 0; | |
4624 | copy_prop_count = 0; | |
4625 | ||
a0134312 | 4626 | local_cprop_pass (cprop_jumps); |
ae860ff7 | 4627 | |
fbef91d8 | 4628 | /* Determine implicit sets. */ |
703ad42b | 4629 | implicit_sets = xcalloc (last_basic_block, sizeof (rtx)); |
fbef91d8 RS |
4630 | find_implicit_sets (); |
4631 | ||
02280659 ZD |
4632 | alloc_hash_table (max_cuid, &set_hash_table, 1); |
4633 | compute_hash_table (&set_hash_table); | |
fbef91d8 RS |
4634 | |
4635 | /* Free implicit_sets before peak usage. */ | |
4636 | free (implicit_sets); | |
4637 | implicit_sets = NULL; | |
4638 | ||
7506f491 | 4639 | if (gcse_file) |
02280659 ZD |
4640 | dump_hash_table (gcse_file, "SET", &set_hash_table); |
4641 | if (set_hash_table.n_elems > 0) | |
7506f491 | 4642 | { |
02280659 | 4643 | alloc_cprop_mem (last_basic_block, set_hash_table.n_elems); |
7506f491 | 4644 | compute_cprop_data (); |
a0134312 RS |
4645 | changed = cprop (cprop_jumps); |
4646 | if (bypass_jumps) | |
0e3f0221 | 4647 | changed |= bypass_conditional_jumps (); |
7506f491 DE |
4648 | free_cprop_mem (); |
4649 | } | |
c4c81601 | 4650 | |
02280659 | 4651 | free_hash_table (&set_hash_table); |
7506f491 DE |
4652 | |
4653 | if (gcse_file) | |
4654 | { | |
c4c81601 RK |
4655 | fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ", |
4656 | current_function_name, pass, bytes_used); | |
4657 | fprintf (gcse_file, "%d const props, %d copy props\n\n", | |
4658 | const_prop_count, copy_prop_count); | |
7506f491 | 4659 | } |
1649d92f JH |
4660 | /* Global analysis may get into infinite loops for unreachable blocks. */ |
4661 | if (changed && cprop_jumps) | |
4662 | delete_unreachable_blocks (); | |
7506f491 DE |
4663 | |
4664 | return changed; | |
4665 | } | |
4666 | \f | |
0e3f0221 RS |
4667 | /* Bypass conditional jumps. */ |
4668 | ||
7821bfc7 RS |
4669 | /* The value of last_basic_block at the beginning of the jump_bypass |
4670 | pass. The use of redirect_edge_and_branch_force may introduce new | |
4671 | basic blocks, but the data flow analysis is only valid for basic | |
4672 | block indices less than bypass_last_basic_block. */ | |
4673 | ||
4674 | static int bypass_last_basic_block; | |
4675 | ||
0e3f0221 RS |
4676 | /* Find a set of REGNO to a constant that is available at the end of basic |
4677 | block BB. Returns NULL if no such set is found. Based heavily upon | |
4678 | find_avail_set. */ | |
4679 | ||
4680 | static struct expr * | |
1d088dee | 4681 | find_bypass_set (int regno, int bb) |
0e3f0221 RS |
4682 | { |
4683 | struct expr *result = 0; | |
4684 | ||
4685 | for (;;) | |
4686 | { | |
4687 | rtx src; | |
ceda50e9 | 4688 | struct expr *set = lookup_set (regno, &set_hash_table); |
0e3f0221 RS |
4689 | |
4690 | while (set) | |
4691 | { | |
4692 | if (TEST_BIT (cprop_avout[bb], set->bitmap_index)) | |
4693 | break; | |
4694 | set = next_set (regno, set); | |
4695 | } | |
4696 | ||
4697 | if (set == 0) | |
4698 | break; | |
4699 | ||
4700 | if (GET_CODE (set->expr) != SET) | |
4701 | abort (); | |
4702 | ||
4703 | src = SET_SRC (set->expr); | |
6b2d1c9e | 4704 | if (gcse_constant_p (src)) |
0e3f0221 RS |
4705 | result = set; |
4706 | ||
4707 | if (GET_CODE (src) != REG) | |
4708 | break; | |
4709 | ||
4710 | regno = REGNO (src); | |
4711 | } | |
4712 | return result; | |
4713 | } | |
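/* Example of the copy-chasing loop above (numbers invented): with
   (set (reg 80) (const_int 5)) and (set (reg 81) (reg 80)) both
   available at the end of block BB, find_bypass_set (81, BB) finds no
   constant for reg 81 itself, follows the (reg 80) source, and returns
   the (const_int 5) set, letting bypass_block fold a jump that tests
   reg 81.  */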
4714 | ||
4715 | ||
e129b3f9 RS |
4716 | /* Subroutine of bypass_block that checks whether a pseudo is killed by |
4717 | any of the instructions inserted on an edge. Jump bypassing places | |
4718 | condition code setters on CFG edges using insert_insn_on_edge. This | |
4719 | function is required to check that our data flow analysis is still | |
4720 | valid prior to commit_edge_insertions. */ | |
4721 | ||
4722 | static bool | |
1d088dee | 4723 | reg_killed_on_edge (rtx reg, edge e) |
e129b3f9 RS |
4724 | { |
4725 | rtx insn; | |
4726 | ||
4727 | for (insn = e->insns; insn; insn = NEXT_INSN (insn)) | |
4728 | if (INSN_P (insn) && reg_set_p (reg, insn)) | |
4729 | return true; | |
4730 | ||
4731 | return false; | |
4732 | } | |
4733 | ||
0e3f0221 RS |
4734 | /* Subroutine of bypass_conditional_jumps that attempts to bypass the given |
4735 | basic block BB which has more than one predecessor. If not NULL, SETCC | |
4736 | is the first instruction of BB, which is immediately followed by JUMP_INSN | |
4737 | JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB. | |
e129b3f9 RS |
4738 | Returns nonzero if a change was made. |
4739 | ||
e0bb17a8 | 4740 | During the jump bypassing pass, we may place copies of SETCC instructions |
e129b3f9 RS |
4741 | on CFG edges. The following routine must be careful to pay attention to |
4742 | these inserted insns when performing its transformations. */ | |
0e3f0221 RS |
4743 | |
4744 | static int | |
1d088dee | 4745 | bypass_block (basic_block bb, rtx setcc, rtx jump) |
0e3f0221 RS |
4746 | { |
4747 | rtx insn, note; | |
e129b3f9 | 4748 | edge e, enext, edest; |
818b6b7f | 4749 | int i, change; |
72b8d451 | 4750 | int may_be_loop_header; |
0e3f0221 RS |
4751 | |
4752 | insn = (setcc != NULL) ? setcc : jump; | |
4753 | ||
4754 | /* Determine set of register uses in INSN. */ | |
4755 | reg_use_count = 0; | |
4756 | note_uses (&PATTERN (insn), find_used_regs, NULL); | |
4757 | note = find_reg_equal_equiv_note (insn); | |
4758 | if (note) | |
4759 | find_used_regs (&XEXP (note, 0), NULL); | |
4760 | ||
72b8d451 ZD |
4761 | may_be_loop_header = false; |
4762 | for (e = bb->pred; e; e = e->pred_next) | |
4763 | if (e->flags & EDGE_DFS_BACK) | |
4764 | { | |
4765 | may_be_loop_header = true; | |
4766 | break; | |
4767 | } | |
4768 | ||
0e3f0221 RS |
4769 | change = 0; |
4770 | for (e = bb->pred; e; e = enext) | |
4771 | { | |
4772 | enext = e->pred_next; | |
7821bfc7 RS |
4773 | if (e->flags & EDGE_COMPLEX) |
4774 | continue; | |
4775 | ||
4776 | /* We can't redirect edges from new basic blocks. */ | |
4777 | if (e->src->index >= bypass_last_basic_block) | |
4778 | continue; | |
4779 | ||
72b8d451 | 4780 | /* Redirecting edges that enter the loop from outside would create
e0bb17a8 KH |
4781 | irreducible loops, decreasing the effectiveness of some of the
4782 | following optimizations, so prevent this. */
72b8d451 ZD |
4783 | if (may_be_loop_header |
4784 | && !(e->flags & EDGE_DFS_BACK)) | |
4785 | continue; | |
4786 | ||
0e3f0221 RS |
4787 | for (i = 0; i < reg_use_count; i++) |
4788 | { | |
4789 | struct reg_use *reg_used = ®_use_table[i]; | |
589005ff | 4790 | unsigned int regno = REGNO (reg_used->reg_rtx); |
818b6b7f | 4791 | basic_block dest, old_dest; |
589005ff KH |
4792 | struct expr *set; |
4793 | rtx src, new; | |
0e3f0221 | 4794 | |
589005ff KH |
4795 | if (regno >= max_gcse_regno) |
4796 | continue; | |
0e3f0221 | 4797 | |
589005ff | 4798 | set = find_bypass_set (regno, e->src->index); |
0e3f0221 RS |
4799 | |
4800 | if (! set) | |
4801 | continue; | |
4802 | ||
e129b3f9 RS |
4803 | /* Check the data flow is valid after edge insertions. */ |
4804 | if (e->insns && reg_killed_on_edge (reg_used->reg_rtx, e)) | |
4805 | continue; | |
4806 | ||
589005ff | 4807 | src = SET_SRC (pc_set (jump)); |
0e3f0221 RS |
4808 | |
4809 | if (setcc != NULL) | |
4810 | src = simplify_replace_rtx (src, | |
589005ff KH |
4811 | SET_DEST (PATTERN (setcc)), |
4812 | SET_SRC (PATTERN (setcc))); | |
0e3f0221 RS |
4813 | |
4814 | new = simplify_replace_rtx (src, reg_used->reg_rtx, | |
589005ff | 4815 | SET_SRC (set->expr)); |
0e3f0221 | 4816 | |
1d088dee | 4817 | /* Jump bypassing may have already placed instructions on |
e129b3f9 RS |
4818 | edges of the CFG. We can't bypass an outgoing edge that |
4819 | has instructions associated with it, as these insns won't | |
4820 | get executed if the incoming edge is redirected. */ | |
4821 | ||
589005ff | 4822 | if (new == pc_rtx) |
e129b3f9 RS |
4823 | { |
4824 | edest = FALLTHRU_EDGE (bb); | |
4825 | dest = edest->insns ? NULL : edest->dest; | |
4826 | } | |
0e3f0221 | 4827 | else if (GET_CODE (new) == LABEL_REF) |
e129b3f9 RS |
4828 | { |
4829 | dest = BLOCK_FOR_INSN (XEXP (new, 0)); | |
4830 | /* Don't bypass edges containing instructions. */ | |
4831 | for (edest = bb->succ; edest; edest = edest->succ_next) | |
4832 | if (edest->dest == dest && edest->insns) | |
4833 | { | |
4834 | dest = NULL; | |
4835 | break; | |
4836 | } | |
4837 | } | |
0e3f0221 RS |
4838 | else |
4839 | dest = NULL; | |
4840 | ||
818b6b7f | 4841 | old_dest = e->dest; |
7821bfc7 RS |
4842 | if (dest != NULL |
4843 | && dest != old_dest | |
4844 | && dest != EXIT_BLOCK_PTR) | |
4845 | { | |
4846 | redirect_edge_and_branch_force (e, dest); | |
4847 | ||
818b6b7f | 4848 | /* Copy the register setter to the redirected edge. |
0e3f0221 RS |
4849 | Don't copy CC0 setters, as CC0 is dead after jump. */ |
4850 | if (setcc) | |
4851 | { | |
4852 | rtx pat = PATTERN (setcc); | |
818b6b7f | 4853 | if (!CC0_P (SET_DEST (pat))) |
0e3f0221 RS |
4854 | insert_insn_on_edge (copy_insn (pat), e); |
4855 | } | |
4856 | ||
4857 | if (gcse_file != NULL) | |
4858 | { | |
818b6b7f RH |
4859 | fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ", |
4860 | regno, INSN_UID (jump)); | |
0e3f0221 RS |
4861 | print_rtl (gcse_file, SET_SRC (set->expr)); |
4862 | fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n", | |
818b6b7f | 4863 | e->src->index, old_dest->index, dest->index); |
0e3f0221 RS |
4864 | } |
4865 | change = 1; | |
4866 | break; | |
4867 | } | |
4868 | } | |
4869 | } | |
4870 | return change; | |
4871 | } | |
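/* Overall effect, sketched with invented numbers: if block B contains
   only "setcc; if (reg 75) goto L" and the edge P->B arrives with
   reg 75 provably (const_int 0), the condition folds to (pc), so the
   edge from P is redirected straight to B's fallthrough successor and
   the setcc pattern is copied onto the redirected edge (unless it sets
   cc0, which is dead after the jump anyway).  */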
4872 | ||
4873 | /* Find basic blocks with more than one predecessor that only contain a | |
4874 | single conditional jump. If the result of the comparison is known at | |
4875 | compile-time from any incoming edge, redirect that edge to the | |
9a71ece1 RH |
4876 | appropriate target. Returns nonzero if a change was made. |
4877 | ||
4878 | This function is now mis-named, because we also handle indirect jumps. */ | |
0e3f0221 RS |
4879 | |
4880 | static int | |
1d088dee | 4881 | bypass_conditional_jumps (void) |
0e3f0221 RS |
4882 | { |
4883 | basic_block bb; | |
4884 | int changed; | |
4885 | rtx setcc; | |
4886 | rtx insn; | |
4887 | rtx dest; | |
4888 | ||
4889 | /* Note we start at block 1. */ | |
4890 | if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR) | |
4891 | return 0; | |
4892 | ||
7821bfc7 | 4893 | bypass_last_basic_block = last_basic_block; |
72b8d451 | 4894 | mark_dfs_back_edges (); |
7821bfc7 | 4895 | |
0e3f0221 RS |
4896 | changed = 0; |
4897 | FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, | |
589005ff | 4898 | EXIT_BLOCK_PTR, next_bb) |
0e3f0221 RS |
4899 | { |
4900 | /* Check for more than one predecessor. */ | |
4901 | if (bb->pred && bb->pred->pred_next) | |
4902 | { | |
4903 | setcc = NULL_RTX; | |
4904 | for (insn = bb->head; | |
4905 | insn != NULL && insn != NEXT_INSN (bb->end); | |
4906 | insn = NEXT_INSN (insn)) | |
4907 | if (GET_CODE (insn) == INSN) | |
4908 | { | |
9543a9d2 | 4909 | if (setcc) |
0e3f0221 | 4910 | break; |
ba4f7968 | 4911 | if (GET_CODE (PATTERN (insn)) != SET) |
0e3f0221 RS |
4912 | break; |
4913 | ||
ba4f7968 | 4914 | dest = SET_DEST (PATTERN (insn)); |
818b6b7f | 4915 | if (REG_P (dest) || CC0_P (dest)) |
0e3f0221 | 4916 | setcc = insn; |
0e3f0221 RS |
4917 | else |
4918 | break; | |
4919 | } | |
4920 | else if (GET_CODE (insn) == JUMP_INSN) | |
4921 | { | |
9a71ece1 RH |
4922 | if ((any_condjump_p (insn) || computed_jump_p (insn)) |
4923 | && onlyjump_p (insn)) | |
0e3f0221 RS |
4924 | changed |= bypass_block (bb, setcc, insn); |
4925 | break; | |
4926 | } | |
4927 | else if (INSN_P (insn)) | |
4928 | break; | |
4929 | } | |
4930 | } | |
4931 | ||
818b6b7f | 4932 | /* If we bypassed any register setting insns, we inserted a |
fbe5a4a6 | 4933 | copy on the redirected edge. These need to be committed. */ |
0e3f0221 RS |
4934 | if (changed) |
4935 | commit_edge_insertions ();
4936 | ||
4937 | return changed; | |
4938 | } | |
4939 | \f | |
a65f3558 | 4940 | /* Compute PRE+LCM working variables. */ |
7506f491 DE |
4941 | |
4942 | /* Local properties of expressions. */ | |
4943 | /* Nonzero for expressions that are transparent in the block. */ | |
a65f3558 | 4944 | static sbitmap *transp; |
7506f491 | 4945 | |
5c35539b RH |
4946 | /* Nonzero for expressions that are transparent at the end of the block. |
4947 | This is only zero for expressions killed by abnormal critical edge | |
4948 | created by a calls. */ | |
a65f3558 | 4949 | static sbitmap *transpout; |
5c35539b | 4950 | |
a65f3558 JL |
4951 | /* Nonzero for expressions that are computed (available) in the block. */ |
4952 | static sbitmap *comp; | |
7506f491 | 4953 | |
a65f3558 JL |
4954 | /* Nonzero for expressions that are locally anticipatable in the block. */ |
4955 | static sbitmap *antloc; | |
7506f491 | 4956 | |
a65f3558 JL |
4957 | /* Nonzero for expressions where this block is an optimal computation |
4958 | point. */ | |
4959 | static sbitmap *pre_optimal; | |
5c35539b | 4960 | |
a65f3558 JL |
4961 | /* Nonzero for expressions which are redundant in a particular block. */ |
4962 | static sbitmap *pre_redundant; | |
7506f491 | 4963 | |
a42cd965 AM |
4964 | /* Nonzero for expressions which should be inserted on a specific edge. */ |
4965 | static sbitmap *pre_insert_map; | |
4966 | ||
4967 | /* Nonzero for expressions which should be deleted in a specific block. */ | |
4968 | static sbitmap *pre_delete_map; | |
4969 | ||
4970 | /* Contains the edge_list returned by pre_edge_lcm. */ | |
4971 | static struct edge_list *edge_list; | |
4972 | ||
a65f3558 JL |
4973 | /* Redundant insns. */ |
4974 | static sbitmap pre_redundant_insns; | |
7506f491 | 4975 | |
a65f3558 | 4976 | /* Allocate vars used for PRE analysis. */ |
7506f491 DE |
4977 | |
4978 | static void | |
1d088dee | 4979 | alloc_pre_mem (int n_blocks, int n_exprs) |
7506f491 | 4980 | { |
a65f3558 JL |
4981 | transp = sbitmap_vector_alloc (n_blocks, n_exprs); |
4982 | comp = sbitmap_vector_alloc (n_blocks, n_exprs); | |
4983 | antloc = sbitmap_vector_alloc (n_blocks, n_exprs); | |
5faf03ae | 4984 | |
a42cd965 AM |
4985 | pre_optimal = NULL; |
4986 | pre_redundant = NULL; | |
4987 | pre_insert_map = NULL; | |
4988 | pre_delete_map = NULL; | |
4989 | ae_in = NULL; | |
4990 | ae_out = NULL; | |
a42cd965 | 4991 | ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs); |
c4c81601 | 4992 | |
a42cd965 | 4993 | /* pre_insert and pre_delete are allocated later. */ |
7506f491 DE |
4994 | } |
4995 | ||
a65f3558 | 4996 | /* Free vars used for PRE analysis. */ |
7506f491 DE |
4997 | |
4998 | static void | |
1d088dee | 4999 | free_pre_mem (void) |
7506f491 | 5000 | { |
5a660bff DB |
5001 | sbitmap_vector_free (transp); |
5002 | sbitmap_vector_free (comp); | |
bd3675fc JL |
5003 | |
5004 | /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */ | |
7506f491 | 5005 | |
a42cd965 | 5006 | if (pre_optimal) |
5a660bff | 5007 | sbitmap_vector_free (pre_optimal); |
a42cd965 | 5008 | if (pre_redundant) |
5a660bff | 5009 | sbitmap_vector_free (pre_redundant); |
a42cd965 | 5010 | if (pre_insert_map) |
5a660bff | 5011 | sbitmap_vector_free (pre_insert_map); |
a42cd965 | 5012 | if (pre_delete_map) |
5a660bff | 5013 | sbitmap_vector_free (pre_delete_map); |
a42cd965 | 5014 | if (ae_in) |
5a660bff | 5015 | sbitmap_vector_free (ae_in); |
a42cd965 | 5016 | if (ae_out) |
5a660bff | 5017 | sbitmap_vector_free (ae_out); |
a42cd965 | 5018 | |
bd3675fc | 5019 | transp = comp = NULL; |
a42cd965 | 5020 | pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL; |
55d3f917 | 5021 | ae_in = ae_out = NULL; |
7506f491 DE |
5022 | } |
5023 | ||
5024 | /* Top level routine to do the dataflow analysis needed by PRE. */ | |
5025 | ||
5026 | static void | |
1d088dee | 5027 | compute_pre_data (void) |
7506f491 | 5028 | { |
b614171e | 5029 | sbitmap trapping_expr; |
e0082a72 | 5030 | basic_block bb; |
b614171e | 5031 | unsigned int ui; |
c66e8ae9 | 5032 | |
02280659 | 5033 | compute_local_properties (transp, comp, antloc, &expr_hash_table); |
d55bc081 | 5034 | sbitmap_vector_zero (ae_kill, last_basic_block); |
c66e8ae9 | 5035 | |
b614171e | 5036 | /* Collect expressions which might trap. */ |
02280659 | 5037 | trapping_expr = sbitmap_alloc (expr_hash_table.n_elems); |
b614171e | 5038 | sbitmap_zero (trapping_expr); |
02280659 | 5039 | for (ui = 0; ui < expr_hash_table.size; ui++) |
b614171e MM |
5040 | { |
5041 | struct expr *e; | |
02280659 | 5042 | for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash) |
b614171e MM |
5043 | if (may_trap_p (e->expr)) |
5044 | SET_BIT (trapping_expr, e->bitmap_index); | |
5045 | } | |
5046 | ||
c66e8ae9 JL |
5047 | /* Compute ae_kill for each basic block using: |
5048 | ||
5049 | ~(TRANSP | COMP) | |
5050 | ||
a2e90653 | 5051 | This is significantly faster than compute_ae_kill. */ |
c66e8ae9 | 5052 | |
e0082a72 | 5053 | FOR_EACH_BB (bb) |
c66e8ae9 | 5054 | { |
b614171e MM |
5055 | edge e; |
5056 | ||
5057 | /* If the current block is the destination of an abnormal edge, we | |
5058 | kill all trapping expressions because we won't be able to properly | |
5059 | place the instruction on the edge. So make them neither | |
5060 | anticipatable nor transparent. This is fairly conservative. */ | |
e0082a72 | 5061 | for (e = bb->pred; e ; e = e->pred_next) |
b614171e MM |
5062 | if (e->flags & EDGE_ABNORMAL) |
5063 | { | |
e0082a72 ZD |
5064 | sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr); |
5065 | sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr); | |
b614171e MM |
5066 | break; |
5067 | } | |
5068 | ||
e0082a72 ZD |
5069 | sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]); |
5070 | sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]); | |
c66e8ae9 JL |
5071 | } |
5072 | ||
02280659 | 5073 | edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc, |
a42cd965 | 5074 | ae_kill, &pre_insert_map, &pre_delete_map); |
5a660bff | 5075 | sbitmap_vector_free (antloc); |
bd3675fc | 5076 | antloc = NULL; |
5a660bff | 5077 | sbitmap_vector_free (ae_kill); |
589005ff | 5078 | ae_kill = NULL; |
76ac938b | 5079 | sbitmap_free (trapping_expr); |
7506f491 DE |
5080 | } |
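/* Restating the kill computation above in equation form (no new code,
   just the sbitmap operations spelled out per block BB):

     ae_kill[BB] = ~(transp[BB] | comp[BB])

   i.e. an expression is considered killed in BB exactly when it is
   neither transparent through BB nor computed within BB.  */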
5081 | \f | |
5082 | /* PRE utilities */ | |
5083 | ||
cc2902df | 5084 | /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach |
a65f3558 | 5085 | block BB. |
7506f491 DE |
5086 | |
5087 | VISITED is a pointer to a working buffer for tracking which BB's have | |
5088 | been visited. It is NULL for the top-level call. | |
5089 | ||
5090 | We treat reaching expressions that go through blocks containing the same | |
5091 | reaching expression as "not reaching". E.g. if EXPR is generated in blocks | |
5092 | 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block | |
5093 | 2 as not reaching. The intent is to improve the probability of finding | |
5094 | only one reaching expression and to reduce register lifetimes by picking | |
5095 | the closest such expression. */ | |
5096 | ||
5097 | static int | |
1d088dee | 5098 | pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited) |
7506f491 | 5099 | { |
36349f8b | 5100 | edge pred; |
7506f491 | 5101 | |
e2d2ed72 | 5102 | for (pred = bb->pred; pred != NULL; pred = pred->pred_next) |
7506f491 | 5103 | { |
e2d2ed72 | 5104 | basic_block pred_bb = pred->src; |
7506f491 | 5105 | |
36349f8b | 5106 | if (pred->src == ENTRY_BLOCK_PTR |
7506f491 | 5107 | /* Has this predecessor already been visited? */
0b17ab2f | 5108 | || visited[pred_bb->index]) |
c4c81601 RK |
5109 | ;/* Nothing to do. */ |
5110 | ||
7506f491 | 5111 | /* Does this predecessor generate this expression? */ |
0b17ab2f | 5112 | else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index)) |
7506f491 DE |
5113 | { |
5114 | /* Is this the occurrence we're looking for? | |
5115 | Note that there's only one generating occurrence per block | |
5116 | so we just need to check the block number. */ | |
a65f3558 | 5117 | if (occr_bb == pred_bb) |
7506f491 | 5118 | return 1; |
c4c81601 | 5119 | |
0b17ab2f | 5120 | visited[pred_bb->index] = 1; |
7506f491 DE |
5121 | } |
5122 | /* Ignore this predecessor if it kills the expression. */ | |
0b17ab2f RH |
5123 | else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index)) |
5124 | visited[pred_bb->index] = 1; | |
c4c81601 | 5125 | |
7506f491 DE |
5126 | /* Neither gen nor kill. */ |
5127 | else | |
ac7c5af5 | 5128 | { |
0b17ab2f | 5129 | visited[pred_bb->index] = 1; |
89e606c9 | 5130 | if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited)) |
7506f491 | 5131 | return 1; |
ac7c5af5 | 5132 | } |
7506f491 DE |
5133 | } |
5134 | ||
5135 | /* All paths have been checked. */ | |
5136 | return 0; | |
5137 | } | |
283a2545 RL |
5138 | |
5139 | /* The wrapper for pre_expr_reaches_here_p_work that ensures that any
dc297297 | 5140 | memory allocated by that function is freed. */
283a2545 RL |
5141 | |
5142 | static int | |
1d088dee | 5143 | pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb) |
283a2545 RL |
5144 | { |
5145 | int rval; | |
703ad42b | 5146 | char *visited = xcalloc (last_basic_block, 1); |
283a2545 | 5147 | |
8e42ace1 | 5148 | rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited); |
283a2545 RL |
5149 | |
5150 | free (visited); | |
c4c81601 | 5151 | return rval; |
283a2545 | 5152 | } |
7506f491 | 5153 | \f |
a42cd965 AM |
5154 | |
5155 | /* Given an expr, generate RTL which we can insert at the end of a BB, | |
589005ff | 5156 | or on an edge. Set the block number of any insns generated to |
a42cd965 AM |
5157 | the value of BB. */ |
5158 | ||
5159 | static rtx | |
1d088dee | 5160 | process_insert_insn (struct expr *expr) |
a42cd965 AM |
5161 | { |
5162 | rtx reg = expr->reaching_reg; | |
fb0c0a12 RK |
5163 | rtx exp = copy_rtx (expr->expr); |
5164 | rtx pat; | |
a42cd965 AM |
5165 | |
5166 | start_sequence (); | |
fb0c0a12 RK |
5167 | |
5168 | /* If the expression is something that's an operand, like a constant, | |
5169 | just copy it to a register. */ | |
5170 | if (general_operand (exp, GET_MODE (reg))) | |
5171 | emit_move_insn (reg, exp); | |
5172 | ||
5173 | /* Otherwise, make a new insn to compute this expression and make sure the | |
5174 | insn will be recognized (this also adds any needed CLOBBERs). Copy the | |
5175 | expression to make sure we don't have any sharing issues. */ | |
8d444206 | 5176 | else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp)))) |
fb0c0a12 | 5177 | abort (); |
589005ff | 5178 | |
2f937369 | 5179 | pat = get_insns (); |
a42cd965 AM |
5180 | end_sequence (); |
5181 | ||
5182 | return pat; | |
5183 | } | |
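/* For instance (a hypothetical expression), with
   expr->expr == (plus:SI (reg 66) (const_int 1)) and
   expr->reaching_reg == (reg 90), the sequence built here is
   (set (reg 90) (plus:SI (reg 66) (const_int 1))) -- recognized, and
   possibly with CLOBBERs added -- ready for insert_insn_end_bb or
   insert_insn_on_edge.  A constant or other general operand would
   instead go through emit_move_insn.  */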
589005ff | 5184 | |
a65f3558 JL |
5185 | /* Add EXPR to the end of basic block BB. |
5186 | ||
5187 | This is used by both the PRE and code hoisting. | |
5188 | ||
5189 | For PRE, we want to verify that the expr is either transparent | |
5190 | or locally anticipatable in the target block. This check makes | |
5191 | no sense for code hoisting. */ | |
7506f491 DE |
5192 | |
5193 | static void | |
1d088dee | 5194 | insert_insn_end_bb (struct expr *expr, basic_block bb, int pre) |
7506f491 | 5195 | { |
e2d2ed72 | 5196 | rtx insn = bb->end; |
7506f491 DE |
5197 | rtx new_insn; |
5198 | rtx reg = expr->reaching_reg; | |
5199 | int regno = REGNO (reg); | |
2f937369 | 5200 | rtx pat, pat_end; |
7506f491 | 5201 | |
a42cd965 | 5202 | pat = process_insert_insn (expr); |
2f937369 DM |
5203 | if (pat == NULL_RTX || ! INSN_P (pat)) |
5204 | abort (); | |
5205 | ||
5206 | pat_end = pat; | |
5207 | while (NEXT_INSN (pat_end) != NULL_RTX) | |
5208 | pat_end = NEXT_INSN (pat_end); | |
7506f491 DE |
5209 | |
5210 | /* If the last insn is a jump, insert EXPR in front [taking care to | |
4d6922ee | 5211 | handle cc0, etc. properly]. Similarly we need to take care of trapping
068473ec | 5212 | instructions in the presence of non-call exceptions. */
7506f491 | 5213 | |
068473ec JH |
5214 | if (GET_CODE (insn) == JUMP_INSN |
5215 | || (GET_CODE (insn) == INSN | |
5216 | && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))) | |
7506f491 | 5217 | { |
50b2596f | 5218 | #ifdef HAVE_cc0 |
7506f491 | 5219 | rtx note; |
50b2596f | 5220 | #endif |
068473ec JH |
5221 | /* It should always be the case that we can put these instructions |
5222 | anywhere in the basic block when performing PRE optimizations.
5223 | Check this. */ | |
3b25fbfe | 5224 | if (GET_CODE (insn) == INSN && pre |
0b17ab2f | 5225 | && !TEST_BIT (antloc[bb->index], expr->bitmap_index) |
589005ff | 5226 | && !TEST_BIT (transp[bb->index], expr->bitmap_index)) |
068473ec | 5227 | abort (); |
7506f491 DE |
5228 | |
5229 | /* If this is a jump table, then we can't insert stuff here. Since | |
5230 | we know the previous real insn must be the tablejump, we insert | |
5231 | the new instruction just before the tablejump. */ | |
5232 | if (GET_CODE (PATTERN (insn)) == ADDR_VEC | |
5233 | || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC) | |
5234 | insn = prev_real_insn (insn); | |
5235 | ||
5236 | #ifdef HAVE_cc0 | |
5237 | /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts | |
5238 | if cc0 isn't set. */ | |
5239 | note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX); | |
5240 | if (note) | |
5241 | insn = XEXP (note, 0); | |
5242 | else | |
5243 | { | |
5244 | rtx maybe_cc0_setter = prev_nonnote_insn (insn); | |
5245 | if (maybe_cc0_setter | |
2c3c49de | 5246 | && INSN_P (maybe_cc0_setter) |
7506f491 DE |
5247 | && sets_cc0_p (PATTERN (maybe_cc0_setter))) |
5248 | insn = maybe_cc0_setter; | |
5249 | } | |
5250 | #endif | |
5251 | /* FIXME: What if something in cc0/jump uses value set in new insn? */ | |
3c030e88 | 5252 | new_insn = emit_insn_before (pat, insn); |
3947e2f9 | 5253 | } |
c4c81601 | 5254 | |
3947e2f9 RH |
5255 | /* Likewise if the last insn is a call, as will happen in the presence |
5256 | of exception handling. */ | |
068473ec JH |
5257 | else if (GET_CODE (insn) == CALL_INSN |
5258 | && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))) | |
3947e2f9 | 5259 | { |
3947e2f9 RH |
5260 | /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers, |
5261 | we search backward and place the instructions before the first | |
5262 | parameter is loaded. Do this for everyone for consistency and a | |
fbe5a4a6 | 5263 | presumption that we'll get better code elsewhere as well. |
3947e2f9 | 5264 | |
c4c81601 | 5265 | It should always be the case that we can put these instructions |
a65f3558 JL |
5266 | anywhere in the basic block when performing PRE optimizations.
5267 | Check this. */ | |
c4c81601 | 5268 | |
a65f3558 | 5269 | if (pre |
0b17ab2f | 5270 | && !TEST_BIT (antloc[bb->index], expr->bitmap_index) |
589005ff | 5271 | && !TEST_BIT (transp[bb->index], expr->bitmap_index)) |
3947e2f9 RH |
5272 | abort (); |
5273 | ||
5274 | /* Since different machines initialize their parameter registers | |
5275 | in different orders, assume nothing. Collect the set of all | |
5276 | parameter registers. */ | |
833366d6 | 5277 | insn = find_first_parameter_load (insn, bb->head); |
3947e2f9 | 5278 | |
b1d26727 JL |
5279 | /* If we found all the parameter loads, then we want to insert |
5280 | before the first parameter load. | |
5281 | ||
5282 | If we did not find all the parameter loads, then we might have | |
5283 | stopped on the head of the block, which could be a CODE_LABEL. | |
5284 | If we inserted before the CODE_LABEL, then we would be putting | |
5285 | the insn in the wrong basic block. In that case, put the insn | |
b5229628 | 5286 | after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */ |
0a377997 | 5287 | while (GET_CODE (insn) == CODE_LABEL |
589ca5cb | 5288 | || NOTE_INSN_BASIC_BLOCK_P (insn)) |
b5229628 | 5289 | insn = NEXT_INSN (insn); |
c4c81601 | 5290 | |
3c030e88 | 5291 | new_insn = emit_insn_before (pat, insn); |
7506f491 DE |
5292 | } |
5293 | else | |
3c030e88 | 5294 | new_insn = emit_insn_after (pat, insn); |
7506f491 | 5295 | |
2f937369 | 5296 | while (1) |
a65f3558 | 5297 | { |
2f937369 | 5298 | if (INSN_P (pat)) |
a65f3558 | 5299 | { |
2f937369 DM |
5300 | add_label_notes (PATTERN (pat), new_insn); |
5301 | note_stores (PATTERN (pat), record_set_info, pat); | |
a65f3558 | 5302 | } |
2f937369 DM |
5303 | if (pat == pat_end) |
5304 | break; | |
5305 | pat = NEXT_INSN (pat); | |
a65f3558 | 5306 | } |
3947e2f9 | 5307 | |
7506f491 DE |
5308 | gcse_create_count++; |
5309 | ||
5310 | if (gcse_file) | |
5311 | { | |
c4c81601 | 5312 | fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ", |
0b17ab2f | 5313 | bb->index, INSN_UID (new_insn)); |
c4c81601 RK |
5314 | fprintf (gcse_file, "copying expression %d to reg %d\n", |
5315 | expr->bitmap_index, regno); | |
7506f491 DE |
5316 | } |
5317 | } | |
5318 | ||
a42cd965 AM |
5319 | /* Insert partially redundant expressions on edges in the CFG to make |
5320 | the expressions fully redundant. */ | |
7506f491 | 5321 | |
a42cd965 | 5322 | static int |
1d088dee | 5323 | pre_edge_insert (struct edge_list *edge_list, struct expr **index_map) |
7506f491 | 5324 | { |
c4c81601 | 5325 | int e, i, j, num_edges, set_size, did_insert = 0; |
a65f3558 JL |
5326 | sbitmap *inserted; |
5327 | ||
a42cd965 AM |
5328 | /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge |
5329 | if it reaches any of the deleted expressions. */ | |
7506f491 | 5330 | |
a42cd965 AM |
5331 | set_size = pre_insert_map[0]->size; |
5332 | num_edges = NUM_EDGES (edge_list); | |
02280659 | 5333 | inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems); |
a42cd965 | 5334 | sbitmap_vector_zero (inserted, num_edges); |
7506f491 | 5335 | |
a42cd965 | 5336 | for (e = 0; e < num_edges; e++) |
7506f491 DE |
5337 | { |
5338 | int indx; | |
e2d2ed72 | 5339 | basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e); |
a65f3558 | 5340 | |
a65f3558 | 5341 | for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS) |
7506f491 | 5342 | { |
a42cd965 | 5343 | SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i]; |
7506f491 | 5344 | |
02280659 | 5345 | for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1) |
c4c81601 RK |
5346 | if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX) |
5347 | { | |
5348 | struct expr *expr = index_map[j]; | |
5349 | struct occr *occr; | |
a65f3558 | 5350 | |
ff7cc307 | 5351 | /* Now look at each deleted occurrence of this expression. */ |
c4c81601 RK |
5352 | for (occr = expr->antic_occr; occr != NULL; occr = occr->next) |
5353 | { | |
5354 | if (! occr->deleted_p) | |
5355 | continue; | |
5356 | ||
5357 | /* Insert this expression on this edge if it would
ff7cc307 | 5358 | reach the deleted occurrence in BB. */ |
c4c81601 RK |
5359 | if (!TEST_BIT (inserted[e], j)) |
5360 | { | |
5361 | rtx insn; | |
5362 | edge eg = INDEX_EDGE (edge_list, e); | |
5363 | ||
5364 | /* We can't insert anything on an abnormal and | |
5365 | critical edge, so we insert the insn at the end of | |
5366 | the previous block. There are several alternatives | |
5367 | detailed in Morgan's book, p. 277 (sec. 10.5), for
5368 | handling this situation. This one is easiest for | |
5369 | now. */ | |
5370 | ||
5371 | if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL) | |
5372 | insert_insn_end_bb (index_map[j], bb, 0); | |
5373 | else | |
5374 | { | |
5375 | insn = process_insert_insn (index_map[j]); | |
5376 | insert_insn_on_edge (insn, eg); | |
5377 | } | |
5378 | ||
5379 | if (gcse_file) | |
5380 | { | |
5381 | fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ", | |
0b17ab2f RH |
5382 | bb->index, |
5383 | INDEX_EDGE_SUCC_BB (edge_list, e)->index); | |
c4c81601 RK |
5384 | fprintf (gcse_file, "copy expression %d\n", |
5385 | expr->bitmap_index); | |
5386 | } | |
5387 | ||
a13d4ebf | 5388 | update_ld_motion_stores (expr); |
c4c81601 RK |
5389 | SET_BIT (inserted[e], j); |
5390 | did_insert = 1; | |
5391 | gcse_create_count++; | |
5392 | } | |
5393 | } | |
5394 | } | |
7506f491 DE |
5395 | } |
5396 | } | |
5faf03ae | 5397 | |
5a660bff | 5398 | sbitmap_vector_free (inserted); |
a42cd965 | 5399 | return did_insert; |
7506f491 DE |
5400 | } |
5401 | ||
073089a7 | 5402 | /* Copy the result of EXPR->EXPR generated by INSN to EXPR->REACHING_REG. |
b885908b MH |
5403 | Given "old_reg <- expr" (INSN), instead of adding after it |
5404 | reaching_reg <- old_reg | |
5405 | it's better to do the following: | |
5406 | reaching_reg <- expr | |
5407 | old_reg <- reaching_reg | |
5408 | because this way copy propagation can discover additional PRE | |
f5f2e3cd MH |
5409 | opportunities. But if this fails, we try the old way. |
5410 | When "expr" is a store, i.e. | |
5411 | given "MEM <- old_reg", instead of adding after it | |
5412 | reaching_reg <- old_reg | |
5413 | it's better to add it before as follows: | |
5414 | reaching_reg <- old_reg | |
5415 | MEM <- reaching_reg. */ | |
7506f491 DE |
5416 | |
5417 | static void | |
1d088dee | 5418 | pre_insert_copy_insn (struct expr *expr, rtx insn) |
7506f491 DE |
5419 | { |
5420 | rtx reg = expr->reaching_reg; | |
5421 | int regno = REGNO (reg); | |
5422 | int indx = expr->bitmap_index; | |
073089a7 RS |
5423 | rtx pat = PATTERN (insn); |
5424 | rtx set, new_insn; | |
b885908b | 5425 | rtx old_reg; |
073089a7 | 5426 | int i; |
7506f491 | 5427 | |
073089a7 RS |
5428 | /* This block matches the logic in hash_scan_insn. */ |
5429 | if (GET_CODE (pat) == SET) | |
5430 | set = pat; | |
5431 | else if (GET_CODE (pat) == PARALLEL) | |
5432 | { | |
5433 | /* Search through the parallel looking for the set whose | |
5434 | source was the expression that we're interested in. */ | |
5435 | set = NULL_RTX; | |
5436 | for (i = 0; i < XVECLEN (pat, 0); i++) | |
5437 | { | |
5438 | rtx x = XVECEXP (pat, 0, i); | |
5439 | if (GET_CODE (x) == SET | |
5440 | && expr_equiv_p (SET_SRC (x), expr->expr)) | |
5441 | { | |
5442 | set = x; | |
5443 | break; | |
5444 | } | |
5445 | } | |
5446 | } | |
5447 | else | |
7506f491 | 5448 | abort (); |
c4c81601 | 5449 | |
f5f2e3cd | 5450 | if (GET_CODE (SET_DEST (set)) == REG) |
073089a7 | 5451 | { |
f5f2e3cd MH |
5452 | old_reg = SET_DEST (set); |
5453 | /* Check if we can modify the set destination in the original insn. */ | |
5454 | if (validate_change (insn, &SET_DEST (set), reg, 0)) | |
5455 | { | |
5456 | new_insn = gen_move_insn (old_reg, reg); | |
5457 | new_insn = emit_insn_after (new_insn, insn); | |
5458 | ||
5459 | /* Keep register set table up to date. */ | |
5460 | replace_one_set (REGNO (old_reg), insn, new_insn); | |
5461 | record_one_set (regno, insn); | |
5462 | } | |
5463 | else | |
5464 | { | |
5465 | new_insn = gen_move_insn (reg, old_reg); | |
5466 | new_insn = emit_insn_after (new_insn, insn); | |
073089a7 | 5467 | |
f5f2e3cd MH |
5468 | /* Keep register set table up to date. */ |
5469 | record_one_set (regno, new_insn); | |
5470 | } | |
073089a7 | 5471 | } |
f5f2e3cd | 5472 | else /* This is possible only in case of a store to memory. */ |
073089a7 | 5473 | { |
f5f2e3cd | 5474 | old_reg = SET_SRC (set); |
073089a7 | 5475 | new_insn = gen_move_insn (reg, old_reg); |
f5f2e3cd MH |
5476 | |
5477 | /* Check if we can modify the set source in the original insn. */ | |
5478 | if (validate_change (insn, &SET_SRC (set), reg, 0)) | |
5479 | new_insn = emit_insn_before (new_insn, insn); | |
5480 | else | |
5481 | new_insn = emit_insn_after (new_insn, insn); | |
c4c81601 | 5482 | |
073089a7 RS |
5483 | /* Keep register set table up to date. */ |
5484 | record_one_set (regno, new_insn); | |
5485 | } | |
7506f491 DE |
5486 | |
5487 | gcse_create_count++; | |
5488 | ||
5489 | if (gcse_file) | |
a42cd965 AM |
5490 | fprintf (gcse_file, |
5491 | "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n", | |
5492 | BLOCK_NUM (insn), INSN_UID (new_insn), indx, | |
5493 | INSN_UID (insn), regno); | |
7506f491 DE |
5494 | } |
5495 | ||
5496 | /* Copy available expressions that reach the redundant expression | |
5497 | to `reaching_reg'. */ | |
5498 | ||
5499 | static void | |
1d088dee | 5500 | pre_insert_copies (void) |
7506f491 | 5501 | { |
f5f2e3cd | 5502 | unsigned int i, added_copy; |
c4c81601 RK |
5503 | struct expr *expr; |
5504 | struct occr *occr; | |
5505 | struct occr *avail; | |
a65f3558 | 5506 | |
7506f491 DE |
5507 | /* For each available expression in the table, copy the result to |
5508 | `reaching_reg' if the expression reaches a deleted one. | |
5509 | ||
5510 | ??? The current algorithm is rather brute force. | |
5511 | Need to do some profiling. */ | |
5512 | ||
02280659 ZD |
5513 | for (i = 0; i < expr_hash_table.size; i++) |
5514 | for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash) | |
c4c81601 RK |
5515 | { |
5516 | /* If the basic block isn't reachable, PPOUT will be TRUE. However, | |
5517 | we don't want to insert a copy here because the expression may not | |
5518 | really be redundant. So only insert an insn if the expression was | |
5519 | deleted. This test also avoids further processing if the | |
5520 | expression wasn't deleted anywhere. */ | |
5521 | if (expr->reaching_reg == NULL) | |
5522 | continue; | |
f5f2e3cd MH |
5523 | |
5524 | /* Set when we add a copy for that expression. */ | |
5525 | added_copy = 0; | |
c4c81601 RK |
5526 | |
5527 | for (occr = expr->antic_occr; occr != NULL; occr = occr->next) | |
5528 | { | |
5529 | if (! occr->deleted_p) | |
5530 | continue; | |
7506f491 | 5531 | |
c4c81601 RK |
5532 | for (avail = expr->avail_occr; avail != NULL; avail = avail->next) |
5533 | { | |
5534 | rtx insn = avail->insn; | |
7506f491 | 5535 | |
c4c81601 RK |
5536 | /* No need to handle this one if handled already. */ |
5537 | if (avail->copied_p) | |
5538 | continue; | |
7506f491 | 5539 | |
c4c81601 RK |
5540 | /* Don't handle this one if it's a redundant one. */ |
5541 | if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn))) | |
5542 | continue; | |
7506f491 | 5543 | |
c4c81601 | 5544 | /* Or if the expression doesn't reach the deleted one. */ |
589005ff | 5545 | if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn), |
e2d2ed72 AM |
5546 | expr, |
5547 | BLOCK_FOR_INSN (occr->insn))) | |
c4c81601 | 5548 | continue; |
7506f491 | 5549 | |
f5f2e3cd MH |
5550 | added_copy = 1; |
5551 | ||
c4c81601 RK |
5552 | /* Copy the result of avail to reaching_reg. */ |
5553 | pre_insert_copy_insn (expr, insn); | |
5554 | avail->copied_p = 1; | |
5555 | } | |
5556 | } | |
f5f2e3cd MH |
5557 | |
5558 | if (added_copy) | |
5559 | update_ld_motion_stores (expr); | |
c4c81601 | 5560 | } |
7506f491 DE |
5561 | } |
5562 | ||
10d1bb36 JH |
5563 | /* Emit move from SRC to DEST noting the equivalence with expression computed |
5564 | in INSN. */ | |
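/* A sketch of the result, with hypothetical register numbers and a
   hypothetical PLUS as the original computation:

     (insn ... (set (reg 65) (reg 70))
        (expr_list:REG_EQUAL (plus:SI (reg 60) (reg 61)) (nil)))

   The REG_EQUAL note records the expression INSN computed, so the
   later local CSE pass can still exploit the equivalence.  */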
5565 | static rtx | |
1d088dee | 5566 | gcse_emit_move_after (rtx src, rtx dest, rtx insn) |
10d1bb36 JH |
5567 | { |
5568 | rtx new; | |
6bdb8dd6 | 5569 | rtx set = single_set (insn), set2; |
10d1bb36 JH |
5570 | rtx note; |
5571 | rtx eqv; | |
5572 | ||
5573 | /* This should never fail since we're creating a reg->reg copy | |
5574 | we've verified to be valid. */ | |
5575 | ||
6bdb8dd6 | 5576 | new = emit_insn_after (gen_move_insn (dest, src), insn); |
285464d0 | 5577 | |
10d1bb36 | 5578 | /* Note the equivalence for local CSE pass. */ |
6bdb8dd6 JH |
5579 | set2 = single_set (new); |
5580 | if (!set2 || !rtx_equal_p (SET_DEST (set2), dest)) | |
5581 | return new; | |
10d1bb36 JH |
5582 | if ((note = find_reg_equal_equiv_note (insn))) |
5583 | eqv = XEXP (note, 0); | |
5584 | else | |
5585 | eqv = SET_SRC (set); | |
5586 | ||
a500466b | 5587 | set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv)); |
10d1bb36 JH |
5588 | |
5589 | return new; | |
5590 | } | |
5591 | ||
7506f491 | 5592 | /* Delete redundant computations. |
7506f491 DE |
5593 | Deletion is done by changing the insn to copy the `reaching_reg' of |
5594 | the expression into the result of the SET. It is left to later passes | |
5595 | (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it. | |
5596 | ||
cc2902df | 5597 | Returns nonzero if a change is made. */ |
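/* A sketch of the transformation (insn and register numbers are
   hypothetical):

     before:  (insn 42 (set (reg 65) (plus:SI (reg 60) (reg 61))))
     after:   (insn 43 (set (reg 65) (reg 70)))

   where reg 70 is the expression's `reaching_reg'; the redundant
   computation becomes a plain copy from the register that holds the
   expression's value on every incoming path.  */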
7506f491 DE |
5598 | |
5599 | static int | |
1d088dee | 5600 | pre_delete (void) |
7506f491 | 5601 | { |
2e653e39 | 5602 | unsigned int i; |
63bc1d05 | 5603 | int changed; |
c4c81601 RK |
5604 | struct expr *expr; |
5605 | struct occr *occr; | |
a65f3558 | 5606 | |
7506f491 | 5607 | changed = 0; |
02280659 | 5608 | for (i = 0; i < expr_hash_table.size; i++) |
073089a7 RS |
5609 | for (expr = expr_hash_table.table[i]; |
5610 | expr != NULL; | |
5611 | expr = expr->next_same_hash) | |
c4c81601 RK |
5612 | { |
5613 | int indx = expr->bitmap_index; | |
7506f491 | 5614 | |
c4c81601 RK |
5615 | /* We only need to search antic_occr since we require |
5616 | ANTLOC != 0. */ | |
7506f491 | 5617 | |
c4c81601 RK |
5618 | for (occr = expr->antic_occr; occr != NULL; occr = occr->next) |
5619 | { | |
5620 | rtx insn = occr->insn; | |
5621 | rtx set; | |
e2d2ed72 | 5622 | basic_block bb = BLOCK_FOR_INSN (insn); |
7506f491 | 5623 | |
073089a7 RS |
5624 | /* We only delete insns that have a single_set. */ |
5625 | if (TEST_BIT (pre_delete_map[bb->index], indx) | |
5626 | && (set = single_set (insn)) != 0) | |
c4c81601 | 5627 | { |
c4c81601 RK |
5628 | /* Create a pseudo-reg to store the result of reaching |
5629 | expressions into. Get the mode for the new pseudo from | |
5630 | the mode of the original destination pseudo. */ | |
5631 | if (expr->reaching_reg == NULL) | |
5632 | expr->reaching_reg | |
5633 | = gen_reg_rtx (GET_MODE (SET_DEST (set))); | |
5634 | ||
10d1bb36 JH |
5635 | gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn); |
5636 | delete_insn (insn); | |
5637 | occr->deleted_p = 1; | |
5638 | SET_BIT (pre_redundant_insns, INSN_CUID (insn)); | |
5639 | changed = 1; | |
5640 | gcse_subst_count++; | |
7506f491 | 5641 | |
c4c81601 RK |
5642 | if (gcse_file) |
5643 | { | |
5644 | fprintf (gcse_file, | |
5645 | "PRE: redundant insn %d (expression %d) in ", | |
5646 | INSN_UID (insn), indx); | |
5647 | fprintf (gcse_file, "bb %d, reaching reg is %d\n", | |
0b17ab2f | 5648 | bb->index, REGNO (expr->reaching_reg)); |
c4c81601 RK |
5649 | } |
5650 | } | |
5651 | } | |
5652 | } | |
7506f491 DE |
5653 | |
5654 | return changed; | |
5655 | } | |
5656 | ||
5657 | /* Perform GCSE optimizations using PRE. | |
5658 | This is called by one_pre_gcse_pass after all the dataflow analysis | |
5659 | has been done. | |
5660 | ||
c4c81601 RK |
5661 | This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and |
5662 | lazy code motion from Knoop, Ruthing and Steffen as described in Advanced | |
5663 | Compiler Design and Implementation. | |
7506f491 | 5664 | |
c4c81601 RK |
5665 | ??? A new pseudo reg is created to hold the reaching expression. The nice |
5666 | thing about the classical approach is that it would try to use an existing | |
5667 | reg. If the register can't be adequately optimized [i.e. we introduce | |
5668 | reload problems], one could add a pass here to propagate the new register | |
5669 | through the block. | |
7506f491 | 5670 | |
c4c81601 RK |
5671 | ??? We don't handle single sets in PARALLELs because we're [currently] not |
5672 | able to copy the rest of the parallel when we insert copies to create full | |
5673 | redundancies from partial redundancies. However, there's no reason why we | |
5674 | can't handle PARALLELs in the cases where there are no partial | |
7506f491 DE |
5675 | redundancies. */ |
5676 | ||
5677 | static int | |
1d088dee | 5678 | pre_gcse (void) |
7506f491 | 5679 | { |
2e653e39 RK |
5680 | unsigned int i; |
5681 | int did_insert, changed; | |
7506f491 | 5682 | struct expr **index_map; |
c4c81601 | 5683 | struct expr *expr; |
7506f491 DE |
5684 | |
5685 | /* Compute a mapping from expression number (`bitmap_index') to | |
5686 | hash table entry. */ | |
5687 | ||
703ad42b | 5688 | index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *)); |
02280659 ZD |
5689 | for (i = 0; i < expr_hash_table.size; i++) |
5690 | for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash) | |
c4c81601 | 5691 | index_map[expr->bitmap_index] = expr; |
7506f491 DE |
5692 | |
5693 | /* Reset bitmap used to track which insns are redundant. */ | |
a65f3558 JL |
5694 | pre_redundant_insns = sbitmap_alloc (max_cuid); |
5695 | sbitmap_zero (pre_redundant_insns); | |
7506f491 DE |
5696 | |
5697 | /* Delete the redundant insns first so that | |
5698 | - we know what register to use for the new insns and for the other | |
5699 | ones with reaching expressions | |
5700 | - we know which insns are redundant when we go to create copies */ | |
c4c81601 | 5701 | |
7506f491 DE |
5702 | changed = pre_delete (); |
5703 | ||
a42cd965 | 5704 | did_insert = pre_edge_insert (edge_list, index_map); |
c4c81601 | 5705 | |
7506f491 | 5706 | /* In other places with reaching expressions, copy the expression to the |
a42cd965 | 5707 | specially allocated pseudo-reg that reaches the redundant expr. */ |
7506f491 | 5708 | pre_insert_copies (); |
a42cd965 AM |
5709 | if (did_insert) |
5710 | { | |
5711 | commit_edge_insertions (); | |
5712 | changed = 1; | |
5713 | } | |
7506f491 | 5714 | |
283a2545 | 5715 | free (index_map); |
76ac938b | 5716 | sbitmap_free (pre_redundant_insns); |
7506f491 DE |
5717 | return changed; |
5718 | } | |
5719 | ||
5720 | /* Top level routine to perform one PRE GCSE pass. | |
5721 | ||
cc2902df | 5722 | Return nonzero if a change was made. */ |
7506f491 DE |
5723 | |
5724 | static int | |
1d088dee | 5725 | one_pre_gcse_pass (int pass) |
7506f491 DE |
5726 | { |
5727 | int changed = 0; | |
5728 | ||
5729 | gcse_subst_count = 0; | |
5730 | gcse_create_count = 0; | |
5731 | ||
02280659 | 5732 | alloc_hash_table (max_cuid, &expr_hash_table, 0); |
a42cd965 | 5733 | add_noreturn_fake_exit_edges (); |
a13d4ebf AM |
5734 | if (flag_gcse_lm) |
5735 | compute_ld_motion_mems (); | |
5736 | ||
02280659 | 5737 | compute_hash_table (&expr_hash_table); |
a13d4ebf | 5738 | trim_ld_motion_mems (); |
7506f491 | 5739 | if (gcse_file) |
02280659 | 5740 | dump_hash_table (gcse_file, "Expression", &expr_hash_table); |
c4c81601 | 5741 | |
02280659 | 5742 | if (expr_hash_table.n_elems > 0) |
7506f491 | 5743 | { |
02280659 | 5744 | alloc_pre_mem (last_basic_block, expr_hash_table.n_elems); |
7506f491 DE |
5745 | compute_pre_data (); |
5746 | changed |= pre_gcse (); | |
a42cd965 | 5747 | free_edge_list (edge_list); |
7506f491 DE |
5748 | free_pre_mem (); |
5749 | } | |
c4c81601 | 5750 | |
a13d4ebf | 5751 | free_ldst_mems (); |
a42cd965 | 5752 | remove_fake_edges (); |
02280659 | 5753 | free_hash_table (&expr_hash_table); |
7506f491 DE |
5754 | |
5755 | if (gcse_file) | |
5756 | { | |
c4c81601 RK |
5757 | fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ", |
5758 | current_function_name, pass, bytes_used); | |
5759 | fprintf (gcse_file, "%d substs, %d insns created\n", | |
5760 | gcse_subst_count, gcse_create_count); | |
7506f491 DE |
5761 | } |
5762 | ||
5763 | return changed; | |
5764 | } | |
aeb2f500 JW |
5765 | \f |
5766 | /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN. | |
5b1ef594 JDA |
5767 | If notes are added to an insn which references a CODE_LABEL, the |
5768 | LABEL_NUSES count is incremented. We have to add REG_LABEL notes, | |
5769 | because the following loop optimization pass requires them. */ | |
aeb2f500 JW |
5770 | |
5771 | /* ??? This is very similar to the loop.c add_label_notes function. We | |
5772 | could probably share code here. */ | |
5773 | ||
5774 | /* ??? If there was a jump optimization pass after gcse and before loop, | |
5775 | then we would not need to do this here, because jump would add the | |
5776 | necessary REG_LABEL notes. */ | |
5777 | ||
5778 | static void | |
1d088dee | 5779 | add_label_notes (rtx x, rtx insn) |
aeb2f500 JW |
5780 | { |
5781 | enum rtx_code code = GET_CODE (x); | |
5782 | int i, j; | |
6f7d635c | 5783 | const char *fmt; |
aeb2f500 JW |
5784 | |
5785 | if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x)) | |
5786 | { | |
6b3603c2 | 5787 | /* This code used to ignore labels that referred to dispatch tables to |
e0bb17a8 | 5788 | avoid flow generating (slightly) worse code. |
6b3603c2 | 5789 | |
ac7c5af5 JL |
5790 | We no longer ignore such label references (see LABEL_REF handling in |
5791 | mark_jump_label for additional information). */ | |
c4c81601 | 5792 | |
6b8c9327 | 5793 | REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0), |
6b3603c2 | 5794 | REG_NOTES (insn)); |
5b1ef594 | 5795 | if (LABEL_P (XEXP (x, 0))) |
589005ff | 5796 | LABEL_NUSES (XEXP (x, 0))++; |
aeb2f500 JW |
5797 | return; |
5798 | } | |
5799 | ||
c4c81601 | 5800 | for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--) |
aeb2f500 JW |
5801 | { |
5802 | if (fmt[i] == 'e') | |
5803 | add_label_notes (XEXP (x, i), insn); | |
5804 | else if (fmt[i] == 'E') | |
5805 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
5806 | add_label_notes (XVECEXP (x, i, j), insn); | |
5807 | } | |
5808 | } | |
a65f3558 JL |
5809 | |
5810 | /* Compute transparent outgoing information for each block. | |
5811 | ||
5812 | An expression is transparent to an edge unless it is killed by | |
5813 | the edge itself. This can only happen with abnormal control flow, | |
5814 | when the edge is traversed through a call. This happens with | |
5815 | non-local labels and exceptions. | |
5816 | ||
5817 | This would not be necessary if we split the edge. While this is | |
5818 | normally impossible for abnormal critical edges, with some effort | |
5819 | it should be possible with exception handling, since we still have | |
5820 | control over which handler should be invoked. But due to increased | |
5821 | EH table sizes, this may not be worthwhile. */ | |
5822 | ||
5823 | static void | |
1d088dee | 5824 | compute_transpout (void) |
a65f3558 | 5825 | { |
e0082a72 | 5826 | basic_block bb; |
2e653e39 | 5827 | unsigned int i; |
c4c81601 | 5828 | struct expr *expr; |
a65f3558 | 5829 | |
d55bc081 | 5830 | sbitmap_vector_ones (transpout, last_basic_block); |
a65f3558 | 5831 | |
e0082a72 | 5832 | FOR_EACH_BB (bb) |
a65f3558 | 5833 | { |
a65f3558 JL |
5834 | /* Note that flow inserted a nop at the end of basic blocks that |
5835 | end in call instructions for reasons other than abnormal | |
5836 | control flow. */ | |
e0082a72 | 5837 | if (GET_CODE (bb->end) != CALL_INSN) |
a65f3558 JL |
5838 | continue; |
5839 | ||
02280659 ZD |
5840 | for (i = 0; i < expr_hash_table.size; i++) |
5841 | for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash) | |
c4c81601 RK |
5842 | if (GET_CODE (expr->expr) == MEM) |
5843 | { | |
5844 | if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF | |
5845 | && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0))) | |
5846 | continue; | |
589005ff | 5847 | |
c4c81601 RK |
5848 | /* ??? Optimally, we would use interprocedural alias |
5849 | analysis to determine if this mem is actually killed | |
5850 | by this call. */ | |
e0082a72 | 5851 | RESET_BIT (transpout[bb->index], expr->bitmap_index); |
c4c81601 | 5852 | } |
a65f3558 JL |
5853 | } |
5854 | } | |
dfdb644f JL |
5855 | |
5856 | /* Removal of useless null pointer checks */ | |
5857 | ||
dfdb644f | 5858 | /* Called via note_stores. X is set by SETTER. If X is a register we must |
0511851c MM |
5859 | invalidate nonnull_local and set nonnull_killed. DATA is really a |
5860 | `null_pointer_info *'. | |
dfdb644f JL |
5861 | |
5862 | We ignore hard registers. */ | |
c4c81601 | 5863 | |
dfdb644f | 5864 | static void |
1d088dee | 5865 | invalidate_nonnull_info (rtx x, rtx setter ATTRIBUTE_UNUSED, void *data) |
dfdb644f | 5866 | { |
770ae6cc RK |
5867 | unsigned int regno; |
5868 | struct null_pointer_info *npi = (struct null_pointer_info *) data; | |
c4c81601 | 5869 | |
dfdb644f JL |
5870 | while (GET_CODE (x) == SUBREG) |
5871 | x = SUBREG_REG (x); | |
5872 | ||
5873 | /* Ignore anything that is not a register or is a hard register. */ | |
5874 | if (GET_CODE (x) != REG | |
0511851c MM |
5875 | || REGNO (x) < npi->min_reg |
5876 | || REGNO (x) >= npi->max_reg) | |
dfdb644f JL |
5877 | return; |
5878 | ||
0511851c | 5879 | regno = REGNO (x) - npi->min_reg; |
dfdb644f | 5880 | |
e0082a72 ZD |
5881 | RESET_BIT (npi->nonnull_local[npi->current_block->index], regno); |
5882 | SET_BIT (npi->nonnull_killed[npi->current_block->index], regno); | |
dfdb644f JL |
5883 | } |
5884 | ||
0511851c MM |
5885 | /* Do null-pointer check elimination for the registers indicated in |
5886 | NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps; | |
5887 | they are not our responsibility to free. */ | |
dfdb644f | 5888 | |
99a15921 | 5889 | static int |
1d088dee AJ |
5890 | delete_null_pointer_checks_1 (unsigned int *block_reg, sbitmap *nonnull_avin, |
5891 | sbitmap *nonnull_avout, | |
5892 | struct null_pointer_info *npi) | |
dfdb644f | 5893 | { |
e0082a72 | 5894 | basic_block bb, current_block; |
0511851c MM |
5895 | sbitmap *nonnull_local = npi->nonnull_local; |
5896 | sbitmap *nonnull_killed = npi->nonnull_killed; | |
99a15921 | 5897 | int something_changed = 0; |
589005ff | 5898 | |
dfdb644f JL |
5899 | /* Compute local properties, nonnull and killed. A register will have |
5900 | the nonnull property if at the end of the current block its value is | |
5901 | known to be nonnull. The killed property indicates that somewhere in | |
5902 | the block any information we had about the register is killed. | |
5903 | ||
5904 | Note that a register can have both properties in a single block. That | |
5905 | indicates that it's killed, then later in the block a new value is | |
5906 | computed. */ | |
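/* For instance, in a hypothetical block

     p = q;        <- set of P: the nonnull_killed bit for P is set and
                      any earlier nonnull_local bit for P is cleared
     x = *p;       <- load through P: the nonnull_local bit for P is set

   both bits end up set for P, and P is known nonnull on exit from the
   block despite the earlier kill.  */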
d55bc081 ZD |
5907 | sbitmap_vector_zero (nonnull_local, last_basic_block); |
5908 | sbitmap_vector_zero (nonnull_killed, last_basic_block); | |
c4c81601 | 5909 | |
e0082a72 | 5910 | FOR_EACH_BB (current_block) |
dfdb644f JL |
5911 | { |
5912 | rtx insn, stop_insn; | |
5913 | ||
0511851c MM |
5914 | /* Set the current block for invalidate_nonnull_info. */ |
5915 | npi->current_block = current_block; | |
5916 | ||
dfdb644f JL |
5917 | /* Scan each insn in the basic block looking for memory references and |
5918 | register sets. */ | |
e0082a72 ZD |
5919 | stop_insn = NEXT_INSN (current_block->end); |
5920 | for (insn = current_block->head; | |
dfdb644f JL |
5921 | insn != stop_insn; |
5922 | insn = NEXT_INSN (insn)) | |
5923 | { | |
5924 | rtx set; | |
0511851c | 5925 | rtx reg; |
dfdb644f JL |
5926 | |
5927 | /* Ignore anything that is not a normal insn. */ | |
2c3c49de | 5928 | if (! INSN_P (insn)) |
dfdb644f JL |
5929 | continue; |
5930 | ||
5931 | /* Basically ignore anything that is not a simple SET. We do have | |
5932 | to make sure to invalidate nonnull_local and set nonnull_killed | |
5933 | for such insns though. */ | |
5934 | set = single_set (insn); | |
5935 | if (!set) | |
5936 | { | |
0511851c | 5937 | note_stores (PATTERN (insn), invalidate_nonnull_info, npi); |
dfdb644f JL |
5938 | continue; |
5939 | } | |
5940 | ||
f63d1bf7 | 5941 | /* See if we've got a usable memory load. We handle it first |
dfdb644f JL |
5942 | in case it uses its address register as a dest (which kills |
5943 | the nonnull property). */ | |
5944 | if (GET_CODE (SET_SRC (set)) == MEM | |
0511851c MM |
5945 | && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG |
5946 | && REGNO (reg) >= npi->min_reg | |
5947 | && REGNO (reg) < npi->max_reg) | |
e0082a72 | 5948 | SET_BIT (nonnull_local[current_block->index], |
0511851c | 5949 | REGNO (reg) - npi->min_reg); |
dfdb644f JL |
5950 | |
5951 | /* Now invalidate stuff clobbered by this insn. */ | |
0511851c | 5952 | note_stores (PATTERN (insn), invalidate_nonnull_info, npi); |
dfdb644f JL |
5953 | |
5954 | /* And handle stores; we do these last since any sets in INSN cannot |
5955 | kill the nonnull property if it is derived from a MEM |
5956 | appearing in a SET_DEST. */ | |
5957 | if (GET_CODE (SET_DEST (set)) == MEM | |
0511851c MM |
5958 | && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG |
5959 | && REGNO (reg) >= npi->min_reg | |
5960 | && REGNO (reg) < npi->max_reg) | |
e0082a72 | 5961 | SET_BIT (nonnull_local[current_block->index], |
0511851c | 5962 | REGNO (reg) - npi->min_reg); |
dfdb644f JL |
5963 | } |
5964 | } | |
5965 | ||
5966 | /* Now compute global properties based on the local properties. This | |
fbe5a4a6 | 5967 | is a classic global availability algorithm. */ |
ce724250 JL |
5968 | compute_available (nonnull_local, nonnull_killed, |
5969 | nonnull_avout, nonnull_avin); | |
dfdb644f JL |
5970 | |
5971 | /* Now look at each bb and see if it ends with a compare of a value | |
5972 | against zero. */ | |
e0082a72 | 5973 | FOR_EACH_BB (bb) |
dfdb644f | 5974 | { |
e0082a72 | 5975 | rtx last_insn = bb->end; |
0511851c | 5976 | rtx condition, earliest; |
dfdb644f JL |
5977 | int compare_and_branch; |
5978 | ||
0511851c MM |
5979 | /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and |
5980 | since BLOCK_REG[BB] is zero if this block did not end with a | |
5981 | comparison against zero, this condition works. */ | |
e0082a72 ZD |
5982 | if (block_reg[bb->index] < npi->min_reg |
5983 | || block_reg[bb->index] >= npi->max_reg) | |
dfdb644f JL |
5984 | continue; |
5985 | ||
5986 | /* LAST_INSN is a conditional jump. Get its condition. */ | |
ec6ec6aa | 5987 | condition = get_condition (last_insn, &earliest, false); |
dfdb644f | 5988 | |
40d7a3fe NB |
5989 | /* If we can't determine the condition then skip. */ |
5990 | if (! condition) | |
5991 | continue; | |
5992 | ||
dfdb644f | 5993 | /* Is the register known to have a nonzero value? */ |
e0082a72 | 5994 | if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg)) |
dfdb644f JL |
5995 | continue; |
5996 | ||
5997 | /* Try to compute whether the compare/branch at the loop end is one or | |
5998 | two instructions. */ | |
5999 | if (earliest == last_insn) | |
6000 | compare_and_branch = 1; | |
6001 | else if (earliest == prev_nonnote_insn (last_insn)) | |
6002 | compare_and_branch = 2; | |
6003 | else | |
6004 | continue; | |
6005 | ||
6006 | /* We know the register in this comparison is nonnull at exit from | |
6007 | this block. We can optimize this comparison. */ | |
6008 | if (GET_CODE (condition) == NE) | |
6009 | { | |
6010 | rtx new_jump; | |
6011 | ||
38c1593d JH |
6012 | new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)), |
6013 | last_insn); | |
dfdb644f JL |
6014 | JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn); |
6015 | LABEL_NUSES (JUMP_LABEL (new_jump))++; | |
6016 | emit_barrier_after (new_jump); | |
6017 | } | |
8e184d9c | 6018 | |
99a15921 | 6019 | something_changed = 1; |
9cd56be1 | 6020 | delete_insn (last_insn); |
dfdb644f | 6021 | if (compare_and_branch == 2) |
589005ff | 6022 | delete_insn (earliest); |
e0082a72 | 6023 | purge_dead_edges (bb); |
0511851c MM |
6024 | |
6025 | /* Don't check this block again. (Note that BLOCK_END is | |
589005ff | 6026 | invalid here; we deleted the last instruction in the |
0511851c | 6027 | block.) */ |
e0082a72 | 6028 | block_reg[bb->index] = 0; |
0511851c | 6029 | } |
99a15921 JL |
6030 | |
6031 | return something_changed; | |
0511851c MM |
6032 | } |
6033 | ||
6034 | /* Find EQ/NE comparisons against zero which can be (indirectly) evaluated | |
6035 | at compile time. | |
6036 | ||
6037 | This is conceptually similar to global constant/copy propagation and | |
6038 | classic global CSE (it even uses the same dataflow equations as cprop). | |
6039 | ||
6040 | If a register is used as memory address with the form (mem (reg)), then we | |
6041 | know that REG can not be zero at that point in the program. Any instruction | |
6042 | which sets REG "kills" this property. | |
6043 | ||
6044 | So, if every path leading to a conditional branch has an available memory | |
6045 | reference of that form, then we know the register can not have the value | |
589005ff | 6046 | zero at the conditional branch. |
0511851c | 6047 | |
fbe5a4a6 | 6048 | So we merely need to compute the local properties and propagate that data |
0511851c MM |
6049 | around the cfg, then optimize where possible. |
6050 | ||
6051 | We run this pass twice: once before CSE, then again after CSE. This |
6052 | has proven to be the most profitable approach. It is rare for new | |
6053 | optimization opportunities of this nature to appear after the first CSE | |
6054 | pass. | |
6055 | ||
6056 | This could probably be integrated with global cprop with a little work. */ | |
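/* A hypothetical source-level example of what gets removed:

     int f (int *p)
     {
       int x = *p;    <- (set (reg X) (mem (reg P))): P is nonnull here
       if (p == 0)    <- equality test against zero: never true on
         return -1;      any path through the load above
       return x;
     }

   When every path to the conditional contains such a dereference, the
   compare and branch are deleted (for NE, the branch is first made
   unconditional).  */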
6057 | ||
99a15921 | 6058 | int |
1d088dee | 6059 | delete_null_pointer_checks (rtx f ATTRIBUTE_UNUSED) |
0511851c | 6060 | { |
0511851c | 6061 | sbitmap *nonnull_avin, *nonnull_avout; |
770ae6cc | 6062 | unsigned int *block_reg; |
e0082a72 | 6063 | basic_block bb; |
0511851c MM |
6064 | int reg; |
6065 | int regs_per_pass; | |
d128effb | 6066 | int max_reg = max_reg_num (); |
0511851c | 6067 | struct null_pointer_info npi; |
99a15921 | 6068 | int something_changed = 0; |
0511851c | 6069 | |
d128effb NS |
6070 | /* If we have only a single block, or it is too expensive, give up. */ |
6071 | if (n_basic_blocks <= 1 | |
6072 | || is_too_expensive (_ ("NULL pointer checks disabled"))) | |
99a15921 | 6073 | return 0; |
0511851c | 6074 | |
0511851c MM |
6075 | /* We need four bitmaps, each with a bit for each register in each |
6076 | basic block. */ | |
d55bc081 | 6077 | regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg); |
0511851c MM |
6078 | |
6079 | /* Allocate bitmaps to hold local and global properties. */ | |
d55bc081 ZD |
6080 | npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass); |
6081 | npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass); | |
6082 | nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass); | |
6083 | nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass); | |
0511851c MM |
6084 | |
6085 | /* Go through the basic blocks, seeing whether or not each block | |
6086 | ends with a conditional branch whose condition is a comparison | |
6087 | against zero. Record the register compared in BLOCK_REG. */ | |
703ad42b | 6088 | block_reg = xcalloc (last_basic_block, sizeof (int)); |
e0082a72 | 6089 | FOR_EACH_BB (bb) |
0511851c | 6090 | { |
e0082a72 | 6091 | rtx last_insn = bb->end; |
0511851c MM |
6092 | rtx condition, earliest, reg; |
6093 | ||
6094 | /* We only want conditional branches. */ | |
6095 | if (GET_CODE (last_insn) != JUMP_INSN | |
7f1c097d JH |
6096 | || !any_condjump_p (last_insn) |
6097 | || !onlyjump_p (last_insn)) | |
0511851c MM |
6098 | continue; |
6099 | ||
6100 | /* LAST_INSN is a conditional jump. Get its condition. */ | |
ec6ec6aa | 6101 | condition = get_condition (last_insn, &earliest, false); |
0511851c | 6102 | |
4fe9b91c | 6103 | /* If we were unable to get the condition, or it is not an equality |
0511851c MM |
6104 | comparison against zero then there's nothing we can do. */ |
6105 | if (!condition | |
6106 | || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ) | |
6107 | || GET_CODE (XEXP (condition, 1)) != CONST_INT | |
589005ff | 6108 | || (XEXP (condition, 1) |
0511851c MM |
6109 | != CONST0_RTX (GET_MODE (XEXP (condition, 0))))) |
6110 | continue; | |
6111 | ||
6112 | /* We must be checking a register against zero. */ | |
6113 | reg = XEXP (condition, 0); | |
6114 | if (GET_CODE (reg) != REG) | |
6115 | continue; | |
6116 | ||
e0082a72 | 6117 | block_reg[bb->index] = REGNO (reg); |
0511851c MM |
6118 | } |
6119 | ||
6120 | /* Go through the algorithm for each block of registers. */ | |
6121 | for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass) | |
6122 | { | |
6123 | npi.min_reg = reg; | |
6124 | npi.max_reg = MIN (reg + regs_per_pass, max_reg); | |
99a15921 JL |
6125 | something_changed |= delete_null_pointer_checks_1 (block_reg, |
6126 | nonnull_avin, | |
6127 | nonnull_avout, | |
6128 | &npi); | |
dfdb644f JL |
6129 | } |
6130 | ||
0511851c MM |
6131 | /* Free the table of registers compared at the end of every block. */ |
6132 | free (block_reg); | |
6133 | ||
dfdb644f | 6134 | /* Free bitmaps. */ |
5a660bff DB |
6135 | sbitmap_vector_free (npi.nonnull_local); |
6136 | sbitmap_vector_free (npi.nonnull_killed); | |
6137 | sbitmap_vector_free (nonnull_avin); | |
6138 | sbitmap_vector_free (nonnull_avout); | |
99a15921 JL |
6139 | |
6140 | return something_changed; | |
dfdb644f | 6141 | } |
bb457bd9 JL |
6142 | |
6143 | /* Code Hoisting variables and subroutines. */ | |
6144 | ||
6145 | /* Very busy expressions. */ | |
6146 | static sbitmap *hoist_vbein; | |
6147 | static sbitmap *hoist_vbeout; | |
6148 | ||
6149 | /* Hoistable expressions. */ | |
6150 | static sbitmap *hoist_exprs; | |
6151 | ||
6152 | /* Dominator bitmaps. */ | |
355be0dc | 6153 | dominance_info dominators; |
bb457bd9 JL |
6154 | |
6155 | /* ??? We could compute post dominators and run this algorithm in | |
68e82b83 | 6156 | reverse to perform tail merging; doing so would probably be |
bb457bd9 JL |
6157 | more effective than the tail merging code in jump.c. |
6158 | ||
6159 | It's unclear if tail merging could be run in parallel with | |
6160 | code hoisting. It would be nice. */ | |
6161 | ||
6162 | /* Allocate vars used for code hoisting analysis. */ | |
6163 | ||
6164 | static void | |
1d088dee | 6165 | alloc_code_hoist_mem (int n_blocks, int n_exprs) |
bb457bd9 JL |
6166 | { |
6167 | antloc = sbitmap_vector_alloc (n_blocks, n_exprs); | |
6168 | transp = sbitmap_vector_alloc (n_blocks, n_exprs); | |
6169 | comp = sbitmap_vector_alloc (n_blocks, n_exprs); | |
6170 | ||
6171 | hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs); | |
6172 | hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs); | |
6173 | hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs); | |
6174 | transpout = sbitmap_vector_alloc (n_blocks, n_exprs); | |
bb457bd9 JL |
6175 | } |
6176 | ||
6177 | /* Free vars used for code hoisting analysis. */ | |
6178 | ||
6179 | static void | |
1d088dee | 6180 | free_code_hoist_mem (void) |
bb457bd9 | 6181 | { |
5a660bff DB |
6182 | sbitmap_vector_free (antloc); |
6183 | sbitmap_vector_free (transp); | |
6184 | sbitmap_vector_free (comp); | |
bb457bd9 | 6185 | |
5a660bff DB |
6186 | sbitmap_vector_free (hoist_vbein); |
6187 | sbitmap_vector_free (hoist_vbeout); | |
6188 | sbitmap_vector_free (hoist_exprs); | |
6189 | sbitmap_vector_free (transpout); | |
bb457bd9 | 6190 | |
355be0dc | 6191 | free_dominance_info (dominators); |
bb457bd9 JL |
6192 | } |
6193 | ||
6194 | /* Compute the very busy expressions at entry/exit from each block. | |
6195 | ||
6196 | An expression is very busy if all paths from a given point | |
6197 | compute the expression. */ | |
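/* A sketch of the dataflow equations iterated below (very busy
   expressions, solved backwards over the CFG):

     VBEIN(bb)  = ANTLOC(bb) | (VBEOUT(bb) & TRANSP(bb))
     VBEOUT(bb) = intersection of VBEIN over all successors of bb
                  (VBEOUT of the last block stays empty)

   corresponding to the sbitmap_a_or_b_and_c_cg and
   sbitmap_intersection_of_succs calls in the loop below.  */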
6198 | ||
6199 | static void | |
1d088dee | 6200 | compute_code_hoist_vbeinout (void) |
bb457bd9 | 6201 | { |
e0082a72 ZD |
6202 | int changed, passes; |
6203 | basic_block bb; | |
bb457bd9 | 6204 | |
d55bc081 ZD |
6205 | sbitmap_vector_zero (hoist_vbeout, last_basic_block); |
6206 | sbitmap_vector_zero (hoist_vbein, last_basic_block); | |
bb457bd9 JL |
6207 | |
6208 | passes = 0; | |
6209 | changed = 1; | |
c4c81601 | 6210 | |
bb457bd9 JL |
6211 | while (changed) |
6212 | { | |
6213 | changed = 0; | |
c4c81601 | 6214 | |
bb457bd9 JL |
6215 | /* We scan the blocks in the reverse order to speed up |
6216 | the convergence. */ | |
e0082a72 | 6217 | FOR_EACH_BB_REVERSE (bb) |
bb457bd9 | 6218 | { |
e0082a72 ZD |
6219 | changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index], |
6220 | hoist_vbeout[bb->index], transp[bb->index]); | |
6221 | if (bb->next_bb != EXIT_BLOCK_PTR) | |
6222 | sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index); | |
bb457bd9 | 6223 | } |
c4c81601 | 6224 | |
bb457bd9 JL |
6225 | passes++; |
6226 | } | |
6227 | ||
6228 | if (gcse_file) | |
6229 | fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes); | |
6230 | } | |
6231 | ||
6232 | /* Top level routine to do the dataflow analysis needed by code hoisting. */ | |
6233 | ||
6234 | static void | |
1d088dee | 6235 | compute_code_hoist_data (void) |
bb457bd9 | 6236 | { |
02280659 | 6237 | compute_local_properties (transp, comp, antloc, &expr_hash_table); |
bb457bd9 JL |
6238 | compute_transpout (); |
6239 | compute_code_hoist_vbeinout (); | |
355be0dc | 6240 | dominators = calculate_dominance_info (CDI_DOMINATORS); |
bb457bd9 JL |
6241 | if (gcse_file) |
6242 | fprintf (gcse_file, "\n"); | |
6243 | } | |
6244 | ||
6245 | /* Determine if the expression identified by EXPR_INDEX would | |
6246 | reach BB unimpaired if it was placed at the end of EXPR_BB. |
6247 | ||
6248 | It's unclear exactly what Muchnick meant by "unimpaired". It seems |
6249 | to me that the expression must either be computed or transparent in | |
6250 | *every* block in the path(s) from EXPR_BB to BB. Any other definition | |
6251 | would allow the expression to be hoisted out of loops, even if | |
6252 | the expression wasn't a loop invariant. | |
6253 | ||
6254 | Contrast this to reachability for PRE where an expression is | |
6255 | considered reachable if *any* path reaches instead of *all* | |
6256 | paths. */ | |
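/* Concretely, in a hypothetical diamond CFG

        EXPR_BB
        /     \
      B1       B2
        \     /
          BB

   the expression must be computed or transparent in both B1 and B2;
   if either path kills it, the hoist into EXPR_BB is rejected, whereas
   PRE's reachability would accept a single surviving path.  */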
6257 | ||
6258 | static int | |
1d088dee | 6259 | hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited) |
bb457bd9 JL |
6260 | { |
6261 | edge pred; | |
283a2545 | 6262 | int visited_allocated_locally = 0; |
589005ff | 6263 | |
bb457bd9 JL |
6264 | |
6265 | if (visited == NULL) | |
6266 | { | |
8e42ace1 | 6267 | visited_allocated_locally = 1; |
d55bc081 | 6268 | visited = xcalloc (last_basic_block, 1); |
bb457bd9 JL |
6269 | } |
6270 | ||
e2d2ed72 | 6271 | for (pred = bb->pred; pred != NULL; pred = pred->pred_next) |
bb457bd9 | 6272 | { |
e2d2ed72 | 6273 | basic_block pred_bb = pred->src; |
bb457bd9 JL |
6274 | |
6275 | if (pred->src == ENTRY_BLOCK_PTR) | |
6276 | break; | |
f305679f JH |
6277 | else if (pred_bb == expr_bb) |
6278 | continue; | |
0b17ab2f | 6279 | else if (visited[pred_bb->index]) |
bb457bd9 | 6280 | continue; |
c4c81601 | 6281 | |
bb457bd9 | 6282 | /* Does this predecessor generate this expression? */ |
0b17ab2f | 6283 | else if (TEST_BIT (comp[pred_bb->index], expr_index)) |
bb457bd9 | 6284 | break; |
0b17ab2f | 6285 | else if (! TEST_BIT (transp[pred_bb->index], expr_index)) |
bb457bd9 | 6286 | break; |
c4c81601 | 6287 | |
bb457bd9 JL |
6288 | /* Not killed. */ |
6289 | else | |
6290 | { | |
0b17ab2f | 6291 | visited[pred_bb->index] = 1; |
bb457bd9 JL |
6292 | if (! hoist_expr_reaches_here_p (expr_bb, expr_index, |
6293 | pred_bb, visited)) | |
6294 | break; | |
6295 | } | |
6296 | } | |
589005ff | 6297 | if (visited_allocated_locally) |
283a2545 | 6298 | free (visited); |
c4c81601 | 6299 | |
bb457bd9 JL |
6300 | return (pred == NULL); |
6301 | } | |
6302 | \f | |
6303 | /* Actually perform code hoisting. */ | |
c4c81601 | 6304 | |
bb457bd9 | 6305 | static void |
1d088dee | 6306 | hoist_code (void) |
bb457bd9 | 6307 | { |
e0082a72 | 6308 | basic_block bb, dominated; |
c635a1ec DB |
6309 | basic_block *domby; |
6310 | unsigned int domby_len; | |
6311 | unsigned int i, j; |
bb457bd9 | 6312 | struct expr **index_map; |
c4c81601 | 6313 | struct expr *expr; |
bb457bd9 | 6314 | |
d55bc081 | 6315 | sbitmap_vector_zero (hoist_exprs, last_basic_block); |
bb457bd9 JL |
6316 | |
6317 | /* Compute a mapping from expression number (`bitmap_index') to | |
6318 | hash table entry. */ | |
6319 | ||
703ad42b | 6320 | index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *)); |
02280659 ZD |
6321 | for (i = 0; i < expr_hash_table.size; i++) |
6322 | for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash) | |
c4c81601 | 6323 | index_map[expr->bitmap_index] = expr; |
bb457bd9 JL |
6324 | |
6325 | /* Walk over each basic block looking for potentially hoistable | |
6326 | expressions, nothing gets hoisted from the entry block. */ | |
e0082a72 | 6327 | FOR_EACH_BB (bb) |
bb457bd9 JL |
6328 | { |
6329 | int found = 0; | |
6330 | int insn_inserted_p; | |
6331 | ||
c635a1ec | 6332 | domby_len = get_dominated_by (dominators, bb, &domby); |
bb457bd9 JL |
6333 | /* Examine each expression that is very busy at the exit of this |
6334 | block. These are the potentially hoistable expressions. */ | |
e0082a72 | 6335 | for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++) |
bb457bd9 JL |
6336 | { |
6337 | int hoistable = 0; | |
c4c81601 | 6338 | |
c635a1ec DB |
6339 | if (TEST_BIT (hoist_vbeout[bb->index], i) |
6340 | && TEST_BIT (transpout[bb->index], i)) | |
bb457bd9 JL |
6341 | { |
6342 | /* We've found a potentially hoistable expression, now | |
6343 | we look at every block BB dominates to see if it | |
6344 | computes the expression. */ | |
c635a1ec | 6345 | for (j = 0; j < domby_len; j++) |
bb457bd9 | 6346 | { |
c635a1ec | 6347 | dominated = domby[j]; |
bb457bd9 | 6348 | /* Ignore self dominance. */ |
c635a1ec | 6349 | if (bb == dominated) |
bb457bd9 | 6350 | continue; |
bb457bd9 JL |
6351 | /* We've found a dominated block, now see if it computes |
6352 | the busy expression and whether or not moving that | |
6353 | expression to the "beginning" of that block is safe. */ | |
e0082a72 | 6354 | if (!TEST_BIT (antloc[dominated->index], i)) |
bb457bd9 JL |
6355 | continue; |
6356 | ||
6357 | /* Note if the expression would reach the dominated block | |
589005ff | 6358 | unimpaired if it was placed at the end of BB. |
bb457bd9 JL |
6359 | |
6360 | Keep track of how many times this expression is hoistable | |
6361 | from a dominated block into BB. */ | |
e0082a72 | 6362 | if (hoist_expr_reaches_here_p (bb, i, dominated, NULL)) |
bb457bd9 JL |
6363 | hoistable++; |
6364 | } | |
6365 | ||
ff7cc307 | 6366 | /* If we found more than one hoistable occurrence of this |
bb457bd9 JL |
6367 | expression, then note it in the bitmap of expressions to |
6368 | hoist. It makes no sense to hoist things which are computed | |
6369 | in only one BB, and doing so tends to pessimize register | |
6370 | allocation. One could increase this value to try harder | |
6371 | to avoid any possible code expansion due to register | |
6372 | allocation issues; however experiments have shown that | |
6373 | the vast majority of hoistable expressions are only movable | |
e0bb17a8 | 6374 | from two successors, so raising this threshold is likely |
bb457bd9 JL |
6375 | to nullify any benefit we get from code hoisting. */ |
6376 | if (hoistable > 1) | |
6377 | { | |
e0082a72 | 6378 | SET_BIT (hoist_exprs[bb->index], i); |
bb457bd9 JL |
6379 | found = 1; |
6380 | } | |
6381 | } | |
6382 | } | |
bb457bd9 JL |
6383 | /* If we found nothing to hoist, then quit now. */ |
6384 | if (! found) | |
c635a1ec | 6385 | { |
1d088dee | 6386 | free (domby); |
bb457bd9 | 6387 | continue; |
c635a1ec | 6388 | } |
bb457bd9 JL |
6389 | |
6390 | /* Loop over all the hoistable expressions. */ | |
e0082a72 | 6391 | for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++) |
bb457bd9 JL |
6392 | { |
6393 | /* We want to insert the expression into BB only once, so | |
6394 | note when we've inserted it. */ | |
6395 | insn_inserted_p = 0; | |
6396 | ||
6397 | /* These tests should be the same as the tests above. */ | |
e0082a72 | 6398 | if (TEST_BIT (hoist_vbeout[bb->index], i)) |
bb457bd9 JL |
6399 | { |
6400 | /* We've found a potentially hoistable expression, now | |
6401 | we look at every block BB dominates to see if it | |
6402 | computes the expression. */ | |
c635a1ec | 6403 | for (j = 0; j < domby_len; j++) |
bb457bd9 | 6404 | { |
c635a1ec | 6405 | dominated = domby[j]; |
bb457bd9 | 6406 | /* Ignore self dominance. */ |
c635a1ec | 6407 | if (bb == dominated) |
bb457bd9 JL |
6408 | continue; |
6409 | ||
6410 | /* We've found a dominated block, now see if it computes | |
6411 | the busy expression and whether or not moving that | |
6412 | expression to the "beginning" of that block is safe. */ | |
e0082a72 | 6413 | if (!TEST_BIT (antloc[dominated->index], i)) |
bb457bd9 JL |
6414 | continue; |
6415 | ||
6416 | /* The expression is computed in the dominated block and | |
6417 | it would be safe to compute it at the start of the | |
6418 | dominated block. Now we have to determine if the | |
ff7cc307 | 6419 | expression would reach the dominated block if it was |
bb457bd9 | 6420 | placed at the end of BB. */ |
e0082a72 | 6421 | if (hoist_expr_reaches_here_p (bb, i, dominated, NULL)) |
bb457bd9 JL |
6422 | { |
6423 | struct expr *expr = index_map[i]; | |
6424 | struct occr *occr = expr->antic_occr; | |
6425 | rtx insn; | |
6426 | rtx set; | |
6427 | ||
ff7cc307 | 6428 | /* Find the right occurrence of this expression. */ |
e0082a72 | 6429 | while (occr && BLOCK_FOR_INSN (occr->insn) != dominated) |
bb457bd9 JL |
6430 | occr = occr->next; |
6431 | ||
6432 | /* Should never happen. */ | |
6433 | if (!occr) | |
6434 | abort (); | |
6435 | ||
6436 | insn = occr->insn; | |
589005ff | 6437 | |
bb457bd9 JL |
6438 | set = single_set (insn); |
6439 | if (! set) | |
6440 | abort (); | |
6441 | ||
6442 | /* Create a pseudo-reg to store the result of reaching | |
6443 | expressions into. Get the mode for the new pseudo | |
6444 | from the mode of the original destination pseudo. */ | |
6445 | if (expr->reaching_reg == NULL) | |
6446 | expr->reaching_reg | |
6447 | = gen_reg_rtx (GET_MODE (SET_DEST (set))); | |
6448 | ||
10d1bb36 JH |
6449 | gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn); |
6450 | delete_insn (insn); | |
6451 | occr->deleted_p = 1; | |
6452 | if (!insn_inserted_p) | |
bb457bd9 | 6453 | { |
10d1bb36 JH |
6454 | insert_insn_end_bb (index_map[i], bb, 0); |
6455 | insn_inserted_p = 1; | |
bb457bd9 JL |
6456 | } |
6457 | } | |
6458 | } | |
6459 | } | |
6460 | } | |
c635a1ec | 6461 | free (domby); |
bb457bd9 | 6462 | } |
c4c81601 | 6463 | |
8e42ace1 | 6464 | free (index_map); |
bb457bd9 JL |
6465 | } |
6466 | ||
6467 | /* Top level routine to perform one code hoisting (aka unification) pass | |
6468 | ||
cc2902df | 6469 | Return nonzero if a change was made. */ |
bb457bd9 JL |
6470 | |
6471 | static int | |
1d088dee | 6472 | one_code_hoisting_pass (void) |
bb457bd9 JL |
6473 | { |
6474 | int changed = 0; | |
6475 | ||
02280659 ZD |
6476 | alloc_hash_table (max_cuid, &expr_hash_table, 0); |
6477 | compute_hash_table (&expr_hash_table); | |
bb457bd9 | 6478 | if (gcse_file) |
02280659 | 6479 | dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table); |
c4c81601 | 6480 | |
02280659 | 6481 | if (expr_hash_table.n_elems > 0) |
bb457bd9 | 6482 | { |
02280659 | 6483 | alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems); |
bb457bd9 JL |
6484 | compute_code_hoist_data (); |
6485 | hoist_code (); | |
6486 | free_code_hoist_mem (); | |
6487 | } | |
c4c81601 | 6488 | |
02280659 | 6489 | free_hash_table (&expr_hash_table); |
bb457bd9 JL |
6490 | |
6491 | return changed; | |
6492 | } | |
a13d4ebf AM |
6493 | \f |
6494 | /* Here we provide the things required to do store motion towards | |
6495 | the exit. In order for this to be effective, gcse also needed to | |
6496 | be taught how to move a load when it is killed only by a store to itself. |
6497 | ||
6498 | int i; | |
6499 | float a[10]; | |
6500 | ||
6501 | void foo(float scale) | |
6502 | { | |
6503 | for (i=0; i<10; i++) | |
6504 | a[i] *= scale; | |
6505 | } | |
6506 | ||
6507 | 'i' is both loaded and stored to in the loop. Normally, gcse cannot move | |
589005ff KH |
6508 | the load out since it's live around the loop, and stored at the bottom |
6509 | of the loop. | |
a13d4ebf | 6510 | |
589005ff | 6511 | The 'Load Motion' referred to and implemented in this file is |
a13d4ebf AM |
6512 | an enhancement to gcse which, when using edge based lcm, recognizes |
6513 | this situation and allows gcse to move the load out of the loop. | |
6514 | ||
6515 | Once gcse has hoisted the load, store motion can then push this | |
6516 | load towards the exit, and we end up with no loads or stores of 'i' | |
6517 | in the loop. */ | |
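/* For the example above, the intended end result is roughly the
   following (a sketch, with T a hypothetical reaching register):

     void foo (float scale)
     {
       int T;
       for (T = 0; T < 10; T++)
         a[T] *= scale;
       i = T;                     <- single store, sunk past the loop
     }

   Load motion keeps 'i' in T across the loop; store motion then pushes
   the one remaining store of 'i' towards the exit.  */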
6518 | ||
ff7cc307 | 6519 | /* This will search the ldst list for a matching expression. If it |
a13d4ebf AM |
6520 | doesn't find one, we create one and initialize it. */ |
6521 | ||
6522 | static struct ls_expr * | |
1d088dee | 6523 | ldst_entry (rtx x) |
a13d4ebf | 6524 | { |
b58b21d5 | 6525 | int do_not_record_p = 0; |
a13d4ebf | 6526 | struct ls_expr * ptr; |
b58b21d5 | 6527 | unsigned int hash; |
a13d4ebf | 6528 | |
b58b21d5 | 6529 | hash = hash_expr_1 (x, GET_MODE (x), & do_not_record_p); |
a13d4ebf | 6530 | |
b58b21d5 RS |
6531 | for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next) |
6532 | if (ptr->hash_index == hash && expr_equiv_p (ptr->pattern, x)) | |
6533 | return ptr; | |
6534 | ||
6535 | ptr = xmalloc (sizeof (struct ls_expr)); | |
6536 | ||
6537 | ptr->next = pre_ldst_mems; | |
6538 | ptr->expr = NULL; | |
6539 | ptr->pattern = x; | |
6540 | ptr->pattern_regs = NULL_RTX; | |
6541 | ptr->loads = NULL_RTX; | |
6542 | ptr->stores = NULL_RTX; | |
6543 | ptr->reaching_reg = NULL_RTX; | |
6544 | ptr->invalid = 0; | |
6545 | ptr->index = 0; | |
6546 | ptr->hash_index = hash; | |
6547 | pre_ldst_mems = ptr; | |
589005ff | 6548 | |
a13d4ebf AM |
6549 | return ptr; |
6550 | } | |
6551 | ||
6552 | /* Free up an individual ldst entry. */ | |
6553 | ||
589005ff | 6554 | static void |
1d088dee | 6555 | free_ldst_entry (struct ls_expr * ptr) |
a13d4ebf | 6556 | { |
aaa4ca30 AJ |
6557 | free_INSN_LIST_list (& ptr->loads); |
6558 | free_INSN_LIST_list (& ptr->stores); | |
a13d4ebf AM |
6559 | |
6560 | free (ptr); | |
6561 | } | |
6562 | ||
6563 | /* Free up all memory associated with the ldst list. */ | |
6564 | ||
6565 | static void | |
1d088dee | 6566 | free_ldst_mems (void) |
a13d4ebf | 6567 | { |
589005ff | 6568 | while (pre_ldst_mems) |
a13d4ebf AM |
6569 | { |
6570 | struct ls_expr * tmp = pre_ldst_mems; | |
6571 | ||
6572 | pre_ldst_mems = pre_ldst_mems->next; | |
6573 | ||
6574 | free_ldst_entry (tmp); | |
6575 | } | |
6576 | ||
6577 | pre_ldst_mems = NULL; | |
6578 | } | |
6579 | ||
6580 | /* Dump debugging info about the ldst list. */ | |
6581 | ||
6582 | static void | |
1d088dee | 6583 | print_ldst_list (FILE * file) |
a13d4ebf AM |
6584 | { |
6585 | struct ls_expr * ptr; | |
6586 | ||
6587 | fprintf (file, "LDST list: \n"); | |
6588 | ||
6589 | for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr)) | |
6590 | { | |
6591 | fprintf (file, " Pattern (%3d): ", ptr->index); | |
6592 | ||
6593 | print_rtl (file, ptr->pattern); | |
6594 | ||
6595 | fprintf (file, "\n Loads : "); | |
6596 | ||
6597 | if (ptr->loads) | |
6598 | print_rtl (file, ptr->loads); | |
6599 | else | |
6600 | fprintf (file, "(nil)"); | |
6601 | ||
6602 | fprintf (file, "\n Stores : "); | |
6603 | ||
6604 | if (ptr->stores) | |
6605 | print_rtl (file, ptr->stores); | |
6606 | else | |
6607 | fprintf (file, "(nil)"); | |
6608 | ||
6609 | fprintf (file, "\n\n"); | |
6610 | } | |
6611 | ||
6612 | fprintf (file, "\n"); | |
6613 | } | |
6614 | ||
6615 | /* Return the ldst list entry matching X, or NULL if there is none. */ |
6616 | ||
6617 | static struct ls_expr * | |
1d088dee | 6618 | find_rtx_in_ldst (rtx x) |
a13d4ebf AM |
6619 | { |
6620 | struct ls_expr * ptr; | |
589005ff | 6621 | |
a13d4ebf AM |
6622 | for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next) |
6623 | if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid) | |
6624 | return ptr; | |
6625 | ||
6626 | return NULL; | |
6627 | } | |
6628 | ||
6629 | /* Assign each element of the list of mems a monotonically increasing value. */ | |
6630 | ||
6631 | static int | |
1d088dee | 6632 | enumerate_ldsts (void) |
a13d4ebf AM |
6633 | { |
6634 | struct ls_expr * ptr; | |
6635 | int n = 0; | |
6636 | ||
6637 | for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next) | |
6638 | ptr->index = n++; | |
6639 | ||
6640 | return n; | |
6641 | } | |
6642 | ||
6643 | /* Return first item in the list. */ | |
6644 | ||
6645 | static inline struct ls_expr * | |
1d088dee | 6646 | first_ls_expr (void) |
a13d4ebf AM |
6647 | { |
6648 | return pre_ldst_mems; | |
6649 | } | |
6650 | ||
0e8a66de | 6651 | /* Return the next item in the list after the specified one. */ |
a13d4ebf AM |
6652 | |
6653 | static inline struct ls_expr * | |
1d088dee | 6654 | next_ls_expr (struct ls_expr * ptr) |
a13d4ebf AM |
6655 | { |
6656 | return ptr->next; | |
6657 | } | |
6658 | \f | |
6659 | /* Load Motion for loads which only kill themselves. */ | |
6660 | ||
6661 | /* Return true if x is a simple MEM operation, with no registers or | |
6662 | side effects. These are the types of loads we consider for the | |
6663 | ld_motion list; otherwise we let the usual aliasing take care of it. */ |
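/* Examples of what the tests below accept and reject (a sketch):

     (mem:SI (symbol_ref "i"))                  accepted
     (mem:BLK (symbol_ref "buf"))               rejected: BLKmode
     (mem/v:SI (symbol_ref "port"))             rejected: volatile
     (mem:SI (plus:SI (reg sp) (const_int 4)))  rejected: mentions the
                                                stack pointer           */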
6664 | ||
589005ff | 6665 | static int |
1d088dee | 6666 | simple_mem (rtx x) |
a13d4ebf AM |
6667 | { |
6668 | if (GET_CODE (x) != MEM) | |
6669 | return 0; | |
589005ff | 6670 | |
a13d4ebf AM |
6671 | if (MEM_VOLATILE_P (x)) |
6672 | return 0; | |
589005ff | 6673 | |
a13d4ebf AM |
6674 | if (GET_MODE (x) == BLKmode) |
6675 | return 0; | |
aaa4ca30 | 6676 | |
47a3dae1 ZD |
6677 | /* If we are handling exceptions, we must be careful with memory references |
6678 | that may trap. If we are not, the behavior is undefined, so we may just | |
6679 | continue. */ | |
6680 | if (flag_non_call_exceptions && may_trap_p (x)) | |
98d3d336 RS |
6681 | return 0; |
6682 | ||
47a3dae1 ZD |
6683 | if (side_effects_p (x)) |
6684 | return 0; | |
589005ff | 6685 | |
47a3dae1 ZD |
6686 | /* Do not consider function arguments passed on stack. */ |
6687 | if (reg_mentioned_p (stack_pointer_rtx, x)) | |
6688 | return 0; | |
6689 | ||
6690 | if (flag_float_store && FLOAT_MODE_P (GET_MODE (x))) | |
6691 | return 0; | |
6692 | ||
6693 | return 1; | |
a13d4ebf AM |
6694 | } |
6695 | ||
589005ff KH |
6696 | /* Make sure there isn't a buried reference in this pattern anywhere. |
6697 | If there is, invalidate the entry for it since we're not capable | |
6698 | of fixing it up just yet. We have to be sure we know about ALL |
a13d4ebf AM |
6699 | loads since the aliasing code will allow all entries in the |
6700 | ld_motion list to not-alias itself. If we miss a load, we will get | |
589005ff | 6701 | the wrong value since gcse might common it and we won't know to |
a13d4ebf AM |
6702 | fix it up. */ |
6703 | ||
6704 | static void | |
1d088dee | 6705 | invalidate_any_buried_refs (rtx x) |
a13d4ebf AM |
6706 | { |
6707 | const char * fmt; | |
8e42ace1 | 6708 | int i, j; |
a13d4ebf AM |
6709 | struct ls_expr * ptr; |
6710 | ||
6711 | /* Invalidate it in the list. */ | |
6712 | if (GET_CODE (x) == MEM && simple_mem (x)) | |
6713 | { | |
6714 | ptr = ldst_entry (x); | |
6715 | ptr->invalid = 1; | |
6716 | } | |
6717 | ||
6718 | /* Recursively process the insn. */ | |
6719 | fmt = GET_RTX_FORMAT (GET_CODE (x)); | |
589005ff | 6720 | |
a13d4ebf AM |
6721 | for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--) |
6722 | { | |
6723 | if (fmt[i] == 'e') | |
6724 | invalidate_any_buried_refs (XEXP (x, i)); | |
6725 | else if (fmt[i] == 'E') | |
6726 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
6727 | invalidate_any_buried_refs (XVECEXP (x, i, j)); | |
6728 | } | |
6729 | } | |
6730 | ||
4d3eb89a HPN |
6731 | /* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple |
6732 | being defined as MEM loads and stores to symbols, with no side effects | |
6733 | and no registers in the expression. For a MEM destination, we also | |
6734 | check that the insn is still valid if we replace the destination with a | |
6735 | REG, as is done in update_ld_motion_stores. If there are any uses/defs | |
6736 | which don't match this criteria, they are invalidated and trimmed out | |
6737 | later. */ | |
a13d4ebf | 6738 | |
589005ff | 6739 | static void |
1d088dee | 6740 | compute_ld_motion_mems (void) |
a13d4ebf AM |
6741 | { |
6742 | struct ls_expr * ptr; | |
e0082a72 | 6743 | basic_block bb; |
a13d4ebf | 6744 | rtx insn; |
589005ff | 6745 | |
a13d4ebf AM |
6746 | pre_ldst_mems = NULL; |
6747 | ||
e0082a72 | 6748 | FOR_EACH_BB (bb) |
a13d4ebf | 6749 | { |
e0082a72 ZD |
6750 | for (insn = bb->head; |
6751 | insn && insn != NEXT_INSN (bb->end); | |
a13d4ebf AM |
6752 | insn = NEXT_INSN (insn)) |
6753 | { | |
735e8085 | 6754 | if (INSN_P (insn)) |
a13d4ebf AM |
6755 | { |
6756 | if (GET_CODE (PATTERN (insn)) == SET) | |
6757 | { | |
6758 | rtx src = SET_SRC (PATTERN (insn)); | |
6759 | rtx dest = SET_DEST (PATTERN (insn)); | |
6760 | ||
6761 | /* Check for a simple LOAD... */ | |
6762 | if (GET_CODE (src) == MEM && simple_mem (src)) | |
6763 | { | |
6764 | ptr = ldst_entry (src); | |
6765 | if (GET_CODE (dest) == REG) | |
6766 | ptr->loads = alloc_INSN_LIST (insn, ptr->loads); | |
6767 | else | |
6768 | ptr->invalid = 1; | |
6769 | } | |
6770 | else | |
6771 | { | |
6772 | /* Make sure there isn't a buried load somewhere. */ | |
6773 | invalidate_any_buried_refs (src); | |
6774 | } | |
589005ff | 6775 | |
a13d4ebf AM |
6776 | /* Check for stores. Don't worry about aliased ones; they |
6777 | will block any movement we might do later. We only care | |
6778 | about this exact pattern since those are the only | |
6779 | circumstances in which we will ignore the aliasing info. */ |
6780 | if (GET_CODE (dest) == MEM && simple_mem (dest)) | |
6781 | { | |
6782 | ptr = ldst_entry (dest); | |
589005ff | 6783 | |
f54104df | 6784 | if (GET_CODE (src) != MEM |
4d3eb89a HPN |
6785 | && GET_CODE (src) != ASM_OPERANDS |
6786 | /* Check for REG manually since want_to_gcse_p | |
6787 | returns 0 for all REGs. */ | |
6788 | && (REG_P (src) || want_to_gcse_p (src))) | |
a13d4ebf AM |
6789 | ptr->stores = alloc_INSN_LIST (insn, ptr->stores); |
6790 | else | |
6791 | ptr->invalid = 1; | |
6792 | } | |
6793 | } | |
6794 | else | |
6795 | invalidate_any_buried_refs (PATTERN (insn)); | |
6796 | } | |
6797 | } | |
6798 | } | |
6799 | } | |
6800 | ||
589005ff | 6801 | /* Remove any references that have been either invalidated or are not in the |
a13d4ebf AM |
6802 | expression list for pre gcse. */ |
6803 | ||
6804 | static void | |
1d088dee | 6805 | trim_ld_motion_mems (void) |
a13d4ebf | 6806 | { |
b58b21d5 RS |
6807 | struct ls_expr * * last = & pre_ldst_mems; |
6808 | struct ls_expr * ptr = pre_ldst_mems; | |
a13d4ebf AM |
6809 | |
6810 | while (ptr != NULL) | |
6811 | { | |
b58b21d5 | 6812 | struct expr * expr; |
589005ff | 6813 | |
a13d4ebf | 6814 | /* Delete if entry has been made invalid. */ |
b58b21d5 | 6815 | if (! ptr->invalid) |
a13d4ebf | 6816 | { |
a13d4ebf | 6817 | /* Delete if we cannot find this mem in the expression list. */ |
b58b21d5 | 6818 | unsigned int hash = ptr->hash_index % expr_hash_table.size; |
589005ff | 6819 | |
b58b21d5 RS |
6820 | for (expr = expr_hash_table.table[hash]; |
6821 | expr != NULL; | |
6822 | expr = expr->next_same_hash) | |
6823 | if (expr_equiv_p (expr->expr, ptr->pattern)) | |
6824 | break; | |
a13d4ebf AM |
6825 | } |
6826 | else | |
b58b21d5 RS |
6827 | expr = (struct expr *) 0; |
6828 | ||
6829 | if (expr) | |
a13d4ebf AM |
6830 | { |
6831 | /* Set the expression field if we are keeping it. */ | |
a13d4ebf | 6832 | ptr->expr = expr; |
b58b21d5 | 6833 | last = & ptr->next; |
a13d4ebf AM |
6834 | ptr = ptr->next; |
6835 | } | |
b58b21d5 RS |
6836 | else |
6837 | { | |
6838 | *last = ptr->next; | |
6839 | free_ldst_entry (ptr); | |
6840 | ptr = * last; | |
6841 | } | |
a13d4ebf AM |
6842 | } |
6843 | ||
6844 | /* Show the world what we've found. */ | |
6845 | if (gcse_file && pre_ldst_mems != NULL) | |
6846 | print_ldst_list (gcse_file); | |
6847 | } | |
6848 | ||
6849 | /* This routine will take an expression which we are replacing with | |
6850 | a reaching register, and update any stores that are needed if | |
6851 | that expression is in the ld_motion list. Stores are updated by | |
a98ebe2e | 6852 | copying their SRC to the reaching register, and then storing |
a13d4ebf AM |
6853 | the reaching register into the store location. This keeps the |
6854 | correct value in the reaching register for the loads. */ | |
6855 | ||
6856 | static void | |
1d088dee | 6857 | update_ld_motion_stores (struct expr * expr) |
a13d4ebf AM |
6858 | { |
6859 | struct ls_expr * mem_ptr; | |
6860 | ||
6861 | if ((mem_ptr = find_rtx_in_ldst (expr->expr))) | |
6862 | { | |
589005ff KH |
6863 | /* We can try to find just the REACHED stores, but it shouldn't |
6864 | matter to set the reaching reg everywhere... some might be | |
a13d4ebf AM |
6865 | dead and should be eliminated later. */ |
6866 | ||
4d3eb89a HPN |
6867 | /* We replace (set mem expr) with (set reg expr) (set mem reg) |
6868 | where reg is the reaching reg used in the load. We checked in | |
6869 | compute_ld_motion_mems that we can replace (set mem expr) with | |
6870 | (set reg expr) in that insn. */ | |
a13d4ebf | 6871 | rtx list = mem_ptr->stores; |
589005ff | 6872 | |
a13d4ebf AM |
6873 | for ( ; list != NULL_RTX; list = XEXP (list, 1)) |
6874 | { | |
6875 | rtx insn = XEXP (list, 0); | |
6876 | rtx pat = PATTERN (insn); | |
6877 | rtx src = SET_SRC (pat); | |
6878 | rtx reg = expr->reaching_reg; | |
c57718d3 | 6879 | rtx copy, new; |
a13d4ebf AM |
6880 | |
6881 | /* If we've already copied it, continue. */ | |
6882 | if (expr->reaching_reg == src) | |
6883 | continue; | |
589005ff | 6884 | |
a13d4ebf AM |
6885 | if (gcse_file) |
6886 | { | |
6887 | fprintf (gcse_file, "PRE: store updated with reaching reg "); | |
6888 | print_rtl (gcse_file, expr->reaching_reg); | |
6889 | fprintf (gcse_file, ":\n "); | |
6890 | print_inline_rtx (gcse_file, insn, 8); | |
6891 | fprintf (gcse_file, "\n"); | |
6892 | } | |
589005ff | 6893 | |
47a3dae1 | 6894 | copy = gen_move_insn (reg, copy_rtx (SET_SRC (pat))); |
c57718d3 RK |
6895 | new = emit_insn_before (copy, insn); |
6896 | record_one_set (REGNO (reg), new); | |
a13d4ebf AM |
6897 | SET_SRC (pat) = reg; |
6898 | ||
6899 | /* Un-recognize this pattern since it's probably different now. */
6900 | INSN_CODE (insn) = -1; | |
6901 | gcse_create_count++; | |
6902 | } | |
6903 | } | |
6904 | } | |
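/* [Illustrative sketch, not from the original source.]  At the source
   level the rewrite above splits a store so the stored value also
   lives in a register that later loads can reuse.  Function and
   variable names here are invented for illustration:  */

/* Before: a later read of *P would have to reload memory.  */
static void
store_before (int *p, int x, int y)
{
  *p = x + y;
}

/* After: (set mem expr) becomes (set reg expr) (set mem reg), so the
   value stays in R for any following load of *P.  */
static void
store_after (int *p, int x, int y)
{
  int r = x + y;   /* (set reg expr)  */
  *p = r;          /* (set mem reg)   */
}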
6905 | \f | |
6906 | /* Store motion code. */ | |
6907 | ||
47a3dae1 ZD |
6908 | #define ANTIC_STORE_LIST(x) ((x)->loads) |
6909 | #define AVAIL_STORE_LIST(x) ((x)->stores) | |
6910 | #define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg) | |
6911 | ||
589005ff | 6912 | /* This is used to communicate the target bitvector we want to use in the |
aaa4ca30 | 6913 | reg_set_info routine when called via the note_stores mechanism. */ |
47a3dae1 ZD |
6914 | static int * regvec; |
6915 | ||
6916 | /* And current insn, for the same routine. */ | |
6917 | static rtx compute_store_table_current_insn; | |
aaa4ca30 | 6918 | |
a13d4ebf AM |
6919 | /* Used in computing the reverse edge graph bit vectors. */ |
6920 | static sbitmap * st_antloc; | |
6921 | ||
6922 | /* Global holding the number of store expressions we are dealing with. */ | |
6923 | static int num_stores; | |
6924 | ||
01c43039 RE |
6925 | /* Check whether we need to mark a register as set. Called from
6926 | note_stores. */ | |
a13d4ebf | 6927 | |
aaa4ca30 | 6928 | static void |
1d088dee | 6929 | reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, |
01c43039 | 6930 | void *data) |
a13d4ebf | 6931 | { |
01c43039 RE |
6932 | sbitmap bb_reg = data; |
6933 | ||
aaa4ca30 AJ |
6934 | if (GET_CODE (dest) == SUBREG) |
6935 | dest = SUBREG_REG (dest); | |
adfcce61 | 6936 | |
aaa4ca30 | 6937 | if (GET_CODE (dest) == REG) |
01c43039 RE |
6938 | { |
6939 | regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn); | |
6940 | if (bb_reg) | |
6941 | SET_BIT (bb_reg, REGNO (dest)); | |
6942 | } | |
6943 | } | |
6944 | ||
6945 | /* Clear any mark that says that this insn sets dest. Called from | |
6946 | note_stores. */ | |
6947 | ||
6948 | static void | |
6949 | reg_clear_last_set (rtx dest, rtx setter ATTRIBUTE_UNUSED, | |
6950 | void *data) | |
6951 | { | |
6952 | int *dead_vec = data; | |
6953 | ||
6954 | if (GET_CODE (dest) == SUBREG) | |
6955 | dest = SUBREG_REG (dest); | |
6956 | ||
6957 | if (GET_CODE (dest) == REG && | |
6958 | dead_vec[REGNO (dest)] == INSN_UID (compute_store_table_current_insn)) | |
6959 | dead_vec[REGNO (dest)] = 0; | |
a13d4ebf AM |
6960 | } |
6961 | ||
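/* [Illustrative sketch, not from the original source.]  Both callbacks
   above follow the note_stores protocol: a walker calls a user function
   for each destination an insn sets, threading an opaque DATA pointer
   back to the caller.  The walker and record type below are hypothetical
   stand-ins for the real RTL walker:  */

struct set_record { unsigned regno; };

typedef void (*set_fn) (struct set_record *, void *);

/* Call FN on every record, passing DATA through untouched.  */
static void
for_each_set (struct set_record *sets, int n, set_fn fn, void *data)
{
  int i;

  for (i = 0; i < n; i++)
    fn (&sets[i], data);
}

/* A callback in the style of reg_set_info: mark the set register in
   the byte map passed through DATA.  */
static void
mark_reg (struct set_record *s, void *data)
{
  unsigned char *setmap = data;

  setmap[s->regno] = 1;
}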
47a3dae1 ZD |
6962 | /* Return false if any of the registers in list X are killed
6963 | because they are set according to REGS_SET. */
1d088dee | 6964 | |
47a3dae1 | 6965 | static bool |
1d088dee | 6966 | store_ops_ok (rtx x, int *regs_set) |
47a3dae1 ZD |
6967 | { |
6968 | rtx reg; | |
6969 | ||
6970 | for (; x; x = XEXP (x, 1)) | |
6971 | { | |
6972 | reg = XEXP (x, 0); | |
6973 | if (regs_set[REGNO(reg)]) | |
1d088dee | 6974 | return false; |
47a3dae1 | 6975 | } |
a13d4ebf | 6976 | |
47a3dae1 ZD |
6977 | return true; |
6978 | } | |
6979 | ||
6980 | /* Returns a list of registers mentioned in X. */ | |
6981 | static rtx | |
1d088dee | 6982 | extract_mentioned_regs (rtx x) |
47a3dae1 ZD |
6983 | { |
6984 | return extract_mentioned_regs_helper (x, NULL_RTX); | |
6985 | } | |
6986 | ||
6987 | /* Helper for extract_mentioned_regs; ACCUM is used to accumulate used | |
6988 | registers. */ | |
6989 | static rtx | |
1d088dee | 6990 | extract_mentioned_regs_helper (rtx x, rtx accum) |
a13d4ebf AM |
6991 | { |
6992 | int i; | |
6993 | enum rtx_code code; | |
6994 | const char * fmt; | |
6995 | ||
6996 | /* Repeat is used to turn tail-recursion into iteration. */ | |
6997 | repeat: | |
6998 | ||
6999 | if (x == 0) | |
47a3dae1 | 7000 | return accum; |
a13d4ebf AM |
7001 | |
7002 | code = GET_CODE (x); | |
7003 | switch (code) | |
7004 | { | |
7005 | case REG: | |
47a3dae1 | 7006 | return alloc_EXPR_LIST (0, x, accum); |
a13d4ebf AM |
7007 | |
7008 | case MEM: | |
7009 | x = XEXP (x, 0); | |
7010 | goto repeat; | |
7011 | ||
7012 | case PRE_DEC: | |
7013 | case PRE_INC: | |
7014 | case POST_DEC: | |
7015 | case POST_INC: | |
47a3dae1 ZD |
7016 | /* We do not run this function with arguments having side effects. */ |
7017 | abort (); | |
a13d4ebf AM |
7018 | |
7019 | case PC: | |
7020 | case CC0: /*FIXME*/ | |
7021 | case CONST: | |
7022 | case CONST_INT: | |
7023 | case CONST_DOUBLE: | |
69ef87e2 | 7024 | case CONST_VECTOR: |
a13d4ebf AM |
7025 | case SYMBOL_REF: |
7026 | case LABEL_REF: | |
7027 | case ADDR_VEC: | |
7028 | case ADDR_DIFF_VEC: | |
47a3dae1 | 7029 | return accum; |
a13d4ebf AM |
7030 | |
7031 | default: | |
7032 | break; | |
7033 | } | |
7034 | ||
7035 | i = GET_RTX_LENGTH (code) - 1; | |
7036 | fmt = GET_RTX_FORMAT (code); | |
589005ff | 7037 | |
a13d4ebf AM |
7038 | for (; i >= 0; i--) |
7039 | { | |
7040 | if (fmt[i] == 'e') | |
7041 | { | |
7042 | rtx tem = XEXP (x, i); | |
7043 | ||
7044 | /* If we are about to do the last recursive call | |
47a3dae1 | 7045 | needed at this level, change it into iteration. */ |
a13d4ebf AM |
7046 | if (i == 0) |
7047 | { | |
7048 | x = tem; | |
7049 | goto repeat; | |
7050 | } | |
589005ff | 7051 | |
47a3dae1 | 7052 | accum = extract_mentioned_regs_helper (tem, accum); |
a13d4ebf AM |
7053 | } |
7054 | else if (fmt[i] == 'E') | |
7055 | { | |
7056 | int j; | |
589005ff | 7057 | |
a13d4ebf | 7058 | for (j = 0; j < XVECLEN (x, i); j++) |
47a3dae1 | 7059 | accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum); |
a13d4ebf AM |
7060 | } |
7061 | } | |
7062 | ||
47a3dae1 | 7063 | return accum; |
a13d4ebf AM |
7064 | } |
7065 | ||
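/* [Illustrative sketch, not from the original source.]  The "repeat"
   label above is the usual way to turn the last recursive call of a
   tree walk into iteration, so stack depth tracks only the branching
   of the structure, not its length.  A standalone rendering with a
   hypothetical node type:  */

struct bintree { int value; struct bintree *left, *right; };

static int
tree_sum (struct bintree *t)
{
  int sum = 0;

 repeat:
  if (t == NULL)
    return sum;

  sum += t->value;
  /* Genuine recursion for one child...  */
  sum += tree_sum (t->left);
  /* ...but the final call is converted into a jump.  */
  t = t->right;
  goto repeat;
}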
47a3dae1 ZD |
7066 | /* Determine whether INSN is a MEM store pattern that we will consider moving.
7067 | REGS_SET_BEFORE is a bitmap of registers set before (and including) the
7068 | current insn, REGS_SET_AFTER is a bitmap of registers set after (and
7069 | including) the insn in this basic block. We must be passing through BB from
7070 | head to end, as we are using this fact to speed things up. | |
1d088dee | 7071 | |
47a3dae1 ZD |
7072 | The results are stored this way: |
7073 | ||
7074 | -- the first anticipatable expression is added into ANTIC_STORE_LIST | |
7075 | -- if the processed expression is not anticipatable, NULL_RTX is added | |
7076 | there instead, so that we can use it as an indicator that no further
7077 | expression of this type may be anticipatable | |
7078 | -- if the expression is available, it is added as head of AVAIL_STORE_LIST; | |
7079 | consequently, all of them but this head are dead and may be deleted. | |
7080 | -- if the expression is not available, the insn that causes it to fail to be
7081 | available is stored in reaching_reg.
7082 | ||
7083 | Things are complicated a bit by the fact that there may already be stores
7084 | to the same MEM from other blocks; the caller must also take care of the
e0bb17a8 | 7085 | necessary cleanup of the temporary markers after the end of the basic block.
47a3dae1 | 7086 | */ |
a13d4ebf AM |
7087 | |
7088 | static void | |
1d088dee | 7089 | find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after) |
a13d4ebf AM |
7090 | { |
7091 | struct ls_expr * ptr; | |
47a3dae1 ZD |
7092 | rtx dest, set, tmp; |
7093 | int check_anticipatable, check_available; | |
7094 | basic_block bb = BLOCK_FOR_INSN (insn); | |
a13d4ebf | 7095 | |
47a3dae1 ZD |
7096 | set = single_set (insn); |
7097 | if (!set) | |
a13d4ebf AM |
7098 | return; |
7099 | ||
47a3dae1 | 7100 | dest = SET_DEST (set); |
589005ff | 7101 | |
a13d4ebf AM |
7102 | if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest) |
7103 | || GET_MODE (dest) == BLKmode) | |
aaa4ca30 AJ |
7104 | return; |
7105 | ||
47a3dae1 ZD |
7106 | if (side_effects_p (dest)) |
7107 | return; | |
aaa4ca30 | 7108 | |
47a3dae1 ZD |
7109 | /* If we are handling exceptions, we must be careful with memory references |
7110 | that may trap. If we are not, the behavior is undefined, so we may just | |
7111 | continue. */ | |
94f24ddc | 7112 | if (flag_non_call_exceptions && may_trap_p (dest)) |
47a3dae1 | 7113 | return; |
1d088dee | 7114 | |
a13d4ebf | 7115 | ptr = ldst_entry (dest); |
47a3dae1 ZD |
7116 | if (!ptr->pattern_regs) |
7117 | ptr->pattern_regs = extract_mentioned_regs (dest); | |
7118 | ||
7119 | /* Do not check for anticipatability if we either found one anticipatable | |
7120 | store already, or tested for one and found out that it was killed. */ | |
7121 | check_anticipatable = 0; | |
7122 | if (!ANTIC_STORE_LIST (ptr)) | |
7123 | check_anticipatable = 1; | |
7124 | else | |
7125 | { | |
7126 | tmp = XEXP (ANTIC_STORE_LIST (ptr), 0); | |
7127 | if (tmp != NULL_RTX | |
7128 | && BLOCK_FOR_INSN (tmp) != bb) | |
7129 | check_anticipatable = 1; | |
7130 | } | |
7131 | if (check_anticipatable) | |
7132 | { | |
7133 | if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before)) | |
7134 | tmp = NULL_RTX; | |
7135 | else | |
7136 | tmp = insn; | |
7137 | ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp, | |
7138 | ANTIC_STORE_LIST (ptr)); | |
7139 | } | |
a13d4ebf | 7140 | |
e0bb17a8 | 7141 | /* It is not necessary to check whether the store is available if we did
47a3dae1 ZD |
7142 | it successfully before; if we failed before, do not bother to check |
7143 | until we reach the insn that caused us to fail. */ | |
7144 | check_available = 0; | |
7145 | if (!AVAIL_STORE_LIST (ptr)) | |
7146 | check_available = 1; | |
7147 | else | |
7148 | { | |
7149 | tmp = XEXP (AVAIL_STORE_LIST (ptr), 0); | |
7150 | if (BLOCK_FOR_INSN (tmp) != bb) | |
7151 | check_available = 1; | |
7152 | } | |
7153 | if (check_available) | |
7154 | { | |
7155 | /* Check that we have already reached the insn at which the check
7156 | failed last time. */ | |
7157 | if (LAST_AVAIL_CHECK_FAILURE (ptr)) | |
7158 | { | |
7159 | for (tmp = bb->end; | |
7160 | tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr); | |
7161 | tmp = PREV_INSN (tmp)) | |
7162 | continue; | |
7163 | if (tmp == insn) | |
7164 | check_available = 0; | |
7165 | } | |
7166 | else | |
7167 | check_available = store_killed_after (dest, ptr->pattern_regs, insn, | |
7168 | bb, regs_set_after, | |
7169 | &LAST_AVAIL_CHECK_FAILURE (ptr)); | |
7170 | } | |
7171 | if (!check_available) | |
7172 | AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr)); | |
7173 | } | |
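/* [Illustrative sketch, not from the original source.]  The properties
   computed above can be restated on a flat list of events: a store is
   locally anticipatable if, scanning the block forward, it appears
   before anything that kills it, and locally available if, scanning
   backward from the block end, it appears before any kill.  The enum
   and function below are hypothetical:  */

enum mem_event { EV_STORE, EV_KILL, EV_OTHER };

static void
classify_store (enum mem_event *ev, int n, int *antic, int *avail)
{
  int i;

  *antic = *avail = 0;

  /* Forward scan: the first relevant event decides anticipatability.  */
  for (i = 0; i < n; i++)
    {
      if (ev[i] == EV_STORE)
        {
          *antic = 1;
          break;
        }
      else if (ev[i] == EV_KILL)
        break;
    }

  /* Backward scan: the first relevant event decides availability.  */
  for (i = n - 1; i >= 0; i--)
    {
      if (ev[i] == EV_STORE)
        {
          *avail = 1;
          break;
        }
      else if (ev[i] == EV_KILL)
        break;
    }
}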
1d088dee | 7174 | |
47a3dae1 | 7175 | /* Find available and anticipatable stores. */ |
a13d4ebf AM |
7176 | |
7177 | static int | |
1d088dee | 7178 | compute_store_table (void) |
a13d4ebf | 7179 | { |
e0082a72 ZD |
7180 | int ret; |
7181 | basic_block bb; | |
aaa4ca30 | 7182 | unsigned regno; |
47a3dae1 ZD |
7183 | rtx insn, pat, tmp; |
7184 | int *last_set_in, *already_set; | |
7185 | struct ls_expr * ptr, **prev_next_ptr_ptr; | |
aaa4ca30 | 7186 | |
a13d4ebf AM |
7187 | max_gcse_regno = max_reg_num (); |
7188 | ||
703ad42b | 7189 | reg_set_in_block = sbitmap_vector_alloc (last_basic_block, |
aaa4ca30 | 7190 | max_gcse_regno); |
d55bc081 | 7191 | sbitmap_vector_zero (reg_set_in_block, last_basic_block); |
a13d4ebf | 7192 | pre_ldst_mems = 0; |
01c43039 | 7193 | last_set_in = xcalloc (max_gcse_regno, sizeof (int)); |
47a3dae1 | 7194 | already_set = xmalloc (sizeof (int) * max_gcse_regno); |
aaa4ca30 | 7195 | |
a13d4ebf | 7196 | /* Find all the stores we care about. */ |
e0082a72 | 7197 | FOR_EACH_BB (bb) |
a13d4ebf | 7198 | { |
47a3dae1 | 7199 | /* First compute the registers set in this block. */ |
47a3dae1 ZD |
7200 | regvec = last_set_in; |
7201 | ||
7202 | for (insn = bb->head; | |
7203 | insn != NEXT_INSN (bb->end); | |
7204 | insn = NEXT_INSN (insn)) | |
7205 | { | |
7206 | if (! INSN_P (insn)) | |
7207 | continue; | |
7208 | ||
7209 | if (GET_CODE (insn) == CALL_INSN) | |
7210 | { | |
7211 | bool clobbers_all = false; | |
7212 | #ifdef NON_SAVING_SETJMP | |
7213 | if (NON_SAVING_SETJMP | |
7214 | && find_reg_note (insn, REG_SETJMP, NULL_RTX)) | |
7215 | clobbers_all = true; | |
7216 | #endif | |
7217 | ||
7218 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) | |
7219 | if (clobbers_all | |
7220 | || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)) | |
01c43039 RE |
7221 | { |
7222 | last_set_in[regno] = INSN_UID (insn); | |
7223 | SET_BIT (reg_set_in_block[bb->index], regno); | |
7224 | } | |
47a3dae1 ZD |
7225 | } |
7226 | ||
7227 | pat = PATTERN (insn); | |
7228 | compute_store_table_current_insn = insn; | |
01c43039 | 7229 | note_stores (pat, reg_set_info, reg_set_in_block[bb->index]); |
47a3dae1 ZD |
7230 | } |
7231 | ||
47a3dae1 ZD |
7232 | /* Now find the stores. */ |
7233 | memset (already_set, 0, sizeof (int) * max_gcse_regno); | |
7234 | regvec = already_set; | |
7235 | for (insn = bb->head; | |
7236 | insn != NEXT_INSN (bb->end); | |
7237 | insn = NEXT_INSN (insn)) | |
a13d4ebf | 7238 | { |
19652adf | 7239 | if (! INSN_P (insn)) |
a13d4ebf AM |
7240 | continue; |
7241 | ||
aaa4ca30 AJ |
7242 | if (GET_CODE (insn) == CALL_INSN) |
7243 | { | |
19652adf | 7244 | bool clobbers_all = false; |
589005ff | 7245 | #ifdef NON_SAVING_SETJMP |
19652adf ZW |
7246 | if (NON_SAVING_SETJMP |
7247 | && find_reg_note (insn, REG_SETJMP, NULL_RTX)) | |
7248 | clobbers_all = true; | |
7249 | #endif | |
7250 | ||
aaa4ca30 | 7251 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) |
19652adf ZW |
7252 | if (clobbers_all |
7253 | || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)) | |
47a3dae1 | 7254 | already_set[regno] = 1; |
aaa4ca30 | 7255 | } |
589005ff | 7256 | |
a13d4ebf | 7257 | pat = PATTERN (insn); |
aaa4ca30 | 7258 | note_stores (pat, reg_set_info, NULL); |
589005ff | 7259 | |
a13d4ebf | 7260 | /* Now that we've marked regs, look for stores. */ |
47a3dae1 ZD |
7261 | find_moveable_store (insn, already_set, last_set_in); |
7262 | ||
7263 | /* Unmark regs that are no longer set. */ | |
01c43039 RE |
7264 | compute_store_table_current_insn = insn; |
7265 | note_stores (pat, reg_clear_last_set, last_set_in); | |
7266 | if (GET_CODE (insn) == CALL_INSN) | |
7267 | { | |
7268 | bool clobbers_all = false; | |
7269 | #ifdef NON_SAVING_SETJMP | |
7270 | if (NON_SAVING_SETJMP | |
7271 | && find_reg_note (insn, REG_SETJMP, NULL_RTX)) | |
7272 | clobbers_all = true; | |
7273 | #endif | |
7274 | ||
7275 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) | |
7276 | if ((clobbers_all | |
7277 | || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)) | |
7278 | && last_set_in[regno] == INSN_UID (insn)) | |
7279 | last_set_in[regno] = 0; | |
7280 | } | |
47a3dae1 ZD |
7281 | } |
7282 | ||
01c43039 RE |
7283 | #ifdef ENABLE_CHECKING |
7284 | /* last_set_in should now be all-zero. */ | |
7285 | for (regno = 0; regno < max_gcse_regno; regno++) | |
7286 | if (last_set_in[regno] != 0) | |
7287 | abort (); | |
7288 | #endif | |
7289 | ||
47a3dae1 ZD |
7290 | /* Clear temporary marks. */ |
7291 | for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr)) | |
7292 | { | |
7293 | LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX; | |
7294 | if (ANTIC_STORE_LIST (ptr) | |
7295 | && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX) | |
7296 | ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1); | |
7297 | } | |
7298 | } | |
7299 | ||
7300 | /* Remove the stores that are not available anywhere, as there will | |
7301 | be no opportunity to optimize them. */ | |
7302 | for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems; | |
7303 | ptr != NULL; | |
7304 | ptr = *prev_next_ptr_ptr) | |
7305 | { | |
7306 | if (!AVAIL_STORE_LIST (ptr)) | |
7307 | { | |
7308 | *prev_next_ptr_ptr = ptr->next; | |
7309 | free_ldst_entry (ptr); | |
a13d4ebf | 7310 | } |
47a3dae1 ZD |
7311 | else |
7312 | prev_next_ptr_ptr = &ptr->next; | |
a13d4ebf AM |
7313 | } |
7314 | ||
7315 | ret = enumerate_ldsts (); | |
589005ff | 7316 | |
a13d4ebf AM |
7317 | if (gcse_file) |
7318 | { | |
47a3dae1 | 7319 | fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n"); |
a13d4ebf AM |
7320 | print_ldst_list (gcse_file); |
7321 | } | |
589005ff | 7322 | |
47a3dae1 ZD |
7323 | free (last_set_in); |
7324 | free (already_set); | |
a13d4ebf AM |
7325 | return ret; |
7326 | } | |
7327 | ||
3b14e3af ZD |
7328 | /* Check to see if the load X is aliased with STORE_PATTERN. |
7329 | AFTER is true if we are checking the case when STORE_PATTERN occurs | |
7330 | after X. */
a13d4ebf | 7331 | |
47a3dae1 | 7332 | static bool |
3b14e3af | 7333 | load_kills_store (rtx x, rtx store_pattern, int after) |
a13d4ebf | 7334 | { |
3b14e3af ZD |
7335 | if (after) |
7336 | return anti_dependence (x, store_pattern); | |
7337 | else | |
7338 | return true_dependence (store_pattern, GET_MODE (store_pattern), x, | |
7339 | rtx_addr_varies_p); | |
a13d4ebf AM |
7340 | } |
7341 | ||
589005ff | 7342 | /* Go through the entire insn X, looking for any loads which might alias |
3b14e3af ZD |
7343 | STORE_PATTERN. Return true if found. |
7344 | AFTER is true if we are checking the case when STORE_PATTERN occurs | |
7345 | after the insn X. */ | |
a13d4ebf | 7346 | |
47a3dae1 | 7347 | static bool |
3b14e3af | 7348 | find_loads (rtx x, rtx store_pattern, int after) |
a13d4ebf AM |
7349 | { |
7350 | const char * fmt; | |
8e42ace1 | 7351 | int i, j; |
47a3dae1 | 7352 | int ret = false; |
a13d4ebf | 7353 | |
24a28584 | 7354 | if (!x) |
47a3dae1 | 7355 | return false; |
24a28584 | 7356 | |
589005ff | 7357 | if (GET_CODE (x) == SET) |
a13d4ebf AM |
7358 | x = SET_SRC (x); |
7359 | ||
7360 | if (GET_CODE (x) == MEM) | |
7361 | { | |
3b14e3af | 7362 | if (load_kills_store (x, store_pattern, after)) |
47a3dae1 | 7363 | return true; |
a13d4ebf AM |
7364 | } |
7365 | ||
7366 | /* Recursively process the insn. */ | |
7367 | fmt = GET_RTX_FORMAT (GET_CODE (x)); | |
589005ff | 7368 | |
a13d4ebf AM |
7369 | for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--) |
7370 | { | |
7371 | if (fmt[i] == 'e') | |
3b14e3af | 7372 | ret |= find_loads (XEXP (x, i), store_pattern, after); |
a13d4ebf AM |
7373 | else if (fmt[i] == 'E') |
7374 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
3b14e3af | 7375 | ret |= find_loads (XVECEXP (x, i, j), store_pattern, after); |
a13d4ebf AM |
7376 | } |
7377 | return ret; | |
7378 | } | |
7379 | ||
589005ff | 7380 | /* Check if INSN kills the store pattern X (is aliased with it). |
3b14e3af ZD |
7381 | AFTER is true if we are checking the case when store X occurs |
7382 | after the insn. Return true if it does. */
a13d4ebf | 7383 | |
47a3dae1 | 7384 | static bool |
3b14e3af | 7385 | store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after) |
a13d4ebf | 7386 | { |
d088acea | 7387 | rtx reg, base, note; |
94f24ddc | 7388 | |
735e8085 | 7389 | if (!INSN_P (insn)) |
47a3dae1 | 7390 | return false; |
589005ff | 7391 | |
a13d4ebf AM |
7392 | if (GET_CODE (insn) == CALL_INSN) |
7393 | { | |
1218665b JJ |
7394 | /* A normal or pure call might read from the pattern,
7395 | but a const call will not. */ | |
47a3dae1 ZD |
7396 | if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn)) |
7397 | return true; | |
7398 | ||
94f24ddc ZD |
7399 | /* But even a const call reads its parameters. Check whether the |
7400 | base of some of the registers used in MEM is the stack pointer. */
7401 | for (reg = x_regs; reg; reg = XEXP (reg, 1)) | |
7402 | { | |
bc083e18 | 7403 | base = find_base_term (XEXP (reg, 0)); |
94f24ddc ZD |
7404 | if (!base |
7405 | || (GET_CODE (base) == ADDRESS | |
7406 | && GET_MODE (base) == Pmode | |
7407 | && XEXP (base, 0) == stack_pointer_rtx)) | |
7408 | return true; | |
7409 | } | |
47a3dae1 ZD |
7410 | |
7411 | return false; | |
a13d4ebf | 7412 | } |
589005ff | 7413 | |
a13d4ebf AM |
7414 | if (GET_CODE (PATTERN (insn)) == SET) |
7415 | { | |
7416 | rtx pat = PATTERN (insn); | |
3b14e3af ZD |
7417 | rtx dest = SET_DEST (pat); |
7418 | ||
7419 | if (GET_CODE (dest) == SIGN_EXTRACT | |
7420 | || GET_CODE (dest) == ZERO_EXTRACT) | |
7421 | dest = XEXP (dest, 0); | |
7422 | ||
a13d4ebf | 7423 | /* Check for memory stores to aliased objects. */ |
3b14e3af ZD |
7424 | if (GET_CODE (dest) == MEM |
7425 | && !expr_equiv_p (dest, x)) | |
7426 | { | |
7427 | if (after) | |
7428 | { | |
7429 | if (output_dependence (dest, x)) | |
7430 | return true; | |
7431 | } | |
7432 | else | |
7433 | { | |
7434 | if (output_dependence (x, dest)) | |
7435 | return true; | |
7436 | } | |
7437 | } | |
d088acea ZD |
7438 | if (find_loads (SET_SRC (pat), x, after)) |
7439 | return true; | |
a13d4ebf | 7440 | } |
d088acea ZD |
7441 | else if (find_loads (PATTERN (insn), x, after)) |
7442 | return true; | |
7443 | ||
7444 | /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory | |
7445 | location aliased with X, then this insn kills X. */ | |
7446 | note = find_reg_equal_equiv_note (insn); | |
7447 | if (! note) | |
7448 | return false; | |
7449 | note = XEXP (note, 0); | |
7450 | ||
7451 | /* However, if the note represents a must alias rather than a may | |
7452 | alias relationship, then it does not kill X. */ | |
7453 | if (expr_equiv_p (note, x)) | |
7454 | return false; | |
7455 | ||
7456 | /* See if there are any aliased loads in the note. */ | |
7457 | return find_loads (note, x, after); | |
a13d4ebf AM |
7458 | } |
7459 | ||
47a3dae1 ZD |
7460 | /* Returns true if the expression X is loaded or clobbered on or after INSN |
7461 | within basic block BB. REGS_SET_AFTER is a bitmap of registers set in
7462 | or after the insn. X_REGS is the list of registers mentioned in X. If the
7463 | store is killed, return in FAIL_INSN the last insn in which that happens. */
a13d4ebf | 7464 | |
47a3dae1 | 7465 | static bool |
1d088dee AJ |
7466 | store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb, |
7467 | int *regs_set_after, rtx *fail_insn) | |
a13d4ebf | 7468 | { |
47a3dae1 | 7469 | rtx last = bb->end, act; |
aaa4ca30 | 7470 | |
47a3dae1 | 7471 | if (!store_ops_ok (x_regs, regs_set_after)) |
1d088dee | 7472 | { |
47a3dae1 ZD |
7473 | /* We do not know where it will happen. */ |
7474 | if (fail_insn) | |
7475 | *fail_insn = NULL_RTX; | |
7476 | return true; | |
7477 | } | |
a13d4ebf | 7478 | |
47a3dae1 ZD |
7479 | /* Scan from the end, so that fail_insn is determined correctly. */ |
7480 | for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act)) | |
3b14e3af | 7481 | if (store_killed_in_insn (x, x_regs, act, false)) |
47a3dae1 ZD |
7482 | { |
7483 | if (fail_insn) | |
7484 | *fail_insn = act; | |
7485 | return true; | |
7486 | } | |
589005ff | 7487 | |
47a3dae1 | 7488 | return false; |
a13d4ebf | 7489 | } |
1d088dee | 7490 | |
47a3dae1 ZD |
7491 | /* Returns true if the expression X is loaded or clobbered on or before INSN |
7492 | within basic block BB. X_REGS is the list of registers mentioned in X.
7493 | REGS_SET_BEFORE is a bitmap of registers set before or in this insn. */
7494 | static bool | |
1d088dee AJ |
7495 | store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb, |
7496 | int *regs_set_before) | |
a13d4ebf | 7497 | { |
8e42ace1 | 7498 | rtx first = bb->head; |
a13d4ebf | 7499 | |
47a3dae1 ZD |
7500 | if (!store_ops_ok (x_regs, regs_set_before)) |
7501 | return true; | |
a13d4ebf | 7502 | |
47a3dae1 | 7503 | for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn)) |
3b14e3af | 7504 | if (store_killed_in_insn (x, x_regs, insn, true)) |
47a3dae1 | 7505 | return true; |
589005ff | 7506 | |
47a3dae1 | 7507 | return false; |
a13d4ebf | 7508 | } |
1d088dee | 7509 | |
47a3dae1 ZD |
7510 | /* Fill in available, anticipatable, transparent and kill vectors in |
7511 | STORE_DATA, based on lists of available and anticipatable stores. */ | |
a13d4ebf | 7512 | static void |
1d088dee | 7513 | build_store_vectors (void) |
a13d4ebf | 7514 | { |
47a3dae1 ZD |
7515 | basic_block bb; |
7516 | int *regs_set_in_block; | |
a13d4ebf AM |
7517 | rtx insn, st; |
7518 | struct ls_expr * ptr; | |
47a3dae1 | 7519 | unsigned regno; |
a13d4ebf AM |
7520 | |
7521 | /* Build the gen_vector. This is any store in the table which is not killed | |
7522 | by aliasing later in its block. */ | |
703ad42b | 7523 | ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores); |
d55bc081 | 7524 | sbitmap_vector_zero (ae_gen, last_basic_block); |
a13d4ebf | 7525 | |
703ad42b | 7526 | st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores); |
d55bc081 | 7527 | sbitmap_vector_zero (st_antloc, last_basic_block); |
aaa4ca30 | 7528 | |
a13d4ebf | 7529 | for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr)) |
589005ff | 7530 | { |
47a3dae1 | 7531 | for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1)) |
a13d4ebf AM |
7532 | { |
7533 | insn = XEXP (st, 0); | |
e2d2ed72 | 7534 | bb = BLOCK_FOR_INSN (insn); |
589005ff | 7535 | |
47a3dae1 ZD |
7536 | /* If we've already seen an available expression in this block, |
7537 | we can delete this one (it occurs earlier in the block). We'll
7538 | copy the SRC expression to an unused register in case there | |
7539 | are any side effects. */ | |
7540 | if (TEST_BIT (ae_gen[bb->index], ptr->index)) | |
a13d4ebf | 7541 | { |
47a3dae1 ZD |
7542 | rtx r = gen_reg_rtx (GET_MODE (ptr->pattern)); |
7543 | if (gcse_file) | |
7544 | fprintf (gcse_file, "Removing redundant store:\n"); | |
d088acea | 7545 | replace_store_insn (r, XEXP (st, 0), bb, ptr); |
47a3dae1 | 7546 | continue; |
a13d4ebf | 7547 | } |
47a3dae1 | 7548 | SET_BIT (ae_gen[bb->index], ptr->index); |
a13d4ebf | 7549 | } |
589005ff | 7550 | |
47a3dae1 ZD |
7551 | for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1)) |
7552 | { | |
7553 | insn = XEXP (st, 0); | |
7554 | bb = BLOCK_FOR_INSN (insn); | |
7555 | SET_BIT (st_antloc[bb->index], ptr->index); | |
7556 | } | |
a13d4ebf | 7557 | } |
589005ff | 7558 | |
703ad42b | 7559 | ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores); |
d55bc081 | 7560 | sbitmap_vector_zero (ae_kill, last_basic_block); |
a13d4ebf | 7561 | |
703ad42b | 7562 | transp = sbitmap_vector_alloc (last_basic_block, num_stores); |
d55bc081 | 7563 | sbitmap_vector_zero (transp, last_basic_block); |
47a3dae1 | 7564 | regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno); |
a13d4ebf | 7565 | |
47a3dae1 ZD |
7566 | FOR_EACH_BB (bb) |
7567 | { | |
7568 | for (regno = 0; regno < max_gcse_regno; regno++) | |
7569 | regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno); | |
7570 | ||
7571 | for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr)) | |
7572 | { | |
7573 | if (store_killed_after (ptr->pattern, ptr->pattern_regs, bb->head, | |
7574 | bb, regs_set_in_block, NULL)) | |
7575 | { | |
e0bb17a8 | 7576 | /* It should not be necessary to consider the expression |
47a3dae1 ZD |
7577 | killed if it is both anticipatable and available. */ |
7578 | if (!TEST_BIT (st_antloc[bb->index], ptr->index) | |
7579 | || !TEST_BIT (ae_gen[bb->index], ptr->index)) | |
7580 | SET_BIT (ae_kill[bb->index], ptr->index); | |
1d088dee AJ |
7581 | } |
7582 | else | |
7583 | SET_BIT (transp[bb->index], ptr->index); | |
7584 | } | |
47a3dae1 ZD |
7585 | } |
7586 | ||
7587 | free (regs_set_in_block); | |
aaa4ca30 | 7588 | |
589005ff | 7589 | if (gcse_file) |
aaa4ca30 | 7590 | { |
d55bc081 ZD |
7591 | dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block); |
7592 | dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block); | |
7593 | dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block); | |
7594 | dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block); | |
a13d4ebf AM |
7595 | } |
7596 | } | |
7597 | ||
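/* [Illustrative sketch, not from the original source.]  The vectors
   built above feed a classical bit-vector dataflow problem.  As a
   rough, self-contained model, here is the forward availability
   recurrence avout[b] = (avin[b] & ~kill[b]) | gen[b] iterated to a
   fixed point over a tiny hard-wired CFG; the real pass solves the
   reverse problem through pre_edge_rev_lcm:  */

#include <stdio.h>

#define NBLOCKS 4

int
main (void)
{
  /* Hypothetical GEN/KILL bits for two expressions (bits 0 and 1);
     preds[b] is a bitmask of b's predecessor blocks, block 0 = entry.  */
  unsigned gen[NBLOCKS]   = { 0x1, 0x0, 0x2, 0x0 };
  unsigned kill[NBLOCKS]  = { 0x0, 0x1, 0x0, 0x0 };
  unsigned preds[NBLOCKS] = { 0x0, 0x1, 0x1, 0x6 };
  unsigned avin[NBLOCKS] = { 0 }, avout[NBLOCKS] = { 0 };
  int b, p, changed = 1;

  while (changed)
    {
      changed = 0;
      for (b = 0; b < NBLOCKS; b++)
        {
          unsigned in = b == 0 ? 0 : ~0u;
          unsigned out;

          for (p = 0; p < NBLOCKS; p++)
            if (preds[b] & (1u << p))
              in &= avout[p];

          out = (in & ~kill[b]) | gen[b];
          if (in != avin[b] || out != avout[b])
            {
              avin[b] = in;
              avout[b] = out;
              changed = 1;
            }
        }
    }

  for (b = 0; b < NBLOCKS; b++)
    printf ("bb %d: avin=%x avout=%x\n", b, avin[b], avout[b]);
  return 0;
}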
fbe5a4a6 | 7598 | /* Insert an instruction at the beginning of a basic block, and update |
a13d4ebf AM |
7599 | the BLOCK_HEAD if needed. */ |
7600 | ||
589005ff | 7601 | static void |
1d088dee | 7602 | insert_insn_start_bb (rtx insn, basic_block bb) |
a13d4ebf AM |
7603 | { |
7604 | /* Insert at start of successor block. */ | |
e2d2ed72 AM |
7605 | rtx prev = PREV_INSN (bb->head); |
7606 | rtx before = bb->head; | |
a13d4ebf AM |
7607 | while (before != 0) |
7608 | { | |
7609 | if (GET_CODE (before) != CODE_LABEL | |
7610 | && (GET_CODE (before) != NOTE | |
7611 | || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK)) | |
7612 | break; | |
7613 | prev = before; | |
e2d2ed72 | 7614 | if (prev == bb->end) |
a13d4ebf AM |
7615 | break; |
7616 | before = NEXT_INSN (before); | |
7617 | } | |
7618 | ||
7619 | insn = emit_insn_after (insn, prev); | |
7620 | ||
a13d4ebf AM |
7621 | if (gcse_file) |
7622 | { | |
7623 | fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n", | |
0b17ab2f | 7624 | bb->index); |
a13d4ebf AM |
7625 | print_inline_rtx (gcse_file, insn, 6); |
7626 | fprintf (gcse_file, "\n"); | |
7627 | } | |
7628 | } | |
7629 | ||
7630 | /* This routine will insert a store on an edge. EXPR is the ldst entry for | |
cc2902df | 7631 | the memory reference, and E is the edge to insert it on. Returns nonzero |
a13d4ebf AM |
7632 | if an edge insertion was performed. */ |
7633 | ||
7634 | static int | |
1d088dee | 7635 | insert_store (struct ls_expr * expr, edge e) |
a13d4ebf AM |
7636 | { |
7637 | rtx reg, insn; | |
e2d2ed72 | 7638 | basic_block bb; |
a13d4ebf AM |
7639 | edge tmp; |
7640 | ||
7641 | /* We did all the deletes before this insert, so if we didn't delete a
7642 | store, then we haven't set the reaching reg yet either. */ | |
7643 | if (expr->reaching_reg == NULL_RTX) | |
7644 | return 0; | |
7645 | ||
a0c8285b JH |
7646 | if (e->flags & EDGE_FAKE) |
7647 | return 0; | |
7648 | ||
a13d4ebf | 7649 | reg = expr->reaching_reg; |
47a3dae1 | 7650 | insn = gen_move_insn (copy_rtx (expr->pattern), reg); |
589005ff | 7651 | |
a13d4ebf AM |
7652 | /* If we are inserting this expression on ALL predecessor edges of a BB, |
7653 | insert it at the start of the BB, and reset the insert bits on the other
ff7cc307 | 7654 | edges so we don't try to insert it on them again. */
e2d2ed72 | 7655 | bb = e->dest; |
a13d4ebf | 7656 | for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next) |
3f2eae23 | 7657 | if (!(tmp->flags & EDGE_FAKE)) |
a0c8285b JH |
7658 | { |
7659 | int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest); | |
7660 | if (index == EDGE_INDEX_NO_EDGE) | |
7661 | abort (); | |
7662 | if (! TEST_BIT (pre_insert_map[index], expr->index)) | |
7663 | break; | |
7664 | } | |
a13d4ebf AM |
7665 | |
7666 | /* If tmp is NULL, we found an insertion on every edge, blank the | |
7667 | insertion vector for these edges, and insert at the start of the BB. */ | |
e2d2ed72 | 7668 | if (!tmp && bb != EXIT_BLOCK_PTR) |
a13d4ebf AM |
7669 | { |
7670 | for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next) | |
7671 | { | |
7672 | int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest); | |
7673 | RESET_BIT (pre_insert_map[index], expr->index); | |
7674 | } | |
7675 | insert_insn_start_bb (insn, bb); | |
7676 | return 0; | |
7677 | } | |
589005ff | 7678 | |
a13d4ebf AM |
7679 | /* We can't insert on this edge, so we'll insert at the head of the |
7680 | successor's block. See Morgan, sec 10.5. */
7681 | if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL) | |
7682 | { | |
7683 | insert_insn_start_bb (insn, bb); | |
7684 | return 0; | |
7685 | } | |
7686 | ||
7687 | insert_insn_on_edge (insn, e); | |
589005ff | 7688 | |
a13d4ebf AM |
7689 | if (gcse_file) |
7690 | { | |
7691 | fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n", | |
0b17ab2f | 7692 | e->src->index, e->dest->index); |
a13d4ebf AM |
7693 | print_inline_rtx (gcse_file, insn, 6); |
7694 | fprintf (gcse_file, "\n"); | |
7695 | } | |
589005ff | 7696 | |
a13d4ebf AM |
7697 | return 1; |
7698 | } | |
7699 | ||
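/* [Illustrative sketch, not from the original source.]  The decision
   made below is a small LCM refinement: if the same store would be
   inserted on every real predecessor edge of a block, one copy at the
   block head is cheaper than one copy per edge.  The list type and
   map here are hypothetical:  */

struct pred_edge { int index; struct pred_edge *next; };

/* Return 1 if every predecessor edge is marked in INSERT_MAP, in
   which case the caller may insert once at the block head and clear
   the per-edge bits.  */
static int
insert_at_head_p (struct pred_edge *preds, const char *insert_map)
{
  struct pred_edge *tmp;

  for (tmp = preds; tmp; tmp = tmp->next)
    if (!insert_map[tmp->index])
      return 0;
  return 1;
}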
d088acea ZD |
7700 | /* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the |
7701 | memory location in SMEXPR set in basic block BB. | |
7702 | ||
7703 | This could be rather expensive. */ | |
7704 | ||
7705 | static void | |
7706 | remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr) | |
7707 | { | |
7708 | edge *stack = xmalloc (sizeof (edge) * n_basic_blocks), act; | |
7709 | sbitmap visited = sbitmap_alloc (last_basic_block); | |
7710 | int stack_top = 0; | |
7711 | rtx last, insn, note; | |
7712 | rtx mem = smexpr->pattern; | |
7713 | ||
7714 | sbitmap_zero (visited); | |
7715 | act = bb->succ; | |
7716 | ||
7717 | while (1) | |
7718 | { | |
7719 | if (!act) | |
7720 | { | |
7721 | if (!stack_top) | |
7722 | { | |
7723 | free (stack); | |
7724 | sbitmap_free (visited); | |
7725 | return; | |
7726 | } | |
7727 | act = stack[--stack_top]; | |
7728 | } | |
7729 | bb = act->dest; | |
7730 | ||
7731 | if (bb == EXIT_BLOCK_PTR | |
7732 | || TEST_BIT (visited, bb->index) | |
7733 | || TEST_BIT (ae_kill[bb->index], smexpr->index)) | |
7734 | { | |
7735 | act = act->succ_next; | |
7736 | continue; | |
7737 | } | |
7738 | SET_BIT (visited, bb->index); | |
7739 | ||
7740 | if (TEST_BIT (st_antloc[bb->index], smexpr->index)) | |
7741 | { | |
7742 | for (last = ANTIC_STORE_LIST (smexpr); | |
7743 | BLOCK_FOR_INSN (XEXP (last, 0)) != bb; | |
7744 | last = XEXP (last, 1)) | |
7745 | continue; | |
7746 | last = XEXP (last, 0); | |
7747 | } | |
7748 | else | |
7749 | last = NEXT_INSN (bb->end); | |
7750 | ||
7751 | for (insn = bb->head; insn != last; insn = NEXT_INSN (insn)) | |
7752 | if (INSN_P (insn)) | |
7753 | { | |
7754 | note = find_reg_equal_equiv_note (insn); | |
7755 | if (!note || !expr_equiv_p (XEXP (note, 0), mem)) | |
7756 | continue; | |
7757 | ||
7758 | if (gcse_file) | |
7759 | fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n", | |
7760 | INSN_UID (insn)); | |
7761 | remove_note (insn, note); | |
7762 | } | |
7763 | act = act->succ_next; | |
7764 | if (bb->succ) | |
7765 | { | |
7766 | if (act) | |
7767 | stack[stack_top++] = act; | |
7768 | act = bb->succ; | |
7769 | } | |
7770 | } | |
7771 | } | |
7772 | ||
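/* [Illustrative sketch, not from the original source.]  The loop above
   is a depth-first walk over successor edges driven by an explicit
   stack instead of recursion.  The same pattern on a plain adjacency
   list (types hypothetical); at most one suspended edge is parked per
   visited block, so N_BLOCKS stack slots suffice, matching the
   n_basic_blocks allocation above:  */

#include <stdlib.h>

struct succ_edge { int dest; struct succ_edge *succ_next; };

static void
walk_reachable (struct succ_edge **succs, int n_blocks, int bb,
                void (*visit) (int))
{
  struct succ_edge **stack = malloc (sizeof (struct succ_edge *) * n_blocks);
  char *visited = calloc (n_blocks, 1);
  int top = 0;
  struct succ_edge *act = succs[bb];

  while (1)
    {
      if (!act)
        {
          if (!top)
            break;
          act = stack[--top];   /* Resume a suspended level.  */
        }

      bb = act->dest;
      act = act->succ_next;

      if (!visited[bb])
        {
          visited[bb] = 1;
          visit (bb);
          if (succs[bb])
            {
              if (act)
                stack[top++] = act;  /* Park the remaining siblings.  */
              act = succs[bb];       /* Descend first.  */
            }
        }
    }

  free (visited);
  free (stack);
}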
a13d4ebf AM |
7773 | /* This routine will replace a store with a SET to a specified register. */ |
7774 | ||
7775 | static void | |
d088acea | 7776 | replace_store_insn (rtx reg, rtx del, basic_block bb, struct ls_expr *smexpr) |
a13d4ebf | 7777 | { |
d088acea | 7778 | rtx insn, mem, note, set, ptr; |
589005ff | 7779 | |
d088acea | 7780 | mem = smexpr->pattern; |
9a318d30 | 7781 | insn = gen_move_insn (reg, SET_SRC (single_set (del))); |
a13d4ebf | 7782 | insn = emit_insn_after (insn, del); |
589005ff | 7783 | |
a13d4ebf AM |
7784 | if (gcse_file) |
7785 | { | |
589005ff | 7786 | fprintf (gcse_file, |
0b17ab2f | 7787 | "STORE_MOTION delete insn in BB %d:\n ", bb->index); |
a13d4ebf | 7788 | print_inline_rtx (gcse_file, del, 6); |
8e42ace1 | 7789 | fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n "); |
a13d4ebf | 7790 | print_inline_rtx (gcse_file, insn, 6); |
8e42ace1 | 7791 | fprintf (gcse_file, "\n"); |
a13d4ebf | 7792 | } |
589005ff | 7793 | |
d088acea ZD |
7794 | for (ptr = ANTIC_STORE_LIST (smexpr); ptr; ptr = XEXP (ptr, 1)) |
7795 | if (XEXP (ptr, 0) == del) | |
7796 | { | |
7797 | XEXP (ptr, 0) = insn; | |
7798 | break; | |
7799 | } | |
49ce134f | 7800 | delete_insn (del); |
d088acea ZD |
7801 | |
7802 | /* Now we must handle REG_EQUAL notes whose contents are equal to the mem;
7803 | they are no longer accurate once they are reached by this
7804 | definition, so drop them. */ | |
7805 | for (; insn != NEXT_INSN (bb->end); insn = NEXT_INSN (insn)) | |
7806 | if (INSN_P (insn)) | |
7807 | { | |
7808 | set = single_set (insn); | |
7809 | if (!set) | |
7810 | continue; | |
7811 | if (expr_equiv_p (SET_DEST (set), mem)) | |
7812 | return; | |
7813 | note = find_reg_equal_equiv_note (insn); | |
7814 | if (!note || !expr_equiv_p (XEXP (note, 0), mem)) | |
7815 | continue; | |
7816 | ||
7817 | if (gcse_file) | |
7818 | fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n", | |
7819 | INSN_UID (insn)); | |
7820 | remove_note (insn, note); | |
7821 | } | |
7822 | remove_reachable_equiv_notes (bb, smexpr); | |
a13d4ebf AM |
7823 | } |
7824 | ||
7825 | ||
7826 | /* Delete a store, but copy the value that would have been stored into | |
7827 | the reaching_reg for later storing. */ | |
7828 | ||
7829 | static void | |
1d088dee | 7830 | delete_store (struct ls_expr * expr, basic_block bb) |
a13d4ebf AM |
7831 | { |
7832 | rtx reg, i, del; | |
7833 | ||
7834 | if (expr->reaching_reg == NULL_RTX) | |
7835 | expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern)); | |
a13d4ebf | 7836 | |
a13d4ebf | 7837 | reg = expr->reaching_reg; |
589005ff | 7838 | |
a13d4ebf AM |
7839 | for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1)) |
7840 | { | |
7841 | del = XEXP (i, 0); | |
e2d2ed72 | 7842 | if (BLOCK_FOR_INSN (del) == bb) |
a13d4ebf | 7843 | { |
589005ff | 7844 | /* We know there is only one since we deleted redundant |
a13d4ebf | 7845 | ones during the available computation. */ |
d088acea | 7846 | replace_store_insn (reg, del, bb, expr); |
a13d4ebf AM |
7847 | break; |
7848 | } | |
7849 | } | |
7850 | } | |
7851 | ||
7852 | /* Free memory used by store motion. */ | |
7853 | ||
589005ff | 7854 | static void |
1d088dee | 7855 | free_store_memory (void) |
a13d4ebf AM |
7856 | { |
7857 | free_ldst_mems (); | |
589005ff | 7858 | |
a13d4ebf | 7859 | if (ae_gen) |
5a660bff | 7860 | sbitmap_vector_free (ae_gen); |
a13d4ebf | 7861 | if (ae_kill) |
5a660bff | 7862 | sbitmap_vector_free (ae_kill); |
a13d4ebf | 7863 | if (transp) |
5a660bff | 7864 | sbitmap_vector_free (transp); |
a13d4ebf | 7865 | if (st_antloc) |
5a660bff | 7866 | sbitmap_vector_free (st_antloc); |
a13d4ebf | 7867 | if (pre_insert_map) |
5a660bff | 7868 | sbitmap_vector_free (pre_insert_map); |
a13d4ebf | 7869 | if (pre_delete_map) |
5a660bff | 7870 | sbitmap_vector_free (pre_delete_map); |
aaa4ca30 AJ |
7871 | if (reg_set_in_block) |
7872 | sbitmap_vector_free (reg_set_in_block); | |
589005ff | 7873 | |
a13d4ebf AM |
7874 | ae_gen = ae_kill = transp = st_antloc = NULL; |
7875 | pre_insert_map = pre_delete_map = reg_set_in_block = NULL; | |
7876 | } | |
7877 | ||
7878 | /* Perform store motion. Much like gcse, except we move expressions the | |
7879 | other way by looking at the flowgraph in reverse. */ | |
7880 | ||
7881 | static void | |
1d088dee | 7882 | store_motion (void) |
a13d4ebf | 7883 | { |
e0082a72 | 7884 | basic_block bb; |
0b17ab2f | 7885 | int x; |
a13d4ebf | 7886 | struct ls_expr * ptr; |
adfcce61 | 7887 | int update_flow = 0; |
aaa4ca30 | 7888 | |
a13d4ebf AM |
7889 | if (gcse_file) |
7890 | { | |
7891 | fprintf (gcse_file, "before store motion\n"); | |
7892 | print_rtl (gcse_file, get_insns ()); | |
7893 | } | |
7894 | ||
a13d4ebf | 7895 | init_alias_analysis (); |
aaa4ca30 | 7896 | |
47a3dae1 | 7897 | /* Find all the available and anticipatable stores. */ |
a13d4ebf AM |
7898 | num_stores = compute_store_table (); |
7899 | if (num_stores == 0) | |
7900 | { | |
aaa4ca30 | 7901 | sbitmap_vector_free (reg_set_in_block); |
a13d4ebf AM |
7902 | end_alias_analysis (); |
7903 | return; | |
7904 | } | |
7905 | ||
47a3dae1 | 7906 | /* Now compute kill & transp vectors. */ |
a13d4ebf | 7907 | build_store_vectors (); |
47a3dae1 | 7908 | add_noreturn_fake_exit_edges (); |
2a868ea4 | 7909 | connect_infinite_loops_to_exit (); |
a13d4ebf | 7910 | |
589005ff KH |
7911 | edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen, |
7912 | st_antloc, ae_kill, &pre_insert_map, | |
a13d4ebf AM |
7913 | &pre_delete_map); |
7914 | ||
7915 | /* Now we want to insert the new stores which are going to be needed. */ | |
7916 | for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr)) | |
7917 | { | |
e0082a72 ZD |
7918 | FOR_EACH_BB (bb) |
7919 | if (TEST_BIT (pre_delete_map[bb->index], ptr->index)) | |
7920 | delete_store (ptr, bb); | |
a13d4ebf | 7921 | |
0b17ab2f RH |
7922 | for (x = 0; x < NUM_EDGES (edge_list); x++) |
7923 | if (TEST_BIT (pre_insert_map[x], ptr->index)) | |
7924 | update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x)); | |
a13d4ebf AM |
7925 | } |
7926 | ||
7927 | if (update_flow) | |
7928 | commit_edge_insertions (); | |
aaa4ca30 | 7929 | |
a13d4ebf AM |
7930 | free_store_memory (); |
7931 | free_edge_list (edge_list); | |
7932 | remove_fake_edges (); | |
7933 | end_alias_analysis (); | |
7934 | } | |
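/* [Illustrative sketch, not from the original source.]  The net effect
   of the pass, seen at the source level, is to sink a store so memory
   is written once instead of on every path through the region.
   Hand-written illustration, not compiler output:  */

/* Before store motion: *P is written on every iteration.  */
static void
loop_before (int *p, int n)
{
  int i = 0;
  do
    *p = i;
  while (++i < n);
}

/* After: the value travels in a register and the single store is
   placed where the loop exits.  */
static void
loop_after (int *p, int n)
{
  int i = 0, r;
  do
    r = i;
  while (++i < n);
  *p = r;
}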
e2500fed | 7935 | |
a0134312 RS |
7936 | \f |
7937 | /* Entry point for jump bypassing optimization pass. */ | |
7938 | ||
7939 | int | |
1d088dee | 7940 | bypass_jumps (FILE *file) |
a0134312 RS |
7941 | { |
7942 | int changed; | |
7943 | ||
7944 | /* We do not construct an accurate cfg in functions which call | |
7945 | setjmp, so just punt to be safe. */ | |
7946 | if (current_function_calls_setjmp) | |
7947 | return 0; | |
7948 | ||
7949 | /* For calling dump_foo fns from gdb. */ | |
7950 | debug_stderr = stderr; | |
7951 | gcse_file = file; | |
7952 | ||
7953 | /* Identify the basic block information for this function, including | |
7954 | successors and predecessors. */ | |
7955 | max_gcse_regno = max_reg_num (); | |
7956 | ||
7957 | if (file) | |
7958 | dump_flow_info (file); | |
7959 | ||
d128effb NS |
7960 | /* Return if there's nothing to do, or it is too expensive. */
7961 | if (n_basic_blocks <= 1 || is_too_expensive (_ ("jump bypassing disabled"))) | |
a0134312 RS |
7962 | return 0; |
7963 | ||
a0134312 RS |
7964 | gcc_obstack_init (&gcse_obstack); |
7965 | bytes_used = 0; | |
7966 | ||
7967 | /* We need alias. */ | |
7968 | init_alias_analysis (); | |
7969 | ||
7970 | /* Record where pseudo-registers are set. This data is kept accurate | |
7971 | during each pass. ??? We could also record hard-reg information here | |
7972 | [since it's unchanging], however it is currently done during hash table | |
7973 | computation. | |
7974 | ||
7975 | It may be tempting to compute MEM set information here too, but MEM sets | |
7976 | will be subject to code motion one day and thus we need to compute | |
7977 | information about memory sets when we build the hash tables. */ | |
7978 | ||
7979 | alloc_reg_set_mem (max_gcse_regno); | |
7980 | compute_sets (get_insns ()); | |
7981 | ||
7982 | max_gcse_regno = max_reg_num (); | |
7983 | alloc_gcse_mem (get_insns ()); | |
7984 | changed = one_cprop_pass (1, 1, 1); | |
7985 | free_gcse_mem (); | |
7986 | ||
7987 | if (file) | |
7988 | { | |
7989 | fprintf (file, "BYPASS of %s: %d basic blocks, ", | |
7990 | current_function_name, n_basic_blocks); | |
7991 | fprintf (file, "%d bytes\n\n", bytes_used); | |
7992 | } | |
7993 | ||
7994 | obstack_free (&gcse_obstack, NULL); | |
7995 | free_reg_set_mem (); | |
7996 | ||
7997 | /* We are finished with alias. */ | |
7998 | end_alias_analysis (); | |
7999 | allocate_reg_info (max_reg_num (), FALSE, FALSE); | |
8000 | ||
8001 | return changed; | |
8002 | } | |
8003 | ||
d128effb NS |
8004 | /* Return true if the graph is too expensive to optimize. PASS is the |
8005 | optimization about to be performed. */ | |
8006 | ||
8007 | static bool | |
8008 | is_too_expensive (const char *pass) | |
8009 | { | |
8010 | /* Trying to perform global optimizations on flow graphs which have | |
8011 | a high connectivity will take a long time and is unlikely to be | |
8012 | particularly useful. | |
8013 | ||
8014 | In normal circumstances a cfg should have about twice as many | |
8015 | edges as blocks. But we do not want to punish small functions | |
8016 | which have a couple of switch statements. Rather than simply
8017 | thresholding the number of blocks, use something with a more
8018 | graceful degradation. */ | |
8019 | if (n_edges > 20000 + n_basic_blocks * 4) | |
8020 | { | |
8021 | if (warn_disabled_optimization) | |
8022 | warning ("%s: %d basic blocks and %d edges/basic block", | |
8023 | pass, n_basic_blocks, n_edges / n_basic_blocks); | |
8024 | ||
8025 | return true; | |
8026 | } | |
8027 | ||
8028 | /* If allocating memory for the cprop bitmap would take up too much | |
8029 | storage it's better just to disable the optimization. */ | |
8030 | if ((n_basic_blocks | |
8031 | * SBITMAP_SET_SIZE (max_reg_num ()) | |
8032 | * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY) | |
8033 | { | |
8034 | if (warn_disabled_optimization) | |
8035 | warning ("%s: %d basic blocks and %d registers", | |
8036 | pass, n_basic_blocks, max_reg_num ()); | |
8037 | ||
8038 | return true; | |
8039 | } | |
8040 | ||
8041 | return false; | |
8042 | } | |
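/* [Illustrative sketch, not from the original source.]  A worked
   example of the connectivity threshold above: a function with 5000
   blocks may have up to 20000 + 5000 * 4 = 40000 edges, i.e. an
   average of 8 edges per block, before the pass bows out.  Standalone
   restatement of that first test (the memory test is omitted):  */

#include <stdio.h>

static int
graph_too_dense (int n_basic_blocks, int n_edges)
{
  return n_edges > 20000 + n_basic_blocks * 4;
}

int
main (void)
{
  printf ("%d\n", graph_too_dense (5000, 39000));  /* 0: acceptable  */
  printf ("%d\n", graph_too_dense (5000, 41000));  /* 1: too dense   */
  return 0;
}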
8043 | ||
e2500fed | 8044 | #include "gt-gcse.h" |