]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cse.c
* fold-const.c (fold): Replace "expr" with "t".
[thirdparty/gcc.git] / gcc / cse.c
CommitLineData
7afe21cc 1/* Common subexpression elimination for GNU compiler.
5e7b4e25 2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
26d107db 3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
7afe21cc 4
1322177d 5This file is part of GCC.
7afe21cc 6
1322177d
LB
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
7afe21cc 11
1322177d
LB
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
7afe21cc
RK
16
17You should have received a copy of the GNU General Public License
1322177d
LB
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
7afe21cc 21
7afe21cc 22#include "config.h"
670ee920
KG
23/* stdio.h must precede rtl.h for FFS. */
24#include "system.h"
4977bab6
ZW
25#include "coretypes.h"
26#include "tm.h"
9c3b4c8b 27
7afe21cc 28#include "rtl.h"
6baf1cc8 29#include "tm_p.h"
7afe21cc
RK
30#include "regs.h"
31#include "hard-reg-set.h"
630c79be 32#include "basic-block.h"
7afe21cc
RK
33#include "flags.h"
34#include "real.h"
35#include "insn-config.h"
36#include "recog.h"
49ad7cfa 37#include "function.h"
956d6950 38#include "expr.h"
50b2596f
KG
39#include "toplev.h"
40#include "output.h"
1497faf6 41#include "ggc.h"
3dec4024 42#include "timevar.h"
26771da7 43#include "except.h"
3c50106f 44#include "target.h"
9bf8cfbf 45#include "params.h"
7afe21cc
RK
46
47/* The basic idea of common subexpression elimination is to go
48 through the code, keeping a record of expressions that would
49 have the same value at the current scan point, and replacing
50 expressions encountered with the cheapest equivalent expression.
51
52 It is too complicated to keep track of the different possibilities
e48a7fbe
JL
53 when control paths merge in this code; so, at each label, we forget all
54 that is known and start fresh. This can be described as processing each
55 extended basic block separately. We have a separate pass to perform
56 global CSE.
57
58 Note CSE can turn a conditional or computed jump into a nop or
59 an unconditional jump. When this occurs we arrange to run the jump
60 optimizer after CSE to delete the unreachable code.
7afe21cc
RK
61
62 We use two data structures to record the equivalent expressions:
1bb98cec
DM
63 a hash table for most expressions, and a vector of "quantity
64 numbers" to record equivalent (pseudo) registers.
7afe21cc
RK
65
66 The use of the special data structure for registers is desirable
67 because it is faster. It is possible because registers references
68 contain a fairly small number, the register number, taken from
69 a contiguously allocated series, and two register references are
70 identical if they have the same number. General expressions
71 do not have any such thing, so the only way to retrieve the
72 information recorded on an expression other than a register
73 is to keep it in a hash table.
74
75Registers and "quantity numbers":
278a83b2 76
7afe21cc
RK
77 At the start of each basic block, all of the (hardware and pseudo)
78 registers used in the function are given distinct quantity
79 numbers to indicate their contents. During scan, when the code
80 copies one register into another, we copy the quantity number.
81 When a register is loaded in any other way, we allocate a new
82 quantity number to describe the value generated by this operation.
83 `reg_qty' records what quantity a register is currently thought
84 of as containing.
85
86 All real quantity numbers are greater than or equal to `max_reg'.
87 If register N has not been assigned a quantity, reg_qty[N] will equal N.
88
1bb98cec
DM
89 Quantity numbers below `max_reg' do not exist and none of the `qty_table'
90 entries should be referenced with an index below `max_reg'.
7afe21cc
RK
91
92 We also maintain a bidirectional chain of registers for each
1bb98cec
DM
93 quantity number. The `qty_table` members `first_reg' and `last_reg',
94 and `reg_eqv_table' members `next' and `prev' hold these chains.
7afe21cc
RK
95
96 The first register in a chain is the one whose lifespan is least local.
97 Among equals, it is the one that was seen first.
98 We replace any equivalent register with that one.
99
100 If two registers have the same quantity number, it must be true that
1bb98cec 101 REG expressions with qty_table `mode' must be in the hash table for both
7afe21cc
RK
102 registers and must be in the same class.
103
104 The converse is not true. Since hard registers may be referenced in
105 any mode, two REG expressions might be equivalent in the hash table
106 but not have the same quantity number if the quantity number of one
107 of the registers is not the same mode as those expressions.
278a83b2 108
7afe21cc
RK
109Constants and quantity numbers
110
111 When a quantity has a known constant value, that value is stored
1bb98cec 112 in the appropriate qty_table `const_rtx'. This is in addition to
7afe21cc
RK
113 putting the constant in the hash table as is usual for non-regs.
114
d45cf215 115 Whether a reg or a constant is preferred is determined by the configuration
7afe21cc
RK
116 macro CONST_COSTS and will often depend on the constant value. In any
117 event, expressions containing constants can be simplified, by fold_rtx.
118
119 When a quantity has a known nearly constant value (such as an address
1bb98cec
DM
120 of a stack slot), that value is stored in the appropriate qty_table
121 `const_rtx'.
7afe21cc
RK
122
123 Integer constants don't have a machine mode. However, cse
124 determines the intended machine mode from the destination
125 of the instruction that moves the constant. The machine mode
126 is recorded in the hash table along with the actual RTL
127 constant expression so that different modes are kept separate.
128
129Other expressions:
130
131 To record known equivalences among expressions in general
132 we use a hash table called `table'. It has a fixed number of buckets
133 that contain chains of `struct table_elt' elements for expressions.
134 These chains connect the elements whose expressions have the same
135 hash codes.
136
137 Other chains through the same elements connect the elements which
138 currently have equivalent values.
139
140 Register references in an expression are canonicalized before hashing
1bb98cec 141 the expression. This is done using `reg_qty' and qty_table `first_reg'.
7afe21cc
RK
142 The hash code of a register reference is computed using the quantity
143 number, not the register number.
144
145 When the value of an expression changes, it is necessary to remove from the
146 hash table not just that expression but all expressions whose values
147 could be different as a result.
148
149 1. If the value changing is in memory, except in special cases
150 ANYTHING referring to memory could be changed. That is because
151 nobody knows where a pointer does not point.
152 The function `invalidate_memory' removes what is necessary.
153
154 The special cases are when the address is constant or is
155 a constant plus a fixed register such as the frame pointer
156 or a static chain pointer. When such addresses are stored in,
157 we can tell exactly which other such addresses must be invalidated
158 due to overlap. `invalidate' does this.
159 All expressions that refer to non-constant
160 memory addresses are also invalidated. `invalidate_memory' does this.
161
162 2. If the value changing is a register, all expressions
163 containing references to that register, and only those,
164 must be removed.
165
166 Because searching the entire hash table for expressions that contain
167 a register is very slow, we try to figure out when it isn't necessary.
168 Precisely, this is necessary only when expressions have been
169 entered in the hash table using this register, and then the value has
170 changed, and then another expression wants to be added to refer to
171 the register's new value. This sequence of circumstances is rare
172 within any one basic block.
173
174 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
175 reg_tick[i] is incremented whenever a value is stored in register i.
176 reg_in_table[i] holds -1 if no references to register i have been
177 entered in the table; otherwise, it contains the value reg_tick[i] had
178 when the references were entered. If we want to enter a reference
179 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
180 Until we want to enter a new entry, the mere fact that the two vectors
181 don't match makes the entries be ignored if anyone tries to match them.
182
183 Registers themselves are entered in the hash table as well as in
184 the equivalent-register chains. However, the vectors `reg_tick'
185 and `reg_in_table' do not apply to expressions which are simple
186 register references. These expressions are removed from the table
187 immediately when they become invalid, and this can be done even if
188 we do not immediately search for all the expressions that refer to
189 the register.
190
191 A CLOBBER rtx in an instruction invalidates its operand for further
192 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
193 invalidates everything that resides in memory.
194
195Related expressions:
196
197 Constant expressions that differ only by an additive integer
198 are called related. When a constant expression is put in
199 the table, the related expression with no constant term
200 is also entered. These are made to point at each other
201 so that it is possible to find out if there exists any
202 register equivalent to an expression related to a given expression. */
278a83b2 203
7afe21cc
RK
204/* One plus largest register number used in this function. */
205
206static int max_reg;
207
556c714b
JW
208/* One plus largest instruction UID used in this function at time of
209 cse_main call. */
210
211static int max_insn_uid;
212
1bb98cec
DM
213/* Length of qty_table vector. We know in advance we will not need
214 a quantity number this big. */
7afe21cc
RK
215
216static int max_qty;
217
218/* Next quantity number to be allocated.
219 This is 1 + the largest number needed so far. */
220
221static int next_qty;
222
1bb98cec 223/* Per-qty information tracking.
7afe21cc 224
1bb98cec
DM
225 `first_reg' and `last_reg' track the head and tail of the
226 chain of registers which currently contain this quantity.
7afe21cc 227
1bb98cec 228 `mode' contains the machine mode of this quantity.
7afe21cc 229
1bb98cec
DM
230 `const_rtx' holds the rtx of the constant value of this
231 quantity, if known. A summations of the frame/arg pointer
232 and a constant can also be entered here. When this holds
233 a known value, `const_insn' is the insn which stored the
234 constant value.
7afe21cc 235
1bb98cec
DM
236 `comparison_{code,const,qty}' are used to track when a
237 comparison between a quantity and some constant or register has
238 been passed. In such a case, we know the results of the comparison
239 in case we see it again. These members record a comparison that
240 is known to be true. `comparison_code' holds the rtx code of such
241 a comparison, else it is set to UNKNOWN and the other two
242 comparison members are undefined. `comparison_const' holds
243 the constant being compared against, or zero if the comparison
244 is not against a constant. `comparison_qty' holds the quantity
245 being compared against when the result is known. If the comparison
246 is not with a register, `comparison_qty' is -1. */
7afe21cc 247
1bb98cec
DM
/* Per-quantity bookkeeping; one entry per quantity number (see the
   commentary at the top of the file).  */
struct qty_table_elem
{
  /* The constant value of this quantity, if known; else zero.  A
     summation of the frame/arg pointer and a constant can also be
     stored here.  */
  rtx const_rtx;
  /* When CONST_RTX holds a known value, the insn which stored that
     constant value.  */
  rtx const_insn;
  /* The constant this quantity is known to have compared equal/related
     against, or zero if the recorded comparison was not against a
     constant.  */
  rtx comparison_const;
  /* The quantity this one is known to have been compared against, or
     -1 if the comparison was not with a register.  */
  int comparison_qty;
  /* Head and tail of the bidirectional chain of registers which
     currently contain this quantity.  */
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  /* The rtx code of a comparison known to be true for this quantity,
     or UNKNOWN if none has been recorded (then the two comparison
     members above are undefined).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  /* The machine mode of this quantity.  */
  ENUM_BITFIELD(machine_mode) mode : 8;
};
7afe21cc 260
1bb98cec
DM
261/* The table of all qtys, indexed by qty number. */
262static struct qty_table_elem *qty_table;
7afe21cc
RK
263
264#ifdef HAVE_cc0
265/* For machines that have a CC0, we do not record its value in the hash
266 table since its use is guaranteed to be the insn immediately following
267 its definition and any other insn is presumed to invalidate it.
268
269 Instead, we store below the value last assigned to CC0. If it should
270 happen to be a constant, it is stored in preference to the actual
271 assigned value. In case it is a constant, we store the mode in which
272 the constant should be interpreted. */
273
274static rtx prev_insn_cc0;
275static enum machine_mode prev_insn_cc0_mode;
7afe21cc
RK
276
277/* Previous actual insn. 0 if at first insn of basic block. */
278
279static rtx prev_insn;
4977bab6 280#endif
7afe21cc
RK
281
282/* Insn being scanned. */
283
284static rtx this_insn;
285
71d306d1
DE
286/* Index by register number, gives the number of the next (or
287 previous) register in the chain of registers sharing the same
7afe21cc
RK
288 value.
289
290 Or -1 if this register is at the end of the chain.
291
1bb98cec
DM
292 If reg_qty[N] == N, reg_eqv_table[N].next is undefined. */
293
/* Per-register equivalence chain: the next and previous register
   numbers in the chain of registers sharing the same value, or -1 at
   either end of the chain.  If reg_qty[N] == N (no real quantity),
   reg_eqv_table[N].next is undefined.  */
struct reg_eqv_elem
{
  int next, prev;
};
7afe21cc 299
1bb98cec
DM
300/* The table of all register equivalence chains. */
301static struct reg_eqv_elem *reg_eqv_table;
7afe21cc 302
14a774a9
RK
/* Per-register CSE bookkeeping, kept in the small open hash table
   reg_hash[] and recycled through free/used lists.  */
struct cse_reg_info
{
  /* Next in hash chain.  */
  struct cse_reg_info *hash_next;

  /* The next cse_reg_info structure in the free or used list.  */
  struct cse_reg_info *next;

  /* Search key: the register number this entry describes.  */
  unsigned int regno;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};
7afe21cc 331
30f72379
MM
332/* A free list of cse_reg_info entries. */
333static struct cse_reg_info *cse_reg_info_free_list;
7afe21cc 334
c1edba58
VM
335/* A used list of cse_reg_info entries. */
336static struct cse_reg_info *cse_reg_info_used_list;
337static struct cse_reg_info *cse_reg_info_used_list_end;
338
30f72379 339/* A mapping from registers to cse_reg_info data structures. */
9b1549b8
DM
340#define REGHASH_SHIFT 7
341#define REGHASH_SIZE (1 << REGHASH_SHIFT)
342#define REGHASH_MASK (REGHASH_SIZE - 1)
343static struct cse_reg_info *reg_hash[REGHASH_SIZE];
344
345#define REGHASH_FN(REGNO) \
346 (((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
7afe21cc 347
30f72379
MM
348/* The last lookup we did into the cse_reg_info_tree. This allows us
349 to cache repeated lookups. */
770ae6cc 350static unsigned int cached_regno;
30f72379 351static struct cse_reg_info *cached_cse_reg_info;
7afe21cc 352
278a83b2 353/* A HARD_REG_SET containing all the hard registers for which there is
7afe21cc
RK
354 currently a REG expression in the hash table. Note the difference
355 from the above variables, which indicate if the REG is mentioned in some
356 expression in the table. */
357
358static HARD_REG_SET hard_regs_in_table;
359
7afe21cc
RK
360/* CUID of insn that starts the basic block currently being cse-processed. */
361
362static int cse_basic_block_start;
363
364/* CUID of insn that ends the basic block currently being cse-processed. */
365
366static int cse_basic_block_end;
367
368/* Vector mapping INSN_UIDs to cuids.
d45cf215 369 The cuids are like uids but increase monotonically always.
7afe21cc
RK
370 We use them to see whether a reg is used outside a given basic block. */
371
906c4e36 372static int *uid_cuid;
7afe21cc 373
164c8956
RK
374/* Highest UID in UID_CUID. */
375static int max_uid;
376
7afe21cc
RK
377/* Get the cuid of an insn. */
378
379#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
380
4eadede7
ZW
381/* Nonzero if this pass has made changes, and therefore it's
382 worthwhile to run the garbage collector. */
383
384static int cse_altered;
385
7afe21cc
RK
386/* Nonzero if cse has altered conditional jump insns
387 in such a way that jump optimization should be redone. */
388
389static int cse_jumps_altered;
390
f85cc4cb
RK
391/* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
392 REG_LABEL, we have to rerun jump after CSE to put in the note. */
a5dfb4ee
RK
393static int recorded_label_ref;
394
7afe21cc
RK
395/* canon_hash stores 1 in do_not_record
396 if it notices a reference to CC0, PC, or some other volatile
397 subexpression. */
398
399static int do_not_record;
400
7bac1be0
RK
401#ifdef LOAD_EXTEND_OP
402
403/* Scratch rtl used when looking for load-extended copy of a MEM. */
404static rtx memory_extend_rtx;
405#endif
406
7afe21cc
RK
407/* canon_hash stores 1 in hash_arg_in_memory
408 if it notices a reference to memory within the expression being hashed. */
409
410static int hash_arg_in_memory;
411
7afe21cc
RK
412/* The hash table contains buckets which are chains of `struct table_elt's,
413 each recording one expression's information.
414 That expression is in the `exp' field.
415
db048faf
MM
416 The canon_exp field contains a canonical (from the point of view of
417 alias analysis) version of the `exp' field.
418
7afe21cc
RK
419 Those elements with the same hash code are chained in both directions
420 through the `next_same_hash' and `prev_same_hash' fields.
421
422 Each set of expressions with equivalent values
423 are on a two-way chain through the `next_same_value'
424 and `prev_same_value' fields, and all point with
425 the `first_same_value' field at the first element in
426 that chain. The chain is in order of increasing cost.
427 Each element's cost value is in its `cost' field.
428
429 The `in_memory' field is nonzero for elements that
430 involve any reference to memory. These elements are removed
431 whenever a write is done to an unidentified location in memory.
432 To be safe, we assume that a memory address is unidentified unless
433 the address is either a symbol constant or a constant plus
434 the frame pointer or argument pointer.
435
7afe21cc
RK
436 The `related_value' field is used to connect related expressions
437 (that differ by adding an integer).
438 The related expressions are chained in a circular fashion.
439 `related_value' is zero for expressions for which this
440 chain is not useful.
441
442 The `cost' field stores the cost of this element's expression.
630c79be
BS
443 The `regcost' field stores the value returned by approx_reg_cost for
444 this element's expression.
7afe21cc
RK
445
446 The `is_const' flag is set if the element is a constant (including
447 a fixed address).
448
449 The `flag' field is used as a temporary during some search routines.
450
451 The `mode' field is usually the same as GET_MODE (`exp'), but
452 if `exp' is a CONST_INT and has no machine mode then the `mode'
453 field is the mode it was being used as. Each constant is
454 recorded separately for each mode it is used with. */
455
7afe21cc
RK
/* One hash-table element, recording one expression's information (see
   the block comment above for the full description of each chain).  */
struct table_elt
{
  /* The expression recorded.  */
  rtx exp;
  /* Canonical (from the point of view of alias analysis) version of
     EXP.  */
  rtx canon_exp;
  /* Two-way chain of elements with the same hash code.  */
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  /* Two-way chain of elements with equivalent values, in order of
     increasing cost.  */
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  /* First (cheapest) element in this value chain.  */
  struct table_elt *first_same_value;
  /* Circular chain of expressions that differ by an additive integer;
     zero when that chain is not useful.  */
  struct table_elt *related_value;
  /* Cost of EXP, as computed for the `cost' field.  */
  int cost;
  /* Value returned by approx_reg_cost for EXP.  */
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  /* Nonzero for elements that involve any reference to memory.  */
  char in_memory;
  /* Nonzero if the element is a constant (including a fixed address).  */
  char is_const;
  /* Temporary used during some search routines.  */
  char flag;
};
475
7afe21cc
RK
476/* We don't want a lot of buckets, because we rarely have very many
477 things stored in the hash table, and a lot of buckets slows
478 down a lot of loops that happen frequently. */
9b1549b8
DM
479#define HASH_SHIFT 5
480#define HASH_SIZE (1 << HASH_SHIFT)
481#define HASH_MASK (HASH_SIZE - 1)
7afe21cc
RK
482
483/* Compute hash code of X in mode M. Special-case case where X is a pseudo
484 register (hard registers may require `do_not_record' to be set). */
485
486#define HASH(X, M) \
9b1549b8
DM
487 ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
488 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
489 : canon_hash (X, M)) & HASH_MASK)
7afe21cc 490
630c79be
BS
491/* Determine whether register number N is considered a fixed register for the
492 purpose of approximating register costs.
7afe21cc
RK
493 It is desirable to replace other regs with fixed regs, to reduce need for
494 non-fixed hard regs.
553687c9 495 A reg wins if it is either the frame pointer or designated as fixed. */
7afe21cc 496#define FIXED_REGNO_P(N) \
8bc169f2 497 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
6ab832bc 498 || fixed_regs[N] || global_regs[N])
7afe21cc
RK
499
500/* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
ac07e066
RK
501 hard registers and pointers into the frame are the cheapest with a cost
502 of 0. Next come pseudos with a cost of one and other hard registers with
503 a cost of 2. Aside from these special cases, call `rtx_cost'. */
504
6ab832bc 505#define CHEAP_REGNO(N) \
7080f735
AJ
506 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
507 || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
508 || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
8bc169f2 509 || ((N) < FIRST_PSEUDO_REGISTER \
e7bb59fa 510 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
7afe21cc 511
f2fa288f
RH
512#define COST(X) (GET_CODE (X) == REG ? 0 : notreg_cost (X, SET))
513#define COST_IN(X,OUTER) (GET_CODE (X) == REG ? 0 : notreg_cost (X, OUTER))
7afe21cc 514
30f72379
MM
515/* Get the info associated with register N. */
516
7080f735 517#define GET_CSE_REG_INFO(N) \
30f72379
MM
518 (((N) == cached_regno && cached_cse_reg_info) \
519 ? cached_cse_reg_info : get_cse_reg_info ((N)))
520
521/* Get the number of times this register has been updated in this
522 basic block. */
523
c1edba58 524#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)
30f72379
MM
525
526/* Get the point at which REG was recorded in the table. */
527
528#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)
529
46081bb3
SH
530/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
531 SUBREG). */
532
533#define SUBREG_TICKED(N) ((GET_CSE_REG_INFO (N))->subreg_ticked)
534
30f72379
MM
535/* Get the quantity number for REG. */
536
537#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)
538
7afe21cc 539/* Determine if the quantity number for register X represents a valid index
1bb98cec 540 into the qty_table. */
7afe21cc 541
770ae6cc 542#define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (int) (N))
7afe21cc 543
9b1549b8 544static struct table_elt *table[HASH_SIZE];
7afe21cc
RK
545
546/* Chain of `struct table_elt's made so far for this function
547 but currently removed from the table. */
548
549static struct table_elt *free_element_chain;
550
551/* Number of `struct table_elt' structures made so far for this function. */
552
553static int n_elements_made;
554
555/* Maximum value `n_elements_made' has had so far in this compilation
556 for functions previously processed. */
557
558static int max_elements_made;
559
278a83b2 560/* Surviving equivalence class when two equivalence classes are merged
7afe21cc
RK
561 by recording the effects of a jump in the last insn. Zero if the
562 last insn was not a conditional jump. */
563
564static struct table_elt *last_jump_equiv_class;
565
566/* Set to the cost of a constant pool reference if one was found for a
567 symbolic constant. If this was found, it means we should try to
568 convert constants into constant pool entries if they don't fit in
569 the insn. */
570
571static int constant_pool_entries_cost;
dd0ba281 572static int constant_pool_entries_regcost;
7afe21cc 573
6cd4575e
RK
/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data
{
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path
    {
      /* The branch insn.  */
      rtx branch;
      /* Whether it should be taken or not.  AROUND is the same as taken
	 except that it is used when the destination label is not preceded
	 by a BARRIER.  */
      enum taken {TAKEN, NOT_TAKEN, AROUND} status;
    } *path;
};
599
7080f735
AJ
600static bool fixed_base_plus_p (rtx x);
601static int notreg_cost (rtx, enum rtx_code);
602static int approx_reg_cost_1 (rtx *, void *);
603static int approx_reg_cost (rtx);
56ae04af 604static int preferable (int, int, int, int);
7080f735
AJ
605static void new_basic_block (void);
606static void make_new_qty (unsigned int, enum machine_mode);
607static void make_regs_eqv (unsigned int, unsigned int);
608static void delete_reg_equiv (unsigned int);
609static int mention_regs (rtx);
610static int insert_regs (rtx, struct table_elt *, int);
611static void remove_from_table (struct table_elt *, unsigned);
612static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
613static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
614static rtx lookup_as_function (rtx, enum rtx_code);
615static struct table_elt *insert (rtx, struct table_elt *, unsigned,
616 enum machine_mode);
617static void merge_equiv_classes (struct table_elt *, struct table_elt *);
618static void invalidate (rtx, enum machine_mode);
619static int cse_rtx_varies_p (rtx, int);
620static void remove_invalid_refs (unsigned int);
621static void remove_invalid_subreg_refs (unsigned int, unsigned int,
622 enum machine_mode);
623static void rehash_using_reg (rtx);
624static void invalidate_memory (void);
625static void invalidate_for_call (void);
626static rtx use_related_value (rtx, struct table_elt *);
627static unsigned canon_hash (rtx, enum machine_mode);
628static unsigned canon_hash_string (const char *);
629static unsigned safe_hash (rtx, enum machine_mode);
630static int exp_equiv_p (rtx, rtx, int, int);
631static rtx canon_reg (rtx, rtx);
632static void find_best_addr (rtx, rtx *, enum machine_mode);
633static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
634 enum machine_mode *,
635 enum machine_mode *);
636static rtx fold_rtx (rtx, rtx);
637static rtx equiv_constant (rtx);
638static void record_jump_equiv (rtx, int);
639static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
640 int);
641static void cse_insn (rtx, rtx);
86caf04d
PB
642static void cse_end_of_basic_block (rtx, struct cse_basic_block_data *,
643 int, int, int);
7080f735
AJ
644static int addr_affects_sp_p (rtx);
645static void invalidate_from_clobbers (rtx);
646static rtx cse_process_notes (rtx, rtx);
647static void cse_around_loop (rtx);
648static void invalidate_skipped_set (rtx, rtx, void *);
649static void invalidate_skipped_block (rtx);
650static void cse_check_loop_start (rtx, rtx, void *);
651static void cse_set_around_loop (rtx, rtx, rtx);
652static rtx cse_basic_block (rtx, rtx, struct branch_path *, int);
9ab81df2 653static void count_reg_usage (rtx, int *, int);
7080f735
AJ
654static int check_for_label_ref (rtx *, void *);
655extern void dump_class (struct table_elt*);
656static struct cse_reg_info * get_cse_reg_info (unsigned int);
657static int check_dependence (rtx *, void *);
658
659static void flush_hash_table (void);
660static bool insn_live_p (rtx, int *);
661static bool set_live_p (rtx, rtx, int *);
662static bool dead_libcall_p (rtx, int *);
e129d93a
ILT
663static int cse_change_cc_mode (rtx *, void *);
664static void cse_change_cc_mode_insns (rtx, rtx, rtx);
665static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);
7afe21cc 666\f
4977bab6
ZW
667/* Nonzero if X has the form (PLUS frame-pointer integer). We check for
668 virtual regs here because the simplify_*_operation routines are called
669 by integrate.c, which is called before virtual register instantiation. */
670
671static bool
7080f735 672fixed_base_plus_p (rtx x)
4977bab6
ZW
673{
674 switch (GET_CODE (x))
675 {
676 case REG:
677 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
678 return true;
679 if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
680 return true;
681 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
682 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
683 return true;
684 return false;
685
686 case PLUS:
687 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
688 return false;
689 return fixed_base_plus_p (XEXP (x, 0));
690
691 case ADDRESSOF:
692 return true;
693
694 default:
695 return false;
696 }
697}
698
a4c6502a
MM
699/* Dump the expressions in the equivalence class indicated by CLASSP.
700 This function is used only for debugging. */
a0153051 701void
7080f735 702dump_class (struct table_elt *classp)
a4c6502a
MM
703{
704 struct table_elt *elt;
705
706 fprintf (stderr, "Equivalence chain for ");
707 print_rtl (stderr, classp->exp);
708 fprintf (stderr, ": \n");
278a83b2 709
a4c6502a
MM
710 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
711 {
712 print_rtl (stderr, elt->exp);
713 fprintf (stderr, "\n");
714 }
715}
716
630c79be 717/* Subroutine of approx_reg_cost; called through for_each_rtx. */
be8ac49a 718
630c79be 719static int
7080f735 720approx_reg_cost_1 (rtx *xp, void *data)
630c79be
BS
721{
722 rtx x = *xp;
c863f8c2 723 int *cost_p = data;
630c79be
BS
724
725 if (x && GET_CODE (x) == REG)
c863f8c2
DM
726 {
727 unsigned int regno = REGNO (x);
728
729 if (! CHEAP_REGNO (regno))
730 {
731 if (regno < FIRST_PSEUDO_REGISTER)
732 {
733 if (SMALL_REGISTER_CLASSES)
734 return 1;
735 *cost_p += 2;
736 }
737 else
738 *cost_p += 1;
739 }
740 }
741
630c79be
BS
742 return 0;
743}
744
745/* Return an estimate of the cost of the registers used in an rtx.
746 This is mostly the number of different REG expressions in the rtx;
a1f300c0 747 however for some exceptions like fixed registers we use a cost of
f1c1dfc3 748 0. If any other hard register reference occurs, return MAX_COST. */
630c79be
BS
749
750static int
7080f735 751approx_reg_cost (rtx x)
630c79be 752{
630c79be 753 int cost = 0;
f1c1dfc3 754
c863f8c2
DM
755 if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
756 return MAX_COST;
630c79be 757
c863f8c2 758 return cost;
630c79be
BS
759}
760
761/* Return a negative value if an rtx A, whose costs are given by COST_A
762 and REGCOST_A, is more desirable than an rtx B.
763 Return a positive value if A is less desirable, or 0 if the two are
764 equally good. */
765static int
56ae04af 766preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
630c79be 767{
423adbb9 768 /* First, get rid of cases involving expressions that are entirely
f1c1dfc3
BS
769 unwanted. */
770 if (cost_a != cost_b)
771 {
772 if (cost_a == MAX_COST)
773 return 1;
774 if (cost_b == MAX_COST)
775 return -1;
776 }
777
778 /* Avoid extending lifetimes of hardregs. */
779 if (regcost_a != regcost_b)
780 {
781 if (regcost_a == MAX_COST)
782 return 1;
783 if (regcost_b == MAX_COST)
784 return -1;
785 }
786
787 /* Normal operation costs take precedence. */
630c79be
BS
788 if (cost_a != cost_b)
789 return cost_a - cost_b;
f1c1dfc3 790 /* Only if these are identical consider effects on register pressure. */
630c79be
BS
791 if (regcost_a != regcost_b)
792 return regcost_a - regcost_b;
793 return 0;
794}
795
/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  OUTER is the code of the rtx that
   contains X, passed through to rtx_cost.

   A lowpart SUBREG of a wider integer register is free when the target
   can truncate by simply ignoring the high part (TRULY_NOOP_TRUNCATION);
   everything else costs twice rtx_cost, matching the doubled scale the
   COST macro uses for registers.  */

static int
notreg_cost (rtx x, enum rtx_code outer)
{
  return ((GET_CODE (x) == SUBREG
	   && GET_CODE (SUBREG_REG (x)) == REG
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? 0
	  : rtx_cost (x, outer) * 2);
}
814
01329426 815\f
/* Return the cse_reg_info record for register number REGNO, creating and
   initializing it if none exists yet.  Records are kept in the reg_hash
   chained hash table and recycled through cse_reg_info_free_list; the
   most recent lookup is cached in cached_regno/cached_cse_reg_info
   (callers are expected to consult the cache via a macro elsewhere in
   this file).  */
static struct cse_reg_info *
get_cse_reg_info (unsigned int regno)
{
  struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
  struct cse_reg_info *p;

  for (p = *hash_head; p != NULL; p = p->hash_next)
    if (p->regno == regno)
      break;

  if (p == NULL)
    {
      /* Get a new cse_reg_info structure, reusing a freed one when
	 possible.  */
      if (cse_reg_info_free_list)
	{
	  p = cse_reg_info_free_list;
	  cse_reg_info_free_list = p->next;
	}
      else
	p = xmalloc (sizeof (struct cse_reg_info));

      /* Insert into hash table.  */
      p->hash_next = *hash_head;
      *hash_head = p;

      /* Initialize it.  reg_qty == regno marks "no quantity yet"; the
	 -1 ticks mean the register is not in the expression hash table.  */
      p->reg_tick = 1;
      p->reg_in_table = -1;
      p->subreg_ticked = -1;
      p->reg_qty = regno;
      p->regno = regno;
      p->next = cse_reg_info_used_list;
      cse_reg_info_used_list = p;
      if (!cse_reg_info_used_list_end)
	cse_reg_info_used_list_end = p;
    }

  /* Cache this lookup; we tend to be looking up information about the
     same register several times in a row.  */
  cached_regno = regno;
  cached_cse_reg_info = p;

  return p;
}
860
/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  Recycles all cse_reg_info records and all
   table_elt entries onto their respective free lists rather than freeing
   them.  */

static void
new_basic_block (void)
{
  int i;

  next_qty = max_reg;

  /* Clear out hash table state for this pass.  */

  memset (reg_hash, 0, sizeof reg_hash);

  /* Splice the whole used list onto the front of the free list in one
     O(1) operation.  */
  if (cse_reg_info_used_list)
    {
      cse_reg_info_used_list_end->next = cse_reg_info_free_list;
      cse_reg_info_free_list = cse_reg_info_used_list;
      cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
    }
  cached_cse_reg_info = 0;

  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this hash entire chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

#ifdef HAVE_cc0
  /* Forget any cc0-setting insn remembered from the previous block.  */
  prev_insn = 0;
  prev_insn_cc0 = 0;
#endif
}
915
/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  Allocates the next
   quantity number and makes REG the sole member of its equivalence
   chain.  */

static void
make_new_qty (unsigned int reg, enum machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  /* max_qty was sized in advance by the caller of cse; running out
     indicates a bookkeeping bug.  */
  if (next_qty >= max_qty)
    abort ();

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  /* -1 terminates the doubly linked equivalence chain in both
     directions.  */
  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}
940
/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  NEW is linked into OLD's quantity chain
   at a position chosen so that the chain head is the best register to
   substitute: fixed hard regs first, then pseudos, then other hard
   regs.  */

static void
make_regs_eqv (unsigned int new, unsigned int old)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  REG_QTY (new) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      /* NEW becomes the canonical (head) register for this quantity.  */
      reg_eqv_table[firstr].prev = new;
      reg_eqv_table[new].next = firstr;
      reg_eqv_table[new].prev = -1;
      ent->first_reg = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
      else
	qty_table[q].last_reg = new;
      reg_eqv_table[lastr].next = new;
      reg_eqv_table[new].prev = lastr;
    }
}
1004
/* Remove REG from its equivalence class.  Afterwards REG_QTY (REG) is
   reset to REG itself, which is the "no valid quantity" marker.  */

static void
delete_reg_equiv (unsigned int reg)
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  REG_QTY == reg means the register never
     got a real quantity (see get_cse_reg_info initialization).  */
  if (q == (int) reg)
    return;

  ent = &qty_table[q];

  /* Unlink REG from the doubly linked chain; -1 marks either end.  */
  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = reg;
}
1034
/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      /* A hard register may span several consecutive register numbers;
	 a pseudo always covers exactly one.  */
      unsigned int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : hard_regno_nregs[regno][GET_MODE (x)]);
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  /* A tick mismatch means the register was modified since its
	     table entries were made; drop those stale entries.  */
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	  SUBREG_TICKED (i) = -1;
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
	     the last store to this register really stored into this
	     subreg, then remove the memory of this subreg.
	     Otherwise, remove any memory of the entire register and
	     all its subregs from the table.  */
	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || COMPARISON_P (x))
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  /* Recurse over all subexpressions and vectors of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}
1145
/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (rtx x, struct table_elt *classp, int modified)
{
  if (GET_CODE (x) == REG)
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  /* Join the quantity of the first same-mode REG found in the
	     class, if any.  */
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}
1225\f
1226/* Look in or update the hash table. */
1227
7afe21cc
RK
1228/* Remove table element ELT from use in the table.
1229 HASH is its hash code, made using the HASH macro.
1230 It's an argument because often that is known in advance
1231 and we save much time not recomputing it. */
1232
1233static void
7080f735 1234remove_from_table (struct table_elt *elt, unsigned int hash)
7afe21cc
RK
1235{
1236 if (elt == 0)
1237 return;
1238
1239 /* Mark this element as removed. See cse_insn. */
1240 elt->first_same_value = 0;
1241
1242 /* Remove the table element from its equivalence class. */
278a83b2 1243
7afe21cc 1244 {
b3694847
SS
1245 struct table_elt *prev = elt->prev_same_value;
1246 struct table_elt *next = elt->next_same_value;
7afe21cc 1247
278a83b2
KH
1248 if (next)
1249 next->prev_same_value = prev;
7afe21cc
RK
1250
1251 if (prev)
1252 prev->next_same_value = next;
1253 else
1254 {
b3694847 1255 struct table_elt *newfirst = next;
7afe21cc
RK
1256 while (next)
1257 {
1258 next->first_same_value = newfirst;
1259 next = next->next_same_value;
1260 }
1261 }
1262 }
1263
1264 /* Remove the table element from its hash bucket. */
1265
1266 {
b3694847
SS
1267 struct table_elt *prev = elt->prev_same_hash;
1268 struct table_elt *next = elt->next_same_hash;
7afe21cc 1269
278a83b2
KH
1270 if (next)
1271 next->prev_same_hash = prev;
7afe21cc
RK
1272
1273 if (prev)
1274 prev->next_same_hash = next;
1275 else if (table[hash] == elt)
1276 table[hash] = next;
1277 else
1278 {
1279 /* This entry is not in the proper hash bucket. This can happen
1280 when two classes were merged by `merge_equiv_classes'. Search
1281 for the hash bucket that it heads. This happens only very
1282 rarely, so the cost is acceptable. */
9b1549b8 1283 for (hash = 0; hash < HASH_SIZE; hash++)
7afe21cc
RK
1284 if (table[hash] == elt)
1285 table[hash] = next;
1286 }
1287 }
1288
1289 /* Remove the table element from its related-value circular chain. */
1290
1291 if (elt->related_value != 0 && elt->related_value != elt)
1292 {
b3694847 1293 struct table_elt *p = elt->related_value;
770ae6cc 1294
7afe21cc
RK
1295 while (p->related_value != elt)
1296 p = p->related_value;
1297 p->related_value = elt->related_value;
1298 if (p->related_value == p)
1299 p->related_value = 0;
1300 }
1301
9b1549b8
DM
1302 /* Now add it to the free element chain. */
1303 elt->next_same_hash = free_element_chain;
1304 free_element_chain = elt;
7afe21cc
RK
1305}
1306
1307/* Look up X in the hash table and return its table element,
1308 or 0 if X is not in the table.
1309
1310 MODE is the machine-mode of X, or if X is an integer constant
1311 with VOIDmode then MODE is the mode with which X will be used.
1312
1313 Here we are satisfied to find an expression whose tree structure
1314 looks like X. */
1315
1316static struct table_elt *
7080f735 1317lookup (rtx x, unsigned int hash, enum machine_mode mode)
7afe21cc 1318{
b3694847 1319 struct table_elt *p;
7afe21cc
RK
1320
1321 for (p = table[hash]; p; p = p->next_same_hash)
1322 if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1323 || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1324 return p;
1325
1326 return 0;
1327}
1328
/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  Used when
   an element is about to be removed, where a stale entry must still be
   found.  */

static struct table_elt *
lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      unsigned int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}
1357
/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression; otherwise return 0.
   Only entries that are still valid in the table are considered.  */

static rtx
lookup_as_function (rtx x, enum rtx_code code)
{
  struct table_elt *p
    = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));

  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
    }

  if (p == 0)
    return 0;

  /* Scan the whole equivalence class for an entry with the wanted code.  */
  for (p = p->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == code
	/* Make sure this is a valid entry in the table.  */
	&& exp_equiv_p (p->exp, p->exp, 1, 0))
      return p->exp;

  return 0;
}
1389
/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X, Y) \
 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)

static struct table_elt *
insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table.  */
  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
      unsigned int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* Put an element for X into the right hash bucket.  Reuse a freed
     element when one is available.  */

  elt = free_element_chain;
  if (elt)
    free_element_chain = elt->next_same_hash;
  else
    {
      n_elements_made++;
      elt = xmalloc (sizeof (struct table_elt));
    }

  elt->exp = x;
  elt->canon_exp = NULL_RTX;
  elt->cost = COST (x);
  elt->regcost = approx_reg_cost (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x)
		   /* GNU C++ takes advantage of this for `this'
		      (and other const values).  */
		   || (GET_CODE (x) == REG
		       && RTX_UNCHANGING_P (x)
		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
		   || fixed_base_plus_p (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  /* Every member of the class must see the new head.  */
	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  struct table_elt *p, *next;

	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next);

	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;

	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update the qtys `const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && GET_CODE (classp->exp) == REG
      && GET_CODE (x) != REG)
    {
      int exp_q = REG_QTY (REGNO (classp->exp));
      struct qty_table_elem *exp_ent = &qty_table[exp_q];

      exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
      exp_ent->const_insn = this_insn;
    }

  else if (GET_CODE (x) == REG
	   && classp
	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
	   && ! elt->is_const)
    {
      struct table_elt *p;

      /* Search the class for a constant equivalent to record.  */
      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const && GET_CODE (p->exp) != REG)
	    {
	      int x_q = REG_QTY (REGNO (x));
	      struct qty_table_elem *x_ent = &qty_table[x_q];

	      x_ent->const_rtx
		= gen_lowpart (GET_MODE (x), p->exp);
	      x_ent->const_insn = this_insn;
	      break;
	    }
	}
    }

  else if (GET_CODE (x) == REG
	   && qty_table[REG_QTY (REGNO (x))].const_rtx
	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = safe_hash (subexp, mode) & HASH_MASK;
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}
1593\f
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned int hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      /* Fetch NEXT first: remove_from_table below recycles ELT.  */
      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
	{
	  /* HASH sets hash_arg_in_memory as a side effect; clear it so we
	     can copy the fresh value into the re-inserted element.  */
	  hash_arg_in_memory = 0;
	  hash = HASH (exp, mode);

	  if (GET_CODE (exp) == REG)
	    delete_reg_equiv (REGNO (exp));

	  remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0))
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	}
    }
}
1647\f
01e752d3
JL
1648/* Flush the entire hash table. */
1649
1650static void
7080f735 1651flush_hash_table (void)
01e752d3
JL
1652{
1653 int i;
1654 struct table_elt *p;
1655
9b1549b8 1656 for (i = 0; i < HASH_SIZE; i++)
01e752d3
JL
1657 for (p = table[i]; p; p = table[i])
1658 {
1659 /* Note that invalidate can remove elements
1660 after P in the current hash chain. */
1661 if (GET_CODE (p->exp) == REG)
1662 invalidate (p->exp, p->mode);
1663 else
1664 remove_from_table (p, i);
1665 }
1666}
14a774a9 1667\f
/* Function called for each rtx to check whether true dependence exist.  */
struct check_dependence_data
{
  /* Mode of the store being tested against.  */
  enum machine_mode mode;
  /* The stored-to memory reference.  */
  rtx exp;
  /* Canonicalized address of EXP, precomputed by the caller.  */
  rtx addr;
};

/* for_each_rtx callback: return nonzero (aborting the walk) if *X is a
   MEM that may truly depend on the store described by DATA.  */
static int
check_dependence (rtx *x, void *data)
{
  struct check_dependence_data *d = (struct check_dependence_data *) data;
  if (*x && GET_CODE (*x) == MEM)
    return canon_true_dependence (d->exp, d->mode, d->addr, *x,
				  cse_rtx_varies_p);
  else
    return 0;
}
1686\f
14a774a9
RK
/* Remove from the hash table, or mark as invalid, all expressions whose
   values could be altered by storing in X.  X is a register, a subreg, or
   a memory reference with nonvarying address (because, when a memory
   reference with a varying address is stored in, all memory references are
   removed by invalidate_memory so specific invalidation is superfluous).
   FULL_MODE, if not VOIDmode, indicates that this much should be
   invalidated instead of just the amount indicated by the mode of X.  This
   is only used for bitfield stores into memory.

   A nonvarying address may be just a register or just a symbol reference,
   or it may be either of those plus a numeric offset.  */

static void
invalidate (rtx x, enum machine_mode full_mode)
{
  int i;
  struct table_elt *p;
  rtx addr;

  switch (GET_CODE (x))
    {
    case REG:
      {
	/* If X is a register, dependencies on its contents are recorded
	   through the qty number mechanism.  Just change the qty number of
	   the register, mark it as invalid for expressions that refer to it,
	   and remove it itself.  */
	unsigned int regno = REGNO (x);
	unsigned int hash = HASH (x, GET_MODE (x));

	/* Remove REGNO from any quantity list it might be on and indicate
	   that its value might have changed.  If it is a pseudo, remove its
	   entry from the hash table.

	   For a hard register, we do the first two actions above for any
	   additional hard registers corresponding to X.  Then, if any of these
	   registers are in the table, we must remove any REG entries that
	   overlap these registers.  */

	delete_reg_equiv (regno);
	REG_TICK (regno)++;
	SUBREG_TICKED (regno) = -1;

	if (regno >= FIRST_PSEUDO_REGISTER)
	  {
	    /* Because a register can be referenced in more than one mode,
	       we might have to remove more than one table entry.  */
	    struct table_elt *elt;

	    while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
	      remove_from_table (elt, hash);
	  }
	else
	  {
	    /* Hard register: invalidate every hard reg that X occupies,
	       then sweep the table for any REG entries overlapping them.  */
	    HOST_WIDE_INT in_table
	      = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
	    unsigned int endregno
	      = regno + hard_regno_nregs[regno][GET_MODE (x)];
	    unsigned int tregno, tendregno, rn;
	    struct table_elt *p, *next;

	    CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

	    for (rn = regno + 1; rn < endregno; rn++)
	      {
		in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
		CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
		delete_reg_equiv (rn);
		REG_TICK (rn)++;
		SUBREG_TICKED (rn) = -1;
	      }

	    /* Only do the (expensive) full-table sweep if some affected
	       hard reg actually had table entries.  */
	    if (in_table)
	      for (hash = 0; hash < HASH_SIZE; hash++)
		for (p = table[hash]; p; p = next)
		  {
		    next = p->next_same_hash;

		    if (GET_CODE (p->exp) != REG
			|| REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
		      continue;

		    tregno = REGNO (p->exp);
		    tendregno
		      = tregno + hard_regno_nregs[tregno][GET_MODE (p->exp)];
		    /* Remove P if [tregno, tendregno) overlaps
		       [regno, endregno).  */
		    if (tendregno > regno && tregno < endregno)
		      remove_from_table (p, hash);
		  }
	  }
      }
      return;

    case SUBREG:
      invalidate (SUBREG_REG (x), VOIDmode);
      return;

    case PARALLEL:
      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
	invalidate (XVECEXP (x, 0, i), VOIDmode);
      return;

    case EXPR_LIST:
      /* This is part of a disjoint return value; extract the location in
	 question ignoring the offset.  */
      invalidate (XEXP (x, 0), VOIDmode);
      return;

    case MEM:
      addr = canon_rtx (get_addr (XEXP (x, 0)));
      /* Calculate the canonical version of X here so that
	 true_dependence doesn't generate new RTL for X on each call.  */
      x = canon_rtx (x);

      /* Remove all hash table elements that refer to overlapping pieces of
	 memory.  */
      if (full_mode == VOIDmode)
	full_mode = GET_MODE (x);

      for (i = 0; i < HASH_SIZE; i++)
	{
	  struct table_elt *next;

	  for (p = table[i]; p; p = next)
	    {
	      next = p->next_same_hash;
	      if (p->in_memory)
		{
		  struct check_dependence_data d;

		  /* Just canonicalize the expression once;
		     otherwise each time we call invalidate
		     true_dependence will canonicalize the
		     expression again.  */
		  if (!p->canon_exp)
		    p->canon_exp = canon_rtx (p->exp);
		  d.exp = x;
		  d.addr = addr;
		  d.mode = full_mode;
		  if (for_each_rtx (&p->canon_exp, check_dependence, &d))
		    remove_from_table (p, i);
		}
	    }
	}
      return;

    default:
      abort ();
    }
}
14a774a9 1836\f
7afe21cc
RK
/* Remove all expressions that refer to register REGNO,
   since they are already invalid, and we are about to
   mark that register valid again and don't want the old
   expressions to reappear as valid.  */

static void
remove_invalid_refs (unsigned int regno)
{
  unsigned int i;
  struct table_elt *p, *next;

  /* Scan every hash chain; plain REG entries are managed through the
     qty mechanism, so only composite expressions mentioning REGNO are
     removed here.  */
  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (GET_CODE (p->exp) != REG
	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
	  remove_from_table (p, i);
      }
}
34c73909 1857
ddef6bc7
JJ
/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
   and mode MODE.  Entries that are SUBREGs of REGNO but whose byte
   range [SUBREG_BYTE, SUBREG_BYTE + size) does not overlap
   [OFFSET, OFFSET + size of MODE) are left alone.  */
static void
remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
			    enum machine_mode mode)
{
  unsigned int i;
  struct table_elt *p, *next;
  /* Last byte covered by the stored subreg.  */
  unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
	rtx exp = p->exp;
	next = p->next_same_hash;

	/* Remove EXP unless it is exactly a SUBREG of REGNO whose byte
	   range lies entirely outside the invalidated range; plain REGs
	   are handled elsewhere.  */
	if (GET_CODE (exp) != REG
	    && (GET_CODE (exp) != SUBREG
		|| GET_CODE (SUBREG_REG (exp)) != REG
		|| REGNO (SUBREG_REG (exp)) != regno
		|| (((SUBREG_BYTE (exp)
		      + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
		    && SUBREG_BYTE (exp) <= end))
	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
	  remove_from_table (p, i);
      }
}
7afe21cc
RK
1885\f
/* Recompute the hash codes of any valid entries in the hash table that
   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.

   This is called when we make a jump equivalence.  */

static void
rehash_using_reg (rtx x)
{
  unsigned int i;
  struct table_elt *p, *next;
  unsigned hash;

  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* If X is not a register or if the register is known not to be in any
     valid entries in the table, we have no work to do.  */

  if (GET_CODE (x) != REG
      || REG_IN_TABLE (REGNO (x)) < 0
      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
    return;

  /* Scan all hash chains looking for valid entries that mention X.
     If we find one and it is in the wrong hash chain, move it.  We can skip
     objects that are registers, since they are handled specially.  */

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
	    && exp_equiv_p (p->exp, p->exp, 1, 0)
	    && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
	  {
	    /* Unlink P from chain I...  */
	    if (p->next_same_hash)
	      p->next_same_hash->prev_same_hash = p->prev_same_hash;

	    if (p->prev_same_hash)
	      p->prev_same_hash->next_same_hash = p->next_same_hash;
	    else
	      table[i] = p->next_same_hash;

	    /* ...and push it onto the front of chain HASH.  */
	    p->next_same_hash = table[hash];
	    p->prev_same_hash = 0;
	    if (table[hash])
	      table[hash]->prev_same_hash = p;
	    table[hash] = p;
	  }
      }
}
1937\f
7afe21cc
RK
/* Remove from the hash table any expression that is a call-clobbered
   register.  Also update their TICK values.  */

static void
invalidate_for_call (void)
{
  unsigned int regno, endregno;
  unsigned int i;
  unsigned hash;
  struct table_elt *p, *next;
  int in_table = 0;

  /* Go through all the hard registers.  For each that is clobbered in
     a CALL_INSN, remove the register from quantity chains and update
     reg_tick if defined.  Also see if any of these registers is currently
     in the table.  */

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
      {
	delete_reg_equiv (regno);
	if (REG_TICK (regno) >= 0)
	  {
	    REG_TICK (regno)++;
	    SUBREG_TICKED (regno) = -1;
	  }

	in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
      }

  /* In the case where we have no call-clobbered hard registers in the
     table, we are done.  Otherwise, scan the table and remove any
     entry that overlaps a call-clobbered register.  */

  if (in_table)
    for (hash = 0; hash < HASH_SIZE; hash++)
      for (p = table[hash]; p; p = next)
	{
	  next = p->next_same_hash;

	  /* Only plain hard-register entries are of interest here.  */
	  if (GET_CODE (p->exp) != REG
	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
	    continue;

	  regno = REGNO (p->exp);
	  endregno = regno + hard_regno_nregs[regno][GET_MODE (p->exp)];

	  /* Remove P if any hard reg it occupies is call-clobbered.  */
	  for (i = regno; i < endregno; i++)
	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
	      {
		remove_from_table (p, hash);
		break;
	      }
	}
}
1993\f
/* Given an expression X of type CONST,
   and ELT which is its table entry (or 0 if it
   is not in the hash table),
   return an alternate expression for X as a register plus integer.
   If none can be found, return 0.  */

static rtx
use_related_value (rtx x, struct table_elt *elt)
{
  struct table_elt *relt = 0;
  struct table_elt *p, *q;
  HOST_WIDE_INT offset;

  /* First, is there anything related known?
     If we have a table element, we can tell from that.
     Otherwise, must look it up.  */

  if (elt != 0 && elt->related_value != 0)
    relt = elt;
  else if (elt == 0 && GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      if (subexp != 0)
	relt = lookup (subexp,
		       safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
		       GET_MODE (subexp));
    }

  if (relt == 0)
    return 0;

  /* Search all related table entries for one that has an
     equivalent register.  */

  p = relt;
  while (1)
    {
      /* This loop is strange in that it is executed in two different cases.
	 The first is when X is already in the table.  Then it is searching
	 the RELATED_VALUE list of X's class (RELT).  The second case is when
	 X is not in the table.  Then RELT points to a class for the related
	 value.

	 Ensure that, whatever case we are in, that we ignore classes that have
	 the same value as X.  */

      if (rtx_equal_p (x, p->exp))
	q = 0;
      else
	for (q = p->first_same_value; q; q = q->next_same_value)
	  if (GET_CODE (q->exp) == REG)
	    break;

      if (q)
	break;

      p = p->related_value;

      /* We went all the way around, so there is nothing to be found.
	 Alternatively, perhaps RELT was in the table for some other reason
	 and it has no related values recorded.  */
      if (p == relt || p == 0)
	break;
    }

  if (q == 0)
    return 0;

  offset = (get_integer_term (x) - get_integer_term (p->exp));
  /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
  return plus_constant (q->exp, offset);
}
2066\f
6462bb43
AO
/* Hash a string by summing its bytes.  A null pointer and the empty
   string both hash to 0.  */
static inline unsigned
canon_hash_string (const char *ps)
{
  const unsigned char *cp = (const unsigned char *) ps;
  unsigned sum = 0;

  if (cp != 0)
    for (; *cp != '\0'; cp++)
      sum += *cp;

  return sum;
}
2080
7afe21cc
RK
/* Hash an rtx.  We are careful to make sure the value is never negative.
   Equivalent registers hash identically.
   MODE is used in hashing for CONST_INTs only;
   otherwise the mode of X is used.

   Store 1 in do_not_record if any subexpression is volatile.

   Store 1 in hash_arg_in_memory if X contains a MEM rtx
   which does not have the RTX_UNCHANGING_P bit set.

   Note that cse_insn knows that the hash code of a MEM expression
   is just (int) MEM plus the hash code of the address.  */

static unsigned
canon_hash (rtx x, enum machine_mode mode)
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  const char *fmt;

  /* repeat is used to turn tail-recursion into iteration.  */
 repeat:
  if (x == 0)
    return hash;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	unsigned int regno = REGNO (x);
	bool record;

	/* On some machines, we can't record any non-fixed hard register,
	   because extending its life will cause reload problems.  We
	   consider ap, fp, sp, gp to be fixed for this purpose.

	   We also consider CCmode registers to be fixed for this purpose;
	   failure to do so leads to failure to simplify 0<100 type of
	   conditionals.

	   On all machines, we can't record any global registers.
	   Nor should we record any register that is in a small
	   class, as defined by CLASS_LIKELY_SPILLED_P.  */

	if (regno >= FIRST_PSEUDO_REGISTER)
	  record = true;
	else if (x == frame_pointer_rtx
		 || x == hard_frame_pointer_rtx
		 || x == arg_pointer_rtx
		 || x == stack_pointer_rtx
		 || x == pic_offset_table_rtx)
	  record = true;
	else if (global_regs[regno])
	  record = false;
	else if (fixed_regs[regno])
	  record = true;
	else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
	  record = true;
	else if (SMALL_REGISTER_CLASSES)
	  record = false;
	else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
	  record = false;
	else
	  record = true;

	if (!record)
	  {
	    do_not_record = 1;
	    return 0;
	  }

	/* Hash on the register's quantity number, not its regno, so that
	   equivalent registers hash identically.  */
	hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
	return hash;
      }

    /* We handle SUBREG of a REG specially because the underlying
       reg changes its hash value with every value change; we don't
       want to have to forget unrelated subregs when one subreg changes.  */
    case SUBREG:
      {
	if (GET_CODE (SUBREG_REG (x)) == REG)
	  {
	    hash += (((unsigned) SUBREG << 7)
		     + REGNO (SUBREG_REG (x))
		     + (SUBREG_BYTE (x) / UNITS_PER_WORD));
	    return hash;
	  }
	break;
      }

    case CONST_INT:
      {
	unsigned HOST_WIDE_INT tem = INTVAL (x);
	hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
	return hash;
      }

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (unsigned) code + (unsigned) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      else
	hash += ((unsigned) CONST_DOUBLE_LOW (x)
		 + (unsigned) CONST_DOUBLE_HIGH (x));
      return hash;

    case CONST_VECTOR:
      {
	int units;
	rtx elt;

	units = CONST_VECTOR_NUNITS (x);

	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ELT (x, i);
	    hash += canon_hash (elt, GET_MODE (elt));
	  }

	return hash;
      }

    /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
      return hash;

    case SYMBOL_REF:
      hash += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
      return hash;

    case MEM:
      /* We don't record if marked volatile or if BLKmode since we don't
	 know the size of the move.  */
      if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
	{
	  do_not_record = 1;
	  return 0;
	}
      if (! RTX_UNCHANGING_P (x) || fixed_base_plus_p (XEXP (x, 0)))
	hash_arg_in_memory = 1;

      /* Now that we have already found this special case,
	 might as well speed it up as much as possible.  */
      hash += (unsigned) MEM;
      x = XEXP (x, 0);
      goto repeat;

    case USE:
      /* A USE that mentions non-volatile memory needs special
	 handling since the MEM may be BLKmode which normally
	 prevents an entry from being made.  Pure calls are
	 marked by a USE which mentions BLKmode memory.  */
      if (GET_CODE (XEXP (x, 0)) == MEM
	  && ! MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  hash += (unsigned) USE;
	  x = XEXP (x, 0);

	  if (! RTX_UNCHANGING_P (x) || fixed_base_plus_p (XEXP (x, 0)))
	    hash_arg_in_memory = 1;

	  /* Now that we have already found this special case,
	     might as well speed it up as much as possible.  */
	  hash += (unsigned) MEM;
	  x = XEXP (x, 0);
	  goto repeat;
	}
      break;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      /* These either have side effects or are not safe to record.  */
      do_not_record = 1;
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	{
	  do_not_record = 1;
	  return 0;
	}
      else
	{
	  /* We don't want to take the filename and line into account.  */
	  hash += (unsigned) code + (unsigned) GET_MODE (x)
	    + canon_hash_string (ASM_OPERANDS_TEMPLATE (x))
	    + canon_hash_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);

	  if (ASM_OPERANDS_INPUT_LENGTH (x))
	    {
	      /* Hash inputs 1..N here; input 0 is handled by looping back
		 to the top via the repeat label.  */
	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
		{
		  hash += (canon_hash (ASM_OPERANDS_INPUT (x, i),
				       GET_MODE (ASM_OPERANDS_INPUT (x, i)))
			   + canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT
						(x, i)));
		}

	      hash += canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
	      x = ASM_OPERANDS_INPUT (x, 0);
	      mode = GET_MODE (x);
	      goto repeat;
	    }

	  return hash;
	}
      break;

    default:
      break;
    }

  /* Generic case: fold in the code and mode, then walk the operands
     according to the rtx format string, turning the last recursion into
     iteration.  */
  i = GET_RTX_LENGTH (code) - 1;
  hash += (unsigned) code + (unsigned) GET_MODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx tem = XEXP (x, i);

	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = tem;
	      goto repeat;
	    }
	  hash += canon_hash (tem, 0);
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  hash += canon_hash (XVECEXP (x, i, j), 0);
      else if (fmt[i] == 's')
	hash += canon_hash_string (XSTR (x, i));
      else if (fmt[i] == 'i')
	{
	  unsigned tem = XINT (x, i);
	  hash += tem;
	}
      else if (fmt[i] == '0' || fmt[i] == 't')
	/* Unused.  */
	;
      else
	abort ();
    }
  return hash;
}
2343
/* Like canon_hash but with no side effects: the do_not_record and
   hash_arg_in_memory globals are saved around the call and restored,
   so the caller's state is untouched.  */

static unsigned
safe_hash (rtx x, enum machine_mode mode)
{
  int save_do_not_record = do_not_record;
  int save_hash_arg_in_memory = hash_arg_in_memory;
  unsigned hash = canon_hash (x, mode);
  hash_arg_in_memory = save_hash_arg_in_memory;
  do_not_record = save_do_not_record;
  return hash;
}
2356\f
/* Return 1 iff X and Y would canonicalize into the same thing,
   without actually constructing the canonicalization of either one.
   If VALIDATE is nonzero,
   we assume X is an expression being processed from the rtl
   and Y was found in the hash table.  We check register refs
   in Y for being marked as valid.

   If EQUAL_VALUES is nonzero, we allow a register to match a constant value
   that is known to be in the register.  Ordinarily, we don't allow them
   to match, because letting them match would cause unpredictable results
   in all the places that search a hash table chain for an equivalent
   for a given value.  A possible equivalent that has different structure
   has its hash code computed from different data.  Whether the hash code
   is the same as that of the given value is pure luck.  */

static int
exp_equiv_p (rtx x, rtx y, int validate, int equal_values)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  /* Note: it is incorrect to assume an expression is equivalent to itself
     if VALIDATE is nonzero.  */
  if (x == y && !validate)
    return 1;
  if (x == 0 || y == 0)
    return x == y;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    {
      if (!equal_values)
	return 0;

      /* If X is a constant and Y is a register or vice versa, they may be
	 equivalent.  We only have to validate if Y is a register.  */
      if (CONSTANT_P (x) && GET_CODE (y) == REG
	  && REGNO_QTY_VALID_P (REGNO (y)))
	{
	  int y_q = REG_QTY (REGNO (y));
	  struct qty_table_elem *y_ent = &qty_table[y_q];

	  if (GET_MODE (y) == y_ent->mode
	      && rtx_equal_p (x, y_ent->const_rtx)
	      && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
	    return 1;
	}

      if (CONSTANT_P (y) && code == REG
	  && REGNO_QTY_VALID_P (REGNO (x)))
	{
	  int x_q = REG_QTY (REGNO (x));
	  struct qty_table_elem *x_ent = &qty_table[x_q];

	  if (GET_MODE (x) == x_ent->mode
	      && rtx_equal_p (y, x_ent->const_rtx))
	    return 1;
	}

      return 0;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case CONST_INT:
      /* These are shared objects; pointer identity suffices.  */
      return x == y;

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      {
	unsigned int regno = REGNO (y);
	unsigned int endregno
	  = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		     : hard_regno_nregs[regno][GET_MODE (y)]);
	unsigned int i;

	/* If the quantities are not the same, the expressions are not
	   equivalent.  If there are and we are not to validate, they
	   are equivalent.  Otherwise, ensure all regs are up-to-date.  */

	if (REG_QTY (REGNO (x)) != REG_QTY (regno))
	  return 0;

	if (! validate)
	  return 1;

	for (i = regno; i < endregno; i++)
	  if (REG_IN_TABLE (i) != REG_TICK (i))
	    return 0;

	return 1;
      }

    /* For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
			       validate, equal_values))
	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
			       validate, equal_values)
		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
				  validate, equal_values)));

    case ASM_OPERANDS:
      /* We don't use the generic code below because we want to
	 disregard filename and line numbers.  */

      /* A volatile asm isn't equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      if (GET_MODE (x) != GET_MODE (y)
	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
	return 0;

      if (ASM_OPERANDS_INPUT_LENGTH (x))
	{
	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	    if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
			       ASM_OPERANDS_INPUT (y, i),
			       validate, equal_values)
		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
	      return 0;
	}

      return 1;

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole things.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
	    return 0;
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
			       validate, equal_values))
	      return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case '0':
	case 't':
	  /* These fields carry no operand data to compare.  */
	  break;

	default:
	  abort ();
	}
    }

  return 1;
}
2559\f
9ae8ffe7
JL
/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.  */

static int
cse_rtx_varies_p (rtx x, int from_alias)
{
  /* We need not check for X and the equivalence class being of the same
     mode because if X is equivalent to a constant in some mode, it
     doesn't vary in any mode.  */

  /* A register whose quantity has a known constant value doesn't vary.  */
  if (GET_CODE (x) == REG
      && REGNO_QTY_VALID_P (REGNO (x)))
    {
      int x_q = REG_QTY (REGNO (x));
      struct qty_table_elem *x_ent = &qty_table[x_q];

      if (GET_MODE (x) == x_ent->mode
	  && x_ent->const_rtx != NULL_RTX)
	return 0;
    }

  /* Likewise (reg + const_int) where the register is known constant.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
    {
      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
      struct qty_table_elem *x0_ent = &qty_table[x0_q];

      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
	  && x0_ent->const_rtx != NULL_RTX)
	return 0;
    }

  /* This can happen as the result of virtual register instantiation, if
     the initial constant is too large to be a valid address.  This gives
     us a three instruction sequence, load large offset into a register,
     load fp minus a constant into a register, then a MEM which is the
     sum of the two `constant' registers.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == REG
      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
    {
      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
      int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
      struct qty_table_elem *x0_ent = &qty_table[x0_q];
      struct qty_table_elem *x1_ent = &qty_table[x1_q];

      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
	  && x0_ent->const_rtx != NULL_RTX
	  && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
	  && x1_ent->const_rtx != NULL_RTX)
	return 0;
    }

  /* Fall back to the generic test.  */
  return rtx_varies_p (x, from_alias);
}
2620\f
/* Canonicalize an expression:
   replace each register reference inside it
   with the "oldest" equivalent register.

   If INSN is nonzero and we are replacing a pseudo with a hard register
   or vice versa, validate_change is used to ensure that INSN remains valid
   after we make our substitution.  The calls are made with IN_GROUP nonzero
   so apply_change_group must be called upon the outermost return from this
   function (unless INSN is zero).  The result of apply_change_group can
   generally be discarded since the changes we are making are optional.

   Note that X is modified in place (except in the REG case, where a
   replacement rtx may be returned).  */

static rtx
canon_reg (rtx x, rtx insn)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return x;

  code = GET_CODE (x);
  switch (code)
    {
    /* Constants and other leaf codes contain no registers.  */
    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return x;

    case REG:
      {
	int first;
	int q;
	struct qty_table_elem *ent;

	/* Never replace a hard reg, because hard regs can appear
	   in more than one machine mode, and we must preserve the mode
	   of each occurrence.  Also, some hard regs appear in
	   MEMs that are shared and mustn't be altered.  Don't try to
	   replace any reg that maps to a reg of class NO_REGS.  */
	if (REGNO (x) < FIRST_PSEUDO_REGISTER
	    || ! REGNO_QTY_VALID_P (REGNO (x)))
	  return x;

	q = REG_QTY (REGNO (x));
	ent = &qty_table[q];
	first = ent->first_reg;
	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
		: REGNO_REG_CLASS (first) == NO_REGS ? x
		: gen_rtx_REG (ent->mode, first));
      }

    default:
      break;
    }

  /* Recurse on the operands, rewriting them in place.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      if (fmt[i] == 'e')
	{
	  rtx new = canon_reg (XEXP (x, i), insn);
	  int insn_code;

	  /* If replacing pseudo with hard reg or vice versa, ensure the
	     insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
	  if (insn != 0 && new != 0
	      && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
	      && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
		   != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
		  || (insn_code = recog_memoized (insn)) < 0
		  || insn_data[insn_code].n_dups > 0))
	    validate_change (insn, &XEXP (x, i), new, 1);
	  else
	    XEXP (x, i) = new;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
    }

  return x;
}
2713\f
/* LOC is a location within INSN that is an operand address (the contents of
   a MEM).  Find the best equivalent address to use that is valid for this
   insn.

   On most CISC machines, complicated address modes are costly, and rtx_cost
   is a good approximation for that cost.  However, most RISC machines have
   only a few (usually only one) memory reference formats.  If an address is
   valid at all, it is often just as cheap as any other address.  Hence, for
   RISC machines, we use `address_cost' to compare the costs of various
   addresses.  For two addresses of equal cost, choose the one with the
   highest `rtx_cost' value as that has the potential of eliminating the
   most insns.  For equal costs, we choose the first in the equivalence
   class.  Note that we ignore the fact that pseudo registers are cheaper than
   hard registers here because we would also prefer the pseudo registers.

   Replacements are applied through validate_change, so INSN must remain
   recognizable; candidates that fail validation are marked with the
   table_elt `flag' field and skipped on subsequent passes.  */

static void
find_best_addr (rtx insn, rtx *loc, enum machine_mode mode)
{
  struct table_elt *elt;
  rtx addr = *loc;
  struct table_elt *p;
  int found_better = 1;
  /* HASH clobbers these globals; save them so we can restore after probing.  */
  int save_do_not_record = do_not_record;
  int save_hash_arg_in_memory = hash_arg_in_memory;
  int addr_volatile;
  int regno;
  unsigned hash;

  /* Do not try to replace constant addresses or addresses of local and
     argument slots.  These MEM expressions are made only once and inserted
     in many instructions, as well as being used to control symbol table
     output.  It is not safe to clobber them.

     There are some uncommon cases where the address is already in a register
     for some reason, but we cannot take advantage of that because we have
     no easy way to unshare the MEM.  In addition, looking up all stack
     addresses is costly.  */
  if ((GET_CODE (addr) == PLUS
       && GET_CODE (XEXP (addr, 0)) == REG
       && GET_CODE (XEXP (addr, 1)) == CONST_INT
       && (regno = REGNO (XEXP (addr, 0)),
	   regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
	   || regno == ARG_POINTER_REGNUM))
      || (GET_CODE (addr) == REG
	  && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
	      || regno == HARD_FRAME_POINTER_REGNUM
	      || regno == ARG_POINTER_REGNUM))
      || GET_CODE (addr) == ADDRESSOF
      || CONSTANT_ADDRESS_P (addr))
    return;

  /* If this address is not simply a register, try to fold it.  This will
     sometimes simplify the expression.  Many simplifications
     will not be valid, but some, usually applying the associative rule, will
     be valid and produce better code.  */
  if (GET_CODE (addr) != REG)
    {
      rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
      int addr_folded_cost = address_cost (folded, mode);
      int addr_cost = address_cost (addr, mode);

      if ((addr_folded_cost < addr_cost
	   || (addr_folded_cost == addr_cost
	       /* ??? The rtx_cost comparison is left over from an older
		  version of this code.  It is probably no longer helpful.  */
	       && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
		   || approx_reg_cost (folded) < approx_reg_cost (addr))))
	  && validate_change (insn, loc, folded, 0))
	addr = folded;
    }

  /* If this address is not in the hash table, we can't look for equivalences
     of the whole address.  Also, ignore if volatile.  */

  do_not_record = 0;
  hash = HASH (addr, Pmode);
  /* HASH sets do_not_record for volatile/unrecordable expressions; capture
     that verdict, then restore the callers' saved global state.  */
  addr_volatile = do_not_record;
  do_not_record = save_do_not_record;
  hash_arg_in_memory = save_hash_arg_in_memory;

  if (addr_volatile)
    return;

  elt = lookup (addr, hash, Pmode);

  if (elt)
    {
      /* We need to find the best (under the criteria documented above) entry
	 in the class that is valid.  We use the `flag' field to indicate
	 choices that were invalid and iterate until we can't find a better
	 one that hasn't already been tried.  */

      for (p = elt->first_same_value; p; p = p->next_same_value)
	p->flag = 0;

      while (found_better)
	{
	  int best_addr_cost = address_cost (*loc, mode);
	  /* Halved-and-rounded rtx cost, used only to break address-cost
	     ties.  */
	  int best_rtx_cost = (elt->cost + 1) >> 1;
	  int exp_cost;
	  struct table_elt *best_elt = elt;

	  found_better = 0;
	  for (p = elt->first_same_value; p; p = p->next_same_value)
	    if (! p->flag)
	      {
		if ((GET_CODE (p->exp) == REG
		     || exp_equiv_p (p->exp, p->exp, 1, 0))
		    && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
			|| (exp_cost == best_addr_cost
			    && ((p->cost + 1) >> 1) > best_rtx_cost)))
		  {
		    found_better = 1;
		    best_addr_cost = exp_cost;
		    best_rtx_cost = (p->cost + 1) >> 1;
		    best_elt = p;
		  }
	      }

	  if (found_better)
	    {
	      if (validate_change (insn, loc,
				   canon_reg (copy_rtx (best_elt->exp),
					      NULL_RTX), 0))
		return;
	      else
		best_elt->flag = 1;
	    }
	}
    }

  /* If the address is a binary operation with the first operand a register
     and the second a constant, do the same as above, but looking for
     equivalences of the register.  Then try to simplify before checking for
     the best address to use.  This catches a few cases:  First is when we
     have REG+const and the register is another REG+const.  We can often merge
     the constants and eliminate one insn and one register.  It may also be
     that a machine has a cheap REG+REG+const.  Finally, this improves the
     code on the Alpha for unaligned byte stores.  */

  if (flag_expensive_optimizations
      && ARITHMETIC_P (*loc)
      && GET_CODE (XEXP (*loc, 0)) == REG)
    {
      rtx op1 = XEXP (*loc, 1);

      do_not_record = 0;
      hash = HASH (XEXP (*loc, 0), Pmode);
      do_not_record = save_do_not_record;
      hash_arg_in_memory = save_hash_arg_in_memory;

      elt = lookup (XEXP (*loc, 0), hash, Pmode);
      if (elt == 0)
	return;

      /* We need to find the best (under the criteria documented above) entry
	 in the class that is valid.  We use the `flag' field to indicate
	 choices that were invalid and iterate until we can't find a better
	 one that hasn't already been tried.  */

      for (p = elt->first_same_value; p; p = p->next_same_value)
	p->flag = 0;

      /* NOTE(review): found_better is not reset to 1 here.  If the
	 whole-address lookup above found an equivalence class, its loop
	 terminated with found_better == 0 and this loop is skipped
	 entirely; it only runs when the full address was absent from the
	 hash table.  Possibly intentional -- confirm before relying on it.  */
      while (found_better)
	{
	  int best_addr_cost = address_cost (*loc, mode);
	  int best_rtx_cost = (COST (*loc) + 1) >> 1;
	  struct table_elt *best_elt = elt;
	  rtx best_rtx = *loc;
	  int count;

	  /* This is at worst case an O(n^2) algorithm, so limit our search
	     to the first 32 elements on the list.  This avoids trouble
	     compiling code with very long basic blocks that can easily
	     call simplify_gen_binary so many times that we run out of
	     memory.  */

	  found_better = 0;
	  for (p = elt->first_same_value, count = 0;
	       p && count < 32;
	       p = p->next_same_value, count++)
	    if (! p->flag
		&& (GET_CODE (p->exp) == REG
		    || exp_equiv_p (p->exp, p->exp, 1, 0)))
	      {
		/* Rebuild the operation with the equivalent register and
		   let the simplifier try to merge constants.  */
		rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
					       p->exp, op1);
		int new_cost;
		new_cost = address_cost (new, mode);

		if (new_cost < best_addr_cost
		    || (new_cost == best_addr_cost
			&& (COST (new) + 1) >> 1 > best_rtx_cost))
		  {
		    found_better = 1;
		    best_addr_cost = new_cost;
		    best_rtx_cost = (COST (new) + 1) >> 1;
		    best_elt = p;
		    best_rtx = new;
		  }
	      }

	  if (found_better)
	    {
	      if (validate_change (insn, loc,
				   canon_reg (copy_rtx (best_rtx),
					      NULL_RTX), 0))
		return;
	      else
		best_elt->flag = 1;
	    }
	}
    }
}
2928\f
/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
   operation (EQ, NE, GT, etc.), follow it back through the hash table to
   find what values are actually being compared.

   *PARG1 and *PARG2 are updated to contain the rtx representing the values
   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
   compared to produce cc0.

   *PMODE1 and *PMODE2 receive the modes of the final arguments as seen
   before the final fold_rtx calls (folding may yield a const_int, whose
   VOIDmode would lose the information).

   The return value is the comparison operator and is either the code of
   A or the code corresponding to the inverse of the comparison.  */

static enum rtx_code
find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
		      enum machine_mode *pmode1, enum machine_mode *pmode2)
{
  rtx arg1, arg2;

  arg1 = *parg1, arg2 = *parg2;

  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  Each iteration
     either peels one level of equivalence from ARG1 or breaks out.  */

  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
    {
      /* Set nonzero when we find something of interest.  */
      rtx x = 0;
      int reverse_code = 0;
      struct table_elt *p = 0;

      /* If arg1 is a COMPARE, extract the comparison arguments from it.
	 On machines with CC0, this is the only case that can occur, since
	 fold_rtx will return the COMPARE or item being compared with zero
	 when given CC0.  */

      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
	x = arg1;

      /* If ARG1 is a comparison operator and CODE is testing for
	 STORE_FLAG_VALUE, get the inner arguments.  */

      else if (COMPARISON_P (arg1))
	{
#ifdef FLOAT_STORE_FLAG_VALUE
	  REAL_VALUE_TYPE fsfv;
#endif

	  /* (ne X 0) with X a comparison means X itself; (lt X 0) means the
	     same when STORE_FLAG_VALUE is negative (sign bit set).  */
	  if (code == NE
	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
		  && code == LT && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
		  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
		      REAL_VALUE_NEGATIVE (fsfv)))
#endif
	      )
	    x = arg1;
	  /* (eq X 0) / (ge X 0) mean the inverse of comparison X.  */
	  else if (code == EQ
		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
		       && code == GE && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			   REAL_VALUE_NEGATIVE (fsfv)))
#endif
		   )
	    x = arg1, reverse_code = 1;
	}

      /* ??? We could also check for

	 (ne (and (eq (...) (const_int 1))) (const_int 0))

	 and related forms, but let's wait until we see them occurring.  */

      if (x == 0)
	/* Look up ARG1 in the hash table and see if it has an equivalence
	   that lets us see what is being compared.  */
	p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
		    GET_MODE (arg1));
      if (p)
	{
	  p = p->first_same_value;

	  /* If what we compare is already known to be constant, that is as
	     good as it gets.
	     We need to break the loop in this case, because otherwise we
	     can have an infinite loop when looking at a reg that is known
	     to be a constant which is the same as a comparison of a reg
	     against zero which appears later in the insn stream, which in
	     turn is constant and the same as the comparison of the first reg
	     against zero...  */
	  if (p->is_const)
	    break;
	}

      for (; p; p = p->next_same_value)
	{
	  enum machine_mode inner_mode = GET_MODE (p->exp);
#ifdef FLOAT_STORE_FLAG_VALUE
	  REAL_VALUE_TYPE fsfv;
#endif

	  /* If the entry isn't valid, skip it.  */
	  if (! exp_equiv_p (p->exp, p->exp, 1, 0))
	    continue;

	  if (GET_CODE (p->exp) == COMPARE
	      /* Another possibility is that this machine has a compare insn
		 that includes the comparison code.  In that case, ARG1 would
		 be equivalent to a comparison operation that would set ARG1 to
		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
		 ORIG_CODE is the actual comparison being done; if it is an EQ,
		 we must reverse ORIG_CODE.  On machine with a negative value
		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
	      || ((code == NE
		   || (code == LT
		       && GET_MODE_CLASS (inner_mode) == MODE_INT
		       && (GET_MODE_BITSIZE (inner_mode)
			   <= HOST_BITS_PER_WIDE_INT)
		       && (STORE_FLAG_VALUE
			   & ((HOST_WIDE_INT) 1
			      << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
		   /* NOTE(review): this takes the mode from ARG1 while the
		      surrounding tests use INNER_MODE (the mode of p->exp);
		      verify whether GET_MODE (arg1) is intended here.  */
		   || (code == LT
		       && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			   REAL_VALUE_NEGATIVE (fsfv)))
#endif
		   )
		  && COMPARISON_P (p->exp)))
	    {
	      x = p->exp;
	      break;
	    }
	  else if ((code == EQ
		    || (code == GE
			&& GET_MODE_CLASS (inner_mode) == MODE_INT
			&& (GET_MODE_BITSIZE (inner_mode)
			    <= HOST_BITS_PER_WIDE_INT)
			&& (STORE_FLAG_VALUE
			    & ((HOST_WIDE_INT) 1
			       << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
		    /* NOTE(review): same ARG1-vs-INNER_MODE question as
		       above -- confirm.  */
		    || (code == GE
			&& GET_MODE_CLASS (inner_mode) == MODE_FLOAT
			&& (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			    REAL_VALUE_NEGATIVE (fsfv)))
#endif
		    )
		   && COMPARISON_P (p->exp))
	    {
	      reverse_code = 1;
	      x = p->exp;
	      break;
	    }

	  /* If this is a non-trapping address, e.g. fp + constant, the
	     equivalent is a better operand since it may let us predict
	     the value of the comparison.  */
	  else if (!rtx_addr_can_trap_p (p->exp))
	    {
	      arg1 = p->exp;
	      continue;
	    }
	}

      /* If we didn't find a useful equivalence for ARG1, we are done.
	 Otherwise, set up for the next iteration.  */
      if (x == 0)
	break;

      /* If we need to reverse the comparison, make sure that that is
	 possible -- we can't necessarily infer the value of GE from LT
	 with floating-point operands.  */
      if (reverse_code)
	{
	  enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
	  if (reversed == UNKNOWN)
	    break;
	  else
	    code = reversed;
	}
      else if (COMPARISON_P (x))
	code = GET_CODE (x);
      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
    }

  /* Return our results.  Return the modes from before fold_rtx
     because fold_rtx might produce const_int, and then it's too late.  */
  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);

  return code;
}
3123\f
3124/* If X is a nontrivial arithmetic operation on an argument
3125 for which a constant value can be determined, return
3126 the result of operating on that value, as a constant.
3127 Otherwise, return X, possibly with one or more operands
3128 modified by recursive calls to this function.
3129
e7bb59fa
RK
3130 If X is a register whose contents are known, we do NOT
3131 return those contents here. equiv_constant is called to
3132 perform that task.
7afe21cc
RK
3133
3134 INSN is the insn that we may be modifying. If it is 0, make a copy
3135 of X before modifying it. */
3136
3137static rtx
7080f735 3138fold_rtx (rtx x, rtx insn)
7afe21cc 3139{
b3694847
SS
3140 enum rtx_code code;
3141 enum machine_mode mode;
3142 const char *fmt;
3143 int i;
7afe21cc
RK
3144 rtx new = 0;
3145 int copied = 0;
3146 int must_swap = 0;
3147
3148 /* Folded equivalents of first two operands of X. */
3149 rtx folded_arg0;
3150 rtx folded_arg1;
3151
3152 /* Constant equivalents of first three operands of X;
3153 0 when no such equivalent is known. */
3154 rtx const_arg0;
3155 rtx const_arg1;
3156 rtx const_arg2;
3157
3158 /* The mode of the first operand of X. We need this for sign and zero
3159 extends. */
3160 enum machine_mode mode_arg0;
3161
3162 if (x == 0)
3163 return x;
3164
3165 mode = GET_MODE (x);
3166 code = GET_CODE (x);
3167 switch (code)
3168 {
3169 case CONST:
3170 case CONST_INT:
3171 case CONST_DOUBLE:
69ef87e2 3172 case CONST_VECTOR:
7afe21cc
RK
3173 case SYMBOL_REF:
3174 case LABEL_REF:
3175 case REG:
3176 /* No use simplifying an EXPR_LIST
3177 since they are used only for lists of args
3178 in a function call's REG_EQUAL note. */
3179 case EXPR_LIST:
956d6950
JL
3180 /* Changing anything inside an ADDRESSOF is incorrect; we don't
3181 want to (e.g.,) make (addressof (const_int 0)) just because
3182 the location is known to be zero. */
3183 case ADDRESSOF:
7afe21cc
RK
3184 return x;
3185
3186#ifdef HAVE_cc0
3187 case CC0:
3188 return prev_insn_cc0;
3189#endif
3190
3191 case PC:
3192 /* If the next insn is a CODE_LABEL followed by a jump table,
3193 PC's value is a LABEL_REF pointing to that label. That
8aeea6e6 3194 lets us fold switch statements on the VAX. */
e1233a7d
RH
3195 {
3196 rtx next;
7c2aa9d7 3197 if (insn && tablejump_p (insn, &next, NULL))
e1233a7d
RH
3198 return gen_rtx_LABEL_REF (Pmode, next);
3199 }
7afe21cc
RK
3200 break;
3201
3202 case SUBREG:
c610adec
RK
3203 /* See if we previously assigned a constant value to this SUBREG. */
3204 if ((new = lookup_as_function (x, CONST_INT)) != 0
3205 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
7afe21cc
RK
3206 return new;
3207
4b980e20
RK
3208 /* If this is a paradoxical SUBREG, we have no idea what value the
3209 extra bits would have. However, if the operand is equivalent
3210 to a SUBREG whose operand is the same as our mode, and all the
3211 modes are within a word, we can just use the inner operand
31c85c78
RK
3212 because these SUBREGs just say how to treat the register.
3213
3214 Similarly if we find an integer constant. */
4b980e20 3215
e5f6a288 3216 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4b980e20
RK
3217 {
3218 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3219 struct table_elt *elt;
3220
3221 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3222 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3223 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3224 imode)) != 0)
ddc356e8 3225 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
31c85c78
RK
3226 {
3227 if (CONSTANT_P (elt->exp)
3228 && GET_MODE (elt->exp) == VOIDmode)
3229 return elt->exp;
3230
4b980e20
RK
3231 if (GET_CODE (elt->exp) == SUBREG
3232 && GET_MODE (SUBREG_REG (elt->exp)) == mode
906c4e36 3233 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4b980e20 3234 return copy_rtx (SUBREG_REG (elt->exp));
1bb98cec 3235 }
4b980e20
RK
3236
3237 return x;
3238 }
e5f6a288 3239
7afe21cc
RK
3240 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3241 We might be able to if the SUBREG is extracting a single word in an
3242 integral mode or extracting the low part. */
3243
3244 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3245 const_arg0 = equiv_constant (folded_arg0);
3246 if (const_arg0)
3247 folded_arg0 = const_arg0;
3248
3249 if (folded_arg0 != SUBREG_REG (x))
3250 {
949c5d62
JH
3251 new = simplify_subreg (mode, folded_arg0,
3252 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7afe21cc
RK
3253 if (new)
3254 return new;
3255 }
e5f6a288 3256
e5f6a288 3257 if (GET_CODE (folded_arg0) == REG
4c442790 3258 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0)))
e5f6a288
RK
3259 {
3260 struct table_elt *elt;
3261
3262 /* We can use HASH here since we know that canon_hash won't be
3263 called. */
3264 elt = lookup (folded_arg0,
3265 HASH (folded_arg0, GET_MODE (folded_arg0)),
3266 GET_MODE (folded_arg0));
3267
3268 if (elt)
3269 elt = elt->first_same_value;
3270
4c442790
PB
3271 if (subreg_lowpart_p (x))
3272 /* If this is a narrowing SUBREG and our operand is a REG, see
3273 if we can find an equivalence for REG that is an arithmetic
3274 operation in a wider mode where both operands are paradoxical
3275 SUBREGs from objects of our result mode. In that case, we
3276 couldn-t report an equivalent value for that operation, since we
3277 don't know what the extra bits will be. But we can find an
3278 equivalence for this SUBREG by folding that operation in the
3279 narrow mode. This allows us to fold arithmetic in narrow modes
3280 when the machine only supports word-sized arithmetic.
3281
3282 Also look for a case where we have a SUBREG whose operand
3283 is the same as our result. If both modes are smaller
3284 than a word, we are simply interpreting a register in
3285 different modes and we can use the inner value. */
3286
3287 for (; elt; elt = elt->next_same_value)
3288 {
3289 enum rtx_code eltcode = GET_CODE (elt->exp);
3290
3291 /* Just check for unary and binary operations. */
ec8e098d
PB
3292 if (UNARY_P (elt->exp)
3293 && eltcode != SIGN_EXTEND
3294 && eltcode != ZERO_EXTEND
4c442790
PB
3295 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3296 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3297 && (GET_MODE_CLASS (mode)
3298 == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3299 {
3300 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
e5f6a288 3301
4c442790
PB
3302 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3303 op0 = fold_rtx (op0, NULL_RTX);
e5f6a288 3304
e5f6a288 3305 op0 = equiv_constant (op0);
4c442790
PB
3306 if (op0)
3307 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3308 op0, mode);
3309 }
ec8e098d 3310 else if (ARITHMETIC_P (elt->exp)
4c442790
PB
3311 && eltcode != DIV && eltcode != MOD
3312 && eltcode != UDIV && eltcode != UMOD
3313 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3314 && eltcode != ROTATE && eltcode != ROTATERT
3315 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3316 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3317 == mode))
3318 || CONSTANT_P (XEXP (elt->exp, 0)))
3319 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3320 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3321 == mode))
3322 || CONSTANT_P (XEXP (elt->exp, 1))))
3323 {
3324 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3325 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3326
3327 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3328 op0 = fold_rtx (op0, NULL_RTX);
3329
3330 if (op0)
3331 op0 = equiv_constant (op0);
3332
3333 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3334 op1 = fold_rtx (op1, NULL_RTX);
3335
3336 if (op1)
3337 op1 = equiv_constant (op1);
3338
3339 /* If we are looking for the low SImode part of
3340 (ashift:DI c (const_int 32)), it doesn't work
3341 to compute that in SImode, because a 32-bit shift
3342 in SImode is unpredictable. We know the value is 0. */
3343 if (op0 && op1
3344 && GET_CODE (elt->exp) == ASHIFT
3345 && GET_CODE (op1) == CONST_INT
3346 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3347 {
3348 if (INTVAL (op1)
3349 < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3350 /* If the count fits in the inner mode's width,
3351 but exceeds the outer mode's width,
3352 the value will get truncated to 0
3353 by the subreg. */
3354 new = CONST0_RTX (mode);
3355 else
3356 /* If the count exceeds even the inner mode's width,
76fb0b60 3357 don't fold this expression. */
4c442790
PB
3358 new = 0;
3359 }
3360 else if (op0 && op1)
3361 new = simplify_binary_operation (GET_CODE (elt->exp), mode, op0, op1);
3362 }
e5f6a288 3363
4c442790
PB
3364 else if (GET_CODE (elt->exp) == SUBREG
3365 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3366 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3367 <= UNITS_PER_WORD)
3368 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3369 new = copy_rtx (SUBREG_REG (elt->exp));
4b980e20 3370
4c442790
PB
3371 if (new)
3372 return new;
3373 }
3374 else
3375 /* A SUBREG resulting from a zero extension may fold to zero if
3376 it extracts higher bits than the ZERO_EXTEND's source bits.
3377 FIXME: if combine tried to, er, combine these instructions,
3378 this transformation may be moved to simplify_subreg. */
3379 for (; elt; elt = elt->next_same_value)
3380 {
3381 if (GET_CODE (elt->exp) == ZERO_EXTEND
3382 && subreg_lsb (x)
3383 >= GET_MODE_BITSIZE (GET_MODE (XEXP (elt->exp, 0))))
3384 return CONST0_RTX (mode);
3385 }
e5f6a288
RK
3386 }
3387
7afe21cc
RK
3388 return x;
3389
3390 case NOT:
3391 case NEG:
3392 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3393 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3394 new = lookup_as_function (XEXP (x, 0), code);
3395 if (new)
3396 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3397 break;
13c9910f 3398
7afe21cc
RK
3399 case MEM:
3400 /* If we are not actually processing an insn, don't try to find the
3401 best address. Not only don't we care, but we could modify the
3402 MEM in an invalid way since we have no insn to validate against. */
3403 if (insn != 0)
01329426 3404 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
7afe21cc
RK
3405
3406 {
3407 /* Even if we don't fold in the insn itself,
3408 we can safely do so here, in hopes of getting a constant. */
906c4e36 3409 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
7afe21cc 3410 rtx base = 0;
906c4e36 3411 HOST_WIDE_INT offset = 0;
7afe21cc
RK
3412
3413 if (GET_CODE (addr) == REG
1bb98cec
DM
3414 && REGNO_QTY_VALID_P (REGNO (addr)))
3415 {
3416 int addr_q = REG_QTY (REGNO (addr));
3417 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3418
3419 if (GET_MODE (addr) == addr_ent->mode
3420 && addr_ent->const_rtx != NULL_RTX)
3421 addr = addr_ent->const_rtx;
3422 }
7afe21cc
RK
3423
3424 /* If address is constant, split it into a base and integer offset. */
3425 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3426 base = addr;
3427 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3428 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3429 {
3430 base = XEXP (XEXP (addr, 0), 0);
3431 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3432 }
3433 else if (GET_CODE (addr) == LO_SUM
3434 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3435 base = XEXP (addr, 1);
e9a25f70 3436 else if (GET_CODE (addr) == ADDRESSOF)
956d6950 3437 return change_address (x, VOIDmode, addr);
7afe21cc
RK
3438
3439 /* If this is a constant pool reference, we can fold it into its
3440 constant to allow better value tracking. */
3441 if (base && GET_CODE (base) == SYMBOL_REF
3442 && CONSTANT_POOL_ADDRESS_P (base))
3443 {
3444 rtx constant = get_pool_constant (base);
3445 enum machine_mode const_mode = get_pool_mode (base);
3446 rtx new;
3447
3448 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
dd0ba281
RS
3449 {
3450 constant_pool_entries_cost = COST (constant);
3451 constant_pool_entries_regcost = approx_reg_cost (constant);
3452 }
7afe21cc
RK
3453
3454 /* If we are loading the full constant, we have an equivalence. */
3455 if (offset == 0 && mode == const_mode)
3456 return constant;
3457
9faa82d8 3458 /* If this actually isn't a constant (weird!), we can't do
7afe21cc
RK
3459 anything. Otherwise, handle the two most common cases:
3460 extracting a word from a multi-word constant, and extracting
3461 the low-order bits. Other cases don't seem common enough to
3462 worry about. */
3463 if (! CONSTANT_P (constant))
3464 return x;
3465
3466 if (GET_MODE_CLASS (mode) == MODE_INT
3467 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3468 && offset % UNITS_PER_WORD == 0
3469 && (new = operand_subword (constant,
3470 offset / UNITS_PER_WORD,
3471 0, const_mode)) != 0)
3472 return new;
3473
3474 if (((BYTES_BIG_ENDIAN
3475 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3476 || (! BYTES_BIG_ENDIAN && offset == 0))
4de249d9 3477 && (new = gen_lowpart (mode, constant)) != 0)
7afe21cc
RK
3478 return new;
3479 }
3480
3481 /* If this is a reference to a label at a known position in a jump
3482 table, we also know its value. */
3483 if (base && GET_CODE (base) == LABEL_REF)
3484 {
3485 rtx label = XEXP (base, 0);
3486 rtx table_insn = NEXT_INSN (label);
278a83b2 3487
7afe21cc
RK
3488 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3489 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3490 {
3491 rtx table = PATTERN (table_insn);
3492
3493 if (offset >= 0
3494 && (offset / GET_MODE_SIZE (GET_MODE (table))
3495 < XVECLEN (table, 0)))
3496 return XVECEXP (table, 0,
3497 offset / GET_MODE_SIZE (GET_MODE (table)));
3498 }
3499 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3500 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3501 {
3502 rtx table = PATTERN (table_insn);
3503
3504 if (offset >= 0
3505 && (offset / GET_MODE_SIZE (GET_MODE (table))
3506 < XVECLEN (table, 1)))
3507 {
3508 offset /= GET_MODE_SIZE (GET_MODE (table));
38a448ca
RH
3509 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3510 XEXP (table, 0));
7afe21cc
RK
3511
3512 if (GET_MODE (table) != Pmode)
38a448ca 3513 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
7afe21cc 3514
278a83b2 3515 /* Indicate this is a constant. This isn't a
67a37737
RK
3516 valid form of CONST, but it will only be used
3517 to fold the next insns and then discarded, so
ac7ef8d5
FS
3518 it should be safe.
3519
3520 Note this expression must be explicitly discarded,
3521 by cse_insn, else it may end up in a REG_EQUAL note
3522 and "escape" to cause problems elsewhere. */
38a448ca 3523 return gen_rtx_CONST (GET_MODE (new), new);
7afe21cc
RK
3524 }
3525 }
3526 }
3527
3528 return x;
3529 }
9255709c 3530
a5e5cf67
RH
3531#ifdef NO_FUNCTION_CSE
3532 case CALL:
3533 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3534 return x;
3535 break;
3536#endif
3537
9255709c 3538 case ASM_OPERANDS:
6462bb43
AO
3539 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3540 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3541 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
9255709c 3542 break;
278a83b2 3543
e9a25f70
JL
3544 default:
3545 break;
7afe21cc
RK
3546 }
3547
3548 const_arg0 = 0;
3549 const_arg1 = 0;
3550 const_arg2 = 0;
3551 mode_arg0 = VOIDmode;
3552
3553 /* Try folding our operands.
3554 Then see which ones have constant values known. */
3555
3556 fmt = GET_RTX_FORMAT (code);
3557 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3558 if (fmt[i] == 'e')
3559 {
3560 rtx arg = XEXP (x, i);
3561 rtx folded_arg = arg, const_arg = 0;
3562 enum machine_mode mode_arg = GET_MODE (arg);
3563 rtx cheap_arg, expensive_arg;
3564 rtx replacements[2];
3565 int j;
5b437e0f 3566 int old_cost = COST_IN (XEXP (x, i), code);
7afe21cc
RK
3567
3568 /* Most arguments are cheap, so handle them specially. */
3569 switch (GET_CODE (arg))
3570 {
3571 case REG:
3572 /* This is the same as calling equiv_constant; it is duplicated
3573 here for speed. */
1bb98cec
DM
3574 if (REGNO_QTY_VALID_P (REGNO (arg)))
3575 {
3576 int arg_q = REG_QTY (REGNO (arg));
3577 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3578
3579 if (arg_ent->const_rtx != NULL_RTX
3580 && GET_CODE (arg_ent->const_rtx) != REG
3581 && GET_CODE (arg_ent->const_rtx) != PLUS)
3582 const_arg
4de249d9 3583 = gen_lowpart (GET_MODE (arg),
1bb98cec
DM
3584 arg_ent->const_rtx);
3585 }
7afe21cc
RK
3586 break;
3587
3588 case CONST:
3589 case CONST_INT:
3590 case SYMBOL_REF:
3591 case LABEL_REF:
3592 case CONST_DOUBLE:
69ef87e2 3593 case CONST_VECTOR:
7afe21cc
RK
3594 const_arg = arg;
3595 break;
3596
3597#ifdef HAVE_cc0
3598 case CC0:
3599 folded_arg = prev_insn_cc0;
3600 mode_arg = prev_insn_cc0_mode;
3601 const_arg = equiv_constant (folded_arg);
3602 break;
3603#endif
3604
3605 default:
3606 folded_arg = fold_rtx (arg, insn);
3607 const_arg = equiv_constant (folded_arg);
3608 }
3609
3610 /* For the first three operands, see if the operand
3611 is constant or equivalent to a constant. */
3612 switch (i)
3613 {
3614 case 0:
3615 folded_arg0 = folded_arg;
3616 const_arg0 = const_arg;
3617 mode_arg0 = mode_arg;
3618 break;
3619 case 1:
3620 folded_arg1 = folded_arg;
3621 const_arg1 = const_arg;
3622 break;
3623 case 2:
3624 const_arg2 = const_arg;
3625 break;
3626 }
3627
3628 /* Pick the least expensive of the folded argument and an
3629 equivalent constant argument. */
3630 if (const_arg == 0 || const_arg == folded_arg
f2fa288f 3631 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
7afe21cc
RK
3632 cheap_arg = folded_arg, expensive_arg = const_arg;
3633 else
3634 cheap_arg = const_arg, expensive_arg = folded_arg;
3635
3636 /* Try to replace the operand with the cheapest of the two
3637 possibilities. If it doesn't work and this is either of the first
3638 two operands of a commutative operation, try swapping them.
3639 If THAT fails, try the more expensive, provided it is cheaper
3640 than what is already there. */
3641
3642 if (cheap_arg == XEXP (x, i))
3643 continue;
3644
3645 if (insn == 0 && ! copied)
3646 {
3647 x = copy_rtx (x);
3648 copied = 1;
3649 }
3650
f2fa288f
RH
3651 /* Order the replacements from cheapest to most expensive. */
3652 replacements[0] = cheap_arg;
3653 replacements[1] = expensive_arg;
3654
68252e27 3655 for (j = 0; j < 2 && replacements[j]; j++)
7afe21cc 3656 {
f2fa288f
RH
3657 int new_cost = COST_IN (replacements[j], code);
3658
3659 /* Stop if what existed before was cheaper. Prefer constants
3660 in the case of a tie. */
3661 if (new_cost > old_cost
3662 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3663 break;
3664
7afe21cc
RK
3665 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3666 break;
3667
ec8e098d
PB
3668 if (GET_RTX_CLASS (code) == RTX_COMM_COMPARE
3669 || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
7afe21cc
RK
3670 {
3671 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3672 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3673
3674 if (apply_change_group ())
3675 {
3676 /* Swap them back to be invalid so that this loop can
3677 continue and flag them to be swapped back later. */
3678 rtx tem;
3679
3680 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3681 XEXP (x, 1) = tem;
3682 must_swap = 1;
3683 break;
3684 }
3685 }
3686 }
3687 }
3688
2d8b0f3a
JL
3689 else
3690 {
3691 if (fmt[i] == 'E')
3692 /* Don't try to fold inside of a vector of expressions.
3693 Doing nothing is harmless. */
e49a1d2e 3694 {;}
2d8b0f3a 3695 }
7afe21cc
RK
3696
3697 /* If a commutative operation, place a constant integer as the second
3698 operand unless the first operand is also a constant integer. Otherwise,
3699 place any constant second unless the first operand is also a constant. */
3700
ec8e098d 3701 if (COMMUTATIVE_P (x))
7afe21cc 3702 {
c715abdd
RS
3703 if (must_swap
3704 || swap_commutative_operands_p (const_arg0 ? const_arg0
3705 : XEXP (x, 0),
3706 const_arg1 ? const_arg1
3707 : XEXP (x, 1)))
7afe21cc 3708 {
b3694847 3709 rtx tem = XEXP (x, 0);
7afe21cc
RK
3710
3711 if (insn == 0 && ! copied)
3712 {
3713 x = copy_rtx (x);
3714 copied = 1;
3715 }
3716
3717 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3718 validate_change (insn, &XEXP (x, 1), tem, 1);
3719 if (apply_change_group ())
3720 {
3721 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3722 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3723 }
3724 }
3725 }
3726
3727 /* If X is an arithmetic operation, see if we can simplify it. */
3728
3729 switch (GET_RTX_CLASS (code))
3730 {
ec8e098d 3731 case RTX_UNARY:
67a37737
RK
3732 {
3733 int is_const = 0;
3734
3735 /* We can't simplify extension ops unless we know the
3736 original mode. */
3737 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3738 && mode_arg0 == VOIDmode)
3739 break;
3740
3741 /* If we had a CONST, strip it off and put it back later if we
3742 fold. */
3743 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3744 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3745
3746 new = simplify_unary_operation (code, mode,
3747 const_arg0 ? const_arg0 : folded_arg0,
3748 mode_arg0);
3749 if (new != 0 && is_const)
38a448ca 3750 new = gen_rtx_CONST (mode, new);
67a37737 3751 }
7afe21cc 3752 break;
278a83b2 3753
ec8e098d
PB
3754 case RTX_COMPARE:
3755 case RTX_COMM_COMPARE:
7afe21cc
RK
3756 /* See what items are actually being compared and set FOLDED_ARG[01]
3757 to those values and CODE to the actual comparison code. If any are
3758 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3759 do anything if both operands are already known to be constant. */
3760
3761 if (const_arg0 == 0 || const_arg1 == 0)
3762 {
3763 struct table_elt *p0, *p1;
d6edb99e 3764 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
13c9910f 3765 enum machine_mode mode_arg1;
c610adec
RK
3766
3767#ifdef FLOAT_STORE_FLAG_VALUE
c7c955ee 3768 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
c610adec 3769 {
d6edb99e 3770 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
68252e27 3771 (FLOAT_STORE_FLAG_VALUE (mode), mode));
d6edb99e 3772 false_rtx = CONST0_RTX (mode);
c610adec
RK
3773 }
3774#endif
7afe21cc 3775
13c9910f
RS
3776 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3777 &mode_arg0, &mode_arg1);
7afe21cc
RK
3778 const_arg0 = equiv_constant (folded_arg0);
3779 const_arg1 = equiv_constant (folded_arg1);
3780
13c9910f
RS
3781 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3782 what kinds of things are being compared, so we can't do
3783 anything with this comparison. */
7afe21cc
RK
3784
3785 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3786 break;
3787
0f41302f
MS
3788 /* If we do not now have two constants being compared, see
3789 if we can nevertheless deduce some things about the
3790 comparison. */
7afe21cc
RK
3791 if (const_arg0 == 0 || const_arg1 == 0)
3792 {
4977bab6
ZW
3793 /* Some addresses are known to be nonzero. We don't know
3794 their sign, but equality comparisons are known. */
7afe21cc 3795 if (const_arg1 == const0_rtx
4977bab6 3796 && nonzero_address_p (folded_arg0))
7afe21cc
RK
3797 {
3798 if (code == EQ)
d6edb99e 3799 return false_rtx;
7afe21cc 3800 else if (code == NE)
d6edb99e 3801 return true_rtx;
7afe21cc
RK
3802 }
3803
fd13313f
JH
3804 /* See if the two operands are the same. */
3805
3806 if (folded_arg0 == folded_arg1
3807 || (GET_CODE (folded_arg0) == REG
3808 && GET_CODE (folded_arg1) == REG
3809 && (REG_QTY (REGNO (folded_arg0))
3810 == REG_QTY (REGNO (folded_arg1))))
3811 || ((p0 = lookup (folded_arg0,
3812 (safe_hash (folded_arg0, mode_arg0)
3813 & HASH_MASK), mode_arg0))
3814 && (p1 = lookup (folded_arg1,
3815 (safe_hash (folded_arg1, mode_arg0)
3816 & HASH_MASK), mode_arg0))
3817 && p0->first_same_value == p1->first_same_value))
3818 {
71925bc0
RS
3819 /* Sadly two equal NaNs are not equivalent. */
3820 if (!HONOR_NANS (mode_arg0))
3821 return ((code == EQ || code == LE || code == GE
3822 || code == LEU || code == GEU || code == UNEQ
3823 || code == UNLE || code == UNGE
3824 || code == ORDERED)
3825 ? true_rtx : false_rtx);
3826 /* Take care for the FP compares we can resolve. */
3827 if (code == UNEQ || code == UNLE || code == UNGE)
3828 return true_rtx;
3829 if (code == LTGT || code == LT || code == GT)
3830 return false_rtx;
fd13313f 3831 }
7afe21cc
RK
3832
3833 /* If FOLDED_ARG0 is a register, see if the comparison we are
3834 doing now is either the same as we did before or the reverse
3835 (we only check the reverse if not floating-point). */
3836 else if (GET_CODE (folded_arg0) == REG)
3837 {
30f72379 3838 int qty = REG_QTY (REGNO (folded_arg0));
7afe21cc 3839
1bb98cec
DM
3840 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3841 {
3842 struct qty_table_elem *ent = &qty_table[qty];
3843
3844 if ((comparison_dominates_p (ent->comparison_code, code)
1eb8759b
RH
3845 || (! FLOAT_MODE_P (mode_arg0)
3846 && comparison_dominates_p (ent->comparison_code,
3847 reverse_condition (code))))
1bb98cec
DM
3848 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3849 || (const_arg1
3850 && rtx_equal_p (ent->comparison_const,
3851 const_arg1))
3852 || (GET_CODE (folded_arg1) == REG
3853 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3854 return (comparison_dominates_p (ent->comparison_code, code)
d6edb99e 3855 ? true_rtx : false_rtx);
1bb98cec 3856 }
7afe21cc
RK
3857 }
3858 }
3859 }
3860
3861 /* If we are comparing against zero, see if the first operand is
3862 equivalent to an IOR with a constant. If so, we may be able to
3863 determine the result of this comparison. */
3864
3865 if (const_arg1 == const0_rtx)
3866 {
3867 rtx y = lookup_as_function (folded_arg0, IOR);
3868 rtx inner_const;
3869
3870 if (y != 0
3871 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3872 && GET_CODE (inner_const) == CONST_INT
3873 && INTVAL (inner_const) != 0)
3874 {
3875 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
906c4e36
RK
3876 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
3877 && (INTVAL (inner_const)
3878 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
d6edb99e 3879 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
c610adec
RK
3880
3881#ifdef FLOAT_STORE_FLAG_VALUE
c7c955ee 3882 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
c610adec 3883 {
d6edb99e 3884 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
12530dbe 3885 (FLOAT_STORE_FLAG_VALUE (mode), mode));
d6edb99e 3886 false_rtx = CONST0_RTX (mode);
c610adec
RK
3887 }
3888#endif
7afe21cc
RK
3889
3890 switch (code)
3891 {
3892 case EQ:
d6edb99e 3893 return false_rtx;
7afe21cc 3894 case NE:
d6edb99e 3895 return true_rtx;
7afe21cc
RK
3896 case LT: case LE:
3897 if (has_sign)
d6edb99e 3898 return true_rtx;
7afe21cc
RK
3899 break;
3900 case GT: case GE:
3901 if (has_sign)
d6edb99e 3902 return false_rtx;
7afe21cc 3903 break;
e9a25f70
JL
3904 default:
3905 break;
7afe21cc
RK
3906 }
3907 }
3908 }
3909
7ce3e360 3910 new = simplify_relational_operation (code, mode,
95d0e5f1
AO
3911 (mode_arg0 != VOIDmode
3912 ? mode_arg0
3913 : (GET_MODE (const_arg0
3914 ? const_arg0
3915 : folded_arg0)
3916 != VOIDmode)
3917 ? GET_MODE (const_arg0
3918 ? const_arg0
3919 : folded_arg0)
3920 : GET_MODE (const_arg1
3921 ? const_arg1
3922 : folded_arg1)),
7afe21cc
RK
3923 const_arg0 ? const_arg0 : folded_arg0,
3924 const_arg1 ? const_arg1 : folded_arg1);
3925 break;
3926
ec8e098d
PB
3927 case RTX_BIN_ARITH:
3928 case RTX_COMM_ARITH:
7afe21cc
RK
3929 switch (code)
3930 {
3931 case PLUS:
3932 /* If the second operand is a LABEL_REF, see if the first is a MINUS
3933 with that LABEL_REF as its second operand. If so, the result is
3934 the first operand of that MINUS. This handles switches with an
3935 ADDR_DIFF_VEC table. */
3936 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
3937 {
e650cbda
RK
3938 rtx y
3939 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
ddc356e8 3940 : lookup_as_function (folded_arg0, MINUS);
7afe21cc
RK
3941
3942 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3943 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
3944 return XEXP (y, 0);
67a37737
RK
3945
3946 /* Now try for a CONST of a MINUS like the above. */
e650cbda
RK
3947 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
3948 : lookup_as_function (folded_arg0, CONST))) != 0
67a37737
RK
3949 && GET_CODE (XEXP (y, 0)) == MINUS
3950 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
ddc356e8 3951 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
67a37737 3952 return XEXP (XEXP (y, 0), 0);
7afe21cc 3953 }
c2cc0778 3954
e650cbda
RK
3955 /* Likewise if the operands are in the other order. */
3956 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
3957 {
3958 rtx y
3959 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
ddc356e8 3960 : lookup_as_function (folded_arg1, MINUS);
e650cbda
RK
3961
3962 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3963 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
3964 return XEXP (y, 0);
3965
3966 /* Now try for a CONST of a MINUS like the above. */
3967 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
3968 : lookup_as_function (folded_arg1, CONST))) != 0
3969 && GET_CODE (XEXP (y, 0)) == MINUS
3970 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
ddc356e8 3971 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
e650cbda
RK
3972 return XEXP (XEXP (y, 0), 0);
3973 }
3974
c2cc0778
RK
3975 /* If second operand is a register equivalent to a negative
3976 CONST_INT, see if we can find a register equivalent to the
3977 positive constant. Make a MINUS if so. Don't do this for
5d595063 3978 a non-negative constant since we might then alternate between
a1f300c0 3979 choosing positive and negative constants. Having the positive
5d595063
RK
3980 constant previously-used is the more common case. Be sure
3981 the resulting constant is non-negative; if const_arg1 were
3982 the smallest negative number this would overflow: depending
3983 on the mode, this would either just be the same value (and
3984 hence not save anything) or be incorrect. */
3985 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
3986 && INTVAL (const_arg1) < 0
4741f6ad
JL
3987 /* This used to test
3988
ddc356e8 3989 -INTVAL (const_arg1) >= 0
4741f6ad
JL
3990
3991 But The Sun V5.0 compilers mis-compiled that test. So
3992 instead we test for the problematic value in a more direct
3993 manner and hope the Sun compilers get it correct. */
5c45a8ac
KG
3994 && INTVAL (const_arg1) !=
3995 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
5d595063 3996 && GET_CODE (folded_arg1) == REG)
c2cc0778 3997 {
ddc356e8 3998 rtx new_const = GEN_INT (-INTVAL (const_arg1));
c2cc0778 3999 struct table_elt *p
9b1549b8 4000 = lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
c2cc0778
RK
4001 mode);
4002
4003 if (p)
4004 for (p = p->first_same_value; p; p = p->next_same_value)
4005 if (GET_CODE (p->exp) == REG)
0cedb36c
JL
4006 return simplify_gen_binary (MINUS, mode, folded_arg0,
4007 canon_reg (p->exp, NULL_RTX));
c2cc0778 4008 }
13c9910f
RS
4009 goto from_plus;
4010
4011 case MINUS:
4012 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4013 If so, produce (PLUS Z C2-C). */
4014 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4015 {
4016 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4017 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
f3becefd
RK
4018 return fold_rtx (plus_constant (copy_rtx (y),
4019 -INTVAL (const_arg1)),
a3b5c94a 4020 NULL_RTX);
13c9910f 4021 }
7afe21cc 4022
ddc356e8 4023 /* Fall through. */
7afe21cc 4024
13c9910f 4025 from_plus:
7afe21cc
RK
4026 case SMIN: case SMAX: case UMIN: case UMAX:
4027 case IOR: case AND: case XOR:
f930bfd0 4028 case MULT:
7afe21cc
RK
4029 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4030 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4031 is known to be of similar form, we may be able to replace the
4032 operation with a combined operation. This may eliminate the
4033 intermediate operation if every use is simplified in this way.
4034 Note that the similar optimization done by combine.c only works
4035 if the intermediate operation's result has only one reference. */
4036
4037 if (GET_CODE (folded_arg0) == REG
4038 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4039 {
4040 int is_shift
4041 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4042 rtx y = lookup_as_function (folded_arg0, code);
4043 rtx inner_const;
4044 enum rtx_code associate_code;
4045 rtx new_const;
4046
4047 if (y == 0
4048 || 0 == (inner_const
4049 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4050 || GET_CODE (inner_const) != CONST_INT
4051 /* If we have compiled a statement like
4052 "if (x == (x & mask1))", and now are looking at
4053 "x & mask2", we will have a case where the first operand
4054 of Y is the same as our first operand. Unless we detect
4055 this case, an infinite loop will result. */
4056 || XEXP (y, 0) == folded_arg0)
4057 break;
4058
4059 /* Don't associate these operations if they are a PLUS with the
4060 same constant and it is a power of two. These might be doable
4061 with a pre- or post-increment. Similarly for two subtracts of
4062 identical powers of two with post decrement. */
4063
213d5fbc 4064 if (code == PLUS && const_arg1 == inner_const
940da324
JL
4065 && ((HAVE_PRE_INCREMENT
4066 && exact_log2 (INTVAL (const_arg1)) >= 0)
4067 || (HAVE_POST_INCREMENT
4068 && exact_log2 (INTVAL (const_arg1)) >= 0)
4069 || (HAVE_PRE_DECREMENT
4070 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4071 || (HAVE_POST_DECREMENT
4072 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
7afe21cc
RK
4073 break;
4074
4075 /* Compute the code used to compose the constants. For example,
f930bfd0 4076 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
7afe21cc 4077
f930bfd0 4078 associate_code = (is_shift || code == MINUS ? PLUS : code);
7afe21cc
RK
4079
4080 new_const = simplify_binary_operation (associate_code, mode,
4081 const_arg1, inner_const);
4082
4083 if (new_const == 0)
4084 break;
4085
4086 /* If we are associating shift operations, don't let this
4908e508
RS
4087 produce a shift of the size of the object or larger.
4088 This could occur when we follow a sign-extend by a right
4089 shift on a machine that does a sign-extend as a pair
4090 of shifts. */
7afe21cc
RK
4091
4092 if (is_shift && GET_CODE (new_const) == CONST_INT
4908e508
RS
4093 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4094 {
4095 /* As an exception, we can turn an ASHIFTRT of this
4096 form into a shift of the number of bits - 1. */
4097 if (code == ASHIFTRT)
4098 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4099 else
4100 break;
4101 }
7afe21cc
RK
4102
4103 y = copy_rtx (XEXP (y, 0));
4104
4105 /* If Y contains our first operand (the most common way this
4106 can happen is if Y is a MEM), we would do into an infinite
4107 loop if we tried to fold it. So don't in that case. */
4108
4109 if (! reg_mentioned_p (folded_arg0, y))
4110 y = fold_rtx (y, insn);
4111
0cedb36c 4112 return simplify_gen_binary (code, mode, y, new_const);
7afe21cc 4113 }
e9a25f70
JL
4114 break;
4115
f930bfd0
JW
4116 case DIV: case UDIV:
4117 /* ??? The associative optimization performed immediately above is
4118 also possible for DIV and UDIV using associate_code of MULT.
4119 However, we would need extra code to verify that the
4120 multiplication does not overflow, that is, there is no overflow
4121 in the calculation of new_const. */
4122 break;
4123
e9a25f70
JL
4124 default:
4125 break;
7afe21cc
RK
4126 }
4127
4128 new = simplify_binary_operation (code, mode,
4129 const_arg0 ? const_arg0 : folded_arg0,
4130 const_arg1 ? const_arg1 : folded_arg1);
4131 break;
4132
ec8e098d 4133 case RTX_OBJ:
7afe21cc
RK
4134 /* (lo_sum (high X) X) is simply X. */
4135 if (code == LO_SUM && const_arg0 != 0
4136 && GET_CODE (const_arg0) == HIGH
4137 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4138 return const_arg1;
4139 break;
4140
ec8e098d
PB
4141 case RTX_TERNARY:
4142 case RTX_BITFIELD_OPS:
7afe21cc
RK
4143 new = simplify_ternary_operation (code, mode, mode_arg0,
4144 const_arg0 ? const_arg0 : folded_arg0,
4145 const_arg1 ? const_arg1 : folded_arg1,
4146 const_arg2 ? const_arg2 : XEXP (x, 2));
4147 break;
ee5332b8 4148
ec8e098d 4149 case RTX_EXTRA:
34ee7f82 4150 /* Eliminate CONSTANT_P_RTX if its constant. */
ee5332b8 4151 if (code == CONSTANT_P_RTX)
34ee7f82
RS
4152 {
4153 if (const_arg0)
4154 return const1_rtx;
41559112 4155 if (optimize == 0 || !flag_gcse)
34ee7f82
RS
4156 return const0_rtx;
4157 }
ee5332b8 4158 break;
ec8e098d
PB
4159
4160 default:
4161 break;
7afe21cc
RK
4162 }
4163
4164 return new ? new : x;
4165}
4166\f
4167/* Return a constant value currently equivalent to X.
4168 Return 0 if we don't know one. */
4169
4170static rtx
7080f735 4171equiv_constant (rtx x)
7afe21cc
RK
4172{
4173 if (GET_CODE (x) == REG
1bb98cec
DM
4174 && REGNO_QTY_VALID_P (REGNO (x)))
4175 {
4176 int x_q = REG_QTY (REGNO (x));
4177 struct qty_table_elem *x_ent = &qty_table[x_q];
4178
4179 if (x_ent->const_rtx)
4de249d9 4180 x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
1bb98cec 4181 }
7afe21cc 4182
2ce5e1b4 4183 if (x == 0 || CONSTANT_P (x))
7afe21cc
RK
4184 return x;
4185
fc3ffe83
RK
4186 /* If X is a MEM, try to fold it outside the context of any insn to see if
4187 it might be equivalent to a constant. That handles the case where it
4188 is a constant-pool reference. Then try to look it up in the hash table
4189 in case it is something whose value we have seen before. */
4190
4191 if (GET_CODE (x) == MEM)
4192 {
4193 struct table_elt *elt;
4194
906c4e36 4195 x = fold_rtx (x, NULL_RTX);
fc3ffe83
RK
4196 if (CONSTANT_P (x))
4197 return x;
4198
9b1549b8 4199 elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
fc3ffe83
RK
4200 if (elt == 0)
4201 return 0;
4202
4203 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4204 if (elt->is_const && CONSTANT_P (elt->exp))
4205 return elt->exp;
4206 }
4207
7afe21cc
RK
4208 return 0;
4209}
4210\f
4211/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4212 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4213 least-significant part of X.
278a83b2 4214 MODE specifies how big a part of X to return.
7afe21cc
RK
4215
4216 If the requested operation cannot be done, 0 is returned.
4217
4de249d9 4218 This is similar to gen_lowpart_general in emit-rtl.c. */
7afe21cc
RK
4219
4220rtx
7080f735 4221gen_lowpart_if_possible (enum machine_mode mode, rtx x)
7afe21cc
RK
4222{
4223 rtx result = gen_lowpart_common (mode, x);
4224
4225 if (result)
4226 return result;
4227 else if (GET_CODE (x) == MEM)
4228 {
4229 /* This is the only other case we handle. */
b3694847 4230 int offset = 0;
7afe21cc
RK
4231 rtx new;
4232
f76b9db2
ILT
4233 if (WORDS_BIG_ENDIAN)
4234 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4235 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4236 if (BYTES_BIG_ENDIAN)
f1ec5147
RK
4237 /* Adjust the address so that the address-after-the-data is
4238 unchanged. */
4239 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4240 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4241
4242 new = adjust_address_nv (x, mode, offset);
7afe21cc
RK
4243 if (! memory_address_p (mode, XEXP (new, 0)))
4244 return 0;
f1ec5147 4245
7afe21cc
RK
4246 return new;
4247 }
4248 else
4249 return 0;
4250}
4251\f
4252/* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4253 branch. It will be zero if not.
4254
4255 In certain cases, this can cause us to add an equivalence. For example,
278a83b2 4256 if we are following the taken case of
7080f735 4257 if (i == 2)
7afe21cc
RK
4258 we can add the fact that `i' and '2' are now equivalent.
4259
4260 In any case, we can record that this comparison was passed. If the same
4261 comparison is seen later, we will know its value. */
4262
4263static void
7080f735 4264record_jump_equiv (rtx insn, int taken)
7afe21cc
RK
4265{
4266 int cond_known_true;
4267 rtx op0, op1;
7f1c097d 4268 rtx set;
13c9910f 4269 enum machine_mode mode, mode0, mode1;
7afe21cc
RK
4270 int reversed_nonequality = 0;
4271 enum rtx_code code;
4272
4273 /* Ensure this is the right kind of insn. */
7f1c097d 4274 if (! any_condjump_p (insn))
7afe21cc 4275 return;
7f1c097d 4276 set = pc_set (insn);
7afe21cc
RK
4277
4278 /* See if this jump condition is known true or false. */
4279 if (taken)
7f1c097d 4280 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
7afe21cc 4281 else
7f1c097d 4282 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
7afe21cc
RK
4283
4284 /* Get the type of comparison being done and the operands being compared.
4285 If we had to reverse a non-equality condition, record that fact so we
4286 know that it isn't valid for floating-point. */
7f1c097d
JH
4287 code = GET_CODE (XEXP (SET_SRC (set), 0));
4288 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4289 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
7afe21cc 4290
13c9910f 4291 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
7afe21cc
RK
4292 if (! cond_known_true)
4293 {
261efdef 4294 code = reversed_comparison_code_parts (code, op0, op1, insn);
1eb8759b
RH
4295
4296 /* Don't remember if we can't find the inverse. */
4297 if (code == UNKNOWN)
4298 return;
7afe21cc
RK
4299 }
4300
4301 /* The mode is the mode of the non-constant. */
13c9910f
RS
4302 mode = mode0;
4303 if (mode1 != VOIDmode)
4304 mode = mode1;
7afe21cc
RK
4305
4306 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4307}
4308
/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
   REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
   Make any useful entries we can with that information.  Called from
   above function and called recursively.

   For an EQ of two registers (non-FP) the two hash-table equivalence
   classes are merged; otherwise the comparison itself is recorded in
   OP0's qty_table entry so a later identical (or dominated) comparison
   can be resolved.  */

static void
record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
		  rtx op1, int reversed_nonequality)
{
  unsigned op0_hash, op1_hash;
  int op0_in_memory, op1_in_memory;
  struct table_elt *op0_elt, *op1_elt;

  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
     we know that they are also equal in the smaller mode (this is also
     true for all smaller modes whether or not there is a SUBREG, but
     is not worth testing for with no SUBREG).  */

  /* Note that GET_MODE (op0) may not equal MODE.  */
  if (code == EQ && GET_CODE (op0) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (op0))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = gen_lowpart (inner_mode, op1);

      /* If no low part of OP1 exists, wrap it in a SUBREG instead.  */
      record_jump_cond (code, mode, SUBREG_REG (op0),
			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
			reversed_nonequality);
    }

  if (code == EQ && GET_CODE (op1) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (op1))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = gen_lowpart (inner_mode, op0);

      record_jump_cond (code, mode, SUBREG_REG (op1),
			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
			reversed_nonequality);
    }

  /* Similarly, if this is an NE comparison, and either is a SUBREG
     making a smaller mode, we know the whole thing is also NE.  */

  /* Note that GET_MODE (op0) may not equal MODE;
     if we test MODE instead, we can get an infinite recursion
     alternating between two modes each wider than MODE.  */

  if (code == NE && GET_CODE (op0) == SUBREG
      && subreg_lowpart_p (op0)
      && (GET_MODE_SIZE (GET_MODE (op0))
	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = gen_lowpart (inner_mode, op1);

      record_jump_cond (code, mode, SUBREG_REG (op0),
			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
			reversed_nonequality);
    }

  if (code == NE && GET_CODE (op1) == SUBREG
      && subreg_lowpart_p (op1)
      && (GET_MODE_SIZE (GET_MODE (op1))
	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = gen_lowpart (inner_mode, op0);

      record_jump_cond (code, mode, SUBREG_REG (op1),
			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
			reversed_nonequality);
    }

  /* Hash both operands.  HASH communicates through the globals
     do_not_record and hash_arg_in_memory, so reset them before each
     call and capture them immediately after.  */

  do_not_record = 0;
  hash_arg_in_memory = 0;
  op0_hash = HASH (op0, mode);
  op0_in_memory = hash_arg_in_memory;

  if (do_not_record)
    return;

  do_not_record = 0;
  hash_arg_in_memory = 0;
  op1_hash = HASH (op1, mode);
  op1_in_memory = hash_arg_in_memory;

  if (do_not_record)
    return;

  /* Look up both operands.  */
  op0_elt = lookup (op0, op0_hash, mode);
  op1_elt = lookup (op1, op1_hash, mode);

  /* If both operands are already equivalent or if they are not in the
     table but are identical, do nothing.  */
  if ((op0_elt != 0 && op1_elt != 0
       && op0_elt->first_same_value == op1_elt->first_same_value)
      || op0 == op1 || rtx_equal_p (op0, op1))
    return;

  /* If we aren't setting two things equal all we can do is save this
     comparison.   Similarly if this is floating-point.  In the latter
     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
     If we record the equality, we might inadvertently delete code
     whose intent was to change -0 to +0.  */

  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
    {
      struct qty_table_elem *ent;
      int qty;

      /* If we reversed a floating-point comparison, if OP0 is not a
	 register, or if OP1 is neither a register or constant, we can't
	 do anything.  */

      if (GET_CODE (op1) != REG)
	op1 = equiv_constant (op1);

      if ((reversed_nonequality && FLOAT_MODE_P (mode))
	  || GET_CODE (op0) != REG || op1 == 0)
	return;

      /* Put OP0 in the hash table if it isn't already.  This gives it a
	 new quantity number.  */
      if (op0_elt == 0)
	{
	  if (insert_regs (op0, NULL, 0))
	    {
	      /* Inserting OP0 changed register hash codes, so rehash.  */
	      rehash_using_reg (op0);
	      op0_hash = HASH (op0, mode);

	      /* If OP0 is contained in OP1, this changes its hash code
		 as well.  Faster to rehash than to check, except
		 for the simple case of a constant.  */
	      if (! CONSTANT_P (op1))
		op1_hash = HASH (op1,mode);
	    }

	  op0_elt = insert (op0, NULL, op0_hash, mode);
	  op0_elt->in_memory = op0_in_memory;
	}

      /* Record the comparison in OP0's quantity-table entry.  */
      qty = REG_QTY (REGNO (op0));
      ent = &qty_table[qty];

      ent->comparison_code = code;
      if (GET_CODE (op1) == REG)
	{
	  /* Look it up again--in case op0 and op1 are the same.  */
	  op1_elt = lookup (op1, op1_hash, mode);

	  /* Put OP1 in the hash table so it gets a new quantity number.  */
	  if (op1_elt == 0)
	    {
	      if (insert_regs (op1, NULL, 0))
		{
		  rehash_using_reg (op1);
		  op1_hash = HASH (op1, mode);
		}

	      op1_elt = insert (op1, NULL, op1_hash, mode);
	      op1_elt->in_memory = op1_in_memory;
	    }

	  /* Register comparand: remember it by quantity number.  */
	  ent->comparison_const = NULL_RTX;
	  ent->comparison_qty = REG_QTY (REGNO (op1));
	}
      else
	{
	  /* Constant comparand: remember the constant itself.  */
	  ent->comparison_const = op1;
	  ent->comparison_qty = -1;
	}

      return;
    }

  /* If either side is still missing an equivalence, make it now,
     then merge the equivalences.  */

  if (op0_elt == 0)
    {
      if (insert_regs (op0, NULL, 0))
	{
	  rehash_using_reg (op0);
	  op0_hash = HASH (op0, mode);
	}

      op0_elt = insert (op0, NULL, op0_hash, mode);
      op0_elt->in_memory = op0_in_memory;
    }

  if (op1_elt == 0)
    {
      if (insert_regs (op1, NULL, 0))
	{
	  rehash_using_reg (op1);
	  op1_hash = HASH (op1, mode);
	}

      op1_elt = insert (op1, NULL, op1_hash, mode);
      op1_elt->in_memory = op1_in_memory;
    }

  merge_equiv_classes (op0_elt, op1_elt);
  last_jump_equiv_class = op0_elt;
}
4520\f
4521/* CSE processing for one instruction.
 4522   First simplify sources and addresses of all assignments
 4523   in the instruction, using previously-computed equivalent values.
4524 Then install the new sources and destinations in the table
278a83b2 4525 of available values.
7afe21cc 4526
1ed0205e
VM
4527 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4528 the insn. It means that INSN is inside libcall block. In this
ddc356e8 4529 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
7afe21cc
RK
4530
4531/* Data on one SET contained in the instruction. */
4532
struct set
{
  /* The SET rtx itself. */
  rtx rtl;
  /* The SET_SRC of the rtx (the original value, if it is changing). */
  rtx src;
  /* The hash-table element for the SET_SRC of the SET. */
  struct table_elt *src_elt;
  /* Hash value for the SET_SRC. */
  unsigned src_hash;
  /* Hash value for the SET_DEST. */
  unsigned dest_hash;
  /* The SET_DEST, with SUBREG, etc., stripped. */
  rtx inner_dest;
  /* Nonzero if the SET_SRC is in memory. */
  char src_in_memory;
  /* Nonzero if the SET_SRC contains something
     whose value cannot be predicted and understood.  */
  char src_volatile;
  /* Original machine mode, in case it becomes a CONST_INT.
     The size of this field should match the size of the mode
     field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  /* A constant equivalent for SET_SRC, if any. */
  rtx src_const;
  /* Original SET_SRC value used for libcall notes. */
  rtx orig_src;
  /* Hash value of constant equivalent for SET_SRC. */
  unsigned src_const_hash;
  /* Table entry for constant equivalent for SET_SRC, if any. */
  struct table_elt *src_const_elt;
};
4565
4566static void
7080f735 4567cse_insn (rtx insn, rtx libcall_insn)
7afe21cc 4568{
b3694847
SS
4569 rtx x = PATTERN (insn);
4570 int i;
92f9aa51 4571 rtx tem;
b3694847 4572 int n_sets = 0;
7afe21cc 4573
2d8b0f3a 4574#ifdef HAVE_cc0
7afe21cc
RK
4575 /* Records what this insn does to set CC0. */
4576 rtx this_insn_cc0 = 0;
135d84b8 4577 enum machine_mode this_insn_cc0_mode = VOIDmode;
2d8b0f3a 4578#endif
7afe21cc
RK
4579
4580 rtx src_eqv = 0;
4581 struct table_elt *src_eqv_elt = 0;
6a651371
KG
4582 int src_eqv_volatile = 0;
4583 int src_eqv_in_memory = 0;
6a651371 4584 unsigned src_eqv_hash = 0;
7afe21cc 4585
9714cf43 4586 struct set *sets = (struct set *) 0;
7afe21cc
RK
4587
4588 this_insn = insn;
7afe21cc
RK
4589
4590 /* Find all the SETs and CLOBBERs in this instruction.
4591 Record all the SETs in the array `set' and count them.
4592 Also determine whether there is a CLOBBER that invalidates
4593 all memory references, or all references at varying addresses. */
4594
f1e7c95f
RK
4595 if (GET_CODE (insn) == CALL_INSN)
4596 {
4597 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
f474c6f8
AO
4598 {
4599 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4600 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4601 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4602 }
f1e7c95f
RK
4603 }
4604
7afe21cc
RK
4605 if (GET_CODE (x) == SET)
4606 {
703ad42b 4607 sets = alloca (sizeof (struct set));
7afe21cc
RK
4608 sets[0].rtl = x;
4609
4610 /* Ignore SETs that are unconditional jumps.
4611 They never need cse processing, so this does not hurt.
4612 The reason is not efficiency but rather
4613 so that we can test at the end for instructions
4614 that have been simplified to unconditional jumps
4615 and not be misled by unchanged instructions
4616 that were unconditional jumps to begin with. */
4617 if (SET_DEST (x) == pc_rtx
4618 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4619 ;
4620
4621 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4622 The hard function value register is used only once, to copy to
4623 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4624 Ensure we invalidate the destination register. On the 80386 no
7722328e 4625 other code would invalidate it since it is a fixed_reg.
0f41302f 4626 We need not check the return of apply_change_group; see canon_reg. */
7afe21cc
RK
4627
4628 else if (GET_CODE (SET_SRC (x)) == CALL)
4629 {
4630 canon_reg (SET_SRC (x), insn);
77fa0940 4631 apply_change_group ();
7afe21cc 4632 fold_rtx (SET_SRC (x), insn);
bb4034b3 4633 invalidate (SET_DEST (x), VOIDmode);
7afe21cc
RK
4634 }
4635 else
4636 n_sets = 1;
4637 }
4638 else if (GET_CODE (x) == PARALLEL)
4639 {
b3694847 4640 int lim = XVECLEN (x, 0);
7afe21cc 4641
703ad42b 4642 sets = alloca (lim * sizeof (struct set));
7afe21cc
RK
4643
4644 /* Find all regs explicitly clobbered in this insn,
4645 and ensure they are not replaced with any other regs
4646 elsewhere in this insn.
4647 When a reg that is clobbered is also used for input,
4648 we should presume that that is for a reason,
4649 and we should not substitute some other register
4650 which is not supposed to be clobbered.
4651 Therefore, this loop cannot be merged into the one below
830a38ee 4652 because a CALL may precede a CLOBBER and refer to the
7afe21cc
RK
4653 value clobbered. We must not let a canonicalization do
4654 anything in that case. */
4655 for (i = 0; i < lim; i++)
4656 {
b3694847 4657 rtx y = XVECEXP (x, 0, i);
2708da92
RS
4658 if (GET_CODE (y) == CLOBBER)
4659 {
4660 rtx clobbered = XEXP (y, 0);
4661
4662 if (GET_CODE (clobbered) == REG
4663 || GET_CODE (clobbered) == SUBREG)
bb4034b3 4664 invalidate (clobbered, VOIDmode);
2708da92
RS
4665 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4666 || GET_CODE (clobbered) == ZERO_EXTRACT)
bb4034b3 4667 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
2708da92 4668 }
7afe21cc 4669 }
278a83b2 4670
7afe21cc
RK
4671 for (i = 0; i < lim; i++)
4672 {
b3694847 4673 rtx y = XVECEXP (x, 0, i);
7afe21cc
RK
4674 if (GET_CODE (y) == SET)
4675 {
7722328e
RK
4676 /* As above, we ignore unconditional jumps and call-insns and
4677 ignore the result of apply_change_group. */
7afe21cc
RK
4678 if (GET_CODE (SET_SRC (y)) == CALL)
4679 {
4680 canon_reg (SET_SRC (y), insn);
77fa0940 4681 apply_change_group ();
7afe21cc 4682 fold_rtx (SET_SRC (y), insn);
bb4034b3 4683 invalidate (SET_DEST (y), VOIDmode);
7afe21cc
RK
4684 }
4685 else if (SET_DEST (y) == pc_rtx
4686 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4687 ;
4688 else
4689 sets[n_sets++].rtl = y;
4690 }
4691 else if (GET_CODE (y) == CLOBBER)
4692 {
9ae8ffe7 4693 /* If we clobber memory, canon the address.
7afe21cc
RK
4694 This does nothing when a register is clobbered
4695 because we have already invalidated the reg. */
4696 if (GET_CODE (XEXP (y, 0)) == MEM)
9ae8ffe7 4697 canon_reg (XEXP (y, 0), NULL_RTX);
7afe21cc
RK
4698 }
4699 else if (GET_CODE (y) == USE
4700 && ! (GET_CODE (XEXP (y, 0)) == REG
4701 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
906c4e36 4702 canon_reg (y, NULL_RTX);
7afe21cc
RK
4703 else if (GET_CODE (y) == CALL)
4704 {
7722328e
RK
4705 /* The result of apply_change_group can be ignored; see
4706 canon_reg. */
7afe21cc 4707 canon_reg (y, insn);
77fa0940 4708 apply_change_group ();
7afe21cc
RK
4709 fold_rtx (y, insn);
4710 }
4711 }
4712 }
4713 else if (GET_CODE (x) == CLOBBER)
4714 {
4715 if (GET_CODE (XEXP (x, 0)) == MEM)
9ae8ffe7 4716 canon_reg (XEXP (x, 0), NULL_RTX);
7afe21cc
RK
4717 }
4718
4719 /* Canonicalize a USE of a pseudo register or memory location. */
4720 else if (GET_CODE (x) == USE
4721 && ! (GET_CODE (XEXP (x, 0)) == REG
4722 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
906c4e36 4723 canon_reg (XEXP (x, 0), NULL_RTX);
7afe21cc
RK
4724 else if (GET_CODE (x) == CALL)
4725 {
7722328e 4726 /* The result of apply_change_group can be ignored; see canon_reg. */
7afe21cc 4727 canon_reg (x, insn);
77fa0940 4728 apply_change_group ();
7afe21cc
RK
4729 fold_rtx (x, insn);
4730 }
4731
7b3ab05e
JW
4732 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4733 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4734 is handled specially for this case, and if it isn't set, then there will
9faa82d8 4735 be no equivalence for the destination. */
92f9aa51
RK
4736 if (n_sets == 1 && REG_NOTES (insn) != 0
4737 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
7b3ab05e
JW
4738 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4739 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
7b668f9e
JJ
4740 {
4741 src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4742 XEXP (tem, 0) = src_eqv;
4743 }
7afe21cc
RK
4744
4745 /* Canonicalize sources and addresses of destinations.
4746 We do this in a separate pass to avoid problems when a MATCH_DUP is
4747 present in the insn pattern. In that case, we want to ensure that
4748 we don't break the duplicate nature of the pattern. So we will replace
4749 both operands at the same time. Otherwise, we would fail to find an
4750 equivalent substitution in the loop calling validate_change below.
7afe21cc
RK
4751
4752 We used to suppress canonicalization of DEST if it appears in SRC,
77fa0940 4753 but we don't do this any more. */
7afe21cc
RK
4754
4755 for (i = 0; i < n_sets; i++)
4756 {
4757 rtx dest = SET_DEST (sets[i].rtl);
4758 rtx src = SET_SRC (sets[i].rtl);
4759 rtx new = canon_reg (src, insn);
58873255 4760 int insn_code;
7afe21cc 4761
47841d1b 4762 sets[i].orig_src = src;
77fa0940
RK
4763 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4764 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4765 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
58873255 4766 || (insn_code = recog_memoized (insn)) < 0
a995e389 4767 || insn_data[insn_code].n_dups > 0)
77fa0940 4768 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
7afe21cc
RK
4769 else
4770 SET_SRC (sets[i].rtl) = new;
4771
4772 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4773 {
4774 validate_change (insn, &XEXP (dest, 1),
77fa0940 4775 canon_reg (XEXP (dest, 1), insn), 1);
7afe21cc 4776 validate_change (insn, &XEXP (dest, 2),
77fa0940 4777 canon_reg (XEXP (dest, 2), insn), 1);
7afe21cc
RK
4778 }
4779
4780 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4781 || GET_CODE (dest) == ZERO_EXTRACT
4782 || GET_CODE (dest) == SIGN_EXTRACT)
4783 dest = XEXP (dest, 0);
4784
4785 if (GET_CODE (dest) == MEM)
4786 canon_reg (dest, insn);
4787 }
4788
77fa0940
RK
4789 /* Now that we have done all the replacements, we can apply the change
4790 group and see if they all work. Note that this will cause some
4791 canonicalizations that would have worked individually not to be applied
4792 because some other canonicalization didn't work, but this should not
278a83b2 4793 occur often.
7722328e
RK
4794
4795 The result of apply_change_group can be ignored; see canon_reg. */
77fa0940
RK
4796
4797 apply_change_group ();
4798
7afe21cc
RK
4799 /* Set sets[i].src_elt to the class each source belongs to.
4800 Detect assignments from or to volatile things
4801 and set set[i] to zero so they will be ignored
4802 in the rest of this function.
4803
4804 Nothing in this loop changes the hash table or the register chains. */
4805
4806 for (i = 0; i < n_sets; i++)
4807 {
b3694847
SS
4808 rtx src, dest;
4809 rtx src_folded;
4810 struct table_elt *elt = 0, *p;
7afe21cc
RK
4811 enum machine_mode mode;
4812 rtx src_eqv_here;
4813 rtx src_const = 0;
4814 rtx src_related = 0;
4815 struct table_elt *src_const_elt = 0;
99a9c946
GS
4816 int src_cost = MAX_COST;
4817 int src_eqv_cost = MAX_COST;
4818 int src_folded_cost = MAX_COST;
4819 int src_related_cost = MAX_COST;
4820 int src_elt_cost = MAX_COST;
4821 int src_regcost = MAX_COST;
4822 int src_eqv_regcost = MAX_COST;
4823 int src_folded_regcost = MAX_COST;
4824 int src_related_regcost = MAX_COST;
4825 int src_elt_regcost = MAX_COST;
da7d8304 4826 /* Set nonzero if we need to call force_const_mem on with the
7afe21cc
RK
4827 contents of src_folded before using it. */
4828 int src_folded_force_flag = 0;
4829
4830 dest = SET_DEST (sets[i].rtl);
4831 src = SET_SRC (sets[i].rtl);
4832
4833 /* If SRC is a constant that has no machine mode,
4834 hash it with the destination's machine mode.
4835 This way we can keep different modes separate. */
4836
4837 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4838 sets[i].mode = mode;
4839
4840 if (src_eqv)
4841 {
4842 enum machine_mode eqvmode = mode;
4843 if (GET_CODE (dest) == STRICT_LOW_PART)
4844 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4845 do_not_record = 0;
4846 hash_arg_in_memory = 0;
2197a88a 4847 src_eqv_hash = HASH (src_eqv, eqvmode);
7afe21cc
RK
4848
4849 /* Find the equivalence class for the equivalent expression. */
4850
4851 if (!do_not_record)
2197a88a 4852 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
7afe21cc
RK
4853
4854 src_eqv_volatile = do_not_record;
4855 src_eqv_in_memory = hash_arg_in_memory;
7afe21cc
RK
4856 }
4857
4858 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4859 value of the INNER register, not the destination. So it is not
3826a3da 4860 a valid substitution for the source. But save it for later. */
7afe21cc
RK
4861 if (GET_CODE (dest) == STRICT_LOW_PART)
4862 src_eqv_here = 0;
4863 else
4864 src_eqv_here = src_eqv;
4865
4866 /* Simplify and foldable subexpressions in SRC. Then get the fully-
4867 simplified result, which may not necessarily be valid. */
4868 src_folded = fold_rtx (src, insn);
4869
e6a125a0
RK
4870#if 0
4871 /* ??? This caused bad code to be generated for the m68k port with -O2.
4872 Suppose src is (CONST_INT -1), and that after truncation src_folded
4873 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4874 At the end we will add src and src_const to the same equivalence
4875 class. We now have 3 and -1 on the same equivalence class. This
4876 causes later instructions to be mis-optimized. */
7afe21cc
RK
4877 /* If storing a constant in a bitfield, pre-truncate the constant
4878 so we will be able to record it later. */
4879 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
4880 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
4881 {
4882 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4883
4884 if (GET_CODE (src) == CONST_INT
4885 && GET_CODE (width) == CONST_INT
906c4e36
RK
4886 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4887 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4888 src_folded
4889 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4890 << INTVAL (width)) - 1));
7afe21cc 4891 }
e6a125a0 4892#endif
7afe21cc
RK
4893
4894 /* Compute SRC's hash code, and also notice if it
4895 should not be recorded at all. In that case,
4896 prevent any further processing of this assignment. */
4897 do_not_record = 0;
4898 hash_arg_in_memory = 0;
7afe21cc
RK
4899
4900 sets[i].src = src;
2197a88a 4901 sets[i].src_hash = HASH (src, mode);
7afe21cc
RK
4902 sets[i].src_volatile = do_not_record;
4903 sets[i].src_in_memory = hash_arg_in_memory;
7afe21cc 4904
50196afa 4905 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
43e72072
JJ
4906 a pseudo, do not record SRC. Using SRC as a replacement for
4907 anything else will be incorrect in that situation. Note that
4908 this usually occurs only for stack slots, in which case all the
4909 RTL would be referring to SRC, so we don't lose any optimization
4910 opportunities by not having SRC in the hash table. */
50196afa
RK
4911
4912 if (GET_CODE (src) == MEM
43e72072 4913 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
50196afa 4914 && GET_CODE (dest) == REG
43e72072 4915 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
50196afa
RK
4916 sets[i].src_volatile = 1;
4917
0dadecf6
RK
4918#if 0
4919 /* It is no longer clear why we used to do this, but it doesn't
4920 appear to still be needed. So let's try without it since this
4921 code hurts cse'ing widened ops. */
9a5a17f3 4922 /* If source is a paradoxical subreg (such as QI treated as an SI),
7afe21cc
RK
4923 treat it as volatile. It may do the work of an SI in one context
4924 where the extra bits are not being used, but cannot replace an SI
4925 in general. */
4926 if (GET_CODE (src) == SUBREG
4927 && (GET_MODE_SIZE (GET_MODE (src))
4928 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
4929 sets[i].src_volatile = 1;
0dadecf6 4930#endif
7afe21cc
RK
4931
4932 /* Locate all possible equivalent forms for SRC. Try to replace
4933 SRC in the insn with each cheaper equivalent.
4934
4935 We have the following types of equivalents: SRC itself, a folded
4936 version, a value given in a REG_EQUAL note, or a value related
4937 to a constant.
4938
4939 Each of these equivalents may be part of an additional class
4940 of equivalents (if more than one is in the table, they must be in
4941 the same class; we check for this).
4942
4943 If the source is volatile, we don't do any table lookups.
4944
4945 We note any constant equivalent for possible later use in a
4946 REG_NOTE. */
4947
4948 if (!sets[i].src_volatile)
2197a88a 4949 elt = lookup (src, sets[i].src_hash, mode);
7afe21cc
RK
4950
4951 sets[i].src_elt = elt;
4952
4953 if (elt && src_eqv_here && src_eqv_elt)
278a83b2
KH
4954 {
4955 if (elt->first_same_value != src_eqv_elt->first_same_value)
7afe21cc
RK
4956 {
4957 /* The REG_EQUAL is indicating that two formerly distinct
4958 classes are now equivalent. So merge them. */
4959 merge_equiv_classes (elt, src_eqv_elt);
2197a88a
RK
4960 src_eqv_hash = HASH (src_eqv, elt->mode);
4961 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
7afe21cc
RK
4962 }
4963
278a83b2
KH
4964 src_eqv_here = 0;
4965 }
7afe21cc
RK
4966
4967 else if (src_eqv_elt)
278a83b2 4968 elt = src_eqv_elt;
7afe21cc
RK
4969
4970 /* Try to find a constant somewhere and record it in `src_const'.
4971 Record its table element, if any, in `src_const_elt'. Look in
4972 any known equivalences first. (If the constant is not in the
2197a88a 4973 table, also set `sets[i].src_const_hash'). */
7afe21cc 4974 if (elt)
278a83b2 4975 for (p = elt->first_same_value; p; p = p->next_same_value)
7afe21cc
RK
4976 if (p->is_const)
4977 {
4978 src_const = p->exp;
4979 src_const_elt = elt;
4980 break;
4981 }
4982
4983 if (src_const == 0
4984 && (CONSTANT_P (src_folded)
278a83b2 4985 /* Consider (minus (label_ref L1) (label_ref L2)) as
7afe21cc
RK
4986 "constant" here so we will record it. This allows us
4987 to fold switch statements when an ADDR_DIFF_VEC is used. */
4988 || (GET_CODE (src_folded) == MINUS
4989 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
4990 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
4991 src_const = src_folded, src_const_elt = elt;
4992 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
4993 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
4994
4995 /* If we don't know if the constant is in the table, get its
4996 hash code and look it up. */
4997 if (src_const && src_const_elt == 0)
4998 {
2197a88a
RK
4999 sets[i].src_const_hash = HASH (src_const, mode);
5000 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
7afe21cc
RK
5001 }
5002
5003 sets[i].src_const = src_const;
5004 sets[i].src_const_elt = src_const_elt;
5005
5006 /* If the constant and our source are both in the table, mark them as
5007 equivalent. Otherwise, if a constant is in the table but the source
5008 isn't, set ELT to it. */
5009 if (src_const_elt && elt
5010 && src_const_elt->first_same_value != elt->first_same_value)
5011 merge_equiv_classes (elt, src_const_elt);
5012 else if (src_const_elt && elt == 0)
5013 elt = src_const_elt;
5014
5015 /* See if there is a register linearly related to a constant
5016 equivalent of SRC. */
5017 if (src_const
5018 && (GET_CODE (src_const) == CONST
5019 || (src_const_elt && src_const_elt->related_value != 0)))
278a83b2
KH
5020 {
5021 src_related = use_related_value (src_const, src_const_elt);
5022 if (src_related)
5023 {
7afe21cc 5024 struct table_elt *src_related_elt
278a83b2 5025 = lookup (src_related, HASH (src_related, mode), mode);
7afe21cc 5026 if (src_related_elt && elt)
278a83b2 5027 {
7afe21cc
RK
5028 if (elt->first_same_value
5029 != src_related_elt->first_same_value)
278a83b2 5030 /* This can occur when we previously saw a CONST
7afe21cc
RK
5031 involving a SYMBOL_REF and then see the SYMBOL_REF
5032 twice. Merge the involved classes. */
5033 merge_equiv_classes (elt, src_related_elt);
5034
278a83b2 5035 src_related = 0;
7afe21cc 5036 src_related_elt = 0;
278a83b2
KH
5037 }
5038 else if (src_related_elt && elt == 0)
5039 elt = src_related_elt;
7afe21cc 5040 }
278a83b2 5041 }
7afe21cc 5042
e4600702
RK
5043 /* See if we have a CONST_INT that is already in a register in a
5044 wider mode. */
5045
5046 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5047 && GET_MODE_CLASS (mode) == MODE_INT
5048 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5049 {
5050 enum machine_mode wider_mode;
5051
5052 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5053 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5054 && src_related == 0;
5055 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5056 {
5057 struct table_elt *const_elt
5058 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5059
5060 if (const_elt == 0)
5061 continue;
5062
5063 for (const_elt = const_elt->first_same_value;
5064 const_elt; const_elt = const_elt->next_same_value)
5065 if (GET_CODE (const_elt->exp) == REG)
5066 {
4de249d9 5067 src_related = gen_lowpart (mode,
e4600702
RK
5068 const_elt->exp);
5069 break;
5070 }
5071 }
5072 }
5073
d45cf215
RS
5074 /* Another possibility is that we have an AND with a constant in
5075 a mode narrower than a word. If so, it might have been generated
5076 as part of an "if" which would narrow the AND. If we already
5077 have done the AND in a wider mode, we can use a SUBREG of that
5078 value. */
5079
5080 if (flag_expensive_optimizations && ! src_related
5081 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5082 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5083 {
5084 enum machine_mode tmode;
38a448ca 5085 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
d45cf215
RS
5086
5087 for (tmode = GET_MODE_WIDER_MODE (mode);
5088 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5089 tmode = GET_MODE_WIDER_MODE (tmode))
5090 {
4de249d9 5091 rtx inner = gen_lowpart (tmode, XEXP (src, 0));
d45cf215
RS
5092 struct table_elt *larger_elt;
5093
5094 if (inner)
5095 {
5096 PUT_MODE (new_and, tmode);
5097 XEXP (new_and, 0) = inner;
5098 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5099 if (larger_elt == 0)
5100 continue;
5101
5102 for (larger_elt = larger_elt->first_same_value;
5103 larger_elt; larger_elt = larger_elt->next_same_value)
5104 if (GET_CODE (larger_elt->exp) == REG)
5105 {
5106 src_related
4de249d9 5107 = gen_lowpart (mode, larger_elt->exp);
d45cf215
RS
5108 break;
5109 }
5110
5111 if (src_related)
5112 break;
5113 }
5114 }
5115 }
7bac1be0
RK
5116
5117#ifdef LOAD_EXTEND_OP
5118 /* See if a MEM has already been loaded with a widening operation;
5119 if it has, we can use a subreg of that. Many CISC machines
5120 also have such operations, but this is only likely to be
71cc389b 5121 beneficial on these machines. */
278a83b2 5122
ddc356e8 5123 if (flag_expensive_optimizations && src_related == 0
7bac1be0
RK
5124 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5125 && GET_MODE_CLASS (mode) == MODE_INT
5126 && GET_CODE (src) == MEM && ! do_not_record
5127 && LOAD_EXTEND_OP (mode) != NIL)
5128 {
5129 enum machine_mode tmode;
278a83b2 5130
7bac1be0
RK
5131 /* Set what we are trying to extend and the operation it might
5132 have been extended with. */
5133 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5134 XEXP (memory_extend_rtx, 0) = src;
278a83b2 5135
7bac1be0
RK
5136 for (tmode = GET_MODE_WIDER_MODE (mode);
5137 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5138 tmode = GET_MODE_WIDER_MODE (tmode))
5139 {
5140 struct table_elt *larger_elt;
278a83b2 5141
7bac1be0 5142 PUT_MODE (memory_extend_rtx, tmode);
278a83b2 5143 larger_elt = lookup (memory_extend_rtx,
7bac1be0
RK
5144 HASH (memory_extend_rtx, tmode), tmode);
5145 if (larger_elt == 0)
5146 continue;
278a83b2 5147
7bac1be0
RK
5148 for (larger_elt = larger_elt->first_same_value;
5149 larger_elt; larger_elt = larger_elt->next_same_value)
5150 if (GET_CODE (larger_elt->exp) == REG)
5151 {
4de249d9 5152 src_related = gen_lowpart (mode,
7bac1be0
RK
5153 larger_elt->exp);
5154 break;
5155 }
278a83b2 5156
7bac1be0
RK
5157 if (src_related)
5158 break;
5159 }
5160 }
5161#endif /* LOAD_EXTEND_OP */
278a83b2 5162
7afe21cc 5163 if (src == src_folded)
278a83b2 5164 src_folded = 0;
7afe21cc 5165
da7d8304 5166 /* At this point, ELT, if nonzero, points to a class of expressions
7afe21cc 5167 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
da7d8304 5168 and SRC_RELATED, if nonzero, each contain additional equivalent
7afe21cc
RK
5169 expressions. Prune these latter expressions by deleting expressions
5170 already in the equivalence class.
5171
5172 Check for an equivalent identical to the destination. If found,
5173 this is the preferred equivalent since it will likely lead to
5174 elimination of the insn. Indicate this by placing it in
5175 `src_related'. */
5176
278a83b2
KH
5177 if (elt)
5178 elt = elt->first_same_value;
7afe21cc 5179 for (p = elt; p; p = p->next_same_value)
278a83b2 5180 {
7afe21cc
RK
5181 enum rtx_code code = GET_CODE (p->exp);
5182
5183 /* If the expression is not valid, ignore it. Then we do not
5184 have to check for validity below. In most cases, we can use
5185 `rtx_equal_p', since canonicalization has already been done. */
5186 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5187 continue;
5188
5a03c8c4
RK
5189 /* Also skip paradoxical subregs, unless that's what we're
5190 looking for. */
5191 if (code == SUBREG
5192 && (GET_MODE_SIZE (GET_MODE (p->exp))
5193 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5194 && ! (src != 0
5195 && GET_CODE (src) == SUBREG
5196 && GET_MODE (src) == GET_MODE (p->exp)
5197 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5198 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5199 continue;
5200
278a83b2 5201 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
7afe21cc 5202 src = 0;
278a83b2 5203 else if (src_folded && GET_CODE (src_folded) == code
7afe21cc
RK
5204 && rtx_equal_p (src_folded, p->exp))
5205 src_folded = 0;
278a83b2 5206 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
7afe21cc
RK
5207 && rtx_equal_p (src_eqv_here, p->exp))
5208 src_eqv_here = 0;
278a83b2 5209 else if (src_related && GET_CODE (src_related) == code
7afe21cc
RK
5210 && rtx_equal_p (src_related, p->exp))
5211 src_related = 0;
5212
5213 /* This is the same as the destination of the insns, we want
5214 to prefer it. Copy it to src_related. The code below will
5215 then give it a negative cost. */
5216 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5217 src_related = dest;
278a83b2 5218 }
7afe21cc
RK
5219
5220 /* Find the cheapest valid equivalent, trying all the available
5221 possibilities. Prefer items not in the hash table to ones
5222 that are when they are equal cost. Note that we can never
5223 worsen an insn as the current contents will also succeed.
05c33dd8 5224 If we find an equivalent identical to the destination, use it as best,
0f41302f 5225 since this insn will probably be eliminated in that case. */
7afe21cc
RK
5226 if (src)
5227 {
5228 if (rtx_equal_p (src, dest))
f1c1dfc3 5229 src_cost = src_regcost = -1;
7afe21cc 5230 else
630c79be
BS
5231 {
5232 src_cost = COST (src);
5233 src_regcost = approx_reg_cost (src);
5234 }
7afe21cc
RK
5235 }
5236
5237 if (src_eqv_here)
5238 {
5239 if (rtx_equal_p (src_eqv_here, dest))
f1c1dfc3 5240 src_eqv_cost = src_eqv_regcost = -1;
7afe21cc 5241 else
630c79be
BS
5242 {
5243 src_eqv_cost = COST (src_eqv_here);
5244 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5245 }
7afe21cc
RK
5246 }
5247
5248 if (src_folded)
5249 {
5250 if (rtx_equal_p (src_folded, dest))
f1c1dfc3 5251 src_folded_cost = src_folded_regcost = -1;
7afe21cc 5252 else
630c79be
BS
5253 {
5254 src_folded_cost = COST (src_folded);
5255 src_folded_regcost = approx_reg_cost (src_folded);
5256 }
7afe21cc
RK
5257 }
5258
5259 if (src_related)
5260 {
5261 if (rtx_equal_p (src_related, dest))
f1c1dfc3 5262 src_related_cost = src_related_regcost = -1;
7afe21cc 5263 else
630c79be
BS
5264 {
5265 src_related_cost = COST (src_related);
5266 src_related_regcost = approx_reg_cost (src_related);
5267 }
7afe21cc
RK
5268 }
5269
5270 /* If this was an indirect jump insn, a known label will really be
5271 cheaper even though it looks more expensive. */
5272 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
99a9c946 5273 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
278a83b2 5274
7afe21cc
RK
5275 /* Terminate loop when replacement made. This must terminate since
5276 the current contents will be tested and will always be valid. */
5277 while (1)
278a83b2
KH
5278 {
5279 rtx trial;
7afe21cc 5280
278a83b2
KH
5281 /* Skip invalid entries. */
5282 while (elt && GET_CODE (elt->exp) != REG
5283 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5284 elt = elt->next_same_value;
5a03c8c4
RK
5285
5286 /* A paradoxical subreg would be bad here: it'll be the right
5287 size, but later may be adjusted so that the upper bits aren't
5288 what we want. So reject it. */
5289 if (elt != 0
5290 && GET_CODE (elt->exp) == SUBREG
5291 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5292 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5293 /* It is okay, though, if the rtx we're trying to match
5294 will ignore any of the bits we can't predict. */
5295 && ! (src != 0
5296 && GET_CODE (src) == SUBREG
5297 && GET_MODE (src) == GET_MODE (elt->exp)
5298 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5299 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5300 {
5301 elt = elt->next_same_value;
5302 continue;
5303 }
278a83b2 5304
68252e27 5305 if (elt)
630c79be
BS
5306 {
5307 src_elt_cost = elt->cost;
5308 src_elt_regcost = elt->regcost;
5309 }
7afe21cc 5310
68252e27 5311 /* Find cheapest and skip it for the next time. For items
7afe21cc
RK
5312 of equal cost, use this order:
5313 src_folded, src, src_eqv, src_related and hash table entry. */
99a9c946 5314 if (src_folded
56ae04af
KH
5315 && preferable (src_folded_cost, src_folded_regcost,
5316 src_cost, src_regcost) <= 0
5317 && preferable (src_folded_cost, src_folded_regcost,
5318 src_eqv_cost, src_eqv_regcost) <= 0
5319 && preferable (src_folded_cost, src_folded_regcost,
5320 src_related_cost, src_related_regcost) <= 0
5321 && preferable (src_folded_cost, src_folded_regcost,
5322 src_elt_cost, src_elt_regcost) <= 0)
7afe21cc 5323 {
f1c1dfc3 5324 trial = src_folded, src_folded_cost = MAX_COST;
7afe21cc 5325 if (src_folded_force_flag)
9d8de1de
EB
5326 {
5327 rtx forced = force_const_mem (mode, trial);
5328 if (forced)
5329 trial = forced;
5330 }
7afe21cc 5331 }
99a9c946 5332 else if (src
56ae04af
KH
5333 && preferable (src_cost, src_regcost,
5334 src_eqv_cost, src_eqv_regcost) <= 0
5335 && preferable (src_cost, src_regcost,
5336 src_related_cost, src_related_regcost) <= 0
5337 && preferable (src_cost, src_regcost,
5338 src_elt_cost, src_elt_regcost) <= 0)
f1c1dfc3 5339 trial = src, src_cost = MAX_COST;
99a9c946 5340 else if (src_eqv_here
56ae04af
KH
5341 && preferable (src_eqv_cost, src_eqv_regcost,
5342 src_related_cost, src_related_regcost) <= 0
5343 && preferable (src_eqv_cost, src_eqv_regcost,
5344 src_elt_cost, src_elt_regcost) <= 0)
f1c1dfc3 5345 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
99a9c946 5346 else if (src_related
56ae04af
KH
5347 && preferable (src_related_cost, src_related_regcost,
5348 src_elt_cost, src_elt_regcost) <= 0)
68252e27 5349 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
278a83b2 5350 else
7afe21cc 5351 {
05c33dd8 5352 trial = copy_rtx (elt->exp);
7afe21cc 5353 elt = elt->next_same_value;
f1c1dfc3 5354 src_elt_cost = MAX_COST;
7afe21cc
RK
5355 }
5356
5357 /* We don't normally have an insn matching (set (pc) (pc)), so
5358 check for this separately here. We will delete such an
5359 insn below.
5360
d466c016
JL
5361 For other cases such as a table jump or conditional jump
5362 where we know the ultimate target, go ahead and replace the
5363 operand. While that may not make a valid insn, we will
5364 reemit the jump below (and also insert any necessary
5365 barriers). */
7afe21cc
RK
5366 if (n_sets == 1 && dest == pc_rtx
5367 && (trial == pc_rtx
5368 || (GET_CODE (trial) == LABEL_REF
5369 && ! condjump_p (insn))))
5370 {
d466c016 5371 SET_SRC (sets[i].rtl) = trial;
602c4c0d 5372 cse_jumps_altered = 1;
7afe21cc
RK
5373 break;
5374 }
278a83b2 5375
7afe21cc 5376 /* Look for a substitution that makes a valid insn. */
ddc356e8 5377 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
05c33dd8 5378 {
dbaff908
RS
5379 rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
5380
7bd8b2a8
JL
5381 /* If we just made a substitution inside a libcall, then we
5382 need to make the same substitution in any notes attached
5383 to the RETVAL insn. */
1ed0205e 5384 if (libcall_insn
47841d1b
JJ
5385 && (GET_CODE (sets[i].orig_src) == REG
5386 || GET_CODE (sets[i].orig_src) == SUBREG
278a83b2 5387 || GET_CODE (sets[i].orig_src) == MEM))
0c19a26f
RS
5388 simplify_replace_rtx (REG_NOTES (libcall_insn),
5389 sets[i].orig_src, copy_rtx (new));
7bd8b2a8 5390
7722328e
RK
5391 /* The result of apply_change_group can be ignored; see
5392 canon_reg. */
5393
dbaff908 5394 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
6702af89 5395 apply_change_group ();
05c33dd8
RK
5396 break;
5397 }
7afe21cc 5398
278a83b2 5399 /* If we previously found constant pool entries for
7afe21cc
RK
5400 constants and this is a constant, try making a
5401 pool entry. Put it in src_folded unless we already have done
5402 this since that is where it likely came from. */
5403
5404 else if (constant_pool_entries_cost
5405 && CONSTANT_P (trial)
d51ff7cb
JW
5406 /* Reject cases that will abort in decode_rtx_const.
5407 On the alpha when simplifying a switch, we get
5408 (const (truncate (minus (label_ref) (label_ref)))). */
1bbd065b
RK
5409 && ! (GET_CODE (trial) == CONST
5410 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
d51ff7cb
JW
5411 /* Likewise on IA-64, except without the truncate. */
5412 && ! (GET_CODE (trial) == CONST
5413 && GET_CODE (XEXP (trial, 0)) == MINUS
5414 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5415 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
1bbd065b
RK
5416 && (src_folded == 0
5417 || (GET_CODE (src_folded) != MEM
5418 && ! src_folded_force_flag))
9ae8ffe7
JL
5419 && GET_MODE_CLASS (mode) != MODE_CC
5420 && mode != VOIDmode)
7afe21cc
RK
5421 {
5422 src_folded_force_flag = 1;
5423 src_folded = trial;
5424 src_folded_cost = constant_pool_entries_cost;
dd0ba281 5425 src_folded_regcost = constant_pool_entries_regcost;
7afe21cc 5426 }
278a83b2 5427 }
7afe21cc
RK
5428
5429 src = SET_SRC (sets[i].rtl);
5430
5431 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5432 However, there is an important exception: If both are registers
5433 that are not the head of their equivalence class, replace SET_SRC
5434 with the head of the class. If we do not do this, we will have
5435 both registers live over a portion of the basic block. This way,
5436 their lifetimes will likely abut instead of overlapping. */
5437 if (GET_CODE (dest) == REG
1bb98cec 5438 && REGNO_QTY_VALID_P (REGNO (dest)))
7afe21cc 5439 {
1bb98cec
DM
5440 int dest_q = REG_QTY (REGNO (dest));
5441 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5442
5443 if (dest_ent->mode == GET_MODE (dest)
5444 && dest_ent->first_reg != REGNO (dest)
5445 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5446 /* Don't do this if the original insn had a hard reg as
5447 SET_SRC or SET_DEST. */
5448 && (GET_CODE (sets[i].src) != REG
5449 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5450 && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5451 /* We can't call canon_reg here because it won't do anything if
5452 SRC is a hard register. */
759bd8b7 5453 {
1bb98cec
DM
5454 int src_q = REG_QTY (REGNO (src));
5455 struct qty_table_elem *src_ent = &qty_table[src_q];
5456 int first = src_ent->first_reg;
5457 rtx new_src
5458 = (first >= FIRST_PSEUDO_REGISTER
5459 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5460
5461 /* We must use validate-change even for this, because this
5462 might be a special no-op instruction, suitable only to
5463 tag notes onto. */
5464 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5465 {
5466 src = new_src;
5467 /* If we had a constant that is cheaper than what we are now
5468 setting SRC to, use that constant. We ignored it when we
5469 thought we could make this into a no-op. */
5470 if (src_const && COST (src_const) < COST (src)
278a83b2
KH
5471 && validate_change (insn, &SET_SRC (sets[i].rtl),
5472 src_const, 0))
1bb98cec
DM
5473 src = src_const;
5474 }
759bd8b7 5475 }
7afe21cc
RK
5476 }
5477
5478 /* If we made a change, recompute SRC values. */
5479 if (src != sets[i].src)
278a83b2 5480 {
4eadede7 5481 cse_altered = 1;
278a83b2
KH
5482 do_not_record = 0;
5483 hash_arg_in_memory = 0;
7afe21cc 5484 sets[i].src = src;
278a83b2
KH
5485 sets[i].src_hash = HASH (src, mode);
5486 sets[i].src_volatile = do_not_record;
5487 sets[i].src_in_memory = hash_arg_in_memory;
5488 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5489 }
7afe21cc
RK
5490
5491 /* If this is a single SET, we are setting a register, and we have an
5492 equivalent constant, we want to add a REG_NOTE. We don't want
5493 to write a REG_EQUAL note for a constant pseudo since verifying that
d45cf215 5494 that pseudo hasn't been eliminated is a pain. Such a note also
278a83b2 5495 won't help anything.
ac7ef8d5
FS
5496
5497 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5498 which can be created for a reference to a compile time computable
5499 entry in a jump table. */
5500
7afe21cc 5501 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
ac7ef8d5
FS
5502 && GET_CODE (src_const) != REG
5503 && ! (GET_CODE (src_const) == CONST
5504 && GET_CODE (XEXP (src_const, 0)) == MINUS
5505 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5506 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
7afe21cc 5507 {
a77b7e32
RS
5508 /* We only want a REG_EQUAL note if src_const != src. */
5509 if (! rtx_equal_p (src, src_const))
5510 {
5511 /* Make sure that the rtx is not shared. */
5512 src_const = copy_rtx (src_const);
51e2a951 5513
a77b7e32
RS
5514 /* Record the actual constant value in a REG_EQUAL note,
5515 making a new one if one does not already exist. */
5516 set_unique_reg_note (insn, REG_EQUAL, src_const);
5517 }
7afe21cc
RK
5518 }
5519
5520 /* Now deal with the destination. */
5521 do_not_record = 0;
7afe21cc
RK
5522
5523 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5524 to the MEM or REG within it. */
5525 while (GET_CODE (dest) == SIGN_EXTRACT
5526 || GET_CODE (dest) == ZERO_EXTRACT
5527 || GET_CODE (dest) == SUBREG
5528 || GET_CODE (dest) == STRICT_LOW_PART)
0339ce7e 5529 dest = XEXP (dest, 0);
7afe21cc
RK
5530
5531 sets[i].inner_dest = dest;
5532
5533 if (GET_CODE (dest) == MEM)
5534 {
9ae8ffe7
JL
5535#ifdef PUSH_ROUNDING
5536 /* Stack pushes invalidate the stack pointer. */
5537 rtx addr = XEXP (dest, 0);
ec8e098d 5538 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
9ae8ffe7
JL
5539 && XEXP (addr, 0) == stack_pointer_rtx)
5540 invalidate (stack_pointer_rtx, Pmode);
5541#endif
7afe21cc 5542 dest = fold_rtx (dest, insn);
7afe21cc
RK
5543 }
5544
5545 /* Compute the hash code of the destination now,
5546 before the effects of this instruction are recorded,
5547 since the register values used in the address computation
5548 are those before this instruction. */
2197a88a 5549 sets[i].dest_hash = HASH (dest, mode);
7afe21cc
RK
5550
5551 /* Don't enter a bit-field in the hash table
5552 because the value in it after the store
5553 may not equal what was stored, due to truncation. */
5554
5555 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5556 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5557 {
5558 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5559
5560 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5561 && GET_CODE (width) == CONST_INT
906c4e36
RK
5562 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5563 && ! (INTVAL (src_const)
5564 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
7afe21cc
RK
5565 /* Exception: if the value is constant,
5566 and it won't be truncated, record it. */
5567 ;
5568 else
5569 {
5570 /* This is chosen so that the destination will be invalidated
5571 but no new value will be recorded.
5572 We must invalidate because sometimes constant
5573 values can be recorded for bitfields. */
5574 sets[i].src_elt = 0;
5575 sets[i].src_volatile = 1;
5576 src_eqv = 0;
5577 src_eqv_elt = 0;
5578 }
5579 }
5580
5581 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5582 the insn. */
5583 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5584 {
ef178af3 5585 /* One less use of the label this insn used to jump to. */
49ce134f 5586 delete_insn (insn);
7afe21cc 5587 cse_jumps_altered = 1;
7afe21cc
RK
5588 /* No more processing for this set. */
5589 sets[i].rtl = 0;
5590 }
5591
5592 /* If this SET is now setting PC to a label, we know it used to
d466c016 5593 be a conditional or computed branch. */
7afe21cc
RK
5594 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5595 {
8fb1e50e
GS
5596 /* Now emit a BARRIER after the unconditional jump. */
5597 if (NEXT_INSN (insn) == 0
5598 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5599 emit_barrier_after (insn);
5600
d466c016
JL
5601 /* We reemit the jump in as many cases as possible just in
5602 case the form of an unconditional jump is significantly
5603 different than a computed jump or conditional jump.
5604
5605 If this insn has multiple sets, then reemitting the
5606 jump is nontrivial. So instead we just force rerecognition
5607 and hope for the best. */
5608 if (n_sets == 1)
7afe21cc 5609 {
9dcb4381 5610 rtx new, note;
8fb1e50e 5611
9dcb4381 5612 new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
7afe21cc
RK
5613 JUMP_LABEL (new) = XEXP (src, 0);
5614 LABEL_NUSES (XEXP (src, 0))++;
9dcb4381
RH
5615
5616 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5617 note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5618 if (note)
5619 {
5620 XEXP (note, 1) = NULL_RTX;
5621 REG_NOTES (new) = note;
5622 }
5623
38c1593d 5624 delete_insn (insn);
7afe21cc 5625 insn = new;
8fb1e50e
GS
5626
5627 /* Now emit a BARRIER after the unconditional jump. */
5628 if (NEXT_INSN (insn) == 0
5629 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5630 emit_barrier_after (insn);
7afe21cc 5631 }
31dcf83f 5632 else
31dcf83f 5633 INSN_CODE (insn) = -1;
7afe21cc 5634
56d44285 5635 never_reached_warning (insn, NULL);
312f6255 5636
8fb1e50e
GS
5637 /* Do not bother deleting any unreachable code,
5638 let jump/flow do that. */
7afe21cc
RK
5639
5640 cse_jumps_altered = 1;
5641 sets[i].rtl = 0;
5642 }
5643
c2a47e48
RK
5644 /* If destination is volatile, invalidate it and then do no further
5645 processing for this assignment. */
7afe21cc
RK
5646
5647 else if (do_not_record)
c2a47e48 5648 {
bb07060a 5649 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
bb4034b3 5650 invalidate (dest, VOIDmode);
bb07060a
JW
5651 else if (GET_CODE (dest) == MEM)
5652 {
5653 /* Outgoing arguments for a libcall don't
5654 affect any recorded expressions. */
5655 if (! libcall_insn || insn == libcall_insn)
5656 invalidate (dest, VOIDmode);
5657 }
2708da92
RS
5658 else if (GET_CODE (dest) == STRICT_LOW_PART
5659 || GET_CODE (dest) == ZERO_EXTRACT)
bb4034b3 5660 invalidate (XEXP (dest, 0), GET_MODE (dest));
c2a47e48
RK
5661 sets[i].rtl = 0;
5662 }
7afe21cc
RK
5663
5664 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
2197a88a 5665 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
7afe21cc
RK
5666
5667#ifdef HAVE_cc0
5668 /* If setting CC0, record what it was set to, or a constant, if it
5669 is equivalent to a constant. If it is being set to a floating-point
5670 value, make a COMPARE with the appropriate constant of 0. If we
5671 don't do this, later code can interpret this as a test against
5672 const0_rtx, which can cause problems if we try to put it into an
5673 insn as a floating-point operand. */
5674 if (dest == cc0_rtx)
5675 {
5676 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5677 this_insn_cc0_mode = mode;
cbf6a543 5678 if (FLOAT_MODE_P (mode))
38a448ca
RH
5679 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5680 CONST0_RTX (mode));
7afe21cc
RK
5681 }
5682#endif
5683 }
5684
5685 /* Now enter all non-volatile source expressions in the hash table
5686 if they are not already present.
5687 Record their equivalence classes in src_elt.
5688 This way we can insert the corresponding destinations into
5689 the same classes even if the actual sources are no longer in them
5690 (having been invalidated). */
5691
5692 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5693 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5694 {
b3694847
SS
5695 struct table_elt *elt;
5696 struct table_elt *classp = sets[0].src_elt;
7afe21cc
RK
5697 rtx dest = SET_DEST (sets[0].rtl);
5698 enum machine_mode eqvmode = GET_MODE (dest);
5699
5700 if (GET_CODE (dest) == STRICT_LOW_PART)
5701 {
5702 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5703 classp = 0;
5704 }
5705 if (insert_regs (src_eqv, classp, 0))
8ae2b8f6
JW
5706 {
5707 rehash_using_reg (src_eqv);
5708 src_eqv_hash = HASH (src_eqv, eqvmode);
5709 }
2197a88a 5710 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7afe21cc 5711 elt->in_memory = src_eqv_in_memory;
7afe21cc 5712 src_eqv_elt = elt;
f7911249
JW
5713
5714 /* Check to see if src_eqv_elt is the same as a set source which
5715 does not yet have an elt, and if so set the elt of the set source
5716 to src_eqv_elt. */
5717 for (i = 0; i < n_sets; i++)
26132f71
JW
5718 if (sets[i].rtl && sets[i].src_elt == 0
5719 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
f7911249 5720 sets[i].src_elt = src_eqv_elt;
7afe21cc
RK
5721 }
5722
5723 for (i = 0; i < n_sets; i++)
5724 if (sets[i].rtl && ! sets[i].src_volatile
5725 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5726 {
5727 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5728 {
5729 /* REG_EQUAL in setting a STRICT_LOW_PART
5730 gives an equivalent for the entire destination register,
5731 not just for the subreg being stored in now.
5732 This is a more interesting equivalence, so we arrange later
5733 to treat the entire reg as the destination. */
5734 sets[i].src_elt = src_eqv_elt;
2197a88a 5735 sets[i].src_hash = src_eqv_hash;
7afe21cc
RK
5736 }
5737 else
5738 {
5739 /* Insert source and constant equivalent into hash table, if not
5740 already present. */
b3694847
SS
5741 struct table_elt *classp = src_eqv_elt;
5742 rtx src = sets[i].src;
5743 rtx dest = SET_DEST (sets[i].rtl);
7afe21cc
RK
5744 enum machine_mode mode
5745 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5746
1fcc57f1
AM
5747 /* It's possible that we have a source value known to be
5748 constant but don't have a REG_EQUAL note on the insn.
5749 Lack of a note will mean src_eqv_elt will be NULL. This
5750 can happen where we've generated a SUBREG to access a
5751 CONST_INT that is already in a register in a wider mode.
5752 Ensure that the source expression is put in the proper
5753 constant class. */
5754 if (!classp)
5755 classp = sets[i].src_const_elt;
5756
26132f71 5757 if (sets[i].src_elt == 0)
7afe21cc 5758 {
26132f71
JW
5759 /* Don't put a hard register source into the table if this is
5760 the last insn of a libcall. In this case, we only need
5761 to put src_eqv_elt in src_elt. */
db4a8254 5762 if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
8ae2b8f6 5763 {
b3694847 5764 struct table_elt *elt;
26132f71
JW
5765
5766 /* Note that these insert_regs calls cannot remove
5767 any of the src_elt's, because they would have failed to
5768 match if not still valid. */
5769 if (insert_regs (src, classp, 0))
5770 {
5771 rehash_using_reg (src);
5772 sets[i].src_hash = HASH (src, mode);
5773 }
5774 elt = insert (src, classp, sets[i].src_hash, mode);
5775 elt->in_memory = sets[i].src_in_memory;
26132f71 5776 sets[i].src_elt = classp = elt;
8ae2b8f6 5777 }
26132f71
JW
5778 else
5779 sets[i].src_elt = classp;
7afe21cc 5780 }
7afe21cc
RK
5781 if (sets[i].src_const && sets[i].src_const_elt == 0
5782 && src != sets[i].src_const
5783 && ! rtx_equal_p (sets[i].src_const, src))
5784 sets[i].src_elt = insert (sets[i].src_const, classp,
2197a88a 5785 sets[i].src_const_hash, mode);
7afe21cc
RK
5786 }
5787 }
5788 else if (sets[i].src_elt == 0)
5789 /* If we did not insert the source into the hash table (e.g., it was
5790 volatile), note the equivalence class for the REG_EQUAL value, if any,
5791 so that the destination goes into that class. */
5792 sets[i].src_elt = src_eqv_elt;
5793
9ae8ffe7 5794 invalidate_from_clobbers (x);
77fa0940 5795
278a83b2 5796 /* Some registers are invalidated by subroutine calls. Memory is
77fa0940
RK
5797 invalidated by non-constant calls. */
5798
7afe21cc
RK
5799 if (GET_CODE (insn) == CALL_INSN)
5800 {
24a28584 5801 if (! CONST_OR_PURE_CALL_P (insn))
9ae8ffe7 5802 invalidate_memory ();
7afe21cc
RK
5803 invalidate_for_call ();
5804 }
5805
5806 /* Now invalidate everything set by this instruction.
5807 If a SUBREG or other funny destination is being set,
5808 sets[i].rtl is still nonzero, so here we invalidate the reg
5809 a part of which is being set. */
5810
5811 for (i = 0; i < n_sets; i++)
5812 if (sets[i].rtl)
5813 {
bb4034b3
JW
5814 /* We can't use the inner dest, because the mode associated with
5815 a ZERO_EXTRACT is significant. */
b3694847 5816 rtx dest = SET_DEST (sets[i].rtl);
7afe21cc
RK
5817
5818 /* Needed for registers to remove the register from its
5819 previous quantity's chain.
5820 Needed for memory if this is a nonvarying address, unless
5821 we have just done an invalidate_memory that covers even those. */
bb07060a 5822 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
bb4034b3 5823 invalidate (dest, VOIDmode);
bb07060a
JW
5824 else if (GET_CODE (dest) == MEM)
5825 {
5826 /* Outgoing arguments for a libcall don't
5827 affect any recorded expressions. */
5828 if (! libcall_insn || insn == libcall_insn)
5829 invalidate (dest, VOIDmode);
5830 }
2708da92
RS
5831 else if (GET_CODE (dest) == STRICT_LOW_PART
5832 || GET_CODE (dest) == ZERO_EXTRACT)
bb4034b3 5833 invalidate (XEXP (dest, 0), GET_MODE (dest));
7afe21cc
RK
5834 }
5835
01e752d3
JL
5836 /* A volatile ASM invalidates everything. */
5837 if (GET_CODE (insn) == INSN
5838 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5839 && MEM_VOLATILE_P (PATTERN (insn)))
5840 flush_hash_table ();
5841
7afe21cc
RK
5842 /* Make sure registers mentioned in destinations
5843 are safe for use in an expression to be inserted.
5844 This removes from the hash table
5845 any invalid entry that refers to one of these registers.
5846
5847 We don't care about the return value from mention_regs because
5848 we are going to hash the SET_DEST values unconditionally. */
5849
5850 for (i = 0; i < n_sets; i++)
34c73909
R
5851 {
5852 if (sets[i].rtl)
5853 {
5854 rtx x = SET_DEST (sets[i].rtl);
5855
5856 if (GET_CODE (x) != REG)
5857 mention_regs (x);
5858 else
5859 {
5860 /* We used to rely on all references to a register becoming
5861 inaccessible when a register changes to a new quantity,
5862 since that changes the hash code. However, that is not
9b1549b8 5863 safe, since after HASH_SIZE new quantities we get a
34c73909
R
5864 hash 'collision' of a register with its own invalid
5865 entries. And since SUBREGs have been changed not to
5866 change their hash code with the hash code of the register,
5867 it wouldn't work any longer at all. So we have to check
5868 for any invalid references lying around now.
5869 This code is similar to the REG case in mention_regs,
5870 but it knows that reg_tick has been incremented, and
5871 it leaves reg_in_table as -1 . */
770ae6cc
RK
5872 unsigned int regno = REGNO (x);
5873 unsigned int endregno
34c73909 5874 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
66fd46b6 5875 : hard_regno_nregs[regno][GET_MODE (x)]);
770ae6cc 5876 unsigned int i;
34c73909
R
5877
5878 for (i = regno; i < endregno; i++)
5879 {
30f72379 5880 if (REG_IN_TABLE (i) >= 0)
34c73909
R
5881 {
5882 remove_invalid_refs (i);
30f72379 5883 REG_IN_TABLE (i) = -1;
34c73909
R
5884 }
5885 }
5886 }
5887 }
5888 }
7afe21cc
RK
5889
5890 /* We may have just removed some of the src_elt's from the hash table.
5891 So replace each one with the current head of the same class. */
5892
5893 for (i = 0; i < n_sets; i++)
5894 if (sets[i].rtl)
5895 {
5896 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5897 /* If elt was removed, find current head of same class,
5898 or 0 if nothing remains of that class. */
5899 {
b3694847 5900 struct table_elt *elt = sets[i].src_elt;
7afe21cc
RK
5901
5902 while (elt && elt->prev_same_value)
5903 elt = elt->prev_same_value;
5904
5905 while (elt && elt->first_same_value == 0)
5906 elt = elt->next_same_value;
5907 sets[i].src_elt = elt ? elt->first_same_value : 0;
5908 }
5909 }
5910
5911 /* Now insert the destinations into their equivalence classes. */
5912
5913 for (i = 0; i < n_sets; i++)
5914 if (sets[i].rtl)
5915 {
b3694847 5916 rtx dest = SET_DEST (sets[i].rtl);
9de2c71a 5917 rtx inner_dest = sets[i].inner_dest;
b3694847 5918 struct table_elt *elt;
7afe21cc
RK
5919
5920 /* Don't record value if we are not supposed to risk allocating
5921 floating-point values in registers that might be wider than
5922 memory. */
5923 if ((flag_float_store
5924 && GET_CODE (dest) == MEM
cbf6a543 5925 && FLOAT_MODE_P (GET_MODE (dest)))
bc4ddc77
JW
5926 /* Don't record BLKmode values, because we don't know the
5927 size of it, and can't be sure that other BLKmode values
5928 have the same or smaller size. */
5929 || GET_MODE (dest) == BLKmode
7afe21cc
RK
5930 /* Don't record values of destinations set inside a libcall block
5931 since we might delete the libcall. Things should have been set
5932 up so we won't want to reuse such a value, but we play it safe
5933 here. */
7bd8b2a8 5934 || libcall_insn
7afe21cc
RK
5935 /* If we didn't put a REG_EQUAL value or a source into the hash
5936 table, there is no point is recording DEST. */
1a8e9a8e
RK
5937 || sets[i].src_elt == 0
5938 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
5939 or SIGN_EXTEND, don't record DEST since it can cause
5940 some tracking to be wrong.
5941
5942 ??? Think about this more later. */
5943 || (GET_CODE (dest) == SUBREG
5944 && (GET_MODE_SIZE (GET_MODE (dest))
5945 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5946 && (GET_CODE (sets[i].src) == SIGN_EXTEND
5947 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
7afe21cc
RK
5948 continue;
5949
5950 /* STRICT_LOW_PART isn't part of the value BEING set,
5951 and neither is the SUBREG inside it.
5952 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
5953 if (GET_CODE (dest) == STRICT_LOW_PART)
5954 dest = SUBREG_REG (XEXP (dest, 0));
5955
c610adec 5956 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
7afe21cc
RK
5957 /* Registers must also be inserted into chains for quantities. */
5958 if (insert_regs (dest, sets[i].src_elt, 1))
8ae2b8f6
JW
5959 {
5960 /* If `insert_regs' changes something, the hash code must be
5961 recalculated. */
5962 rehash_using_reg (dest);
5963 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5964 }
7afe21cc 5965
9de2c71a
MM
5966 if (GET_CODE (inner_dest) == MEM
5967 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
5968 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
278a83b2 5969 that (MEM (ADDRESSOF (X))) is equivalent to Y.
9de2c71a
MM
5970 Consider the case in which the address of the MEM is
5971 passed to a function, which alters the MEM. Then, if we
5972 later use Y instead of the MEM we'll miss the update. */
5973 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
5974 else
5975 elt = insert (dest, sets[i].src_elt,
5976 sets[i].dest_hash, GET_MODE (dest));
5977
c256df0b 5978 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
9ad91d71 5979 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
4977bab6 5980 || fixed_base_plus_p (XEXP (sets[i].inner_dest,
9ad91d71 5981 0))));
c256df0b 5982
fc3ffe83
RK
5983 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
5984 narrower than M2, and both M1 and M2 are the same number of words,
5985 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
5986 make that equivalence as well.
7afe21cc 5987
4de249d9
PB
5988 However, BAR may have equivalences for which gen_lowpart
5989 will produce a simpler value than gen_lowpart applied to
7afe21cc 5990 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
278a83b2 5991 BAR's equivalences. If we don't get a simplified form, make
7afe21cc
RK
5992 the SUBREG. It will not be used in an equivalence, but will
5993 cause two similar assignments to be detected.
5994
5995 Note the loop below will find SUBREG_REG (DEST) since we have
5996 already entered SRC and DEST of the SET in the table. */
5997
5998 if (GET_CODE (dest) == SUBREG
6cdbaec4
RK
5999 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6000 / UNITS_PER_WORD)
278a83b2 6001 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
7afe21cc
RK
6002 && (GET_MODE_SIZE (GET_MODE (dest))
6003 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6004 && sets[i].src_elt != 0)
6005 {
6006 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6007 struct table_elt *elt, *classp = 0;
6008
6009 for (elt = sets[i].src_elt->first_same_value; elt;
6010 elt = elt->next_same_value)
6011 {
6012 rtx new_src = 0;
2197a88a 6013 unsigned src_hash;
7afe21cc 6014 struct table_elt *src_elt;
ff27a429 6015 int byte = 0;
7afe21cc
RK
6016
6017 /* Ignore invalid entries. */
6018 if (GET_CODE (elt->exp) != REG
6019 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6020 continue;
6021
9beb7d20
RH
6022 /* We may have already been playing subreg games. If the
6023 mode is already correct for the destination, use it. */
6024 if (GET_MODE (elt->exp) == new_mode)
6025 new_src = elt->exp;
6026 else
6027 {
6028 /* Calculate big endian correction for the SUBREG_BYTE.
6029 We have already checked that M1 (GET_MODE (dest))
6030 is not narrower than M2 (new_mode). */
6031 if (BYTES_BIG_ENDIAN)
6032 byte = (GET_MODE_SIZE (GET_MODE (dest))
6033 - GET_MODE_SIZE (new_mode));
6034
6035 new_src = simplify_gen_subreg (new_mode, elt->exp,
6036 GET_MODE (dest), byte);
6037 }
6038
ff27a429
R
6039 /* The call to simplify_gen_subreg fails if the value
6040 is VOIDmode, yet we can't do any simplification, e.g.
6041 for EXPR_LISTs denoting function call results.
6042 It is invalid to construct a SUBREG with a VOIDmode
6043 SUBREG_REG, hence a zero new_src means we can't do
6044 this substitution. */
6045 if (! new_src)
6046 continue;
7afe21cc
RK
6047
6048 src_hash = HASH (new_src, new_mode);
6049 src_elt = lookup (new_src, src_hash, new_mode);
6050
6051 /* Put the new source in the hash table is if isn't
6052 already. */
6053 if (src_elt == 0)
6054 {
6055 if (insert_regs (new_src, classp, 0))
8ae2b8f6
JW
6056 {
6057 rehash_using_reg (new_src);
6058 src_hash = HASH (new_src, new_mode);
6059 }
7afe21cc
RK
6060 src_elt = insert (new_src, classp, src_hash, new_mode);
6061 src_elt->in_memory = elt->in_memory;
7afe21cc
RK
6062 }
6063 else if (classp && classp != src_elt->first_same_value)
278a83b2 6064 /* Show that two things that we've seen before are
7afe21cc
RK
6065 actually the same. */
6066 merge_equiv_classes (src_elt, classp);
6067
6068 classp = src_elt->first_same_value;
da932f04
JL
6069 /* Ignore invalid entries. */
6070 while (classp
6071 && GET_CODE (classp->exp) != REG
6072 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
6073 classp = classp->next_same_value;
7afe21cc
RK
6074 }
6075 }
6076 }
6077
403e25d0
RK
6078 /* Special handling for (set REG0 REG1) where REG0 is the
6079 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6080 be used in the sequel, so (if easily done) change this insn to
6081 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6082 that computed their value. Then REG1 will become a dead store
6083 and won't cloud the situation for later optimizations.
7afe21cc
RK
6084
6085 Do not make this change if REG1 is a hard register, because it will
6086 then be used in the sequel and we may be changing a two-operand insn
6087 into a three-operand insn.
6088
50270076
R
6089 Also do not do this if we are operating on a copy of INSN.
6090
6091 Also don't do this if INSN ends a libcall; this would cause an unrelated
6092 register to be set in the middle of a libcall, and we then get bad code
6093 if the libcall is deleted. */
7afe21cc
RK
6094
6095 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6096 && NEXT_INSN (PREV_INSN (insn)) == insn
6097 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6098 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
1bb98cec 6099 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
7afe21cc 6100 {
1bb98cec
DM
6101 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6102 struct qty_table_elem *src_ent = &qty_table[src_q];
7afe21cc 6103
1bb98cec
DM
6104 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6105 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
7afe21cc 6106 {
3e25353e
AH
6107 rtx prev = insn;
6108 /* Scan for the previous nonnote insn, but stop at a basic
6109 block boundary. */
6110 do
6111 {
6112 prev = PREV_INSN (prev);
6113 }
6114 while (prev && GET_CODE (prev) == NOTE
6115 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
7080f735 6116
58ecb5e2
RS
6117 /* Do not swap the registers around if the previous instruction
6118 attaches a REG_EQUIV note to REG1.
6119
6120 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6121 from the pseudo that originally shadowed an incoming argument
6122 to another register. Some uses of REG_EQUIV might rely on it
6123 being attached to REG1 rather than REG2.
6124
6125 This section previously turned the REG_EQUIV into a REG_EQUAL
6126 note. We cannot do that because REG_EQUIV may provide an
4912a07c 6127 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
58ecb5e2 6128
403e25d0
RK
6129 if (prev != 0 && GET_CODE (prev) == INSN
6130 && GET_CODE (PATTERN (prev)) == SET
58ecb5e2
RS
6131 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6132 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
1bb98cec
DM
6133 {
6134 rtx dest = SET_DEST (sets[0].rtl);
403e25d0 6135 rtx src = SET_SRC (sets[0].rtl);
58ecb5e2 6136 rtx note;
7afe21cc 6137
278a83b2
KH
6138 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6139 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6140 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
1bb98cec 6141 apply_change_group ();
7afe21cc 6142
403e25d0
RK
6143 /* If INSN has a REG_EQUAL note, and this note mentions
6144 REG0, then we must delete it, because the value in
6145 REG0 has changed. If the note's value is REG1, we must
6146 also delete it because that is now this insn's dest. */
1bb98cec 6147 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
403e25d0
RK
6148 if (note != 0
6149 && (reg_mentioned_p (dest, XEXP (note, 0))
6150 || rtx_equal_p (src, XEXP (note, 0))))
1bb98cec
DM
6151 remove_note (insn, note);
6152 }
7afe21cc
RK
6153 }
6154 }
6155
6156 /* If this is a conditional jump insn, record any known equivalences due to
6157 the condition being tested. */
6158
6159 last_jump_equiv_class = 0;
6160 if (GET_CODE (insn) == JUMP_INSN
6161 && n_sets == 1 && GET_CODE (x) == SET
6162 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6163 record_jump_equiv (insn, 0);
6164
6165#ifdef HAVE_cc0
6166 /* If the previous insn set CC0 and this insn no longer references CC0,
6167 delete the previous insn. Here we use the fact that nothing expects CC0
6168 to be valid over an insn, which is true until the final pass. */
6169 if (prev_insn && GET_CODE (prev_insn) == INSN
6170 && (tem = single_set (prev_insn)) != 0
6171 && SET_DEST (tem) == cc0_rtx
6172 && ! reg_mentioned_p (cc0_rtx, x))
6dee7384 6173 delete_insn (prev_insn);
7afe21cc
RK
6174
6175 prev_insn_cc0 = this_insn_cc0;
6176 prev_insn_cc0_mode = this_insn_cc0_mode;
7afe21cc 6177 prev_insn = insn;
4977bab6 6178#endif
7afe21cc
RK
6179}
6180\f
a4c6502a 6181/* Remove from the hash table all expressions that reference memory. */
14a774a9 6182
7afe21cc 6183static void
7080f735 6184invalidate_memory (void)
7afe21cc 6185{
b3694847
SS
6186 int i;
6187 struct table_elt *p, *next;
7afe21cc 6188
9b1549b8 6189 for (i = 0; i < HASH_SIZE; i++)
9ae8ffe7
JL
6190 for (p = table[i]; p; p = next)
6191 {
6192 next = p->next_same_hash;
6193 if (p->in_memory)
6194 remove_from_table (p, i);
6195 }
6196}
6197
14a774a9
RK
6198/* If ADDR is an address that implicitly affects the stack pointer, return
6199 1 and update the register tables to show the effect. Else, return 0. */
6200
9ae8ffe7 6201static int
7080f735 6202addr_affects_sp_p (rtx addr)
9ae8ffe7 6203{
ec8e098d 6204 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
9ae8ffe7
JL
6205 && GET_CODE (XEXP (addr, 0)) == REG
6206 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7afe21cc 6207 {
30f72379 6208 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
46081bb3
SH
6209 {
6210 REG_TICK (STACK_POINTER_REGNUM)++;
6211 /* Is it possible to use a subreg of SP? */
6212 SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6213 }
9ae8ffe7
JL
6214
6215 /* This should be *very* rare. */
6216 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6217 invalidate (stack_pointer_rtx, VOIDmode);
14a774a9 6218
9ae8ffe7 6219 return 1;
7afe21cc 6220 }
14a774a9 6221
9ae8ffe7 6222 return 0;
7afe21cc
RK
6223}
6224
6225/* Perform invalidation on the basis of everything about an insn
6226 except for invalidating the actual places that are SET in it.
6227 This includes the places CLOBBERed, and anything that might
6228 alias with something that is SET or CLOBBERed.
6229
7afe21cc
RK
6230 X is the pattern of the insn. */
6231
6232static void
7080f735 6233invalidate_from_clobbers (rtx x)
7afe21cc 6234{
7afe21cc
RK
6235 if (GET_CODE (x) == CLOBBER)
6236 {
6237 rtx ref = XEXP (x, 0);
9ae8ffe7
JL
6238 if (ref)
6239 {
6240 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6241 || GET_CODE (ref) == MEM)
6242 invalidate (ref, VOIDmode);
6243 else if (GET_CODE (ref) == STRICT_LOW_PART
6244 || GET_CODE (ref) == ZERO_EXTRACT)
6245 invalidate (XEXP (ref, 0), GET_MODE (ref));
6246 }
7afe21cc
RK
6247 }
6248 else if (GET_CODE (x) == PARALLEL)
6249 {
b3694847 6250 int i;
7afe21cc
RK
6251 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6252 {
b3694847 6253 rtx y = XVECEXP (x, 0, i);
7afe21cc
RK
6254 if (GET_CODE (y) == CLOBBER)
6255 {
6256 rtx ref = XEXP (y, 0);
9ae8ffe7
JL
6257 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6258 || GET_CODE (ref) == MEM)
6259 invalidate (ref, VOIDmode);
6260 else if (GET_CODE (ref) == STRICT_LOW_PART
6261 || GET_CODE (ref) == ZERO_EXTRACT)
6262 invalidate (XEXP (ref, 0), GET_MODE (ref));
7afe21cc
RK
6263 }
6264 }
6265 }
6266}
6267\f
6268/* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6269 and replace any registers in them with either an equivalent constant
6270 or the canonical form of the register. If we are inside an address,
6271 only do this if the address remains valid.
6272
6273 OBJECT is 0 except when within a MEM in which case it is the MEM.
6274
6275 Return the replacement for X. */
6276
6277static rtx
7080f735 6278cse_process_notes (rtx x, rtx object)
7afe21cc
RK
6279{
6280 enum rtx_code code = GET_CODE (x);
6f7d635c 6281 const char *fmt = GET_RTX_FORMAT (code);
7afe21cc
RK
6282 int i;
6283
6284 switch (code)
6285 {
6286 case CONST_INT:
6287 case CONST:
6288 case SYMBOL_REF:
6289 case LABEL_REF:
6290 case CONST_DOUBLE:
69ef87e2 6291 case CONST_VECTOR:
7afe21cc
RK
6292 case PC:
6293 case CC0:
6294 case LO_SUM:
6295 return x;
6296
6297 case MEM:
c96208fa
DC
6298 validate_change (x, &XEXP (x, 0),
6299 cse_process_notes (XEXP (x, 0), x), 0);
7afe21cc
RK
6300 return x;
6301
6302 case EXPR_LIST:
6303 case INSN_LIST:
6304 if (REG_NOTE_KIND (x) == REG_EQUAL)
906c4e36 6305 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
7afe21cc 6306 if (XEXP (x, 1))
906c4e36 6307 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
7afe21cc
RK
6308 return x;
6309
e4890d45
RS
6310 case SIGN_EXTEND:
6311 case ZERO_EXTEND:
0b0ee36c 6312 case SUBREG:
e4890d45
RS
6313 {
6314 rtx new = cse_process_notes (XEXP (x, 0), object);
6315 /* We don't substitute VOIDmode constants into these rtx,
6316 since they would impede folding. */
6317 if (GET_MODE (new) != VOIDmode)
6318 validate_change (object, &XEXP (x, 0), new, 0);
6319 return x;
6320 }
6321
7afe21cc 6322 case REG:
30f72379 6323 i = REG_QTY (REGNO (x));
7afe21cc
RK
6324
6325 /* Return a constant or a constant register. */
1bb98cec 6326 if (REGNO_QTY_VALID_P (REGNO (x)))
7afe21cc 6327 {
1bb98cec
DM
6328 struct qty_table_elem *ent = &qty_table[i];
6329
6330 if (ent->const_rtx != NULL_RTX
6331 && (CONSTANT_P (ent->const_rtx)
6332 || GET_CODE (ent->const_rtx) == REG))
6333 {
4de249d9 6334 rtx new = gen_lowpart (GET_MODE (x), ent->const_rtx);
1bb98cec
DM
6335 if (new)
6336 return new;
6337 }
7afe21cc
RK
6338 }
6339
6340 /* Otherwise, canonicalize this register. */
906c4e36 6341 return canon_reg (x, NULL_RTX);
278a83b2 6342
e9a25f70
JL
6343 default:
6344 break;
7afe21cc
RK
6345 }
6346
6347 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6348 if (fmt[i] == 'e')
6349 validate_change (object, &XEXP (x, i),
7fe34fdf 6350 cse_process_notes (XEXP (x, i), object), 0);
7afe21cc
RK
6351
6352 return x;
6353}
6354\f
6355/* Find common subexpressions between the end test of a loop and the beginning
6356 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
6357
6358 Often we have a loop where an expression in the exit test is used
6359 in the body of the loop. For example "while (*p) *q++ = *p++;".
6360 Because of the way we duplicate the loop exit test in front of the loop,
6361 however, we don't detect that common subexpression. This will be caught
6362 when global cse is implemented, but this is a quite common case.
6363
6364 This function handles the most common cases of these common expressions.
6365 It is called after we have processed the basic block ending with the
6366 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6367 jumps to a label used only once. */
6368
6369static void
7080f735 6370cse_around_loop (rtx loop_start)
7afe21cc
RK
6371{
6372 rtx insn;
6373 int i;
6374 struct table_elt *p;
6375
6376 /* If the jump at the end of the loop doesn't go to the start, we don't
6377 do anything. */
6378 for (insn = PREV_INSN (loop_start);
6379 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6380 insn = PREV_INSN (insn))
6381 ;
6382
6383 if (insn == 0
6384 || GET_CODE (insn) != NOTE
6385 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6386 return;
6387
6388 /* If the last insn of the loop (the end test) was an NE comparison,
6389 we will interpret it as an EQ comparison, since we fell through
f72aed24 6390 the loop. Any equivalences resulting from that comparison are
7afe21cc
RK
6391 therefore not valid and must be invalidated. */
6392 if (last_jump_equiv_class)
6393 for (p = last_jump_equiv_class->first_same_value; p;
6394 p = p->next_same_value)
51723711 6395 {
278a83b2 6396 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
51723711 6397 || (GET_CODE (p->exp) == SUBREG
278a83b2 6398 && GET_CODE (SUBREG_REG (p->exp)) == REG))
51723711 6399 invalidate (p->exp, VOIDmode);
278a83b2
KH
6400 else if (GET_CODE (p->exp) == STRICT_LOW_PART
6401 || GET_CODE (p->exp) == ZERO_EXTRACT)
51723711
KG
6402 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6403 }
7afe21cc
RK
6404
6405 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6406 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6407
6408 The only thing we do with SET_DEST is invalidate entries, so we
6409 can safely process each SET in order. It is slightly less efficient
556c714b
JW
6410 to do so, but we only want to handle the most common cases.
6411
6412 The gen_move_insn call in cse_set_around_loop may create new pseudos.
6413 These pseudos won't have valid entries in any of the tables indexed
6414 by register number, such as reg_qty. We avoid out-of-range array
6415 accesses by not processing any instructions created after cse started. */
7afe21cc
RK
6416
6417 for (insn = NEXT_INSN (loop_start);
6418 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
556c714b 6419 && INSN_UID (insn) < max_insn_uid
7afe21cc
RK
6420 && ! (GET_CODE (insn) == NOTE
6421 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6422 insn = NEXT_INSN (insn))
6423 {
2c3c49de 6424 if (INSN_P (insn)
7afe21cc
RK
6425 && (GET_CODE (PATTERN (insn)) == SET
6426 || GET_CODE (PATTERN (insn)) == CLOBBER))
6427 cse_set_around_loop (PATTERN (insn), insn, loop_start);
2c3c49de 6428 else if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == PARALLEL)
7afe21cc
RK
6429 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6430 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6431 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6432 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6433 loop_start);
6434 }
6435}
6436\f
8b3686ed
RK
6437/* Process one SET of an insn that was skipped. We ignore CLOBBERs
6438 since they are done elsewhere. This function is called via note_stores. */
6439
6440static void
7080f735 6441invalidate_skipped_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
8b3686ed 6442{
9ae8ffe7
JL
6443 enum rtx_code code = GET_CODE (dest);
6444
6445 if (code == MEM
ddc356e8 6446 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
9ae8ffe7
JL
6447 /* There are times when an address can appear varying and be a PLUS
6448 during this scan when it would be a fixed address were we to know
6449 the proper equivalences. So invalidate all memory if there is
6450 a BLKmode or nonscalar memory reference or a reference to a
6451 variable address. */
6452 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
2be28ee2 6453 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
9ae8ffe7
JL
6454 {
6455 invalidate_memory ();
6456 return;
6457 }
ffcf6393 6458
f47c02fa 6459 if (GET_CODE (set) == CLOBBER
8beccec8 6460 || CC0_P (dest)
f47c02fa
RK
6461 || dest == pc_rtx)
6462 return;
6463
9ae8ffe7 6464 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
bb4034b3 6465 invalidate (XEXP (dest, 0), GET_MODE (dest));
9ae8ffe7
JL
6466 else if (code == REG || code == SUBREG || code == MEM)
6467 invalidate (dest, VOIDmode);
8b3686ed
RK
6468}
6469
6470/* Invalidate all insns from START up to the end of the function or the
6471 next label. This called when we wish to CSE around a block that is
6472 conditionally executed. */
6473
6474static void
7080f735 6475invalidate_skipped_block (rtx start)
8b3686ed
RK
6476{
6477 rtx insn;
8b3686ed
RK
6478
6479 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6480 insn = NEXT_INSN (insn))
6481 {
2c3c49de 6482 if (! INSN_P (insn))
8b3686ed
RK
6483 continue;
6484
8b3686ed
RK
6485 if (GET_CODE (insn) == CALL_INSN)
6486 {
24a28584 6487 if (! CONST_OR_PURE_CALL_P (insn))
9ae8ffe7 6488 invalidate_memory ();
8b3686ed 6489 invalidate_for_call ();
8b3686ed
RK
6490 }
6491
97577254 6492 invalidate_from_clobbers (PATTERN (insn));
84832317 6493 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
8b3686ed
RK
6494 }
6495}
6496\f
84832317
MM
6497/* If modifying X will modify the value in *DATA (which is really an
6498 `rtx *'), indicate that fact by setting the pointed to value to
6499 NULL_RTX. */
7afe21cc
RK
6500
6501static void
7080f735 6502cse_check_loop_start (rtx x, rtx set ATTRIBUTE_UNUSED, void *data)
7afe21cc 6503{
84832317
MM
6504 rtx *cse_check_loop_start_value = (rtx *) data;
6505
6506 if (*cse_check_loop_start_value == NULL_RTX
7afe21cc
RK
6507 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6508 return;
6509
84832317
MM
6510 if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6511 || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6512 *cse_check_loop_start_value = NULL_RTX;
7afe21cc
RK
6513}
6514
6515/* X is a SET or CLOBBER contained in INSN that was found near the start of
6516 a loop that starts with the label at LOOP_START.
6517
6518 If X is a SET, we see if its SET_SRC is currently in our hash table.
6519 If so, we see if it has a value equal to some register used only in the
6520 loop exit code (as marked by jump.c).
6521
6522 If those two conditions are true, we search backwards from the start of
6523 the loop to see if that same value was loaded into a register that still
6524 retains its value at the start of the loop.
6525
6526 If so, we insert an insn after the load to copy the destination of that
6527 load into the equivalent register and (try to) replace our SET_SRC with that
6528 register.
6529
6530 In any event, we invalidate whatever this SET or CLOBBER modifies. */
6531
6532static void
7080f735 6533cse_set_around_loop (rtx x, rtx insn, rtx loop_start)
7afe21cc 6534{
7afe21cc 6535 struct table_elt *src_elt;
7afe21cc
RK
6536
6537 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6538 are setting PC or CC0 or whose SET_SRC is already a register. */
6539 if (GET_CODE (x) == SET
6540 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6541 && GET_CODE (SET_SRC (x)) != REG)
6542 {
6543 src_elt = lookup (SET_SRC (x),
6544 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6545 GET_MODE (SET_DEST (x)));
6546
6547 if (src_elt)
6548 for (src_elt = src_elt->first_same_value; src_elt;
6549 src_elt = src_elt->next_same_value)
6550 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6551 && COST (src_elt->exp) < COST (SET_SRC (x)))
6552 {
6553 rtx p, set;
6554
6555 /* Look for an insn in front of LOOP_START that sets
6556 something in the desired mode to SET_SRC (x) before we hit
6557 a label or CALL_INSN. */
6558
6559 for (p = prev_nonnote_insn (loop_start);
6560 p && GET_CODE (p) != CALL_INSN
6561 && GET_CODE (p) != CODE_LABEL;
6562 p = prev_nonnote_insn (p))
6563 if ((set = single_set (p)) != 0
6564 && GET_CODE (SET_DEST (set)) == REG
6565 && GET_MODE (SET_DEST (set)) == src_elt->mode
6566 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6567 {
6568 /* We now have to ensure that nothing between P
6569 and LOOP_START modified anything referenced in
6570 SET_SRC (x). We know that nothing within the loop
6571 can modify it, or we would have invalidated it in
6572 the hash table. */
6573 rtx q;
84832317 6574 rtx cse_check_loop_start_value = SET_SRC (x);
7afe21cc 6575 for (q = p; q != loop_start; q = NEXT_INSN (q))
2c3c49de 6576 if (INSN_P (q))
84832317
MM
6577 note_stores (PATTERN (q),
6578 cse_check_loop_start,
6579 &cse_check_loop_start_value);
7afe21cc
RK
6580
6581 /* If nothing was changed and we can replace our
6582 SET_SRC, add an insn after P to copy its destination
6583 to what we will be replacing SET_SRC with. */
6584 if (cse_check_loop_start_value
26771da7
JH
6585 && single_set (p)
6586 && !can_throw_internal (insn)
7afe21cc
RK
6587 && validate_change (insn, &SET_SRC (x),
6588 src_elt->exp, 0))
e89d3e6f
R
6589 {
6590 /* If this creates new pseudos, this is unsafe,
6591 because the regno of new pseudo is unsuitable
6592 to index into reg_qty when cse_insn processes
6593 the new insn. Therefore, if a new pseudo was
6594 created, discard this optimization. */
6595 int nregs = max_reg_num ();
6596 rtx move
6597 = gen_move_insn (src_elt->exp, SET_DEST (set));
6598 if (nregs != max_reg_num ())
6599 {
6600 if (! validate_change (insn, &SET_SRC (x),
6601 SET_SRC (set), 0))
6602 abort ();
6603 }
6604 else
9ebfd78b 6605 {
f3ea0706
R
6606 if (CONSTANT_P (SET_SRC (set))
6607 && ! find_reg_equal_equiv_note (insn))
6608 set_unique_reg_note (insn, REG_EQUAL,
6609 SET_SRC (set));
9ebfd78b
EB
6610 if (control_flow_insn_p (p))
6611 /* p can cause a control flow transfer so it
6612 is the last insn of a basic block. We can't
6613 therefore use emit_insn_after. */
6614 emit_insn_before (move, next_nonnote_insn (p));
6615 else
6616 emit_insn_after (move, p);
6617 }
e89d3e6f 6618 }
7afe21cc
RK
6619 break;
6620 }
6621 }
6622 }
6623
14a774a9
RK
6624 /* Deal with the destination of X affecting the stack pointer. */
6625 addr_affects_sp_p (SET_DEST (x));
7afe21cc 6626
14a774a9
RK
6627 /* See comment on similar code in cse_insn for explanation of these
6628 tests. */
7afe21cc 6629 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
9ae8ffe7 6630 || GET_CODE (SET_DEST (x)) == MEM)
bb4034b3 6631 invalidate (SET_DEST (x), VOIDmode);
2708da92
RS
6632 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6633 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
bb4034b3 6634 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
7afe21cc
RK
6635}
6636\f
6637/* Find the end of INSN's basic block and return its range,
6638 the total number of SETs in all the insns of the block, the last insn of the
6639 block, and the branch path.
6640
da7d8304 6641 The branch path indicates which branches should be followed. If a nonzero
7afe21cc
RK
6642 path size is specified, the block should be rescanned and a different set
6643 of branches will be taken. The branch path is only used if
da7d8304 6644 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
7afe21cc
RK
6645
6646 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6647 used to describe the block. It is filled in with the information about
6648 the current block. The incoming structure's branch path, if any, is used
6649 to construct the output branch path. */
6650
86caf04d 6651static void
7080f735
AJ
6652cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
6653 int follow_jumps, int after_loop, int skip_blocks)
7afe21cc
RK
6654{
6655 rtx p = insn, q;
6656 int nsets = 0;
6657 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
2c3c49de 6658 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
7afe21cc
RK
6659 int path_size = data->path_size;
6660 int path_entry = 0;
6661 int i;
6662
6663 /* Update the previous branch path, if any. If the last branch was
6664 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
6665 shorten the path by one and look at the previous branch. We know that
da7d8304 6666 at least one branch must have been taken if PATH_SIZE is nonzero. */
7afe21cc
RK
6667 while (path_size > 0)
6668 {
8b3686ed 6669 if (data->path[path_size - 1].status != NOT_TAKEN)
7afe21cc
RK
6670 {
6671 data->path[path_size - 1].status = NOT_TAKEN;
6672 break;
6673 }
6674 else
6675 path_size--;
6676 }
6677
16b702cd
MM
6678 /* If the first instruction is marked with QImode, that means we've
6679 already processed this block. Our caller will look at DATA->LAST
6680 to figure out where to go next. We want to return the next block
6681 in the instruction stream, not some branched-to block somewhere
6682 else. We accomplish this by pretending our called forbid us to
6683 follow jumps, or skip blocks. */
6684 if (GET_MODE (insn) == QImode)
6685 follow_jumps = skip_blocks = 0;
6686
7afe21cc
RK
6687 /* Scan to end of this basic block. */
6688 while (p && GET_CODE (p) != CODE_LABEL)
6689 {
6690 /* Don't cse out the end of a loop. This makes a difference
6691 only for the unusual loops that always execute at least once;
6692 all other loops have labels there so we will stop in any case.
6693 Cse'ing out the end of the loop is dangerous because it
6694 might cause an invariant expression inside the loop
6695 to be reused after the end of the loop. This would make it
6696 hard to move the expression out of the loop in loop.c,
6697 especially if it is one of several equivalent expressions
6698 and loop.c would like to eliminate it.
6699
6700 If we are running after loop.c has finished, we can ignore
6701 the NOTE_INSN_LOOP_END. */
6702
6703 if (! after_loop && GET_CODE (p) == NOTE
6704 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6705 break;
6706
8aeea6e6 6707 /* Don't cse over a call to setjmp; on some machines (eg VAX)
7afe21cc
RK
6708 the regs restored by the longjmp come from
6709 a later time than the setjmp. */
570a98eb
JH
6710 if (PREV_INSN (p) && GET_CODE (PREV_INSN (p)) == CALL_INSN
6711 && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
7afe21cc
RK
6712 break;
6713
6714 /* A PARALLEL can have lots of SETs in it,
6715 especially if it is really an ASM_OPERANDS. */
2c3c49de 6716 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
7afe21cc
RK
6717 nsets += XVECLEN (PATTERN (p), 0);
6718 else if (GET_CODE (p) != NOTE)
6719 nsets += 1;
278a83b2 6720
164c8956
RK
6721 /* Ignore insns made by CSE; they cannot affect the boundaries of
6722 the basic block. */
6723
6724 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
8b3686ed 6725 high_cuid = INSN_CUID (p);
164c8956
RK
6726 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6727 low_cuid = INSN_CUID (p);
7afe21cc
RK
6728
6729 /* See if this insn is in our branch path. If it is and we are to
6730 take it, do so. */
6731 if (path_entry < path_size && data->path[path_entry].branch == p)
6732 {
8b3686ed 6733 if (data->path[path_entry].status != NOT_TAKEN)
7afe21cc 6734 p = JUMP_LABEL (p);
278a83b2 6735
7afe21cc
RK
6736 /* Point to next entry in path, if any. */
6737 path_entry++;
6738 }
6739
6740 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6741 was specified, we haven't reached our maximum path length, there are
6742 insns following the target of the jump, this is the only use of the
8b3686ed
RK
6743 jump label, and the target label is preceded by a BARRIER.
6744
6745 Alternatively, we can follow the jump if it branches around a
6746 block of code and there are no other branches into the block.
6747 In this case invalidate_skipped_block will be called to invalidate any
6748 registers set in the block when following the jump. */
6749
9bf8cfbf 6750 else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
7afe21cc 6751 && GET_CODE (p) == JUMP_INSN
278a83b2 6752 && GET_CODE (PATTERN (p)) == SET
7afe21cc 6753 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
85c3ba60 6754 && JUMP_LABEL (p) != 0
7afe21cc
RK
6755 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6756 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6757 {
6758 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6759 if ((GET_CODE (q) != NOTE
278a83b2 6760 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
570a98eb
JH
6761 || (PREV_INSN (q) && GET_CODE (PREV_INSN (q)) == CALL_INSN
6762 && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
278a83b2 6763 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
7afe21cc
RK
6764 break;
6765
6766 /* If we ran into a BARRIER, this code is an extension of the
6767 basic block when the branch is taken. */
8b3686ed 6768 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
7afe21cc
RK
6769 {
6770 /* Don't allow ourself to keep walking around an
6771 always-executed loop. */
fc3ffe83
RK
6772 if (next_real_insn (q) == next)
6773 {
6774 p = NEXT_INSN (p);
6775 continue;
6776 }
7afe21cc
RK
6777
6778 /* Similarly, don't put a branch in our path more than once. */
6779 for (i = 0; i < path_entry; i++)
6780 if (data->path[i].branch == p)
6781 break;
6782
6783 if (i != path_entry)
6784 break;
6785
6786 data->path[path_entry].branch = p;
6787 data->path[path_entry++].status = TAKEN;
6788
6789 /* This branch now ends our path. It was possible that we
6790 didn't see this branch the last time around (when the
6791 insn in front of the target was a JUMP_INSN that was
6792 turned into a no-op). */
6793 path_size = path_entry;
6794
6795 p = JUMP_LABEL (p);
6796 /* Mark block so we won't scan it again later. */
6797 PUT_MODE (NEXT_INSN (p), QImode);
6798 }
8b3686ed
RK
6799 /* Detect a branch around a block of code. */
6800 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
6801 {
b3694847 6802 rtx tmp;
8b3686ed 6803
fc3ffe83
RK
6804 if (next_real_insn (q) == next)
6805 {
6806 p = NEXT_INSN (p);
6807 continue;
6808 }
8b3686ed
RK
6809
6810 for (i = 0; i < path_entry; i++)
6811 if (data->path[i].branch == p)
6812 break;
6813
6814 if (i != path_entry)
6815 break;
6816
6817 /* This is no_labels_between_p (p, q) with an added check for
6818 reaching the end of a function (in case Q precedes P). */
6819 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6820 if (GET_CODE (tmp) == CODE_LABEL)
6821 break;
278a83b2 6822
8b3686ed
RK
6823 if (tmp == q)
6824 {
6825 data->path[path_entry].branch = p;
6826 data->path[path_entry++].status = AROUND;
6827
6828 path_size = path_entry;
6829
6830 p = JUMP_LABEL (p);
6831 /* Mark block so we won't scan it again later. */
6832 PUT_MODE (NEXT_INSN (p), QImode);
6833 }
6834 }
7afe21cc 6835 }
7afe21cc
RK
6836 p = NEXT_INSN (p);
6837 }
6838
6839 data->low_cuid = low_cuid;
6840 data->high_cuid = high_cuid;
6841 data->nsets = nsets;
6842 data->last = p;
6843
6844 /* If all jumps in the path are not taken, set our path length to zero
6845 so a rescan won't be done. */
6846 for (i = path_size - 1; i >= 0; i--)
8b3686ed 6847 if (data->path[i].status != NOT_TAKEN)
7afe21cc
RK
6848 break;
6849
6850 if (i == -1)
6851 data->path_size = 0;
6852 else
6853 data->path_size = path_size;
6854
6855 /* End the current branch path. */
6856 data->path[path_size].branch = 0;
6857}
6858\f
7afe21cc
RK
6859/* Perform cse on the instructions of a function.
6860 F is the first instruction.
6861 NREGS is one plus the highest pseudo-reg number used in the instruction.
6862
6863 AFTER_LOOP is 1 if this is the cse call done after loop optimization
6864 (only if -frerun-cse-after-loop).
6865
6866 Returns 1 if jump_optimize should be redone due to simplifications
6867 in conditional jump instructions. */
6868
6869int
7080f735 6870cse_main (rtx f, int nregs, int after_loop, FILE *file)
7afe21cc
RK
6871{
6872 struct cse_basic_block_data val;
b3694847
SS
6873 rtx insn = f;
6874 int i;
7afe21cc 6875
9bf8cfbf
ZD
6876 val.path = xmalloc (sizeof (struct branch_path)
6877 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6878
7afe21cc 6879 cse_jumps_altered = 0;
a5dfb4ee 6880 recorded_label_ref = 0;
7afe21cc 6881 constant_pool_entries_cost = 0;
dd0ba281 6882 constant_pool_entries_regcost = 0;
7afe21cc 6883 val.path_size = 0;
4de249d9 6884 gen_lowpart = gen_lowpart_if_possible;
7afe21cc
RK
6885
6886 init_recog ();
9ae8ffe7 6887 init_alias_analysis ();
7afe21cc
RK
6888
6889 max_reg = nregs;
6890
556c714b
JW
6891 max_insn_uid = get_max_uid ();
6892
703ad42b 6893 reg_eqv_table = xmalloc (nregs * sizeof (struct reg_eqv_elem));
7afe21cc 6894
7bac1be0
RK
6895#ifdef LOAD_EXTEND_OP
6896
6897 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
6898 and change the code and mode as appropriate. */
38a448ca 6899 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
7bac1be0
RK
6900#endif
6901
1f8f4a0b
MM
6902 /* Reset the counter indicating how many elements have been made
6903 thus far. */
7afe21cc
RK
6904 n_elements_made = 0;
6905
6906 /* Find the largest uid. */
6907
164c8956 6908 max_uid = get_max_uid ();
703ad42b 6909 uid_cuid = xcalloc (max_uid + 1, sizeof (int));
7afe21cc
RK
6910
6911 /* Compute the mapping from uids to cuids.
6912 CUIDs are numbers assigned to insns, like uids,
6913 except that cuids increase monotonically through the code.
6914 Don't assign cuids to line-number NOTEs, so that the distance in cuids
6915 between two insns is not affected by -g. */
6916
6917 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
6918 {
6919 if (GET_CODE (insn) != NOTE
6920 || NOTE_LINE_NUMBER (insn) < 0)
6921 INSN_CUID (insn) = ++i;
6922 else
6923 /* Give a line number note the same cuid as preceding insn. */
6924 INSN_CUID (insn) = i;
6925 }
6926
1f8f4a0b 6927 ggc_push_context ();
1497faf6 6928
7afe21cc
RK
6929 /* Loop over basic blocks.
6930 Compute the maximum number of qty's needed for each basic block
6931 (which is 2 for each SET). */
6932 insn = f;
6933 while (insn)
6934 {
4eadede7 6935 cse_altered = 0;
8b3686ed
RK
6936 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
6937 flag_cse_skip_blocks);
7afe21cc
RK
6938
6939 /* If this basic block was already processed or has no sets, skip it. */
6940 if (val.nsets == 0 || GET_MODE (insn) == QImode)
6941 {
6942 PUT_MODE (insn, VOIDmode);
6943 insn = (val.last ? NEXT_INSN (val.last) : 0);
6944 val.path_size = 0;
6945 continue;
6946 }
6947
6948 cse_basic_block_start = val.low_cuid;
6949 cse_basic_block_end = val.high_cuid;
6950 max_qty = val.nsets * 2;
278a83b2 6951
7afe21cc 6952 if (file)
ab87f8c8 6953 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
7afe21cc
RK
6954 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
6955 val.nsets);
6956
6957 /* Make MAX_QTY bigger to give us room to optimize
6958 past the end of this basic block, if that should prove useful. */
6959 if (max_qty < 500)
6960 max_qty = 500;
6961
6962 max_qty += max_reg;
6963
6964 /* If this basic block is being extended by following certain jumps,
6965 (see `cse_end_of_basic_block'), we reprocess the code from the start.
6966 Otherwise, we start after this basic block. */
6967 if (val.path_size > 0)
278a83b2 6968 cse_basic_block (insn, val.last, val.path, 0);
7afe21cc
RK
6969 else
6970 {
6971 int old_cse_jumps_altered = cse_jumps_altered;
6972 rtx temp;
6973
6974 /* When cse changes a conditional jump to an unconditional
6975 jump, we want to reprocess the block, since it will give
6976 us a new branch path to investigate. */
6977 cse_jumps_altered = 0;
6978 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
8b3686ed
RK
6979 if (cse_jumps_altered == 0
6980 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7afe21cc
RK
6981 insn = temp;
6982
6983 cse_jumps_altered |= old_cse_jumps_altered;
6984 }
6985
1f8f4a0b 6986 if (cse_altered)
1497faf6
RH
6987 ggc_collect ();
6988
7afe21cc
RK
6989#ifdef USE_C_ALLOCA
6990 alloca (0);
6991#endif
6992 }
6993
1f8f4a0b 6994 ggc_pop_context ();
1497faf6 6995
7afe21cc
RK
6996 if (max_elements_made < n_elements_made)
6997 max_elements_made = n_elements_made;
6998
e05e2395
MM
6999 /* Clean up. */
7000 end_alias_analysis ();
75c6bd46 7001 free (uid_cuid);
1bb98cec 7002 free (reg_eqv_table);
9bf8cfbf 7003 free (val.path);
4de249d9 7004 gen_lowpart = gen_lowpart_general;
e05e2395 7005
a5dfb4ee 7006 return cse_jumps_altered || recorded_label_ref;
7afe21cc
RK
7007}
7008
/* Process a single basic block.  FROM and TO are the limits of the basic
   block.  NEXT_BRANCH points to the branch path when following jumps or
   a null path when not following jumps.

   AROUND_LOOP is nonzero if we are to try to cse around to the start of a
   loop.  This is true when we are being called for the last time on a
   block and this CSE pass is before loop.c.

   Returns the insn following the processed region (or 0 when the scan
   ran off the end of the function).  Allocates QTY_TABLE on entry and
   frees it on every exit path.  */

static rtx
cse_basic_block (rtx from, rtx to, struct branch_path *next_branch,
		 int around_loop)
{
  rtx insn;
  int to_usage = 0;
  rtx libcall_insn = NULL_RTX;
  int num_insns = 0;
  /* State for REG_NO_CONFLICT block tracking: 1 while inside such a
     block, -1 on its closing REG_RETVAL insn, 0 otherwise.  */
  int no_conflict = 0;

  /* This array is undefined before max_reg, so only allocate
     the space actually needed and adjust the start.  */

  qty_table = xmalloc ((max_qty - max_reg) * sizeof (struct qty_table_elem));
  qty_table -= max_reg;

  new_basic_block ();

  /* TO might be a label.  If so, protect it from being deleted.  */
  if (to != 0 && GET_CODE (to) == CODE_LABEL)
    ++LABEL_NUSES (to);

  for (insn = from; insn != to; insn = NEXT_INSN (insn))
    {
      enum rtx_code code = GET_CODE (insn);

      /* If we have processed 1,000 insns, flush the hash table to
	 avoid extreme quadratic behavior.  We must not include NOTEs
	 in the count since there may be more of them when generating
	 debugging information.  If we clear the table at different
	 times, code generated with -g -O might be different than code
	 generated with -O but not -g.

	 ??? This is a real kludge and needs to be done some other way.
	 Perhaps for 2.9.  */
      if (code != NOTE && num_insns++ > 1000)
	{
	  flush_hash_table ();
	  num_insns = 0;
	}

      /* See if this is a branch that is part of the path.  If so, and it is
	 to be taken, do so.  */
      if (next_branch->branch == insn)
	{
	  enum taken status = next_branch++->status;
	  if (status != NOT_TAKEN)
	    {
	      if (status == TAKEN)
		record_jump_equiv (insn, 1);
	      else
		invalidate_skipped_block (NEXT_INSN (insn));

	      /* Set the last insn as the jump insn; it doesn't affect cc0.
		 Then follow this branch.  */
#ifdef HAVE_cc0
	      prev_insn_cc0 = 0;
	      prev_insn = insn;
#endif
	      insn = JUMP_LABEL (insn);
	      continue;
	    }
	}

      if (GET_MODE (insn) == QImode)
	PUT_MODE (insn, VOIDmode);

      if (GET_RTX_CLASS (code) == RTX_INSN)
	{
	  rtx p;

	  /* Process notes first so we have all notes in canonical forms when
	     looking for duplicate operations.  */

	  if (REG_NOTES (insn))
	    REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);

	  /* Track when we are inside a LIBCALL block.  Inside such a block,
	     we do not want to record destinations.  The last insn of a
	     LIBCALL block is not considered to be part of the block, since
	     its destination is the result of the block and hence should be
	     recorded.  */

	  if (REG_NOTES (insn) != 0)
	    {
	      if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
		libcall_insn = XEXP (p, 0);
	      else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
		{
		  /* Keep libcall_insn for the last SET insn of a no-conflict
		     block to prevent changing the destination.  */
		  if (! no_conflict)
		    libcall_insn = 0;
		  else
		    no_conflict = -1;
		}
	      else if (find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
		no_conflict = 1;
	    }

	  cse_insn (insn, libcall_insn);

	  /* The no-conflict block ended on this insn; reset the
	     libcall state now that cse_insn has seen it.  */
	  if (no_conflict == -1)
	    {
	      libcall_insn = 0;
	      no_conflict = 0;
	    }

	  /* If we haven't already found an insn where we added a LABEL_REF,
	     check this one.  */
	  if (GET_CODE (insn) == INSN && ! recorded_label_ref
	      && for_each_rtx (&PATTERN (insn), check_for_label_ref,
			       (void *) insn))
	    recorded_label_ref = 1;
	}

      /* If INSN is now an unconditional jump, skip to the end of our
	 basic block by pretending that we just did the last insn in the
	 basic block.  If we are jumping to the end of our block, show
	 that we can have one usage of TO.  */

      if (any_uncondjump_p (insn))
	{
	  if (to == 0)
	    {
	      free (qty_table + max_reg);
	      return 0;
	    }

	  if (JUMP_LABEL (insn) == to)
	    to_usage = 1;

	  /* Maybe TO was deleted because the jump is unconditional.
	     If so, there is nothing left in this basic block.  */
	  /* ??? Perhaps it would be smarter to set TO
	     to whatever follows this insn,
	     and pretend the basic block had always ended here.  */
	  if (INSN_DELETED_P (to))
	    break;

	  insn = PREV_INSN (to);
	}

      /* See if it is ok to keep on going past the label
	 which used to end our basic block.  Remember that we incremented
	 the count of that label, so we decrement it here.  If we made
	 a jump unconditional, TO_USAGE will be one; in that case, we don't
	 want to count the use in that jump.  */

      if (to != 0 && NEXT_INSN (insn) == to
	  && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
	{
	  struct cse_basic_block_data val;
	  rtx prev;

	  insn = NEXT_INSN (to);

	  /* If TO was the last insn in the function, we are done.  */
	  if (insn == 0)
	    {
	      free (qty_table + max_reg);
	      return 0;
	    }

	  /* If TO was preceded by a BARRIER we are done with this block
	     because it has no continuation.  */
	  prev = prev_nonnote_insn (to);
	  if (prev && GET_CODE (prev) == BARRIER)
	    {
	      free (qty_table + max_reg);
	      return insn;
	    }

	  /* Find the end of the following block.  Note that we won't be
	     following branches in this case.  */
	  to_usage = 0;
	  val.path_size = 0;
	  val.path = xmalloc (sizeof (struct branch_path)
			      * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
	  cse_end_of_basic_block (insn, &val, 0, 0, 0);
	  free (val.path);

	  /* If the tables we allocated have enough space left
	     to handle all the SETs in the next basic block,
	     continue through it.  Otherwise, return,
	     and that block will be scanned individually.  */
	  if (val.nsets * 2 + next_qty > max_qty)
	    break;

	  cse_basic_block_start = val.low_cuid;
	  cse_basic_block_end = val.high_cuid;
	  to = val.last;

	  /* Prevent TO from being deleted if it is a label.  */
	  if (to != 0 && GET_CODE (to) == CODE_LABEL)
	    ++LABEL_NUSES (to);

	  /* Back up so we process the first insn in the extension.  */
	  insn = PREV_INSN (insn);
	}
    }

  if (next_qty > max_qty)
    abort ();

  /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
     the previous insn is the only insn that branches to the head of a loop,
     we can cse into the loop.  Don't do this if we changed the jump
     structure of a loop unless we aren't going to be following jumps.  */

  insn = prev_nonnote_insn (to);
  if ((cse_jumps_altered == 0
       || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
      && around_loop && to != 0
      && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
      && GET_CODE (insn) == JUMP_INSN
      && JUMP_LABEL (insn) != 0
      && LABEL_NUSES (JUMP_LABEL (insn)) == 1)
    cse_around_loop (JUMP_LABEL (insn));

  free (qty_table + max_reg);

  return to ? NEXT_INSN (to) : 0;
}

be8ac49a 7242/* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
45c23566 7243 there isn't a REG_LABEL note. Return one if so. DATA is the insn. */
be8ac49a
RK
7244
7245static int
7080f735 7246check_for_label_ref (rtx *rtl, void *data)
be8ac49a
RK
7247{
7248 rtx insn = (rtx) data;
7249
7250 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7251 we must rerun jump since it needs to place the note. If this is a
7252 LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
ec5c56db 7253 since no REG_LABEL will be added. */
be8ac49a 7254 return (GET_CODE (*rtl) == LABEL_REF
45c23566 7255 && ! LABEL_REF_NONLOCAL_P (*rtl)
4838c5ee 7256 && LABEL_P (XEXP (*rtl, 0))
be8ac49a
RK
7257 && INSN_UID (XEXP (*rtl, 0)) != 0
7258 && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7259}
7260\f
/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count, INCR is how much
   we count each register usage.  Passing a negative INCR undoes a
   previous count (used when an insn is about to change or die).  */

static void
count_reg_usage (rtx x, int *counts, int incr)
{
  enum rtx_code code;
  rtx note;
  const char *fmt;
  int i, j;

  if (x == 0)
    return;

  switch (code = GET_CODE (x))
    {
    case REG:
      counts[REGNO (x)] += incr;
      return;

    /* Constants and other leaf rtx contain no register uses.  */
    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  A clobbered REG itself is a set, not a use.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, incr);
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.  */
      if (GET_CODE (SET_DEST (x)) != REG)
	count_reg_usage (SET_DEST (x), counts, incr);
      count_reg_usage (SET_SRC (x), counts, incr);
      return;

    case CALL_INSN:
      count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, incr);
      /* Fall through.  */

    case INSN:
    case JUMP_INSN:
      count_reg_usage (PATTERN (x), counts, incr);

      /* Things used in a REG_EQUAL note aren't dead since loop may try to
	 use them.  */

      note = find_reg_equal_equiv_note (x);
      if (note)
	{
	  rtx eqv = XEXP (note, 0);

	  if (GET_CODE (eqv) == EXPR_LIST)
	  /* This REG_EQUAL note describes the result of a function call.
	     Process all the arguments.  */
	    do
	      {
		count_reg_usage (XEXP (eqv, 0), counts, incr);
		eqv = XEXP (eqv, 1);
	      }
	    while (eqv && GET_CODE (eqv) == EXPR_LIST);
	  else
	    count_reg_usage (eqv, counts, incr);
	}
      return;

    case EXPR_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL
	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
	  /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
	     involving registers in the address.  */
	  || GET_CODE (XEXP (x, 0)) == CLOBBER)
	count_reg_usage (XEXP (x, 0), counts, incr);

      count_reg_usage (XEXP (x, 1), counts, incr);
      return;

    case ASM_OPERANDS:
      /* Iterate over just the inputs, not the constraints as well.  */
      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, incr);
      return;

    case INSN_LIST:
      /* INSN_LIST rtx should never reach here; the note walker above
	 handles the list forms we expect.  */
      abort ();

    default:
      break;
    }

  /* Generic case: recurse into every rtx ('e') and rtx vector ('E')
     operand of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	count_reg_usage (XEXP (x, i), counts, incr);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  count_reg_usage (XVECEXP (x, i, j), counts, incr);
    }
}

/* Return true if SET, a single SET found in INSN, is live; i.e. its
   destination is still used according to COUNTS (indexed by regno),
   or deleting it could change behavior.  */
static bool
set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
	    int *counts)
{
#ifdef HAVE_cc0
  rtx tem;
#endif

  /* A no-op set falls through to the final `return false'.  */
  if (set_noop_p (set))
    ;

#ifdef HAVE_cc0
  /* A set of cc0 is dead when the next real insn does not look at cc0
     and the source has no side effects.  */
  else if (GET_CODE (SET_DEST (set)) == CC0
	   && !side_effects_p (SET_SRC (set))
	   && ((tem = next_nonnote_insn (insn)) == 0
	       || !INSN_P (tem)
	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
    return false;
#endif
  /* Live if the destination is not a pseudo with a zero use count, or
     if the source has side effects we must preserve.  */
  else if (GET_CODE (SET_DEST (set)) != REG
	   || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
	   || counts[REGNO (SET_DEST (set))] != 0
	   || side_effects_p (SET_SRC (set))
	   /* An ADDRESSOF expression can turn into a use of the
	      internal arg pointer, so always consider the
	      internal arg pointer live.  If it is truly dead,
	      flow will delete the initializing insn.  */
	   || (SET_DEST (set) == current_function_internal_arg_pointer))
    return true;
  return false;
}

7404/* Return true if insn is live. */
7405
7406static bool
7080f735 7407insn_live_p (rtx insn, int *counts)
4793dca1
JH
7408{
7409 int i;
a3745024 7410 if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
a646f6cc
AH
7411 return true;
7412 else if (GET_CODE (PATTERN (insn)) == SET)
0021de69 7413 return set_live_p (PATTERN (insn), insn, counts);
4793dca1 7414 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
0021de69
DB
7415 {
7416 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7417 {
7418 rtx elt = XVECEXP (PATTERN (insn), 0, i);
4793dca1 7419
0021de69
DB
7420 if (GET_CODE (elt) == SET)
7421 {
7422 if (set_live_p (elt, insn, counts))
7423 return true;
7424 }
7425 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7426 return true;
7427 }
7428 return false;
7429 }
4793dca1
JH
7430 else
7431 return true;
7432}
7433
/* Return true if libcall is dead as a whole.  INSN is the REG_RETVAL
   insn closing the libcall block; COUNTS is the register use-count
   array maintained by count_reg_usage.  As a side effect, when the
   libcall result can be replaced by its REG_EQUAL expression, INSN is
   rewritten in place and its REG_RETVAL/REG_EQUAL notes removed.  */

static bool
dead_libcall_p (rtx insn, int *counts)
{
  rtx note, set, new;

  /* See if there's a REG_EQUAL note on this insn and try to
     replace the source with the REG_EQUAL expression.

     We assume that insns with REG_RETVALs can only be reg->reg
     copies at this point.  */
  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
  if (!note)
    return false;

  set = single_set (insn);
  if (!set)
    return false;

  new = simplify_rtx (XEXP (note, 0));
  if (!new)
    new = XEXP (note, 0);

  /* While changing insn, we must update the counts accordingly.
     Remove the old uses now; they are re-added below once the final
     form of the insn is known.  */
  count_reg_usage (insn, counts, -1);

  if (validate_change (insn, &SET_SRC (set), new, 0))
    {
      count_reg_usage (insn, counts, 1);
      remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
      remove_note (insn, note);
      return true;
    }

  /* The equivalent expression was rejected as an operand; if it is a
     constant, retry with the constant forced into the constant pool.  */
  if (CONSTANT_P (new))
    {
      new = force_const_mem (GET_MODE (SET_DEST (set)), new);
      if (new && validate_change (insn, &SET_SRC (set), new, 0))
	{
	  count_reg_usage (insn, counts, 1);
	  remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
	  remove_note (insn, note);
	  return true;
	}
    }

  /* No replacement was possible; restore the use counts unchanged.  */
  count_reg_usage (insn, counts, 1);
  return false;
}

/* Scan all the insns and delete any that are dead; i.e., they store a register
   that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in loop since it won't try to
   move dead invariants out of loops or make givs for dead quantities.  The
   remaining passes of the compilation are also sped up.

   INSNS is the insn chain; NREG is the number of registers (the size of
   the use-count array).  Returns the number of insns deleted.  */

int
delete_trivially_dead_insns (rtx insns, int nreg)
{
  int *counts;
  rtx insn, prev;
  int in_libcall = 0, dead_libcall = 0;
  int ndead = 0, nlastdead, niterations = 0;

  timevar_push (TV_DELETE_TRIVIALLY_DEAD);
  /* First count the number of times each register is used.  */
  counts = xcalloc (nreg, sizeof (int));
  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
    count_reg_usage (insn, counts, 1);

  /* Iterate to a fixed point: deleting an insn can make the insns that
     fed it dead in turn, so repeat until a pass deletes nothing new.  */
  do
    {
      nlastdead = ndead;
      niterations++;
      /* Go from the last insn to the first and delete insns that only set unused
	 registers or copy a register to itself.  As we delete an insn, remove
	 usage counts for registers it uses.

	 The first jump optimization pass may leave a real insn as the last
	 insn in the function.   We must not skip that insn or we may end
	 up deleting code that is not really dead.  */
      insn = get_last_insn ();
      if (! INSN_P (insn))
	insn = prev_real_insn (insn);

      for (; insn; insn = prev)
	{
	  int live_insn = 0;

	  prev = prev_real_insn (insn);

	  /* Don't delete any insns that are part of a libcall block unless
	     we can delete the whole libcall block.

	     Flow or loop might get confused if we did that.  Remember
	     that we are scanning backwards.  */
	  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
	    {
	      in_libcall = 1;
	      live_insn = 1;
	      dead_libcall = dead_libcall_p (insn, counts);
	    }
	  else if (in_libcall)
	    live_insn = ! dead_libcall;
	  else
	    live_insn = insn_live_p (insn, counts);

	  /* If this is a dead insn, delete it and show registers in it aren't
	     being used.  */

	  if (! live_insn)
	    {
	      count_reg_usage (insn, counts, -1);
	      delete_insn_and_edges (insn);
	      ndead++;
	    }

	  /* Scanning backwards, a REG_LIBCALL note marks the start of the
	     libcall block, i.e. the end of our in_libcall region.  */
	  if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	    {
	      in_libcall = 0;
	      dead_libcall = 0;
	    }
	}
    }
  while (ndead != nlastdead);

  if (dump_file && ndead)
    fprintf (dump_file, "Deleted %i trivially dead insns; %i iterations\n",
	     ndead, niterations);
  /* Clean up.  */
  free (counts);
  timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
  return ndead;
}

e129d93a
ILT
7571
7572/* This function is called via for_each_rtx. The argument, NEWREG, is
7573 a condition code register with the desired mode. If we are looking
7574 at the same register in a different mode, replace it with
7575 NEWREG. */
7576
7577static int
7578cse_change_cc_mode (rtx *loc, void *data)
7579{
7580 rtx newreg = (rtx) data;
7581
7582 if (*loc
7583 && GET_CODE (*loc) == REG
7584 && REGNO (*loc) == REGNO (newreg)
7585 && GET_MODE (*loc) != GET_MODE (newreg))
7586 {
7587 *loc = newreg;
7588 return -1;
7589 }
7590 return 0;
7591}
7592
7593/* Change the mode of any reference to the register REGNO (NEWREG) to
7594 GET_MODE (NEWREG), starting at START. Stop before END. Stop at
2e802a6f 7595 any instruction which modifies NEWREG. */
e129d93a
ILT
7596
7597static void
7598cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
7599{
7600 rtx insn;
7601
7602 for (insn = start; insn != end; insn = NEXT_INSN (insn))
7603 {
7604 if (! INSN_P (insn))
7605 continue;
7606
2e802a6f 7607 if (reg_set_p (newreg, insn))
e129d93a
ILT
7608 return;
7609
7610 for_each_rtx (&PATTERN (insn), cse_change_cc_mode, newreg);
7611 for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, newreg);
7612 }
7613}
7614
/* BB is a basic block which finishes with CC_REG as a condition code
   register which is set to CC_SRC.  Look through the successors of BB
   to find blocks which have a single predecessor (i.e., this one),
   and look through those blocks for an assignment to CC_REG which is
   equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
   permitted to change the mode of CC_SRC to a compatible mode.  This
   returns VOIDmode if no equivalent assignments were found.
   Otherwise it returns the mode which CC_SRC should wind up with.

   The main complexity in this function is handling the mode issues.
   We may have more than one duplicate which we can eliminate, and we
   try to find a mode which will work for multiple duplicates.  */

static enum machine_mode
cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
{
  bool found_equiv;
  enum machine_mode mode;
  unsigned int insn_count;
  edge e;
  /* Up to two deferred deletions; extra matches beyond that are only
     deleted immediately if they already have the right mode.  */
  rtx insns[2];
  enum machine_mode modes[2];
  rtx last_insns[2];
  unsigned int i;
  rtx newreg;

  /* We expect to have two successors.  Look at both before picking
     the final mode for the comparison.  If we have more successors
     (i.e., some sort of table jump, although that seems unlikely),
     then we require all beyond the first two to use the same
     mode.  */

  found_equiv = false;
  mode = GET_MODE (cc_src);
  insn_count = 0;
  for (e = bb->succ; e; e = e->succ_next)
    {
      rtx insn;
      rtx end;

      if (e->flags & EDGE_COMPLEX)
	continue;

      /* Only look at successors with exactly one predecessor (us),
	 and skip the exit block.  */
      if (! e->dest->pred
	  || e->dest->pred->pred_next
	  || e->dest == EXIT_BLOCK_PTR)
	continue;

      end = NEXT_INSN (BB_END (e->dest));
      for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;

	  /* If CC_SRC is modified, we have to stop looking for
	     something which uses it.  */
	  if (modified_in_p (cc_src, insn))
	    break;

	  /* Check whether INSN sets CC_REG to CC_SRC.  */
	  set = single_set (insn);
	  if (set
	      && GET_CODE (SET_DEST (set)) == REG
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      bool found;
	      enum machine_mode set_mode;
	      enum machine_mode comp_mode;

	      found = false;
	      set_mode = GET_MODE (SET_SRC (set));
	      comp_mode = set_mode;
	      if (rtx_equal_p (cc_src, SET_SRC (set)))
		found = true;
	      else if (GET_CODE (cc_src) == COMPARE
		       && GET_CODE (SET_SRC (set)) == COMPARE
		       && mode != set_mode
		       && rtx_equal_p (XEXP (cc_src, 0),
				       XEXP (SET_SRC (set), 0))
		       && rtx_equal_p (XEXP (cc_src, 1),
				       XEXP (SET_SRC (set), 1)))

		{
		  /* Same comparison, different mode: ask the target
		     whether the two CC modes are compatible.  */
		  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
		  if (comp_mode != VOIDmode
		      && (can_change_mode || comp_mode == mode))
		    found = true;
		}

	      if (found)
		{
		  found_equiv = true;
		  if (insn_count < ARRAY_SIZE (insns))
		    {
		      insns[insn_count] = insn;
		      modes[insn_count] = set_mode;
		      last_insns[insn_count] = end;
		      ++insn_count;

		      if (mode != comp_mode)
			{
			  if (! can_change_mode)
			    abort ();
			  mode = comp_mode;
			  /* Mutate CC_SRC in place; the caller checks
			     the resulting mode against our return.  */
			  PUT_MODE (cc_src, mode);
			}
		    }
		  else
		    {
		      if (set_mode != mode)
			{
			  /* We found a matching expression in the
			     wrong mode, but we don't have room to
			     store it in the array.  Punt.  This case
			     should be rare.  */
			  break;
			}
		      /* INSN sets CC_REG to a value equal to CC_SRC
			 with the right mode.  We can simply delete
			 it.  */
		      delete_insn (insn);
		    }

		  /* We found an instruction to delete.  Keep looking,
		     in the hopes of finding a three-way jump.  */
		  continue;
		}

	      /* We found an instruction which sets the condition
		 code, so don't look any farther.  */
	      break;
	    }

	  /* If INSN sets CC_REG in some other way, don't look any
	     farther.  */
	  if (reg_set_p (cc_reg, insn))
	    break;
	}

      /* If we fell off the bottom of the block, we can keep looking
	 through successors.  We pass CAN_CHANGE_MODE as false because
	 we aren't prepared to handle compatibility between the
	 further blocks and this block.  */
      if (insn == end)
	{
	  enum machine_mode submode;

	  submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
	  if (submode != VOIDmode)
	    {
	      if (submode != mode)
		abort ();
	      found_equiv = true;
	      can_change_mode = false;
	    }
	}
    }

  if (! found_equiv)
    return VOIDmode;

  /* Now INSN_COUNT is the number of instructions we found which set
     CC_REG to a value equivalent to CC_SRC.  The instructions are in
     INSNS.  The modes used by those instructions are in MODES.  */

  newreg = NULL_RTX;
  for (i = 0; i < insn_count; ++i)
    {
      if (modes[i] != mode)
	{
	  /* We need to change the mode of CC_REG in INSNS[i] and
	     subsequent instructions.  */
	  if (! newreg)
	    {
	      if (GET_MODE (cc_reg) == mode)
		newreg = cc_reg;
	      else
		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
	    }
	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
				    newreg);
	}

      delete_insn (insns[i]);
    }

  return mode;
}

/* If we have a fixed condition code register (or two), walk through
   the instructions and try to eliminate duplicate assignments.  */

void
cse_condition_code_reg (void)
{
  unsigned int cc_regno_1;
  unsigned int cc_regno_2;
  rtx cc_reg_1;
  rtx cc_reg_2;
  basic_block bb;

  /* Nothing to do on targets without fixed condition code registers.  */
  if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
    return;

  cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
  if (cc_regno_2 != INVALID_REGNUM)
    cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
  else
    cc_reg_2 = NULL_RTX;

  FOR_EACH_BB (bb)
    {
      rtx last_insn;
      rtx cc_reg;
      rtx insn;
      rtx cc_src_insn;
      rtx cc_src;
      enum machine_mode mode;
      enum machine_mode orig_mode;

      /* Look for blocks which end with a conditional jump based on a
	 condition code register.  Then look for the instruction which
	 sets the condition code register.  Then look through the
	 successor blocks for instructions which set the condition
	 code register to the same value.  There are other possible
	 uses of the condition code register, but these are by far the
	 most common and the ones which we are most likely to be able
	 to optimize.  */

      last_insn = BB_END (bb);
      if (GET_CODE (last_insn) != JUMP_INSN)
	continue;

      if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
	cc_reg = cc_reg_1;
      else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
	cc_reg = cc_reg_2;
      else
	continue;

      /* Scan backwards within BB for the single-set insn that computes
	 CC_REG; give up if CC_REG is set some other way first.  */
      cc_src_insn = NULL_RTX;
      cc_src = NULL_RTX;
      for (insn = PREV_INSN (last_insn);
	   insn && insn != PREV_INSN (BB_HEAD (bb));
	   insn = PREV_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;
	  set = single_set (insn);
	  if (set
	      && GET_CODE (SET_DEST (set)) == REG
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      cc_src_insn = insn;
	      cc_src = SET_SRC (set);
	      break;
	    }
	  else if (reg_set_p (cc_reg, insn))
	    break;
	}

      if (! cc_src_insn)
	continue;

      /* The source must still hold the same value at the end of the
	 block for successor comparisons to be equivalent.  */
      if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
	continue;

      /* Now CC_REG is a condition code register used for a
	 conditional jump at the end of the block, and CC_SRC, in
	 CC_SRC_INSN, is the value to which that condition code
	 register is set, and CC_SRC is still meaningful at the end of
	 the basic block.  */

      orig_mode = GET_MODE (cc_src);
      mode = cse_cc_succs (bb, cc_reg, cc_src, true);
      if (mode != VOIDmode)
	{
	  /* cse_cc_succs mutates CC_SRC's mode in place; its return
	     value must agree with the mutated rtx.  */
	  if (mode != GET_MODE (cc_src))
	    abort ();
	  if (mode != orig_mode)
	    {
	      rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));

	      /* Change the mode of CC_REG in CC_SRC_INSN to
		 GET_MODE (NEWREG).  */
	      for_each_rtx (&PATTERN (cc_src_insn), cse_change_cc_mode,
			    newreg);
	      for_each_rtx (&REG_NOTES (cc_src_insn), cse_change_cc_mode,
			    newreg);

	      /* Do the same in the following insns that use the
		 current value of CC_REG within BB.  */
	      cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
					NEXT_INSN (last_insn),
					newreg);
	    }
	}
    }
}
