]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cse.c
m68k.c (m68k_save_reg): Also save A5 for non-leaf functions when -mid-shared-library...
[thirdparty/gcc.git] / gcc / cse.c
CommitLineData
7afe21cc 1/* Common subexpression elimination for GNU compiler.
5e7b4e25 2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
ad616de1 3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
7afe21cc 4
1322177d 5This file is part of GCC.
7afe21cc 6
1322177d
LB
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
7afe21cc 11
1322177d
LB
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
7afe21cc
RK
16
17You should have received a copy of the GNU General Public License
1322177d
LB
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
7afe21cc 21
7afe21cc 22#include "config.h"
670ee920
KG
23/* stdio.h must precede rtl.h for FFS. */
24#include "system.h"
4977bab6
ZW
25#include "coretypes.h"
26#include "tm.h"
7afe21cc 27#include "rtl.h"
6baf1cc8 28#include "tm_p.h"
7afe21cc 29#include "hard-reg-set.h"
7932a3db 30#include "regs.h"
630c79be 31#include "basic-block.h"
7afe21cc
RK
32#include "flags.h"
33#include "real.h"
34#include "insn-config.h"
35#include "recog.h"
49ad7cfa 36#include "function.h"
956d6950 37#include "expr.h"
50b2596f
KG
38#include "toplev.h"
39#include "output.h"
1497faf6 40#include "ggc.h"
3dec4024 41#include "timevar.h"
26771da7 42#include "except.h"
3c50106f 43#include "target.h"
9bf8cfbf 44#include "params.h"
2f93eea8 45#include "rtlhooks-def.h"
7afe21cc
RK
46
47/* The basic idea of common subexpression elimination is to go
48 through the code, keeping a record of expressions that would
49 have the same value at the current scan point, and replacing
50 expressions encountered with the cheapest equivalent expression.
51
52 It is too complicated to keep track of the different possibilities
e48a7fbe
JL
53 when control paths merge in this code; so, at each label, we forget all
54 that is known and start fresh. This can be described as processing each
55 extended basic block separately. We have a separate pass to perform
56 global CSE.
57
58 Note CSE can turn a conditional or computed jump into a nop or
59 an unconditional jump. When this occurs we arrange to run the jump
60 optimizer after CSE to delete the unreachable code.
7afe21cc
RK
61
62 We use two data structures to record the equivalent expressions:
1bb98cec
DM
63 a hash table for most expressions, and a vector of "quantity
64 numbers" to record equivalent (pseudo) registers.
7afe21cc
RK
65
66 The use of the special data structure for registers is desirable
67 because it is faster. It is possible because registers references
68 contain a fairly small number, the register number, taken from
69 a contiguously allocated series, and two register references are
70 identical if they have the same number. General expressions
71 do not have any such thing, so the only way to retrieve the
72 information recorded on an expression other than a register
73 is to keep it in a hash table.
74
75Registers and "quantity numbers":
278a83b2 76
7afe21cc
RK
77 At the start of each basic block, all of the (hardware and pseudo)
78 registers used in the function are given distinct quantity
79 numbers to indicate their contents. During scan, when the code
80 copies one register into another, we copy the quantity number.
81 When a register is loaded in any other way, we allocate a new
82 quantity number to describe the value generated by this operation.
459281be 83 `REG_QTY (N)' records what quantity register N is currently thought
7afe21cc
RK
84 of as containing.
85
08a69267 86 All real quantity numbers are greater than or equal to zero.
459281be 87 If register N has not been assigned a quantity, `REG_QTY (N)' will
08a69267 88 equal -N - 1, which is always negative.
7afe21cc 89
08a69267
RS
90 Quantity numbers below zero do not exist and none of the `qty_table'
91 entries should be referenced with a negative index.
7afe21cc
RK
92
93 We also maintain a bidirectional chain of registers for each
1bb98cec
DM
94 quantity number. The `qty_table` members `first_reg' and `last_reg',
95 and `reg_eqv_table' members `next' and `prev' hold these chains.
7afe21cc
RK
96
97 The first register in a chain is the one whose lifespan is least local.
98 Among equals, it is the one that was seen first.
99 We replace any equivalent register with that one.
100
101 If two registers have the same quantity number, it must be true that
1bb98cec 102 REG expressions with qty_table `mode' must be in the hash table for both
7afe21cc
RK
103 registers and must be in the same class.
104
105 The converse is not true. Since hard registers may be referenced in
106 any mode, two REG expressions might be equivalent in the hash table
107 but not have the same quantity number if the quantity number of one
108 of the registers is not the same mode as those expressions.
278a83b2 109
7afe21cc
RK
110Constants and quantity numbers
111
112 When a quantity has a known constant value, that value is stored
1bb98cec 113 in the appropriate qty_table `const_rtx'. This is in addition to
7afe21cc
RK
114 putting the constant in the hash table as is usual for non-regs.
115
d45cf215 116 Whether a reg or a constant is preferred is determined by the configuration
7afe21cc
RK
117 macro CONST_COSTS and will often depend on the constant value. In any
118 event, expressions containing constants can be simplified, by fold_rtx.
119
120 When a quantity has a known nearly constant value (such as an address
1bb98cec
DM
121 of a stack slot), that value is stored in the appropriate qty_table
122 `const_rtx'.
7afe21cc
RK
123
124 Integer constants don't have a machine mode. However, cse
125 determines the intended machine mode from the destination
126 of the instruction that moves the constant. The machine mode
127 is recorded in the hash table along with the actual RTL
128 constant expression so that different modes are kept separate.
129
130Other expressions:
131
132 To record known equivalences among expressions in general
133 we use a hash table called `table'. It has a fixed number of buckets
134 that contain chains of `struct table_elt' elements for expressions.
135 These chains connect the elements whose expressions have the same
136 hash codes.
137
138 Other chains through the same elements connect the elements which
139 currently have equivalent values.
140
141 Register references in an expression are canonicalized before hashing
1bb98cec 142 the expression. This is done using `reg_qty' and qty_table `first_reg'.
7afe21cc
RK
143 The hash code of a register reference is computed using the quantity
144 number, not the register number.
145
146 When the value of an expression changes, it is necessary to remove from the
147 hash table not just that expression but all expressions whose values
148 could be different as a result.
149
150 1. If the value changing is in memory, except in special cases
151 ANYTHING referring to memory could be changed. That is because
152 nobody knows where a pointer does not point.
153 The function `invalidate_memory' removes what is necessary.
154
155 The special cases are when the address is constant or is
156 a constant plus a fixed register such as the frame pointer
157 or a static chain pointer. When such addresses are stored in,
158 we can tell exactly which other such addresses must be invalidated
159 due to overlap. `invalidate' does this.
160 All expressions that refer to non-constant
161 memory addresses are also invalidated. `invalidate_memory' does this.
162
163 2. If the value changing is a register, all expressions
164 containing references to that register, and only those,
165 must be removed.
166
167 Because searching the entire hash table for expressions that contain
168 a register is very slow, we try to figure out when it isn't necessary.
169 Precisely, this is necessary only when expressions have been
170 entered in the hash table using this register, and then the value has
171 changed, and then another expression wants to be added to refer to
172 the register's new value. This sequence of circumstances is rare
173 within any one basic block.
174
459281be
KH
175 `REG_TICK' and `REG_IN_TABLE', accessors for members of
176 cse_reg_info, are used to detect this case. REG_TICK (i) is
177 incremented whenever a value is stored in register i.
178 REG_IN_TABLE (i) holds -1 if no references to register i have been
179 entered in the table; otherwise, it contains the value REG_TICK (i)
180 had when the references were entered. If we want to enter a
181 reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
182 remove old references. Until we want to enter a new entry, the
183 mere fact that the two vectors don't match makes the entries be
184 ignored if anyone tries to match them.
7afe21cc
RK
185
186 Registers themselves are entered in the hash table as well as in
459281be
KH
187 the equivalent-register chains. However, `REG_TICK' and
188 `REG_IN_TABLE' do not apply to expressions which are simple
7afe21cc
RK
189 register references. These expressions are removed from the table
190 immediately when they become invalid, and this can be done even if
191 we do not immediately search for all the expressions that refer to
192 the register.
193
194 A CLOBBER rtx in an instruction invalidates its operand for further
195 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
196 invalidates everything that resides in memory.
197
198Related expressions:
199
200 Constant expressions that differ only by an additive integer
201 are called related. When a constant expression is put in
202 the table, the related expression with no constant term
203 is also entered. These are made to point at each other
204 so that it is possible to find out if there exists any
205 register equivalent to an expression related to a given expression. */
278a83b2 206
1bb98cec
DM
207/* Length of qty_table vector. We know in advance we will not need
208 a quantity number this big. */
7afe21cc
RK
209
210static int max_qty;
211
212/* Next quantity number to be allocated.
213 This is 1 + the largest number needed so far. */
214
215static int next_qty;
216
1bb98cec 217/* Per-qty information tracking.
7afe21cc 218
1bb98cec
DM
219 `first_reg' and `last_reg' track the head and tail of the
220 chain of registers which currently contain this quantity.
7afe21cc 221
1bb98cec 222 `mode' contains the machine mode of this quantity.
7afe21cc 223
1bb98cec
DM
224 `const_rtx' holds the rtx of the constant value of this
225 quantity, if known. A summations of the frame/arg pointer
226 and a constant can also be entered here. When this holds
227 a known value, `const_insn' is the insn which stored the
228 constant value.
7afe21cc 229
1bb98cec
DM
230 `comparison_{code,const,qty}' are used to track when a
231 comparison between a quantity and some constant or register has
232 been passed. In such a case, we know the results of the comparison
233 in case we see it again. These members record a comparison that
234 is known to be true. `comparison_code' holds the rtx code of such
235 a comparison, else it is set to UNKNOWN and the other two
236 comparison members are undefined. `comparison_const' holds
237 the constant being compared against, or zero if the comparison
238 is not against a constant. `comparison_qty' holds the quantity
239 being compared against when the result is known. If the comparison
240 is not with a register, `comparison_qty' is -1. */
7afe21cc 241
1bb98cec
DM
/* Per-quantity information; see the "Registers and quantity numbers"
   discussion in the long comment at the top of this file.  */
struct qty_table_elem
{
  /* The rtx of the constant value of this quantity, if known.  A
     summation of the frame/arg pointer and a constant can also be
     entered here.  */
  rtx const_rtx;
  /* When `const_rtx' holds a known value, the insn which stored that
     constant value.  */
  rtx const_insn;
  /* The constant being compared against in a comparison known to be
     true, or zero if the comparison is not against a constant.  */
  rtx comparison_const;
  /* The quantity being compared against when the result is known; -1
     if the comparison is not with a register.  */
  int comparison_qty;
  /* Head and tail of the chain of registers which currently contain
     this quantity (`reg_eqv_table' members `next' and `prev' hold the
     links).  */
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  /* The rtx code of a comparison known to be true, else UNKNOWN, in
     which case the other two comparison members are undefined.  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  /* The machine mode of this quantity.  */
  ENUM_BITFIELD(machine_mode) mode : 8;
};
7afe21cc 254
1bb98cec
DM
255/* The table of all qtys, indexed by qty number. */
256static struct qty_table_elem *qty_table;
7afe21cc 257
fc188d37
AK
/* Structure used to pass arguments via for_each_rtx to function
   cse_change_cc_mode.  */
struct change_cc_mode_args
{
  /* NOTE(review): presumably the insn being rewritten — confirm in
     cse_change_cc_mode, which is not visible here.  */
  rtx insn;
  /* NOTE(review): presumably the replacement CC register — confirm in
     cse_change_cc_mode.  */
  rtx newreg;
};
265
7afe21cc
RK
266#ifdef HAVE_cc0
267/* For machines that have a CC0, we do not record its value in the hash
268 table since its use is guaranteed to be the insn immediately following
269 its definition and any other insn is presumed to invalidate it.
270
271 Instead, we store below the value last assigned to CC0. If it should
272 happen to be a constant, it is stored in preference to the actual
273 assigned value. In case it is a constant, we store the mode in which
274 the constant should be interpreted. */
275
276static rtx prev_insn_cc0;
277static enum machine_mode prev_insn_cc0_mode;
7afe21cc
RK
278
279/* Previous actual insn. 0 if at first insn of basic block. */
280
281static rtx prev_insn;
4977bab6 282#endif
7afe21cc
RK
283
284/* Insn being scanned. */
285
286static rtx this_insn;
287
71d306d1
DE
288/* Index by register number, gives the number of the next (or
289 previous) register in the chain of registers sharing the same
7afe21cc
RK
290 value.
291
292 Or -1 if this register is at the end of the chain.
293
459281be 294 If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined. */
1bb98cec
DM
295
/* Per-register equivalence chain.  Indexed by register number; `next'
   and `prev' give the number of the next (or previous) register in
   the chain of registers sharing the same value, or -1 at the end of
   the chain.  If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is
   undefined.  */
struct reg_eqv_elem
{
  int next, prev;
};
7afe21cc 301
1bb98cec
DM
302/* The table of all register equivalence chains. */
303static struct reg_eqv_elem *reg_eqv_table;
7afe21cc 304
14a774a9
RK
/* Per-register bookkeeping for CSE.  Entries are accessed through the
   REG_TICK, REG_IN_TABLE, SUBREG_TICKED and REG_QTY macros defined
   later in this file.  */
struct cse_reg_info
{
  /* The timestamp at which this register is initialized.  */
  unsigned int timestamp;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};
7afe21cc 327
bc5e3b54
KH
328/* A table of cse_reg_info indexed by register numbers. */
329struct cse_reg_info *cse_reg_info_table;
c1edba58 330
bc5e3b54
KH
331/* The size of the above table. */
332static unsigned int cse_reg_info_table_size;
9b1549b8 333
bc5e3b54
KH
334/* The index of the first entry that has not been initialized. */
335static unsigned int cse_reg_info_table_first_uninitialized;
7afe21cc 336
bc5e3b54 337/* The timestamp at the beginning of the current run of
0388d40a 338 cse_basic_block. We increment this variable at the beginning of
bc5e3b54
KH
339 the current run of cse_basic_block. The timestamp field of a
340 cse_reg_info entry matches the value of this variable if and only
341 if the entry has been initialized during the current run of
342 cse_basic_block. */
343static unsigned int cse_reg_info_timestamp;
7afe21cc 344
278a83b2 345/* A HARD_REG_SET containing all the hard registers for which there is
7afe21cc
RK
346 currently a REG expression in the hash table. Note the difference
347 from the above variables, which indicate if the REG is mentioned in some
348 expression in the table. */
349
350static HARD_REG_SET hard_regs_in_table;
351
7afe21cc
RK
352/* CUID of insn that starts the basic block currently being cse-processed. */
353
354static int cse_basic_block_start;
355
356/* CUID of insn that ends the basic block currently being cse-processed. */
357
358static int cse_basic_block_end;
359
360/* Vector mapping INSN_UIDs to cuids.
d45cf215 361 The cuids are like uids but increase monotonically always.
7afe21cc
RK
362 We use them to see whether a reg is used outside a given basic block. */
363
906c4e36 364static int *uid_cuid;
7afe21cc 365
164c8956
RK
366/* Highest UID in UID_CUID. */
367static int max_uid;
368
7afe21cc
RK
369/* Get the cuid of an insn. */
370
371#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
372
4eadede7
ZW
373/* Nonzero if this pass has made changes, and therefore it's
374 worthwhile to run the garbage collector. */
375
376static int cse_altered;
377
7afe21cc
RK
378/* Nonzero if cse has altered conditional jump insns
379 in such a way that jump optimization should be redone. */
380
381static int cse_jumps_altered;
382
f85cc4cb
RK
383/* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
384 REG_LABEL, we have to rerun jump after CSE to put in the note. */
a5dfb4ee
RK
385static int recorded_label_ref;
386
7afe21cc
RK
387/* canon_hash stores 1 in do_not_record
388 if it notices a reference to CC0, PC, or some other volatile
389 subexpression. */
390
391static int do_not_record;
392
393/* canon_hash stores 1 in hash_arg_in_memory
394 if it notices a reference to memory within the expression being hashed. */
395
396static int hash_arg_in_memory;
397
7afe21cc
RK
398/* The hash table contains buckets which are chains of `struct table_elt's,
399 each recording one expression's information.
400 That expression is in the `exp' field.
401
db048faf
MM
402 The canon_exp field contains a canonical (from the point of view of
403 alias analysis) version of the `exp' field.
404
7afe21cc
RK
405 Those elements with the same hash code are chained in both directions
406 through the `next_same_hash' and `prev_same_hash' fields.
407
408 Each set of expressions with equivalent values
409 are on a two-way chain through the `next_same_value'
410 and `prev_same_value' fields, and all point with
411 the `first_same_value' field at the first element in
412 that chain. The chain is in order of increasing cost.
413 Each element's cost value is in its `cost' field.
414
415 The `in_memory' field is nonzero for elements that
416 involve any reference to memory. These elements are removed
417 whenever a write is done to an unidentified location in memory.
418 To be safe, we assume that a memory address is unidentified unless
419 the address is either a symbol constant or a constant plus
420 the frame pointer or argument pointer.
421
7afe21cc
RK
422 The `related_value' field is used to connect related expressions
423 (that differ by adding an integer).
424 The related expressions are chained in a circular fashion.
425 `related_value' is zero for expressions for which this
426 chain is not useful.
427
428 The `cost' field stores the cost of this element's expression.
630c79be
BS
429 The `regcost' field stores the value returned by approx_reg_cost for
430 this element's expression.
7afe21cc
RK
431
432 The `is_const' flag is set if the element is a constant (including
433 a fixed address).
434
435 The `flag' field is used as a temporary during some search routines.
436
437 The `mode' field is usually the same as GET_MODE (`exp'), but
438 if `exp' is a CONST_INT and has no machine mode then the `mode'
439 field is the mode it was being used as. Each constant is
440 recorded separately for each mode it is used with. */
441
7afe21cc
RK
/* One entry in the expression hash table `table'.  See the long
   comment immediately above for the full description of how these
   fields are used.  */
struct table_elt
{
  /* The expression this element records.  */
  rtx exp;
  /* Canonical (from the point of view of alias analysis) version of
     `exp'.  */
  rtx canon_exp;
  /* Doubly-linked chain of elements with the same hash code.  */
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  /* Doubly-linked chain of elements with equivalent values, in order
     of increasing cost; `first_same_value' points at the head of the
     chain this element is on.  */
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  /* Circular chain connecting related expressions (those that differ
     by adding an integer); zero when the chain is not useful.  */
  struct table_elt *related_value;
  /* Cost of this element's expression.  */
  int cost;
  /* Value returned by approx_reg_cost for this element's expression.  */
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  /* Nonzero for elements that involve any reference to memory; such
     elements are removed whenever a write is done to an unidentified
     location in memory.  */
  char in_memory;
  /* Set if the element is a constant (including a fixed address).  */
  char is_const;
  /* Used as a temporary during some search routines.  */
  char flag;
};
461
7afe21cc
RK
462/* We don't want a lot of buckets, because we rarely have very many
463 things stored in the hash table, and a lot of buckets slows
464 down a lot of loops that happen frequently. */
9b1549b8
DM
465#define HASH_SHIFT 5
466#define HASH_SIZE (1 << HASH_SHIFT)
467#define HASH_MASK (HASH_SIZE - 1)
7afe21cc
RK
468
469/* Compute hash code of X in mode M. Special-case case where X is a pseudo
470 register (hard registers may require `do_not_record' to be set). */
471
472#define HASH(X, M) \
f8cfc6aa 473 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
9b1549b8
DM
474 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
475 : canon_hash (X, M)) & HASH_MASK)
7afe21cc 476
0516f6fe
SB
477/* Like HASH, but without side-effects. */
478#define SAFE_HASH(X, M) \
479 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
480 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
481 : safe_hash (X, M)) & HASH_MASK)
482
630c79be
BS
483/* Determine whether register number N is considered a fixed register for the
484 purpose of approximating register costs.
7afe21cc
RK
485 It is desirable to replace other regs with fixed regs, to reduce need for
486 non-fixed hard regs.
553687c9 487 A reg wins if it is either the frame pointer or designated as fixed. */
7afe21cc 488#define FIXED_REGNO_P(N) \
8bc169f2 489 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
6ab832bc 490 || fixed_regs[N] || global_regs[N])
7afe21cc
RK
491
492/* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
ac07e066
RK
493 hard registers and pointers into the frame are the cheapest with a cost
494 of 0. Next come pseudos with a cost of one and other hard registers with
495 a cost of 2. Aside from these special cases, call `rtx_cost'. */
496
d67fb775
SB
497#define CHEAP_REGNO(N) \
498 (REGNO_PTR_FRAME_P(N) \
499 || (HARD_REGISTER_NUM_P (N) \
e7bb59fa 500 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
7afe21cc 501
f8cfc6aa
JQ
502#define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
503#define COST_IN(X,OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))
7afe21cc 504
30f72379
MM
505/* Get the number of times this register has been updated in this
506 basic block. */
507
bc5e3b54 508#define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)
30f72379
MM
509
510/* Get the point at which REG was recorded in the table. */
511
bc5e3b54 512#define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)
30f72379 513
46081bb3
SH
514/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
515 SUBREG). */
516
bc5e3b54 517#define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)
46081bb3 518
30f72379
MM
519/* Get the quantity number for REG. */
520
bc5e3b54 521#define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)
30f72379 522
7afe21cc 523/* Determine if the quantity number for register X represents a valid index
1bb98cec 524 into the qty_table. */
7afe21cc 525
08a69267 526#define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
7afe21cc 527
9b1549b8 528static struct table_elt *table[HASH_SIZE];
7afe21cc
RK
529
530/* Chain of `struct table_elt's made so far for this function
531 but currently removed from the table. */
532
533static struct table_elt *free_element_chain;
534
7afe21cc
RK
535/* Set to the cost of a constant pool reference if one was found for a
536 symbolic constant. If this was found, it means we should try to
537 convert constants into constant pool entries if they don't fit in
538 the insn. */
539
540static int constant_pool_entries_cost;
dd0ba281 541static int constant_pool_entries_regcost;
7afe21cc 542
6cd4575e
RK
543/* This data describes a block that will be processed by cse_basic_block. */
544
14a774a9
RK
struct cse_basic_block_data
{
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.
     NOTE(review): presumably an array of `path_size' live entries —
     confirm against cse_end_of_basic_block, which fills it in.  */
  struct branch_path
  {
    /* The branch insn.  */
    rtx branch;
    /* Whether it should be taken or not.  AROUND is the same as taken
       except that it is used when the destination label is not preceded
       by a BARRIER.  */
    enum taken {PATH_TAKEN, PATH_NOT_TAKEN, PATH_AROUND} status;
  } *path;
};
567};
568
7080f735
AJ
569static bool fixed_base_plus_p (rtx x);
570static int notreg_cost (rtx, enum rtx_code);
571static int approx_reg_cost_1 (rtx *, void *);
572static int approx_reg_cost (rtx);
56ae04af 573static int preferable (int, int, int, int);
7080f735
AJ
574static void new_basic_block (void);
575static void make_new_qty (unsigned int, enum machine_mode);
576static void make_regs_eqv (unsigned int, unsigned int);
577static void delete_reg_equiv (unsigned int);
578static int mention_regs (rtx);
579static int insert_regs (rtx, struct table_elt *, int);
580static void remove_from_table (struct table_elt *, unsigned);
581static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
582static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
583static rtx lookup_as_function (rtx, enum rtx_code);
584static struct table_elt *insert (rtx, struct table_elt *, unsigned,
585 enum machine_mode);
586static void merge_equiv_classes (struct table_elt *, struct table_elt *);
587static void invalidate (rtx, enum machine_mode);
588static int cse_rtx_varies_p (rtx, int);
589static void remove_invalid_refs (unsigned int);
590static void remove_invalid_subreg_refs (unsigned int, unsigned int,
591 enum machine_mode);
592static void rehash_using_reg (rtx);
593static void invalidate_memory (void);
594static void invalidate_for_call (void);
595static rtx use_related_value (rtx, struct table_elt *);
0516f6fe
SB
596
597static inline unsigned canon_hash (rtx, enum machine_mode);
598static inline unsigned safe_hash (rtx, enum machine_mode);
599static unsigned hash_rtx_string (const char *);
600
7080f735
AJ
601static rtx canon_reg (rtx, rtx);
602static void find_best_addr (rtx, rtx *, enum machine_mode);
603static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
604 enum machine_mode *,
605 enum machine_mode *);
606static rtx fold_rtx (rtx, rtx);
607static rtx equiv_constant (rtx);
608static void record_jump_equiv (rtx, int);
609static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
610 int);
611static void cse_insn (rtx, rtx);
86caf04d 612static void cse_end_of_basic_block (rtx, struct cse_basic_block_data *,
5affca01 613 int, int);
7080f735
AJ
614static int addr_affects_sp_p (rtx);
615static void invalidate_from_clobbers (rtx);
616static rtx cse_process_notes (rtx, rtx);
7080f735
AJ
617static void invalidate_skipped_set (rtx, rtx, void *);
618static void invalidate_skipped_block (rtx);
5affca01 619static rtx cse_basic_block (rtx, rtx, struct branch_path *);
9ab81df2 620static void count_reg_usage (rtx, int *, int);
7080f735
AJ
621static int check_for_label_ref (rtx *, void *);
622extern void dump_class (struct table_elt*);
bc5e3b54
KH
623static void get_cse_reg_info_1 (unsigned int regno);
624static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
7080f735
AJ
625static int check_dependence (rtx *, void *);
626
627static void flush_hash_table (void);
628static bool insn_live_p (rtx, int *);
629static bool set_live_p (rtx, rtx, int *);
630static bool dead_libcall_p (rtx, int *);
e129d93a 631static int cse_change_cc_mode (rtx *, void *);
fc188d37 632static void cse_change_cc_mode_insn (rtx, rtx);
e129d93a
ILT
633static void cse_change_cc_mode_insns (rtx, rtx, rtx);
634static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);
7afe21cc 635\f
2f93eea8
PB
636
637#undef RTL_HOOKS_GEN_LOWPART
638#define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible
639
640static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
641\f
4977bab6
ZW
642/* Nonzero if X has the form (PLUS frame-pointer integer). We check for
643 virtual regs here because the simplify_*_operation routines are called
644 by integrate.c, which is called before virtual register instantiation. */
645
646static bool
7080f735 647fixed_base_plus_p (rtx x)
4977bab6
ZW
648{
649 switch (GET_CODE (x))
650 {
651 case REG:
652 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
653 return true;
654 if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
655 return true;
656 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
657 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
658 return true;
659 return false;
660
661 case PLUS:
662 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
663 return false;
664 return fixed_base_plus_p (XEXP (x, 0));
665
4977bab6
ZW
666 default:
667 return false;
668 }
669}
670
a4c6502a
MM
671/* Dump the expressions in the equivalence class indicated by CLASSP.
672 This function is used only for debugging. */
a0153051 673void
7080f735 674dump_class (struct table_elt *classp)
a4c6502a
MM
675{
676 struct table_elt *elt;
677
678 fprintf (stderr, "Equivalence chain for ");
679 print_rtl (stderr, classp->exp);
680 fprintf (stderr, ": \n");
278a83b2 681
a4c6502a
MM
682 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
683 {
684 print_rtl (stderr, elt->exp);
685 fprintf (stderr, "\n");
686 }
687}
688
630c79be 689/* Subroutine of approx_reg_cost; called through for_each_rtx. */
be8ac49a 690
630c79be 691static int
7080f735 692approx_reg_cost_1 (rtx *xp, void *data)
630c79be
BS
693{
694 rtx x = *xp;
c863f8c2 695 int *cost_p = data;
630c79be 696
f8cfc6aa 697 if (x && REG_P (x))
c863f8c2
DM
698 {
699 unsigned int regno = REGNO (x);
700
701 if (! CHEAP_REGNO (regno))
702 {
703 if (regno < FIRST_PSEUDO_REGISTER)
704 {
705 if (SMALL_REGISTER_CLASSES)
706 return 1;
707 *cost_p += 2;
708 }
709 else
710 *cost_p += 1;
711 }
712 }
713
630c79be
BS
714 return 0;
715}
716
717/* Return an estimate of the cost of the registers used in an rtx.
718 This is mostly the number of different REG expressions in the rtx;
a1f300c0 719 however for some exceptions like fixed registers we use a cost of
f1c1dfc3 720 0. If any other hard register reference occurs, return MAX_COST. */
630c79be
BS
721
722static int
7080f735 723approx_reg_cost (rtx x)
630c79be 724{
630c79be 725 int cost = 0;
f1c1dfc3 726
c863f8c2
DM
727 if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
728 return MAX_COST;
630c79be 729
c863f8c2 730 return cost;
630c79be
BS
731}
732
/* Returns a canonical version of X for the address, from the point of view,
   that all multiplications are represented as MULT instead of the multiply
   by a power of 2 being represented as ASHIFT.

   NOTE(review): operands are rewritten in place (see the XEXP store in
   the loop below), so X itself may be modified as a side effect.  */

static rtx
canon_for_address (rtx x)
{
  enum rtx_code code;
  enum machine_mode mode;
  rtx new = 0;
  int i;
  const char *fmt;

  if (!x)
    return x;

  code = GET_CODE (x);
  mode = GET_MODE (x);

  switch (code)
    {
    case ASHIFT:
      /* Rewrite (ashift X C), for constant C in [0, bitsize), as
	 (mult X' (1 << C)) where X' is the canonicalized operand.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode)
	  && INTVAL (XEXP (x, 1)) >= 0)
	{
	  new = canon_for_address (XEXP (x, 0));
	  new = gen_rtx_MULT (mode, new,
			      gen_int_mode ((HOST_WIDE_INT) 1
					    << INTVAL (XEXP (x, 1)),
					    mode));
	}
      break;
    default:
      break;

    }
  if (new)
    return new;

  /* Now recursively process each operand of this operation.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      {
	new = canon_for_address (XEXP (x, i));
	/* Store the canonicalized operand back into X.  */
	XEXP (x, i) = new;
      }
  return x;
}
783
630c79be
BS
784/* Return a negative value if an rtx A, whose costs are given by COST_A
785 and REGCOST_A, is more desirable than an rtx B.
786 Return a positive value if A is less desirable, or 0 if the two are
787 equally good. */
788static int
56ae04af 789preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
630c79be 790{
423adbb9 791 /* First, get rid of cases involving expressions that are entirely
f1c1dfc3
BS
792 unwanted. */
793 if (cost_a != cost_b)
794 {
795 if (cost_a == MAX_COST)
796 return 1;
797 if (cost_b == MAX_COST)
798 return -1;
799 }
800
801 /* Avoid extending lifetimes of hardregs. */
802 if (regcost_a != regcost_b)
803 {
804 if (regcost_a == MAX_COST)
805 return 1;
806 if (regcost_b == MAX_COST)
807 return -1;
808 }
809
810 /* Normal operation costs take precedence. */
630c79be
BS
811 if (cost_a != cost_b)
812 return cost_a - cost_b;
f1c1dfc3 813 /* Only if these are identical consider effects on register pressure. */
630c79be
BS
814 if (regcost_a != regcost_b)
815 return regcost_a - regcost_b;
816 return 0;
817}
818
954a5693
RK
819/* Internal function, to compute cost when X is not a register; called
820 from COST macro to keep it simple. */
821
822static int
7080f735 823notreg_cost (rtx x, enum rtx_code outer)
954a5693
RK
824{
825 return ((GET_CODE (x) == SUBREG
f8cfc6aa 826 && REG_P (SUBREG_REG (x))
954a5693
RK
827 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
828 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
829 && (GET_MODE_SIZE (GET_MODE (x))
830 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
831 && subreg_lowpart_p (x)
832 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
833 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
630c79be 834 ? 0
f2fa288f 835 : rtx_cost (x, outer) * 2);
954a5693
RK
836}
837
01329426 838\f
bc5e3b54 839/* Initialize CSE_REG_INFO_TABLE. */
9b1549b8 840
bc5e3b54
KH
841static void
842init_cse_reg_info (unsigned int nregs)
843{
844 /* Do we need to grow the table? */
845 if (nregs > cse_reg_info_table_size)
30f72379 846 {
bc5e3b54
KH
847 unsigned int new_size;
848
849 if (cse_reg_info_table_size < 2048)
30f72379 850 {
bc5e3b54
KH
851 /* Compute a new size that is a power of 2 and no smaller
852 than the large of NREGS and 64. */
853 new_size = (cse_reg_info_table_size
854 ? cse_reg_info_table_size : 64);
855
856 while (new_size < nregs)
857 new_size *= 2;
30f72379
MM
858 }
859 else
1590d0d4 860 {
bc5e3b54
KH
861 /* If we need a big table, allocate just enough to hold
862 NREGS registers. */
863 new_size = nregs;
1590d0d4 864 }
9b1549b8 865
bc5e3b54 866 /* Reallocate the table with NEW_SIZE entries. */
a811c672
KH
867 if (cse_reg_info_table)
868 free (cse_reg_info_table);
869 cse_reg_info_table = xmalloc (sizeof (struct cse_reg_info)
870 * new_size);
bc5e3b54 871 cse_reg_info_table_size = new_size;
a811c672 872 cse_reg_info_table_first_uninitialized = 0;
bc5e3b54
KH
873 }
874
875 /* Do we have all of the first NREGS entries initialized? */
876 if (cse_reg_info_table_first_uninitialized < nregs)
877 {
878 unsigned int old_timestamp = cse_reg_info_timestamp - 1;
879 unsigned int i;
880
881 /* Put the old timestamp on newly allocated entries so that they
882 will all be considered out of date. We do not touch those
883 entries beyond the first NREGS entries to be nice to the
884 virtual memory. */
885 for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
886 cse_reg_info_table[i].timestamp = old_timestamp;
30f72379 887
bc5e3b54 888 cse_reg_info_table_first_uninitialized = nregs;
30f72379 889 }
bc5e3b54
KH
890}
891
a52aff23 892/* Given REGNO, initialize the cse_reg_info entry for REGNO. */
bc5e3b54
KH
893
894static void
895get_cse_reg_info_1 (unsigned int regno)
896{
897 /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
898 entry will be considered to have been initialized. */
899 cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;
900
901 /* Initialize the rest of the entry. */
902 cse_reg_info_table[regno].reg_tick = 1;
903 cse_reg_info_table[regno].reg_in_table = -1;
904 cse_reg_info_table[regno].subreg_ticked = -1;
905 cse_reg_info_table[regno].reg_qty = -regno - 1;
906}
907
/* Find a cse_reg_info entry for REGNO.  Returns a pointer into
   CSE_REG_INFO_TABLE, lazily (re)initializing the entry if its
   timestamp shows it is stale for the current pass.  */

static inline struct cse_reg_info *
get_cse_reg_info (unsigned int regno)
{
  struct cse_reg_info *p = &cse_reg_info_table[regno];

  /* If this entry has not been initialized, go ahead and initialize
     it.  */
  if (p->timestamp != cse_reg_info_timestamp)
    get_cse_reg_info_1 (regno);

  return p;
}
922
7afe21cc
RK
923/* Clear the hash table and initialize each register with its own quantity,
924 for a new basic block. */
925
926static void
7080f735 927new_basic_block (void)
7afe21cc 928{
b3694847 929 int i;
7afe21cc 930
08a69267 931 next_qty = 0;
7afe21cc 932
a52aff23 933 /* Invalidate cse_reg_info_table. */
bc5e3b54 934 cse_reg_info_timestamp++;
7afe21cc 935
bc5e3b54 936 /* Clear out hash table state for this pass. */
7afe21cc
RK
937 CLEAR_HARD_REG_SET (hard_regs_in_table);
938
939 /* The per-quantity values used to be initialized here, but it is
940 much faster to initialize each as it is made in `make_new_qty'. */
941
9b1549b8 942 for (i = 0; i < HASH_SIZE; i++)
7afe21cc 943 {
9b1549b8
DM
944 struct table_elt *first;
945
946 first = table[i];
947 if (first != NULL)
7afe21cc 948 {
9b1549b8
DM
949 struct table_elt *last = first;
950
951 table[i] = NULL;
952
953 while (last->next_same_hash != NULL)
954 last = last->next_same_hash;
955
956 /* Now relink this hash entire chain into
957 the free element list. */
958
959 last->next_same_hash = free_element_chain;
960 free_element_chain = first;
7afe21cc
RK
961 }
962 }
963
7afe21cc 964#ifdef HAVE_cc0
4977bab6 965 prev_insn = 0;
7afe21cc
RK
966 prev_insn_cc0 = 0;
967#endif
968}
969
1bb98cec
DM
970/* Say that register REG contains a quantity in mode MODE not in any
971 register before and initialize that quantity. */
7afe21cc
RK
972
973static void
7080f735 974make_new_qty (unsigned int reg, enum machine_mode mode)
7afe21cc 975{
b3694847
SS
976 int q;
977 struct qty_table_elem *ent;
978 struct reg_eqv_elem *eqv;
7afe21cc 979
341c100f 980 gcc_assert (next_qty < max_qty);
7afe21cc 981
30f72379 982 q = REG_QTY (reg) = next_qty++;
1bb98cec
DM
983 ent = &qty_table[q];
984 ent->first_reg = reg;
985 ent->last_reg = reg;
986 ent->mode = mode;
987 ent->const_rtx = ent->const_insn = NULL_RTX;
988 ent->comparison_code = UNKNOWN;
989
990 eqv = &reg_eqv_table[reg];
991 eqv->next = eqv->prev = -1;
7afe21cc
RK
992}
993
/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  NEW is linked into OLD's quantity
   chain at a position that reflects how desirable it is as a
   canonical replacement.  */

static void
make_regs_eqv (unsigned int new, unsigned int old)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  gcc_assert (REGNO_QTY_VALID_P (old));

  REG_QTY (new) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
         that not only can they not be allocated by the compiler, but
         they cannot be used in substitutions or canonicalizations
         either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
          || (new >= FIRST_PSEUDO_REGISTER
              && (firstr < FIRST_PSEUDO_REGISTER
                  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
                       || (uid_cuid[REGNO_FIRST_UID (new)]
                           < cse_basic_block_start))
                      && (uid_cuid[REGNO_LAST_UID (new)]
                          > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      /* Insert NEW at the head of the chain: it becomes the new
         canonical register for this quantity.  */
      reg_eqv_table[firstr].prev = new;
      reg_eqv_table[new].next = firstr;
      reg_eqv_table[new].prev = -1;
      ent->first_reg = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
         Otherwise, insert before any non-fixed hard regs that are at the
         end.  Registers of class NO_REGS cannot be used as an
         equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
             && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
             && new >= FIRST_PSEUDO_REGISTER)
        lastr = reg_eqv_table[lastr].prev;
      /* Link NEW into the chain immediately after LASTR, updating the
         quantity's last_reg if NEW becomes the tail.  */
      reg_eqv_table[new].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
        reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
      else
        qty_table[q].last_reg = new;
      reg_eqv_table[lastr].next = new;
      reg_eqv_table[new].prev = lastr;
    }
}
1055}
1056
1057/* Remove REG from its equivalence class. */
1058
1059static void
7080f735 1060delete_reg_equiv (unsigned int reg)
7afe21cc 1061{
b3694847
SS
1062 struct qty_table_elem *ent;
1063 int q = REG_QTY (reg);
1064 int p, n;
7afe21cc 1065
a4e262bc 1066 /* If invalid, do nothing. */
08a69267 1067 if (! REGNO_QTY_VALID_P (reg))
7afe21cc
RK
1068 return;
1069
1bb98cec
DM
1070 ent = &qty_table[q];
1071
1072 p = reg_eqv_table[reg].prev;
1073 n = reg_eqv_table[reg].next;
a4e262bc 1074
7afe21cc 1075 if (n != -1)
1bb98cec 1076 reg_eqv_table[n].prev = p;
7afe21cc 1077 else
1bb98cec 1078 ent->last_reg = p;
7afe21cc 1079 if (p != -1)
1bb98cec 1080 reg_eqv_table[p].next = n;
7afe21cc 1081 else
1bb98cec 1082 ent->first_reg = n;
7afe21cc 1083
08a69267 1084 REG_QTY (reg) = -reg - 1;
7afe21cc
RK
1085}
1086
/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      /* A hard register may span several consecutive register
         numbers; a pseudo always spans exactly one.  */
      unsigned int endregno
        = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
                   : hard_regno_nregs[regno][GET_MODE (x)]);
      unsigned int i;

      for (i = regno; i < endregno; i++)
        {
          /* A tick mismatch means stale entries mention register I;
             flush them before marking I up to date.  */
          if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
            remove_invalid_refs (i);

          REG_IN_TABLE (i) = REG_TICK (i);
          SUBREG_TICKED (i) = -1;
        }

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && REG_P (SUBREG_REG (x))
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
        {
          /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
             the last store to this register really stored into this
             subreg, then remove the memory of this subreg.
             Otherwise, remove any memory of the entire register and
             all its subregs from the table.  */
          if (REG_TICK (i) - REG_IN_TABLE (i) > 1
              || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
            remove_invalid_refs (i);
          else
            remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
        }

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || COMPARISON_P (x))
    {
      if (REG_P (XEXP (x, 0))
          && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
        if (insert_regs (XEXP (x, 0), NULL, 0))
          {
            rehash_using_reg (XEXP (x, 0));
            changed = 1;
          }

      if (REG_P (XEXP (x, 1))
          && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
        if (insert_regs (XEXP (x, 1), NULL, 0))
          {
            rehash_using_reg (XEXP (x, 1));
            changed = 1;
          }
    }

  /* Recurse on all rtx operands and vector elements of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}
1197
/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (rtx x, struct table_elt *classp, int modified)
{
  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
         wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
        {
          struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

          if (ent->mode != GET_MODE (x))
            return 0;
        }

      if (modified || ! qty_valid)
        {
          /* Try to join an existing class: any register of the same
             mode already in CLASSP's class will do.  */
          if (classp)
            for (classp = classp->first_same_value;
                 classp != 0;
                 classp = classp->next_same_value)
              if (REG_P (classp->exp)
                  && GET_MODE (classp->exp) == GET_MODE (x))
                {
                  make_regs_eqv (regno, REGNO (classp->exp));
                  return 1;
                }

          /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
             than REG_IN_TABLE to find out if there was only a single preceding
             invalidation - for the SUBREG - or another one, which would be
             for the full register.  However, if we find here that REG_TICK
             indicates that the register is invalid, it means that it has
             been invalidated in a separate operation.  The SUBREG might be used
             now (then this is a recursive call), or we might use the full REG
             now and a SUBREG of it later.  So bump up REG_TICK so that
             mention_regs will do the right thing.  */
          if (! modified
              && REG_IN_TABLE (regno) >= 0
              && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
            REG_TICK (regno)++;
          make_new_qty (regno, GET_MODE (x));
          return 1;
        }

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
           && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}
1277\f
1278/* Look in or update the hash table. */
1279
7afe21cc
RK
/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (struct table_elt *elt, unsigned int hash)
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
        /* ELT headed its class; every remaining member must now point
           at the new head (which may be null).  */
        struct table_elt *newfirst = next;
        while (next)
          {
            next->first_same_value = newfirst;
            next = next->next_same_value;
          }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
        /* This entry is not in the proper hash bucket.  This can happen
           when two classes were merged by `merge_equiv_classes'.  Search
           for the hash bucket that it heads.  This happens only very
           rarely, so the cost is acceptable.  */
        for (hash = 0; hash < HASH_SIZE; hash++)
          if (table[hash] == elt)
            table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      struct table_elt *p = elt->related_value;

      /* Find ELT's predecessor on the circular chain and splice ELT
         out; a chain of one collapses to no chain at all.  */
      while (p->related_value != elt)
        p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
        p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}
1358
1359/* Look up X in the hash table and return its table element,
1360 or 0 if X is not in the table.
1361
1362 MODE is the machine-mode of X, or if X is an integer constant
1363 with VOIDmode then MODE is the mode with which X will be used.
1364
1365 Here we are satisfied to find an expression whose tree structure
1366 looks like X. */
1367
1368static struct table_elt *
7080f735 1369lookup (rtx x, unsigned int hash, enum machine_mode mode)
7afe21cc 1370{
b3694847 1371 struct table_elt *p;
7afe21cc
RK
1372
1373 for (p = table[hash]; p; p = p->next_same_hash)
f8cfc6aa 1374 if (mode == p->mode && ((x == p->exp && REG_P (x))
0516f6fe 1375 || exp_equiv_p (x, p->exp, !REG_P (x), false)))
7afe21cc
RK
1376 return p;
1377
1378 return 0;
1379}
1380
1381/* Like `lookup' but don't care whether the table element uses invalid regs.
1382 Also ignore discrepancies in the machine mode of a register. */
1383
1384static struct table_elt *
7080f735 1385lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
7afe21cc 1386{
b3694847 1387 struct table_elt *p;
7afe21cc 1388
f8cfc6aa 1389 if (REG_P (x))
7afe21cc 1390 {
770ae6cc
RK
1391 unsigned int regno = REGNO (x);
1392
7afe21cc
RK
1393 /* Don't check the machine mode when comparing registers;
1394 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1395 for (p = table[hash]; p; p = p->next_same_hash)
f8cfc6aa 1396 if (REG_P (p->exp)
7afe21cc
RK
1397 && REGNO (p->exp) == regno)
1398 return p;
1399 }
1400 else
1401 {
1402 for (p = table[hash]; p; p = p->next_same_hash)
0516f6fe
SB
1403 if (mode == p->mode
1404 && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
7afe21cc
RK
1405 return p;
1406 }
1407
1408 return 0;
1409}
1410
1411/* Look for an expression equivalent to X and with code CODE.
1412 If one is found, return that expression. */
1413
1414static rtx
7080f735 1415lookup_as_function (rtx x, enum rtx_code code)
7afe21cc 1416{
b3694847 1417 struct table_elt *p
0516f6fe 1418 = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));
770ae6cc 1419
34c73909
R
1420 /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1421 long as we are narrowing. So if we looked in vain for a mode narrower
1422 than word_mode before, look for word_mode now. */
1423 if (p == 0 && code == CONST_INT
1424 && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1425 {
1426 x = copy_rtx (x);
1427 PUT_MODE (x, word_mode);
0516f6fe 1428 p = lookup (x, SAFE_HASH (x, VOIDmode), word_mode);
34c73909
R
1429 }
1430
7afe21cc
RK
1431 if (p == 0)
1432 return 0;
1433
1434 for (p = p->first_same_value; p; p = p->next_same_value)
770ae6cc
RK
1435 if (GET_CODE (p->exp) == code
1436 /* Make sure this is a valid entry in the table. */
0516f6fe 1437 && exp_equiv_p (p->exp, p->exp, 1, false))
770ae6cc 1438 return p->exp;
278a83b2 1439
7afe21cc
RK
1440 return 0;
1441}
1442
/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X, Y) \
 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)

static struct table_elt *
insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));

  /* If X is a hard register, show it is being put in the table.  */
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
      unsigned int i;

      for (i = regno; i < endregno; i++)
        SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* Put an element for X into the right hash bucket.  Reuse a freed
     element if one is available, otherwise allocate a fresh one.  */

  elt = free_element_chain;
  if (elt)
    free_element_chain = elt->next_same_hash;
  else
    elt = xmalloc (sizeof (struct table_elt));

  elt->exp = x;
  elt->canon_exp = NULL_RTX;
  elt->cost = COST (x);
  elt->regcost = approx_reg_cost (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
        /* Insert at the head of the class.  */
        {
          struct table_elt *p;
          elt->next_same_value = classp;
          classp->prev_same_value = elt;
          elt->first_same_value = elt;

          for (p = classp; p; p = p->next_same_value)
            p->first_same_value = elt;
        }
      else
        {
          /* Insert not at head of the class.  */
          /* Put it after the last element cheaper than X.  */
          struct table_elt *p, *next;

          for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
               p = next);

          /* Put it after P and before NEXT.  */
          elt->next_same_value = next;
          if (next)
            next->prev_same_value = elt;

          elt->prev_same_value = p;
          p->next_same_value = elt;
          elt->first_same_value = classp;
        }
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update the qtys `const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && REG_P (classp->exp)
      && !REG_P (x))
    {
      int exp_q = REG_QTY (REGNO (classp->exp));
      struct qty_table_elem *exp_ent = &qty_table[exp_q];

      exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
      exp_ent->const_insn = this_insn;
    }

  else if (REG_P (x)
           && classp
           && ! qty_table[REG_QTY (REGNO (x))].const_rtx
           && ! elt->is_const)
    {
      struct table_elt *p;

      /* Scan the class for a non-register constant member.  */
      for (p = classp; p != 0; p = p->next_same_value)
        {
          if (p->is_const && !REG_P (p->exp))
            {
              int x_q = REG_QTY (REGNO (x));
              struct qty_table_elem *x_ent = &qty_table[x_q];

              x_ent->const_rtx
                = gen_lowpart (GET_MODE (x), p->exp);
              x_ent->const_insn = this_insn;
              break;
            }
        }
    }

  else if (REG_P (x)
           && qty_table[REG_QTY (REGNO (x))].const_rtx
           && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
        {
          /* Get the integer-free subexpression in the hash table.  */
          subhash = SAFE_HASH (subexp, mode);
          subelt = lookup (subexp, subhash, mode);
          if (subelt == 0)
            subelt = insert (subexp, NULL, subhash, mode);
          /* Initialize SUBELT's circular chain if it has none.  */
          if (subelt->related_value == 0)
            subelt->related_value = subelt;
          /* Find the element in the circular chain that precedes SUBELT.  */
          subelt_prev = subelt;
          while (subelt_prev->related_value != subelt)
            subelt_prev = subelt_prev->related_value;
          /* Put new ELT into SUBELT's circular chain just before SUBELT.
             This way the element that follows SUBELT is the oldest one.  */
          elt->related_value = subelt_prev->related_value;
          subelt_prev->related_value = elt;
        }
    }

  return elt;
}
1636\f
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned int hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      /* Record the successor now: ELT is about to be unlinked.  */
      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
         Don't do this for invalid entries as we cannot find their
         hash code (it also isn't necessary).  */
      if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
        {
          bool need_rehash = false;

          hash_arg_in_memory = 0;
          hash = HASH (exp, mode);

          if (REG_P (exp))
            {
              /* Deleting the register's quantity may change hash codes
                 of expressions mentioning it, so force a rehash.  */
              need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
              delete_reg_equiv (REGNO (exp));
            }

          remove_from_table (elt, hash);

          if (insert_regs (exp, class1, 0) || need_rehash)
            {
              rehash_using_reg (exp);
              hash = HASH (exp, mode);
            }
          new = insert (exp, class1, hash, mode);
          new->in_memory = hash_arg_in_memory;
        }
    }
}
1695\f
01e752d3
JL
1696/* Flush the entire hash table. */
1697
1698static void
7080f735 1699flush_hash_table (void)
01e752d3
JL
1700{
1701 int i;
1702 struct table_elt *p;
1703
9b1549b8 1704 for (i = 0; i < HASH_SIZE; i++)
01e752d3
JL
1705 for (p = table[i]; p; p = table[i])
1706 {
1707 /* Note that invalidate can remove elements
1708 after P in the current hash chain. */
f8cfc6aa 1709 if (REG_P (p->exp))
01e752d3
JL
1710 invalidate (p->exp, p->mode);
1711 else
1712 remove_from_table (p, i);
1713 }
1714}
14a774a9 1715\f
2ce6dc2f
JH
1716/* Function called for each rtx to check whether true dependence exist. */
1717struct check_dependence_data
1718{
1719 enum machine_mode mode;
1720 rtx exp;
9ddb66ca 1721 rtx addr;
2ce6dc2f 1722};
be8ac49a 1723
2ce6dc2f 1724static int
7080f735 1725check_dependence (rtx *x, void *data)
2ce6dc2f
JH
1726{
1727 struct check_dependence_data *d = (struct check_dependence_data *) data;
3c0cb5de 1728 if (*x && MEM_P (*x))
9ddb66ca
JH
1729 return canon_true_dependence (d->exp, d->mode, d->addr, *x,
1730 cse_rtx_varies_p);
2ce6dc2f
JH
1731 else
1732 return 0;
1733}
1734\f
14a774a9
RK
1735/* Remove from the hash table, or mark as invalid, all expressions whose
1736 values could be altered by storing in X. X is a register, a subreg, or
1737 a memory reference with nonvarying address (because, when a memory
1738 reference with a varying address is stored in, all memory references are
1739 removed by invalidate_memory so specific invalidation is superfluous).
1740 FULL_MODE, if not VOIDmode, indicates that this much should be
1741 invalidated instead of just the amount indicated by the mode of X. This
1742 is only used for bitfield stores into memory.
1743
1744 A nonvarying address may be just a register or just a symbol reference,
1745 or it may be either of those plus a numeric offset. */
7afe21cc
RK
1746
1747static void
7080f735 1748invalidate (rtx x, enum machine_mode full_mode)
7afe21cc 1749{
b3694847
SS
1750 int i;
1751 struct table_elt *p;
9ddb66ca 1752 rtx addr;
7afe21cc 1753
14a774a9 1754 switch (GET_CODE (x))
7afe21cc 1755 {
14a774a9
RK
1756 case REG:
1757 {
1758 /* If X is a register, dependencies on its contents are recorded
1759 through the qty number mechanism. Just change the qty number of
1760 the register, mark it as invalid for expressions that refer to it,
1761 and remove it itself. */
770ae6cc
RK
1762 unsigned int regno = REGNO (x);
1763 unsigned int hash = HASH (x, GET_MODE (x));
7afe21cc 1764
14a774a9
RK
1765 /* Remove REGNO from any quantity list it might be on and indicate
1766 that its value might have changed. If it is a pseudo, remove its
1767 entry from the hash table.
7afe21cc 1768
14a774a9
RK
1769 For a hard register, we do the first two actions above for any
1770 additional hard registers corresponding to X. Then, if any of these
1771 registers are in the table, we must remove any REG entries that
1772 overlap these registers. */
7afe21cc 1773
14a774a9
RK
1774 delete_reg_equiv (regno);
1775 REG_TICK (regno)++;
46081bb3 1776 SUBREG_TICKED (regno) = -1;
85e4d983 1777
14a774a9
RK
1778 if (regno >= FIRST_PSEUDO_REGISTER)
1779 {
1780 /* Because a register can be referenced in more than one mode,
1781 we might have to remove more than one table entry. */
1782 struct table_elt *elt;
85e4d983 1783
14a774a9
RK
1784 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1785 remove_from_table (elt, hash);
1786 }
1787 else
1788 {
1789 HOST_WIDE_INT in_table
1790 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
770ae6cc 1791 unsigned int endregno
66fd46b6 1792 = regno + hard_regno_nregs[regno][GET_MODE (x)];
770ae6cc 1793 unsigned int tregno, tendregno, rn;
b3694847 1794 struct table_elt *p, *next;
7afe21cc 1795
14a774a9 1796 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
7afe21cc 1797
770ae6cc 1798 for (rn = regno + 1; rn < endregno; rn++)
14a774a9 1799 {
770ae6cc
RK
1800 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1801 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1802 delete_reg_equiv (rn);
1803 REG_TICK (rn)++;
46081bb3 1804 SUBREG_TICKED (rn) = -1;
14a774a9 1805 }
7afe21cc 1806
14a774a9 1807 if (in_table)
9b1549b8 1808 for (hash = 0; hash < HASH_SIZE; hash++)
14a774a9
RK
1809 for (p = table[hash]; p; p = next)
1810 {
1811 next = p->next_same_hash;
7afe21cc 1812
f8cfc6aa 1813 if (!REG_P (p->exp)
278a83b2
KH
1814 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1815 continue;
1816
14a774a9
RK
1817 tregno = REGNO (p->exp);
1818 tendregno
66fd46b6 1819 = tregno + hard_regno_nregs[tregno][GET_MODE (p->exp)];
14a774a9
RK
1820 if (tendregno > regno && tregno < endregno)
1821 remove_from_table (p, hash);
1822 }
1823 }
1824 }
7afe21cc 1825 return;
7afe21cc 1826
14a774a9 1827 case SUBREG:
bb4034b3 1828 invalidate (SUBREG_REG (x), VOIDmode);
7afe21cc 1829 return;
aac5cc16 1830
14a774a9 1831 case PARALLEL:
278a83b2 1832 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
aac5cc16
RH
1833 invalidate (XVECEXP (x, 0, i), VOIDmode);
1834 return;
aac5cc16 1835
14a774a9
RK
1836 case EXPR_LIST:
1837 /* This is part of a disjoint return value; extract the location in
1838 question ignoring the offset. */
aac5cc16
RH
1839 invalidate (XEXP (x, 0), VOIDmode);
1840 return;
7afe21cc 1841
14a774a9 1842 case MEM:
9ddb66ca 1843 addr = canon_rtx (get_addr (XEXP (x, 0)));
db048faf
MM
1844 /* Calculate the canonical version of X here so that
1845 true_dependence doesn't generate new RTL for X on each call. */
1846 x = canon_rtx (x);
1847
14a774a9
RK
1848 /* Remove all hash table elements that refer to overlapping pieces of
1849 memory. */
1850 if (full_mode == VOIDmode)
1851 full_mode = GET_MODE (x);
bb4034b3 1852
9b1549b8 1853 for (i = 0; i < HASH_SIZE; i++)
7afe21cc 1854 {
b3694847 1855 struct table_elt *next;
14a774a9
RK
1856
1857 for (p = table[i]; p; p = next)
1858 {
1859 next = p->next_same_hash;
db048faf
MM
1860 if (p->in_memory)
1861 {
2ce6dc2f
JH
1862 struct check_dependence_data d;
1863
1864 /* Just canonicalize the expression once;
1865 otherwise each time we call invalidate
1866 true_dependence will canonicalize the
1867 expression again. */
1868 if (!p->canon_exp)
1869 p->canon_exp = canon_rtx (p->exp);
1870 d.exp = x;
9ddb66ca 1871 d.addr = addr;
2ce6dc2f
JH
1872 d.mode = full_mode;
1873 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
db048faf 1874 remove_from_table (p, i);
db048faf 1875 }
14a774a9 1876 }
7afe21cc 1877 }
14a774a9
RK
1878 return;
1879
1880 default:
341c100f 1881 gcc_unreachable ();
7afe21cc
RK
1882 }
1883}
14a774a9 1884\f
7afe21cc
RK
1885/* Remove all expressions that refer to register REGNO,
1886 since they are already invalid, and we are about to
1887 mark that register valid again and don't want the old
1888 expressions to reappear as valid. */
1889
1890static void
7080f735 1891remove_invalid_refs (unsigned int regno)
7afe21cc 1892{
770ae6cc
RK
1893 unsigned int i;
1894 struct table_elt *p, *next;
7afe21cc 1895
9b1549b8 1896 for (i = 0; i < HASH_SIZE; i++)
7afe21cc
RK
1897 for (p = table[i]; p; p = next)
1898 {
1899 next = p->next_same_hash;
f8cfc6aa 1900 if (!REG_P (p->exp)
68252e27 1901 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
7afe21cc
RK
1902 remove_from_table (p, i);
1903 }
1904}
34c73909 1905
ddef6bc7
JJ
1906/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1907 and mode MODE. */
34c73909 1908static void
7080f735
AJ
1909remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
1910 enum machine_mode mode)
34c73909 1911{
770ae6cc
RK
1912 unsigned int i;
1913 struct table_elt *p, *next;
ddef6bc7 1914 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
34c73909 1915
9b1549b8 1916 for (i = 0; i < HASH_SIZE; i++)
34c73909
R
1917 for (p = table[i]; p; p = next)
1918 {
ddef6bc7 1919 rtx exp = p->exp;
34c73909 1920 next = p->next_same_hash;
278a83b2 1921
f8cfc6aa 1922 if (!REG_P (exp)
34c73909 1923 && (GET_CODE (exp) != SUBREG
f8cfc6aa 1924 || !REG_P (SUBREG_REG (exp))
34c73909 1925 || REGNO (SUBREG_REG (exp)) != regno
ddef6bc7
JJ
1926 || (((SUBREG_BYTE (exp)
1927 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
1928 && SUBREG_BYTE (exp) <= end))
68252e27 1929 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
34c73909
R
1930 remove_from_table (p, i);
1931 }
1932}
7afe21cc
RK
1933\f
1934/* Recompute the hash codes of any valid entries in the hash table that
1935 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1936
1937 This is called when we make a jump equivalence. */
1938
1939static void
7080f735 1940rehash_using_reg (rtx x)
7afe21cc 1941{
973838fd 1942 unsigned int i;
7afe21cc 1943 struct table_elt *p, *next;
2197a88a 1944 unsigned hash;
7afe21cc
RK
1945
1946 if (GET_CODE (x) == SUBREG)
1947 x = SUBREG_REG (x);
1948
1949 /* If X is not a register or if the register is known not to be in any
1950 valid entries in the table, we have no work to do. */
1951
f8cfc6aa 1952 if (!REG_P (x)
30f72379
MM
1953 || REG_IN_TABLE (REGNO (x)) < 0
1954 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
7afe21cc
RK
1955 return;
1956
1957 /* Scan all hash chains looking for valid entries that mention X.
a90fc8e0 1958 If we find one and it is in the wrong hash chain, move it. */
7afe21cc 1959
9b1549b8 1960 for (i = 0; i < HASH_SIZE; i++)
7afe21cc
RK
1961 for (p = table[i]; p; p = next)
1962 {
1963 next = p->next_same_hash;
a90fc8e0 1964 if (reg_mentioned_p (x, p->exp)
0516f6fe
SB
1965 && exp_equiv_p (p->exp, p->exp, 1, false)
1966 && i != (hash = SAFE_HASH (p->exp, p->mode)))
7afe21cc
RK
1967 {
1968 if (p->next_same_hash)
1969 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1970
1971 if (p->prev_same_hash)
1972 p->prev_same_hash->next_same_hash = p->next_same_hash;
1973 else
1974 table[i] = p->next_same_hash;
1975
1976 p->next_same_hash = table[hash];
1977 p->prev_same_hash = 0;
1978 if (table[hash])
1979 table[hash]->prev_same_hash = p;
1980 table[hash] = p;
1981 }
1982 }
1983}
1984\f
7afe21cc
RK
1985/* Remove from the hash table any expression that is a call-clobbered
1986 register. Also update their TICK values. */
1987
1988static void
7080f735 1989invalidate_for_call (void)
7afe21cc 1990{
770ae6cc
RK
1991 unsigned int regno, endregno;
1992 unsigned int i;
2197a88a 1993 unsigned hash;
7afe21cc
RK
1994 struct table_elt *p, *next;
1995 int in_table = 0;
1996
1997 /* Go through all the hard registers. For each that is clobbered in
1998 a CALL_INSN, remove the register from quantity chains and update
1999 reg_tick if defined. Also see if any of these registers is currently
2000 in the table. */
2001
2002 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2003 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2004 {
2005 delete_reg_equiv (regno);
30f72379 2006 if (REG_TICK (regno) >= 0)
46081bb3
SH
2007 {
2008 REG_TICK (regno)++;
2009 SUBREG_TICKED (regno) = -1;
2010 }
7afe21cc 2011
0e227018 2012 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
7afe21cc
RK
2013 }
2014
2015 /* In the case where we have no call-clobbered hard registers in the
2016 table, we are done. Otherwise, scan the table and remove any
2017 entry that overlaps a call-clobbered register. */
2018
2019 if (in_table)
9b1549b8 2020 for (hash = 0; hash < HASH_SIZE; hash++)
7afe21cc
RK
2021 for (p = table[hash]; p; p = next)
2022 {
2023 next = p->next_same_hash;
2024
f8cfc6aa 2025 if (!REG_P (p->exp)
7afe21cc
RK
2026 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2027 continue;
2028
2029 regno = REGNO (p->exp);
66fd46b6 2030 endregno = regno + hard_regno_nregs[regno][GET_MODE (p->exp)];
7afe21cc
RK
2031
2032 for (i = regno; i < endregno; i++)
2033 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2034 {
2035 remove_from_table (p, hash);
2036 break;
2037 }
2038 }
2039}
2040\f
2041/* Given an expression X of type CONST,
2042 and ELT which is its table entry (or 0 if it
2043 is not in the hash table),
2044 return an alternate expression for X as a register plus integer.
2045 If none can be found, return 0. */
2046
2047static rtx
7080f735 2048use_related_value (rtx x, struct table_elt *elt)
7afe21cc 2049{
b3694847
SS
2050 struct table_elt *relt = 0;
2051 struct table_elt *p, *q;
906c4e36 2052 HOST_WIDE_INT offset;
7afe21cc
RK
2053
2054 /* First, is there anything related known?
2055 If we have a table element, we can tell from that.
2056 Otherwise, must look it up. */
2057
2058 if (elt != 0 && elt->related_value != 0)
2059 relt = elt;
2060 else if (elt == 0 && GET_CODE (x) == CONST)
2061 {
2062 rtx subexp = get_related_value (x);
2063 if (subexp != 0)
2064 relt = lookup (subexp,
0516f6fe 2065 SAFE_HASH (subexp, GET_MODE (subexp)),
7afe21cc
RK
2066 GET_MODE (subexp));
2067 }
2068
2069 if (relt == 0)
2070 return 0;
2071
2072 /* Search all related table entries for one that has an
2073 equivalent register. */
2074
2075 p = relt;
2076 while (1)
2077 {
2078 /* This loop is strange in that it is executed in two different cases.
2079 The first is when X is already in the table. Then it is searching
2080 the RELATED_VALUE list of X's class (RELT). The second case is when
2081 X is not in the table. Then RELT points to a class for the related
2082 value.
2083
2084 Ensure that, whatever case we are in, that we ignore classes that have
2085 the same value as X. */
2086
2087 if (rtx_equal_p (x, p->exp))
2088 q = 0;
2089 else
2090 for (q = p->first_same_value; q; q = q->next_same_value)
f8cfc6aa 2091 if (REG_P (q->exp))
7afe21cc
RK
2092 break;
2093
2094 if (q)
2095 break;
2096
2097 p = p->related_value;
2098
2099 /* We went all the way around, so there is nothing to be found.
2100 Alternatively, perhaps RELT was in the table for some other reason
2101 and it has no related values recorded. */
2102 if (p == relt || p == 0)
2103 break;
2104 }
2105
2106 if (q == 0)
2107 return 0;
2108
2109 offset = (get_integer_term (x) - get_integer_term (p->exp));
2110 /* Note: OFFSET may be 0 if P->xexp and X are related by commutativity. */
2111 return plus_constant (q->exp, offset);
2112}
2113\f
/* Hash a string.  Just add its bytes up; PS may be null, in which
   case the hash is 0.  */
static inline unsigned
hash_rtx_string (const char *ps)
{
  const unsigned char *p = (const unsigned char *) ps;
  unsigned sum = 0;

  if (p != 0)
    for (; *p != '\0'; p++)
      sum += *p;

  return sum;
}
2127
7afe21cc
RK
2128/* Hash an rtx. We are careful to make sure the value is never negative.
2129 Equivalent registers hash identically.
2130 MODE is used in hashing for CONST_INTs only;
2131 otherwise the mode of X is used.
2132
0516f6fe 2133 Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
7afe21cc 2134
0516f6fe
SB
2135 If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2136 a MEM rtx which does not have the RTX_UNCHANGING_P bit set.
7afe21cc
RK
2137
2138 Note that cse_insn knows that the hash code of a MEM expression
2139 is just (int) MEM plus the hash code of the address. */
2140
0516f6fe
SB
2141unsigned
2142hash_rtx (rtx x, enum machine_mode mode, int *do_not_record_p,
2143 int *hash_arg_in_memory_p, bool have_reg_qty)
7afe21cc 2144{
b3694847
SS
2145 int i, j;
2146 unsigned hash = 0;
2147 enum rtx_code code;
2148 const char *fmt;
7afe21cc 2149
0516f6fe
SB
2150 /* Used to turn recursion into iteration. We can't rely on GCC's
2151 tail-recursion elimination since we need to keep accumulating values
2152 in HASH. */
7afe21cc
RK
2153 repeat:
2154 if (x == 0)
2155 return hash;
2156
2157 code = GET_CODE (x);
2158 switch (code)
2159 {
2160 case REG:
2161 {
770ae6cc 2162 unsigned int regno = REGNO (x);
7afe21cc 2163
0516f6fe 2164 if (!reload_completed)
7afe21cc 2165 {
0516f6fe
SB
2166 /* On some machines, we can't record any non-fixed hard register,
2167 because extending its life will cause reload problems. We
2168 consider ap, fp, sp, gp to be fixed for this purpose.
2169
2170 We also consider CCmode registers to be fixed for this purpose;
2171 failure to do so leads to failure to simplify 0<100 type of
2172 conditionals.
2173
2174 On all machines, we can't record any global registers.
2175 Nor should we record any register that is in a small
2176 class, as defined by CLASS_LIKELY_SPILLED_P. */
2177 bool record;
2178
2179 if (regno >= FIRST_PSEUDO_REGISTER)
2180 record = true;
2181 else if (x == frame_pointer_rtx
2182 || x == hard_frame_pointer_rtx
2183 || x == arg_pointer_rtx
2184 || x == stack_pointer_rtx
2185 || x == pic_offset_table_rtx)
2186 record = true;
2187 else if (global_regs[regno])
2188 record = false;
2189 else if (fixed_regs[regno])
2190 record = true;
2191 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2192 record = true;
2193 else if (SMALL_REGISTER_CLASSES)
2194 record = false;
2195 else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2196 record = false;
2197 else
2198 record = true;
2199
2200 if (!record)
2201 {
2202 *do_not_record_p = 1;
2203 return 0;
2204 }
7afe21cc 2205 }
770ae6cc 2206
0516f6fe
SB
2207 hash += ((unsigned int) REG << 7);
2208 hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2197a88a 2209 return hash;
7afe21cc
RK
2210 }
2211
34c73909
R
2212 /* We handle SUBREG of a REG specially because the underlying
2213 reg changes its hash value with every value change; we don't
2214 want to have to forget unrelated subregs when one subreg changes. */
2215 case SUBREG:
2216 {
f8cfc6aa 2217 if (REG_P (SUBREG_REG (x)))
34c73909 2218 {
0516f6fe 2219 hash += (((unsigned int) SUBREG << 7)
ddef6bc7
JJ
2220 + REGNO (SUBREG_REG (x))
2221 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
34c73909
R
2222 return hash;
2223 }
2224 break;
2225 }
2226
7afe21cc 2227 case CONST_INT:
0516f6fe
SB
2228 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2229 + (unsigned int) INTVAL (x));
2230 return hash;
7afe21cc
RK
2231
2232 case CONST_DOUBLE:
2233 /* This is like the general case, except that it only counts
2234 the integers representing the constant. */
0516f6fe 2235 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
969c8517 2236 if (GET_MODE (x) != VOIDmode)
46b33600 2237 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
969c8517 2238 else
0516f6fe
SB
2239 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2240 + (unsigned int) CONST_DOUBLE_HIGH (x));
7afe21cc
RK
2241 return hash;
2242
69ef87e2
AH
2243 case CONST_VECTOR:
2244 {
2245 int units;
2246 rtx elt;
2247
2248 units = CONST_VECTOR_NUNITS (x);
2249
2250 for (i = 0; i < units; ++i)
2251 {
2252 elt = CONST_VECTOR_ELT (x, i);
0516f6fe
SB
2253 hash += hash_rtx (elt, GET_MODE (elt), do_not_record_p,
2254 hash_arg_in_memory_p, have_reg_qty);
69ef87e2
AH
2255 }
2256
2257 return hash;
2258 }
2259
7afe21cc
RK
2260 /* Assume there is only one rtx object for any given label. */
2261 case LABEL_REF:
0516f6fe
SB
2262 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2263 differences and differences between each stage's debugging dumps. */
2264 hash += (((unsigned int) LABEL_REF << 7)
2265 + CODE_LABEL_NUMBER (XEXP (x, 0)));
2197a88a 2266 return hash;
7afe21cc
RK
2267
2268 case SYMBOL_REF:
0516f6fe
SB
2269 {
2270 /* Don't hash on the symbol's address to avoid bootstrap differences.
2271 Different hash values may cause expressions to be recorded in
2272 different orders and thus different registers to be used in the
2273 final assembler. This also avoids differences in the dump files
2274 between various stages. */
2275 unsigned int h = 0;
2276 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2277
2278 while (*p)
2279 h += (h << 7) + *p++; /* ??? revisit */
2280
2281 hash += ((unsigned int) SYMBOL_REF << 7) + h;
2282 return hash;
2283 }
7afe21cc
RK
2284
2285 case MEM:
14a774a9
RK
2286 /* We don't record if marked volatile or if BLKmode since we don't
2287 know the size of the move. */
2288 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
7afe21cc 2289 {
0516f6fe 2290 *do_not_record_p = 1;
7afe21cc
RK
2291 return 0;
2292 }
0516f6fe
SB
2293 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2294 *hash_arg_in_memory_p = 1;
4977bab6 2295
7afe21cc
RK
2296 /* Now that we have already found this special case,
2297 might as well speed it up as much as possible. */
2197a88a 2298 hash += (unsigned) MEM;
7afe21cc
RK
2299 x = XEXP (x, 0);
2300 goto repeat;
2301
bb07060a
JW
2302 case USE:
2303 /* A USE that mentions non-volatile memory needs special
2304 handling since the MEM may be BLKmode which normally
2305 prevents an entry from being made. Pure calls are
0516f6fe
SB
2306 marked by a USE which mentions BLKmode memory.
2307 See calls.c:emit_call_1. */
3c0cb5de 2308 if (MEM_P (XEXP (x, 0))
bb07060a
JW
2309 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2310 {
68252e27 2311 hash += (unsigned) USE;
bb07060a
JW
2312 x = XEXP (x, 0);
2313
0516f6fe
SB
2314 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2315 *hash_arg_in_memory_p = 1;
bb07060a
JW
2316
2317 /* Now that we have already found this special case,
2318 might as well speed it up as much as possible. */
2319 hash += (unsigned) MEM;
2320 x = XEXP (x, 0);
2321 goto repeat;
2322 }
2323 break;
2324
7afe21cc
RK
2325 case PRE_DEC:
2326 case PRE_INC:
2327 case POST_DEC:
2328 case POST_INC:
4b983fdc
RH
2329 case PRE_MODIFY:
2330 case POST_MODIFY:
7afe21cc
RK
2331 case PC:
2332 case CC0:
2333 case CALL:
2334 case UNSPEC_VOLATILE:
0516f6fe 2335 *do_not_record_p = 1;
7afe21cc
RK
2336 return 0;
2337
2338 case ASM_OPERANDS:
2339 if (MEM_VOLATILE_P (x))
2340 {
0516f6fe 2341 *do_not_record_p = 1;
7afe21cc
RK
2342 return 0;
2343 }
6462bb43
AO
2344 else
2345 {
2346 /* We don't want to take the filename and line into account. */
2347 hash += (unsigned) code + (unsigned) GET_MODE (x)
0516f6fe
SB
2348 + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2349 + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
6462bb43
AO
2350 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2351
2352 if (ASM_OPERANDS_INPUT_LENGTH (x))
2353 {
2354 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2355 {
0516f6fe
SB
2356 hash += (hash_rtx (ASM_OPERANDS_INPUT (x, i),
2357 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2358 do_not_record_p, hash_arg_in_memory_p,
2359 have_reg_qty)
2360 + hash_rtx_string
2361 (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
6462bb43
AO
2362 }
2363
0516f6fe 2364 hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
6462bb43
AO
2365 x = ASM_OPERANDS_INPUT (x, 0);
2366 mode = GET_MODE (x);
2367 goto repeat;
2368 }
2369
2370 return hash;
2371 }
e9a25f70 2372 break;
278a83b2 2373
e9a25f70
JL
2374 default:
2375 break;
7afe21cc
RK
2376 }
2377
2378 i = GET_RTX_LENGTH (code) - 1;
2197a88a 2379 hash += (unsigned) code + (unsigned) GET_MODE (x);
7afe21cc
RK
2380 fmt = GET_RTX_FORMAT (code);
2381 for (; i >= 0; i--)
2382 {
341c100f 2383 switch (fmt[i])
7afe21cc 2384 {
341c100f 2385 case 'e':
7afe21cc
RK
2386 /* If we are about to do the last recursive call
2387 needed at this level, change it into iteration.
2388 This function is called enough to be worth it. */
2389 if (i == 0)
2390 {
0516f6fe 2391 x = XEXP (x, i);
7afe21cc
RK
2392 goto repeat;
2393 }
0516f6fe
SB
2394
2395 hash += hash_rtx (XEXP (x, i), 0, do_not_record_p,
2396 hash_arg_in_memory_p, have_reg_qty);
341c100f 2397 break;
0516f6fe 2398
341c100f
NS
2399 case 'E':
2400 for (j = 0; j < XVECLEN (x, i); j++)
0516f6fe
SB
2401 hash += hash_rtx (XVECEXP (x, i, j), 0, do_not_record_p,
2402 hash_arg_in_memory_p, have_reg_qty);
341c100f 2403 break;
0516f6fe 2404
341c100f
NS
2405 case 's':
2406 hash += hash_rtx_string (XSTR (x, i));
2407 break;
2408
2409 case 'i':
2410 hash += (unsigned int) XINT (x, i);
2411 break;
2412
2413 case '0': case 't':
2414 /* Unused. */
2415 break;
2416
2417 default:
2418 gcc_unreachable ();
2419 }
7afe21cc 2420 }
0516f6fe 2421
7afe21cc
RK
2422 return hash;
2423}
2424
0516f6fe
SB
2425/* Hash an rtx X for cse via hash_rtx.
2426 Stores 1 in do_not_record if any subexpression is volatile.
2427 Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2428 does not have the RTX_UNCHANGING_P bit set. */
2429
2430static inline unsigned
2431canon_hash (rtx x, enum machine_mode mode)
2432{
2433 return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2434}
2435
2436/* Like canon_hash but with no side effects, i.e. do_not_record
2437 and hash_arg_in_memory are not changed. */
7afe21cc 2438
0516f6fe 2439static inline unsigned
7080f735 2440safe_hash (rtx x, enum machine_mode mode)
7afe21cc 2441{
0516f6fe
SB
2442 int dummy_do_not_record;
2443 return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
7afe21cc
RK
2444}
2445\f
2446/* Return 1 iff X and Y would canonicalize into the same thing,
2447 without actually constructing the canonicalization of either one.
2448 If VALIDATE is nonzero,
2449 we assume X is an expression being processed from the rtl
2450 and Y was found in the hash table. We check register refs
2451 in Y for being marked as valid.
2452
0516f6fe 2453 If FOR_GCSE is true, we compare X and Y for equivalence for GCSE. */
7afe21cc 2454
0516f6fe
SB
2455int
2456exp_equiv_p (rtx x, rtx y, int validate, bool for_gcse)
7afe21cc 2457{
b3694847
SS
2458 int i, j;
2459 enum rtx_code code;
2460 const char *fmt;
7afe21cc
RK
2461
2462 /* Note: it is incorrect to assume an expression is equivalent to itself
2463 if VALIDATE is nonzero. */
2464 if (x == y && !validate)
2465 return 1;
0516f6fe 2466
7afe21cc
RK
2467 if (x == 0 || y == 0)
2468 return x == y;
2469
2470 code = GET_CODE (x);
2471 if (code != GET_CODE (y))
0516f6fe 2472 return 0;
7afe21cc
RK
2473
2474 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2475 if (GET_MODE (x) != GET_MODE (y))
2476 return 0;
2477
2478 switch (code)
2479 {
2480 case PC:
2481 case CC0:
7afe21cc 2482 case CONST_INT:
c13e8210 2483 return x == y;
7afe21cc
RK
2484
2485 case LABEL_REF:
7afe21cc
RK
2486 return XEXP (x, 0) == XEXP (y, 0);
2487
f54d4924
RK
2488 case SYMBOL_REF:
2489 return XSTR (x, 0) == XSTR (y, 0);
2490
7afe21cc 2491 case REG:
0516f6fe
SB
2492 if (for_gcse)
2493 return REGNO (x) == REGNO (y);
2494 else
2495 {
2496 unsigned int regno = REGNO (y);
2497 unsigned int i;
2498 unsigned int endregno
2499 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2500 : hard_regno_nregs[regno][GET_MODE (y)]);
7afe21cc 2501
0516f6fe
SB
2502 /* If the quantities are not the same, the expressions are not
2503 equivalent. If there are and we are not to validate, they
2504 are equivalent. Otherwise, ensure all regs are up-to-date. */
7afe21cc 2505
0516f6fe
SB
2506 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2507 return 0;
2508
2509 if (! validate)
2510 return 1;
2511
2512 for (i = regno; i < endregno; i++)
2513 if (REG_IN_TABLE (i) != REG_TICK (i))
2514 return 0;
7afe21cc 2515
7afe21cc 2516 return 1;
0516f6fe 2517 }
7afe21cc 2518
0516f6fe
SB
2519 case MEM:
2520 if (for_gcse)
2521 {
2522 /* Can't merge two expressions in different alias sets, since we
2523 can decide that the expression is transparent in a block when
2524 it isn't, due to it being set with the different alias set. */
2525 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
7afe21cc
RK
2526 return 0;
2527
0516f6fe
SB
2528 /* A volatile mem should not be considered equivalent to any
2529 other. */
2530 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2531 return 0;
2532 }
2533 break;
7afe21cc
RK
2534
2535 /* For commutative operations, check both orders. */
2536 case PLUS:
2537 case MULT:
2538 case AND:
2539 case IOR:
2540 case XOR:
2541 case NE:
2542 case EQ:
0516f6fe
SB
2543 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2544 validate, for_gcse)
7afe21cc 2545 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
0516f6fe 2546 validate, for_gcse))
7afe21cc 2547 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
0516f6fe 2548 validate, for_gcse)
7afe21cc 2549 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
0516f6fe 2550 validate, for_gcse)));
278a83b2 2551
6462bb43
AO
2552 case ASM_OPERANDS:
2553 /* We don't use the generic code below because we want to
2554 disregard filename and line numbers. */
2555
2556 /* A volatile asm isn't equivalent to any other. */
2557 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2558 return 0;
2559
2560 if (GET_MODE (x) != GET_MODE (y)
2561 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2562 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2563 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2564 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2565 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2566 return 0;
2567
2568 if (ASM_OPERANDS_INPUT_LENGTH (x))
2569 {
2570 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2571 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2572 ASM_OPERANDS_INPUT (y, i),
0516f6fe 2573 validate, for_gcse)
6462bb43
AO
2574 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2575 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2576 return 0;
2577 }
2578
2579 return 1;
2580
e9a25f70
JL
2581 default:
2582 break;
7afe21cc
RK
2583 }
2584
2585 /* Compare the elements. If any pair of corresponding elements
0516f6fe 2586 fail to match, return 0 for the whole thing. */
7afe21cc
RK
2587
2588 fmt = GET_RTX_FORMAT (code);
2589 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2590 {
906c4e36 2591 switch (fmt[i])
7afe21cc 2592 {
906c4e36 2593 case 'e':
0516f6fe
SB
2594 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2595 validate, for_gcse))
7afe21cc 2596 return 0;
906c4e36
RK
2597 break;
2598
2599 case 'E':
7afe21cc
RK
2600 if (XVECLEN (x, i) != XVECLEN (y, i))
2601 return 0;
2602 for (j = 0; j < XVECLEN (x, i); j++)
2603 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
0516f6fe 2604 validate, for_gcse))
7afe21cc 2605 return 0;
906c4e36
RK
2606 break;
2607
2608 case 's':
7afe21cc
RK
2609 if (strcmp (XSTR (x, i), XSTR (y, i)))
2610 return 0;
906c4e36
RK
2611 break;
2612
2613 case 'i':
7afe21cc
RK
2614 if (XINT (x, i) != XINT (y, i))
2615 return 0;
906c4e36
RK
2616 break;
2617
2618 case 'w':
2619 if (XWINT (x, i) != XWINT (y, i))
2620 return 0;
278a83b2 2621 break;
906c4e36
RK
2622
2623 case '0':
8f985ec4 2624 case 't':
906c4e36
RK
2625 break;
2626
2627 default:
341c100f 2628 gcc_unreachable ();
7afe21cc 2629 }
278a83b2 2630 }
906c4e36 2631
7afe21cc
RK
2632 return 1;
2633}
2634\f
9ae8ffe7
JL
2635/* Return 1 if X has a value that can vary even between two
2636 executions of the program. 0 means X can be compared reliably
2637 against certain constants or near-constants. */
7afe21cc
RK
2638
2639static int
7080f735 2640cse_rtx_varies_p (rtx x, int from_alias)
7afe21cc
RK
2641{
2642 /* We need not check for X and the equivalence class being of the same
2643 mode because if X is equivalent to a constant in some mode, it
2644 doesn't vary in any mode. */
2645
f8cfc6aa 2646 if (REG_P (x)
1bb98cec
DM
2647 && REGNO_QTY_VALID_P (REGNO (x)))
2648 {
2649 int x_q = REG_QTY (REGNO (x));
2650 struct qty_table_elem *x_ent = &qty_table[x_q];
2651
2652 if (GET_MODE (x) == x_ent->mode
2653 && x_ent->const_rtx != NULL_RTX)
2654 return 0;
2655 }
7afe21cc 2656
9ae8ffe7
JL
2657 if (GET_CODE (x) == PLUS
2658 && GET_CODE (XEXP (x, 1)) == CONST_INT
f8cfc6aa 2659 && REG_P (XEXP (x, 0))
1bb98cec
DM
2660 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2661 {
2662 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2663 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2664
2665 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2666 && x0_ent->const_rtx != NULL_RTX)
2667 return 0;
2668 }
7afe21cc 2669
9c6b0bae
RK
2670 /* This can happen as the result of virtual register instantiation, if
2671 the initial constant is too large to be a valid address. This gives
2672 us a three instruction sequence, load large offset into a register,
2673 load fp minus a constant into a register, then a MEM which is the
2674 sum of the two `constant' registers. */
9ae8ffe7 2675 if (GET_CODE (x) == PLUS
f8cfc6aa
JQ
2676 && REG_P (XEXP (x, 0))
2677 && REG_P (XEXP (x, 1))
9ae8ffe7 2678 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
1bb98cec
DM
2679 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2680 {
2681 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2682 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2683 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2684 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2685
2686 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2687 && x0_ent->const_rtx != NULL_RTX
2688 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2689 && x1_ent->const_rtx != NULL_RTX)
2690 return 0;
2691 }
9c6b0bae 2692
2be28ee2 2693 return rtx_varies_p (x, from_alias);
7afe21cc
RK
2694}
2695\f
eef3c949
RS
2696/* Subroutine of canon_reg. Pass *XLOC through canon_reg, and validate
2697 the result if necessary. INSN is as for canon_reg. */
2698
2699static void
2700validate_canon_reg (rtx *xloc, rtx insn)
2701{
2702 rtx new = canon_reg (*xloc, insn);
2703 int insn_code;
2704
2705 /* If replacing pseudo with hard reg or vice versa, ensure the
2706 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2707 if (insn != 0 && new != 0
2708 && REG_P (new) && REG_P (*xloc)
2709 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2710 != (REGNO (*xloc) < FIRST_PSEUDO_REGISTER))
2711 || GET_MODE (new) != GET_MODE (*xloc)
2712 || (insn_code = recog_memoized (insn)) < 0
2713 || insn_data[insn_code].n_dups > 0))
2714 validate_change (insn, xloc, new, 1);
2715 else
2716 *xloc = new;
2717}
2718
7afe21cc
RK
/* Canonicalize an expression:
   replace each register reference inside it
   with the "oldest" equivalent register.

   If INSN is nonzero and we are replacing a pseudo with a hard register
   or vice versa, validate_change is used to ensure that INSN remains valid
   after we make our substitution.  The calls are made with IN_GROUP nonzero
   so apply_change_group must be called upon the outermost return from this
   function (unless INSN is zero).  The result of apply_change_group can
   generally be discarded since the changes we are making are optional.  */

static rtx
canon_reg (rtx x, rtx insn)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return x;

  code = GET_CODE (x);
  switch (code)
    {
    /* Leaf rtxs contain no register references to canonicalize.  */
    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return x;

    case REG:
      {
	int first;
	int q;
	struct qty_table_elem *ent;

	/* Never replace a hard reg, because hard regs can appear
	   in more than one machine mode, and we must preserve the mode
	   of each occurrence.  Also, some hard regs appear in
	   MEMs that are shared and mustn't be altered.  Don't try to
	   replace any reg that maps to a reg of class NO_REGS.  */
	if (REGNO (x) < FIRST_PSEUDO_REGISTER
	    || ! REGNO_QTY_VALID_P (REGNO (x)))
	  return x;

	/* Look up the quantity this register belongs to and return the
	   oldest (first) register recorded for that quantity.  A pseudo
	   is returned via its canonical regno_reg_rtx entry; a hard reg
	   gets a fresh REG in the quantity's mode, except that a reg of
	   class NO_REGS is left unreplaced.  */
	q = REG_QTY (REGNO (x));
	ent = &qty_table[q];
	first = ent->first_reg;
	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
		: REGNO_REG_CLASS (first) == NO_REGS ? x
		: gen_rtx_REG (ent->mode, first));
      }

    default:
      break;
    }

  /* Not a leaf and not a REG: recursively canonicalize every rtx
     operand ('e') and every rtx vector element ('E'), validating each
     replacement in place so INSN stays recognizable.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      if (fmt[i] == 'e')
	validate_canon_reg (&XEXP (x, i), insn);
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  validate_canon_reg (&XVECEXP (x, i, j), insn);
    }

  return x;
}
2796\f
/* LOC is a location within INSN that is an operand address (the contents of
   a MEM).  Find the best equivalent address to use that is valid for this
   insn.

   On most CISC machines, complicated address modes are costly, and rtx_cost
   is a good approximation for that cost.  However, most RISC machines have
   only a few (usually only one) memory reference formats.  If an address is
   valid at all, it is often just as cheap as any other address.  Hence, for
   RISC machines, we use `address_cost' to compare the costs of various
   addresses.  For two addresses of equal cost, choose the one with the
   highest `rtx_cost' value as that has the potential of eliminating the
   most insns.  For equal costs, we choose the first in the equivalence
   class.  Note that we ignore the fact that pseudo registers are cheaper than
   hard registers here because we would also prefer the pseudo registers.  */

static void
find_best_addr (rtx insn, rtx *loc, enum machine_mode mode)
{
  struct table_elt *elt;
  rtx addr = *loc;
  struct table_elt *p;
  int found_better = 1;
  /* HASH clobbers the globals do_not_record and hash_arg_in_memory;
     save them here and restore after each probe.  */
  int save_do_not_record = do_not_record;
  int save_hash_arg_in_memory = hash_arg_in_memory;
  int addr_volatile;
  int regno;
  unsigned hash;

  /* Do not try to replace constant addresses or addresses of local and
     argument slots.  These MEM expressions are made only once and inserted
     in many instructions, as well as being used to control symbol table
     output.  It is not safe to clobber them.

     There are some uncommon cases where the address is already in a register
     for some reason, but we cannot take advantage of that because we have
     no easy way to unshare the MEM.  In addition, looking up all stack
     addresses is costly.  */
  if ((GET_CODE (addr) == PLUS
       && REG_P (XEXP (addr, 0))
       && GET_CODE (XEXP (addr, 1)) == CONST_INT
       && (regno = REGNO (XEXP (addr, 0)),
	   regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
	   || regno == ARG_POINTER_REGNUM))
      || (REG_P (addr)
	  && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
	      || regno == HARD_FRAME_POINTER_REGNUM
	      || regno == ARG_POINTER_REGNUM))
      || CONSTANT_ADDRESS_P (addr))
    return;

  /* If this address is not simply a register, try to fold it.  This will
     sometimes simplify the expression.  Many simplifications
     will not be valid, but some, usually applying the associative rule, will
     be valid and produce better code.  */
  if (!REG_P (addr))
    {
      rtx folded = fold_rtx (addr, NULL_RTX);
      if (folded != addr)
	{
	  int addr_folded_cost = address_cost (folded, mode);
	  int addr_cost = address_cost (addr, mode);

	  /* Accept the folded form only if it is cheaper (or equally
	     cheap but "bigger", see header comment) AND INSN still
	     validates with it in place.  */
	  if ((addr_folded_cost < addr_cost
	       || (addr_folded_cost == addr_cost
		   /* ??? The rtx_cost comparison is left over from an older
		      version of this code.  It is probably no longer helpful.  */
		   && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
		       || approx_reg_cost (folded) < approx_reg_cost (addr))))
	      && validate_change (insn, loc, folded, 0))
	    addr = folded;
	}
    }

  /* If this address is not in the hash table, we can't look for equivalences
     of the whole address.  Also, ignore if volatile.  */

  do_not_record = 0;
  hash = HASH (addr, Pmode);
  addr_volatile = do_not_record;
  do_not_record = save_do_not_record;
  hash_arg_in_memory = save_hash_arg_in_memory;

  if (addr_volatile)
    return;

  elt = lookup (addr, hash, Pmode);

  if (elt)
    {
      /* We need to find the best (under the criteria documented above) entry
	 in the class that is valid.  We use the `flag' field to indicate
	 choices that were invalid and iterate until we can't find a better
	 one that hasn't already been tried.  */

      for (p = elt->first_same_value; p; p = p->next_same_value)
	p->flag = 0;

      while (found_better)
	{
	  int best_addr_cost = address_cost (*loc, mode);
	  int best_rtx_cost = (elt->cost + 1) >> 1;
	  int exp_cost;
	  struct table_elt *best_elt = elt;

	  found_better = 0;
	  for (p = elt->first_same_value; p; p = p->next_same_value)
	    if (! p->flag)
	      {
		if ((REG_P (p->exp)
		     || exp_equiv_p (p->exp, p->exp, 1, false))
		    && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
			|| (exp_cost == best_addr_cost
			    && ((p->cost + 1) >> 1) > best_rtx_cost)))
		  {
		    found_better = 1;
		    best_addr_cost = exp_cost;
		    best_rtx_cost = (p->cost + 1) >> 1;
		    best_elt = p;
		  }
	      }

	  if (found_better)
	    {
	      /* Try the winner; if INSN rejects it, mark it tried and
		 loop to look for the next-best candidate.  */
	      if (validate_change (insn, loc,
				   canon_reg (copy_rtx (best_elt->exp),
					      NULL_RTX), 0))
		return;
	      else
		best_elt->flag = 1;
	    }
	}
    }

  /* If the address is a binary operation with the first operand a register
     and the second a constant, do the same as above, but looking for
     equivalences of the register.  Then try to simplify before checking for
     the best address to use.  This catches a few cases:  First is when we
     have REG+const and the register is another REG+const.  We can often merge
     the constants and eliminate one insn and one register.  It may also be
     that a machine has a cheap REG+REG+const.  Finally, this improves the
     code on the Alpha for unaligned byte stores.  */

  if (flag_expensive_optimizations
      && ARITHMETIC_P (*loc)
      && REG_P (XEXP (*loc, 0)))
    {
      rtx op1 = XEXP (*loc, 1);

      do_not_record = 0;
      hash = HASH (XEXP (*loc, 0), Pmode);
      do_not_record = save_do_not_record;
      hash_arg_in_memory = save_hash_arg_in_memory;

      elt = lookup (XEXP (*loc, 0), hash, Pmode);
      if (elt == 0)
	return;

      /* We need to find the best (under the criteria documented above) entry
	 in the class that is valid.  We use the `flag' field to indicate
	 choices that were invalid and iterate until we can't find a better
	 one that hasn't already been tried.  */

      for (p = elt->first_same_value; p; p = p->next_same_value)
	p->flag = 0;

      while (found_better)
	{
	  int best_addr_cost = address_cost (*loc, mode);
	  int best_rtx_cost = (COST (*loc) + 1) >> 1;
	  struct table_elt *best_elt = elt;
	  rtx best_rtx = *loc;
	  int count;

	  /* This is at worst case an O(n^2) algorithm, so limit our search
	     to the first 32 elements on the list.  This avoids trouble
	     compiling code with very long basic blocks that can easily
	     call simplify_gen_binary so many times that we run out of
	     memory.  */

	  found_better = 0;
	  for (p = elt->first_same_value, count = 0;
	       p && count < 32;
	       p = p->next_same_value, count++)
	    if (! p->flag
		&& (REG_P (p->exp)
		    || exp_equiv_p (p->exp, p->exp, 1, false)))
	      {
		/* Rebuild the operation with the equivalent register
		   substituted for operand 0, then simplify.  */
		rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
					       p->exp, op1);
		int new_cost;

		/* Get the canonical version of the address so we can accept
		   more.  */
		new = canon_for_address (new);

		new_cost = address_cost (new, mode);

		if (new_cost < best_addr_cost
		    || (new_cost == best_addr_cost
			&& (COST (new) + 1) >> 1 > best_rtx_cost))
		  {
		    found_better = 1;
		    best_addr_cost = new_cost;
		    best_rtx_cost = (COST (new) + 1) >> 1;
		    best_elt = p;
		    best_rtx = new;
		  }
	      }

	  if (found_better)
	    {
	      if (validate_change (insn, loc,
				   canon_reg (copy_rtx (best_rtx),
					      NULL_RTX), 0))
		return;
	      else
		best_elt->flag = 1;
	    }
	}
    }
}
3018\f
bca05d20
RK
/* Given an operation (CODE, *PARG1, *PARG2), where CODE is a comparison
   operation (EQ, NE, GT, etc.), follow it back through the hash table to
   determine what values are actually being compared.

   *PARG1 and *PARG2 are updated to contain the rtx representing the values
   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
   compared to produce cc0.

   *PMODE1 and *PMODE2 receive the modes of those values as seen before the
   final fold_rtx calls (see the comment at the end).

   The return value is the comparison operator and is either the code of
   A or the code corresponding to the inverse of the comparison.  */

static enum rtx_code
find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
		      enum machine_mode *pmode1, enum machine_mode *pmode2)
{
  rtx arg1, arg2;

  arg1 = *parg1, arg2 = *parg2;

  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */

  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
    {
      /* Set nonzero when we find something of interest.  */
      rtx x = 0;
      int reverse_code = 0;
      struct table_elt *p = 0;

      /* If arg1 is a COMPARE, extract the comparison arguments from it.
	 On machines with CC0, this is the only case that can occur, since
	 fold_rtx will return the COMPARE or item being compared with zero
	 when given CC0.  */

      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
	x = arg1;

      /* If ARG1 is a comparison operator and CODE is testing for
	 STORE_FLAG_VALUE, get the inner arguments.  */

      else if (COMPARISON_P (arg1))
	{
#ifdef FLOAT_STORE_FLAG_VALUE
	  REAL_VALUE_TYPE fsfv;
#endif

	  /* NE (or LT with STORE_FLAG_VALUE == -1, or the float
	     equivalent) tests the comparison's own truth value, so we
	     can look straight through it.  EQ/GE instead test its
	     negation, so the code must be reversed below.  */
	  if (code == NE
	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
		  && code == LT && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
		  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
		      REAL_VALUE_NEGATIVE (fsfv)))
#endif
	      )
	    x = arg1;
	  else if (code == EQ
		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
		       && code == GE && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			   REAL_VALUE_NEGATIVE (fsfv)))
#endif
		   )
	    x = arg1, reverse_code = 1;
	}

      /* ??? We could also check for

	 (ne (and (eq (...) (const_int 1))) (const_int 0))

	 and related forms, but let's wait until we see them occurring.  */

      if (x == 0)
	/* Look up ARG1 in the hash table and see if it has an equivalence
	   that lets us see what is being compared.  */
	p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
      if (p)
	{
	  p = p->first_same_value;

	  /* If what we compare is already known to be constant, that is as
	     good as it gets.
	     We need to break the loop in this case, because otherwise we
	     can have an infinite loop when looking at a reg that is known
	     to be a constant which is the same as a comparison of a reg
	     against zero which appears later in the insn stream, which in
	     turn is constant and the same as the comparison of the first reg
	     against zero...  */
	  if (p->is_const)
	    break;
	}

      /* Walk the equivalence class of ARG1 looking for a COMPARE or a
	 comparison operator that tells us what is being compared.  */
      for (; p; p = p->next_same_value)
	{
	  enum machine_mode inner_mode = GET_MODE (p->exp);
#ifdef FLOAT_STORE_FLAG_VALUE
	  REAL_VALUE_TYPE fsfv;
#endif

	  /* If the entry isn't valid, skip it.  */
	  if (! exp_equiv_p (p->exp, p->exp, 1, false))
	    continue;

	  if (GET_CODE (p->exp) == COMPARE
	      /* Another possibility is that this machine has a compare insn
		 that includes the comparison code.  In that case, ARG1 would
		 be equivalent to a comparison operation that would set ARG1 to
		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
		 ORIG_CODE is the actual comparison being done; if it is an EQ,
		 we must reverse ORIG_CODE.  On machines with a negative value
		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
	      || ((code == NE
		   || (code == LT
		       && GET_MODE_CLASS (inner_mode) == MODE_INT
		       && (GET_MODE_BITSIZE (inner_mode)
			   <= HOST_BITS_PER_WIDE_INT)
		       && (STORE_FLAG_VALUE
			   & ((HOST_WIDE_INT) 1
			      << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
		   || (code == LT
		       && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			   REAL_VALUE_NEGATIVE (fsfv)))
#endif
		   )
		  && COMPARISON_P (p->exp)))
	    {
	      x = p->exp;
	      break;
	    }
	  else if ((code == EQ
		    || (code == GE
			&& GET_MODE_CLASS (inner_mode) == MODE_INT
			&& (GET_MODE_BITSIZE (inner_mode)
			    <= HOST_BITS_PER_WIDE_INT)
			&& (STORE_FLAG_VALUE
			    & ((HOST_WIDE_INT) 1
			       << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
		    || (code == GE
			&& GET_MODE_CLASS (inner_mode) == MODE_FLOAT
			&& (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			    REAL_VALUE_NEGATIVE (fsfv)))
#endif
		    )
		   && COMPARISON_P (p->exp))
	    {
	      reverse_code = 1;
	      x = p->exp;
	      break;
	    }

	  /* If this is a non-trapping address, e.g. fp + constant, the
	     equivalent is a better operand since it may let us predict
	     the value of the comparison.  */
	  else if (!rtx_addr_can_trap_p (p->exp))
	    {
	      arg1 = p->exp;
	      continue;
	    }
	}

      /* If we didn't find a useful equivalence for ARG1, we are done.
	 Otherwise, set up for the next iteration.  */
      if (x == 0)
	break;

      /* If we need to reverse the comparison, make sure that that is
	 possible -- we can't necessarily infer the value of GE from LT
	 with floating-point operands.  */
      if (reverse_code)
	{
	  enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
	  if (reversed == UNKNOWN)
	    break;
	  else
	    code = reversed;
	}
      else if (COMPARISON_P (x))
	code = GET_CODE (x);
      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
    }

  /* Return our results.  Return the modes from before fold_rtx
     because fold_rtx might produce const_int, and then it's too late.  */
  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);

  return code;
}
3212\f
3213/* If X is a nontrivial arithmetic operation on an argument
3214 for which a constant value can be determined, return
3215 the result of operating on that value, as a constant.
3216 Otherwise, return X, possibly with one or more operands
3217 modified by recursive calls to this function.
3218
e7bb59fa
RK
3219 If X is a register whose contents are known, we do NOT
3220 return those contents here. equiv_constant is called to
3221 perform that task.
7afe21cc
RK
3222
3223 INSN is the insn that we may be modifying. If it is 0, make a copy
3224 of X before modifying it. */
3225
3226static rtx
7080f735 3227fold_rtx (rtx x, rtx insn)
7afe21cc 3228{
b3694847
SS
3229 enum rtx_code code;
3230 enum machine_mode mode;
3231 const char *fmt;
3232 int i;
7afe21cc
RK
3233 rtx new = 0;
3234 int copied = 0;
3235 int must_swap = 0;
3236
3237 /* Folded equivalents of first two operands of X. */
3238 rtx folded_arg0;
3239 rtx folded_arg1;
3240
3241 /* Constant equivalents of first three operands of X;
3242 0 when no such equivalent is known. */
3243 rtx const_arg0;
3244 rtx const_arg1;
3245 rtx const_arg2;
3246
3247 /* The mode of the first operand of X. We need this for sign and zero
3248 extends. */
3249 enum machine_mode mode_arg0;
3250
3251 if (x == 0)
3252 return x;
3253
3254 mode = GET_MODE (x);
3255 code = GET_CODE (x);
3256 switch (code)
3257 {
3258 case CONST:
3259 case CONST_INT:
3260 case CONST_DOUBLE:
69ef87e2 3261 case CONST_VECTOR:
7afe21cc
RK
3262 case SYMBOL_REF:
3263 case LABEL_REF:
3264 case REG:
01aa1d43 3265 case PC:
7afe21cc
RK
3266 /* No use simplifying an EXPR_LIST
3267 since they are used only for lists of args
3268 in a function call's REG_EQUAL note. */
3269 case EXPR_LIST:
3270 return x;
3271
3272#ifdef HAVE_cc0
3273 case CC0:
3274 return prev_insn_cc0;
3275#endif
3276
7afe21cc 3277 case SUBREG:
c610adec
RK
3278 /* See if we previously assigned a constant value to this SUBREG. */
3279 if ((new = lookup_as_function (x, CONST_INT)) != 0
3280 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
7afe21cc
RK
3281 return new;
3282
4b980e20
RK
3283 /* If this is a paradoxical SUBREG, we have no idea what value the
3284 extra bits would have. However, if the operand is equivalent
3285 to a SUBREG whose operand is the same as our mode, and all the
3286 modes are within a word, we can just use the inner operand
31c85c78
RK
3287 because these SUBREGs just say how to treat the register.
3288
3289 Similarly if we find an integer constant. */
4b980e20 3290
e5f6a288 3291 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4b980e20
RK
3292 {
3293 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3294 struct table_elt *elt;
3295
3296 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3297 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3298 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3299 imode)) != 0)
ddc356e8 3300 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
31c85c78
RK
3301 {
3302 if (CONSTANT_P (elt->exp)
3303 && GET_MODE (elt->exp) == VOIDmode)
3304 return elt->exp;
3305
4b980e20
RK
3306 if (GET_CODE (elt->exp) == SUBREG
3307 && GET_MODE (SUBREG_REG (elt->exp)) == mode
0516f6fe 3308 && exp_equiv_p (elt->exp, elt->exp, 1, false))
4b980e20 3309 return copy_rtx (SUBREG_REG (elt->exp));
1bb98cec 3310 }
4b980e20
RK
3311
3312 return x;
3313 }
e5f6a288 3314
7afe21cc
RK
3315 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3316 We might be able to if the SUBREG is extracting a single word in an
3317 integral mode or extracting the low part. */
3318
3319 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3320 const_arg0 = equiv_constant (folded_arg0);
3321 if (const_arg0)
3322 folded_arg0 = const_arg0;
3323
3324 if (folded_arg0 != SUBREG_REG (x))
3325 {
949c5d62
JH
3326 new = simplify_subreg (mode, folded_arg0,
3327 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7afe21cc
RK
3328 if (new)
3329 return new;
3330 }
e5f6a288 3331
f8cfc6aa 3332 if (REG_P (folded_arg0)
4c442790 3333 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0)))
e5f6a288
RK
3334 {
3335 struct table_elt *elt;
3336
e5f6a288
RK
3337 elt = lookup (folded_arg0,
3338 HASH (folded_arg0, GET_MODE (folded_arg0)),
3339 GET_MODE (folded_arg0));
3340
3341 if (elt)
3342 elt = elt->first_same_value;
3343
4c442790
PB
3344 if (subreg_lowpart_p (x))
3345 /* If this is a narrowing SUBREG and our operand is a REG, see
3346 if we can find an equivalence for REG that is an arithmetic
3347 operation in a wider mode where both operands are paradoxical
3348 SUBREGs from objects of our result mode. In that case, we
3349 couldn-t report an equivalent value for that operation, since we
3350 don't know what the extra bits will be. But we can find an
3351 equivalence for this SUBREG by folding that operation in the
3352 narrow mode. This allows us to fold arithmetic in narrow modes
3353 when the machine only supports word-sized arithmetic.
3354
3355 Also look for a case where we have a SUBREG whose operand
3356 is the same as our result. If both modes are smaller
3357 than a word, we are simply interpreting a register in
3358 different modes and we can use the inner value. */
3359
3360 for (; elt; elt = elt->next_same_value)
3361 {
3362 enum rtx_code eltcode = GET_CODE (elt->exp);
3363
3364 /* Just check for unary and binary operations. */
ec8e098d
PB
3365 if (UNARY_P (elt->exp)
3366 && eltcode != SIGN_EXTEND
3367 && eltcode != ZERO_EXTEND
4c442790
PB
3368 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3369 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3370 && (GET_MODE_CLASS (mode)
3371 == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3372 {
3373 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
e5f6a288 3374
f8cfc6aa 3375 if (!REG_P (op0) && ! CONSTANT_P (op0))
4c442790 3376 op0 = fold_rtx (op0, NULL_RTX);
e5f6a288 3377
e5f6a288 3378 op0 = equiv_constant (op0);
4c442790
PB
3379 if (op0)
3380 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3381 op0, mode);
3382 }
ec8e098d 3383 else if (ARITHMETIC_P (elt->exp)
4c442790
PB
3384 && eltcode != DIV && eltcode != MOD
3385 && eltcode != UDIV && eltcode != UMOD
3386 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3387 && eltcode != ROTATE && eltcode != ROTATERT
3388 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3389 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3390 == mode))
3391 || CONSTANT_P (XEXP (elt->exp, 0)))
3392 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3393 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3394 == mode))
3395 || CONSTANT_P (XEXP (elt->exp, 1))))
3396 {
3397 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3398 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3399
f8cfc6aa 3400 if (op0 && !REG_P (op0) && ! CONSTANT_P (op0))
4c442790
PB
3401 op0 = fold_rtx (op0, NULL_RTX);
3402
3403 if (op0)
3404 op0 = equiv_constant (op0);
3405
f8cfc6aa 3406 if (op1 && !REG_P (op1) && ! CONSTANT_P (op1))
4c442790
PB
3407 op1 = fold_rtx (op1, NULL_RTX);
3408
3409 if (op1)
3410 op1 = equiv_constant (op1);
3411
3412 /* If we are looking for the low SImode part of
3413 (ashift:DI c (const_int 32)), it doesn't work
3414 to compute that in SImode, because a 32-bit shift
3415 in SImode is unpredictable. We know the value is 0. */
3416 if (op0 && op1
3417 && GET_CODE (elt->exp) == ASHIFT
3418 && GET_CODE (op1) == CONST_INT
3419 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3420 {
3421 if (INTVAL (op1)
3422 < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3423 /* If the count fits in the inner mode's width,
3424 but exceeds the outer mode's width,
3425 the value will get truncated to 0
3426 by the subreg. */
3427 new = CONST0_RTX (mode);
3428 else
3429 /* If the count exceeds even the inner mode's width,
76fb0b60 3430 don't fold this expression. */
4c442790
PB
3431 new = 0;
3432 }
3433 else if (op0 && op1)
3434 new = simplify_binary_operation (GET_CODE (elt->exp), mode, op0, op1);
3435 }
e5f6a288 3436
4c442790
PB
3437 else if (GET_CODE (elt->exp) == SUBREG
3438 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3439 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3440 <= UNITS_PER_WORD)
0516f6fe 3441 && exp_equiv_p (elt->exp, elt->exp, 1, false))
4c442790 3442 new = copy_rtx (SUBREG_REG (elt->exp));
4b980e20 3443
4c442790
PB
3444 if (new)
3445 return new;
3446 }
3447 else
3448 /* A SUBREG resulting from a zero extension may fold to zero if
3449 it extracts higher bits than the ZERO_EXTEND's source bits.
3450 FIXME: if combine tried to, er, combine these instructions,
3451 this transformation may be moved to simplify_subreg. */
3452 for (; elt; elt = elt->next_same_value)
3453 {
3454 if (GET_CODE (elt->exp) == ZERO_EXTEND
3455 && subreg_lsb (x)
3456 >= GET_MODE_BITSIZE (GET_MODE (XEXP (elt->exp, 0))))
3457 return CONST0_RTX (mode);
3458 }
e5f6a288
RK
3459 }
3460
7afe21cc
RK
3461 return x;
3462
3463 case NOT:
3464 case NEG:
3465 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3466 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3467 new = lookup_as_function (XEXP (x, 0), code);
3468 if (new)
3469 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3470 break;
13c9910f 3471
7afe21cc
RK
3472 case MEM:
3473 /* If we are not actually processing an insn, don't try to find the
3474 best address. Not only don't we care, but we could modify the
3475 MEM in an invalid way since we have no insn to validate against. */
3476 if (insn != 0)
01329426 3477 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
7afe21cc
RK
3478
3479 {
3480 /* Even if we don't fold in the insn itself,
3481 we can safely do so here, in hopes of getting a constant. */
906c4e36 3482 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
7afe21cc 3483 rtx base = 0;
906c4e36 3484 HOST_WIDE_INT offset = 0;
7afe21cc 3485
f8cfc6aa 3486 if (REG_P (addr)
1bb98cec
DM
3487 && REGNO_QTY_VALID_P (REGNO (addr)))
3488 {
3489 int addr_q = REG_QTY (REGNO (addr));
3490 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3491
3492 if (GET_MODE (addr) == addr_ent->mode
3493 && addr_ent->const_rtx != NULL_RTX)
3494 addr = addr_ent->const_rtx;
3495 }
7afe21cc
RK
3496
3497 /* If address is constant, split it into a base and integer offset. */
3498 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3499 base = addr;
3500 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3501 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3502 {
3503 base = XEXP (XEXP (addr, 0), 0);
3504 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3505 }
3506 else if (GET_CODE (addr) == LO_SUM
3507 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3508 base = XEXP (addr, 1);
3509
3510 /* If this is a constant pool reference, we can fold it into its
3511 constant to allow better value tracking. */
3512 if (base && GET_CODE (base) == SYMBOL_REF
3513 && CONSTANT_POOL_ADDRESS_P (base))
3514 {
3515 rtx constant = get_pool_constant (base);
3516 enum machine_mode const_mode = get_pool_mode (base);
3517 rtx new;
3518
3519 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
dd0ba281
RS
3520 {
3521 constant_pool_entries_cost = COST (constant);
3522 constant_pool_entries_regcost = approx_reg_cost (constant);
3523 }
7afe21cc
RK
3524
3525 /* If we are loading the full constant, we have an equivalence. */
3526 if (offset == 0 && mode == const_mode)
3527 return constant;
3528
9faa82d8 3529 /* If this actually isn't a constant (weird!), we can't do
7afe21cc
RK
3530 anything. Otherwise, handle the two most common cases:
3531 extracting a word from a multi-word constant, and extracting
3532 the low-order bits. Other cases don't seem common enough to
3533 worry about. */
3534 if (! CONSTANT_P (constant))
3535 return x;
3536
3537 if (GET_MODE_CLASS (mode) == MODE_INT
3538 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3539 && offset % UNITS_PER_WORD == 0
3540 && (new = operand_subword (constant,
3541 offset / UNITS_PER_WORD,
3542 0, const_mode)) != 0)
3543 return new;
3544
3545 if (((BYTES_BIG_ENDIAN
3546 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3547 || (! BYTES_BIG_ENDIAN && offset == 0))
4de249d9 3548 && (new = gen_lowpart (mode, constant)) != 0)
7afe21cc
RK
3549 return new;
3550 }
3551
3552 /* If this is a reference to a label at a known position in a jump
3553 table, we also know its value. */
3554 if (base && GET_CODE (base) == LABEL_REF)
3555 {
3556 rtx label = XEXP (base, 0);
3557 rtx table_insn = NEXT_INSN (label);
278a83b2 3558
4b4bf941 3559 if (table_insn && JUMP_P (table_insn)
7afe21cc
RK
3560 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3561 {
3562 rtx table = PATTERN (table_insn);
3563
3564 if (offset >= 0
3565 && (offset / GET_MODE_SIZE (GET_MODE (table))
3566 < XVECLEN (table, 0)))
3567 return XVECEXP (table, 0,
3568 offset / GET_MODE_SIZE (GET_MODE (table)));
3569 }
4b4bf941 3570 if (table_insn && JUMP_P (table_insn)
7afe21cc
RK
3571 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3572 {
3573 rtx table = PATTERN (table_insn);
3574
3575 if (offset >= 0
3576 && (offset / GET_MODE_SIZE (GET_MODE (table))
3577 < XVECLEN (table, 1)))
3578 {
3579 offset /= GET_MODE_SIZE (GET_MODE (table));
38a448ca
RH
3580 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3581 XEXP (table, 0));
7afe21cc
RK
3582
3583 if (GET_MODE (table) != Pmode)
38a448ca 3584 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
7afe21cc 3585
278a83b2 3586 /* Indicate this is a constant. This isn't a
67a37737
RK
3587 valid form of CONST, but it will only be used
3588 to fold the next insns and then discarded, so
ac7ef8d5
FS
3589 it should be safe.
3590
3591 Note this expression must be explicitly discarded,
3592 by cse_insn, else it may end up in a REG_EQUAL note
3593 and "escape" to cause problems elsewhere. */
38a448ca 3594 return gen_rtx_CONST (GET_MODE (new), new);
7afe21cc
RK
3595 }
3596 }
3597 }
3598
3599 return x;
3600 }
9255709c 3601
a5e5cf67
RH
3602#ifdef NO_FUNCTION_CSE
3603 case CALL:
3604 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3605 return x;
3606 break;
3607#endif
3608
9255709c 3609 case ASM_OPERANDS:
6c667859
AB
3610 if (insn)
3611 {
3612 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3613 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3614 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3615 }
9255709c 3616 break;
278a83b2 3617
e9a25f70
JL
3618 default:
3619 break;
7afe21cc
RK
3620 }
3621
3622 const_arg0 = 0;
3623 const_arg1 = 0;
3624 const_arg2 = 0;
3625 mode_arg0 = VOIDmode;
3626
3627 /* Try folding our operands.
3628 Then see which ones have constant values known. */
3629
3630 fmt = GET_RTX_FORMAT (code);
3631 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3632 if (fmt[i] == 'e')
3633 {
3634 rtx arg = XEXP (x, i);
3635 rtx folded_arg = arg, const_arg = 0;
3636 enum machine_mode mode_arg = GET_MODE (arg);
3637 rtx cheap_arg, expensive_arg;
3638 rtx replacements[2];
3639 int j;
5b437e0f 3640 int old_cost = COST_IN (XEXP (x, i), code);
7afe21cc
RK
3641
3642 /* Most arguments are cheap, so handle them specially. */
3643 switch (GET_CODE (arg))
3644 {
3645 case REG:
3646 /* This is the same as calling equiv_constant; it is duplicated
3647 here for speed. */
1bb98cec
DM
3648 if (REGNO_QTY_VALID_P (REGNO (arg)))
3649 {
3650 int arg_q = REG_QTY (REGNO (arg));
3651 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3652
3653 if (arg_ent->const_rtx != NULL_RTX
f8cfc6aa 3654 && !REG_P (arg_ent->const_rtx)
1bb98cec
DM
3655 && GET_CODE (arg_ent->const_rtx) != PLUS)
3656 const_arg
4de249d9 3657 = gen_lowpart (GET_MODE (arg),
1bb98cec
DM
3658 arg_ent->const_rtx);
3659 }
7afe21cc
RK
3660 break;
3661
3662 case CONST:
3663 case CONST_INT:
3664 case SYMBOL_REF:
3665 case LABEL_REF:
3666 case CONST_DOUBLE:
69ef87e2 3667 case CONST_VECTOR:
7afe21cc
RK
3668 const_arg = arg;
3669 break;
3670
3671#ifdef HAVE_cc0
3672 case CC0:
3673 folded_arg = prev_insn_cc0;
3674 mode_arg = prev_insn_cc0_mode;
3675 const_arg = equiv_constant (folded_arg);
3676 break;
3677#endif
3678
3679 default:
3680 folded_arg = fold_rtx (arg, insn);
3681 const_arg = equiv_constant (folded_arg);
3682 }
3683
3684 /* For the first three operands, see if the operand
3685 is constant or equivalent to a constant. */
3686 switch (i)
3687 {
3688 case 0:
3689 folded_arg0 = folded_arg;
3690 const_arg0 = const_arg;
3691 mode_arg0 = mode_arg;
3692 break;
3693 case 1:
3694 folded_arg1 = folded_arg;
3695 const_arg1 = const_arg;
3696 break;
3697 case 2:
3698 const_arg2 = const_arg;
3699 break;
3700 }
3701
3702 /* Pick the least expensive of the folded argument and an
3703 equivalent constant argument. */
3704 if (const_arg == 0 || const_arg == folded_arg
f2fa288f 3705 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
7afe21cc
RK
3706 cheap_arg = folded_arg, expensive_arg = const_arg;
3707 else
3708 cheap_arg = const_arg, expensive_arg = folded_arg;
3709
3710 /* Try to replace the operand with the cheapest of the two
3711 possibilities. If it doesn't work and this is either of the first
3712 two operands of a commutative operation, try swapping them.
3713 If THAT fails, try the more expensive, provided it is cheaper
3714 than what is already there. */
3715
3716 if (cheap_arg == XEXP (x, i))
3717 continue;
3718
3719 if (insn == 0 && ! copied)
3720 {
3721 x = copy_rtx (x);
3722 copied = 1;
3723 }
3724
f2fa288f
RH
3725 /* Order the replacements from cheapest to most expensive. */
3726 replacements[0] = cheap_arg;
3727 replacements[1] = expensive_arg;
3728
68252e27 3729 for (j = 0; j < 2 && replacements[j]; j++)
7afe21cc 3730 {
f2fa288f
RH
3731 int new_cost = COST_IN (replacements[j], code);
3732
3733 /* Stop if what existed before was cheaper. Prefer constants
3734 in the case of a tie. */
3735 if (new_cost > old_cost
3736 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3737 break;
3738
8cce3d04
RS
3739 /* It's not safe to substitute the operand of a conversion
3740 operator with a constant, as the conversion's identity
3741 depends upon the mode of it's operand. This optimization
3742 is handled by the call to simplify_unary_operation. */
3743 if (GET_RTX_CLASS (code) == RTX_UNARY
3744 && GET_MODE (replacements[j]) != mode_arg0
3745 && (code == ZERO_EXTEND
3746 || code == SIGN_EXTEND
3747 || code == TRUNCATE
3748 || code == FLOAT_TRUNCATE
3749 || code == FLOAT_EXTEND
3750 || code == FLOAT
3751 || code == FIX
3752 || code == UNSIGNED_FLOAT
3753 || code == UNSIGNED_FIX))
3754 continue;
3755
7afe21cc
RK
3756 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3757 break;
3758
ec8e098d
PB
3759 if (GET_RTX_CLASS (code) == RTX_COMM_COMPARE
3760 || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
7afe21cc
RK
3761 {
3762 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3763 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3764
3765 if (apply_change_group ())
3766 {
3767 /* Swap them back to be invalid so that this loop can
3768 continue and flag them to be swapped back later. */
3769 rtx tem;
3770
3771 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3772 XEXP (x, 1) = tem;
3773 must_swap = 1;
3774 break;
3775 }
3776 }
3777 }
3778 }
3779
2d8b0f3a
JL
3780 else
3781 {
3782 if (fmt[i] == 'E')
3783 /* Don't try to fold inside of a vector of expressions.
3784 Doing nothing is harmless. */
e49a1d2e 3785 {;}
2d8b0f3a 3786 }
7afe21cc
RK
3787
3788 /* If a commutative operation, place a constant integer as the second
3789 operand unless the first operand is also a constant integer. Otherwise,
3790 place any constant second unless the first operand is also a constant. */
3791
ec8e098d 3792 if (COMMUTATIVE_P (x))
7afe21cc 3793 {
c715abdd
RS
3794 if (must_swap
3795 || swap_commutative_operands_p (const_arg0 ? const_arg0
3796 : XEXP (x, 0),
3797 const_arg1 ? const_arg1
3798 : XEXP (x, 1)))
7afe21cc 3799 {
b3694847 3800 rtx tem = XEXP (x, 0);
7afe21cc
RK
3801
3802 if (insn == 0 && ! copied)
3803 {
3804 x = copy_rtx (x);
3805 copied = 1;
3806 }
3807
3808 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3809 validate_change (insn, &XEXP (x, 1), tem, 1);
3810 if (apply_change_group ())
3811 {
3812 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3813 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3814 }
3815 }
3816 }
3817
3818 /* If X is an arithmetic operation, see if we can simplify it. */
3819
3820 switch (GET_RTX_CLASS (code))
3821 {
ec8e098d 3822 case RTX_UNARY:
67a37737
RK
3823 {
3824 int is_const = 0;
3825
3826 /* We can't simplify extension ops unless we know the
3827 original mode. */
3828 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3829 && mode_arg0 == VOIDmode)
3830 break;
3831
3832 /* If we had a CONST, strip it off and put it back later if we
3833 fold. */
3834 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3835 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3836
3837 new = simplify_unary_operation (code, mode,
3838 const_arg0 ? const_arg0 : folded_arg0,
3839 mode_arg0);
ec666d23
JH
3840 /* NEG of PLUS could be converted into MINUS, but that causes
3841 expressions of the form
3842 (CONST (MINUS (CONST_INT) (SYMBOL_REF)))
3843 which many ports mistakenly treat as LEGITIMATE_CONSTANT_P.
3844 FIXME: those ports should be fixed. */
3845 if (new != 0 && is_const
3846 && GET_CODE (new) == PLUS
3847 && (GET_CODE (XEXP (new, 0)) == SYMBOL_REF
3848 || GET_CODE (XEXP (new, 0)) == LABEL_REF)
3849 && GET_CODE (XEXP (new, 1)) == CONST_INT)
38a448ca 3850 new = gen_rtx_CONST (mode, new);
67a37737 3851 }
7afe21cc 3852 break;
278a83b2 3853
ec8e098d
PB
3854 case RTX_COMPARE:
3855 case RTX_COMM_COMPARE:
7afe21cc
RK
3856 /* See what items are actually being compared and set FOLDED_ARG[01]
3857 to those values and CODE to the actual comparison code. If any are
3858 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3859 do anything if both operands are already known to be constant. */
3860
21e5076a
UB
3861 /* ??? Vector mode comparisons are not supported yet. */
3862 if (VECTOR_MODE_P (mode))
3863 break;
3864
7afe21cc
RK
3865 if (const_arg0 == 0 || const_arg1 == 0)
3866 {
3867 struct table_elt *p0, *p1;
d6edb99e 3868 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
13c9910f 3869 enum machine_mode mode_arg1;
c610adec
RK
3870
3871#ifdef FLOAT_STORE_FLAG_VALUE
c7c955ee 3872 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
c610adec 3873 {
d6edb99e 3874 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
68252e27 3875 (FLOAT_STORE_FLAG_VALUE (mode), mode));
d6edb99e 3876 false_rtx = CONST0_RTX (mode);
c610adec
RK
3877 }
3878#endif
7afe21cc 3879
13c9910f
RS
3880 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3881 &mode_arg0, &mode_arg1);
7afe21cc 3882
13c9910f
RS
3883 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3884 what kinds of things are being compared, so we can't do
3885 anything with this comparison. */
7afe21cc
RK
3886
3887 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3888 break;
3889
75335440
KH
3890 const_arg0 = equiv_constant (folded_arg0);
3891 const_arg1 = equiv_constant (folded_arg1);
3892
0f41302f
MS
3893 /* If we do not now have two constants being compared, see
3894 if we can nevertheless deduce some things about the
3895 comparison. */
7afe21cc
RK
3896 if (const_arg0 == 0 || const_arg1 == 0)
3897 {
4977bab6
ZW
3898 /* Some addresses are known to be nonzero. We don't know
3899 their sign, but equality comparisons are known. */
7afe21cc 3900 if (const_arg1 == const0_rtx
4977bab6 3901 && nonzero_address_p (folded_arg0))
7afe21cc
RK
3902 {
3903 if (code == EQ)
d6edb99e 3904 return false_rtx;
7afe21cc 3905 else if (code == NE)
d6edb99e 3906 return true_rtx;
7afe21cc
RK
3907 }
3908
fd13313f
JH
3909 /* See if the two operands are the same. */
3910
3911 if (folded_arg0 == folded_arg1
f8cfc6aa
JQ
3912 || (REG_P (folded_arg0)
3913 && REG_P (folded_arg1)
fd13313f
JH
3914 && (REG_QTY (REGNO (folded_arg0))
3915 == REG_QTY (REGNO (folded_arg1))))
3916 || ((p0 = lookup (folded_arg0,
0516f6fe
SB
3917 SAFE_HASH (folded_arg0, mode_arg0),
3918 mode_arg0))
fd13313f 3919 && (p1 = lookup (folded_arg1,
0516f6fe
SB
3920 SAFE_HASH (folded_arg1, mode_arg0),
3921 mode_arg0))
fd13313f
JH
3922 && p0->first_same_value == p1->first_same_value))
3923 {
71925bc0
RS
3924 /* Sadly two equal NaNs are not equivalent. */
3925 if (!HONOR_NANS (mode_arg0))
3926 return ((code == EQ || code == LE || code == GE
3927 || code == LEU || code == GEU || code == UNEQ
3928 || code == UNLE || code == UNGE
3929 || code == ORDERED)
3930 ? true_rtx : false_rtx);
3931 /* Take care for the FP compares we can resolve. */
3932 if (code == UNEQ || code == UNLE || code == UNGE)
3933 return true_rtx;
3934 if (code == LTGT || code == LT || code == GT)
3935 return false_rtx;
fd13313f 3936 }
7afe21cc
RK
3937
3938 /* If FOLDED_ARG0 is a register, see if the comparison we are
3939 doing now is either the same as we did before or the reverse
3940 (we only check the reverse if not floating-point). */
f8cfc6aa 3941 else if (REG_P (folded_arg0))
7afe21cc 3942 {
30f72379 3943 int qty = REG_QTY (REGNO (folded_arg0));
7afe21cc 3944
1bb98cec
DM
3945 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3946 {
3947 struct qty_table_elem *ent = &qty_table[qty];
3948
3949 if ((comparison_dominates_p (ent->comparison_code, code)
1eb8759b
RH
3950 || (! FLOAT_MODE_P (mode_arg0)
3951 && comparison_dominates_p (ent->comparison_code,
3952 reverse_condition (code))))
1bb98cec
DM
3953 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3954 || (const_arg1
3955 && rtx_equal_p (ent->comparison_const,
3956 const_arg1))
f8cfc6aa 3957 || (REG_P (folded_arg1)
1bb98cec
DM
3958 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3959 return (comparison_dominates_p (ent->comparison_code, code)
d6edb99e 3960 ? true_rtx : false_rtx);
1bb98cec 3961 }
7afe21cc
RK
3962 }
3963 }
3964 }
3965
3966 /* If we are comparing against zero, see if the first operand is
3967 equivalent to an IOR with a constant. If so, we may be able to
3968 determine the result of this comparison. */
3969
3970 if (const_arg1 == const0_rtx)
3971 {
3972 rtx y = lookup_as_function (folded_arg0, IOR);
3973 rtx inner_const;
3974
3975 if (y != 0
3976 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3977 && GET_CODE (inner_const) == CONST_INT
3978 && INTVAL (inner_const) != 0)
3979 {
3980 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
906c4e36
RK
3981 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
3982 && (INTVAL (inner_const)
3983 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
d6edb99e 3984 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
c610adec
RK
3985
3986#ifdef FLOAT_STORE_FLAG_VALUE
c7c955ee 3987 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
c610adec 3988 {
d6edb99e 3989 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
12530dbe 3990 (FLOAT_STORE_FLAG_VALUE (mode), mode));
d6edb99e 3991 false_rtx = CONST0_RTX (mode);
c610adec
RK
3992 }
3993#endif
7afe21cc
RK
3994
3995 switch (code)
3996 {
3997 case EQ:
d6edb99e 3998 return false_rtx;
7afe21cc 3999 case NE:
d6edb99e 4000 return true_rtx;
7afe21cc
RK
4001 case LT: case LE:
4002 if (has_sign)
d6edb99e 4003 return true_rtx;
7afe21cc
RK
4004 break;
4005 case GT: case GE:
4006 if (has_sign)
d6edb99e 4007 return false_rtx;
7afe21cc 4008 break;
e9a25f70
JL
4009 default:
4010 break;
7afe21cc
RK
4011 }
4012 }
4013 }
4014
c6fb08ad
PB
4015 {
4016 rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
4017 rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
4018 new = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
4019 }
7afe21cc
RK
4020 break;
4021
ec8e098d
PB
4022 case RTX_BIN_ARITH:
4023 case RTX_COMM_ARITH:
7afe21cc
RK
4024 switch (code)
4025 {
4026 case PLUS:
4027 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4028 with that LABEL_REF as its second operand. If so, the result is
4029 the first operand of that MINUS. This handles switches with an
4030 ADDR_DIFF_VEC table. */
4031 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4032 {
e650cbda
RK
4033 rtx y
4034 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
ddc356e8 4035 : lookup_as_function (folded_arg0, MINUS);
7afe21cc
RK
4036
4037 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4038 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4039 return XEXP (y, 0);
67a37737
RK
4040
4041 /* Now try for a CONST of a MINUS like the above. */
e650cbda
RK
4042 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4043 : lookup_as_function (folded_arg0, CONST))) != 0
67a37737
RK
4044 && GET_CODE (XEXP (y, 0)) == MINUS
4045 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
ddc356e8 4046 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
67a37737 4047 return XEXP (XEXP (y, 0), 0);
7afe21cc 4048 }
c2cc0778 4049
e650cbda
RK
4050 /* Likewise if the operands are in the other order. */
4051 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4052 {
4053 rtx y
4054 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
ddc356e8 4055 : lookup_as_function (folded_arg1, MINUS);
e650cbda
RK
4056
4057 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4058 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4059 return XEXP (y, 0);
4060
4061 /* Now try for a CONST of a MINUS like the above. */
4062 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4063 : lookup_as_function (folded_arg1, CONST))) != 0
4064 && GET_CODE (XEXP (y, 0)) == MINUS
4065 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
ddc356e8 4066 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
e650cbda
RK
4067 return XEXP (XEXP (y, 0), 0);
4068 }
4069
c2cc0778
RK
4070 /* If second operand is a register equivalent to a negative
4071 CONST_INT, see if we can find a register equivalent to the
4072 positive constant. Make a MINUS if so. Don't do this for
5d595063 4073 a non-negative constant since we might then alternate between
a1f300c0 4074 choosing positive and negative constants. Having the positive
5d595063
RK
4075 constant previously-used is the more common case. Be sure
4076 the resulting constant is non-negative; if const_arg1 were
4077 the smallest negative number this would overflow: depending
4078 on the mode, this would either just be the same value (and
4079 hence not save anything) or be incorrect. */
4080 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4081 && INTVAL (const_arg1) < 0
4741f6ad
JL
4082 /* This used to test
4083
ddc356e8 4084 -INTVAL (const_arg1) >= 0
4741f6ad
JL
4085
4086 But The Sun V5.0 compilers mis-compiled that test. So
4087 instead we test for the problematic value in a more direct
4088 manner and hope the Sun compilers get it correct. */
5c45a8ac
KG
4089 && INTVAL (const_arg1) !=
4090 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
f8cfc6aa 4091 && REG_P (folded_arg1))
c2cc0778 4092 {
ddc356e8 4093 rtx new_const = GEN_INT (-INTVAL (const_arg1));
c2cc0778 4094 struct table_elt *p
0516f6fe 4095 = lookup (new_const, SAFE_HASH (new_const, mode), mode);
c2cc0778
RK
4096
4097 if (p)
4098 for (p = p->first_same_value; p; p = p->next_same_value)
f8cfc6aa 4099 if (REG_P (p->exp))
0cedb36c
JL
4100 return simplify_gen_binary (MINUS, mode, folded_arg0,
4101 canon_reg (p->exp, NULL_RTX));
c2cc0778 4102 }
13c9910f
RS
4103 goto from_plus;
4104
4105 case MINUS:
4106 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4107 If so, produce (PLUS Z C2-C). */
4108 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4109 {
4110 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4111 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
f3becefd
RK
4112 return fold_rtx (plus_constant (copy_rtx (y),
4113 -INTVAL (const_arg1)),
a3b5c94a 4114 NULL_RTX);
13c9910f 4115 }
7afe21cc 4116
ddc356e8 4117 /* Fall through. */
7afe21cc 4118
13c9910f 4119 from_plus:
7afe21cc
RK
4120 case SMIN: case SMAX: case UMIN: case UMAX:
4121 case IOR: case AND: case XOR:
f930bfd0 4122 case MULT:
7afe21cc
RK
4123 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4124 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4125 is known to be of similar form, we may be able to replace the
4126 operation with a combined operation. This may eliminate the
4127 intermediate operation if every use is simplified in this way.
4128 Note that the similar optimization done by combine.c only works
4129 if the intermediate operation's result has only one reference. */
4130
f8cfc6aa 4131 if (REG_P (folded_arg0)
7afe21cc
RK
4132 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4133 {
4134 int is_shift
4135 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4136 rtx y = lookup_as_function (folded_arg0, code);
4137 rtx inner_const;
4138 enum rtx_code associate_code;
4139 rtx new_const;
4140
4141 if (y == 0
4142 || 0 == (inner_const
4143 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4144 || GET_CODE (inner_const) != CONST_INT
4145 /* If we have compiled a statement like
4146 "if (x == (x & mask1))", and now are looking at
4147 "x & mask2", we will have a case where the first operand
4148 of Y is the same as our first operand. Unless we detect
4149 this case, an infinite loop will result. */
4150 || XEXP (y, 0) == folded_arg0)
4151 break;
4152
4153 /* Don't associate these operations if they are a PLUS with the
4154 same constant and it is a power of two. These might be doable
4155 with a pre- or post-increment. Similarly for two subtracts of
4156 identical powers of two with post decrement. */
4157
213d5fbc 4158 if (code == PLUS && const_arg1 == inner_const
940da324
JL
4159 && ((HAVE_PRE_INCREMENT
4160 && exact_log2 (INTVAL (const_arg1)) >= 0)
4161 || (HAVE_POST_INCREMENT
4162 && exact_log2 (INTVAL (const_arg1)) >= 0)
4163 || (HAVE_PRE_DECREMENT
4164 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4165 || (HAVE_POST_DECREMENT
4166 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
7afe21cc
RK
4167 break;
4168
4169 /* Compute the code used to compose the constants. For example,
f930bfd0 4170 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
7afe21cc 4171
f930bfd0 4172 associate_code = (is_shift || code == MINUS ? PLUS : code);
7afe21cc
RK
4173
4174 new_const = simplify_binary_operation (associate_code, mode,
4175 const_arg1, inner_const);
4176
4177 if (new_const == 0)
4178 break;
4179
4180 /* If we are associating shift operations, don't let this
4908e508
RS
4181 produce a shift of the size of the object or larger.
4182 This could occur when we follow a sign-extend by a right
4183 shift on a machine that does a sign-extend as a pair
4184 of shifts. */
7afe21cc
RK
4185
4186 if (is_shift && GET_CODE (new_const) == CONST_INT
4908e508
RS
4187 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4188 {
4189 /* As an exception, we can turn an ASHIFTRT of this
4190 form into a shift of the number of bits - 1. */
4191 if (code == ASHIFTRT)
4192 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4193 else
4194 break;
4195 }
7afe21cc
RK
4196
4197 y = copy_rtx (XEXP (y, 0));
4198
4199 /* If Y contains our first operand (the most common way this
4200 can happen is if Y is a MEM), we would do into an infinite
4201 loop if we tried to fold it. So don't in that case. */
4202
4203 if (! reg_mentioned_p (folded_arg0, y))
4204 y = fold_rtx (y, insn);
4205
0cedb36c 4206 return simplify_gen_binary (code, mode, y, new_const);
7afe21cc 4207 }
e9a25f70
JL
4208 break;
4209
f930bfd0
JW
4210 case DIV: case UDIV:
4211 /* ??? The associative optimization performed immediately above is
4212 also possible for DIV and UDIV using associate_code of MULT.
4213 However, we would need extra code to verify that the
4214 multiplication does not overflow, that is, there is no overflow
4215 in the calculation of new_const. */
4216 break;
4217
e9a25f70
JL
4218 default:
4219 break;
7afe21cc
RK
4220 }
4221
4222 new = simplify_binary_operation (code, mode,
4223 const_arg0 ? const_arg0 : folded_arg0,
4224 const_arg1 ? const_arg1 : folded_arg1);
4225 break;
4226
ec8e098d 4227 case RTX_OBJ:
7afe21cc
RK
4228 /* (lo_sum (high X) X) is simply X. */
4229 if (code == LO_SUM && const_arg0 != 0
4230 && GET_CODE (const_arg0) == HIGH
4231 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4232 return const_arg1;
4233 break;
4234
ec8e098d
PB
4235 case RTX_TERNARY:
4236 case RTX_BITFIELD_OPS:
7afe21cc
RK
4237 new = simplify_ternary_operation (code, mode, mode_arg0,
4238 const_arg0 ? const_arg0 : folded_arg0,
4239 const_arg1 ? const_arg1 : folded_arg1,
4240 const_arg2 ? const_arg2 : XEXP (x, 2));
4241 break;
ee5332b8 4242
ec8e098d
PB
4243 default:
4244 break;
7afe21cc
RK
4245 }
4246
4247 return new ? new : x;
4248}
4249\f
4250/* Return a constant value currently equivalent to X.
4251 Return 0 if we don't know one. */
4252
4253static rtx
7080f735 4254equiv_constant (rtx x)
7afe21cc 4255{
f8cfc6aa 4256 if (REG_P (x)
1bb98cec
DM
4257 && REGNO_QTY_VALID_P (REGNO (x)))
4258 {
4259 int x_q = REG_QTY (REGNO (x));
4260 struct qty_table_elem *x_ent = &qty_table[x_q];
4261
4262 if (x_ent->const_rtx)
4de249d9 4263 x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
1bb98cec 4264 }
7afe21cc 4265
2ce5e1b4 4266 if (x == 0 || CONSTANT_P (x))
7afe21cc
RK
4267 return x;
4268
fc3ffe83
RK
4269 /* If X is a MEM, try to fold it outside the context of any insn to see if
4270 it might be equivalent to a constant. That handles the case where it
4271 is a constant-pool reference. Then try to look it up in the hash table
4272 in case it is something whose value we have seen before. */
4273
3c0cb5de 4274 if (MEM_P (x))
fc3ffe83
RK
4275 {
4276 struct table_elt *elt;
4277
906c4e36 4278 x = fold_rtx (x, NULL_RTX);
fc3ffe83
RK
4279 if (CONSTANT_P (x))
4280 return x;
4281
0516f6fe 4282 elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
fc3ffe83
RK
4283 if (elt == 0)
4284 return 0;
4285
4286 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4287 if (elt->is_const && CONSTANT_P (elt->exp))
4288 return elt->exp;
4289 }
4290
7afe21cc
RK
4291 return 0;
4292}
4293\f
4294/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4295 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4296 least-significant part of X.
278a83b2 4297 MODE specifies how big a part of X to return.
7afe21cc
RK
4298
4299 If the requested operation cannot be done, 0 is returned.
4300
4de249d9 4301 This is similar to gen_lowpart_general in emit-rtl.c. */
7afe21cc
RK
4302
4303rtx
7080f735 4304gen_lowpart_if_possible (enum machine_mode mode, rtx x)
7afe21cc
RK
4305{
4306 rtx result = gen_lowpart_common (mode, x);
4307
4308 if (result)
4309 return result;
3c0cb5de 4310 else if (MEM_P (x))
7afe21cc
RK
4311 {
4312 /* This is the only other case we handle. */
b3694847 4313 int offset = 0;
7afe21cc
RK
4314 rtx new;
4315
f76b9db2
ILT
4316 if (WORDS_BIG_ENDIAN)
4317 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4318 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4319 if (BYTES_BIG_ENDIAN)
f1ec5147
RK
4320 /* Adjust the address so that the address-after-the-data is
4321 unchanged. */
4322 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4323 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4324
4325 new = adjust_address_nv (x, mode, offset);
7afe21cc
RK
4326 if (! memory_address_p (mode, XEXP (new, 0)))
4327 return 0;
f1ec5147 4328
7afe21cc
RK
4329 return new;
4330 }
4331 else
4332 return 0;
4333}
4334\f
6de9cd9a 4335/* Given INSN, a jump insn, PATH_TAKEN indicates if we are following the "taken"
7afe21cc
RK
4336 branch. It will be zero if not.
4337
4338 In certain cases, this can cause us to add an equivalence. For example,
278a83b2 4339 if we are following the taken case of
7080f735 4340 if (i == 2)
7afe21cc
RK
4341 we can add the fact that `i' and '2' are now equivalent.
4342
4343 In any case, we can record that this comparison was passed. If the same
4344 comparison is seen later, we will know its value. */
4345
4346static void
7080f735 4347record_jump_equiv (rtx insn, int taken)
7afe21cc
RK
4348{
4349 int cond_known_true;
4350 rtx op0, op1;
7f1c097d 4351 rtx set;
13c9910f 4352 enum machine_mode mode, mode0, mode1;
7afe21cc
RK
4353 int reversed_nonequality = 0;
4354 enum rtx_code code;
4355
4356 /* Ensure this is the right kind of insn. */
7f1c097d 4357 if (! any_condjump_p (insn))
7afe21cc 4358 return;
7f1c097d 4359 set = pc_set (insn);
7afe21cc
RK
4360
4361 /* See if this jump condition is known true or false. */
4362 if (taken)
7f1c097d 4363 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
7afe21cc 4364 else
7f1c097d 4365 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
7afe21cc
RK
4366
4367 /* Get the type of comparison being done and the operands being compared.
4368 If we had to reverse a non-equality condition, record that fact so we
4369 know that it isn't valid for floating-point. */
7f1c097d
JH
4370 code = GET_CODE (XEXP (SET_SRC (set), 0));
4371 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4372 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
7afe21cc 4373
13c9910f 4374 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
7afe21cc
RK
4375 if (! cond_known_true)
4376 {
261efdef 4377 code = reversed_comparison_code_parts (code, op0, op1, insn);
1eb8759b
RH
4378
4379 /* Don't remember if we can't find the inverse. */
4380 if (code == UNKNOWN)
4381 return;
7afe21cc
RK
4382 }
4383
4384 /* The mode is the mode of the non-constant. */
13c9910f
RS
4385 mode = mode0;
4386 if (mode1 != VOIDmode)
4387 mode = mode1;
7afe21cc
RK
4388
4389 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4390}
4391
794693c0
RH
4392/* Yet another form of subreg creation. In this case, we want something in
4393 MODE, and we should assume OP has MODE iff it is naturally modeless. */
4394
4395static rtx
4396record_jump_cond_subreg (enum machine_mode mode, rtx op)
4397{
4398 enum machine_mode op_mode = GET_MODE (op);
4399 if (op_mode == mode || op_mode == VOIDmode)
4400 return op;
4401 return lowpart_subreg (mode, op, op_mode);
4402}
4403
7afe21cc
RK
4404/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4405 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4406 Make any useful entries we can with that information. Called from
4407 above function and called recursively. */
4408
4409static void
7080f735
AJ
4410record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
4411 rtx op1, int reversed_nonequality)
7afe21cc 4412{
2197a88a 4413 unsigned op0_hash, op1_hash;
e428d738 4414 int op0_in_memory, op1_in_memory;
7afe21cc
RK
4415 struct table_elt *op0_elt, *op1_elt;
4416
4417 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4418 we know that they are also equal in the smaller mode (this is also
4419 true for all smaller modes whether or not there is a SUBREG, but
ac7ef8d5 4420 is not worth testing for with no SUBREG). */
7afe21cc 4421
2e794ee8 4422 /* Note that GET_MODE (op0) may not equal MODE. */
7afe21cc 4423 if (code == EQ && GET_CODE (op0) == SUBREG
2e794ee8
RS
4424 && (GET_MODE_SIZE (GET_MODE (op0))
4425 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
7afe21cc
RK
4426 {
4427 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
794693c0
RH
4428 rtx tem = record_jump_cond_subreg (inner_mode, op1);
4429 if (tem)
4430 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4431 reversed_nonequality);
7afe21cc
RK
4432 }
4433
4434 if (code == EQ && GET_CODE (op1) == SUBREG
2e794ee8
RS
4435 && (GET_MODE_SIZE (GET_MODE (op1))
4436 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
7afe21cc
RK
4437 {
4438 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
794693c0
RH
4439 rtx tem = record_jump_cond_subreg (inner_mode, op0);
4440 if (tem)
4441 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4442 reversed_nonequality);
7afe21cc
RK
4443 }
4444
278a83b2 4445 /* Similarly, if this is an NE comparison, and either is a SUBREG
7afe21cc
RK
4446 making a smaller mode, we know the whole thing is also NE. */
4447
2e794ee8
RS
4448 /* Note that GET_MODE (op0) may not equal MODE;
4449 if we test MODE instead, we can get an infinite recursion
4450 alternating between two modes each wider than MODE. */
4451
7afe21cc
RK
4452 if (code == NE && GET_CODE (op0) == SUBREG
4453 && subreg_lowpart_p (op0)
2e794ee8
RS
4454 && (GET_MODE_SIZE (GET_MODE (op0))
4455 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
7afe21cc
RK
4456 {
4457 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
794693c0
RH
4458 rtx tem = record_jump_cond_subreg (inner_mode, op1);
4459 if (tem)
4460 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4461 reversed_nonequality);
7afe21cc
RK
4462 }
4463
4464 if (code == NE && GET_CODE (op1) == SUBREG
4465 && subreg_lowpart_p (op1)
2e794ee8
RS
4466 && (GET_MODE_SIZE (GET_MODE (op1))
4467 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
7afe21cc
RK
4468 {
4469 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
794693c0
RH
4470 rtx tem = record_jump_cond_subreg (inner_mode, op0);
4471 if (tem)
4472 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4473 reversed_nonequality);
7afe21cc
RK
4474 }
4475
4476 /* Hash both operands. */
4477
4478 do_not_record = 0;
4479 hash_arg_in_memory = 0;
2197a88a 4480 op0_hash = HASH (op0, mode);
7afe21cc 4481 op0_in_memory = hash_arg_in_memory;
7afe21cc
RK
4482
4483 if (do_not_record)
4484 return;
4485
4486 do_not_record = 0;
4487 hash_arg_in_memory = 0;
2197a88a 4488 op1_hash = HASH (op1, mode);
7afe21cc 4489 op1_in_memory = hash_arg_in_memory;
278a83b2 4490
7afe21cc
RK
4491 if (do_not_record)
4492 return;
4493
4494 /* Look up both operands. */
2197a88a
RK
4495 op0_elt = lookup (op0, op0_hash, mode);
4496 op1_elt = lookup (op1, op1_hash, mode);
7afe21cc 4497
af3869c1
RK
4498 /* If both operands are already equivalent or if they are not in the
4499 table but are identical, do nothing. */
4500 if ((op0_elt != 0 && op1_elt != 0
4501 && op0_elt->first_same_value == op1_elt->first_same_value)
4502 || op0 == op1 || rtx_equal_p (op0, op1))
4503 return;
4504
7afe21cc 4505 /* If we aren't setting two things equal all we can do is save this
b2796a4b
RK
4506 comparison. Similarly if this is floating-point. In the latter
4507 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4508 If we record the equality, we might inadvertently delete code
4509 whose intent was to change -0 to +0. */
4510
cbf6a543 4511 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
7afe21cc 4512 {
1bb98cec
DM
4513 struct qty_table_elem *ent;
4514 int qty;
4515
7afe21cc
RK
4516 /* If we reversed a floating-point comparison, if OP0 is not a
4517 register, or if OP1 is neither a register or constant, we can't
4518 do anything. */
4519
f8cfc6aa 4520 if (!REG_P (op1))
7afe21cc
RK
4521 op1 = equiv_constant (op1);
4522
cbf6a543 4523 if ((reversed_nonequality && FLOAT_MODE_P (mode))
f8cfc6aa 4524 || !REG_P (op0) || op1 == 0)
7afe21cc
RK
4525 return;
4526
4527 /* Put OP0 in the hash table if it isn't already. This gives it a
4528 new quantity number. */
4529 if (op0_elt == 0)
4530 {
9714cf43 4531 if (insert_regs (op0, NULL, 0))
7afe21cc
RK
4532 {
4533 rehash_using_reg (op0);
2197a88a 4534 op0_hash = HASH (op0, mode);
2bb81c86
RK
4535
4536 /* If OP0 is contained in OP1, this changes its hash code
4537 as well. Faster to rehash than to check, except
4538 for the simple case of a constant. */
4539 if (! CONSTANT_P (op1))
2197a88a 4540 op1_hash = HASH (op1,mode);
7afe21cc
RK
4541 }
4542
9714cf43 4543 op0_elt = insert (op0, NULL, op0_hash, mode);
7afe21cc 4544 op0_elt->in_memory = op0_in_memory;
7afe21cc
RK
4545 }
4546
1bb98cec
DM
4547 qty = REG_QTY (REGNO (op0));
4548 ent = &qty_table[qty];
4549
4550 ent->comparison_code = code;
f8cfc6aa 4551 if (REG_P (op1))
7afe21cc 4552 {
5d5ea909 4553 /* Look it up again--in case op0 and op1 are the same. */
2197a88a 4554 op1_elt = lookup (op1, op1_hash, mode);
5d5ea909 4555
7afe21cc
RK
4556 /* Put OP1 in the hash table so it gets a new quantity number. */
4557 if (op1_elt == 0)
4558 {
9714cf43 4559 if (insert_regs (op1, NULL, 0))
7afe21cc
RK
4560 {
4561 rehash_using_reg (op1);
2197a88a 4562 op1_hash = HASH (op1, mode);
7afe21cc
RK
4563 }
4564
9714cf43 4565 op1_elt = insert (op1, NULL, op1_hash, mode);
7afe21cc 4566 op1_elt->in_memory = op1_in_memory;
7afe21cc
RK
4567 }
4568
1bb98cec
DM
4569 ent->comparison_const = NULL_RTX;
4570 ent->comparison_qty = REG_QTY (REGNO (op1));
7afe21cc
RK
4571 }
4572 else
4573 {
1bb98cec
DM
4574 ent->comparison_const = op1;
4575 ent->comparison_qty = -1;
7afe21cc
RK
4576 }
4577
4578 return;
4579 }
4580
eb5ad42a
RS
4581 /* If either side is still missing an equivalence, make it now,
4582 then merge the equivalences. */
7afe21cc 4583
7afe21cc
RK
4584 if (op0_elt == 0)
4585 {
9714cf43 4586 if (insert_regs (op0, NULL, 0))
7afe21cc
RK
4587 {
4588 rehash_using_reg (op0);
2197a88a 4589 op0_hash = HASH (op0, mode);
7afe21cc
RK
4590 }
4591
9714cf43 4592 op0_elt = insert (op0, NULL, op0_hash, mode);
7afe21cc 4593 op0_elt->in_memory = op0_in_memory;
7afe21cc
RK
4594 }
4595
4596 if (op1_elt == 0)
4597 {
9714cf43 4598 if (insert_regs (op1, NULL, 0))
7afe21cc
RK
4599 {
4600 rehash_using_reg (op1);
2197a88a 4601 op1_hash = HASH (op1, mode);
7afe21cc
RK
4602 }
4603
9714cf43 4604 op1_elt = insert (op1, NULL, op1_hash, mode);
7afe21cc 4605 op1_elt->in_memory = op1_in_memory;
7afe21cc 4606 }
eb5ad42a
RS
4607
4608 merge_equiv_classes (op0_elt, op1_elt);
7afe21cc
RK
4609}
4610\f
4611/* CSE processing for one instruction.
4612 First simplify sources and addresses of all assignments
4613 in the instruction, using previously-computed equivalents values.
4614 Then install the new sources and destinations in the table
278a83b2 4615 of available values.
7afe21cc 4616
1ed0205e
VM
4617 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4618 the insn. It means that INSN is inside libcall block. In this
ddc356e8 4619 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
7afe21cc
RK
4620
4621/* Data on one SET contained in the instruction. */
4622
4623struct set
4624{
4625 /* The SET rtx itself. */
4626 rtx rtl;
4627 /* The SET_SRC of the rtx (the original value, if it is changing). */
4628 rtx src;
4629 /* The hash-table element for the SET_SRC of the SET. */
4630 struct table_elt *src_elt;
2197a88a
RK
4631 /* Hash value for the SET_SRC. */
4632 unsigned src_hash;
4633 /* Hash value for the SET_DEST. */
4634 unsigned dest_hash;
7afe21cc
RK
4635 /* The SET_DEST, with SUBREG, etc., stripped. */
4636 rtx inner_dest;
278a83b2 4637 /* Nonzero if the SET_SRC is in memory. */
7afe21cc 4638 char src_in_memory;
7afe21cc
RK
4639 /* Nonzero if the SET_SRC contains something
4640 whose value cannot be predicted and understood. */
4641 char src_volatile;
496324d0
DN
4642 /* Original machine mode, in case it becomes a CONST_INT.
4643 The size of this field should match the size of the mode
4644 field of struct rtx_def (see rtl.h). */
4645 ENUM_BITFIELD(machine_mode) mode : 8;
7afe21cc
RK
4646 /* A constant equivalent for SET_SRC, if any. */
4647 rtx src_const;
47841d1b
JJ
4648 /* Original SET_SRC value used for libcall notes. */
4649 rtx orig_src;
2197a88a
RK
4650 /* Hash value of constant equivalent for SET_SRC. */
4651 unsigned src_const_hash;
7afe21cc
RK
4652 /* Table entry for constant equivalent for SET_SRC, if any. */
4653 struct table_elt *src_const_elt;
4654};
4655
4656static void
7080f735 4657cse_insn (rtx insn, rtx libcall_insn)
7afe21cc 4658{
b3694847
SS
4659 rtx x = PATTERN (insn);
4660 int i;
92f9aa51 4661 rtx tem;
b3694847 4662 int n_sets = 0;
7afe21cc 4663
2d8b0f3a 4664#ifdef HAVE_cc0
7afe21cc
RK
4665 /* Records what this insn does to set CC0. */
4666 rtx this_insn_cc0 = 0;
135d84b8 4667 enum machine_mode this_insn_cc0_mode = VOIDmode;
2d8b0f3a 4668#endif
7afe21cc
RK
4669
4670 rtx src_eqv = 0;
4671 struct table_elt *src_eqv_elt = 0;
6a651371
KG
4672 int src_eqv_volatile = 0;
4673 int src_eqv_in_memory = 0;
6a651371 4674 unsigned src_eqv_hash = 0;
7afe21cc 4675
9714cf43 4676 struct set *sets = (struct set *) 0;
7afe21cc
RK
4677
4678 this_insn = insn;
7afe21cc
RK
4679
4680 /* Find all the SETs and CLOBBERs in this instruction.
4681 Record all the SETs in the array `set' and count them.
4682 Also determine whether there is a CLOBBER that invalidates
4683 all memory references, or all references at varying addresses. */
4684
4b4bf941 4685 if (CALL_P (insn))
f1e7c95f
RK
4686 {
4687 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
f474c6f8
AO
4688 {
4689 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4690 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4691 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4692 }
f1e7c95f
RK
4693 }
4694
7afe21cc
RK
4695 if (GET_CODE (x) == SET)
4696 {
703ad42b 4697 sets = alloca (sizeof (struct set));
7afe21cc
RK
4698 sets[0].rtl = x;
4699
4700 /* Ignore SETs that are unconditional jumps.
4701 They never need cse processing, so this does not hurt.
4702 The reason is not efficiency but rather
4703 so that we can test at the end for instructions
4704 that have been simplified to unconditional jumps
4705 and not be misled by unchanged instructions
4706 that were unconditional jumps to begin with. */
4707 if (SET_DEST (x) == pc_rtx
4708 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4709 ;
4710
4711 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4712 The hard function value register is used only once, to copy to
4713 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4714 Ensure we invalidate the destination register. On the 80386 no
7722328e 4715 other code would invalidate it since it is a fixed_reg.
0f41302f 4716 We need not check the return of apply_change_group; see canon_reg. */
7afe21cc
RK
4717
4718 else if (GET_CODE (SET_SRC (x)) == CALL)
4719 {
4720 canon_reg (SET_SRC (x), insn);
77fa0940 4721 apply_change_group ();
7afe21cc 4722 fold_rtx (SET_SRC (x), insn);
bb4034b3 4723 invalidate (SET_DEST (x), VOIDmode);
7afe21cc
RK
4724 }
4725 else
4726 n_sets = 1;
4727 }
4728 else if (GET_CODE (x) == PARALLEL)
4729 {
b3694847 4730 int lim = XVECLEN (x, 0);
7afe21cc 4731
703ad42b 4732 sets = alloca (lim * sizeof (struct set));
7afe21cc
RK
4733
4734 /* Find all regs explicitly clobbered in this insn,
4735 and ensure they are not replaced with any other regs
4736 elsewhere in this insn.
4737 When a reg that is clobbered is also used for input,
4738 we should presume that that is for a reason,
4739 and we should not substitute some other register
4740 which is not supposed to be clobbered.
4741 Therefore, this loop cannot be merged into the one below
830a38ee 4742 because a CALL may precede a CLOBBER and refer to the
7afe21cc
RK
4743 value clobbered. We must not let a canonicalization do
4744 anything in that case. */
4745 for (i = 0; i < lim; i++)
4746 {
b3694847 4747 rtx y = XVECEXP (x, 0, i);
2708da92
RS
4748 if (GET_CODE (y) == CLOBBER)
4749 {
4750 rtx clobbered = XEXP (y, 0);
4751
f8cfc6aa 4752 if (REG_P (clobbered)
2708da92 4753 || GET_CODE (clobbered) == SUBREG)
bb4034b3 4754 invalidate (clobbered, VOIDmode);
2708da92
RS
4755 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4756 || GET_CODE (clobbered) == ZERO_EXTRACT)
bb4034b3 4757 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
2708da92 4758 }
7afe21cc 4759 }
278a83b2 4760
7afe21cc
RK
4761 for (i = 0; i < lim; i++)
4762 {
b3694847 4763 rtx y = XVECEXP (x, 0, i);
7afe21cc
RK
4764 if (GET_CODE (y) == SET)
4765 {
7722328e
RK
4766 /* As above, we ignore unconditional jumps and call-insns and
4767 ignore the result of apply_change_group. */
7afe21cc
RK
4768 if (GET_CODE (SET_SRC (y)) == CALL)
4769 {
4770 canon_reg (SET_SRC (y), insn);
77fa0940 4771 apply_change_group ();
7afe21cc 4772 fold_rtx (SET_SRC (y), insn);
bb4034b3 4773 invalidate (SET_DEST (y), VOIDmode);
7afe21cc
RK
4774 }
4775 else if (SET_DEST (y) == pc_rtx
4776 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4777 ;
4778 else
4779 sets[n_sets++].rtl = y;
4780 }
4781 else if (GET_CODE (y) == CLOBBER)
4782 {
9ae8ffe7 4783 /* If we clobber memory, canon the address.
7afe21cc
RK
4784 This does nothing when a register is clobbered
4785 because we have already invalidated the reg. */
3c0cb5de 4786 if (MEM_P (XEXP (y, 0)))
9ae8ffe7 4787 canon_reg (XEXP (y, 0), NULL_RTX);
7afe21cc
RK
4788 }
4789 else if (GET_CODE (y) == USE
f8cfc6aa 4790 && ! (REG_P (XEXP (y, 0))
7afe21cc 4791 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
906c4e36 4792 canon_reg (y, NULL_RTX);
7afe21cc
RK
4793 else if (GET_CODE (y) == CALL)
4794 {
7722328e
RK
4795 /* The result of apply_change_group can be ignored; see
4796 canon_reg. */
7afe21cc 4797 canon_reg (y, insn);
77fa0940 4798 apply_change_group ();
7afe21cc
RK
4799 fold_rtx (y, insn);
4800 }
4801 }
4802 }
4803 else if (GET_CODE (x) == CLOBBER)
4804 {
3c0cb5de 4805 if (MEM_P (XEXP (x, 0)))
9ae8ffe7 4806 canon_reg (XEXP (x, 0), NULL_RTX);
7afe21cc
RK
4807 }
4808
4809 /* Canonicalize a USE of a pseudo register or memory location. */
4810 else if (GET_CODE (x) == USE
f8cfc6aa 4811 && ! (REG_P (XEXP (x, 0))
7afe21cc 4812 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
906c4e36 4813 canon_reg (XEXP (x, 0), NULL_RTX);
7afe21cc
RK
4814 else if (GET_CODE (x) == CALL)
4815 {
7722328e 4816 /* The result of apply_change_group can be ignored; see canon_reg. */
7afe21cc 4817 canon_reg (x, insn);
77fa0940 4818 apply_change_group ();
7afe21cc
RK
4819 fold_rtx (x, insn);
4820 }
4821
7b3ab05e
JW
4822 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4823 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4824 is handled specially for this case, and if it isn't set, then there will
9faa82d8 4825 be no equivalence for the destination. */
92f9aa51
RK
4826 if (n_sets == 1 && REG_NOTES (insn) != 0
4827 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
7b3ab05e
JW
4828 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4829 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
7b668f9e
JJ
4830 {
4831 src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4832 XEXP (tem, 0) = src_eqv;
4833 }
7afe21cc
RK
4834
4835 /* Canonicalize sources and addresses of destinations.
4836 We do this in a separate pass to avoid problems when a MATCH_DUP is
4837 present in the insn pattern. In that case, we want to ensure that
4838 we don't break the duplicate nature of the pattern. So we will replace
4839 both operands at the same time. Otherwise, we would fail to find an
4840 equivalent substitution in the loop calling validate_change below.
7afe21cc
RK
4841
4842 We used to suppress canonicalization of DEST if it appears in SRC,
77fa0940 4843 but we don't do this any more. */
7afe21cc
RK
4844
4845 for (i = 0; i < n_sets; i++)
4846 {
4847 rtx dest = SET_DEST (sets[i].rtl);
4848 rtx src = SET_SRC (sets[i].rtl);
4849 rtx new = canon_reg (src, insn);
58873255 4850 int insn_code;
7afe21cc 4851
47841d1b 4852 sets[i].orig_src = src;
f8cfc6aa 4853 if ((REG_P (new) && REG_P (src)
77fa0940
RK
4854 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4855 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
58873255 4856 || (insn_code = recog_memoized (insn)) < 0
a995e389 4857 || insn_data[insn_code].n_dups > 0)
77fa0940 4858 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
7afe21cc
RK
4859 else
4860 SET_SRC (sets[i].rtl) = new;
4861
46d096a3 4862 if (GET_CODE (dest) == ZERO_EXTRACT)
7afe21cc
RK
4863 {
4864 validate_change (insn, &XEXP (dest, 1),
77fa0940 4865 canon_reg (XEXP (dest, 1), insn), 1);
7afe21cc 4866 validate_change (insn, &XEXP (dest, 2),
77fa0940 4867 canon_reg (XEXP (dest, 2), insn), 1);
7afe21cc
RK
4868 }
4869
46d096a3 4870 while (GET_CODE (dest) == SUBREG
7afe21cc 4871 || GET_CODE (dest) == ZERO_EXTRACT
46d096a3 4872 || GET_CODE (dest) == STRICT_LOW_PART)
7afe21cc
RK
4873 dest = XEXP (dest, 0);
4874
3c0cb5de 4875 if (MEM_P (dest))
7afe21cc
RK
4876 canon_reg (dest, insn);
4877 }
4878
77fa0940
RK
4879 /* Now that we have done all the replacements, we can apply the change
4880 group and see if they all work. Note that this will cause some
4881 canonicalizations that would have worked individually not to be applied
4882 because some other canonicalization didn't work, but this should not
278a83b2 4883 occur often.
7722328e
RK
4884
4885 The result of apply_change_group can be ignored; see canon_reg. */
77fa0940
RK
4886
4887 apply_change_group ();
4888
7afe21cc
RK
4889 /* Set sets[i].src_elt to the class each source belongs to.
4890 Detect assignments from or to volatile things
4891 and set set[i] to zero so they will be ignored
4892 in the rest of this function.
4893
4894 Nothing in this loop changes the hash table or the register chains. */
4895
4896 for (i = 0; i < n_sets; i++)
4897 {
b3694847
SS
4898 rtx src, dest;
4899 rtx src_folded;
4900 struct table_elt *elt = 0, *p;
7afe21cc
RK
4901 enum machine_mode mode;
4902 rtx src_eqv_here;
4903 rtx src_const = 0;
4904 rtx src_related = 0;
4905 struct table_elt *src_const_elt = 0;
99a9c946
GS
4906 int src_cost = MAX_COST;
4907 int src_eqv_cost = MAX_COST;
4908 int src_folded_cost = MAX_COST;
4909 int src_related_cost = MAX_COST;
4910 int src_elt_cost = MAX_COST;
4911 int src_regcost = MAX_COST;
4912 int src_eqv_regcost = MAX_COST;
4913 int src_folded_regcost = MAX_COST;
4914 int src_related_regcost = MAX_COST;
4915 int src_elt_regcost = MAX_COST;
da7d8304 4916 /* Set nonzero if we need to call force_const_mem on with the
7afe21cc
RK
4917 contents of src_folded before using it. */
4918 int src_folded_force_flag = 0;
4919
4920 dest = SET_DEST (sets[i].rtl);
4921 src = SET_SRC (sets[i].rtl);
4922
4923 /* If SRC is a constant that has no machine mode,
4924 hash it with the destination's machine mode.
4925 This way we can keep different modes separate. */
4926
4927 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4928 sets[i].mode = mode;
4929
4930 if (src_eqv)
4931 {
4932 enum machine_mode eqvmode = mode;
4933 if (GET_CODE (dest) == STRICT_LOW_PART)
4934 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4935 do_not_record = 0;
4936 hash_arg_in_memory = 0;
2197a88a 4937 src_eqv_hash = HASH (src_eqv, eqvmode);
7afe21cc
RK
4938
4939 /* Find the equivalence class for the equivalent expression. */
4940
4941 if (!do_not_record)
2197a88a 4942 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
7afe21cc
RK
4943
4944 src_eqv_volatile = do_not_record;
4945 src_eqv_in_memory = hash_arg_in_memory;
7afe21cc
RK
4946 }
4947
4948 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4949 value of the INNER register, not the destination. So it is not
3826a3da 4950 a valid substitution for the source. But save it for later. */
7afe21cc
RK
4951 if (GET_CODE (dest) == STRICT_LOW_PART)
4952 src_eqv_here = 0;
4953 else
4954 src_eqv_here = src_eqv;
4955
4956 /* Simplify and foldable subexpressions in SRC. Then get the fully-
4957 simplified result, which may not necessarily be valid. */
4958 src_folded = fold_rtx (src, insn);
4959
e6a125a0
RK
4960#if 0
4961 /* ??? This caused bad code to be generated for the m68k port with -O2.
4962 Suppose src is (CONST_INT -1), and that after truncation src_folded
4963 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4964 At the end we will add src and src_const to the same equivalence
4965 class. We now have 3 and -1 on the same equivalence class. This
4966 causes later instructions to be mis-optimized. */
7afe21cc
RK
4967 /* If storing a constant in a bitfield, pre-truncate the constant
4968 so we will be able to record it later. */
46d096a3 4969 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
7afe21cc
RK
4970 {
4971 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4972
4973 if (GET_CODE (src) == CONST_INT
4974 && GET_CODE (width) == CONST_INT
906c4e36
RK
4975 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4976 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4977 src_folded
4978 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4979 << INTVAL (width)) - 1));
7afe21cc 4980 }
e6a125a0 4981#endif
7afe21cc
RK
4982
4983 /* Compute SRC's hash code, and also notice if it
4984 should not be recorded at all. In that case,
4985 prevent any further processing of this assignment. */
4986 do_not_record = 0;
4987 hash_arg_in_memory = 0;
7afe21cc
RK
4988
4989 sets[i].src = src;
2197a88a 4990 sets[i].src_hash = HASH (src, mode);
7afe21cc
RK
4991 sets[i].src_volatile = do_not_record;
4992 sets[i].src_in_memory = hash_arg_in_memory;
7afe21cc 4993
50196afa 4994 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
43e72072
JJ
4995 a pseudo, do not record SRC. Using SRC as a replacement for
4996 anything else will be incorrect in that situation. Note that
4997 this usually occurs only for stack slots, in which case all the
4998 RTL would be referring to SRC, so we don't lose any optimization
4999 opportunities by not having SRC in the hash table. */
50196afa 5000
3c0cb5de 5001 if (MEM_P (src)
43e72072 5002 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
f8cfc6aa 5003 && REG_P (dest)
43e72072 5004 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
50196afa
RK
5005 sets[i].src_volatile = 1;
5006
0dadecf6
RK
5007#if 0
5008 /* It is no longer clear why we used to do this, but it doesn't
5009 appear to still be needed. So let's try without it since this
5010 code hurts cse'ing widened ops. */
9a5a17f3 5011 /* If source is a paradoxical subreg (such as QI treated as an SI),
7afe21cc
RK
5012 treat it as volatile. It may do the work of an SI in one context
5013 where the extra bits are not being used, but cannot replace an SI
5014 in general. */
5015 if (GET_CODE (src) == SUBREG
5016 && (GET_MODE_SIZE (GET_MODE (src))
5017 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5018 sets[i].src_volatile = 1;
0dadecf6 5019#endif
7afe21cc
RK
5020
5021 /* Locate all possible equivalent forms for SRC. Try to replace
5022 SRC in the insn with each cheaper equivalent.
5023
5024 We have the following types of equivalents: SRC itself, a folded
5025 version, a value given in a REG_EQUAL note, or a value related
5026 to a constant.
5027
5028 Each of these equivalents may be part of an additional class
5029 of equivalents (if more than one is in the table, they must be in
5030 the same class; we check for this).
5031
5032 If the source is volatile, we don't do any table lookups.
5033
5034 We note any constant equivalent for possible later use in a
5035 REG_NOTE. */
5036
5037 if (!sets[i].src_volatile)
2197a88a 5038 elt = lookup (src, sets[i].src_hash, mode);
7afe21cc
RK
5039
5040 sets[i].src_elt = elt;
5041
5042 if (elt && src_eqv_here && src_eqv_elt)
278a83b2
KH
5043 {
5044 if (elt->first_same_value != src_eqv_elt->first_same_value)
7afe21cc
RK
5045 {
5046 /* The REG_EQUAL is indicating that two formerly distinct
5047 classes are now equivalent. So merge them. */
5048 merge_equiv_classes (elt, src_eqv_elt);
2197a88a
RK
5049 src_eqv_hash = HASH (src_eqv, elt->mode);
5050 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
7afe21cc
RK
5051 }
5052
278a83b2
KH
5053 src_eqv_here = 0;
5054 }
7afe21cc
RK
5055
5056 else if (src_eqv_elt)
278a83b2 5057 elt = src_eqv_elt;
7afe21cc
RK
5058
5059 /* Try to find a constant somewhere and record it in `src_const'.
5060 Record its table element, if any, in `src_const_elt'. Look in
5061 any known equivalences first. (If the constant is not in the
2197a88a 5062 table, also set `sets[i].src_const_hash'). */
7afe21cc 5063 if (elt)
278a83b2 5064 for (p = elt->first_same_value; p; p = p->next_same_value)
7afe21cc
RK
5065 if (p->is_const)
5066 {
5067 src_const = p->exp;
5068 src_const_elt = elt;
5069 break;
5070 }
5071
5072 if (src_const == 0
5073 && (CONSTANT_P (src_folded)
278a83b2 5074 /* Consider (minus (label_ref L1) (label_ref L2)) as
7afe21cc
RK
5075 "constant" here so we will record it. This allows us
5076 to fold switch statements when an ADDR_DIFF_VEC is used. */
5077 || (GET_CODE (src_folded) == MINUS
5078 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5079 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5080 src_const = src_folded, src_const_elt = elt;
5081 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5082 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5083
5084 /* If we don't know if the constant is in the table, get its
5085 hash code and look it up. */
5086 if (src_const && src_const_elt == 0)
5087 {
2197a88a
RK
5088 sets[i].src_const_hash = HASH (src_const, mode);
5089 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
7afe21cc
RK
5090 }
5091
5092 sets[i].src_const = src_const;
5093 sets[i].src_const_elt = src_const_elt;
5094
5095 /* If the constant and our source are both in the table, mark them as
5096 equivalent. Otherwise, if a constant is in the table but the source
5097 isn't, set ELT to it. */
5098 if (src_const_elt && elt
5099 && src_const_elt->first_same_value != elt->first_same_value)
5100 merge_equiv_classes (elt, src_const_elt);
5101 else if (src_const_elt && elt == 0)
5102 elt = src_const_elt;
5103
5104 /* See if there is a register linearly related to a constant
5105 equivalent of SRC. */
5106 if (src_const
5107 && (GET_CODE (src_const) == CONST
5108 || (src_const_elt && src_const_elt->related_value != 0)))
278a83b2
KH
5109 {
5110 src_related = use_related_value (src_const, src_const_elt);
5111 if (src_related)
5112 {
7afe21cc 5113 struct table_elt *src_related_elt
278a83b2 5114 = lookup (src_related, HASH (src_related, mode), mode);
7afe21cc 5115 if (src_related_elt && elt)
278a83b2 5116 {
7afe21cc
RK
5117 if (elt->first_same_value
5118 != src_related_elt->first_same_value)
278a83b2 5119 /* This can occur when we previously saw a CONST
7afe21cc
RK
5120 involving a SYMBOL_REF and then see the SYMBOL_REF
5121 twice. Merge the involved classes. */
5122 merge_equiv_classes (elt, src_related_elt);
5123
278a83b2 5124 src_related = 0;
7afe21cc 5125 src_related_elt = 0;
278a83b2
KH
5126 }
5127 else if (src_related_elt && elt == 0)
5128 elt = src_related_elt;
7afe21cc 5129 }
278a83b2 5130 }
7afe21cc 5131
e4600702
RK
5132 /* See if we have a CONST_INT that is already in a register in a
5133 wider mode. */
5134
5135 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5136 && GET_MODE_CLASS (mode) == MODE_INT
5137 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5138 {
5139 enum machine_mode wider_mode;
5140
5141 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5142 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5143 && src_related == 0;
5144 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5145 {
5146 struct table_elt *const_elt
5147 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5148
5149 if (const_elt == 0)
5150 continue;
5151
5152 for (const_elt = const_elt->first_same_value;
5153 const_elt; const_elt = const_elt->next_same_value)
f8cfc6aa 5154 if (REG_P (const_elt->exp))
e4600702 5155 {
4de249d9 5156 src_related = gen_lowpart (mode,
e4600702
RK
5157 const_elt->exp);
5158 break;
5159 }
5160 }
5161 }
5162
d45cf215
RS
5163 /* Another possibility is that we have an AND with a constant in
5164 a mode narrower than a word. If so, it might have been generated
5165 as part of an "if" which would narrow the AND. If we already
5166 have done the AND in a wider mode, we can use a SUBREG of that
5167 value. */
5168
5169 if (flag_expensive_optimizations && ! src_related
5170 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5171 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5172 {
5173 enum machine_mode tmode;
38a448ca 5174 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
d45cf215
RS
5175
5176 for (tmode = GET_MODE_WIDER_MODE (mode);
5177 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5178 tmode = GET_MODE_WIDER_MODE (tmode))
5179 {
4de249d9 5180 rtx inner = gen_lowpart (tmode, XEXP (src, 0));
d45cf215
RS
5181 struct table_elt *larger_elt;
5182
5183 if (inner)
5184 {
5185 PUT_MODE (new_and, tmode);
5186 XEXP (new_and, 0) = inner;
5187 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5188 if (larger_elt == 0)
5189 continue;
5190
5191 for (larger_elt = larger_elt->first_same_value;
5192 larger_elt; larger_elt = larger_elt->next_same_value)
f8cfc6aa 5193 if (REG_P (larger_elt->exp))
d45cf215
RS
5194 {
5195 src_related
4de249d9 5196 = gen_lowpart (mode, larger_elt->exp);
d45cf215
RS
5197 break;
5198 }
5199
5200 if (src_related)
5201 break;
5202 }
5203 }
5204 }
7bac1be0
RK
5205
5206#ifdef LOAD_EXTEND_OP
5207 /* See if a MEM has already been loaded with a widening operation;
5208 if it has, we can use a subreg of that. Many CISC machines
5209 also have such operations, but this is only likely to be
71cc389b 5210 beneficial on these machines. */
278a83b2 5211
ddc356e8 5212 if (flag_expensive_optimizations && src_related == 0
7bac1be0
RK
5213 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5214 && GET_MODE_CLASS (mode) == MODE_INT
3c0cb5de 5215 && MEM_P (src) && ! do_not_record
f822d252 5216 && LOAD_EXTEND_OP (mode) != UNKNOWN)
7bac1be0 5217 {
9d80ef7c
RH
5218 struct rtx_def memory_extend_buf;
5219 rtx memory_extend_rtx = &memory_extend_buf;
7bac1be0 5220 enum machine_mode tmode;
278a83b2 5221
7bac1be0
RK
5222 /* Set what we are trying to extend and the operation it might
5223 have been extended with. */
9d80ef7c 5224 memset (memory_extend_rtx, 0, sizeof(*memory_extend_rtx));
7bac1be0
RK
5225 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5226 XEXP (memory_extend_rtx, 0) = src;
278a83b2 5227
7bac1be0
RK
5228 for (tmode = GET_MODE_WIDER_MODE (mode);
5229 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5230 tmode = GET_MODE_WIDER_MODE (tmode))
5231 {
5232 struct table_elt *larger_elt;
278a83b2 5233
7bac1be0 5234 PUT_MODE (memory_extend_rtx, tmode);
278a83b2 5235 larger_elt = lookup (memory_extend_rtx,
7bac1be0
RK
5236 HASH (memory_extend_rtx, tmode), tmode);
5237 if (larger_elt == 0)
5238 continue;
278a83b2 5239
7bac1be0
RK
5240 for (larger_elt = larger_elt->first_same_value;
5241 larger_elt; larger_elt = larger_elt->next_same_value)
f8cfc6aa 5242 if (REG_P (larger_elt->exp))
7bac1be0 5243 {
4de249d9 5244 src_related = gen_lowpart (mode,
7bac1be0
RK
5245 larger_elt->exp);
5246 break;
5247 }
278a83b2 5248
7bac1be0
RK
5249 if (src_related)
5250 break;
5251 }
5252 }
5253#endif /* LOAD_EXTEND_OP */
278a83b2 5254
7afe21cc 5255 if (src == src_folded)
278a83b2 5256 src_folded = 0;
7afe21cc 5257
da7d8304 5258 /* At this point, ELT, if nonzero, points to a class of expressions
7afe21cc 5259 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
da7d8304 5260 and SRC_RELATED, if nonzero, each contain additional equivalent
7afe21cc
RK
5261 expressions. Prune these latter expressions by deleting expressions
5262 already in the equivalence class.
5263
5264 Check for an equivalent identical to the destination. If found,
5265 this is the preferred equivalent since it will likely lead to
5266 elimination of the insn. Indicate this by placing it in
5267 `src_related'. */
5268
278a83b2
KH
5269 if (elt)
5270 elt = elt->first_same_value;
7afe21cc 5271 for (p = elt; p; p = p->next_same_value)
278a83b2 5272 {
7afe21cc
RK
5273 enum rtx_code code = GET_CODE (p->exp);
5274
5275 /* If the expression is not valid, ignore it. Then we do not
5276 have to check for validity below. In most cases, we can use
5277 `rtx_equal_p', since canonicalization has already been done. */
0516f6fe 5278 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
7afe21cc
RK
5279 continue;
5280
5a03c8c4
RK
5281 /* Also skip paradoxical subregs, unless that's what we're
5282 looking for. */
5283 if (code == SUBREG
5284 && (GET_MODE_SIZE (GET_MODE (p->exp))
5285 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5286 && ! (src != 0
5287 && GET_CODE (src) == SUBREG
5288 && GET_MODE (src) == GET_MODE (p->exp)
5289 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5290 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5291 continue;
5292
278a83b2 5293 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
7afe21cc 5294 src = 0;
278a83b2 5295 else if (src_folded && GET_CODE (src_folded) == code
7afe21cc
RK
5296 && rtx_equal_p (src_folded, p->exp))
5297 src_folded = 0;
278a83b2 5298 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
7afe21cc
RK
5299 && rtx_equal_p (src_eqv_here, p->exp))
5300 src_eqv_here = 0;
278a83b2 5301 else if (src_related && GET_CODE (src_related) == code
7afe21cc
RK
5302 && rtx_equal_p (src_related, p->exp))
5303 src_related = 0;
5304
5305 /* This is the same as the destination of the insns, we want
5306 to prefer it. Copy it to src_related. The code below will
5307 then give it a negative cost. */
5308 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5309 src_related = dest;
278a83b2 5310 }
7afe21cc
RK
5311
5312 /* Find the cheapest valid equivalent, trying all the available
5313 possibilities. Prefer items not in the hash table to ones
5314 that are when they are equal cost. Note that we can never
5315 worsen an insn as the current contents will also succeed.
05c33dd8 5316 If we find an equivalent identical to the destination, use it as best,
0f41302f 5317 since this insn will probably be eliminated in that case. */
7afe21cc
RK
5318 if (src)
5319 {
5320 if (rtx_equal_p (src, dest))
f1c1dfc3 5321 src_cost = src_regcost = -1;
7afe21cc 5322 else
630c79be
BS
5323 {
5324 src_cost = COST (src);
5325 src_regcost = approx_reg_cost (src);
5326 }
7afe21cc
RK
5327 }
5328
5329 if (src_eqv_here)
5330 {
5331 if (rtx_equal_p (src_eqv_here, dest))
f1c1dfc3 5332 src_eqv_cost = src_eqv_regcost = -1;
7afe21cc 5333 else
630c79be
BS
5334 {
5335 src_eqv_cost = COST (src_eqv_here);
5336 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5337 }
7afe21cc
RK
5338 }
5339
5340 if (src_folded)
5341 {
5342 if (rtx_equal_p (src_folded, dest))
f1c1dfc3 5343 src_folded_cost = src_folded_regcost = -1;
7afe21cc 5344 else
630c79be
BS
5345 {
5346 src_folded_cost = COST (src_folded);
5347 src_folded_regcost = approx_reg_cost (src_folded);
5348 }
7afe21cc
RK
5349 }
5350
5351 if (src_related)
5352 {
5353 if (rtx_equal_p (src_related, dest))
f1c1dfc3 5354 src_related_cost = src_related_regcost = -1;
7afe21cc 5355 else
630c79be
BS
5356 {
5357 src_related_cost = COST (src_related);
5358 src_related_regcost = approx_reg_cost (src_related);
5359 }
7afe21cc
RK
5360 }
5361
5362 /* If this was an indirect jump insn, a known label will really be
5363 cheaper even though it looks more expensive. */
5364 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
99a9c946 5365 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
278a83b2 5366
7afe21cc
RK
5367 /* Terminate loop when replacement made. This must terminate since
5368 the current contents will be tested and will always be valid. */
5369 while (1)
278a83b2
KH
5370 {
5371 rtx trial;
7afe21cc 5372
278a83b2 5373 /* Skip invalid entries. */
f8cfc6aa 5374 while (elt && !REG_P (elt->exp)
0516f6fe 5375 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
278a83b2 5376 elt = elt->next_same_value;
5a03c8c4
RK
5377
5378 /* A paradoxical subreg would be bad here: it'll be the right
5379 size, but later may be adjusted so that the upper bits aren't
5380 what we want. So reject it. */
5381 if (elt != 0
5382 && GET_CODE (elt->exp) == SUBREG
5383 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5384 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5385 /* It is okay, though, if the rtx we're trying to match
5386 will ignore any of the bits we can't predict. */
5387 && ! (src != 0
5388 && GET_CODE (src) == SUBREG
5389 && GET_MODE (src) == GET_MODE (elt->exp)
5390 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5391 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5392 {
5393 elt = elt->next_same_value;
5394 continue;
5395 }
278a83b2 5396
68252e27 5397 if (elt)
630c79be
BS
5398 {
5399 src_elt_cost = elt->cost;
5400 src_elt_regcost = elt->regcost;
5401 }
7afe21cc 5402
68252e27 5403 /* Find cheapest and skip it for the next time. For items
7afe21cc
RK
5404 of equal cost, use this order:
5405 src_folded, src, src_eqv, src_related and hash table entry. */
99a9c946 5406 if (src_folded
56ae04af
KH
5407 && preferable (src_folded_cost, src_folded_regcost,
5408 src_cost, src_regcost) <= 0
5409 && preferable (src_folded_cost, src_folded_regcost,
5410 src_eqv_cost, src_eqv_regcost) <= 0
5411 && preferable (src_folded_cost, src_folded_regcost,
5412 src_related_cost, src_related_regcost) <= 0
5413 && preferable (src_folded_cost, src_folded_regcost,
5414 src_elt_cost, src_elt_regcost) <= 0)
7afe21cc 5415 {
f1c1dfc3 5416 trial = src_folded, src_folded_cost = MAX_COST;
7afe21cc 5417 if (src_folded_force_flag)
9d8de1de
EB
5418 {
5419 rtx forced = force_const_mem (mode, trial);
5420 if (forced)
5421 trial = forced;
5422 }
7afe21cc 5423 }
99a9c946 5424 else if (src
56ae04af
KH
5425 && preferable (src_cost, src_regcost,
5426 src_eqv_cost, src_eqv_regcost) <= 0
5427 && preferable (src_cost, src_regcost,
5428 src_related_cost, src_related_regcost) <= 0
5429 && preferable (src_cost, src_regcost,
5430 src_elt_cost, src_elt_regcost) <= 0)
f1c1dfc3 5431 trial = src, src_cost = MAX_COST;
99a9c946 5432 else if (src_eqv_here
56ae04af
KH
5433 && preferable (src_eqv_cost, src_eqv_regcost,
5434 src_related_cost, src_related_regcost) <= 0
5435 && preferable (src_eqv_cost, src_eqv_regcost,
5436 src_elt_cost, src_elt_regcost) <= 0)
f1c1dfc3 5437 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
99a9c946 5438 else if (src_related
56ae04af
KH
5439 && preferable (src_related_cost, src_related_regcost,
5440 src_elt_cost, src_elt_regcost) <= 0)
68252e27 5441 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
278a83b2 5442 else
7afe21cc 5443 {
05c33dd8 5444 trial = copy_rtx (elt->exp);
7afe21cc 5445 elt = elt->next_same_value;
f1c1dfc3 5446 src_elt_cost = MAX_COST;
7afe21cc
RK
5447 }
5448
5449 /* We don't normally have an insn matching (set (pc) (pc)), so
5450 check for this separately here. We will delete such an
5451 insn below.
5452
d466c016
JL
5453 For other cases such as a table jump or conditional jump
5454 where we know the ultimate target, go ahead and replace the
5455 operand. While that may not make a valid insn, we will
5456 reemit the jump below (and also insert any necessary
5457 barriers). */
7afe21cc
RK
5458 if (n_sets == 1 && dest == pc_rtx
5459 && (trial == pc_rtx
5460 || (GET_CODE (trial) == LABEL_REF
5461 && ! condjump_p (insn))))
5462 {
2f39b6ca
UW
5463 /* Don't substitute non-local labels, this confuses CFG. */
5464 if (GET_CODE (trial) == LABEL_REF
5465 && LABEL_REF_NONLOCAL_P (trial))
5466 continue;
5467
d466c016 5468 SET_SRC (sets[i].rtl) = trial;
602c4c0d 5469 cse_jumps_altered = 1;
7afe21cc
RK
5470 break;
5471 }
278a83b2 5472
7afe21cc 5473 /* Look for a substitution that makes a valid insn. */
ddc356e8 5474 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
05c33dd8 5475 {
dbaff908
RS
5476 rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
5477
7bd8b2a8
JL
5478 /* If we just made a substitution inside a libcall, then we
5479 need to make the same substitution in any notes attached
5480 to the RETVAL insn. */
1ed0205e 5481 if (libcall_insn
f8cfc6aa 5482 && (REG_P (sets[i].orig_src)
47841d1b 5483 || GET_CODE (sets[i].orig_src) == SUBREG
3c0cb5de 5484 || MEM_P (sets[i].orig_src)))
d8b7ec41
RS
5485 {
5486 rtx note = find_reg_equal_equiv_note (libcall_insn);
5487 if (note != 0)
5488 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0),
5489 sets[i].orig_src,
5490 copy_rtx (new));
5491 }
7bd8b2a8 5492
7722328e
RK
5493 /* The result of apply_change_group can be ignored; see
5494 canon_reg. */
5495
dbaff908 5496 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
6702af89 5497 apply_change_group ();
05c33dd8
RK
5498 break;
5499 }
7afe21cc 5500
278a83b2 5501 /* If we previously found constant pool entries for
7afe21cc
RK
5502 constants and this is a constant, try making a
5503 pool entry. Put it in src_folded unless we already have done
5504 this since that is where it likely came from. */
5505
5506 else if (constant_pool_entries_cost
5507 && CONSTANT_P (trial)
d51ff7cb
JW
5508 /* Reject cases that will abort in decode_rtx_const.
5509 On the alpha when simplifying a switch, we get
5510 (const (truncate (minus (label_ref) (label_ref)))). */
1bbd065b
RK
5511 && ! (GET_CODE (trial) == CONST
5512 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
d51ff7cb
JW
5513 /* Likewise on IA-64, except without the truncate. */
5514 && ! (GET_CODE (trial) == CONST
5515 && GET_CODE (XEXP (trial, 0)) == MINUS
5516 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5517 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
1bbd065b 5518 && (src_folded == 0
3c0cb5de 5519 || (!MEM_P (src_folded)
1bbd065b 5520 && ! src_folded_force_flag))
9ae8ffe7
JL
5521 && GET_MODE_CLASS (mode) != MODE_CC
5522 && mode != VOIDmode)
7afe21cc
RK
5523 {
5524 src_folded_force_flag = 1;
5525 src_folded = trial;
5526 src_folded_cost = constant_pool_entries_cost;
dd0ba281 5527 src_folded_regcost = constant_pool_entries_regcost;
7afe21cc 5528 }
278a83b2 5529 }
7afe21cc
RK
5530
5531 src = SET_SRC (sets[i].rtl);
5532
5533 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5534 However, there is an important exception: If both are registers
5535 that are not the head of their equivalence class, replace SET_SRC
5536 with the head of the class. If we do not do this, we will have
5537 both registers live over a portion of the basic block. This way,
5538 their lifetimes will likely abut instead of overlapping. */
f8cfc6aa 5539 if (REG_P (dest)
1bb98cec 5540 && REGNO_QTY_VALID_P (REGNO (dest)))
7afe21cc 5541 {
1bb98cec
DM
5542 int dest_q = REG_QTY (REGNO (dest));
5543 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5544
5545 if (dest_ent->mode == GET_MODE (dest)
5546 && dest_ent->first_reg != REGNO (dest)
f8cfc6aa 5547 && REG_P (src) && REGNO (src) == REGNO (dest)
1bb98cec
DM
5548 /* Don't do this if the original insn had a hard reg as
5549 SET_SRC or SET_DEST. */
f8cfc6aa 5550 && (!REG_P (sets[i].src)
1bb98cec 5551 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
f8cfc6aa 5552 && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
1bb98cec
DM
5553 /* We can't call canon_reg here because it won't do anything if
5554 SRC is a hard register. */
759bd8b7 5555 {
1bb98cec
DM
5556 int src_q = REG_QTY (REGNO (src));
5557 struct qty_table_elem *src_ent = &qty_table[src_q];
5558 int first = src_ent->first_reg;
5559 rtx new_src
5560 = (first >= FIRST_PSEUDO_REGISTER
5561 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5562
5563 /* We must use validate-change even for this, because this
5564 might be a special no-op instruction, suitable only to
5565 tag notes onto. */
5566 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5567 {
5568 src = new_src;
5569 /* If we had a constant that is cheaper than what we are now
5570 setting SRC to, use that constant. We ignored it when we
5571 thought we could make this into a no-op. */
5572 if (src_const && COST (src_const) < COST (src)
278a83b2
KH
5573 && validate_change (insn, &SET_SRC (sets[i].rtl),
5574 src_const, 0))
1bb98cec
DM
5575 src = src_const;
5576 }
759bd8b7 5577 }
7afe21cc
RK
5578 }
5579
5580 /* If we made a change, recompute SRC values. */
5581 if (src != sets[i].src)
278a83b2 5582 {
4eadede7 5583 cse_altered = 1;
278a83b2
KH
5584 do_not_record = 0;
5585 hash_arg_in_memory = 0;
7afe21cc 5586 sets[i].src = src;
278a83b2
KH
5587 sets[i].src_hash = HASH (src, mode);
5588 sets[i].src_volatile = do_not_record;
5589 sets[i].src_in_memory = hash_arg_in_memory;
5590 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5591 }
7afe21cc
RK
5592
5593 /* If this is a single SET, we are setting a register, and we have an
5594 equivalent constant, we want to add a REG_NOTE. We don't want
5595 to write a REG_EQUAL note for a constant pseudo since verifying that
d45cf215 5596 that pseudo hasn't been eliminated is a pain. Such a note also
278a83b2 5597 won't help anything.
ac7ef8d5
FS
5598
5599 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5600 which can be created for a reference to a compile time computable
5601 entry in a jump table. */
5602
f8cfc6aa
JQ
5603 if (n_sets == 1 && src_const && REG_P (dest)
5604 && !REG_P (src_const)
ac7ef8d5
FS
5605 && ! (GET_CODE (src_const) == CONST
5606 && GET_CODE (XEXP (src_const, 0)) == MINUS
5607 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5608 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
7afe21cc 5609 {
a77b7e32
RS
5610 /* We only want a REG_EQUAL note if src_const != src. */
5611 if (! rtx_equal_p (src, src_const))
5612 {
5613 /* Make sure that the rtx is not shared. */
5614 src_const = copy_rtx (src_const);
51e2a951 5615
a77b7e32
RS
5616 /* Record the actual constant value in a REG_EQUAL note,
5617 making a new one if one does not already exist. */
5618 set_unique_reg_note (insn, REG_EQUAL, src_const);
5619 }
7afe21cc
RK
5620 }
5621
5622 /* Now deal with the destination. */
5623 do_not_record = 0;
7afe21cc 5624
46d096a3
SB
5625 /* Look within any ZERO_EXTRACT to the MEM or REG within it. */
5626 while (GET_CODE (dest) == SUBREG
7afe21cc 5627 || GET_CODE (dest) == ZERO_EXTRACT
7afe21cc 5628 || GET_CODE (dest) == STRICT_LOW_PART)
0339ce7e 5629 dest = XEXP (dest, 0);
7afe21cc
RK
5630
5631 sets[i].inner_dest = dest;
5632
3c0cb5de 5633 if (MEM_P (dest))
7afe21cc 5634 {
9ae8ffe7
JL
5635#ifdef PUSH_ROUNDING
5636 /* Stack pushes invalidate the stack pointer. */
5637 rtx addr = XEXP (dest, 0);
ec8e098d 5638 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
9ae8ffe7
JL
5639 && XEXP (addr, 0) == stack_pointer_rtx)
5640 invalidate (stack_pointer_rtx, Pmode);
5641#endif
7afe21cc 5642 dest = fold_rtx (dest, insn);
7afe21cc
RK
5643 }
5644
5645 /* Compute the hash code of the destination now,
5646 before the effects of this instruction are recorded,
5647 since the register values used in the address computation
5648 are those before this instruction. */
2197a88a 5649 sets[i].dest_hash = HASH (dest, mode);
7afe21cc
RK
5650
5651 /* Don't enter a bit-field in the hash table
5652 because the value in it after the store
5653 may not equal what was stored, due to truncation. */
5654
46d096a3 5655 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
7afe21cc
RK
5656 {
5657 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5658
5659 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5660 && GET_CODE (width) == CONST_INT
906c4e36
RK
5661 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5662 && ! (INTVAL (src_const)
5663 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
7afe21cc
RK
5664 /* Exception: if the value is constant,
5665 and it won't be truncated, record it. */
5666 ;
5667 else
5668 {
5669 /* This is chosen so that the destination will be invalidated
5670 but no new value will be recorded.
5671 We must invalidate because sometimes constant
5672 values can be recorded for bitfields. */
5673 sets[i].src_elt = 0;
5674 sets[i].src_volatile = 1;
5675 src_eqv = 0;
5676 src_eqv_elt = 0;
5677 }
5678 }
5679
5680 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5681 the insn. */
5682 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5683 {
ef178af3 5684 /* One less use of the label this insn used to jump to. */
49ce134f 5685 delete_insn (insn);
7afe21cc 5686 cse_jumps_altered = 1;
7afe21cc
RK
5687 /* No more processing for this set. */
5688 sets[i].rtl = 0;
5689 }
5690
5691 /* If this SET is now setting PC to a label, we know it used to
d466c016 5692 be a conditional or computed branch. */
8f235343
JH
5693 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5694 && !LABEL_REF_NONLOCAL_P (src))
7afe21cc 5695 {
8fb1e50e
GS
5696 /* Now emit a BARRIER after the unconditional jump. */
5697 if (NEXT_INSN (insn) == 0
4b4bf941 5698 || !BARRIER_P (NEXT_INSN (insn)))
8fb1e50e
GS
5699 emit_barrier_after (insn);
5700
d466c016
JL
5701 /* We reemit the jump in as many cases as possible just in
5702 case the form of an unconditional jump is significantly
5703 different than a computed jump or conditional jump.
5704
5705 If this insn has multiple sets, then reemitting the
5706 jump is nontrivial. So instead we just force rerecognition
5707 and hope for the best. */
5708 if (n_sets == 1)
7afe21cc 5709 {
9dcb4381 5710 rtx new, note;
8fb1e50e 5711
9dcb4381 5712 new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
7afe21cc
RK
5713 JUMP_LABEL (new) = XEXP (src, 0);
5714 LABEL_NUSES (XEXP (src, 0))++;
9dcb4381
RH
5715
5716 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5717 note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5718 if (note)
5719 {
5720 XEXP (note, 1) = NULL_RTX;
5721 REG_NOTES (new) = note;
5722 }
5723
38c1593d 5724 delete_insn (insn);
7afe21cc 5725 insn = new;
8fb1e50e
GS
5726
5727 /* Now emit a BARRIER after the unconditional jump. */
5728 if (NEXT_INSN (insn) == 0
4b4bf941 5729 || !BARRIER_P (NEXT_INSN (insn)))
8fb1e50e 5730 emit_barrier_after (insn);
7afe21cc 5731 }
31dcf83f 5732 else
31dcf83f 5733 INSN_CODE (insn) = -1;
7afe21cc 5734
8fb1e50e
GS
5735 /* Do not bother deleting any unreachable code,
5736 let jump/flow do that. */
7afe21cc
RK
5737
5738 cse_jumps_altered = 1;
5739 sets[i].rtl = 0;
5740 }
5741
c2a47e48
RK
5742 /* If destination is volatile, invalidate it and then do no further
5743 processing for this assignment. */
7afe21cc
RK
5744
5745 else if (do_not_record)
c2a47e48 5746 {
f8cfc6aa 5747 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
bb4034b3 5748 invalidate (dest, VOIDmode);
3c0cb5de 5749 else if (MEM_P (dest))
32fab725 5750 invalidate (dest, VOIDmode);
2708da92
RS
5751 else if (GET_CODE (dest) == STRICT_LOW_PART
5752 || GET_CODE (dest) == ZERO_EXTRACT)
bb4034b3 5753 invalidate (XEXP (dest, 0), GET_MODE (dest));
c2a47e48
RK
5754 sets[i].rtl = 0;
5755 }
7afe21cc
RK
5756
5757 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
2197a88a 5758 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
7afe21cc
RK
5759
5760#ifdef HAVE_cc0
5761 /* If setting CC0, record what it was set to, or a constant, if it
5762 is equivalent to a constant. If it is being set to a floating-point
5763 value, make a COMPARE with the appropriate constant of 0. If we
5764 don't do this, later code can interpret this as a test against
5765 const0_rtx, which can cause problems if we try to put it into an
5766 insn as a floating-point operand. */
5767 if (dest == cc0_rtx)
5768 {
5769 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5770 this_insn_cc0_mode = mode;
cbf6a543 5771 if (FLOAT_MODE_P (mode))
38a448ca
RH
5772 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5773 CONST0_RTX (mode));
7afe21cc
RK
5774 }
5775#endif
5776 }
5777
5778 /* Now enter all non-volatile source expressions in the hash table
5779 if they are not already present.
5780 Record their equivalence classes in src_elt.
5781 This way we can insert the corresponding destinations into
5782 the same classes even if the actual sources are no longer in them
5783 (having been invalidated). */
5784
5785 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5786 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5787 {
b3694847
SS
5788 struct table_elt *elt;
5789 struct table_elt *classp = sets[0].src_elt;
7afe21cc
RK
5790 rtx dest = SET_DEST (sets[0].rtl);
5791 enum machine_mode eqvmode = GET_MODE (dest);
5792
5793 if (GET_CODE (dest) == STRICT_LOW_PART)
5794 {
5795 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5796 classp = 0;
5797 }
5798 if (insert_regs (src_eqv, classp, 0))
8ae2b8f6
JW
5799 {
5800 rehash_using_reg (src_eqv);
5801 src_eqv_hash = HASH (src_eqv, eqvmode);
5802 }
2197a88a 5803 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7afe21cc 5804 elt->in_memory = src_eqv_in_memory;
7afe21cc 5805 src_eqv_elt = elt;
f7911249
JW
5806
5807 /* Check to see if src_eqv_elt is the same as a set source which
5808 does not yet have an elt, and if so set the elt of the set source
5809 to src_eqv_elt. */
5810 for (i = 0; i < n_sets; i++)
26132f71
JW
5811 if (sets[i].rtl && sets[i].src_elt == 0
5812 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
f7911249 5813 sets[i].src_elt = src_eqv_elt;
7afe21cc
RK
5814 }
5815
5816 for (i = 0; i < n_sets; i++)
5817 if (sets[i].rtl && ! sets[i].src_volatile
5818 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5819 {
5820 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5821 {
5822 /* REG_EQUAL in setting a STRICT_LOW_PART
5823 gives an equivalent for the entire destination register,
5824 not just for the subreg being stored in now.
5825 This is a more interesting equivalence, so we arrange later
5826 to treat the entire reg as the destination. */
5827 sets[i].src_elt = src_eqv_elt;
2197a88a 5828 sets[i].src_hash = src_eqv_hash;
7afe21cc
RK
5829 }
5830 else
5831 {
5832 /* Insert source and constant equivalent into hash table, if not
5833 already present. */
b3694847
SS
5834 struct table_elt *classp = src_eqv_elt;
5835 rtx src = sets[i].src;
5836 rtx dest = SET_DEST (sets[i].rtl);
7afe21cc
RK
5837 enum machine_mode mode
5838 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5839
1fcc57f1
AM
5840 /* It's possible that we have a source value known to be
5841 constant but don't have a REG_EQUAL note on the insn.
5842 Lack of a note will mean src_eqv_elt will be NULL. This
5843 can happen where we've generated a SUBREG to access a
5844 CONST_INT that is already in a register in a wider mode.
5845 Ensure that the source expression is put in the proper
5846 constant class. */
5847 if (!classp)
5848 classp = sets[i].src_const_elt;
5849
26132f71 5850 if (sets[i].src_elt == 0)
7afe21cc 5851 {
26132f71
JW
5852 /* Don't put a hard register source into the table if this is
5853 the last insn of a libcall. In this case, we only need
5854 to put src_eqv_elt in src_elt. */
db4a8254 5855 if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
8ae2b8f6 5856 {
b3694847 5857 struct table_elt *elt;
26132f71
JW
5858
5859 /* Note that these insert_regs calls cannot remove
5860 any of the src_elt's, because they would have failed to
5861 match if not still valid. */
5862 if (insert_regs (src, classp, 0))
5863 {
5864 rehash_using_reg (src);
5865 sets[i].src_hash = HASH (src, mode);
5866 }
5867 elt = insert (src, classp, sets[i].src_hash, mode);
5868 elt->in_memory = sets[i].src_in_memory;
26132f71 5869 sets[i].src_elt = classp = elt;
8ae2b8f6 5870 }
26132f71
JW
5871 else
5872 sets[i].src_elt = classp;
7afe21cc 5873 }
7afe21cc
RK
5874 if (sets[i].src_const && sets[i].src_const_elt == 0
5875 && src != sets[i].src_const
5876 && ! rtx_equal_p (sets[i].src_const, src))
5877 sets[i].src_elt = insert (sets[i].src_const, classp,
2197a88a 5878 sets[i].src_const_hash, mode);
7afe21cc
RK
5879 }
5880 }
5881 else if (sets[i].src_elt == 0)
5882 /* If we did not insert the source into the hash table (e.g., it was
5883 volatile), note the equivalence class for the REG_EQUAL value, if any,
5884 so that the destination goes into that class. */
5885 sets[i].src_elt = src_eqv_elt;
5886
9ae8ffe7 5887 invalidate_from_clobbers (x);
77fa0940 5888
278a83b2 5889 /* Some registers are invalidated by subroutine calls. Memory is
77fa0940
RK
5890 invalidated by non-constant calls. */
5891
4b4bf941 5892 if (CALL_P (insn))
7afe21cc 5893 {
24a28584 5894 if (! CONST_OR_PURE_CALL_P (insn))
9ae8ffe7 5895 invalidate_memory ();
7afe21cc
RK
5896 invalidate_for_call ();
5897 }
5898
5899 /* Now invalidate everything set by this instruction.
5900 If a SUBREG or other funny destination is being set,
5901 sets[i].rtl is still nonzero, so here we invalidate the reg
5902 a part of which is being set. */
5903
5904 for (i = 0; i < n_sets; i++)
5905 if (sets[i].rtl)
5906 {
bb4034b3
JW
5907 /* We can't use the inner dest, because the mode associated with
5908 a ZERO_EXTRACT is significant. */
b3694847 5909 rtx dest = SET_DEST (sets[i].rtl);
7afe21cc
RK
5910
5911 /* Needed for registers to remove the register from its
5912 previous quantity's chain.
5913 Needed for memory if this is a nonvarying address, unless
5914 we have just done an invalidate_memory that covers even those. */
f8cfc6aa 5915 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
bb4034b3 5916 invalidate (dest, VOIDmode);
3c0cb5de 5917 else if (MEM_P (dest))
32fab725 5918 invalidate (dest, VOIDmode);
2708da92
RS
5919 else if (GET_CODE (dest) == STRICT_LOW_PART
5920 || GET_CODE (dest) == ZERO_EXTRACT)
bb4034b3 5921 invalidate (XEXP (dest, 0), GET_MODE (dest));
7afe21cc
RK
5922 }
5923
01e752d3 5924 /* A volatile ASM invalidates everything. */
4b4bf941 5925 if (NONJUMP_INSN_P (insn)
01e752d3
JL
5926 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5927 && MEM_VOLATILE_P (PATTERN (insn)))
5928 flush_hash_table ();
5929
7afe21cc
RK
5930 /* Make sure registers mentioned in destinations
5931 are safe for use in an expression to be inserted.
5932 This removes from the hash table
5933 any invalid entry that refers to one of these registers.
5934
5935 We don't care about the return value from mention_regs because
5936 we are going to hash the SET_DEST values unconditionally. */
5937
5938 for (i = 0; i < n_sets; i++)
34c73909
R
5939 {
5940 if (sets[i].rtl)
5941 {
5942 rtx x = SET_DEST (sets[i].rtl);
5943
f8cfc6aa 5944 if (!REG_P (x))
34c73909
R
5945 mention_regs (x);
5946 else
5947 {
5948 /* We used to rely on all references to a register becoming
5949 inaccessible when a register changes to a new quantity,
5950 since that changes the hash code. However, that is not
9b1549b8 5951 safe, since after HASH_SIZE new quantities we get a
34c73909
R
5952 hash 'collision' of a register with its own invalid
5953 entries. And since SUBREGs have been changed not to
5954 change their hash code with the hash code of the register,
5955 it wouldn't work any longer at all. So we have to check
5956 for any invalid references lying around now.
5957 This code is similar to the REG case in mention_regs,
5958 but it knows that reg_tick has been incremented, and
5959 it leaves reg_in_table as -1 . */
770ae6cc
RK
5960 unsigned int regno = REGNO (x);
5961 unsigned int endregno
34c73909 5962 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
66fd46b6 5963 : hard_regno_nregs[regno][GET_MODE (x)]);
770ae6cc 5964 unsigned int i;
34c73909
R
5965
5966 for (i = regno; i < endregno; i++)
5967 {
30f72379 5968 if (REG_IN_TABLE (i) >= 0)
34c73909
R
5969 {
5970 remove_invalid_refs (i);
30f72379 5971 REG_IN_TABLE (i) = -1;
34c73909
R
5972 }
5973 }
5974 }
5975 }
5976 }
7afe21cc
RK
5977
5978 /* We may have just removed some of the src_elt's from the hash table.
5979 So replace each one with the current head of the same class. */
5980
5981 for (i = 0; i < n_sets; i++)
5982 if (sets[i].rtl)
5983 {
5984 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5985 /* If elt was removed, find current head of same class,
5986 or 0 if nothing remains of that class. */
5987 {
b3694847 5988 struct table_elt *elt = sets[i].src_elt;
7afe21cc
RK
5989
5990 while (elt && elt->prev_same_value)
5991 elt = elt->prev_same_value;
5992
5993 while (elt && elt->first_same_value == 0)
5994 elt = elt->next_same_value;
5995 sets[i].src_elt = elt ? elt->first_same_value : 0;
5996 }
5997 }
5998
5999 /* Now insert the destinations into their equivalence classes. */
6000
6001 for (i = 0; i < n_sets; i++)
6002 if (sets[i].rtl)
6003 {
b3694847 6004 rtx dest = SET_DEST (sets[i].rtl);
b3694847 6005 struct table_elt *elt;
7afe21cc
RK
6006
6007 /* Don't record value if we are not supposed to risk allocating
6008 floating-point values in registers that might be wider than
6009 memory. */
6010 if ((flag_float_store
3c0cb5de 6011 && MEM_P (dest)
cbf6a543 6012 && FLOAT_MODE_P (GET_MODE (dest)))
bc4ddc77
JW
6013 /* Don't record BLKmode values, because we don't know the
6014 size of it, and can't be sure that other BLKmode values
6015 have the same or smaller size. */
6016 || GET_MODE (dest) == BLKmode
7afe21cc
RK
6017 /* Don't record values of destinations set inside a libcall block
6018 since we might delete the libcall. Things should have been set
6019 up so we won't want to reuse such a value, but we play it safe
6020 here. */
7bd8b2a8 6021 || libcall_insn
7afe21cc
RK
6022 /* If we didn't put a REG_EQUAL value or a source into the hash
6023 table, there is no point is recording DEST. */
1a8e9a8e
RK
6024 || sets[i].src_elt == 0
6025 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6026 or SIGN_EXTEND, don't record DEST since it can cause
6027 some tracking to be wrong.
6028
6029 ??? Think about this more later. */
6030 || (GET_CODE (dest) == SUBREG
6031 && (GET_MODE_SIZE (GET_MODE (dest))
6032 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6033 && (GET_CODE (sets[i].src) == SIGN_EXTEND
6034 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
7afe21cc
RK
6035 continue;
6036
6037 /* STRICT_LOW_PART isn't part of the value BEING set,
6038 and neither is the SUBREG inside it.
6039 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6040 if (GET_CODE (dest) == STRICT_LOW_PART)
6041 dest = SUBREG_REG (XEXP (dest, 0));
6042
f8cfc6aa 6043 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
7afe21cc
RK
6044 /* Registers must also be inserted into chains for quantities. */
6045 if (insert_regs (dest, sets[i].src_elt, 1))
8ae2b8f6
JW
6046 {
6047 /* If `insert_regs' changes something, the hash code must be
6048 recalculated. */
6049 rehash_using_reg (dest);
6050 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6051 }
7afe21cc 6052
8fff4fc1
RH
6053 elt = insert (dest, sets[i].src_elt,
6054 sets[i].dest_hash, GET_MODE (dest));
9de2c71a 6055
3c0cb5de 6056 elt->in_memory = (MEM_P (sets[i].inner_dest)
389fdba0 6057 && !MEM_READONLY_P (sets[i].inner_dest));
c256df0b 6058
fc3ffe83
RK
6059 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6060 narrower than M2, and both M1 and M2 are the same number of words,
6061 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6062 make that equivalence as well.
7afe21cc 6063
4de249d9
PB
6064 However, BAR may have equivalences for which gen_lowpart
6065 will produce a simpler value than gen_lowpart applied to
7afe21cc 6066 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
278a83b2 6067 BAR's equivalences. If we don't get a simplified form, make
7afe21cc
RK
6068 the SUBREG. It will not be used in an equivalence, but will
6069 cause two similar assignments to be detected.
6070
6071 Note the loop below will find SUBREG_REG (DEST) since we have
6072 already entered SRC and DEST of the SET in the table. */
6073
6074 if (GET_CODE (dest) == SUBREG
6cdbaec4
RK
6075 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6076 / UNITS_PER_WORD)
278a83b2 6077 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
7afe21cc
RK
6078 && (GET_MODE_SIZE (GET_MODE (dest))
6079 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6080 && sets[i].src_elt != 0)
6081 {
6082 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6083 struct table_elt *elt, *classp = 0;
6084
6085 for (elt = sets[i].src_elt->first_same_value; elt;
6086 elt = elt->next_same_value)
6087 {
6088 rtx new_src = 0;
2197a88a 6089 unsigned src_hash;
7afe21cc 6090 struct table_elt *src_elt;
ff27a429 6091 int byte = 0;
7afe21cc
RK
6092
6093 /* Ignore invalid entries. */
f8cfc6aa 6094 if (!REG_P (elt->exp)
0516f6fe 6095 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
7afe21cc
RK
6096 continue;
6097
9beb7d20
RH
6098 /* We may have already been playing subreg games. If the
6099 mode is already correct for the destination, use it. */
6100 if (GET_MODE (elt->exp) == new_mode)
6101 new_src = elt->exp;
6102 else
6103 {
6104 /* Calculate big endian correction for the SUBREG_BYTE.
6105 We have already checked that M1 (GET_MODE (dest))
6106 is not narrower than M2 (new_mode). */
6107 if (BYTES_BIG_ENDIAN)
6108 byte = (GET_MODE_SIZE (GET_MODE (dest))
6109 - GET_MODE_SIZE (new_mode));
6110
6111 new_src = simplify_gen_subreg (new_mode, elt->exp,
6112 GET_MODE (dest), byte);
6113 }
6114
ff27a429
R
6115 /* The call to simplify_gen_subreg fails if the value
6116 is VOIDmode, yet we can't do any simplification, e.g.
6117 for EXPR_LISTs denoting function call results.
6118 It is invalid to construct a SUBREG with a VOIDmode
6119 SUBREG_REG, hence a zero new_src means we can't do
6120 this substitution. */
6121 if (! new_src)
6122 continue;
7afe21cc
RK
6123
6124 src_hash = HASH (new_src, new_mode);
6125 src_elt = lookup (new_src, src_hash, new_mode);
6126
6127 /* Put the new source in the hash table is if isn't
6128 already. */
6129 if (src_elt == 0)
6130 {
6131 if (insert_regs (new_src, classp, 0))
8ae2b8f6
JW
6132 {
6133 rehash_using_reg (new_src);
6134 src_hash = HASH (new_src, new_mode);
6135 }
7afe21cc
RK
6136 src_elt = insert (new_src, classp, src_hash, new_mode);
6137 src_elt->in_memory = elt->in_memory;
7afe21cc
RK
6138 }
6139 else if (classp && classp != src_elt->first_same_value)
278a83b2 6140 /* Show that two things that we've seen before are
7afe21cc
RK
6141 actually the same. */
6142 merge_equiv_classes (src_elt, classp);
6143
6144 classp = src_elt->first_same_value;
da932f04
JL
6145 /* Ignore invalid entries. */
6146 while (classp
f8cfc6aa 6147 && !REG_P (classp->exp)
0516f6fe 6148 && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
da932f04 6149 classp = classp->next_same_value;
7afe21cc
RK
6150 }
6151 }
6152 }
6153
403e25d0
RK
6154 /* Special handling for (set REG0 REG1) where REG0 is the
6155 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6156 be used in the sequel, so (if easily done) change this insn to
6157 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6158 that computed their value. Then REG1 will become a dead store
6159 and won't cloud the situation for later optimizations.
7afe21cc
RK
6160
6161 Do not make this change if REG1 is a hard register, because it will
6162 then be used in the sequel and we may be changing a two-operand insn
6163 into a three-operand insn.
6164
50270076
R
6165 Also do not do this if we are operating on a copy of INSN.
6166
6167 Also don't do this if INSN ends a libcall; this would cause an unrelated
6168 register to be set in the middle of a libcall, and we then get bad code
6169 if the libcall is deleted. */
7afe21cc 6170
f8cfc6aa 6171 if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
7afe21cc 6172 && NEXT_INSN (PREV_INSN (insn)) == insn
f8cfc6aa 6173 && REG_P (SET_SRC (sets[0].rtl))
7afe21cc 6174 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
1bb98cec 6175 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
7afe21cc 6176 {
1bb98cec
DM
6177 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6178 struct qty_table_elem *src_ent = &qty_table[src_q];
7afe21cc 6179
1bb98cec
DM
6180 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6181 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
7afe21cc 6182 {
3e25353e
AH
6183 rtx prev = insn;
6184 /* Scan for the previous nonnote insn, but stop at a basic
6185 block boundary. */
6186 do
6187 {
6188 prev = PREV_INSN (prev);
6189 }
4b4bf941 6190 while (prev && NOTE_P (prev)
3e25353e 6191 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
7080f735 6192
58ecb5e2
RS
6193 /* Do not swap the registers around if the previous instruction
6194 attaches a REG_EQUIV note to REG1.
6195
6196 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6197 from the pseudo that originally shadowed an incoming argument
6198 to another register. Some uses of REG_EQUIV might rely on it
6199 being attached to REG1 rather than REG2.
6200
6201 This section previously turned the REG_EQUIV into a REG_EQUAL
6202 note. We cannot do that because REG_EQUIV may provide an
4912a07c 6203 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
58ecb5e2 6204
4b4bf941 6205 if (prev != 0 && NONJUMP_INSN_P (prev)
403e25d0 6206 && GET_CODE (PATTERN (prev)) == SET
58ecb5e2
RS
6207 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6208 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
1bb98cec
DM
6209 {
6210 rtx dest = SET_DEST (sets[0].rtl);
403e25d0 6211 rtx src = SET_SRC (sets[0].rtl);
58ecb5e2 6212 rtx note;
7afe21cc 6213
278a83b2
KH
6214 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6215 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6216 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
1bb98cec 6217 apply_change_group ();
7afe21cc 6218
403e25d0
RK
6219 /* If INSN has a REG_EQUAL note, and this note mentions
6220 REG0, then we must delete it, because the value in
6221 REG0 has changed. If the note's value is REG1, we must
6222 also delete it because that is now this insn's dest. */
1bb98cec 6223 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
403e25d0
RK
6224 if (note != 0
6225 && (reg_mentioned_p (dest, XEXP (note, 0))
6226 || rtx_equal_p (src, XEXP (note, 0))))
1bb98cec
DM
6227 remove_note (insn, note);
6228 }
7afe21cc
RK
6229 }
6230 }
6231
6232 /* If this is a conditional jump insn, record any known equivalences due to
6233 the condition being tested. */
6234
4b4bf941 6235 if (JUMP_P (insn)
7afe21cc
RK
6236 && n_sets == 1 && GET_CODE (x) == SET
6237 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6238 record_jump_equiv (insn, 0);
6239
6240#ifdef HAVE_cc0
6241 /* If the previous insn set CC0 and this insn no longer references CC0,
6242 delete the previous insn. Here we use the fact that nothing expects CC0
6243 to be valid over an insn, which is true until the final pass. */
4b4bf941 6244 if (prev_insn && NONJUMP_INSN_P (prev_insn)
7afe21cc
RK
6245 && (tem = single_set (prev_insn)) != 0
6246 && SET_DEST (tem) == cc0_rtx
6247 && ! reg_mentioned_p (cc0_rtx, x))
6dee7384 6248 delete_insn (prev_insn);
7afe21cc
RK
6249
6250 prev_insn_cc0 = this_insn_cc0;
6251 prev_insn_cc0_mode = this_insn_cc0_mode;
7afe21cc 6252 prev_insn = insn;
4977bab6 6253#endif
7afe21cc
RK
6254}
6255\f
a4c6502a 6256/* Remove from the hash table all expressions that reference memory. */
14a774a9 6257
7afe21cc 6258static void
7080f735 6259invalidate_memory (void)
7afe21cc 6260{
b3694847
SS
6261 int i;
6262 struct table_elt *p, *next;
7afe21cc 6263
9b1549b8 6264 for (i = 0; i < HASH_SIZE; i++)
9ae8ffe7
JL
6265 for (p = table[i]; p; p = next)
6266 {
6267 next = p->next_same_hash;
6268 if (p->in_memory)
6269 remove_from_table (p, i);
6270 }
6271}
6272
14a774a9
RK
6273/* If ADDR is an address that implicitly affects the stack pointer, return
6274 1 and update the register tables to show the effect. Else, return 0. */
6275
9ae8ffe7 6276static int
7080f735 6277addr_affects_sp_p (rtx addr)
9ae8ffe7 6278{
ec8e098d 6279 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
f8cfc6aa 6280 && REG_P (XEXP (addr, 0))
9ae8ffe7 6281 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7afe21cc 6282 {
30f72379 6283 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
46081bb3
SH
6284 {
6285 REG_TICK (STACK_POINTER_REGNUM)++;
6286 /* Is it possible to use a subreg of SP? */
6287 SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6288 }
9ae8ffe7
JL
6289
6290 /* This should be *very* rare. */
6291 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6292 invalidate (stack_pointer_rtx, VOIDmode);
14a774a9 6293
9ae8ffe7 6294 return 1;
7afe21cc 6295 }
14a774a9 6296
9ae8ffe7 6297 return 0;
7afe21cc
RK
6298}
6299
6300/* Perform invalidation on the basis of everything about an insn
6301 except for invalidating the actual places that are SET in it.
6302 This includes the places CLOBBERed, and anything that might
6303 alias with something that is SET or CLOBBERed.
6304
7afe21cc
RK
6305 X is the pattern of the insn. */
6306
6307static void
7080f735 6308invalidate_from_clobbers (rtx x)
7afe21cc 6309{
7afe21cc
RK
6310 if (GET_CODE (x) == CLOBBER)
6311 {
6312 rtx ref = XEXP (x, 0);
9ae8ffe7
JL
6313 if (ref)
6314 {
f8cfc6aa 6315 if (REG_P (ref) || GET_CODE (ref) == SUBREG
3c0cb5de 6316 || MEM_P (ref))
9ae8ffe7
JL
6317 invalidate (ref, VOIDmode);
6318 else if (GET_CODE (ref) == STRICT_LOW_PART
6319 || GET_CODE (ref) == ZERO_EXTRACT)
6320 invalidate (XEXP (ref, 0), GET_MODE (ref));
6321 }
7afe21cc
RK
6322 }
6323 else if (GET_CODE (x) == PARALLEL)
6324 {
b3694847 6325 int i;
7afe21cc
RK
6326 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6327 {
b3694847 6328 rtx y = XVECEXP (x, 0, i);
7afe21cc
RK
6329 if (GET_CODE (y) == CLOBBER)
6330 {
6331 rtx ref = XEXP (y, 0);
f8cfc6aa 6332 if (REG_P (ref) || GET_CODE (ref) == SUBREG
3c0cb5de 6333 || MEM_P (ref))
9ae8ffe7
JL
6334 invalidate (ref, VOIDmode);
6335 else if (GET_CODE (ref) == STRICT_LOW_PART
6336 || GET_CODE (ref) == ZERO_EXTRACT)
6337 invalidate (XEXP (ref, 0), GET_MODE (ref));
7afe21cc
RK
6338 }
6339 }
6340 }
6341}
6342\f
6343/* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6344 and replace any registers in them with either an equivalent constant
6345 or the canonical form of the register. If we are inside an address,
6346 only do this if the address remains valid.
6347
6348 OBJECT is 0 except when within a MEM in which case it is the MEM.
6349
6350 Return the replacement for X. */
6351
6352static rtx
7080f735 6353cse_process_notes (rtx x, rtx object)
7afe21cc
RK
6354{
6355 enum rtx_code code = GET_CODE (x);
6f7d635c 6356 const char *fmt = GET_RTX_FORMAT (code);
7afe21cc
RK
6357 int i;
6358
6359 switch (code)
6360 {
6361 case CONST_INT:
6362 case CONST:
6363 case SYMBOL_REF:
6364 case LABEL_REF:
6365 case CONST_DOUBLE:
69ef87e2 6366 case CONST_VECTOR:
7afe21cc
RK
6367 case PC:
6368 case CC0:
6369 case LO_SUM:
6370 return x;
6371
6372 case MEM:
c96208fa
DC
6373 validate_change (x, &XEXP (x, 0),
6374 cse_process_notes (XEXP (x, 0), x), 0);
7afe21cc
RK
6375 return x;
6376
6377 case EXPR_LIST:
6378 case INSN_LIST:
6379 if (REG_NOTE_KIND (x) == REG_EQUAL)
906c4e36 6380 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
7afe21cc 6381 if (XEXP (x, 1))
906c4e36 6382 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
7afe21cc
RK
6383 return x;
6384
e4890d45
RS
6385 case SIGN_EXTEND:
6386 case ZERO_EXTEND:
0b0ee36c 6387 case SUBREG:
e4890d45
RS
6388 {
6389 rtx new = cse_process_notes (XEXP (x, 0), object);
6390 /* We don't substitute VOIDmode constants into these rtx,
6391 since they would impede folding. */
6392 if (GET_MODE (new) != VOIDmode)
6393 validate_change (object, &XEXP (x, 0), new, 0);
6394 return x;
6395 }
6396
7afe21cc 6397 case REG:
30f72379 6398 i = REG_QTY (REGNO (x));
7afe21cc
RK
6399
6400 /* Return a constant or a constant register. */
1bb98cec 6401 if (REGNO_QTY_VALID_P (REGNO (x)))
7afe21cc 6402 {
1bb98cec
DM
6403 struct qty_table_elem *ent = &qty_table[i];
6404
6405 if (ent->const_rtx != NULL_RTX
6406 && (CONSTANT_P (ent->const_rtx)
f8cfc6aa 6407 || REG_P (ent->const_rtx)))
1bb98cec 6408 {
4de249d9 6409 rtx new = gen_lowpart (GET_MODE (x), ent->const_rtx);
1bb98cec
DM
6410 if (new)
6411 return new;
6412 }
7afe21cc
RK
6413 }
6414
6415 /* Otherwise, canonicalize this register. */
906c4e36 6416 return canon_reg (x, NULL_RTX);
278a83b2 6417
e9a25f70
JL
6418 default:
6419 break;
7afe21cc
RK
6420 }
6421
6422 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6423 if (fmt[i] == 'e')
6424 validate_change (object, &XEXP (x, i),
7fe34fdf 6425 cse_process_notes (XEXP (x, i), object), 0);
7afe21cc
RK
6426
6427 return x;
6428}
6429\f
8b3686ed
RK
6430/* Process one SET of an insn that was skipped. We ignore CLOBBERs
6431 since they are done elsewhere. This function is called via note_stores. */
6432
6433static void
7080f735 6434invalidate_skipped_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
8b3686ed 6435{
9ae8ffe7
JL
6436 enum rtx_code code = GET_CODE (dest);
6437
6438 if (code == MEM
ddc356e8 6439 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
9ae8ffe7
JL
6440 /* There are times when an address can appear varying and be a PLUS
6441 during this scan when it would be a fixed address were we to know
6442 the proper equivalences. So invalidate all memory if there is
6443 a BLKmode or nonscalar memory reference or a reference to a
6444 variable address. */
6445 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
2be28ee2 6446 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
9ae8ffe7
JL
6447 {
6448 invalidate_memory ();
6449 return;
6450 }
ffcf6393 6451
f47c02fa 6452 if (GET_CODE (set) == CLOBBER
8beccec8 6453 || CC0_P (dest)
f47c02fa
RK
6454 || dest == pc_rtx)
6455 return;
6456
9ae8ffe7 6457 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
bb4034b3 6458 invalidate (XEXP (dest, 0), GET_MODE (dest));
9ae8ffe7
JL
6459 else if (code == REG || code == SUBREG || code == MEM)
6460 invalidate (dest, VOIDmode);
8b3686ed
RK
6461}
6462
6463/* Invalidate all insns from START up to the end of the function or the
6464 next label. This called when we wish to CSE around a block that is
6465 conditionally executed. */
6466
6467static void
7080f735 6468invalidate_skipped_block (rtx start)
8b3686ed
RK
6469{
6470 rtx insn;
8b3686ed 6471
4b4bf941 6472 for (insn = start; insn && !LABEL_P (insn);
8b3686ed
RK
6473 insn = NEXT_INSN (insn))
6474 {
2c3c49de 6475 if (! INSN_P (insn))
8b3686ed
RK
6476 continue;
6477
4b4bf941 6478 if (CALL_P (insn))
8b3686ed 6479 {
24a28584 6480 if (! CONST_OR_PURE_CALL_P (insn))
9ae8ffe7 6481 invalidate_memory ();
8b3686ed 6482 invalidate_for_call ();
8b3686ed
RK
6483 }
6484
97577254 6485 invalidate_from_clobbers (PATTERN (insn));
84832317 6486 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
8b3686ed
RK
6487 }
6488}
6489\f
7afe21cc
RK
6490/* Find the end of INSN's basic block and return its range,
6491 the total number of SETs in all the insns of the block, the last insn of the
6492 block, and the branch path.
6493
da7d8304 6494 The branch path indicates which branches should be followed. If a nonzero
7afe21cc
RK
6495 path size is specified, the block should be rescanned and a different set
6496 of branches will be taken. The branch path is only used if
da7d8304 6497 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
7afe21cc
RK
6498
6499 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6500 used to describe the block. It is filled in with the information about
6501 the current block. The incoming structure's branch path, if any, is used
6502 to construct the output branch path. */
6503
86caf04d 6504static void
7080f735 6505cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
5affca01 6506 int follow_jumps, int skip_blocks)
7afe21cc
RK
6507{
6508 rtx p = insn, q;
6509 int nsets = 0;
6510 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
2c3c49de 6511 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
7afe21cc
RK
6512 int path_size = data->path_size;
6513 int path_entry = 0;
6514 int i;
6515
6516 /* Update the previous branch path, if any. If the last branch was
6de9cd9a
DN
6517 previously PATH_TAKEN, mark it PATH_NOT_TAKEN.
6518 If it was previously PATH_NOT_TAKEN,
7afe21cc 6519 shorten the path by one and look at the previous branch. We know that
da7d8304 6520 at least one branch must have been taken if PATH_SIZE is nonzero. */
7afe21cc
RK
6521 while (path_size > 0)
6522 {
6de9cd9a 6523 if (data->path[path_size - 1].status != PATH_NOT_TAKEN)
7afe21cc 6524 {
6de9cd9a 6525 data->path[path_size - 1].status = PATH_NOT_TAKEN;
7afe21cc
RK
6526 break;
6527 }
6528 else
6529 path_size--;
6530 }
6531
16b702cd
MM
6532 /* If the first instruction is marked with QImode, that means we've
6533 already processed this block. Our caller will look at DATA->LAST
6534 to figure out where to go next. We want to return the next block
6535 in the instruction stream, not some branched-to block somewhere
6536 else. We accomplish this by pretending our called forbid us to
6537 follow jumps, or skip blocks. */
6538 if (GET_MODE (insn) == QImode)
6539 follow_jumps = skip_blocks = 0;
6540
7afe21cc 6541 /* Scan to end of this basic block. */
4b4bf941 6542 while (p && !LABEL_P (p))
7afe21cc 6543 {
8aeea6e6 6544 /* Don't cse over a call to setjmp; on some machines (eg VAX)
7afe21cc
RK
6545 the regs restored by the longjmp come from
6546 a later time than the setjmp. */
4b4bf941 6547 if (PREV_INSN (p) && CALL_P (PREV_INSN (p))
570a98eb 6548 && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
7afe21cc
RK
6549 break;
6550
6551 /* A PARALLEL can have lots of SETs in it,
6552 especially if it is really an ASM_OPERANDS. */
2c3c49de 6553 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
7afe21cc 6554 nsets += XVECLEN (PATTERN (p), 0);
4b4bf941 6555 else if (!NOTE_P (p))
7afe21cc 6556 nsets += 1;
278a83b2 6557
164c8956
RK
6558 /* Ignore insns made by CSE; they cannot affect the boundaries of
6559 the basic block. */
6560
6561 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
8b3686ed 6562 high_cuid = INSN_CUID (p);
164c8956
RK
6563 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6564 low_cuid = INSN_CUID (p);
7afe21cc
RK
6565
6566 /* See if this insn is in our branch path. If it is and we are to
6567 take it, do so. */
6568 if (path_entry < path_size && data->path[path_entry].branch == p)
6569 {
6de9cd9a 6570 if (data->path[path_entry].status != PATH_NOT_TAKEN)
7afe21cc 6571 p = JUMP_LABEL (p);
278a83b2 6572
7afe21cc
RK
6573 /* Point to next entry in path, if any. */
6574 path_entry++;
6575 }
6576
6577 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6578 was specified, we haven't reached our maximum path length, there are
6579 insns following the target of the jump, this is the only use of the
8b3686ed
RK
6580 jump label, and the target label is preceded by a BARRIER.
6581
6582 Alternatively, we can follow the jump if it branches around a
6583 block of code and there are no other branches into the block.
6584 In this case invalidate_skipped_block will be called to invalidate any
6585 registers set in the block when following the jump. */
6586
9bf8cfbf 6587 else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
4b4bf941 6588 && JUMP_P (p)
278a83b2 6589 && GET_CODE (PATTERN (p)) == SET
7afe21cc 6590 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
85c3ba60 6591 && JUMP_LABEL (p) != 0
7afe21cc
RK
6592 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6593 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6594 {
6595 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
4b4bf941 6596 if ((!NOTE_P (q)
278a83b2 6597 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
4b4bf941 6598 || (PREV_INSN (q) && CALL_P (PREV_INSN (q))
570a98eb 6599 && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
4b4bf941 6600 && (!LABEL_P (q) || LABEL_NUSES (q) != 0))
7afe21cc
RK
6601 break;
6602
6603 /* If we ran into a BARRIER, this code is an extension of the
6604 basic block when the branch is taken. */
4b4bf941 6605 if (follow_jumps && q != 0 && BARRIER_P (q))
7afe21cc
RK
6606 {
6607 /* Don't allow ourself to keep walking around an
6608 always-executed loop. */
fc3ffe83
RK
6609 if (next_real_insn (q) == next)
6610 {
6611 p = NEXT_INSN (p);
6612 continue;
6613 }
7afe21cc
RK
6614
6615 /* Similarly, don't put a branch in our path more than once. */
6616 for (i = 0; i < path_entry; i++)
6617 if (data->path[i].branch == p)
6618 break;
6619
6620 if (i != path_entry)
6621 break;
6622
6623 data->path[path_entry].branch = p;
6de9cd9a 6624 data->path[path_entry++].status = PATH_TAKEN;
7afe21cc
RK
6625
6626 /* This branch now ends our path. It was possible that we
6627 didn't see this branch the last time around (when the
6628 insn in front of the target was a JUMP_INSN that was
6629 turned into a no-op). */
6630 path_size = path_entry;
6631
6632 p = JUMP_LABEL (p);
6633 /* Mark block so we won't scan it again later. */
6634 PUT_MODE (NEXT_INSN (p), QImode);
6635 }
8b3686ed 6636 /* Detect a branch around a block of code. */
4b4bf941 6637 else if (skip_blocks && q != 0 && !LABEL_P (q))
8b3686ed 6638 {
b3694847 6639 rtx tmp;
8b3686ed 6640
fc3ffe83
RK
6641 if (next_real_insn (q) == next)
6642 {
6643 p = NEXT_INSN (p);
6644 continue;
6645 }
8b3686ed
RK
6646
6647 for (i = 0; i < path_entry; i++)
6648 if (data->path[i].branch == p)
6649 break;
6650
6651 if (i != path_entry)
6652 break;
6653
6654 /* This is no_labels_between_p (p, q) with an added check for
6655 reaching the end of a function (in case Q precedes P). */
6656 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
4b4bf941 6657 if (LABEL_P (tmp))
8b3686ed 6658 break;
278a83b2 6659
8b3686ed
RK
6660 if (tmp == q)
6661 {
6662 data->path[path_entry].branch = p;
6de9cd9a 6663 data->path[path_entry++].status = PATH_AROUND;
8b3686ed
RK
6664
6665 path_size = path_entry;
6666
6667 p = JUMP_LABEL (p);
6668 /* Mark block so we won't scan it again later. */
6669 PUT_MODE (NEXT_INSN (p), QImode);
6670 }
6671 }
7afe21cc 6672 }
7afe21cc
RK
6673 p = NEXT_INSN (p);
6674 }
6675
6676 data->low_cuid = low_cuid;
6677 data->high_cuid = high_cuid;
6678 data->nsets = nsets;
6679 data->last = p;
6680
6681 /* If all jumps in the path are not taken, set our path length to zero
6682 so a rescan won't be done. */
6683 for (i = path_size - 1; i >= 0; i--)
6de9cd9a 6684 if (data->path[i].status != PATH_NOT_TAKEN)
7afe21cc
RK
6685 break;
6686
6687 if (i == -1)
6688 data->path_size = 0;
6689 else
6690 data->path_size = path_size;
6691
6692 /* End the current branch path. */
6693 data->path[path_size].branch = 0;
6694}
6695\f
7afe21cc
RK
6696/* Perform cse on the instructions of a function.
6697 F is the first instruction.
6698 NREGS is one plus the highest pseudo-reg number used in the instruction.
6699
7afe21cc
RK
6700 Returns 1 if jump_optimize should be redone due to simplifications
6701 in conditional jump instructions. */
6702
6703int
5affca01 6704cse_main (rtx f, int nregs, FILE *file)
7afe21cc
RK
6705{
6706 struct cse_basic_block_data val;
b3694847
SS
6707 rtx insn = f;
6708 int i;
7afe21cc 6709
bc5e3b54
KH
6710 init_cse_reg_info (nregs);
6711
9bf8cfbf
ZD
6712 val.path = xmalloc (sizeof (struct branch_path)
6713 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6714
7afe21cc 6715 cse_jumps_altered = 0;
a5dfb4ee 6716 recorded_label_ref = 0;
7afe21cc 6717 constant_pool_entries_cost = 0;
dd0ba281 6718 constant_pool_entries_regcost = 0;
7afe21cc 6719 val.path_size = 0;
2f93eea8 6720 rtl_hooks = cse_rtl_hooks;
7afe21cc
RK
6721
6722 init_recog ();
9ae8ffe7 6723 init_alias_analysis ();
7afe21cc 6724
703ad42b 6725 reg_eqv_table = xmalloc (nregs * sizeof (struct reg_eqv_elem));
7afe21cc 6726
7afe21cc
RK
6727 /* Find the largest uid. */
6728
164c8956 6729 max_uid = get_max_uid ();
703ad42b 6730 uid_cuid = xcalloc (max_uid + 1, sizeof (int));
7afe21cc
RK
6731
6732 /* Compute the mapping from uids to cuids.
6733 CUIDs are numbers assigned to insns, like uids,
6734 except that cuids increase monotonically through the code.
6735 Don't assign cuids to line-number NOTEs, so that the distance in cuids
6736 between two insns is not affected by -g. */
6737
6738 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
6739 {
4b4bf941 6740 if (!NOTE_P (insn)
7afe21cc
RK
6741 || NOTE_LINE_NUMBER (insn) < 0)
6742 INSN_CUID (insn) = ++i;
6743 else
6744 /* Give a line number note the same cuid as preceding insn. */
6745 INSN_CUID (insn) = i;
6746 }
6747
7afe21cc
RK
6748 /* Loop over basic blocks.
6749 Compute the maximum number of qty's needed for each basic block
6750 (which is 2 for each SET). */
6751 insn = f;
6752 while (insn)
6753 {
4eadede7 6754 cse_altered = 0;
5affca01 6755 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps,
8b3686ed 6756 flag_cse_skip_blocks);
7afe21cc
RK
6757
6758 /* If this basic block was already processed or has no sets, skip it. */
6759 if (val.nsets == 0 || GET_MODE (insn) == QImode)
6760 {
6761 PUT_MODE (insn, VOIDmode);
6762 insn = (val.last ? NEXT_INSN (val.last) : 0);
6763 val.path_size = 0;
6764 continue;
6765 }
6766
6767 cse_basic_block_start = val.low_cuid;
6768 cse_basic_block_end = val.high_cuid;
6769 max_qty = val.nsets * 2;
278a83b2 6770
7afe21cc 6771 if (file)
ab87f8c8 6772 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
7afe21cc
RK
6773 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
6774 val.nsets);
6775
6776 /* Make MAX_QTY bigger to give us room to optimize
6777 past the end of this basic block, if that should prove useful. */
6778 if (max_qty < 500)
6779 max_qty = 500;
6780
7afe21cc
RK
6781 /* If this basic block is being extended by following certain jumps,
6782 (see `cse_end_of_basic_block'), we reprocess the code from the start.
6783 Otherwise, we start after this basic block. */
6784 if (val.path_size > 0)
5affca01 6785 cse_basic_block (insn, val.last, val.path);
7afe21cc
RK
6786 else
6787 {
6788 int old_cse_jumps_altered = cse_jumps_altered;
6789 rtx temp;
6790
6791 /* When cse changes a conditional jump to an unconditional
6792 jump, we want to reprocess the block, since it will give
6793 us a new branch path to investigate. */
6794 cse_jumps_altered = 0;
5affca01 6795 temp = cse_basic_block (insn, val.last, val.path);
8b3686ed
RK
6796 if (cse_jumps_altered == 0
6797 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7afe21cc
RK
6798 insn = temp;
6799
6800 cse_jumps_altered |= old_cse_jumps_altered;
6801 }
6802
1f8f4a0b 6803 if (cse_altered)
1497faf6
RH
6804 ggc_collect ();
6805
7afe21cc
RK
6806#ifdef USE_C_ALLOCA
6807 alloca (0);
6808#endif
6809 }
6810
e05e2395
MM
6811 /* Clean up. */
6812 end_alias_analysis ();
75c6bd46 6813 free (uid_cuid);
1bb98cec 6814 free (reg_eqv_table);
9bf8cfbf 6815 free (val.path);
2f93eea8 6816 rtl_hooks = general_rtl_hooks;
e05e2395 6817
a5dfb4ee 6818 return cse_jumps_altered || recorded_label_ref;
7afe21cc
RK
6819}
6820
6821/* Process a single basic block. FROM and TO and the limits of the basic
6822 block. NEXT_BRANCH points to the branch path when following jumps or
75473b02 6823 a null path when not following jumps. */
7afe21cc
RK
6824
6825static rtx
5affca01 6826cse_basic_block (rtx from, rtx to, struct branch_path *next_branch)
7afe21cc 6827{
b3694847 6828 rtx insn;
7afe21cc 6829 int to_usage = 0;
7bd8b2a8 6830 rtx libcall_insn = NULL_RTX;
e9a25f70 6831 int num_insns = 0;
26d107db 6832 int no_conflict = 0;
7afe21cc 6833
08a69267
RS
6834 /* Allocate the space needed by qty_table. */
6835 qty_table = xmalloc (max_qty * sizeof (struct qty_table_elem));
7afe21cc
RK
6836
6837 new_basic_block ();
6838
6839 /* TO might be a label. If so, protect it from being deleted. */
4b4bf941 6840 if (to != 0 && LABEL_P (to))
7afe21cc
RK
6841 ++LABEL_NUSES (to);
6842
6843 for (insn = from; insn != to; insn = NEXT_INSN (insn))
6844 {
b3694847 6845 enum rtx_code code = GET_CODE (insn);
e9a25f70 6846
1d22a2c1
MM
6847 /* If we have processed 1,000 insns, flush the hash table to
6848 avoid extreme quadratic behavior. We must not include NOTEs
c13e8210 6849 in the count since there may be more of them when generating
1d22a2c1
MM
6850 debugging information. If we clear the table at different
6851 times, code generated with -g -O might be different than code
6852 generated with -O but not -g.
e9a25f70
JL
6853
6854 ??? This is a real kludge and needs to be done some other way.
6855 Perhaps for 2.9. */
1d22a2c1 6856 if (code != NOTE && num_insns++ > 1000)
e9a25f70 6857 {
01e752d3 6858 flush_hash_table ();
e9a25f70
JL
6859 num_insns = 0;
6860 }
7afe21cc
RK
6861
6862 /* See if this is a branch that is part of the path. If so, and it is
6863 to be taken, do so. */
6864 if (next_branch->branch == insn)
6865 {
8b3686ed 6866 enum taken status = next_branch++->status;
6de9cd9a 6867 if (status != PATH_NOT_TAKEN)
7afe21cc 6868 {
6de9cd9a 6869 if (status == PATH_TAKEN)
8b3686ed
RK
6870 record_jump_equiv (insn, 1);
6871 else
6872 invalidate_skipped_block (NEXT_INSN (insn));
6873
7afe21cc
RK
6874 /* Set the last insn as the jump insn; it doesn't affect cc0.
6875 Then follow this branch. */
6876#ifdef HAVE_cc0
6877 prev_insn_cc0 = 0;
7afe21cc 6878 prev_insn = insn;
4977bab6 6879#endif
7afe21cc
RK
6880 insn = JUMP_LABEL (insn);
6881 continue;
6882 }
6883 }
278a83b2 6884
7afe21cc
RK
6885 if (GET_MODE (insn) == QImode)
6886 PUT_MODE (insn, VOIDmode);
6887
ec8e098d 6888 if (GET_RTX_CLASS (code) == RTX_INSN)
7afe21cc 6889 {
7bd8b2a8
JL
6890 rtx p;
6891
7afe21cc
RK
6892 /* Process notes first so we have all notes in canonical forms when
6893 looking for duplicate operations. */
6894
6895 if (REG_NOTES (insn))
906c4e36 6896 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7afe21cc
RK
6897
6898 /* Track when we are inside in LIBCALL block. Inside such a block,
6899 we do not want to record destinations. The last insn of a
6900 LIBCALL block is not considered to be part of the block, since
830a38ee 6901 its destination is the result of the block and hence should be
7afe21cc
RK
6902 recorded. */
6903
efc9bd41
RK
6904 if (REG_NOTES (insn) != 0)
6905 {
6906 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
6907 libcall_insn = XEXP (p, 0);
6908 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
26d107db
KK
6909 {
6910 /* Keep libcall_insn for the last SET insn of a no-conflict
6911 block to prevent changing the destination. */
6912 if (! no_conflict)
6913 libcall_insn = 0;
6914 else
6915 no_conflict = -1;
6916 }
6917 else if (find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
6918 no_conflict = 1;
efc9bd41 6919 }
7afe21cc 6920
7bd8b2a8 6921 cse_insn (insn, libcall_insn);
f85cc4cb 6922
26d107db
KK
6923 if (no_conflict == -1)
6924 {
6925 libcall_insn = 0;
6926 no_conflict = 0;
6927 }
6928
be8ac49a
RK
6929 /* If we haven't already found an insn where we added a LABEL_REF,
6930 check this one. */
4b4bf941 6931 if (NONJUMP_INSN_P (insn) && ! recorded_label_ref
be8ac49a
RK
6932 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
6933 (void *) insn))
f85cc4cb 6934 recorded_label_ref = 1;
7afe21cc
RK
6935 }
6936
6937 /* If INSN is now an unconditional jump, skip to the end of our
6938 basic block by pretending that we just did the last insn in the
6939 basic block. If we are jumping to the end of our block, show
6940 that we can have one usage of TO. */
6941
7f1c097d 6942 if (any_uncondjump_p (insn))
7afe21cc
RK
6943 {
6944 if (to == 0)
fa0933ba 6945 {
08a69267 6946 free (qty_table);
fa0933ba
JL
6947 return 0;
6948 }
7afe21cc
RK
6949
6950 if (JUMP_LABEL (insn) == to)
6951 to_usage = 1;
6952
6a5293dc
RS
6953 /* Maybe TO was deleted because the jump is unconditional.
6954 If so, there is nothing left in this basic block. */
6955 /* ??? Perhaps it would be smarter to set TO
278a83b2 6956 to whatever follows this insn,
6a5293dc
RS
6957 and pretend the basic block had always ended here. */
6958 if (INSN_DELETED_P (to))
6959 break;
6960
7afe21cc
RK
6961 insn = PREV_INSN (to);
6962 }
6963
6964 /* See if it is ok to keep on going past the label
6965 which used to end our basic block. Remember that we incremented
d45cf215 6966 the count of that label, so we decrement it here. If we made
7afe21cc
RK
6967 a jump unconditional, TO_USAGE will be one; in that case, we don't
6968 want to count the use in that jump. */
6969
6970 if (to != 0 && NEXT_INSN (insn) == to
4b4bf941 6971 && LABEL_P (to) && --LABEL_NUSES (to) == to_usage)
7afe21cc
RK
6972 {
6973 struct cse_basic_block_data val;
146135d6 6974 rtx prev;
7afe21cc
RK
6975
6976 insn = NEXT_INSN (to);
6977
146135d6
RK
6978 /* If TO was the last insn in the function, we are done. */
6979 if (insn == 0)
fa0933ba 6980 {
08a69267 6981 free (qty_table);
fa0933ba
JL
6982 return 0;
6983 }
7afe21cc 6984
146135d6
RK
6985 /* If TO was preceded by a BARRIER we are done with this block
6986 because it has no continuation. */
6987 prev = prev_nonnote_insn (to);
4b4bf941 6988 if (prev && BARRIER_P (prev))
fa0933ba 6989 {
08a69267 6990 free (qty_table);
fa0933ba
JL
6991 return insn;
6992 }
146135d6
RK
6993
6994 /* Find the end of the following block. Note that we won't be
6995 following branches in this case. */
7afe21cc
RK
6996 to_usage = 0;
6997 val.path_size = 0;
9bf8cfbf
ZD
6998 val.path = xmalloc (sizeof (struct branch_path)
6999 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
5affca01 7000 cse_end_of_basic_block (insn, &val, 0, 0);
9bf8cfbf 7001 free (val.path);
7afe21cc
RK
7002
7003 /* If the tables we allocated have enough space left
7004 to handle all the SETs in the next basic block,
7005 continue through it. Otherwise, return,
7006 and that block will be scanned individually. */
7007 if (val.nsets * 2 + next_qty > max_qty)
7008 break;
7009
7010 cse_basic_block_start = val.low_cuid;
7011 cse_basic_block_end = val.high_cuid;
7012 to = val.last;
7013
7014 /* Prevent TO from being deleted if it is a label. */
4b4bf941 7015 if (to != 0 && LABEL_P (to))
7afe21cc
RK
7016 ++LABEL_NUSES (to);
7017
7018 /* Back up so we process the first insn in the extension. */
7019 insn = PREV_INSN (insn);
7020 }
7021 }
7022
341c100f 7023 gcc_assert (next_qty <= max_qty);
7afe21cc 7024
08a69267 7025 free (qty_table);
75c6bd46 7026
7afe21cc
RK
7027 return to ? NEXT_INSN (to) : 0;
7028}
7029\f
be8ac49a 7030/* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
45c23566 7031 there isn't a REG_LABEL note. Return one if so. DATA is the insn. */
be8ac49a
RK
7032
7033static int
7080f735 7034check_for_label_ref (rtx *rtl, void *data)
be8ac49a
RK
7035{
7036 rtx insn = (rtx) data;
7037
7038 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7039 we must rerun jump since it needs to place the note. If this is a
7040 LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
ec5c56db 7041 since no REG_LABEL will be added. */
be8ac49a 7042 return (GET_CODE (*rtl) == LABEL_REF
45c23566 7043 && ! LABEL_REF_NONLOCAL_P (*rtl)
4838c5ee 7044 && LABEL_P (XEXP (*rtl, 0))
be8ac49a
RK
7045 && INSN_UID (XEXP (*rtl, 0)) != 0
7046 && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7047}
7048\f
/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count, INCR is how much
   we count each register usage.

   Called with INCR == -1 to undo a previous count when an insn is about
   to be changed or deleted (see dead_libcall_p and
   delete_trivially_dead_insns).  */

static void
count_reg_usage (rtx x, int *counts, int incr)
{
  enum rtx_code code;
  rtx note;
  const char *fmt;
  int i, j;

  if (x == 0)
    return;

  switch (code = GET_CODE (x))
    {
    case REG:
      counts[REGNO (x)] += incr;
      return;

    /* Constants and labels contain no register uses.  */
    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  The clobbered location itself is not a use.  */
      if (MEM_P (XEXP (x, 0)))
	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, incr);
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.
	 (A non-REG destination, e.g. a MEM, can contain register uses
	 in its address; a REG destination is a set, not a use.)  */
      if (!REG_P (SET_DEST (x)))
	count_reg_usage (SET_DEST (x), counts, incr);
      count_reg_usage (SET_SRC (x), counts, incr);
      return;

    case CALL_INSN:
      /* Registers in the function-usage list are uses too.  */
      count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, incr);
      /* Fall through.  */

    case INSN:
    case JUMP_INSN:
      count_reg_usage (PATTERN (x), counts, incr);

      /* Things used in a REG_EQUAL note aren't dead since loop may try to
	 use them.  */

      note = find_reg_equal_equiv_note (x);
      if (note)
	{
	  rtx eqv = XEXP (note, 0);

	  if (GET_CODE (eqv) == EXPR_LIST)
	    /* This REG_EQUAL note describes the result of a function call.
	       Process all the arguments.  */
	    do
	      {
		count_reg_usage (XEXP (eqv, 0), counts, incr);
		eqv = XEXP (eqv, 1);
	      }
	    while (eqv && GET_CODE (eqv) == EXPR_LIST);
	  else
	    count_reg_usage (eqv, counts, incr);
	}
      return;

    case EXPR_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL
	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
	  /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
	     involving registers in the address.  */
	  || GET_CODE (XEXP (x, 0)) == CLOBBER)
	count_reg_usage (XEXP (x, 0), counts, incr);

      count_reg_usage (XEXP (x, 1), counts, incr);
      return;

    case ASM_OPERANDS:
      /* Iterate over just the inputs, not the constraints as well.  */
      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, incr);
      return;

    case INSN_LIST:
      /* INSN_LISTs should have been handled by the note cases above.  */
      gcc_unreachable ();

    default:
      break;
    }

  /* Generic traversal for any rtx code not handled above: recurse into
     all 'e' (expression) and 'E' (vector of expressions) operands.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	count_reg_usage (XEXP (x, i), counts, incr);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  count_reg_usage (XVECEXP (x, i, j), counts, incr);
    }
}
7158\f
/* Return true if SET is live; i.e. its destination is still needed.
   COUNTS is the use-count array built by count_reg_usage.  INSN is the
   insn containing SET and is only consulted on HAVE_cc0 targets.  */
static bool
set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
	    int *counts)
{
#ifdef HAVE_cc0
  rtx tem;
#endif

  /* A no-op set (e.g. a register copied to itself) is never live.  */
  if (set_noop_p (set))
    ;

#ifdef HAVE_cc0
  /* A set of cc0 is dead if the following real insn doesn't look at
     cc0 (and the source has no side effects we would lose).  */
  else if (GET_CODE (SET_DEST (set)) == CC0
	   && !side_effects_p (SET_SRC (set))
	   && ((tem = next_nonnote_insn (insn)) == 0
	       || !INSN_P (tem)
	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
    return false;
#endif
  /* Live when setting anything but an unused pseudo register, or when
     the source has side effects that must be preserved.  Hard
     registers are conservatively treated as live.  */
  else if (!REG_P (SET_DEST (set))
	   || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
	   || counts[REGNO (SET_DEST (set))] != 0
	   || side_effects_p (SET_SRC (set)))
    return true;
  return false;
}
7186
7187/* Return true if insn is live. */
7188
7189static bool
7080f735 7190insn_live_p (rtx insn, int *counts)
4793dca1
JH
7191{
7192 int i;
a3745024 7193 if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
a646f6cc
AH
7194 return true;
7195 else if (GET_CODE (PATTERN (insn)) == SET)
0021de69 7196 return set_live_p (PATTERN (insn), insn, counts);
4793dca1 7197 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
0021de69
DB
7198 {
7199 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7200 {
7201 rtx elt = XVECEXP (PATTERN (insn), 0, i);
4793dca1 7202
0021de69
DB
7203 if (GET_CODE (elt) == SET)
7204 {
7205 if (set_live_p (elt, insn, counts))
7206 return true;
7207 }
7208 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7209 return true;
7210 }
7211 return false;
7212 }
4793dca1
JH
7213 else
7214 return true;
7215}
7216
/* Return true if the libcall ending at INSN is dead as a whole, after
   trying to replace INSN's source with its REG_EQUAL expression so the
   rest of the libcall block becomes unreferenced.  COUNTS is the
   use-count array, which is kept up to date across any change.  */

static bool
dead_libcall_p (rtx insn, int *counts)
{
  rtx note, set, new;

  /* See if there's a REG_EQUAL note on this insn and try to
     replace the source with the REG_EQUAL expression.

     We assume that insns with REG_RETVALs can only be reg->reg
     copies at this point.  */
  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
  if (!note)
    return false;

  set = single_set (insn);
  if (!set)
    return false;

  /* Prefer a simplified form of the note's expression if one exists.  */
  new = simplify_rtx (XEXP (note, 0));
  if (!new)
    new = XEXP (note, 0);

  /* While changing insn, we must update the counts accordingly.
     Uncount the old form now; the successful path re-counts the new
     form, the failure path re-counts the unchanged insn.  */
  count_reg_usage (insn, counts, -1);

  if (validate_change (insn, &SET_SRC (set), new, 0))
    {
      count_reg_usage (insn, counts, 1);
      /* Detach the insn from the libcall block so the block's other
	 insns can be deleted.  */
      remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
      remove_note (insn, note);
      return true;
    }

  /* A constant that isn't a legitimate operand may still be usable if
     forced into the constant pool.  */
  if (CONSTANT_P (new))
    {
      new = force_const_mem (GET_MODE (SET_DEST (set)), new);
      if (new && validate_change (insn, &SET_SRC (set), new, 0))
	{
	  count_reg_usage (insn, counts, 1);
	  remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
	  remove_note (insn, note);
	  return true;
	}
    }

  /* Replacement failed; restore the counts for the unchanged insn.  */
  count_reg_usage (insn, counts, 1);
  return false;
}
7267
/* Scan all the insns and delete any that are dead; i.e., they store a register
   that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in loop since it won't try to
   move dead invariants out of loops or make givs for dead quantities.  The
   remaining passes of the compilation are also sped up.

   INSNS is the insn chain to scan; NREG is the number of registers, used
   to size the use-count array.  Returns the number of insns deleted.  */

int
delete_trivially_dead_insns (rtx insns, int nreg)
{
  int *counts;
  rtx insn, prev;
  int in_libcall = 0, dead_libcall = 0;
  int ndead = 0;

  timevar_push (TV_DELETE_TRIVIALLY_DEAD);
  /* First count the number of times each register is used.  */
  counts = xcalloc (nreg, sizeof (int));
  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
    count_reg_usage (insn, counts, 1);

  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.

     The first jump optimization pass may leave a real insn as the last
     insn in the function.   We must not skip that insn or we may end
     up deleting code that is not really dead.  */
  insn = get_last_insn ();
  if (! INSN_P (insn))
    insn = prev_real_insn (insn);

  for (; insn; insn = prev)
    {
      int live_insn = 0;

      prev = prev_real_insn (insn);

      /* Don't delete any insns that are part of a libcall block unless
	 we can delete the whole libcall block.

	 Flow or loop might get confused if we did that.  Remember
	 that we are scanning backwards.  */
      if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
	{
	  /* REG_RETVAL marks the end of a libcall block (seen first,
	     since we scan backwards).  Decide the fate of the whole
	     block here; keep this insn alive regardless so the block
	     stays well-formed if it is live.  */
	  in_libcall = 1;
	  live_insn = 1;
	  dead_libcall = dead_libcall_p (insn, counts);
	}
      else if (in_libcall)
	live_insn = ! dead_libcall;
      else
	live_insn = insn_live_p (insn, counts);

      /* If this is a dead insn, delete it and show registers in it aren't
	 being used.  */

      if (! live_insn)
	{
	  count_reg_usage (insn, counts, -1);
	  delete_insn_and_edges (insn);
	  ndead++;
	}

      /* REG_LIBCALL marks the start of a libcall block; we are done
	 with it once we pass it going backwards.  */
      if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	{
	  in_libcall = 0;
	  dead_libcall = 0;
	}
    }

  if (dump_file && ndead)
    fprintf (dump_file, "Deleted %i trivially dead insns\n",
	     ndead);
  /* Clean up.  */
  free (counts);
  timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
  return ndead;
}
e129d93a
ILT
7348
7349/* This function is called via for_each_rtx. The argument, NEWREG, is
7350 a condition code register with the desired mode. If we are looking
7351 at the same register in a different mode, replace it with
7352 NEWREG. */
7353
7354static int
7355cse_change_cc_mode (rtx *loc, void *data)
7356{
fc188d37 7357 struct change_cc_mode_args* args = (struct change_cc_mode_args*)data;
e129d93a
ILT
7358
7359 if (*loc
f8cfc6aa 7360 && REG_P (*loc)
fc188d37
AK
7361 && REGNO (*loc) == REGNO (args->newreg)
7362 && GET_MODE (*loc) != GET_MODE (args->newreg))
e129d93a 7363 {
fc188d37
AK
7364 validate_change (args->insn, loc, args->newreg, 1);
7365
e129d93a
ILT
7366 return -1;
7367 }
7368 return 0;
7369}
7370
fc188d37
AK
7371/* Change the mode of any reference to the register REGNO (NEWREG) to
7372 GET_MODE (NEWREG) in INSN. */
7373
7374static void
7375cse_change_cc_mode_insn (rtx insn, rtx newreg)
7376{
7377 struct change_cc_mode_args args;
7378 int success;
7379
7380 if (!INSN_P (insn))
7381 return;
7382
7383 args.insn = insn;
7384 args.newreg = newreg;
7385
7386 for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
7387 for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, &args);
7388
7389 /* If the following assertion was triggered, there is most probably
7390 something wrong with the cc_modes_compatible back end function.
7391 CC modes only can be considered compatible if the insn - with the mode
7392 replaced by any of the compatible modes - can still be recognized. */
7393 success = apply_change_group ();
7394 gcc_assert (success);
7395}
7396
e129d93a
ILT
7397/* Change the mode of any reference to the register REGNO (NEWREG) to
7398 GET_MODE (NEWREG), starting at START. Stop before END. Stop at
2e802a6f 7399 any instruction which modifies NEWREG. */
e129d93a
ILT
7400
7401static void
7402cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
7403{
7404 rtx insn;
7405
7406 for (insn = start; insn != end; insn = NEXT_INSN (insn))
7407 {
7408 if (! INSN_P (insn))
7409 continue;
7410
2e802a6f 7411 if (reg_set_p (newreg, insn))
e129d93a
ILT
7412 return;
7413
fc188d37 7414 cse_change_cc_mode_insn (insn, newreg);
e129d93a
ILT
7415 }
7416}
7417
7418/* BB is a basic block which finishes with CC_REG as a condition code
7419 register which is set to CC_SRC. Look through the successors of BB
7420 to find blocks which have a single predecessor (i.e., this one),
7421 and look through those blocks for an assignment to CC_REG which is
7422 equivalent to CC_SRC. CAN_CHANGE_MODE indicates whether we are
7423 permitted to change the mode of CC_SRC to a compatible mode. This
7424 returns VOIDmode if no equivalent assignments were found.
7425 Otherwise it returns the mode which CC_SRC should wind up with.
7426
7427 The main complexity in this function is handling the mode issues.
7428 We may have more than one duplicate which we can eliminate, and we
7429 try to find a mode which will work for multiple duplicates. */
7430
7431static enum machine_mode
7432cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
7433{
7434 bool found_equiv;
7435 enum machine_mode mode;
7436 unsigned int insn_count;
7437 edge e;
7438 rtx insns[2];
7439 enum machine_mode modes[2];
7440 rtx last_insns[2];
7441 unsigned int i;
7442 rtx newreg;
628f6a4e 7443 edge_iterator ei;
e129d93a
ILT
7444
7445 /* We expect to have two successors. Look at both before picking
7446 the final mode for the comparison. If we have more successors
7447 (i.e., some sort of table jump, although that seems unlikely),
7448 then we require all beyond the first two to use the same
7449 mode. */
7450
7451 found_equiv = false;
7452 mode = GET_MODE (cc_src);
7453 insn_count = 0;
628f6a4e 7454 FOR_EACH_EDGE (e, ei, bb->succs)
e129d93a
ILT
7455 {
7456 rtx insn;
7457 rtx end;
7458
7459 if (e->flags & EDGE_COMPLEX)
7460 continue;
7461
628f6a4e 7462 if (EDGE_COUNT (e->dest->preds) != 1
e129d93a
ILT
7463 || e->dest == EXIT_BLOCK_PTR)
7464 continue;
7465
7466 end = NEXT_INSN (BB_END (e->dest));
7467 for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
7468 {
7469 rtx set;
7470
7471 if (! INSN_P (insn))
7472 continue;
7473
7474 /* If CC_SRC is modified, we have to stop looking for
7475 something which uses it. */
7476 if (modified_in_p (cc_src, insn))
7477 break;
7478
7479 /* Check whether INSN sets CC_REG to CC_SRC. */
7480 set = single_set (insn);
7481 if (set
f8cfc6aa 7482 && REG_P (SET_DEST (set))
e129d93a
ILT
7483 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7484 {
7485 bool found;
7486 enum machine_mode set_mode;
7487 enum machine_mode comp_mode;
7488
7489 found = false;
7490 set_mode = GET_MODE (SET_SRC (set));
7491 comp_mode = set_mode;
7492 if (rtx_equal_p (cc_src, SET_SRC (set)))
7493 found = true;
7494 else if (GET_CODE (cc_src) == COMPARE
7495 && GET_CODE (SET_SRC (set)) == COMPARE
1f44254c 7496 && mode != set_mode
e129d93a
ILT
7497 && rtx_equal_p (XEXP (cc_src, 0),
7498 XEXP (SET_SRC (set), 0))
7499 && rtx_equal_p (XEXP (cc_src, 1),
7500 XEXP (SET_SRC (set), 1)))
7501
7502 {
5fd9b178 7503 comp_mode = targetm.cc_modes_compatible (mode, set_mode);
e129d93a
ILT
7504 if (comp_mode != VOIDmode
7505 && (can_change_mode || comp_mode == mode))
7506 found = true;
7507 }
7508
7509 if (found)
7510 {
7511 found_equiv = true;
1f44254c 7512 if (insn_count < ARRAY_SIZE (insns))
e129d93a
ILT
7513 {
7514 insns[insn_count] = insn;
7515 modes[insn_count] = set_mode;
7516 last_insns[insn_count] = end;
7517 ++insn_count;
7518
1f44254c
ILT
7519 if (mode != comp_mode)
7520 {
341c100f 7521 gcc_assert (can_change_mode);
1f44254c 7522 mode = comp_mode;
fc188d37
AK
7523
7524 /* The modified insn will be re-recognized later. */
1f44254c
ILT
7525 PUT_MODE (cc_src, mode);
7526 }
e129d93a
ILT
7527 }
7528 else
7529 {
7530 if (set_mode != mode)
1f44254c
ILT
7531 {
7532 /* We found a matching expression in the
7533 wrong mode, but we don't have room to
7534 store it in the array. Punt. This case
7535 should be rare. */
7536 break;
7537 }
e129d93a
ILT
7538 /* INSN sets CC_REG to a value equal to CC_SRC
7539 with the right mode. We can simply delete
7540 it. */
7541 delete_insn (insn);
7542 }
7543
7544 /* We found an instruction to delete. Keep looking,
7545 in the hopes of finding a three-way jump. */
7546 continue;
7547 }
7548
7549 /* We found an instruction which sets the condition
7550 code, so don't look any farther. */
7551 break;
7552 }
7553
7554 /* If INSN sets CC_REG in some other way, don't look any
7555 farther. */
7556 if (reg_set_p (cc_reg, insn))
7557 break;
7558 }
7559
7560 /* If we fell off the bottom of the block, we can keep looking
7561 through successors. We pass CAN_CHANGE_MODE as false because
7562 we aren't prepared to handle compatibility between the
7563 further blocks and this block. */
7564 if (insn == end)
7565 {
1f44254c
ILT
7566 enum machine_mode submode;
7567
7568 submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
7569 if (submode != VOIDmode)
7570 {
341c100f 7571 gcc_assert (submode == mode);
1f44254c
ILT
7572 found_equiv = true;
7573 can_change_mode = false;
7574 }
e129d93a
ILT
7575 }
7576 }
7577
7578 if (! found_equiv)
7579 return VOIDmode;
7580
7581 /* Now INSN_COUNT is the number of instructions we found which set
7582 CC_REG to a value equivalent to CC_SRC. The instructions are in
7583 INSNS. The modes used by those instructions are in MODES. */
7584
7585 newreg = NULL_RTX;
7586 for (i = 0; i < insn_count; ++i)
7587 {
7588 if (modes[i] != mode)
7589 {
7590 /* We need to change the mode of CC_REG in INSNS[i] and
7591 subsequent instructions. */
7592 if (! newreg)
7593 {
7594 if (GET_MODE (cc_reg) == mode)
7595 newreg = cc_reg;
7596 else
7597 newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7598 }
7599 cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7600 newreg);
7601 }
7602
7603 delete_insn (insns[i]);
7604 }
7605
7606 return mode;
7607}
7608
7609/* If we have a fixed condition code register (or two), walk through
7610 the instructions and try to eliminate duplicate assignments. */
7611
7612void
7613cse_condition_code_reg (void)
7614{
7615 unsigned int cc_regno_1;
7616 unsigned int cc_regno_2;
7617 rtx cc_reg_1;
7618 rtx cc_reg_2;
7619 basic_block bb;
7620
5fd9b178 7621 if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
e129d93a
ILT
7622 return;
7623
7624 cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7625 if (cc_regno_2 != INVALID_REGNUM)
7626 cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7627 else
7628 cc_reg_2 = NULL_RTX;
7629
7630 FOR_EACH_BB (bb)
7631 {
7632 rtx last_insn;
7633 rtx cc_reg;
7634 rtx insn;
7635 rtx cc_src_insn;
7636 rtx cc_src;
7637 enum machine_mode mode;
1f44254c 7638 enum machine_mode orig_mode;
e129d93a
ILT
7639
7640 /* Look for blocks which end with a conditional jump based on a
7641 condition code register. Then look for the instruction which
7642 sets the condition code register. Then look through the
7643 successor blocks for instructions which set the condition
7644 code register to the same value. There are other possible
7645 uses of the condition code register, but these are by far the
7646 most common and the ones which we are most likely to be able
7647 to optimize. */
7648
7649 last_insn = BB_END (bb);
4b4bf941 7650 if (!JUMP_P (last_insn))
e129d93a
ILT
7651 continue;
7652
7653 if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7654 cc_reg = cc_reg_1;
7655 else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7656 cc_reg = cc_reg_2;
7657 else
7658 continue;
7659
7660 cc_src_insn = NULL_RTX;
7661 cc_src = NULL_RTX;
7662 for (insn = PREV_INSN (last_insn);
7663 insn && insn != PREV_INSN (BB_HEAD (bb));
7664 insn = PREV_INSN (insn))
7665 {
7666 rtx set;
7667
7668 if (! INSN_P (insn))
7669 continue;
7670 set = single_set (insn);
7671 if (set
f8cfc6aa 7672 && REG_P (SET_DEST (set))
e129d93a
ILT
7673 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7674 {
7675 cc_src_insn = insn;
7676 cc_src = SET_SRC (set);
7677 break;
7678 }
7679 else if (reg_set_p (cc_reg, insn))
7680 break;
7681 }
7682
7683 if (! cc_src_insn)
7684 continue;
7685
7686 if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7687 continue;
7688
7689 /* Now CC_REG is a condition code register used for a
7690 conditional jump at the end of the block, and CC_SRC, in
7691 CC_SRC_INSN, is the value to which that condition code
7692 register is set, and CC_SRC is still meaningful at the end of
7693 the basic block. */
7694
1f44254c 7695 orig_mode = GET_MODE (cc_src);
e129d93a 7696 mode = cse_cc_succs (bb, cc_reg, cc_src, true);
1f44254c 7697 if (mode != VOIDmode)
e129d93a 7698 {
341c100f 7699 gcc_assert (mode == GET_MODE (cc_src));
1f44254c 7700 if (mode != orig_mode)
2e802a6f
KH
7701 {
7702 rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7703
fc188d37 7704 cse_change_cc_mode_insn (cc_src_insn, newreg);
2e802a6f
KH
7705
7706 /* Do the same in the following insns that use the
7707 current value of CC_REG within BB. */
7708 cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
7709 NEXT_INSN (last_insn),
7710 newreg);
7711 }
e129d93a
ILT
7712 }
7713 }
7714}