]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cse.c
Daily bump.
[thirdparty/gcc.git] / gcc / cse.c
CommitLineData
7afe21cc 1/* Common subexpression elimination for GNU compiler.
5e7b4e25 2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
ad616de1 3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
7afe21cc 4
1322177d 5This file is part of GCC.
7afe21cc 6
1322177d
LB
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
7afe21cc 11
1322177d
LB
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
7afe21cc
RK
16
17You should have received a copy of the GNU General Public License
1322177d
LB
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
7afe21cc 21
7afe21cc 22#include "config.h"
670ee920
KG
23/* stdio.h must precede rtl.h for FFS. */
24#include "system.h"
4977bab6
ZW
25#include "coretypes.h"
26#include "tm.h"
7afe21cc 27#include "rtl.h"
6baf1cc8 28#include "tm_p.h"
7afe21cc 29#include "hard-reg-set.h"
7932a3db 30#include "regs.h"
630c79be 31#include "basic-block.h"
7afe21cc
RK
32#include "flags.h"
33#include "real.h"
34#include "insn-config.h"
35#include "recog.h"
49ad7cfa 36#include "function.h"
956d6950 37#include "expr.h"
50b2596f
KG
38#include "toplev.h"
39#include "output.h"
1497faf6 40#include "ggc.h"
3dec4024 41#include "timevar.h"
26771da7 42#include "except.h"
3c50106f 43#include "target.h"
9bf8cfbf 44#include "params.h"
2f93eea8 45#include "rtlhooks-def.h"
7afe21cc
RK
46
47/* The basic idea of common subexpression elimination is to go
48 through the code, keeping a record of expressions that would
49 have the same value at the current scan point, and replacing
50 expressions encountered with the cheapest equivalent expression.
51
52 It is too complicated to keep track of the different possibilities
e48a7fbe
JL
53 when control paths merge in this code; so, at each label, we forget all
54 that is known and start fresh. This can be described as processing each
55 extended basic block separately. We have a separate pass to perform
56 global CSE.
57
58 Note CSE can turn a conditional or computed jump into a nop or
59 an unconditional jump. When this occurs we arrange to run the jump
60 optimizer after CSE to delete the unreachable code.
7afe21cc
RK
61
62 We use two data structures to record the equivalent expressions:
1bb98cec
DM
63 a hash table for most expressions, and a vector of "quantity
64 numbers" to record equivalent (pseudo) registers.
7afe21cc
RK
65
66 The use of the special data structure for registers is desirable
67 because it is faster. It is possible because registers references
68 contain a fairly small number, the register number, taken from
69 a contiguously allocated series, and two register references are
70 identical if they have the same number. General expressions
71 do not have any such thing, so the only way to retrieve the
72 information recorded on an expression other than a register
73 is to keep it in a hash table.
74
75Registers and "quantity numbers":
278a83b2 76
7afe21cc
RK
77 At the start of each basic block, all of the (hardware and pseudo)
78 registers used in the function are given distinct quantity
79 numbers to indicate their contents. During scan, when the code
80 copies one register into another, we copy the quantity number.
81 When a register is loaded in any other way, we allocate a new
82 quantity number to describe the value generated by this operation.
83 `reg_qty' records what quantity a register is currently thought
84 of as containing.
85
08a69267
RS
86 All real quantity numbers are greater than or equal to zero.
87 If register N has not been assigned a quantity, reg_qty[N] will
88 equal -N - 1, which is always negative.
7afe21cc 89
08a69267
RS
90 Quantity numbers below zero do not exist and none of the `qty_table'
91 entries should be referenced with a negative index.
7afe21cc
RK
92
93 We also maintain a bidirectional chain of registers for each
1bb98cec
DM
94 quantity number. The `qty_table` members `first_reg' and `last_reg',
95 and `reg_eqv_table' members `next' and `prev' hold these chains.
7afe21cc
RK
96
97 The first register in a chain is the one whose lifespan is least local.
98 Among equals, it is the one that was seen first.
99 We replace any equivalent register with that one.
100
101 If two registers have the same quantity number, it must be true that
1bb98cec 102 REG expressions with qty_table `mode' must be in the hash table for both
7afe21cc
RK
103 registers and must be in the same class.
104
105 The converse is not true. Since hard registers may be referenced in
106 any mode, two REG expressions might be equivalent in the hash table
107 but not have the same quantity number if the quantity number of one
108 of the registers is not the same mode as those expressions.
278a83b2 109
7afe21cc
RK
110Constants and quantity numbers
111
112 When a quantity has a known constant value, that value is stored
1bb98cec 113 in the appropriate qty_table `const_rtx'. This is in addition to
7afe21cc
RK
114 putting the constant in the hash table as is usual for non-regs.
115
d45cf215 116 Whether a reg or a constant is preferred is determined by the configuration
7afe21cc
RK
117 macro CONST_COSTS and will often depend on the constant value. In any
118 event, expressions containing constants can be simplified, by fold_rtx.
119
120 When a quantity has a known nearly constant value (such as an address
1bb98cec
DM
121 of a stack slot), that value is stored in the appropriate qty_table
122 `const_rtx'.
7afe21cc
RK
123
124 Integer constants don't have a machine mode. However, cse
125 determines the intended machine mode from the destination
126 of the instruction that moves the constant. The machine mode
127 is recorded in the hash table along with the actual RTL
128 constant expression so that different modes are kept separate.
129
130Other expressions:
131
132 To record known equivalences among expressions in general
133 we use a hash table called `table'. It has a fixed number of buckets
134 that contain chains of `struct table_elt' elements for expressions.
135 These chains connect the elements whose expressions have the same
136 hash codes.
137
138 Other chains through the same elements connect the elements which
139 currently have equivalent values.
140
141 Register references in an expression are canonicalized before hashing
1bb98cec 142 the expression. This is done using `reg_qty' and qty_table `first_reg'.
7afe21cc
RK
143 The hash code of a register reference is computed using the quantity
144 number, not the register number.
145
146 When the value of an expression changes, it is necessary to remove from the
147 hash table not just that expression but all expressions whose values
148 could be different as a result.
149
150 1. If the value changing is in memory, except in special cases
151 ANYTHING referring to memory could be changed. That is because
152 nobody knows where a pointer does not point.
153 The function `invalidate_memory' removes what is necessary.
154
155 The special cases are when the address is constant or is
156 a constant plus a fixed register such as the frame pointer
157 or a static chain pointer. When such addresses are stored in,
158 we can tell exactly which other such addresses must be invalidated
159 due to overlap. `invalidate' does this.
160 All expressions that refer to non-constant
161 memory addresses are also invalidated. `invalidate_memory' does this.
162
163 2. If the value changing is a register, all expressions
164 containing references to that register, and only those,
165 must be removed.
166
167 Because searching the entire hash table for expressions that contain
168 a register is very slow, we try to figure out when it isn't necessary.
169 Precisely, this is necessary only when expressions have been
170 entered in the hash table using this register, and then the value has
171 changed, and then another expression wants to be added to refer to
172 the register's new value. This sequence of circumstances is rare
173 within any one basic block.
174
175 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
176 reg_tick[i] is incremented whenever a value is stored in register i.
177 reg_in_table[i] holds -1 if no references to register i have been
178 entered in the table; otherwise, it contains the value reg_tick[i] had
179 when the references were entered. If we want to enter a reference
180 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
181 Until we want to enter a new entry, the mere fact that the two vectors
182 don't match makes the entries be ignored if anyone tries to match them.
183
184 Registers themselves are entered in the hash table as well as in
185 the equivalent-register chains. However, the vectors `reg_tick'
186 and `reg_in_table' do not apply to expressions which are simple
187 register references. These expressions are removed from the table
188 immediately when they become invalid, and this can be done even if
189 we do not immediately search for all the expressions that refer to
190 the register.
191
192 A CLOBBER rtx in an instruction invalidates its operand for further
193 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
194 invalidates everything that resides in memory.
195
196Related expressions:
197
198 Constant expressions that differ only by an additive integer
199 are called related. When a constant expression is put in
200 the table, the related expression with no constant term
201 is also entered. These are made to point at each other
202 so that it is possible to find out if there exists any
203 register equivalent to an expression related to a given expression. */
278a83b2 204
1bb98cec
DM
205/* Length of qty_table vector. We know in advance we will not need
206 a quantity number this big. */
7afe21cc
RK
207
208static int max_qty;
209
210/* Next quantity number to be allocated.
211 This is 1 + the largest number needed so far. */
212
213static int next_qty;
214
1bb98cec 215/* Per-qty information tracking.
7afe21cc 216
1bb98cec
DM
217 `first_reg' and `last_reg' track the head and tail of the
218 chain of registers which currently contain this quantity.
7afe21cc 219
1bb98cec 220 `mode' contains the machine mode of this quantity.
7afe21cc 221
1bb98cec
DM
222 `const_rtx' holds the rtx of the constant value of this
223 quantity, if known. A summations of the frame/arg pointer
224 and a constant can also be entered here. When this holds
225 a known value, `const_insn' is the insn which stored the
226 constant value.
7afe21cc 227
1bb98cec
DM
228 `comparison_{code,const,qty}' are used to track when a
229 comparison between a quantity and some constant or register has
230 been passed. In such a case, we know the results of the comparison
231 in case we see it again. These members record a comparison that
232 is known to be true. `comparison_code' holds the rtx code of such
233 a comparison, else it is set to UNKNOWN and the other two
234 comparison members are undefined. `comparison_const' holds
235 the constant being compared against, or zero if the comparison
236 is not against a constant. `comparison_qty' holds the quantity
237 being compared against when the result is known. If the comparison
238 is not with a register, `comparison_qty' is -1. */
7afe21cc 239
1bb98cec
DM
240struct qty_table_elem
241{
242 rtx const_rtx;
243 rtx const_insn;
244 rtx comparison_const;
245 int comparison_qty;
770ae6cc 246 unsigned int first_reg, last_reg;
496324d0
DN
247 /* The sizes of these fields should match the sizes of the
248 code and mode fields of struct rtx_def (see rtl.h). */
249 ENUM_BITFIELD(rtx_code) comparison_code : 16;
250 ENUM_BITFIELD(machine_mode) mode : 8;
1bb98cec 251};
7afe21cc 252
1bb98cec
DM
253/* The table of all qtys, indexed by qty number. */
254static struct qty_table_elem *qty_table;
7afe21cc 255
fc188d37
AK
256/* Structure used to pass arguments via for_each_rtx to function
257 cse_change_cc_mode. */
258struct change_cc_mode_args
259{
260 rtx insn;
261 rtx newreg;
262};
263
7afe21cc
RK
264#ifdef HAVE_cc0
265/* For machines that have a CC0, we do not record its value in the hash
266 table since its use is guaranteed to be the insn immediately following
267 its definition and any other insn is presumed to invalidate it.
268
269 Instead, we store below the value last assigned to CC0. If it should
270 happen to be a constant, it is stored in preference to the actual
271 assigned value. In case it is a constant, we store the mode in which
272 the constant should be interpreted. */
273
274static rtx prev_insn_cc0;
275static enum machine_mode prev_insn_cc0_mode;
7afe21cc
RK
276
277/* Previous actual insn. 0 if at first insn of basic block. */
278
279static rtx prev_insn;
4977bab6 280#endif
7afe21cc
RK
281
282/* Insn being scanned. */
283
284static rtx this_insn;
285
71d306d1
DE
286/* Index by register number, gives the number of the next (or
287 previous) register in the chain of registers sharing the same
7afe21cc
RK
288 value.
289
290 Or -1 if this register is at the end of the chain.
291
1bb98cec
DM
292 If reg_qty[N] == N, reg_eqv_table[N].next is undefined. */
293
294/* Per-register equivalence chain. */
295struct reg_eqv_elem
296{
297 int next, prev;
298};
7afe21cc 299
1bb98cec
DM
300/* The table of all register equivalence chains. */
301static struct reg_eqv_elem *reg_eqv_table;
7afe21cc 302
14a774a9
RK
/* Per-register bookkeeping for CSE.  */
struct cse_reg_info
{
  /* The timestamp at which this register is initialized.  */
  unsigned int timestamp;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};
7afe21cc 325
bc5e3b54
KH
326/* A table of cse_reg_info indexed by register numbers. */
327struct cse_reg_info *cse_reg_info_table;
c1edba58 328
bc5e3b54
KH
329/* The size of the above table. */
330static unsigned int cse_reg_info_table_size;
9b1549b8 331
bc5e3b54
KH
332/* The index of the first entry that has not been initialized. */
333static unsigned int cse_reg_info_table_first_uninitialized;
7afe21cc 334
bc5e3b54
KH
335/* The timestamp at the beginning of the current run of
 336 cse_basic_block. We increment this variable at the beginning of
337 the current run of cse_basic_block. The timestamp field of a
338 cse_reg_info entry matches the value of this variable if and only
339 if the entry has been initialized during the current run of
340 cse_basic_block. */
341static unsigned int cse_reg_info_timestamp;
7afe21cc 342
278a83b2 343/* A HARD_REG_SET containing all the hard registers for which there is
7afe21cc
RK
344 currently a REG expression in the hash table. Note the difference
345 from the above variables, which indicate if the REG is mentioned in some
346 expression in the table. */
347
348static HARD_REG_SET hard_regs_in_table;
349
7afe21cc
RK
350/* CUID of insn that starts the basic block currently being cse-processed. */
351
352static int cse_basic_block_start;
353
354/* CUID of insn that ends the basic block currently being cse-processed. */
355
356static int cse_basic_block_end;
357
358/* Vector mapping INSN_UIDs to cuids.
d45cf215 359 The cuids are like uids but increase monotonically always.
7afe21cc
RK
360 We use them to see whether a reg is used outside a given basic block. */
361
906c4e36 362static int *uid_cuid;
7afe21cc 363
164c8956
RK
364/* Highest UID in UID_CUID. */
365static int max_uid;
366
7afe21cc
RK
367/* Get the cuid of an insn. */
368
369#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
370
4eadede7
ZW
371/* Nonzero if this pass has made changes, and therefore it's
372 worthwhile to run the garbage collector. */
373
374static int cse_altered;
375
7afe21cc
RK
376/* Nonzero if cse has altered conditional jump insns
377 in such a way that jump optimization should be redone. */
378
379static int cse_jumps_altered;
380
f85cc4cb
RK
381/* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
382 REG_LABEL, we have to rerun jump after CSE to put in the note. */
a5dfb4ee
RK
383static int recorded_label_ref;
384
7afe21cc
RK
385/* canon_hash stores 1 in do_not_record
386 if it notices a reference to CC0, PC, or some other volatile
387 subexpression. */
388
389static int do_not_record;
390
391/* canon_hash stores 1 in hash_arg_in_memory
392 if it notices a reference to memory within the expression being hashed. */
393
394static int hash_arg_in_memory;
395
7afe21cc
RK
396/* The hash table contains buckets which are chains of `struct table_elt's,
397 each recording one expression's information.
398 That expression is in the `exp' field.
399
db048faf
MM
400 The canon_exp field contains a canonical (from the point of view of
401 alias analysis) version of the `exp' field.
402
7afe21cc
RK
403 Those elements with the same hash code are chained in both directions
404 through the `next_same_hash' and `prev_same_hash' fields.
405
406 Each set of expressions with equivalent values
407 are on a two-way chain through the `next_same_value'
408 and `prev_same_value' fields, and all point with
409 the `first_same_value' field at the first element in
410 that chain. The chain is in order of increasing cost.
411 Each element's cost value is in its `cost' field.
412
413 The `in_memory' field is nonzero for elements that
414 involve any reference to memory. These elements are removed
415 whenever a write is done to an unidentified location in memory.
416 To be safe, we assume that a memory address is unidentified unless
417 the address is either a symbol constant or a constant plus
418 the frame pointer or argument pointer.
419
7afe21cc
RK
420 The `related_value' field is used to connect related expressions
421 (that differ by adding an integer).
422 The related expressions are chained in a circular fashion.
423 `related_value' is zero for expressions for which this
424 chain is not useful.
425
426 The `cost' field stores the cost of this element's expression.
630c79be
BS
427 The `regcost' field stores the value returned by approx_reg_cost for
428 this element's expression.
7afe21cc
RK
429
430 The `is_const' flag is set if the element is a constant (including
431 a fixed address).
432
433 The `flag' field is used as a temporary during some search routines.
434
435 The `mode' field is usually the same as GET_MODE (`exp'), but
436 if `exp' is a CONST_INT and has no machine mode then the `mode'
437 field is the mode it was being used as. Each constant is
438 recorded separately for each mode it is used with. */
439
7afe21cc
RK
440struct table_elt
441{
442 rtx exp;
db048faf 443 rtx canon_exp;
7afe21cc
RK
444 struct table_elt *next_same_hash;
445 struct table_elt *prev_same_hash;
446 struct table_elt *next_same_value;
447 struct table_elt *prev_same_value;
448 struct table_elt *first_same_value;
449 struct table_elt *related_value;
450 int cost;
630c79be 451 int regcost;
496324d0
DN
452 /* The size of this field should match the size
453 of the mode field of struct rtx_def (see rtl.h). */
454 ENUM_BITFIELD(machine_mode) mode : 8;
7afe21cc 455 char in_memory;
7afe21cc
RK
456 char is_const;
457 char flag;
458};
459
7afe21cc
RK
460/* We don't want a lot of buckets, because we rarely have very many
461 things stored in the hash table, and a lot of buckets slows
462 down a lot of loops that happen frequently. */
9b1549b8
DM
463#define HASH_SHIFT 5
464#define HASH_SIZE (1 << HASH_SHIFT)
465#define HASH_MASK (HASH_SIZE - 1)
7afe21cc
RK
466
467/* Compute hash code of X in mode M. Special-case case where X is a pseudo
468 register (hard registers may require `do_not_record' to be set). */
469
470#define HASH(X, M) \
f8cfc6aa 471 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
9b1549b8
DM
472 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
473 : canon_hash (X, M)) & HASH_MASK)
7afe21cc 474
0516f6fe
SB
475/* Like HASH, but without side-effects. */
476#define SAFE_HASH(X, M) \
477 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
478 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
479 : safe_hash (X, M)) & HASH_MASK)
480
630c79be
BS
481/* Determine whether register number N is considered a fixed register for the
482 purpose of approximating register costs.
7afe21cc
RK
483 It is desirable to replace other regs with fixed regs, to reduce need for
484 non-fixed hard regs.
553687c9 485 A reg wins if it is either the frame pointer or designated as fixed. */
7afe21cc 486#define FIXED_REGNO_P(N) \
8bc169f2 487 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
6ab832bc 488 || fixed_regs[N] || global_regs[N])
7afe21cc
RK
489
490/* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
ac07e066
RK
491 hard registers and pointers into the frame are the cheapest with a cost
492 of 0. Next come pseudos with a cost of one and other hard registers with
493 a cost of 2. Aside from these special cases, call `rtx_cost'. */
494
d67fb775
SB
495#define CHEAP_REGNO(N) \
496 (REGNO_PTR_FRAME_P(N) \
497 || (HARD_REGISTER_NUM_P (N) \
e7bb59fa 498 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
7afe21cc 499
f8cfc6aa
JQ
500#define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
501#define COST_IN(X,OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))
7afe21cc 502
30f72379
MM
503/* Get the number of times this register has been updated in this
504 basic block. */
505
bc5e3b54 506#define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)
30f72379
MM
507
508/* Get the point at which REG was recorded in the table. */
509
bc5e3b54 510#define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)
30f72379 511
46081bb3
SH
512/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
513 SUBREG). */
514
bc5e3b54 515#define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)
46081bb3 516
30f72379
MM
517/* Get the quantity number for REG. */
518
bc5e3b54 519#define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)
30f72379 520
7afe21cc 521/* Determine if the quantity number for register X represents a valid index
1bb98cec 522 into the qty_table. */
7afe21cc 523
08a69267 524#define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
7afe21cc 525
9b1549b8 526static struct table_elt *table[HASH_SIZE];
7afe21cc
RK
527
528/* Chain of `struct table_elt's made so far for this function
529 but currently removed from the table. */
530
531static struct table_elt *free_element_chain;
532
7afe21cc
RK
533/* Set to the cost of a constant pool reference if one was found for a
534 symbolic constant. If this was found, it means we should try to
535 convert constants into constant pool entries if they don't fit in
536 the insn. */
537
538static int constant_pool_entries_cost;
dd0ba281 539static int constant_pool_entries_regcost;
7afe21cc 540
6cd4575e
RK
541/* This data describes a block that will be processed by cse_basic_block. */
542
14a774a9
RK
543struct cse_basic_block_data
544{
6cd4575e
RK
545 /* Lowest CUID value of insns in block. */
546 int low_cuid;
547 /* Highest CUID value of insns in block. */
548 int high_cuid;
549 /* Total number of SETs in block. */
550 int nsets;
551 /* Last insn in the block. */
552 rtx last;
553 /* Size of current branch path, if any. */
554 int path_size;
555 /* Current branch path, indicating which branches will be taken. */
14a774a9
RK
556 struct branch_path
557 {
558 /* The branch insn. */
559 rtx branch;
560 /* Whether it should be taken or not. AROUND is the same as taken
561 except that it is used when the destination label is not preceded
6cd4575e 562 by a BARRIER. */
6de9cd9a 563 enum taken {PATH_TAKEN, PATH_NOT_TAKEN, PATH_AROUND} status;
9bf8cfbf 564 } *path;
6cd4575e
RK
565};
566
7080f735
AJ
567static bool fixed_base_plus_p (rtx x);
568static int notreg_cost (rtx, enum rtx_code);
569static int approx_reg_cost_1 (rtx *, void *);
570static int approx_reg_cost (rtx);
56ae04af 571static int preferable (int, int, int, int);
7080f735
AJ
572static void new_basic_block (void);
573static void make_new_qty (unsigned int, enum machine_mode);
574static void make_regs_eqv (unsigned int, unsigned int);
575static void delete_reg_equiv (unsigned int);
576static int mention_regs (rtx);
577static int insert_regs (rtx, struct table_elt *, int);
578static void remove_from_table (struct table_elt *, unsigned);
579static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
580static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
581static rtx lookup_as_function (rtx, enum rtx_code);
582static struct table_elt *insert (rtx, struct table_elt *, unsigned,
583 enum machine_mode);
584static void merge_equiv_classes (struct table_elt *, struct table_elt *);
585static void invalidate (rtx, enum machine_mode);
586static int cse_rtx_varies_p (rtx, int);
587static void remove_invalid_refs (unsigned int);
588static void remove_invalid_subreg_refs (unsigned int, unsigned int,
589 enum machine_mode);
590static void rehash_using_reg (rtx);
591static void invalidate_memory (void);
592static void invalidate_for_call (void);
593static rtx use_related_value (rtx, struct table_elt *);
0516f6fe
SB
594
595static inline unsigned canon_hash (rtx, enum machine_mode);
596static inline unsigned safe_hash (rtx, enum machine_mode);
597static unsigned hash_rtx_string (const char *);
598
7080f735
AJ
599static rtx canon_reg (rtx, rtx);
600static void find_best_addr (rtx, rtx *, enum machine_mode);
601static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
602 enum machine_mode *,
603 enum machine_mode *);
604static rtx fold_rtx (rtx, rtx);
605static rtx equiv_constant (rtx);
606static void record_jump_equiv (rtx, int);
607static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
608 int);
609static void cse_insn (rtx, rtx);
86caf04d 610static void cse_end_of_basic_block (rtx, struct cse_basic_block_data *,
5affca01 611 int, int);
7080f735
AJ
612static int addr_affects_sp_p (rtx);
613static void invalidate_from_clobbers (rtx);
614static rtx cse_process_notes (rtx, rtx);
7080f735
AJ
615static void invalidate_skipped_set (rtx, rtx, void *);
616static void invalidate_skipped_block (rtx);
5affca01 617static rtx cse_basic_block (rtx, rtx, struct branch_path *);
9ab81df2 618static void count_reg_usage (rtx, int *, int);
7080f735
AJ
619static int check_for_label_ref (rtx *, void *);
620extern void dump_class (struct table_elt*);
bc5e3b54
KH
621static void get_cse_reg_info_1 (unsigned int regno);
622static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
7080f735
AJ
623static int check_dependence (rtx *, void *);
624
625static void flush_hash_table (void);
626static bool insn_live_p (rtx, int *);
627static bool set_live_p (rtx, rtx, int *);
628static bool dead_libcall_p (rtx, int *);
e129d93a 629static int cse_change_cc_mode (rtx *, void *);
fc188d37 630static void cse_change_cc_mode_insn (rtx, rtx);
e129d93a
ILT
631static void cse_change_cc_mode_insns (rtx, rtx, rtx);
632static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);
7afe21cc 633\f
2f93eea8
PB
634
635#undef RTL_HOOKS_GEN_LOWPART
636#define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible
637
638static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
639\f
4977bab6
ZW
640/* Nonzero if X has the form (PLUS frame-pointer integer). We check for
641 virtual regs here because the simplify_*_operation routines are called
642 by integrate.c, which is called before virtual register instantiation. */
643
644static bool
7080f735 645fixed_base_plus_p (rtx x)
4977bab6
ZW
646{
647 switch (GET_CODE (x))
648 {
649 case REG:
650 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
651 return true;
652 if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
653 return true;
654 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
655 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
656 return true;
657 return false;
658
659 case PLUS:
660 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
661 return false;
662 return fixed_base_plus_p (XEXP (x, 0));
663
4977bab6
ZW
664 default:
665 return false;
666 }
667}
668
a4c6502a
MM
669/* Dump the expressions in the equivalence class indicated by CLASSP.
670 This function is used only for debugging. */
a0153051 671void
7080f735 672dump_class (struct table_elt *classp)
a4c6502a
MM
673{
674 struct table_elt *elt;
675
676 fprintf (stderr, "Equivalence chain for ");
677 print_rtl (stderr, classp->exp);
678 fprintf (stderr, ": \n");
278a83b2 679
a4c6502a
MM
680 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
681 {
682 print_rtl (stderr, elt->exp);
683 fprintf (stderr, "\n");
684 }
685}
686
630c79be 687/* Subroutine of approx_reg_cost; called through for_each_rtx. */
be8ac49a 688
630c79be 689static int
7080f735 690approx_reg_cost_1 (rtx *xp, void *data)
630c79be
BS
691{
692 rtx x = *xp;
c863f8c2 693 int *cost_p = data;
630c79be 694
f8cfc6aa 695 if (x && REG_P (x))
c863f8c2
DM
696 {
697 unsigned int regno = REGNO (x);
698
699 if (! CHEAP_REGNO (regno))
700 {
701 if (regno < FIRST_PSEUDO_REGISTER)
702 {
703 if (SMALL_REGISTER_CLASSES)
704 return 1;
705 *cost_p += 2;
706 }
707 else
708 *cost_p += 1;
709 }
710 }
711
630c79be
BS
712 return 0;
713}
714
715/* Return an estimate of the cost of the registers used in an rtx.
716 This is mostly the number of different REG expressions in the rtx;
a1f300c0 717 however for some exceptions like fixed registers we use a cost of
f1c1dfc3 718 0. If any other hard register reference occurs, return MAX_COST. */
630c79be
BS
719
720static int
7080f735 721approx_reg_cost (rtx x)
630c79be 722{
630c79be 723 int cost = 0;
f1c1dfc3 724
c863f8c2
DM
725 if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
726 return MAX_COST;
630c79be 727
c863f8c2 728 return cost;
630c79be
BS
729}
730
b7ca416f 731/* Returns a canonical version of X for the address, from the point of view,
6668f6a7
KH
732 that all multiplications are represented as MULT instead of the multiply
733 by a power of 2 being represented as ASHIFT. */
b7ca416f
AP
734
735static rtx
736canon_for_address (rtx x)
737{
738 enum rtx_code code;
739 enum machine_mode mode;
740 rtx new = 0;
741 int i;
742 const char *fmt;
743
744 if (!x)
745 return x;
746
747 code = GET_CODE (x);
748 mode = GET_MODE (x);
749
750 switch (code)
751 {
752 case ASHIFT:
753 if (GET_CODE (XEXP (x, 1)) == CONST_INT
754 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode)
755 && INTVAL (XEXP (x, 1)) >= 0)
756 {
757 new = canon_for_address (XEXP (x, 0));
758 new = gen_rtx_MULT (mode, new,
759 gen_int_mode ((HOST_WIDE_INT) 1
760 << INTVAL (XEXP (x, 1)),
761 mode));
762 }
763 break;
764 default:
765 break;
766
767 }
768 if (new)
769 return new;
770
771 /* Now recursively process each operand of this operation. */
772 fmt = GET_RTX_FORMAT (code);
773 for (i = 0; i < GET_RTX_LENGTH (code); i++)
774 if (fmt[i] == 'e')
775 {
776 new = canon_for_address (XEXP (x, i));
777 XEXP (x, i) = new;
778 }
779 return x;
780}
781
630c79be
BS
782/* Return a negative value if an rtx A, whose costs are given by COST_A
783 and REGCOST_A, is more desirable than an rtx B.
784 Return a positive value if A is less desirable, or 0 if the two are
785 equally good. */
786static int
56ae04af 787preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
630c79be 788{
423adbb9 789 /* First, get rid of cases involving expressions that are entirely
f1c1dfc3
BS
790 unwanted. */
791 if (cost_a != cost_b)
792 {
793 if (cost_a == MAX_COST)
794 return 1;
795 if (cost_b == MAX_COST)
796 return -1;
797 }
798
799 /* Avoid extending lifetimes of hardregs. */
800 if (regcost_a != regcost_b)
801 {
802 if (regcost_a == MAX_COST)
803 return 1;
804 if (regcost_b == MAX_COST)
805 return -1;
806 }
807
808 /* Normal operation costs take precedence. */
630c79be
BS
809 if (cost_a != cost_b)
810 return cost_a - cost_b;
f1c1dfc3 811 /* Only if these are identical consider effects on register pressure. */
630c79be
BS
812 if (regcost_a != regcost_b)
813 return regcost_a - regcost_b;
814 return 0;
815}
816
954a5693
RK
817/* Internal function, to compute cost when X is not a register; called
818 from COST macro to keep it simple. */
819
820static int
7080f735 821notreg_cost (rtx x, enum rtx_code outer)
954a5693
RK
822{
823 return ((GET_CODE (x) == SUBREG
f8cfc6aa 824 && REG_P (SUBREG_REG (x))
954a5693
RK
825 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
826 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
827 && (GET_MODE_SIZE (GET_MODE (x))
828 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
829 && subreg_lowpart_p (x)
830 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
831 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
630c79be 832 ? 0
f2fa288f 833 : rtx_cost (x, outer) * 2);
954a5693
RK
834}
835
01329426 836\f
bc5e3b54 837/* Initialize CSE_REG_INFO_TABLE. */
9b1549b8 838
bc5e3b54
KH
839static void
840init_cse_reg_info (unsigned int nregs)
841{
842 /* Do we need to grow the table? */
843 if (nregs > cse_reg_info_table_size)
30f72379 844 {
bc5e3b54
KH
845 unsigned int new_size;
846
847 if (cse_reg_info_table_size < 2048)
30f72379 848 {
bc5e3b54
KH
849 /* Compute a new size that is a power of 2 and no smaller
850 than the large of NREGS and 64. */
851 new_size = (cse_reg_info_table_size
852 ? cse_reg_info_table_size : 64);
853
854 while (new_size < nregs)
855 new_size *= 2;
30f72379
MM
856 }
857 else
1590d0d4 858 {
bc5e3b54
KH
859 /* If we need a big table, allocate just enough to hold
860 NREGS registers. */
861 new_size = nregs;
1590d0d4 862 }
9b1549b8 863
bc5e3b54
KH
864 /* Reallocate the table with NEW_SIZE entries. */
865 cse_reg_info_table = xrealloc (cse_reg_info_table,
866 (sizeof (struct cse_reg_info)
867 * new_size));
868 cse_reg_info_table_size = new_size;
869 }
870
871 /* Do we have all of the first NREGS entries initialized? */
872 if (cse_reg_info_table_first_uninitialized < nregs)
873 {
874 unsigned int old_timestamp = cse_reg_info_timestamp - 1;
875 unsigned int i;
876
877 /* Put the old timestamp on newly allocated entries so that they
878 will all be considered out of date. We do not touch those
879 entries beyond the first NREGS entries to be nice to the
880 virtual memory. */
881 for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
882 cse_reg_info_table[i].timestamp = old_timestamp;
30f72379 883
bc5e3b54 884 cse_reg_info_table_first_uninitialized = nregs;
30f72379 885 }
bc5e3b54
KH
886}
887
888/* Given REGNO, ensure that a cse_reg_info entry exists for REGNO by
889 growing the cse_reg_info_table and/or initializing the entry for
890 REGNO. */
891
892static void
893get_cse_reg_info_1 (unsigned int regno)
894{
895 /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
896 entry will be considered to have been initialized. */
897 cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;
898
899 /* Initialize the rest of the entry. */
900 cse_reg_info_table[regno].reg_tick = 1;
901 cse_reg_info_table[regno].reg_in_table = -1;
902 cse_reg_info_table[regno].subreg_ticked = -1;
903 cse_reg_info_table[regno].reg_qty = -regno - 1;
904}
905
906/* Find a cse_reg_info entry for REGNO. */
30f72379 907
bc5e3b54
KH
908static inline struct cse_reg_info *
909get_cse_reg_info (unsigned int regno)
910{
911 struct cse_reg_info *p = &cse_reg_info_table[regno];
912
913 /* If we are looking for REGNO that is different from the last
914 look-up, make sure the entry for REGNO exists and has been
915 initialized. */
916 if (p->timestamp != cse_reg_info_timestamp)
917 get_cse_reg_info_1 (regno);
30f72379 918
9b1549b8 919 return p;
30f72379
MM
920}
921
7afe21cc
RK
922/* Clear the hash table and initialize each register with its own quantity,
923 for a new basic block. */
924
925static void
7080f735 926new_basic_block (void)
7afe21cc 927{
b3694847 928 int i;
7afe21cc 929
08a69267 930 next_qty = 0;
7afe21cc 931
bc5e3b54
KH
932 /* Invalidate cse_reg_info_table and its cache. */
933 cse_reg_info_timestamp++;
7afe21cc 934
bc5e3b54 935 /* Clear out hash table state for this pass. */
7afe21cc
RK
936 CLEAR_HARD_REG_SET (hard_regs_in_table);
937
938 /* The per-quantity values used to be initialized here, but it is
939 much faster to initialize each as it is made in `make_new_qty'. */
940
9b1549b8 941 for (i = 0; i < HASH_SIZE; i++)
7afe21cc 942 {
9b1549b8
DM
943 struct table_elt *first;
944
945 first = table[i];
946 if (first != NULL)
7afe21cc 947 {
9b1549b8
DM
948 struct table_elt *last = first;
949
950 table[i] = NULL;
951
952 while (last->next_same_hash != NULL)
953 last = last->next_same_hash;
954
955 /* Now relink this hash entire chain into
956 the free element list. */
957
958 last->next_same_hash = free_element_chain;
959 free_element_chain = first;
7afe21cc
RK
960 }
961 }
962
7afe21cc 963#ifdef HAVE_cc0
4977bab6 964 prev_insn = 0;
7afe21cc
RK
965 prev_insn_cc0 = 0;
966#endif
967}
968
1bb98cec
DM
969/* Say that register REG contains a quantity in mode MODE not in any
970 register before and initialize that quantity. */
7afe21cc
RK
971
972static void
7080f735 973make_new_qty (unsigned int reg, enum machine_mode mode)
7afe21cc 974{
b3694847
SS
975 int q;
976 struct qty_table_elem *ent;
977 struct reg_eqv_elem *eqv;
7afe21cc 978
341c100f 979 gcc_assert (next_qty < max_qty);
7afe21cc 980
30f72379 981 q = REG_QTY (reg) = next_qty++;
1bb98cec
DM
982 ent = &qty_table[q];
983 ent->first_reg = reg;
984 ent->last_reg = reg;
985 ent->mode = mode;
986 ent->const_rtx = ent->const_insn = NULL_RTX;
987 ent->comparison_code = UNKNOWN;
988
989 eqv = &reg_eqv_table[reg];
990 eqv->next = eqv->prev = -1;
7afe21cc
RK
991}
992
993/* Make reg NEW equivalent to reg OLD.
994 OLD is not changing; NEW is. */
995
996static void
7080f735 997make_regs_eqv (unsigned int new, unsigned int old)
7afe21cc 998{
770ae6cc
RK
999 unsigned int lastr, firstr;
1000 int q = REG_QTY (old);
1001 struct qty_table_elem *ent;
1bb98cec
DM
1002
1003 ent = &qty_table[q];
7afe21cc
RK
1004
1005 /* Nothing should become eqv until it has a "non-invalid" qty number. */
341c100f 1006 gcc_assert (REGNO_QTY_VALID_P (old));
7afe21cc 1007
30f72379 1008 REG_QTY (new) = q;
1bb98cec
DM
1009 firstr = ent->first_reg;
1010 lastr = ent->last_reg;
7afe21cc
RK
1011
1012 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
1013 hard regs. Among pseudos, if NEW will live longer than any other reg
1014 of the same qty, and that is beyond the current basic block,
1015 make it the new canonical replacement for this qty. */
1016 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
1017 /* Certain fixed registers might be of the class NO_REGS. This means
1018 that not only can they not be allocated by the compiler, but
830a38ee 1019 they cannot be used in substitutions or canonicalizations
7afe21cc
RK
1020 either. */
1021 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
1022 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
1023 || (new >= FIRST_PSEUDO_REGISTER
1024 && (firstr < FIRST_PSEUDO_REGISTER
b1f21e0a
MM
1025 || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
1026 || (uid_cuid[REGNO_FIRST_UID (new)]
7afe21cc 1027 < cse_basic_block_start))
b1f21e0a
MM
1028 && (uid_cuid[REGNO_LAST_UID (new)]
1029 > uid_cuid[REGNO_LAST_UID (firstr)]))))))
7afe21cc 1030 {
1bb98cec
DM
1031 reg_eqv_table[firstr].prev = new;
1032 reg_eqv_table[new].next = firstr;
1033 reg_eqv_table[new].prev = -1;
1034 ent->first_reg = new;
7afe21cc
RK
1035 }
1036 else
1037 {
1038 /* If NEW is a hard reg (known to be non-fixed), insert at end.
1039 Otherwise, insert before any non-fixed hard regs that are at the
1040 end. Registers of class NO_REGS cannot be used as an
1041 equivalent for anything. */
1bb98cec 1042 while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
7afe21cc
RK
1043 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
1044 && new >= FIRST_PSEUDO_REGISTER)
1bb98cec
DM
1045 lastr = reg_eqv_table[lastr].prev;
1046 reg_eqv_table[new].next = reg_eqv_table[lastr].next;
1047 if (reg_eqv_table[lastr].next >= 0)
1048 reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
7afe21cc 1049 else
1bb98cec
DM
1050 qty_table[q].last_reg = new;
1051 reg_eqv_table[lastr].next = new;
1052 reg_eqv_table[new].prev = lastr;
7afe21cc
RK
1053 }
1054}
1055
1056/* Remove REG from its equivalence class. */
1057
1058static void
7080f735 1059delete_reg_equiv (unsigned int reg)
7afe21cc 1060{
b3694847
SS
1061 struct qty_table_elem *ent;
1062 int q = REG_QTY (reg);
1063 int p, n;
7afe21cc 1064
a4e262bc 1065 /* If invalid, do nothing. */
08a69267 1066 if (! REGNO_QTY_VALID_P (reg))
7afe21cc
RK
1067 return;
1068
1bb98cec
DM
1069 ent = &qty_table[q];
1070
1071 p = reg_eqv_table[reg].prev;
1072 n = reg_eqv_table[reg].next;
a4e262bc 1073
7afe21cc 1074 if (n != -1)
1bb98cec 1075 reg_eqv_table[n].prev = p;
7afe21cc 1076 else
1bb98cec 1077 ent->last_reg = p;
7afe21cc 1078 if (p != -1)
1bb98cec 1079 reg_eqv_table[p].next = n;
7afe21cc 1080 else
1bb98cec 1081 ent->first_reg = n;
7afe21cc 1082
08a69267 1083 REG_QTY (reg) = -reg - 1;
7afe21cc
RK
1084}
1085
1086/* Remove any invalid expressions from the hash table
1087 that refer to any of the registers contained in expression X.
1088
1089 Make sure that newly inserted references to those registers
1090 as subexpressions will be considered valid.
1091
1092 mention_regs is not called when a register itself
1093 is being stored in the table.
1094
1095 Return 1 if we have done something that may have changed the hash code
1096 of X. */
1097
1098static int
7080f735 1099mention_regs (rtx x)
7afe21cc 1100{
b3694847
SS
1101 enum rtx_code code;
1102 int i, j;
1103 const char *fmt;
1104 int changed = 0;
7afe21cc
RK
1105
1106 if (x == 0)
e5f6a288 1107 return 0;
7afe21cc
RK
1108
1109 code = GET_CODE (x);
1110 if (code == REG)
1111 {
770ae6cc
RK
1112 unsigned int regno = REGNO (x);
1113 unsigned int endregno
7afe21cc 1114 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
66fd46b6 1115 : hard_regno_nregs[regno][GET_MODE (x)]);
770ae6cc 1116 unsigned int i;
7afe21cc
RK
1117
1118 for (i = regno; i < endregno; i++)
1119 {
30f72379 1120 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
7afe21cc
RK
1121 remove_invalid_refs (i);
1122
30f72379 1123 REG_IN_TABLE (i) = REG_TICK (i);
46081bb3 1124 SUBREG_TICKED (i) = -1;
7afe21cc
RK
1125 }
1126
1127 return 0;
1128 }
1129
34c73909
R
1130 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1131 pseudo if they don't use overlapping words. We handle only pseudos
1132 here for simplicity. */
f8cfc6aa 1133 if (code == SUBREG && REG_P (SUBREG_REG (x))
34c73909
R
1134 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1135 {
770ae6cc 1136 unsigned int i = REGNO (SUBREG_REG (x));
34c73909 1137
30f72379 1138 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
34c73909 1139 {
46081bb3
SH
1140 /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
1141 the last store to this register really stored into this
1142 subreg, then remove the memory of this subreg.
1143 Otherwise, remove any memory of the entire register and
1144 all its subregs from the table. */
1145 if (REG_TICK (i) - REG_IN_TABLE (i) > 1
5dd78e9a 1146 || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
34c73909
R
1147 remove_invalid_refs (i);
1148 else
ddef6bc7 1149 remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
34c73909
R
1150 }
1151
30f72379 1152 REG_IN_TABLE (i) = REG_TICK (i);
5dd78e9a 1153 SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
34c73909
R
1154 return 0;
1155 }
1156
7afe21cc
RK
1157 /* If X is a comparison or a COMPARE and either operand is a register
1158 that does not have a quantity, give it one. This is so that a later
1159 call to record_jump_equiv won't cause X to be assigned a different
1160 hash code and not found in the table after that call.
1161
1162 It is not necessary to do this here, since rehash_using_reg can
1163 fix up the table later, but doing this here eliminates the need to
1164 call that expensive function in the most common case where the only
1165 use of the register is in the comparison. */
1166
ec8e098d 1167 if (code == COMPARE || COMPARISON_P (x))
7afe21cc 1168 {
f8cfc6aa 1169 if (REG_P (XEXP (x, 0))
7afe21cc 1170 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
9714cf43 1171 if (insert_regs (XEXP (x, 0), NULL, 0))
7afe21cc
RK
1172 {
1173 rehash_using_reg (XEXP (x, 0));
1174 changed = 1;
1175 }
1176
f8cfc6aa 1177 if (REG_P (XEXP (x, 1))
7afe21cc 1178 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
9714cf43 1179 if (insert_regs (XEXP (x, 1), NULL, 0))
7afe21cc
RK
1180 {
1181 rehash_using_reg (XEXP (x, 1));
1182 changed = 1;
1183 }
1184 }
1185
1186 fmt = GET_RTX_FORMAT (code);
1187 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1188 if (fmt[i] == 'e')
1189 changed |= mention_regs (XEXP (x, i));
1190 else if (fmt[i] == 'E')
1191 for (j = 0; j < XVECLEN (x, i); j++)
1192 changed |= mention_regs (XVECEXP (x, i, j));
1193
1194 return changed;
1195}
1196
1197/* Update the register quantities for inserting X into the hash table
1198 with a value equivalent to CLASSP.
1199 (If the class does not contain a REG, it is irrelevant.)
1200 If MODIFIED is nonzero, X is a destination; it is being modified.
1201 Note that delete_reg_equiv should be called on a register
1202 before insert_regs is done on that register with MODIFIED != 0.
1203
1204 Nonzero value means that elements of reg_qty have changed
1205 so X's hash code may be different. */
1206
1207static int
7080f735 1208insert_regs (rtx x, struct table_elt *classp, int modified)
7afe21cc 1209{
f8cfc6aa 1210 if (REG_P (x))
7afe21cc 1211 {
770ae6cc
RK
1212 unsigned int regno = REGNO (x);
1213 int qty_valid;
7afe21cc 1214
1ff0c00d
RK
1215 /* If REGNO is in the equivalence table already but is of the
1216 wrong mode for that equivalence, don't do anything here. */
1217
1bb98cec
DM
1218 qty_valid = REGNO_QTY_VALID_P (regno);
1219 if (qty_valid)
1220 {
1221 struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1ff0c00d 1222
1bb98cec
DM
1223 if (ent->mode != GET_MODE (x))
1224 return 0;
1225 }
1226
1227 if (modified || ! qty_valid)
7afe21cc
RK
1228 {
1229 if (classp)
1230 for (classp = classp->first_same_value;
1231 classp != 0;
1232 classp = classp->next_same_value)
f8cfc6aa 1233 if (REG_P (classp->exp)
7afe21cc
RK
1234 && GET_MODE (classp->exp) == GET_MODE (x))
1235 {
1236 make_regs_eqv (regno, REGNO (classp->exp));
1237 return 1;
1238 }
1239
d9f20424
R
1240 /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1241 than REG_IN_TABLE to find out if there was only a single preceding
1242 invalidation - for the SUBREG - or another one, which would be
1243 for the full register. However, if we find here that REG_TICK
1244 indicates that the register is invalid, it means that it has
1245 been invalidated in a separate operation. The SUBREG might be used
1246 now (then this is a recursive call), or we might use the full REG
1247 now and a SUBREG of it later. So bump up REG_TICK so that
1248 mention_regs will do the right thing. */
1249 if (! modified
1250 && REG_IN_TABLE (regno) >= 0
1251 && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1252 REG_TICK (regno)++;
1bb98cec 1253 make_new_qty (regno, GET_MODE (x));
7afe21cc
RK
1254 return 1;
1255 }
cdf4112f
TG
1256
1257 return 0;
7afe21cc 1258 }
c610adec
RK
1259
1260 /* If X is a SUBREG, we will likely be inserting the inner register in the
1261 table. If that register doesn't have an assigned quantity number at
1262 this point but does later, the insertion that we will be doing now will
1263 not be accessible because its hash code will have changed. So assign
1264 a quantity number now. */
1265
f8cfc6aa 1266 else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
c610adec
RK
1267 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1268 {
9714cf43 1269 insert_regs (SUBREG_REG (x), NULL, 0);
34c73909 1270 mention_regs (x);
c610adec
RK
1271 return 1;
1272 }
7afe21cc
RK
1273 else
1274 return mention_regs (x);
1275}
1276\f
1277/* Look in or update the hash table. */
1278
7afe21cc
RK
1279/* Remove table element ELT from use in the table.
1280 HASH is its hash code, made using the HASH macro.
1281 It's an argument because often that is known in advance
1282 and we save much time not recomputing it. */
1283
1284static void
7080f735 1285remove_from_table (struct table_elt *elt, unsigned int hash)
7afe21cc
RK
1286{
1287 if (elt == 0)
1288 return;
1289
1290 /* Mark this element as removed. See cse_insn. */
1291 elt->first_same_value = 0;
1292
1293 /* Remove the table element from its equivalence class. */
278a83b2 1294
7afe21cc 1295 {
b3694847
SS
1296 struct table_elt *prev = elt->prev_same_value;
1297 struct table_elt *next = elt->next_same_value;
7afe21cc 1298
278a83b2
KH
1299 if (next)
1300 next->prev_same_value = prev;
7afe21cc
RK
1301
1302 if (prev)
1303 prev->next_same_value = next;
1304 else
1305 {
b3694847 1306 struct table_elt *newfirst = next;
7afe21cc
RK
1307 while (next)
1308 {
1309 next->first_same_value = newfirst;
1310 next = next->next_same_value;
1311 }
1312 }
1313 }
1314
1315 /* Remove the table element from its hash bucket. */
1316
1317 {
b3694847
SS
1318 struct table_elt *prev = elt->prev_same_hash;
1319 struct table_elt *next = elt->next_same_hash;
7afe21cc 1320
278a83b2
KH
1321 if (next)
1322 next->prev_same_hash = prev;
7afe21cc
RK
1323
1324 if (prev)
1325 prev->next_same_hash = next;
1326 else if (table[hash] == elt)
1327 table[hash] = next;
1328 else
1329 {
1330 /* This entry is not in the proper hash bucket. This can happen
1331 when two classes were merged by `merge_equiv_classes'. Search
1332 for the hash bucket that it heads. This happens only very
1333 rarely, so the cost is acceptable. */
9b1549b8 1334 for (hash = 0; hash < HASH_SIZE; hash++)
7afe21cc
RK
1335 if (table[hash] == elt)
1336 table[hash] = next;
1337 }
1338 }
1339
1340 /* Remove the table element from its related-value circular chain. */
1341
1342 if (elt->related_value != 0 && elt->related_value != elt)
1343 {
b3694847 1344 struct table_elt *p = elt->related_value;
770ae6cc 1345
7afe21cc
RK
1346 while (p->related_value != elt)
1347 p = p->related_value;
1348 p->related_value = elt->related_value;
1349 if (p->related_value == p)
1350 p->related_value = 0;
1351 }
1352
9b1549b8
DM
1353 /* Now add it to the free element chain. */
1354 elt->next_same_hash = free_element_chain;
1355 free_element_chain = elt;
7afe21cc
RK
1356}
1357
1358/* Look up X in the hash table and return its table element,
1359 or 0 if X is not in the table.
1360
1361 MODE is the machine-mode of X, or if X is an integer constant
1362 with VOIDmode then MODE is the mode with which X will be used.
1363
1364 Here we are satisfied to find an expression whose tree structure
1365 looks like X. */
1366
1367static struct table_elt *
7080f735 1368lookup (rtx x, unsigned int hash, enum machine_mode mode)
7afe21cc 1369{
b3694847 1370 struct table_elt *p;
7afe21cc
RK
1371
1372 for (p = table[hash]; p; p = p->next_same_hash)
f8cfc6aa 1373 if (mode == p->mode && ((x == p->exp && REG_P (x))
0516f6fe 1374 || exp_equiv_p (x, p->exp, !REG_P (x), false)))
7afe21cc
RK
1375 return p;
1376
1377 return 0;
1378}
1379
1380/* Like `lookup' but don't care whether the table element uses invalid regs.
1381 Also ignore discrepancies in the machine mode of a register. */
1382
1383static struct table_elt *
7080f735 1384lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
7afe21cc 1385{
b3694847 1386 struct table_elt *p;
7afe21cc 1387
f8cfc6aa 1388 if (REG_P (x))
7afe21cc 1389 {
770ae6cc
RK
1390 unsigned int regno = REGNO (x);
1391
7afe21cc
RK
1392 /* Don't check the machine mode when comparing registers;
1393 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1394 for (p = table[hash]; p; p = p->next_same_hash)
f8cfc6aa 1395 if (REG_P (p->exp)
7afe21cc
RK
1396 && REGNO (p->exp) == regno)
1397 return p;
1398 }
1399 else
1400 {
1401 for (p = table[hash]; p; p = p->next_same_hash)
0516f6fe
SB
1402 if (mode == p->mode
1403 && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
7afe21cc
RK
1404 return p;
1405 }
1406
1407 return 0;
1408}
1409
1410/* Look for an expression equivalent to X and with code CODE.
1411 If one is found, return that expression. */
1412
1413static rtx
7080f735 1414lookup_as_function (rtx x, enum rtx_code code)
7afe21cc 1415{
b3694847 1416 struct table_elt *p
0516f6fe 1417 = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));
770ae6cc 1418
34c73909
R
1419 /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1420 long as we are narrowing. So if we looked in vain for a mode narrower
1421 than word_mode before, look for word_mode now. */
1422 if (p == 0 && code == CONST_INT
1423 && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1424 {
1425 x = copy_rtx (x);
1426 PUT_MODE (x, word_mode);
0516f6fe 1427 p = lookup (x, SAFE_HASH (x, VOIDmode), word_mode);
34c73909
R
1428 }
1429
7afe21cc
RK
1430 if (p == 0)
1431 return 0;
1432
1433 for (p = p->first_same_value; p; p = p->next_same_value)
770ae6cc
RK
1434 if (GET_CODE (p->exp) == code
1435 /* Make sure this is a valid entry in the table. */
0516f6fe 1436 && exp_equiv_p (p->exp, p->exp, 1, false))
770ae6cc 1437 return p->exp;
278a83b2 1438
7afe21cc
RK
1439 return 0;
1440}
1441
1442/* Insert X in the hash table, assuming HASH is its hash code
1443 and CLASSP is an element of the class it should go in
1444 (or 0 if a new class should be made).
1445 It is inserted at the proper position to keep the class in
1446 the order cheapest first.
1447
1448 MODE is the machine-mode of X, or if X is an integer constant
1449 with VOIDmode then MODE is the mode with which X will be used.
1450
1451 For elements of equal cheapness, the most recent one
1452 goes in front, except that the first element in the list
1453 remains first unless a cheaper element is added. The order of
1454 pseudo-registers does not matter, as canon_reg will be called to
830a38ee 1455 find the cheapest when a register is retrieved from the table.
7afe21cc
RK
1456
1457 The in_memory field in the hash table element is set to 0.
1458 The caller must set it nonzero if appropriate.
1459
1460 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1461 and if insert_regs returns a nonzero value
1462 you must then recompute its hash code before calling here.
1463
1464 If necessary, update table showing constant values of quantities. */
1465
630c79be 1466#define CHEAPER(X, Y) \
56ae04af 1467 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
7afe21cc
RK
1468
1469static struct table_elt *
7080f735 1470insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
7afe21cc 1471{
b3694847 1472 struct table_elt *elt;
7afe21cc
RK
1473
1474 /* If X is a register and we haven't made a quantity for it,
1475 something is wrong. */
341c100f 1476 gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));
7afe21cc
RK
1477
1478 /* If X is a hard register, show it is being put in the table. */
f8cfc6aa 1479 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7afe21cc 1480 {
770ae6cc 1481 unsigned int regno = REGNO (x);
66fd46b6 1482 unsigned int endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
770ae6cc 1483 unsigned int i;
7afe21cc
RK
1484
1485 for (i = regno; i < endregno; i++)
770ae6cc 1486 SET_HARD_REG_BIT (hard_regs_in_table, i);
7afe21cc
RK
1487 }
1488
7afe21cc
RK
1489 /* Put an element for X into the right hash bucket. */
1490
9b1549b8
DM
1491 elt = free_element_chain;
1492 if (elt)
770ae6cc 1493 free_element_chain = elt->next_same_hash;
9b1549b8 1494 else
26af0046 1495 elt = xmalloc (sizeof (struct table_elt));
9b1549b8 1496
7afe21cc 1497 elt->exp = x;
db048faf 1498 elt->canon_exp = NULL_RTX;
7afe21cc 1499 elt->cost = COST (x);
630c79be 1500 elt->regcost = approx_reg_cost (x);
7afe21cc
RK
1501 elt->next_same_value = 0;
1502 elt->prev_same_value = 0;
1503 elt->next_same_hash = table[hash];
1504 elt->prev_same_hash = 0;
1505 elt->related_value = 0;
1506 elt->in_memory = 0;
1507 elt->mode = mode;
389fdba0 1508 elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));
7afe21cc
RK
1509
1510 if (table[hash])
1511 table[hash]->prev_same_hash = elt;
1512 table[hash] = elt;
1513
1514 /* Put it into the proper value-class. */
1515 if (classp)
1516 {
1517 classp = classp->first_same_value;
1518 if (CHEAPER (elt, classp))
f9da5064 1519 /* Insert at the head of the class. */
7afe21cc 1520 {
b3694847 1521 struct table_elt *p;
7afe21cc
RK
1522 elt->next_same_value = classp;
1523 classp->prev_same_value = elt;
1524 elt->first_same_value = elt;
1525
1526 for (p = classp; p; p = p->next_same_value)
1527 p->first_same_value = elt;
1528 }
1529 else
1530 {
1531 /* Insert not at head of the class. */
1532 /* Put it after the last element cheaper than X. */
b3694847 1533 struct table_elt *p, *next;
770ae6cc 1534
7afe21cc
RK
1535 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1536 p = next);
770ae6cc 1537
7afe21cc
RK
1538 /* Put it after P and before NEXT. */
1539 elt->next_same_value = next;
1540 if (next)
1541 next->prev_same_value = elt;
770ae6cc 1542
7afe21cc
RK
1543 elt->prev_same_value = p;
1544 p->next_same_value = elt;
1545 elt->first_same_value = classp;
1546 }
1547 }
1548 else
1549 elt->first_same_value = elt;
1550
1551 /* If this is a constant being set equivalent to a register or a register
1552 being set equivalent to a constant, note the constant equivalence.
1553
1554 If this is a constant, it cannot be equivalent to a different constant,
1555 and a constant is the only thing that can be cheaper than a register. So
1556 we know the register is the head of the class (before the constant was
1557 inserted).
1558
1559 If this is a register that is not already known equivalent to a
1560 constant, we must check the entire class.
1561
1562 If this is a register that is already known equivalent to an insn,
1bb98cec 1563 update the qtys `const_insn' to show that `this_insn' is the latest
7afe21cc
RK
1564 insn making that quantity equivalent to the constant. */
1565
f8cfc6aa
JQ
1566 if (elt->is_const && classp && REG_P (classp->exp)
1567 && !REG_P (x))
7afe21cc 1568 {
1bb98cec
DM
1569 int exp_q = REG_QTY (REGNO (classp->exp));
1570 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1571
4de249d9 1572 exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
1bb98cec 1573 exp_ent->const_insn = this_insn;
7afe21cc
RK
1574 }
1575
f8cfc6aa 1576 else if (REG_P (x)
1bb98cec
DM
1577 && classp
1578 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
f353588a 1579 && ! elt->is_const)
7afe21cc 1580 {
b3694847 1581 struct table_elt *p;
7afe21cc
RK
1582
1583 for (p = classp; p != 0; p = p->next_same_value)
1584 {
f8cfc6aa 1585 if (p->is_const && !REG_P (p->exp))
7afe21cc 1586 {
1bb98cec
DM
1587 int x_q = REG_QTY (REGNO (x));
1588 struct qty_table_elem *x_ent = &qty_table[x_q];
1589
770ae6cc 1590 x_ent->const_rtx
4de249d9 1591 = gen_lowpart (GET_MODE (x), p->exp);
1bb98cec 1592 x_ent->const_insn = this_insn;
7afe21cc
RK
1593 break;
1594 }
1595 }
1596 }
1597
f8cfc6aa 1598 else if (REG_P (x)
1bb98cec
DM
1599 && qty_table[REG_QTY (REGNO (x))].const_rtx
1600 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1601 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
7afe21cc
RK
1602
1603 /* If this is a constant with symbolic value,
1604 and it has a term with an explicit integer value,
1605 link it up with related expressions. */
1606 if (GET_CODE (x) == CONST)
1607 {
1608 rtx subexp = get_related_value (x);
2197a88a 1609 unsigned subhash;
7afe21cc
RK
1610 struct table_elt *subelt, *subelt_prev;
1611
1612 if (subexp != 0)
1613 {
1614 /* Get the integer-free subexpression in the hash table. */
0516f6fe 1615 subhash = SAFE_HASH (subexp, mode);
7afe21cc
RK
1616 subelt = lookup (subexp, subhash, mode);
1617 if (subelt == 0)
9714cf43 1618 subelt = insert (subexp, NULL, subhash, mode);
7afe21cc
RK
1619 /* Initialize SUBELT's circular chain if it has none. */
1620 if (subelt->related_value == 0)
1621 subelt->related_value = subelt;
1622 /* Find the element in the circular chain that precedes SUBELT. */
1623 subelt_prev = subelt;
1624 while (subelt_prev->related_value != subelt)
1625 subelt_prev = subelt_prev->related_value;
1626 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1627 This way the element that follows SUBELT is the oldest one. */
1628 elt->related_value = subelt_prev->related_value;
1629 subelt_prev->related_value = elt;
1630 }
1631 }
1632
1633 return elt;
1634}
1635\f
1636/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1637 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1638 the two classes equivalent.
1639
1640 CLASS1 will be the surviving class; CLASS2 should not be used after this
1641 call.
1642
1643 Any invalid entries in CLASS2 will not be copied. */
1644
1645static void
7080f735 1646merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
7afe21cc
RK
1647{
1648 struct table_elt *elt, *next, *new;
1649
1650 /* Ensure we start with the head of the classes. */
1651 class1 = class1->first_same_value;
1652 class2 = class2->first_same_value;
1653
1654 /* If they were already equal, forget it. */
1655 if (class1 == class2)
1656 return;
1657
1658 for (elt = class2; elt; elt = next)
1659 {
770ae6cc 1660 unsigned int hash;
7afe21cc
RK
1661 rtx exp = elt->exp;
1662 enum machine_mode mode = elt->mode;
1663
1664 next = elt->next_same_value;
1665
1666 /* Remove old entry, make a new one in CLASS1's class.
1667 Don't do this for invalid entries as we cannot find their
0f41302f 1668 hash code (it also isn't necessary). */
0516f6fe 1669 if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
7afe21cc 1670 {
a90fc8e0
RH
1671 bool need_rehash = false;
1672
7afe21cc 1673 hash_arg_in_memory = 0;
7afe21cc 1674 hash = HASH (exp, mode);
278a83b2 1675
f8cfc6aa 1676 if (REG_P (exp))
a90fc8e0 1677 {
08a69267 1678 need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
a90fc8e0
RH
1679 delete_reg_equiv (REGNO (exp));
1680 }
278a83b2 1681
7afe21cc
RK
1682 remove_from_table (elt, hash);
1683
a90fc8e0 1684 if (insert_regs (exp, class1, 0) || need_rehash)
8ae2b8f6
JW
1685 {
1686 rehash_using_reg (exp);
1687 hash = HASH (exp, mode);
1688 }
7afe21cc
RK
1689 new = insert (exp, class1, hash, mode);
1690 new->in_memory = hash_arg_in_memory;
7afe21cc
RK
1691 }
1692 }
1693}
1694\f
01e752d3
JL
1695/* Flush the entire hash table. */
1696
1697static void
7080f735 1698flush_hash_table (void)
01e752d3
JL
1699{
1700 int i;
1701 struct table_elt *p;
1702
9b1549b8 1703 for (i = 0; i < HASH_SIZE; i++)
01e752d3
JL
1704 for (p = table[i]; p; p = table[i])
1705 {
1706 /* Note that invalidate can remove elements
1707 after P in the current hash chain. */
f8cfc6aa 1708 if (REG_P (p->exp))
01e752d3
JL
1709 invalidate (p->exp, p->mode);
1710 else
1711 remove_from_table (p, i);
1712 }
1713}
14a774a9 1714\f
2ce6dc2f
JH
1715/* Function called for each rtx to check whether true dependence exist. */
1716struct check_dependence_data
1717{
1718 enum machine_mode mode;
1719 rtx exp;
9ddb66ca 1720 rtx addr;
2ce6dc2f 1721};
be8ac49a 1722
2ce6dc2f 1723static int
7080f735 1724check_dependence (rtx *x, void *data)
2ce6dc2f
JH
1725{
1726 struct check_dependence_data *d = (struct check_dependence_data *) data;
3c0cb5de 1727 if (*x && MEM_P (*x))
9ddb66ca
JH
1728 return canon_true_dependence (d->exp, d->mode, d->addr, *x,
1729 cse_rtx_varies_p);
2ce6dc2f
JH
1730 else
1731 return 0;
1732}
1733\f
14a774a9
RK
/* Remove from the hash table, or mark as invalid, all expressions whose
   values could be altered by storing in X.  X is a register, a subreg, or
   a memory reference with nonvarying address (because, when a memory
   reference with a varying address is stored in, all memory references are
   removed by invalidate_memory so specific invalidation is superfluous).
   FULL_MODE, if not VOIDmode, indicates that this much should be
   invalidated instead of just the amount indicated by the mode of X.  This
   is only used for bitfield stores into memory.

   A nonvarying address may be just a register or just a symbol reference,
   or it may be either of those plus a numeric offset.  */

static void
invalidate (rtx x, enum machine_mode full_mode)
{
  int i;
  struct table_elt *p;
  rtx addr;

  switch (GET_CODE (x))
    {
    case REG:
      {
	/* If X is a register, dependencies on its contents are recorded
	   through the qty number mechanism.  Just change the qty number of
	   the register, mark it as invalid for expressions that refer to it,
	   and remove it itself.  */
	unsigned int regno = REGNO (x);
	unsigned int hash = HASH (x, GET_MODE (x));

	/* Remove REGNO from any quantity list it might be on and indicate
	   that its value might have changed.  If it is a pseudo, remove its
	   entry from the hash table.

	   For a hard register, we do the first two actions above for any
	   additional hard registers corresponding to X.  Then, if any of
	   these registers are in the table, we must remove any REG entries
	   that overlap these registers.  */

	delete_reg_equiv (regno);
	REG_TICK (regno)++;
	SUBREG_TICKED (regno) = -1;

	if (regno >= FIRST_PSEUDO_REGISTER)
	  {
	    /* Because a register can be referenced in more than one mode,
	       we might have to remove more than one table entry.  */
	    struct table_elt *elt;

	    while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
	      remove_from_table (elt, hash);
	  }
	else
	  {
	    /* Hard register: X may span several hard regs.  Tick and
	       de-equiv each one, then sweep the whole table for REG
	       entries whose hard-reg range overlaps [regno, endregno).  */
	    HOST_WIDE_INT in_table
	      = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
	    unsigned int endregno
	      = regno + hard_regno_nregs[regno][GET_MODE (x)];
	    unsigned int tregno, tendregno, rn;
	    struct table_elt *p, *next;

	    CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

	    for (rn = regno + 1; rn < endregno; rn++)
	      {
		in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
		CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
		delete_reg_equiv (rn);
		REG_TICK (rn)++;
		SUBREG_TICKED (rn) = -1;
	      }

	    /* Only pay for the full-table sweep if some affected hard reg
	       was actually recorded in the table.  */
	    if (in_table)
	      for (hash = 0; hash < HASH_SIZE; hash++)
		for (p = table[hash]; p; p = next)
		  {
		    next = p->next_same_hash;

		    if (!REG_P (p->exp)
			|| REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
		      continue;

		    tregno = REGNO (p->exp);
		    tendregno
		      = tregno + hard_regno_nregs[tregno][GET_MODE (p->exp)];
		    /* Standard half-open interval overlap test.  */
		    if (tendregno > regno && tregno < endregno)
		      remove_from_table (p, hash);
		  }
	  }
      }
      return;

    case SUBREG:
      invalidate (SUBREG_REG (x), VOIDmode);
      return;

    case PARALLEL:
      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
	invalidate (XVECEXP (x, 0, i), VOIDmode);
      return;

    case EXPR_LIST:
      /* This is part of a disjoint return value; extract the location in
	 question ignoring the offset.  */
      invalidate (XEXP (x, 0), VOIDmode);
      return;

    case MEM:
      addr = canon_rtx (get_addr (XEXP (x, 0)));
      /* Calculate the canonical version of X here so that
	 true_dependence doesn't generate new RTL for X on each call.  */
      x = canon_rtx (x);

      /* Remove all hash table elements that refer to overlapping pieces of
	 memory.  */
      if (full_mode == VOIDmode)
	full_mode = GET_MODE (x);

      for (i = 0; i < HASH_SIZE; i++)
	{
	  struct table_elt *next;

	  for (p = table[i]; p; p = next)
	    {
	      next = p->next_same_hash;
	      if (p->in_memory)
		{
		  struct check_dependence_data d;

		  /* Just canonicalize the expression once;
		     otherwise each time we call invalidate
		     true_dependence will canonicalize the
		     expression again.  */
		  if (!p->canon_exp)
		    p->canon_exp = canon_rtx (p->exp);
		  d.exp = x;
		  d.addr = addr;
		  d.mode = full_mode;
		  if (for_each_rtx (&p->canon_exp, check_dependence, &d))
		    remove_from_table (p, i);
		}
	    }
	}
      return;

    default:
      gcc_unreachable ();
    }
}
14a774a9 1883\f
7afe21cc
RK
1884/* Remove all expressions that refer to register REGNO,
1885 since they are already invalid, and we are about to
1886 mark that register valid again and don't want the old
1887 expressions to reappear as valid. */
1888
1889static void
7080f735 1890remove_invalid_refs (unsigned int regno)
7afe21cc 1891{
770ae6cc
RK
1892 unsigned int i;
1893 struct table_elt *p, *next;
7afe21cc 1894
9b1549b8 1895 for (i = 0; i < HASH_SIZE; i++)
7afe21cc
RK
1896 for (p = table[i]; p; p = next)
1897 {
1898 next = p->next_same_hash;
f8cfc6aa 1899 if (!REG_P (p->exp)
68252e27 1900 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
7afe21cc
RK
1901 remove_from_table (p, i);
1902 }
1903}
34c73909 1904
ddef6bc7
JJ
/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
   and mode MODE.  Remove table entries that mention REGNO, except
   SUBREGs of REGNO that do not overlap byte range [OFFSET, END]
   (those remain valid because the store did not touch their bytes).  */
static void
remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
			    enum machine_mode mode)
{
  unsigned int i;
  struct table_elt *p, *next;
  /* Last byte (inclusive) of REGNO covered by the store.  */
  unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
	rtx exp = p->exp;
	next = p->next_same_hash;

	/* Keep plain REGs (qty mechanism handles those).  Keep SUBREGs
	   of REGNO whose byte range lies entirely outside [offset, end];
	   any other expression that mentions REGNO is removed.  */
	if (!REG_P (exp)
	    && (GET_CODE (exp) != SUBREG
		|| !REG_P (SUBREG_REG (exp))
		|| REGNO (SUBREG_REG (exp)) != regno
		|| (((SUBREG_BYTE (exp)
		      + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
		    && SUBREG_BYTE (exp) <= end))
	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
	  remove_from_table (p, i);
      }
}
7afe21cc
RK
1932\f
/* Recompute the hash codes of any valid entries in the hash table that
   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.

   This is called when we make a jump equivalence.  */

static void
rehash_using_reg (rtx x)
{
  unsigned int i;
  struct table_elt *p, *next;
  unsigned hash;

  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* If X is not a register or if the register is known not to be in any
     valid entries in the table, we have no work to do.  */

  if (!REG_P (x)
      || REG_IN_TABLE (REGNO (x)) < 0
      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
    return;

  /* Scan all hash chains looking for valid entries that mention X.
     If we find one and it is in the wrong hash chain, move it.  */

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (reg_mentioned_p (x, p->exp)
	    && exp_equiv_p (p->exp, p->exp, 1, false)
	    && i != (hash = SAFE_HASH (p->exp, p->mode)))
	  {
	    /* Unlink P from chain I...  */
	    if (p->next_same_hash)
	      p->next_same_hash->prev_same_hash = p->prev_same_hash;

	    if (p->prev_same_hash)
	      p->prev_same_hash->next_same_hash = p->next_same_hash;
	    else
	      table[i] = p->next_same_hash;

	    /* ...and push it onto the front of its correct chain.  */
	    p->next_same_hash = table[hash];
	    p->prev_same_hash = 0;
	    if (table[hash])
	      table[hash]->prev_same_hash = p;
	    table[hash] = p;
	  }
      }
}
1983\f
7afe21cc
RK
/* Remove from the hash table any expression that is a call-clobbered
   register.  Also update their TICK values.  */

static void
invalidate_for_call (void)
{
  unsigned int regno, endregno;
  unsigned int i;
  unsigned hash;
  struct table_elt *p, *next;
  int in_table = 0;

  /* Go through all the hard registers.  For each that is clobbered in
     a CALL_INSN, remove the register from quantity chains and update
     reg_tick if defined.  Also see if any of these registers is currently
     in the table.  */

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
      {
	delete_reg_equiv (regno);
	if (REG_TICK (regno) >= 0)
	  {
	    REG_TICK (regno)++;
	    SUBREG_TICKED (regno) = -1;
	  }

	in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
      }

  /* In the case where we have no call-clobbered hard registers in the
     table, we are done.  Otherwise, scan the table and remove any
     entry that overlaps a call-clobbered register.  */

  if (in_table)
    for (hash = 0; hash < HASH_SIZE; hash++)
      for (p = table[hash]; p; p = next)
	{
	  next = p->next_same_hash;

	  if (!REG_P (p->exp)
	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
	    continue;

	  /* A multi-word hard reg overlaps the clobber set if any of
	     its constituent regs [regno, endregno) is clobbered.  */
	  regno = REGNO (p->exp);
	  endregno = regno + hard_regno_nregs[regno][GET_MODE (p->exp)];

	  for (i = regno; i < endregno; i++)
	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
	      {
		remove_from_table (p, hash);
		break;
	      }
	}
}
2039\f
/* Given an expression X of type CONST,
   and ELT which is its table entry (or 0 if it
   is not in the hash table),
   return an alternate expression for X as a register plus integer.
   If none can be found, return 0.  */

static rtx
use_related_value (rtx x, struct table_elt *elt)
{
  struct table_elt *relt = 0;
  struct table_elt *p, *q;
  HOST_WIDE_INT offset;

  /* First, is there anything related known?
     If we have a table element, we can tell from that.
     Otherwise, must look it up.  */

  if (elt != 0 && elt->related_value != 0)
    relt = elt;
  else if (elt == 0 && GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      if (subexp != 0)
	relt = lookup (subexp,
		       SAFE_HASH (subexp, GET_MODE (subexp)),
		       GET_MODE (subexp));
    }

  if (relt == 0)
    return 0;

  /* Search all related table entries for one that has an
     equivalent register.  */

  p = relt;
  while (1)
    {
      /* This loop is strange in that it is executed in two different cases.
	 The first is when X is already in the table.  Then it is searching
	 the RELATED_VALUE list of X's class (RELT).  The second case is when
	 X is not in the table.  Then RELT points to a class for the related
	 value.

	 Ensure that, whatever case we are in, that we ignore classes that
	 have the same value as X.  */

      if (rtx_equal_p (x, p->exp))
	q = 0;
      else
	for (q = p->first_same_value; q; q = q->next_same_value)
	  if (REG_P (q->exp))
	    break;

      if (q)
	break;

      p = p->related_value;

      /* We went all the way around, so there is nothing to be found.
	 Alternatively, perhaps RELT was in the table for some other reason
	 and it has no related values recorded.  */
      if (p == relt || p == 0)
	break;
    }

  if (q == 0)
    return 0;

  offset = (get_integer_term (x) - get_integer_term (p->exp));
  /* Note: OFFSET may be 0 if P->xexp and X are related by commutativity.  */
  return plus_constant (q->exp, offset);
}
2112\f
6462bb43
AO
/* Hash a string (possibly NULL) by summing its bytes.  A NULL or empty
   string hashes to 0.  */
static inline unsigned
hash_rtx_string (const char *ps)
{
  const unsigned char *s = (const unsigned char *) ps;
  unsigned sum = 0;

  /* Bytes are accumulated as unsigned char so the result never depends
     on the signedness of plain char.  */
  if (s != 0)
    for (; *s != '\0'; s++)
      sum += *s;

  return sum;
}
2126
7afe21cc
RK
/* Hash an rtx.  We are careful to make sure the value is never negative.
   Equivalent registers hash identically.
   MODE is used in hashing for CONST_INTs only;
   otherwise the mode of X is used.

   Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.

   If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
   a MEM rtx which does not have the RTX_UNCHANGING_P bit set.

   Note that cse_insn knows that the hash code of a MEM expression
   is just (int) MEM plus the hash code of the address.  */

unsigned
hash_rtx (rtx x, enum machine_mode mode, int *do_not_record_p,
	  int *hash_arg_in_memory_p, bool have_reg_qty)
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  const char *fmt;

  /* Used to turn recursion into iteration.  We can't rely on GCC's
     tail-recursion elimination since we need to keep accumulating values
     in HASH.  */
 repeat:
  if (x == 0)
    return hash;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	unsigned int regno = REGNO (x);

	if (!reload_completed)
	  {
	    /* On some machines, we can't record any non-fixed hard register,
	       because extending its life will cause reload problems.  We
	       consider ap, fp, sp, gp to be fixed for this purpose.

	       We also consider CCmode registers to be fixed for this
	       purpose; failure to do so leads to failure to simplify
	       0<100 type of conditionals.

	       On all machines, we can't record any global registers.
	       Nor should we record any register that is in a small
	       class, as defined by CLASS_LIKELY_SPILLED_P.  */
	    bool record;

	    if (regno >= FIRST_PSEUDO_REGISTER)
	      record = true;
	    else if (x == frame_pointer_rtx
		     || x == hard_frame_pointer_rtx
		     || x == arg_pointer_rtx
		     || x == stack_pointer_rtx
		     || x == pic_offset_table_rtx)
	      record = true;
	    else if (global_regs[regno])
	      record = false;
	    else if (fixed_regs[regno])
	      record = true;
	    else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
	      record = true;
	    else if (SMALL_REGISTER_CLASSES)
	      record = false;
	    else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
	      record = false;
	    else
	      record = true;

	    if (!record)
	      {
		*do_not_record_p = 1;
		return 0;
	      }
	  }

	/* Hash on the qty number when available so that equivalent
	   registers (same quantity) hash identically.  */
	hash += ((unsigned int) REG << 7);
	hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
	return hash;
      }

    /* We handle SUBREG of a REG specially because the underlying
       reg changes its hash value with every value change; we don't
       want to have to forget unrelated subregs when one subreg changes.  */
    case SUBREG:
      {
	if (REG_P (SUBREG_REG (x)))
	  {
	    hash += (((unsigned int) SUBREG << 7)
		     + REGNO (SUBREG_REG (x))
		     + (SUBREG_BYTE (x) / UNITS_PER_WORD));
	    return hash;
	  }
	break;
      }

    case CONST_INT:
      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
	       + (unsigned int) INTVAL (x));
      return hash;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      else
	hash += ((unsigned int) CONST_DOUBLE_LOW (x)
		 + (unsigned int) CONST_DOUBLE_HIGH (x));
      return hash;

    case CONST_VECTOR:
      {
	int units;
	rtx elt;

	units = CONST_VECTOR_NUNITS (x);

	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ELT (x, i);
	    hash += hash_rtx (elt, GET_MODE (elt), do_not_record_p,
			      hash_arg_in_memory_p, have_reg_qty);
	  }

	return hash;
      }

    /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
	 differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
	       + CODE_LABEL_NUMBER (XEXP (x, 0)));
      return hash;

    case SYMBOL_REF:
      {
	/* Don't hash on the symbol's address to avoid bootstrap differences.
	   Different hash values may cause expressions to be recorded in
	   different orders and thus different registers to be used in the
	   final assembler.  This also avoids differences in the dump files
	   between various stages.  */
	unsigned int h = 0;
	const unsigned char *p = (const unsigned char *) XSTR (x, 0);

	while (*p)
	  h += (h << 7) + *p++; /* ??? revisit */

	hash += ((unsigned int) SYMBOL_REF << 7) + h;
	return hash;
      }

    case MEM:
      /* We don't record if marked volatile or if BLKmode since we don't
	 know the size of the move.  */
      if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
	{
	  *do_not_record_p = 1;
	  return 0;
	}
      if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
	*hash_arg_in_memory_p = 1;

      /* Now that we have already found this special case,
	 might as well speed it up as much as possible.  */
      hash += (unsigned) MEM;
      x = XEXP (x, 0);
      goto repeat;

    case USE:
      /* A USE that mentions non-volatile memory needs special
	 handling since the MEM may be BLKmode which normally
	 prevents an entry from being made.  Pure calls are
	 marked by a USE which mentions BLKmode memory.
	 See calls.c:emit_call_1.  */
      if (MEM_P (XEXP (x, 0))
	  && ! MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  hash += (unsigned) USE;
	  x = XEXP (x, 0);

	  if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
	    *hash_arg_in_memory_p = 1;

	  /* Now that we have already found this special case,
	     might as well speed it up as much as possible.  */
	  hash += (unsigned) MEM;
	  x = XEXP (x, 0);
	  goto repeat;
	}
      break;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      /* Side-effecting or unpredictable codes are never recorded.  */
      *do_not_record_p = 1;
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	{
	  *do_not_record_p = 1;
	  return 0;
	}
      else
	{
	  /* We don't want to take the filename and line into account.  */
	  hash += (unsigned) code + (unsigned) GET_MODE (x)
	    + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
	    + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);

	  if (ASM_OPERANDS_INPUT_LENGTH (x))
	    {
	      /* Hash inputs 1..n recursively; input 0 is handled by the
		 iteration below to avoid one level of recursion.  */
	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
		{
		  hash += (hash_rtx (ASM_OPERANDS_INPUT (x, i),
				     GET_MODE (ASM_OPERANDS_INPUT (x, i)),
				     do_not_record_p, hash_arg_in_memory_p,
				     have_reg_qty)
			   + hash_rtx_string
			   (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
		}

	      hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
	      x = ASM_OPERANDS_INPUT (x, 0);
	      mode = GET_MODE (x);
	      goto repeat;
	    }

	  return hash;
	}
      break;

    default:
      break;
    }

  /* Generic case: fold in the code and mode, then every operand
     according to the rtx format string.  */
  i = GET_RTX_LENGTH (code) - 1;
  hash += (unsigned) code + (unsigned) GET_MODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }

	  hash += hash_rtx (XEXP (x, i), 0, do_not_record_p,
			    hash_arg_in_memory_p, have_reg_qty);
	  break;

	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    hash += hash_rtx (XVECEXP (x, i, j), 0, do_not_record_p,
			      hash_arg_in_memory_p, have_reg_qty);
	  break;

	case 's':
	  hash += hash_rtx_string (XSTR (x, i));
	  break;

	case 'i':
	  hash += (unsigned int) XINT (x, i);
	  break;

	case '0': case 't':
	  /* Unused.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  return hash;
}
2423
0516f6fe
SB
/* Hash an rtx X for cse via hash_rtx.
   Stores 1 in do_not_record if any subexpression is volatile.
   Stores 1 in hash_arg_in_memory if X contains a mem rtx which
   does not have the RTX_UNCHANGING_P bit set.
   Unlike safe_hash below, this updates the file-scope flags as a
   side effect.  */

static inline unsigned
canon_hash (rtx x, enum machine_mode mode)
{
  return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
}
2434
2435/* Like canon_hash but with no side effects, i.e. do_not_record
2436 and hash_arg_in_memory are not changed. */
7afe21cc 2437
0516f6fe 2438static inline unsigned
7080f735 2439safe_hash (rtx x, enum machine_mode mode)
7afe21cc 2440{
0516f6fe
SB
2441 int dummy_do_not_record;
2442 return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
7afe21cc
RK
2443}
2444\f
/* Return 1 iff X and Y would canonicalize into the same thing,
   without actually constructing the canonicalization of either one.
   If VALIDATE is nonzero,
   we assume X is an expression being processed from the rtl
   and Y was found in the hash table.  We check register refs
   in Y for being marked as valid.

   If FOR_GCSE is true, we compare X and Y for equivalence for GCSE.  */

int
exp_equiv_p (rtx x, rtx y, int validate, bool for_gcse)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  /* Note: it is incorrect to assume an expression is equivalent to itself
     if VALIDATE is nonzero.  */
  if (x == y && !validate)
    return 1;

  if (x == 0 || y == 0)
    return x == y;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case CONST_INT:
      /* These are shared rtx objects; pointer identity suffices.  */
      return x == y;

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      /* Symbol name strings are shared, so pointer compare works.  */
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      if (for_gcse)
	return REGNO (x) == REGNO (y);
      else
	{
	  unsigned int regno = REGNO (y);
	  unsigned int i;
	  unsigned int endregno
	    = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		       : hard_regno_nregs[regno][GET_MODE (y)]);

	  /* If the quantities are not the same, the expressions are not
	     equivalent.  If there are and we are not to validate, they
	     are equivalent.  Otherwise, ensure all regs are up-to-date.  */

	  if (REG_QTY (REGNO (x)) != REG_QTY (regno))
	    return 0;

	  if (! validate)
	    return 1;

	  for (i = regno; i < endregno; i++)
	    if (REG_IN_TABLE (i) != REG_TICK (i))
	      return 0;

	  return 1;
	}

    case MEM:
      if (for_gcse)
	{
	  /* Can't merge two expressions in different alias sets, since we
	     can decide that the expression is transparent in a block when
	     it isn't, due to it being set with the different alias set.  */
	  if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
	    return 0;

	  /* A volatile mem should not be considered equivalent to any
	     other.  */
	  if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	    return 0;
	}
      /* Fall through to the generic operand comparison below.  */
      break;

    /* For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
			    validate, for_gcse)
	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
			       validate, for_gcse))
	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
			       validate, for_gcse)
		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
				  validate, for_gcse)));

    case ASM_OPERANDS:
      /* We don't use the generic code below because we want to
	 disregard filename and line numbers.  */

      /* A volatile asm isn't equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      if (GET_MODE (x) != GET_MODE (y)
	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
	return 0;

      if (ASM_OPERANDS_INPUT_LENGTH (x))
	{
	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	    if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
			       ASM_OPERANDS_INPUT (y, i),
			       validate, for_gcse)
		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
	      return 0;
	}

      return 1;

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
			     validate, for_gcse))
	    return 0;
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
			       validate, for_gcse))
	      return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case '0':
	case 't':
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  return 1;
}
2633\f
9ae8ffe7
JL
/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.  */

static int
cse_rtx_varies_p (rtx x, int from_alias)
{
  /* We need not check for X and the equivalence class being of the same
     mode because if X is equivalent to a constant in some mode, it
     doesn't vary in any mode.  */

  /* Case 1: X is a register currently known equivalent to a constant.  */
  if (REG_P (x)
      && REGNO_QTY_VALID_P (REGNO (x)))
    {
      int x_q = REG_QTY (REGNO (x));
      struct qty_table_elem *x_ent = &qty_table[x_q];

      if (GET_MODE (x) == x_ent->mode
	  && x_ent->const_rtx != NULL_RTX)
	return 0;
    }

  /* Case 2: (plus (reg) (const_int)) where the reg is known constant.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && REG_P (XEXP (x, 0))
      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
    {
      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
      struct qty_table_elem *x0_ent = &qty_table[x0_q];

      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
	  && x0_ent->const_rtx != NULL_RTX)
	return 0;
    }

  /* This can happen as the result of virtual register instantiation, if
     the initial constant is too large to be a valid address.  This gives
     us a three instruction sequence, load large offset into a register,
     load fp minus a constant into a register, then a MEM which is the
     sum of the two `constant' registers.  */
  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && REG_P (XEXP (x, 1))
      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
    {
      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
      int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
      struct qty_table_elem *x0_ent = &qty_table[x0_q];
      struct qty_table_elem *x1_ent = &qty_table[x1_q];

      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
	  && x0_ent->const_rtx != NULL_RTX
	  && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
	  && x1_ent->const_rtx != NULL_RTX)
	return 0;
    }

  /* Otherwise fall back to the generic rtl-level answer.  */
  return rtx_varies_p (x, from_alias);
}
2694\f
eef3c949
RS
2695/* Subroutine of canon_reg. Pass *XLOC through canon_reg, and validate
2696 the result if necessary. INSN is as for canon_reg. */
2697
2698static void
2699validate_canon_reg (rtx *xloc, rtx insn)
2700{
2701 rtx new = canon_reg (*xloc, insn);
2702 int insn_code;
2703
2704 /* If replacing pseudo with hard reg or vice versa, ensure the
2705 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2706 if (insn != 0 && new != 0
2707 && REG_P (new) && REG_P (*xloc)
2708 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2709 != (REGNO (*xloc) < FIRST_PSEUDO_REGISTER))
2710 || GET_MODE (new) != GET_MODE (*xloc)
2711 || (insn_code = recog_memoized (insn)) < 0
2712 || insn_data[insn_code].n_dups > 0))
2713 validate_change (insn, xloc, new, 1);
2714 else
2715 *xloc = new;
2716}
2717
7afe21cc
RK
/* Canonicalize an expression:
   replace each register reference inside it
   with the "oldest" equivalent register.

   If INSN is nonzero and we are replacing a pseudo with a hard register
   or vice versa, validate_change is used to ensure that INSN remains valid
   after we make our substitution.  The calls are made with IN_GROUP nonzero
   so apply_change_group must be called upon the outermost return from this
   function (unless INSN is zero).  The result of apply_change_group can
   generally be discarded since the changes we are making are optional.  */

static rtx
canon_reg (rtx x, rtx insn)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return x;

  code = GET_CODE (x);
  switch (code)
    {
    /* These codes contain no register references that could be
       canonicalized; return them unchanged.  */
    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return x;

    case REG:
      {
	int first;
	int q;
	struct qty_table_elem *ent;

	/* Never replace a hard reg, because hard regs can appear
	   in more than one machine mode, and we must preserve the mode
	   of each occurrence.  Also, some hard regs appear in
	   MEMs that are shared and mustn't be altered.  Don't try to
	   replace any reg that maps to a reg of class NO_REGS.  */
	if (REGNO (x) < FIRST_PSEUDO_REGISTER
	    || ! REGNO_QTY_VALID_P (REGNO (x)))
	  return x;

	/* Map the register to its quantity and return the oldest
	   (first) register recorded for that quantity.  Hard-reg
	   replacements of class NO_REGS are refused, per above.  */
	q = REG_QTY (REGNO (x));
	ent = &qty_table[q];
	first = ent->first_reg;
	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
		: REGNO_REG_CLASS (first) == NO_REGS ? x
		: gen_rtx_REG (ent->mode, first));
      }

    default:
      break;
    }

  /* For any other code, canonicalize each rtx sub-expression in place.
     validate_canon_reg recurses back into this function and decides
     whether each replacement must go through validate_change.  Vector
     ('E') operands are walked element by element.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      if (fmt[i] == 'e')
	validate_canon_reg (&XEXP (x, i), insn);
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  validate_canon_reg (&XVECEXP (x, i, j), insn);
    }

  return x;
}
2795\f
a2cabb29 2796/* LOC is a location within INSN that is an operand address (the contents of
7afe21cc
RK
2797 a MEM). Find the best equivalent address to use that is valid for this
2798 insn.
2799
2800 On most CISC machines, complicated address modes are costly, and rtx_cost
2801 is a good approximation for that cost. However, most RISC machines have
2802 only a few (usually only one) memory reference formats. If an address is
2803 valid at all, it is often just as cheap as any other address. Hence, for
e37135f7
RH
2804 RISC machines, we use `address_cost' to compare the costs of various
2805 addresses. For two addresses of equal cost, choose the one with the
2806 highest `rtx_cost' value as that has the potential of eliminating the
2807 most insns. For equal costs, we choose the first in the equivalence
2808 class. Note that we ignore the fact that pseudo registers are cheaper than
2809 hard registers here because we would also prefer the pseudo registers. */
7afe21cc 2810
6cd4575e 2811static void
7080f735 2812find_best_addr (rtx insn, rtx *loc, enum machine_mode mode)
7afe21cc 2813{
7a87758d 2814 struct table_elt *elt;
7afe21cc 2815 rtx addr = *loc;
7a87758d 2816 struct table_elt *p;
7afe21cc
RK
2817 int found_better = 1;
2818 int save_do_not_record = do_not_record;
2819 int save_hash_arg_in_memory = hash_arg_in_memory;
7afe21cc
RK
2820 int addr_volatile;
2821 int regno;
2197a88a 2822 unsigned hash;
7afe21cc
RK
2823
2824 /* Do not try to replace constant addresses or addresses of local and
2825 argument slots. These MEM expressions are made only once and inserted
2826 in many instructions, as well as being used to control symbol table
2827 output. It is not safe to clobber them.
2828
2829 There are some uncommon cases where the address is already in a register
2830 for some reason, but we cannot take advantage of that because we have
2831 no easy way to unshare the MEM. In addition, looking up all stack
2832 addresses is costly. */
2833 if ((GET_CODE (addr) == PLUS
f8cfc6aa 2834 && REG_P (XEXP (addr, 0))
7afe21cc
RK
2835 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2836 && (regno = REGNO (XEXP (addr, 0)),
8bc169f2
DE
2837 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2838 || regno == ARG_POINTER_REGNUM))
f8cfc6aa 2839 || (REG_P (addr)
8bc169f2
DE
2840 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2841 || regno == HARD_FRAME_POINTER_REGNUM
2842 || regno == ARG_POINTER_REGNUM))
7afe21cc
RK
2843 || CONSTANT_ADDRESS_P (addr))
2844 return;
2845
2846 /* If this address is not simply a register, try to fold it. This will
2847 sometimes simplify the expression. Many simplifications
2848 will not be valid, but some, usually applying the associative rule, will
2849 be valid and produce better code. */
f8cfc6aa 2850 if (!REG_P (addr))
8c87f107 2851 {
6c667859
AB
2852 rtx folded = fold_rtx (addr, NULL_RTX);
2853 if (folded != addr)
2854 {
2855 int addr_folded_cost = address_cost (folded, mode);
2856 int addr_cost = address_cost (addr, mode);
2857
2858 if ((addr_folded_cost < addr_cost
2859 || (addr_folded_cost == addr_cost
2860 /* ??? The rtx_cost comparison is left over from an older
2861 version of this code. It is probably no longer helpful.*/
2862 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2863 || approx_reg_cost (folded) < approx_reg_cost (addr))))
2864 && validate_change (insn, loc, folded, 0))
2865 addr = folded;
2866 }
8c87f107 2867 }
278a83b2 2868
42495ca0
RK
2869 /* If this address is not in the hash table, we can't look for equivalences
2870 of the whole address. Also, ignore if volatile. */
2871
7afe21cc 2872 do_not_record = 0;
2197a88a 2873 hash = HASH (addr, Pmode);
7afe21cc
RK
2874 addr_volatile = do_not_record;
2875 do_not_record = save_do_not_record;
2876 hash_arg_in_memory = save_hash_arg_in_memory;
7afe21cc
RK
2877
2878 if (addr_volatile)
2879 return;
2880
2197a88a 2881 elt = lookup (addr, hash, Pmode);
7afe21cc 2882
42495ca0
RK
2883 if (elt)
2884 {
2885 /* We need to find the best (under the criteria documented above) entry
2886 in the class that is valid. We use the `flag' field to indicate
2887 choices that were invalid and iterate until we can't find a better
2888 one that hasn't already been tried. */
7afe21cc 2889
42495ca0
RK
2890 for (p = elt->first_same_value; p; p = p->next_same_value)
2891 p->flag = 0;
7afe21cc 2892
42495ca0
RK
2893 while (found_better)
2894 {
01329426 2895 int best_addr_cost = address_cost (*loc, mode);
42495ca0 2896 int best_rtx_cost = (elt->cost + 1) >> 1;
01329426 2897 int exp_cost;
278a83b2 2898 struct table_elt *best_elt = elt;
42495ca0
RK
2899
2900 found_better = 0;
2901 for (p = elt->first_same_value; p; p = p->next_same_value)
2f541799 2902 if (! p->flag)
42495ca0 2903 {
f8cfc6aa 2904 if ((REG_P (p->exp)
0516f6fe 2905 || exp_equiv_p (p->exp, p->exp, 1, false))
01329426
JH
2906 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2907 || (exp_cost == best_addr_cost
05bd3d41 2908 && ((p->cost + 1) >> 1) > best_rtx_cost)))
2f541799
MM
2909 {
2910 found_better = 1;
01329426 2911 best_addr_cost = exp_cost;
2f541799
MM
2912 best_rtx_cost = (p->cost + 1) >> 1;
2913 best_elt = p;
2914 }
42495ca0 2915 }
7afe21cc 2916
42495ca0
RK
2917 if (found_better)
2918 {
2919 if (validate_change (insn, loc,
906c4e36
RK
2920 canon_reg (copy_rtx (best_elt->exp),
2921 NULL_RTX), 0))
42495ca0
RK
2922 return;
2923 else
2924 best_elt->flag = 1;
2925 }
2926 }
2927 }
7afe21cc 2928
42495ca0
RK
2929 /* If the address is a binary operation with the first operand a register
2930 and the second a constant, do the same as above, but looking for
2931 equivalences of the register. Then try to simplify before checking for
2932 the best address to use. This catches a few cases: First is when we
2933 have REG+const and the register is another REG+const. We can often merge
2934 the constants and eliminate one insn and one register. It may also be
2935 that a machine has a cheap REG+REG+const. Finally, this improves the
2936 code on the Alpha for unaligned byte stores. */
2937
2938 if (flag_expensive_optimizations
ec8e098d 2939 && ARITHMETIC_P (*loc)
f8cfc6aa 2940 && REG_P (XEXP (*loc, 0)))
7afe21cc 2941 {
7b9c108f 2942 rtx op1 = XEXP (*loc, 1);
42495ca0
RK
2943
2944 do_not_record = 0;
2197a88a 2945 hash = HASH (XEXP (*loc, 0), Pmode);
42495ca0
RK
2946 do_not_record = save_do_not_record;
2947 hash_arg_in_memory = save_hash_arg_in_memory;
42495ca0 2948
2197a88a 2949 elt = lookup (XEXP (*loc, 0), hash, Pmode);
42495ca0
RK
2950 if (elt == 0)
2951 return;
2952
2953 /* We need to find the best (under the criteria documented above) entry
2954 in the class that is valid. We use the `flag' field to indicate
2955 choices that were invalid and iterate until we can't find a better
2956 one that hasn't already been tried. */
7afe21cc 2957
7afe21cc 2958 for (p = elt->first_same_value; p; p = p->next_same_value)
42495ca0 2959 p->flag = 0;
7afe21cc 2960
42495ca0 2961 while (found_better)
7afe21cc 2962 {
01329426 2963 int best_addr_cost = address_cost (*loc, mode);
42495ca0 2964 int best_rtx_cost = (COST (*loc) + 1) >> 1;
278a83b2 2965 struct table_elt *best_elt = elt;
42495ca0 2966 rtx best_rtx = *loc;
f6516aee
JW
2967 int count;
2968
2969 /* This is at worst case an O(n^2) algorithm, so limit our search
2970 to the first 32 elements on the list. This avoids trouble
2971 compiling code with very long basic blocks that can easily
0cedb36c
JL
2972 call simplify_gen_binary so many times that we run out of
2973 memory. */
96b0e481 2974
0cedb36c
JL
2975 found_better = 0;
2976 for (p = elt->first_same_value, count = 0;
2977 p && count < 32;
2978 p = p->next_same_value, count++)
2979 if (! p->flag
f8cfc6aa 2980 && (REG_P (p->exp)
0516f6fe 2981 || exp_equiv_p (p->exp, p->exp, 1, false)))
0cedb36c
JL
2982 {
2983 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
7b9c108f 2984 p->exp, op1);
01329426 2985 int new_cost;
b7ca416f
AP
2986
2987 /* Get the canonical version of the address so we can accept
2988 more. */
2989 new = canon_for_address (new);
2990
01329426 2991 new_cost = address_cost (new, mode);
96b0e481 2992
01329426
JH
2993 if (new_cost < best_addr_cost
2994 || (new_cost == best_addr_cost
2995 && (COST (new) + 1) >> 1 > best_rtx_cost))
0cedb36c
JL
2996 {
2997 found_better = 1;
01329426 2998 best_addr_cost = new_cost;
0cedb36c
JL
2999 best_rtx_cost = (COST (new) + 1) >> 1;
3000 best_elt = p;
3001 best_rtx = new;
3002 }
3003 }
96b0e481 3004
0cedb36c
JL
3005 if (found_better)
3006 {
3007 if (validate_change (insn, loc,
3008 canon_reg (copy_rtx (best_rtx),
3009 NULL_RTX), 0))
3010 return;
3011 else
3012 best_elt->flag = 1;
3013 }
3014 }
3015 }
96b0e481
RK
3016}
3017\f
bca05d20
RK
/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
   operation (EQ, NE, GT, etc.), follow it back through the hash table and
   what values are being compared.

   *PARG1 and *PARG2 are updated to contain the rtx representing the values
   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
   compared to produce cc0.

   The return value is the comparison operator and is either the code of
   A or the code corresponding to the inverse of the comparison.  */

static enum rtx_code
find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
		      enum machine_mode *pmode1, enum machine_mode *pmode2)
{
  rtx arg1, arg2;

  arg1 = *parg1, arg2 = *parg2;

  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  Each
     iteration either rewrites (ARG1, ARG2, CODE) in terms of an
     equivalent comparison found in the hash table, or breaks out when
     no further useful equivalence exists.  */

  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
    {
      /* Set nonzero when we find something of interest.  */
      rtx x = 0;
      int reverse_code = 0;
      struct table_elt *p = 0;

      /* If arg1 is a COMPARE, extract the comparison arguments from it.
	 On machines with CC0, this is the only case that can occur, since
	 fold_rtx will return the COMPARE or item being compared with zero
	 when given CC0.  */

      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
	x = arg1;

      /* If ARG1 is a comparison operator and CODE is testing for
	 STORE_FLAG_VALUE, get the inner arguments.  */

      else if (COMPARISON_P (arg1))
	{
#ifdef FLOAT_STORE_FLAG_VALUE
	  REAL_VALUE_TYPE fsfv;
#endif

	  /* The (fsfv = ..., REAL_VALUE_NEGATIVE (fsfv)) comma
	     expressions below capture FLOAT_STORE_FLAG_VALUE into a
	     temporary before testing its sign.  */
	  if (code == NE
	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
		  && code == LT && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
		  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
		      REAL_VALUE_NEGATIVE (fsfv)))
#endif
	      )
	    x = arg1;
	  else if (code == EQ
		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
		       && code == GE && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			   REAL_VALUE_NEGATIVE (fsfv)))
#endif
		   )
	    x = arg1, reverse_code = 1;
	}

      /* ??? We could also check for

	 (ne (and (eq (...) (const_int 1))) (const_int 0))

	 and related forms, but let's wait until we see them occurring.  */

      if (x == 0)
	/* Look up ARG1 in the hash table and see if it has an equivalence
	   that lets us see what is being compared.  */
	p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
      if (p)
	{
	  p = p->first_same_value;

	  /* If what we compare is already known to be constant, that is as
	     good as it gets.
	     We need to break the loop in this case, because otherwise we
	     can have an infinite loop when looking at a reg that is known
	     to be a constant which is the same as a comparison of a reg
	     against zero which appears later in the insn stream, which in
	     turn is constant and the same as the comparison of the first reg
	     against zero...  */
	  if (p->is_const)
	    break;
	}

      /* Walk the equivalence class of ARG1 looking for a COMPARE or a
	 comparison operator that reveals what is actually being
	 compared.  */
      for (; p; p = p->next_same_value)
	{
	  enum machine_mode inner_mode = GET_MODE (p->exp);
#ifdef FLOAT_STORE_FLAG_VALUE
	  REAL_VALUE_TYPE fsfv;
#endif

	  /* If the entry isn't valid, skip it.  */
	  if (! exp_equiv_p (p->exp, p->exp, 1, false))
	    continue;

	  if (GET_CODE (p->exp) == COMPARE
	      /* Another possibility is that this machine has a compare insn
		 that includes the comparison code.  In that case, ARG1 would
		 be equivalent to a comparison operation that would set ARG1 to
		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
		 ORIG_CODE is the actual comparison being done; if it is an EQ,
		 we must reverse ORIG_CODE.  On machine with a negative value
		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
	      || ((code == NE
		   || (code == LT
		       && GET_MODE_CLASS (inner_mode) == MODE_INT
		       && (GET_MODE_BITSIZE (inner_mode)
			   <= HOST_BITS_PER_WIDE_INT)
		       && (STORE_FLAG_VALUE
			   & ((HOST_WIDE_INT) 1
			      << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
		   || (code == LT
		       && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			   REAL_VALUE_NEGATIVE (fsfv)))
#endif
		   )
		  && COMPARISON_P (p->exp)))
	    {
	      x = p->exp;
	      break;
	    }
	  else if ((code == EQ
		    || (code == GE
			&& GET_MODE_CLASS (inner_mode) == MODE_INT
			&& (GET_MODE_BITSIZE (inner_mode)
			    <= HOST_BITS_PER_WIDE_INT)
			&& (STORE_FLAG_VALUE
			    & ((HOST_WIDE_INT) 1
			       << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
		    || (code == GE
			&& GET_MODE_CLASS (inner_mode) == MODE_FLOAT
			&& (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			    REAL_VALUE_NEGATIVE (fsfv)))
#endif
		    )
		   && COMPARISON_P (p->exp))
	    {
	      reverse_code = 1;
	      x = p->exp;
	      break;
	    }

	  /* If this is a non-trapping address, e.g. fp + constant, the
	     equivalent is a better operand since it may let us predict
	     the value of the comparison.  */
	  else if (!rtx_addr_can_trap_p (p->exp))
	    {
	      arg1 = p->exp;
	      continue;
	    }
	}

      /* If we didn't find a useful equivalence for ARG1, we are done.
	 Otherwise, set up for the next iteration.  */
      if (x == 0)
	break;

      /* If we need to reverse the comparison, make sure that that is
	 possible -- we can't necessarily infer the value of GE from LT
	 with floating-point operands.  */
      if (reverse_code)
	{
	  enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
	  if (reversed == UNKNOWN)
	    break;
	  else
	    code = reversed;
	}
      else if (COMPARISON_P (x))
	code = GET_CODE (x);
      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
    }

  /* Return our results.  Return the modes from before fold_rtx
     because fold_rtx might produce const_int, and then it's too late.  */
  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);

  return code;
}
3211\f
3212/* If X is a nontrivial arithmetic operation on an argument
3213 for which a constant value can be determined, return
3214 the result of operating on that value, as a constant.
3215 Otherwise, return X, possibly with one or more operands
3216 modified by recursive calls to this function.
3217
e7bb59fa
RK
3218 If X is a register whose contents are known, we do NOT
3219 return those contents here. equiv_constant is called to
3220 perform that task.
7afe21cc
RK
3221
3222 INSN is the insn that we may be modifying. If it is 0, make a copy
3223 of X before modifying it. */
3224
3225static rtx
7080f735 3226fold_rtx (rtx x, rtx insn)
7afe21cc 3227{
b3694847
SS
3228 enum rtx_code code;
3229 enum machine_mode mode;
3230 const char *fmt;
3231 int i;
7afe21cc
RK
3232 rtx new = 0;
3233 int copied = 0;
3234 int must_swap = 0;
3235
3236 /* Folded equivalents of first two operands of X. */
3237 rtx folded_arg0;
3238 rtx folded_arg1;
3239
3240 /* Constant equivalents of first three operands of X;
3241 0 when no such equivalent is known. */
3242 rtx const_arg0;
3243 rtx const_arg1;
3244 rtx const_arg2;
3245
3246 /* The mode of the first operand of X. We need this for sign and zero
3247 extends. */
3248 enum machine_mode mode_arg0;
3249
3250 if (x == 0)
3251 return x;
3252
3253 mode = GET_MODE (x);
3254 code = GET_CODE (x);
3255 switch (code)
3256 {
3257 case CONST:
3258 case CONST_INT:
3259 case CONST_DOUBLE:
69ef87e2 3260 case CONST_VECTOR:
7afe21cc
RK
3261 case SYMBOL_REF:
3262 case LABEL_REF:
3263 case REG:
01aa1d43 3264 case PC:
7afe21cc
RK
3265 /* No use simplifying an EXPR_LIST
3266 since they are used only for lists of args
3267 in a function call's REG_EQUAL note. */
3268 case EXPR_LIST:
3269 return x;
3270
3271#ifdef HAVE_cc0
3272 case CC0:
3273 return prev_insn_cc0;
3274#endif
3275
7afe21cc 3276 case SUBREG:
c610adec
RK
3277 /* See if we previously assigned a constant value to this SUBREG. */
3278 if ((new = lookup_as_function (x, CONST_INT)) != 0
3279 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
7afe21cc
RK
3280 return new;
3281
4b980e20
RK
3282 /* If this is a paradoxical SUBREG, we have no idea what value the
3283 extra bits would have. However, if the operand is equivalent
3284 to a SUBREG whose operand is the same as our mode, and all the
3285 modes are within a word, we can just use the inner operand
31c85c78
RK
3286 because these SUBREGs just say how to treat the register.
3287
3288 Similarly if we find an integer constant. */
4b980e20 3289
e5f6a288 3290 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4b980e20
RK
3291 {
3292 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3293 struct table_elt *elt;
3294
3295 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3296 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3297 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3298 imode)) != 0)
ddc356e8 3299 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
31c85c78
RK
3300 {
3301 if (CONSTANT_P (elt->exp)
3302 && GET_MODE (elt->exp) == VOIDmode)
3303 return elt->exp;
3304
4b980e20
RK
3305 if (GET_CODE (elt->exp) == SUBREG
3306 && GET_MODE (SUBREG_REG (elt->exp)) == mode
0516f6fe 3307 && exp_equiv_p (elt->exp, elt->exp, 1, false))
4b980e20 3308 return copy_rtx (SUBREG_REG (elt->exp));
1bb98cec 3309 }
4b980e20
RK
3310
3311 return x;
3312 }
e5f6a288 3313
7afe21cc
RK
3314 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3315 We might be able to if the SUBREG is extracting a single word in an
3316 integral mode or extracting the low part. */
3317
3318 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3319 const_arg0 = equiv_constant (folded_arg0);
3320 if (const_arg0)
3321 folded_arg0 = const_arg0;
3322
3323 if (folded_arg0 != SUBREG_REG (x))
3324 {
949c5d62
JH
3325 new = simplify_subreg (mode, folded_arg0,
3326 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7afe21cc
RK
3327 if (new)
3328 return new;
3329 }
e5f6a288 3330
f8cfc6aa 3331 if (REG_P (folded_arg0)
4c442790 3332 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0)))
e5f6a288
RK
3333 {
3334 struct table_elt *elt;
3335
e5f6a288
RK
3336 elt = lookup (folded_arg0,
3337 HASH (folded_arg0, GET_MODE (folded_arg0)),
3338 GET_MODE (folded_arg0));
3339
3340 if (elt)
3341 elt = elt->first_same_value;
3342
4c442790
PB
3343 if (subreg_lowpart_p (x))
3344 /* If this is a narrowing SUBREG and our operand is a REG, see
3345 if we can find an equivalence for REG that is an arithmetic
3346 operation in a wider mode where both operands are paradoxical
3347 SUBREGs from objects of our result mode. In that case, we
3348 couldn-t report an equivalent value for that operation, since we
3349 don't know what the extra bits will be. But we can find an
3350 equivalence for this SUBREG by folding that operation in the
3351 narrow mode. This allows us to fold arithmetic in narrow modes
3352 when the machine only supports word-sized arithmetic.
3353
3354 Also look for a case where we have a SUBREG whose operand
3355 is the same as our result. If both modes are smaller
3356 than a word, we are simply interpreting a register in
3357 different modes and we can use the inner value. */
3358
3359 for (; elt; elt = elt->next_same_value)
3360 {
3361 enum rtx_code eltcode = GET_CODE (elt->exp);
3362
3363 /* Just check for unary and binary operations. */
ec8e098d
PB
3364 if (UNARY_P (elt->exp)
3365 && eltcode != SIGN_EXTEND
3366 && eltcode != ZERO_EXTEND
4c442790
PB
3367 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3368 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3369 && (GET_MODE_CLASS (mode)
3370 == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3371 {
3372 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
e5f6a288 3373
f8cfc6aa 3374 if (!REG_P (op0) && ! CONSTANT_P (op0))
4c442790 3375 op0 = fold_rtx (op0, NULL_RTX);
e5f6a288 3376
e5f6a288 3377 op0 = equiv_constant (op0);
4c442790
PB
3378 if (op0)
3379 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3380 op0, mode);
3381 }
ec8e098d 3382 else if (ARITHMETIC_P (elt->exp)
4c442790
PB
3383 && eltcode != DIV && eltcode != MOD
3384 && eltcode != UDIV && eltcode != UMOD
3385 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3386 && eltcode != ROTATE && eltcode != ROTATERT
3387 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3388 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3389 == mode))
3390 || CONSTANT_P (XEXP (elt->exp, 0)))
3391 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3392 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3393 == mode))
3394 || CONSTANT_P (XEXP (elt->exp, 1))))
3395 {
3396 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3397 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3398
f8cfc6aa 3399 if (op0 && !REG_P (op0) && ! CONSTANT_P (op0))
4c442790
PB
3400 op0 = fold_rtx (op0, NULL_RTX);
3401
3402 if (op0)
3403 op0 = equiv_constant (op0);
3404
f8cfc6aa 3405 if (op1 && !REG_P (op1) && ! CONSTANT_P (op1))
4c442790
PB
3406 op1 = fold_rtx (op1, NULL_RTX);
3407
3408 if (op1)
3409 op1 = equiv_constant (op1);
3410
3411 /* If we are looking for the low SImode part of
3412 (ashift:DI c (const_int 32)), it doesn't work
3413 to compute that in SImode, because a 32-bit shift
3414 in SImode is unpredictable. We know the value is 0. */
3415 if (op0 && op1
3416 && GET_CODE (elt->exp) == ASHIFT
3417 && GET_CODE (op1) == CONST_INT
3418 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3419 {
3420 if (INTVAL (op1)
3421 < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3422 /* If the count fits in the inner mode's width,
3423 but exceeds the outer mode's width,
3424 the value will get truncated to 0
3425 by the subreg. */
3426 new = CONST0_RTX (mode);
3427 else
3428 /* If the count exceeds even the inner mode's width,
76fb0b60 3429 don't fold this expression. */
4c442790
PB
3430 new = 0;
3431 }
3432 else if (op0 && op1)
3433 new = simplify_binary_operation (GET_CODE (elt->exp), mode, op0, op1);
3434 }
e5f6a288 3435
4c442790
PB
3436 else if (GET_CODE (elt->exp) == SUBREG
3437 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3438 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3439 <= UNITS_PER_WORD)
0516f6fe 3440 && exp_equiv_p (elt->exp, elt->exp, 1, false))
4c442790 3441 new = copy_rtx (SUBREG_REG (elt->exp));
4b980e20 3442
4c442790
PB
3443 if (new)
3444 return new;
3445 }
3446 else
3447 /* A SUBREG resulting from a zero extension may fold to zero if
3448 it extracts higher bits than the ZERO_EXTEND's source bits.
3449 FIXME: if combine tried to, er, combine these instructions,
3450 this transformation may be moved to simplify_subreg. */
3451 for (; elt; elt = elt->next_same_value)
3452 {
3453 if (GET_CODE (elt->exp) == ZERO_EXTEND
3454 && subreg_lsb (x)
3455 >= GET_MODE_BITSIZE (GET_MODE (XEXP (elt->exp, 0))))
3456 return CONST0_RTX (mode);
3457 }
e5f6a288
RK
3458 }
3459
7afe21cc
RK
3460 return x;
3461
3462 case NOT:
3463 case NEG:
3464 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3465 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3466 new = lookup_as_function (XEXP (x, 0), code);
3467 if (new)
3468 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3469 break;
13c9910f 3470
7afe21cc
RK
3471 case MEM:
3472 /* If we are not actually processing an insn, don't try to find the
3473 best address. Not only don't we care, but we could modify the
3474 MEM in an invalid way since we have no insn to validate against. */
3475 if (insn != 0)
01329426 3476 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
7afe21cc
RK
3477
3478 {
3479 /* Even if we don't fold in the insn itself,
3480 we can safely do so here, in hopes of getting a constant. */
906c4e36 3481 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
7afe21cc 3482 rtx base = 0;
906c4e36 3483 HOST_WIDE_INT offset = 0;
7afe21cc 3484
f8cfc6aa 3485 if (REG_P (addr)
1bb98cec
DM
3486 && REGNO_QTY_VALID_P (REGNO (addr)))
3487 {
3488 int addr_q = REG_QTY (REGNO (addr));
3489 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3490
3491 if (GET_MODE (addr) == addr_ent->mode
3492 && addr_ent->const_rtx != NULL_RTX)
3493 addr = addr_ent->const_rtx;
3494 }
7afe21cc
RK
3495
3496 /* If address is constant, split it into a base and integer offset. */
3497 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3498 base = addr;
3499 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3500 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3501 {
3502 base = XEXP (XEXP (addr, 0), 0);
3503 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3504 }
3505 else if (GET_CODE (addr) == LO_SUM
3506 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3507 base = XEXP (addr, 1);
3508
3509 /* If this is a constant pool reference, we can fold it into its
3510 constant to allow better value tracking. */
3511 if (base && GET_CODE (base) == SYMBOL_REF
3512 && CONSTANT_POOL_ADDRESS_P (base))
3513 {
3514 rtx constant = get_pool_constant (base);
3515 enum machine_mode const_mode = get_pool_mode (base);
3516 rtx new;
3517
3518 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
dd0ba281
RS
3519 {
3520 constant_pool_entries_cost = COST (constant);
3521 constant_pool_entries_regcost = approx_reg_cost (constant);
3522 }
7afe21cc
RK
3523
3524 /* If we are loading the full constant, we have an equivalence. */
3525 if (offset == 0 && mode == const_mode)
3526 return constant;
3527
9faa82d8 3528 /* If this actually isn't a constant (weird!), we can't do
7afe21cc
RK
3529 anything. Otherwise, handle the two most common cases:
3530 extracting a word from a multi-word constant, and extracting
3531 the low-order bits. Other cases don't seem common enough to
3532 worry about. */
3533 if (! CONSTANT_P (constant))
3534 return x;
3535
3536 if (GET_MODE_CLASS (mode) == MODE_INT
3537 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3538 && offset % UNITS_PER_WORD == 0
3539 && (new = operand_subword (constant,
3540 offset / UNITS_PER_WORD,
3541 0, const_mode)) != 0)
3542 return new;
3543
3544 if (((BYTES_BIG_ENDIAN
3545 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3546 || (! BYTES_BIG_ENDIAN && offset == 0))
4de249d9 3547 && (new = gen_lowpart (mode, constant)) != 0)
7afe21cc
RK
3548 return new;
3549 }
3550
3551 /* If this is a reference to a label at a known position in a jump
3552 table, we also know its value. */
3553 if (base && GET_CODE (base) == LABEL_REF)
3554 {
3555 rtx label = XEXP (base, 0);
3556 rtx table_insn = NEXT_INSN (label);
278a83b2 3557
4b4bf941 3558 if (table_insn && JUMP_P (table_insn)
7afe21cc
RK
3559 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3560 {
3561 rtx table = PATTERN (table_insn);
3562
3563 if (offset >= 0
3564 && (offset / GET_MODE_SIZE (GET_MODE (table))
3565 < XVECLEN (table, 0)))
3566 return XVECEXP (table, 0,
3567 offset / GET_MODE_SIZE (GET_MODE (table)));
3568 }
4b4bf941 3569 if (table_insn && JUMP_P (table_insn)
7afe21cc
RK
3570 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3571 {
3572 rtx table = PATTERN (table_insn);
3573
3574 if (offset >= 0
3575 && (offset / GET_MODE_SIZE (GET_MODE (table))
3576 < XVECLEN (table, 1)))
3577 {
3578 offset /= GET_MODE_SIZE (GET_MODE (table));
38a448ca
RH
3579 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3580 XEXP (table, 0));
7afe21cc
RK
3581
3582 if (GET_MODE (table) != Pmode)
38a448ca 3583 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
7afe21cc 3584
278a83b2 3585 /* Indicate this is a constant. This isn't a
67a37737
RK
3586 valid form of CONST, but it will only be used
3587 to fold the next insns and then discarded, so
ac7ef8d5
FS
3588 it should be safe.
3589
3590 Note this expression must be explicitly discarded,
3591 by cse_insn, else it may end up in a REG_EQUAL note
3592 and "escape" to cause problems elsewhere. */
38a448ca 3593 return gen_rtx_CONST (GET_MODE (new), new);
7afe21cc
RK
3594 }
3595 }
3596 }
3597
3598 return x;
3599 }
9255709c 3600
a5e5cf67
RH
3601#ifdef NO_FUNCTION_CSE
3602 case CALL:
3603 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3604 return x;
3605 break;
3606#endif
3607
9255709c 3608 case ASM_OPERANDS:
6c667859
AB
3609 if (insn)
3610 {
3611 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3612 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3613 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3614 }
9255709c 3615 break;
278a83b2 3616
e9a25f70
JL
3617 default:
3618 break;
7afe21cc
RK
3619 }
3620
3621 const_arg0 = 0;
3622 const_arg1 = 0;
3623 const_arg2 = 0;
3624 mode_arg0 = VOIDmode;
3625
3626 /* Try folding our operands.
3627 Then see which ones have constant values known. */
3628
3629 fmt = GET_RTX_FORMAT (code);
3630 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3631 if (fmt[i] == 'e')
3632 {
3633 rtx arg = XEXP (x, i);
3634 rtx folded_arg = arg, const_arg = 0;
3635 enum machine_mode mode_arg = GET_MODE (arg);
3636 rtx cheap_arg, expensive_arg;
3637 rtx replacements[2];
3638 int j;
5b437e0f 3639 int old_cost = COST_IN (XEXP (x, i), code);
7afe21cc
RK
3640
3641 /* Most arguments are cheap, so handle them specially. */
3642 switch (GET_CODE (arg))
3643 {
3644 case REG:
3645 /* This is the same as calling equiv_constant; it is duplicated
3646 here for speed. */
1bb98cec
DM
3647 if (REGNO_QTY_VALID_P (REGNO (arg)))
3648 {
3649 int arg_q = REG_QTY (REGNO (arg));
3650 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3651
3652 if (arg_ent->const_rtx != NULL_RTX
f8cfc6aa 3653 && !REG_P (arg_ent->const_rtx)
1bb98cec
DM
3654 && GET_CODE (arg_ent->const_rtx) != PLUS)
3655 const_arg
4de249d9 3656 = gen_lowpart (GET_MODE (arg),
1bb98cec
DM
3657 arg_ent->const_rtx);
3658 }
7afe21cc
RK
3659 break;
3660
3661 case CONST:
3662 case CONST_INT:
3663 case SYMBOL_REF:
3664 case LABEL_REF:
3665 case CONST_DOUBLE:
69ef87e2 3666 case CONST_VECTOR:
7afe21cc
RK
3667 const_arg = arg;
3668 break;
3669
3670#ifdef HAVE_cc0
3671 case CC0:
3672 folded_arg = prev_insn_cc0;
3673 mode_arg = prev_insn_cc0_mode;
3674 const_arg = equiv_constant (folded_arg);
3675 break;
3676#endif
3677
3678 default:
3679 folded_arg = fold_rtx (arg, insn);
3680 const_arg = equiv_constant (folded_arg);
3681 }
3682
3683 /* For the first three operands, see if the operand
3684 is constant or equivalent to a constant. */
3685 switch (i)
3686 {
3687 case 0:
3688 folded_arg0 = folded_arg;
3689 const_arg0 = const_arg;
3690 mode_arg0 = mode_arg;
3691 break;
3692 case 1:
3693 folded_arg1 = folded_arg;
3694 const_arg1 = const_arg;
3695 break;
3696 case 2:
3697 const_arg2 = const_arg;
3698 break;
3699 }
3700
3701 /* Pick the least expensive of the folded argument and an
3702 equivalent constant argument. */
3703 if (const_arg == 0 || const_arg == folded_arg
f2fa288f 3704 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
7afe21cc
RK
3705 cheap_arg = folded_arg, expensive_arg = const_arg;
3706 else
3707 cheap_arg = const_arg, expensive_arg = folded_arg;
3708
3709 /* Try to replace the operand with the cheapest of the two
3710 possibilities. If it doesn't work and this is either of the first
3711 two operands of a commutative operation, try swapping them.
3712 If THAT fails, try the more expensive, provided it is cheaper
3713 than what is already there. */
3714
3715 if (cheap_arg == XEXP (x, i))
3716 continue;
3717
3718 if (insn == 0 && ! copied)
3719 {
3720 x = copy_rtx (x);
3721 copied = 1;
3722 }
3723
f2fa288f
RH
3724 /* Order the replacements from cheapest to most expensive. */
3725 replacements[0] = cheap_arg;
3726 replacements[1] = expensive_arg;
3727
68252e27 3728 for (j = 0; j < 2 && replacements[j]; j++)
7afe21cc 3729 {
f2fa288f
RH
3730 int new_cost = COST_IN (replacements[j], code);
3731
3732 /* Stop if what existed before was cheaper. Prefer constants
3733 in the case of a tie. */
3734 if (new_cost > old_cost
3735 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3736 break;
3737
8cce3d04
RS
3738 /* It's not safe to substitute the operand of a conversion
3739 operator with a constant, as the conversion's identity
3740 depends upon the mode of it's operand. This optimization
3741 is handled by the call to simplify_unary_operation. */
3742 if (GET_RTX_CLASS (code) == RTX_UNARY
3743 && GET_MODE (replacements[j]) != mode_arg0
3744 && (code == ZERO_EXTEND
3745 || code == SIGN_EXTEND
3746 || code == TRUNCATE
3747 || code == FLOAT_TRUNCATE
3748 || code == FLOAT_EXTEND
3749 || code == FLOAT
3750 || code == FIX
3751 || code == UNSIGNED_FLOAT
3752 || code == UNSIGNED_FIX))
3753 continue;
3754
7afe21cc
RK
3755 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3756 break;
3757
ec8e098d
PB
3758 if (GET_RTX_CLASS (code) == RTX_COMM_COMPARE
3759 || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
7afe21cc
RK
3760 {
3761 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3762 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3763
3764 if (apply_change_group ())
3765 {
3766 /* Swap them back to be invalid so that this loop can
3767 continue and flag them to be swapped back later. */
3768 rtx tem;
3769
3770 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3771 XEXP (x, 1) = tem;
3772 must_swap = 1;
3773 break;
3774 }
3775 }
3776 }
3777 }
3778
2d8b0f3a
JL
3779 else
3780 {
3781 if (fmt[i] == 'E')
3782 /* Don't try to fold inside of a vector of expressions.
3783 Doing nothing is harmless. */
e49a1d2e 3784 {;}
2d8b0f3a 3785 }
7afe21cc
RK
3786
3787 /* If a commutative operation, place a constant integer as the second
3788 operand unless the first operand is also a constant integer. Otherwise,
3789 place any constant second unless the first operand is also a constant. */
3790
ec8e098d 3791 if (COMMUTATIVE_P (x))
7afe21cc 3792 {
c715abdd
RS
3793 if (must_swap
3794 || swap_commutative_operands_p (const_arg0 ? const_arg0
3795 : XEXP (x, 0),
3796 const_arg1 ? const_arg1
3797 : XEXP (x, 1)))
7afe21cc 3798 {
b3694847 3799 rtx tem = XEXP (x, 0);
7afe21cc
RK
3800
3801 if (insn == 0 && ! copied)
3802 {
3803 x = copy_rtx (x);
3804 copied = 1;
3805 }
3806
3807 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3808 validate_change (insn, &XEXP (x, 1), tem, 1);
3809 if (apply_change_group ())
3810 {
3811 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3812 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3813 }
3814 }
3815 }
3816
3817 /* If X is an arithmetic operation, see if we can simplify it. */
3818
3819 switch (GET_RTX_CLASS (code))
3820 {
ec8e098d 3821 case RTX_UNARY:
67a37737
RK
3822 {
3823 int is_const = 0;
3824
3825 /* We can't simplify extension ops unless we know the
3826 original mode. */
3827 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3828 && mode_arg0 == VOIDmode)
3829 break;
3830
3831 /* If we had a CONST, strip it off and put it back later if we
3832 fold. */
3833 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3834 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3835
3836 new = simplify_unary_operation (code, mode,
3837 const_arg0 ? const_arg0 : folded_arg0,
3838 mode_arg0);
ec666d23
JH
3839 /* NEG of PLUS could be converted into MINUS, but that causes
3840 expressions of the form
3841 (CONST (MINUS (CONST_INT) (SYMBOL_REF)))
3842 which many ports mistakenly treat as LEGITIMATE_CONSTANT_P.
3843 FIXME: those ports should be fixed. */
3844 if (new != 0 && is_const
3845 && GET_CODE (new) == PLUS
3846 && (GET_CODE (XEXP (new, 0)) == SYMBOL_REF
3847 || GET_CODE (XEXP (new, 0)) == LABEL_REF)
3848 && GET_CODE (XEXP (new, 1)) == CONST_INT)
38a448ca 3849 new = gen_rtx_CONST (mode, new);
67a37737 3850 }
7afe21cc 3851 break;
278a83b2 3852
ec8e098d
PB
3853 case RTX_COMPARE:
3854 case RTX_COMM_COMPARE:
7afe21cc
RK
3855 /* See what items are actually being compared and set FOLDED_ARG[01]
3856 to those values and CODE to the actual comparison code. If any are
3857 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3858 do anything if both operands are already known to be constant. */
3859
21e5076a
UB
3860 /* ??? Vector mode comparisons are not supported yet. */
3861 if (VECTOR_MODE_P (mode))
3862 break;
3863
7afe21cc
RK
3864 if (const_arg0 == 0 || const_arg1 == 0)
3865 {
3866 struct table_elt *p0, *p1;
d6edb99e 3867 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
13c9910f 3868 enum machine_mode mode_arg1;
c610adec
RK
3869
3870#ifdef FLOAT_STORE_FLAG_VALUE
c7c955ee 3871 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
c610adec 3872 {
d6edb99e 3873 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
68252e27 3874 (FLOAT_STORE_FLAG_VALUE (mode), mode));
d6edb99e 3875 false_rtx = CONST0_RTX (mode);
c610adec
RK
3876 }
3877#endif
7afe21cc 3878
13c9910f
RS
3879 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3880 &mode_arg0, &mode_arg1);
7afe21cc 3881
13c9910f
RS
3882 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3883 what kinds of things are being compared, so we can't do
3884 anything with this comparison. */
7afe21cc
RK
3885
3886 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3887 break;
3888
75335440
KH
3889 const_arg0 = equiv_constant (folded_arg0);
3890 const_arg1 = equiv_constant (folded_arg1);
3891
0f41302f
MS
3892 /* If we do not now have two constants being compared, see
3893 if we can nevertheless deduce some things about the
3894 comparison. */
7afe21cc
RK
3895 if (const_arg0 == 0 || const_arg1 == 0)
3896 {
4977bab6
ZW
3897 /* Some addresses are known to be nonzero. We don't know
3898 their sign, but equality comparisons are known. */
7afe21cc 3899 if (const_arg1 == const0_rtx
4977bab6 3900 && nonzero_address_p (folded_arg0))
7afe21cc
RK
3901 {
3902 if (code == EQ)
d6edb99e 3903 return false_rtx;
7afe21cc 3904 else if (code == NE)
d6edb99e 3905 return true_rtx;
7afe21cc
RK
3906 }
3907
fd13313f
JH
3908 /* See if the two operands are the same. */
3909
3910 if (folded_arg0 == folded_arg1
f8cfc6aa
JQ
3911 || (REG_P (folded_arg0)
3912 && REG_P (folded_arg1)
fd13313f
JH
3913 && (REG_QTY (REGNO (folded_arg0))
3914 == REG_QTY (REGNO (folded_arg1))))
3915 || ((p0 = lookup (folded_arg0,
0516f6fe
SB
3916 SAFE_HASH (folded_arg0, mode_arg0),
3917 mode_arg0))
fd13313f 3918 && (p1 = lookup (folded_arg1,
0516f6fe
SB
3919 SAFE_HASH (folded_arg1, mode_arg0),
3920 mode_arg0))
fd13313f
JH
3921 && p0->first_same_value == p1->first_same_value))
3922 {
71925bc0
RS
3923 /* Sadly two equal NaNs are not equivalent. */
3924 if (!HONOR_NANS (mode_arg0))
3925 return ((code == EQ || code == LE || code == GE
3926 || code == LEU || code == GEU || code == UNEQ
3927 || code == UNLE || code == UNGE
3928 || code == ORDERED)
3929 ? true_rtx : false_rtx);
3930 /* Take care for the FP compares we can resolve. */
3931 if (code == UNEQ || code == UNLE || code == UNGE)
3932 return true_rtx;
3933 if (code == LTGT || code == LT || code == GT)
3934 return false_rtx;
fd13313f 3935 }
7afe21cc
RK
3936
3937 /* If FOLDED_ARG0 is a register, see if the comparison we are
3938 doing now is either the same as we did before or the reverse
3939 (we only check the reverse if not floating-point). */
f8cfc6aa 3940 else if (REG_P (folded_arg0))
7afe21cc 3941 {
30f72379 3942 int qty = REG_QTY (REGNO (folded_arg0));
7afe21cc 3943
1bb98cec
DM
3944 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3945 {
3946 struct qty_table_elem *ent = &qty_table[qty];
3947
3948 if ((comparison_dominates_p (ent->comparison_code, code)
1eb8759b
RH
3949 || (! FLOAT_MODE_P (mode_arg0)
3950 && comparison_dominates_p (ent->comparison_code,
3951 reverse_condition (code))))
1bb98cec
DM
3952 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3953 || (const_arg1
3954 && rtx_equal_p (ent->comparison_const,
3955 const_arg1))
f8cfc6aa 3956 || (REG_P (folded_arg1)
1bb98cec
DM
3957 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3958 return (comparison_dominates_p (ent->comparison_code, code)
d6edb99e 3959 ? true_rtx : false_rtx);
1bb98cec 3960 }
7afe21cc
RK
3961 }
3962 }
3963 }
3964
3965 /* If we are comparing against zero, see if the first operand is
3966 equivalent to an IOR with a constant. If so, we may be able to
3967 determine the result of this comparison. */
3968
3969 if (const_arg1 == const0_rtx)
3970 {
3971 rtx y = lookup_as_function (folded_arg0, IOR);
3972 rtx inner_const;
3973
3974 if (y != 0
3975 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3976 && GET_CODE (inner_const) == CONST_INT
3977 && INTVAL (inner_const) != 0)
3978 {
3979 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
906c4e36
RK
3980 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
3981 && (INTVAL (inner_const)
3982 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
d6edb99e 3983 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
c610adec
RK
3984
3985#ifdef FLOAT_STORE_FLAG_VALUE
c7c955ee 3986 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
c610adec 3987 {
d6edb99e 3988 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
12530dbe 3989 (FLOAT_STORE_FLAG_VALUE (mode), mode));
d6edb99e 3990 false_rtx = CONST0_RTX (mode);
c610adec
RK
3991 }
3992#endif
7afe21cc
RK
3993
3994 switch (code)
3995 {
3996 case EQ:
d6edb99e 3997 return false_rtx;
7afe21cc 3998 case NE:
d6edb99e 3999 return true_rtx;
7afe21cc
RK
4000 case LT: case LE:
4001 if (has_sign)
d6edb99e 4002 return true_rtx;
7afe21cc
RK
4003 break;
4004 case GT: case GE:
4005 if (has_sign)
d6edb99e 4006 return false_rtx;
7afe21cc 4007 break;
e9a25f70
JL
4008 default:
4009 break;
7afe21cc
RK
4010 }
4011 }
4012 }
4013
c6fb08ad
PB
4014 {
4015 rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
4016 rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
4017 new = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
4018 }
7afe21cc
RK
4019 break;
4020
ec8e098d
PB
4021 case RTX_BIN_ARITH:
4022 case RTX_COMM_ARITH:
7afe21cc
RK
4023 switch (code)
4024 {
4025 case PLUS:
4026 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4027 with that LABEL_REF as its second operand. If so, the result is
4028 the first operand of that MINUS. This handles switches with an
4029 ADDR_DIFF_VEC table. */
4030 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4031 {
e650cbda
RK
4032 rtx y
4033 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
ddc356e8 4034 : lookup_as_function (folded_arg0, MINUS);
7afe21cc
RK
4035
4036 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4037 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4038 return XEXP (y, 0);
67a37737
RK
4039
4040 /* Now try for a CONST of a MINUS like the above. */
e650cbda
RK
4041 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4042 : lookup_as_function (folded_arg0, CONST))) != 0
67a37737
RK
4043 && GET_CODE (XEXP (y, 0)) == MINUS
4044 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
ddc356e8 4045 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
67a37737 4046 return XEXP (XEXP (y, 0), 0);
7afe21cc 4047 }
c2cc0778 4048
e650cbda
RK
4049 /* Likewise if the operands are in the other order. */
4050 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4051 {
4052 rtx y
4053 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
ddc356e8 4054 : lookup_as_function (folded_arg1, MINUS);
e650cbda
RK
4055
4056 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4057 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4058 return XEXP (y, 0);
4059
4060 /* Now try for a CONST of a MINUS like the above. */
4061 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4062 : lookup_as_function (folded_arg1, CONST))) != 0
4063 && GET_CODE (XEXP (y, 0)) == MINUS
4064 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
ddc356e8 4065 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
e650cbda
RK
4066 return XEXP (XEXP (y, 0), 0);
4067 }
4068
c2cc0778
RK
4069 /* If second operand is a register equivalent to a negative
4070 CONST_INT, see if we can find a register equivalent to the
4071 positive constant. Make a MINUS if so. Don't do this for
5d595063 4072 a non-negative constant since we might then alternate between
a1f300c0 4073 choosing positive and negative constants. Having the positive
5d595063
RK
4074 constant previously-used is the more common case. Be sure
4075 the resulting constant is non-negative; if const_arg1 were
4076 the smallest negative number this would overflow: depending
4077 on the mode, this would either just be the same value (and
4078 hence not save anything) or be incorrect. */
4079 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4080 && INTVAL (const_arg1) < 0
4741f6ad
JL
4081 /* This used to test
4082
ddc356e8 4083 -INTVAL (const_arg1) >= 0
4741f6ad
JL
4084
4085 But The Sun V5.0 compilers mis-compiled that test. So
4086 instead we test for the problematic value in a more direct
4087 manner and hope the Sun compilers get it correct. */
5c45a8ac
KG
4088 && INTVAL (const_arg1) !=
4089 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
f8cfc6aa 4090 && REG_P (folded_arg1))
c2cc0778 4091 {
ddc356e8 4092 rtx new_const = GEN_INT (-INTVAL (const_arg1));
c2cc0778 4093 struct table_elt *p
0516f6fe 4094 = lookup (new_const, SAFE_HASH (new_const, mode), mode);
c2cc0778
RK
4095
4096 if (p)
4097 for (p = p->first_same_value; p; p = p->next_same_value)
f8cfc6aa 4098 if (REG_P (p->exp))
0cedb36c
JL
4099 return simplify_gen_binary (MINUS, mode, folded_arg0,
4100 canon_reg (p->exp, NULL_RTX));
c2cc0778 4101 }
13c9910f
RS
4102 goto from_plus;
4103
4104 case MINUS:
4105 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4106 If so, produce (PLUS Z C2-C). */
4107 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4108 {
4109 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4110 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
f3becefd
RK
4111 return fold_rtx (plus_constant (copy_rtx (y),
4112 -INTVAL (const_arg1)),
a3b5c94a 4113 NULL_RTX);
13c9910f 4114 }
7afe21cc 4115
ddc356e8 4116 /* Fall through. */
7afe21cc 4117
13c9910f 4118 from_plus:
7afe21cc
RK
4119 case SMIN: case SMAX: case UMIN: case UMAX:
4120 case IOR: case AND: case XOR:
f930bfd0 4121 case MULT:
7afe21cc
RK
4122 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4123 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4124 is known to be of similar form, we may be able to replace the
4125 operation with a combined operation. This may eliminate the
4126 intermediate operation if every use is simplified in this way.
4127 Note that the similar optimization done by combine.c only works
4128 if the intermediate operation's result has only one reference. */
4129
f8cfc6aa 4130 if (REG_P (folded_arg0)
7afe21cc
RK
4131 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4132 {
4133 int is_shift
4134 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4135 rtx y = lookup_as_function (folded_arg0, code);
4136 rtx inner_const;
4137 enum rtx_code associate_code;
4138 rtx new_const;
4139
4140 if (y == 0
4141 || 0 == (inner_const
4142 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4143 || GET_CODE (inner_const) != CONST_INT
4144 /* If we have compiled a statement like
4145 "if (x == (x & mask1))", and now are looking at
4146 "x & mask2", we will have a case where the first operand
4147 of Y is the same as our first operand. Unless we detect
4148 this case, an infinite loop will result. */
4149 || XEXP (y, 0) == folded_arg0)
4150 break;
4151
4152 /* Don't associate these operations if they are a PLUS with the
4153 same constant and it is a power of two. These might be doable
4154 with a pre- or post-increment. Similarly for two subtracts of
4155 identical powers of two with post decrement. */
4156
213d5fbc 4157 if (code == PLUS && const_arg1 == inner_const
940da324
JL
4158 && ((HAVE_PRE_INCREMENT
4159 && exact_log2 (INTVAL (const_arg1)) >= 0)
4160 || (HAVE_POST_INCREMENT
4161 && exact_log2 (INTVAL (const_arg1)) >= 0)
4162 || (HAVE_PRE_DECREMENT
4163 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4164 || (HAVE_POST_DECREMENT
4165 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
7afe21cc
RK
4166 break;
4167
4168 /* Compute the code used to compose the constants. For example,
f930bfd0 4169 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
7afe21cc 4170
f930bfd0 4171 associate_code = (is_shift || code == MINUS ? PLUS : code);
7afe21cc
RK
4172
4173 new_const = simplify_binary_operation (associate_code, mode,
4174 const_arg1, inner_const);
4175
4176 if (new_const == 0)
4177 break;
4178
4179 /* If we are associating shift operations, don't let this
4908e508
RS
4180 produce a shift of the size of the object or larger.
4181 This could occur when we follow a sign-extend by a right
4182 shift on a machine that does a sign-extend as a pair
4183 of shifts. */
7afe21cc
RK
4184
4185 if (is_shift && GET_CODE (new_const) == CONST_INT
4908e508
RS
4186 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4187 {
4188 /* As an exception, we can turn an ASHIFTRT of this
4189 form into a shift of the number of bits - 1. */
4190 if (code == ASHIFTRT)
4191 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4192 else
4193 break;
4194 }
7afe21cc
RK
4195
4196 y = copy_rtx (XEXP (y, 0));
4197
4198 /* If Y contains our first operand (the most common way this
4199 can happen is if Y is a MEM), we would do into an infinite
4200 loop if we tried to fold it. So don't in that case. */
4201
4202 if (! reg_mentioned_p (folded_arg0, y))
4203 y = fold_rtx (y, insn);
4204
0cedb36c 4205 return simplify_gen_binary (code, mode, y, new_const);
7afe21cc 4206 }
e9a25f70
JL
4207 break;
4208
f930bfd0
JW
4209 case DIV: case UDIV:
4210 /* ??? The associative optimization performed immediately above is
4211 also possible for DIV and UDIV using associate_code of MULT.
4212 However, we would need extra code to verify that the
4213 multiplication does not overflow, that is, there is no overflow
4214 in the calculation of new_const. */
4215 break;
4216
e9a25f70
JL
4217 default:
4218 break;
7afe21cc
RK
4219 }
4220
4221 new = simplify_binary_operation (code, mode,
4222 const_arg0 ? const_arg0 : folded_arg0,
4223 const_arg1 ? const_arg1 : folded_arg1);
4224 break;
4225
ec8e098d 4226 case RTX_OBJ:
7afe21cc
RK
4227 /* (lo_sum (high X) X) is simply X. */
4228 if (code == LO_SUM && const_arg0 != 0
4229 && GET_CODE (const_arg0) == HIGH
4230 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4231 return const_arg1;
4232 break;
4233
ec8e098d
PB
4234 case RTX_TERNARY:
4235 case RTX_BITFIELD_OPS:
7afe21cc
RK
4236 new = simplify_ternary_operation (code, mode, mode_arg0,
4237 const_arg0 ? const_arg0 : folded_arg0,
4238 const_arg1 ? const_arg1 : folded_arg1,
4239 const_arg2 ? const_arg2 : XEXP (x, 2));
4240 break;
ee5332b8 4241
ec8e098d
PB
4242 default:
4243 break;
7afe21cc
RK
4244 }
4245
4246 return new ? new : x;
4247}
4248\f
4249/* Return a constant value currently equivalent to X.
4250 Return 0 if we don't know one. */
4251
4252static rtx
7080f735 4253equiv_constant (rtx x)
7afe21cc 4254{
f8cfc6aa 4255 if (REG_P (x)
1bb98cec
DM
4256 && REGNO_QTY_VALID_P (REGNO (x)))
4257 {
4258 int x_q = REG_QTY (REGNO (x));
4259 struct qty_table_elem *x_ent = &qty_table[x_q];
4260
4261 if (x_ent->const_rtx)
4de249d9 4262 x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
1bb98cec 4263 }
7afe21cc 4264
2ce5e1b4 4265 if (x == 0 || CONSTANT_P (x))
7afe21cc
RK
4266 return x;
4267
fc3ffe83
RK
4268 /* If X is a MEM, try to fold it outside the context of any insn to see if
4269 it might be equivalent to a constant. That handles the case where it
4270 is a constant-pool reference. Then try to look it up in the hash table
4271 in case it is something whose value we have seen before. */
4272
3c0cb5de 4273 if (MEM_P (x))
fc3ffe83
RK
4274 {
4275 struct table_elt *elt;
4276
906c4e36 4277 x = fold_rtx (x, NULL_RTX);
fc3ffe83
RK
4278 if (CONSTANT_P (x))
4279 return x;
4280
0516f6fe 4281 elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
fc3ffe83
RK
4282 if (elt == 0)
4283 return 0;
4284
4285 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4286 if (elt->is_const && CONSTANT_P (elt->exp))
4287 return elt->exp;
4288 }
4289
7afe21cc
RK
4290 return 0;
4291}
4292\f
4293/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4294 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4295 least-significant part of X.
278a83b2 4296 MODE specifies how big a part of X to return.
7afe21cc
RK
4297
4298 If the requested operation cannot be done, 0 is returned.
4299
4de249d9 4300 This is similar to gen_lowpart_general in emit-rtl.c. */
7afe21cc
RK
4301
4302rtx
7080f735 4303gen_lowpart_if_possible (enum machine_mode mode, rtx x)
7afe21cc
RK
4304{
4305 rtx result = gen_lowpart_common (mode, x);
4306
4307 if (result)
4308 return result;
3c0cb5de 4309 else if (MEM_P (x))
7afe21cc
RK
4310 {
4311 /* This is the only other case we handle. */
b3694847 4312 int offset = 0;
7afe21cc
RK
4313 rtx new;
4314
f76b9db2
ILT
4315 if (WORDS_BIG_ENDIAN)
4316 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4317 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4318 if (BYTES_BIG_ENDIAN)
f1ec5147
RK
4319 /* Adjust the address so that the address-after-the-data is
4320 unchanged. */
4321 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4322 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4323
4324 new = adjust_address_nv (x, mode, offset);
7afe21cc
RK
4325 if (! memory_address_p (mode, XEXP (new, 0)))
4326 return 0;
f1ec5147 4327
7afe21cc
RK
4328 return new;
4329 }
4330 else
4331 return 0;
4332}
4333\f
6de9cd9a 4334/* Given INSN, a jump insn, PATH_TAKEN indicates if we are following the "taken"
7afe21cc
RK
4335 branch. It will be zero if not.
4336
4337 In certain cases, this can cause us to add an equivalence. For example,
278a83b2 4338 if we are following the taken case of
7080f735 4339 if (i == 2)
7afe21cc
RK
4340 we can add the fact that `i' and '2' are now equivalent.
4341
4342 In any case, we can record that this comparison was passed. If the same
4343 comparison is seen later, we will know its value. */
4344
4345static void
7080f735 4346record_jump_equiv (rtx insn, int taken)
7afe21cc
RK
4347{
4348 int cond_known_true;
4349 rtx op0, op1;
7f1c097d 4350 rtx set;
13c9910f 4351 enum machine_mode mode, mode0, mode1;
7afe21cc
RK
4352 int reversed_nonequality = 0;
4353 enum rtx_code code;
4354
4355 /* Ensure this is the right kind of insn. */
7f1c097d 4356 if (! any_condjump_p (insn))
7afe21cc 4357 return;
7f1c097d 4358 set = pc_set (insn);
7afe21cc
RK
4359
4360 /* See if this jump condition is known true or false. */
4361 if (taken)
7f1c097d 4362 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
7afe21cc 4363 else
7f1c097d 4364 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
7afe21cc
RK
4365
4366 /* Get the type of comparison being done and the operands being compared.
4367 If we had to reverse a non-equality condition, record that fact so we
4368 know that it isn't valid for floating-point. */
7f1c097d
JH
4369 code = GET_CODE (XEXP (SET_SRC (set), 0));
4370 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4371 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
7afe21cc 4372
13c9910f 4373 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
7afe21cc
RK
4374 if (! cond_known_true)
4375 {
261efdef 4376 code = reversed_comparison_code_parts (code, op0, op1, insn);
1eb8759b
RH
4377
4378 /* Don't remember if we can't find the inverse. */
4379 if (code == UNKNOWN)
4380 return;
7afe21cc
RK
4381 }
4382
4383 /* The mode is the mode of the non-constant. */
13c9910f
RS
4384 mode = mode0;
4385 if (mode1 != VOIDmode)
4386 mode = mode1;
7afe21cc
RK
4387
4388 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4389}
4390
794693c0
RH
4391/* Yet another form of subreg creation. In this case, we want something in
4392 MODE, and we should assume OP has MODE iff it is naturally modeless. */
4393
4394static rtx
4395record_jump_cond_subreg (enum machine_mode mode, rtx op)
4396{
4397 enum machine_mode op_mode = GET_MODE (op);
4398 if (op_mode == mode || op_mode == VOIDmode)
4399 return op;
4400 return lowpart_subreg (mode, op, op_mode);
4401}
4402
7afe21cc
RK
4403/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4404 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4405 Make any useful entries we can with that information. Called from
4406 above function and called recursively. */
4407
4408static void
7080f735
AJ
4409record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
4410 rtx op1, int reversed_nonequality)
7afe21cc 4411{
2197a88a 4412 unsigned op0_hash, op1_hash;
e428d738 4413 int op0_in_memory, op1_in_memory;
7afe21cc
RK
4414 struct table_elt *op0_elt, *op1_elt;
4415
4416 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4417 we know that they are also equal in the smaller mode (this is also
4418 true for all smaller modes whether or not there is a SUBREG, but
ac7ef8d5 4419 is not worth testing for with no SUBREG). */
7afe21cc 4420
2e794ee8 4421 /* Note that GET_MODE (op0) may not equal MODE. */
7afe21cc 4422 if (code == EQ && GET_CODE (op0) == SUBREG
2e794ee8
RS
4423 && (GET_MODE_SIZE (GET_MODE (op0))
4424 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
7afe21cc
RK
4425 {
4426 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
794693c0
RH
4427 rtx tem = record_jump_cond_subreg (inner_mode, op1);
4428 if (tem)
4429 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4430 reversed_nonequality);
7afe21cc
RK
4431 }
4432
4433 if (code == EQ && GET_CODE (op1) == SUBREG
2e794ee8
RS
4434 && (GET_MODE_SIZE (GET_MODE (op1))
4435 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
7afe21cc
RK
4436 {
4437 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
794693c0
RH
4438 rtx tem = record_jump_cond_subreg (inner_mode, op0);
4439 if (tem)
4440 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4441 reversed_nonequality);
7afe21cc
RK
4442 }
4443
278a83b2 4444 /* Similarly, if this is an NE comparison, and either is a SUBREG
7afe21cc
RK
4445 making a smaller mode, we know the whole thing is also NE. */
4446
2e794ee8
RS
4447 /* Note that GET_MODE (op0) may not equal MODE;
4448 if we test MODE instead, we can get an infinite recursion
4449 alternating between two modes each wider than MODE. */
4450
7afe21cc
RK
4451 if (code == NE && GET_CODE (op0) == SUBREG
4452 && subreg_lowpart_p (op0)
2e794ee8
RS
4453 && (GET_MODE_SIZE (GET_MODE (op0))
4454 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
7afe21cc
RK
4455 {
4456 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
794693c0
RH
4457 rtx tem = record_jump_cond_subreg (inner_mode, op1);
4458 if (tem)
4459 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4460 reversed_nonequality);
7afe21cc
RK
4461 }
4462
4463 if (code == NE && GET_CODE (op1) == SUBREG
4464 && subreg_lowpart_p (op1)
2e794ee8
RS
4465 && (GET_MODE_SIZE (GET_MODE (op1))
4466 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
7afe21cc
RK
4467 {
4468 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
794693c0
RH
4469 rtx tem = record_jump_cond_subreg (inner_mode, op0);
4470 if (tem)
4471 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4472 reversed_nonequality);
7afe21cc
RK
4473 }
4474
4475 /* Hash both operands. */
4476
4477 do_not_record = 0;
4478 hash_arg_in_memory = 0;
2197a88a 4479 op0_hash = HASH (op0, mode);
7afe21cc 4480 op0_in_memory = hash_arg_in_memory;
7afe21cc
RK
4481
4482 if (do_not_record)
4483 return;
4484
4485 do_not_record = 0;
4486 hash_arg_in_memory = 0;
2197a88a 4487 op1_hash = HASH (op1, mode);
7afe21cc 4488 op1_in_memory = hash_arg_in_memory;
278a83b2 4489
7afe21cc
RK
4490 if (do_not_record)
4491 return;
4492
4493 /* Look up both operands. */
2197a88a
RK
4494 op0_elt = lookup (op0, op0_hash, mode);
4495 op1_elt = lookup (op1, op1_hash, mode);
7afe21cc 4496
af3869c1
RK
4497 /* If both operands are already equivalent or if they are not in the
4498 table but are identical, do nothing. */
4499 if ((op0_elt != 0 && op1_elt != 0
4500 && op0_elt->first_same_value == op1_elt->first_same_value)
4501 || op0 == op1 || rtx_equal_p (op0, op1))
4502 return;
4503
7afe21cc 4504 /* If we aren't setting two things equal all we can do is save this
b2796a4b
RK
4505 comparison. Similarly if this is floating-point. In the latter
4506 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4507 If we record the equality, we might inadvertently delete code
4508 whose intent was to change -0 to +0. */
4509
cbf6a543 4510 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
7afe21cc 4511 {
1bb98cec
DM
4512 struct qty_table_elem *ent;
4513 int qty;
4514
7afe21cc
RK
4515 /* If we reversed a floating-point comparison, if OP0 is not a
4516 register, or if OP1 is neither a register or constant, we can't
4517 do anything. */
4518
f8cfc6aa 4519 if (!REG_P (op1))
7afe21cc
RK
4520 op1 = equiv_constant (op1);
4521
cbf6a543 4522 if ((reversed_nonequality && FLOAT_MODE_P (mode))
f8cfc6aa 4523 || !REG_P (op0) || op1 == 0)
7afe21cc
RK
4524 return;
4525
4526 /* Put OP0 in the hash table if it isn't already. This gives it a
4527 new quantity number. */
4528 if (op0_elt == 0)
4529 {
9714cf43 4530 if (insert_regs (op0, NULL, 0))
7afe21cc
RK
4531 {
4532 rehash_using_reg (op0);
2197a88a 4533 op0_hash = HASH (op0, mode);
2bb81c86
RK
4534
4535 /* If OP0 is contained in OP1, this changes its hash code
4536 as well. Faster to rehash than to check, except
4537 for the simple case of a constant. */
4538 if (! CONSTANT_P (op1))
2197a88a 4539 op1_hash = HASH (op1,mode);
7afe21cc
RK
4540 }
4541
9714cf43 4542 op0_elt = insert (op0, NULL, op0_hash, mode);
7afe21cc 4543 op0_elt->in_memory = op0_in_memory;
7afe21cc
RK
4544 }
4545
1bb98cec
DM
4546 qty = REG_QTY (REGNO (op0));
4547 ent = &qty_table[qty];
4548
4549 ent->comparison_code = code;
f8cfc6aa 4550 if (REG_P (op1))
7afe21cc 4551 {
5d5ea909 4552 /* Look it up again--in case op0 and op1 are the same. */
2197a88a 4553 op1_elt = lookup (op1, op1_hash, mode);
5d5ea909 4554
7afe21cc
RK
4555 /* Put OP1 in the hash table so it gets a new quantity number. */
4556 if (op1_elt == 0)
4557 {
9714cf43 4558 if (insert_regs (op1, NULL, 0))
7afe21cc
RK
4559 {
4560 rehash_using_reg (op1);
2197a88a 4561 op1_hash = HASH (op1, mode);
7afe21cc
RK
4562 }
4563
9714cf43 4564 op1_elt = insert (op1, NULL, op1_hash, mode);
7afe21cc 4565 op1_elt->in_memory = op1_in_memory;
7afe21cc
RK
4566 }
4567
1bb98cec
DM
4568 ent->comparison_const = NULL_RTX;
4569 ent->comparison_qty = REG_QTY (REGNO (op1));
7afe21cc
RK
4570 }
4571 else
4572 {
1bb98cec
DM
4573 ent->comparison_const = op1;
4574 ent->comparison_qty = -1;
7afe21cc
RK
4575 }
4576
4577 return;
4578 }
4579
eb5ad42a
RS
4580 /* If either side is still missing an equivalence, make it now,
4581 then merge the equivalences. */
7afe21cc 4582
7afe21cc
RK
4583 if (op0_elt == 0)
4584 {
9714cf43 4585 if (insert_regs (op0, NULL, 0))
7afe21cc
RK
4586 {
4587 rehash_using_reg (op0);
2197a88a 4588 op0_hash = HASH (op0, mode);
7afe21cc
RK
4589 }
4590
9714cf43 4591 op0_elt = insert (op0, NULL, op0_hash, mode);
7afe21cc 4592 op0_elt->in_memory = op0_in_memory;
7afe21cc
RK
4593 }
4594
4595 if (op1_elt == 0)
4596 {
9714cf43 4597 if (insert_regs (op1, NULL, 0))
7afe21cc
RK
4598 {
4599 rehash_using_reg (op1);
2197a88a 4600 op1_hash = HASH (op1, mode);
7afe21cc
RK
4601 }
4602
9714cf43 4603 op1_elt = insert (op1, NULL, op1_hash, mode);
7afe21cc 4604 op1_elt->in_memory = op1_in_memory;
7afe21cc 4605 }
eb5ad42a
RS
4606
4607 merge_equiv_classes (op0_elt, op1_elt);
7afe21cc
RK
4608}
4609\f
4610/* CSE processing for one instruction.
4611 First simplify sources and addresses of all assignments
4612 in the instruction, using previously-computed equivalents values.
4613 Then install the new sources and destinations in the table
278a83b2 4614 of available values.
7afe21cc 4615
1ed0205e
VM
4616 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4617 the insn. It means that INSN is inside libcall block. In this
ddc356e8 4618 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
7afe21cc
RK
4619
4620/* Data on one SET contained in the instruction. */
4621
4622struct set
4623{
4624 /* The SET rtx itself. */
4625 rtx rtl;
4626 /* The SET_SRC of the rtx (the original value, if it is changing). */
4627 rtx src;
4628 /* The hash-table element for the SET_SRC of the SET. */
4629 struct table_elt *src_elt;
2197a88a
RK
4630 /* Hash value for the SET_SRC. */
4631 unsigned src_hash;
4632 /* Hash value for the SET_DEST. */
4633 unsigned dest_hash;
7afe21cc
RK
4634 /* The SET_DEST, with SUBREG, etc., stripped. */
4635 rtx inner_dest;
278a83b2 4636 /* Nonzero if the SET_SRC is in memory. */
7afe21cc 4637 char src_in_memory;
7afe21cc
RK
4638 /* Nonzero if the SET_SRC contains something
4639 whose value cannot be predicted and understood. */
4640 char src_volatile;
496324d0
DN
4641 /* Original machine mode, in case it becomes a CONST_INT.
4642 The size of this field should match the size of the mode
4643 field of struct rtx_def (see rtl.h). */
4644 ENUM_BITFIELD(machine_mode) mode : 8;
7afe21cc
RK
4645 /* A constant equivalent for SET_SRC, if any. */
4646 rtx src_const;
47841d1b
JJ
4647 /* Original SET_SRC value used for libcall notes. */
4648 rtx orig_src;
2197a88a
RK
4649 /* Hash value of constant equivalent for SET_SRC. */
4650 unsigned src_const_hash;
7afe21cc
RK
4651 /* Table entry for constant equivalent for SET_SRC, if any. */
4652 struct table_elt *src_const_elt;
4653};
4654
4655static void
7080f735 4656cse_insn (rtx insn, rtx libcall_insn)
7afe21cc 4657{
b3694847
SS
4658 rtx x = PATTERN (insn);
4659 int i;
92f9aa51 4660 rtx tem;
b3694847 4661 int n_sets = 0;
7afe21cc 4662
2d8b0f3a 4663#ifdef HAVE_cc0
7afe21cc
RK
4664 /* Records what this insn does to set CC0. */
4665 rtx this_insn_cc0 = 0;
135d84b8 4666 enum machine_mode this_insn_cc0_mode = VOIDmode;
2d8b0f3a 4667#endif
7afe21cc
RK
4668
4669 rtx src_eqv = 0;
4670 struct table_elt *src_eqv_elt = 0;
6a651371
KG
4671 int src_eqv_volatile = 0;
4672 int src_eqv_in_memory = 0;
6a651371 4673 unsigned src_eqv_hash = 0;
7afe21cc 4674
9714cf43 4675 struct set *sets = (struct set *) 0;
7afe21cc
RK
4676
4677 this_insn = insn;
7afe21cc
RK
4678
4679 /* Find all the SETs and CLOBBERs in this instruction.
4680 Record all the SETs in the array `set' and count them.
4681 Also determine whether there is a CLOBBER that invalidates
4682 all memory references, or all references at varying addresses. */
4683
4b4bf941 4684 if (CALL_P (insn))
f1e7c95f
RK
4685 {
4686 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
f474c6f8
AO
4687 {
4688 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4689 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4690 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4691 }
f1e7c95f
RK
4692 }
4693
7afe21cc
RK
4694 if (GET_CODE (x) == SET)
4695 {
703ad42b 4696 sets = alloca (sizeof (struct set));
7afe21cc
RK
4697 sets[0].rtl = x;
4698
4699 /* Ignore SETs that are unconditional jumps.
4700 They never need cse processing, so this does not hurt.
4701 The reason is not efficiency but rather
4702 so that we can test at the end for instructions
4703 that have been simplified to unconditional jumps
4704 and not be misled by unchanged instructions
4705 that were unconditional jumps to begin with. */
4706 if (SET_DEST (x) == pc_rtx
4707 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4708 ;
4709
4710 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4711 The hard function value register is used only once, to copy to
4712 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4713 Ensure we invalidate the destination register. On the 80386 no
7722328e 4714 other code would invalidate it since it is a fixed_reg.
0f41302f 4715 We need not check the return of apply_change_group; see canon_reg. */
7afe21cc
RK
4716
4717 else if (GET_CODE (SET_SRC (x)) == CALL)
4718 {
4719 canon_reg (SET_SRC (x), insn);
77fa0940 4720 apply_change_group ();
7afe21cc 4721 fold_rtx (SET_SRC (x), insn);
bb4034b3 4722 invalidate (SET_DEST (x), VOIDmode);
7afe21cc
RK
4723 }
4724 else
4725 n_sets = 1;
4726 }
4727 else if (GET_CODE (x) == PARALLEL)
4728 {
b3694847 4729 int lim = XVECLEN (x, 0);
7afe21cc 4730
703ad42b 4731 sets = alloca (lim * sizeof (struct set));
7afe21cc
RK
4732
4733 /* Find all regs explicitly clobbered in this insn,
4734 and ensure they are not replaced with any other regs
4735 elsewhere in this insn.
4736 When a reg that is clobbered is also used for input,
4737 we should presume that that is for a reason,
4738 and we should not substitute some other register
4739 which is not supposed to be clobbered.
4740 Therefore, this loop cannot be merged into the one below
830a38ee 4741 because a CALL may precede a CLOBBER and refer to the
7afe21cc
RK
4742 value clobbered. We must not let a canonicalization do
4743 anything in that case. */
4744 for (i = 0; i < lim; i++)
4745 {
b3694847 4746 rtx y = XVECEXP (x, 0, i);
2708da92
RS
4747 if (GET_CODE (y) == CLOBBER)
4748 {
4749 rtx clobbered = XEXP (y, 0);
4750
f8cfc6aa 4751 if (REG_P (clobbered)
2708da92 4752 || GET_CODE (clobbered) == SUBREG)
bb4034b3 4753 invalidate (clobbered, VOIDmode);
2708da92
RS
4754 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4755 || GET_CODE (clobbered) == ZERO_EXTRACT)
bb4034b3 4756 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
2708da92 4757 }
7afe21cc 4758 }
278a83b2 4759
7afe21cc
RK
4760 for (i = 0; i < lim; i++)
4761 {
b3694847 4762 rtx y = XVECEXP (x, 0, i);
7afe21cc
RK
4763 if (GET_CODE (y) == SET)
4764 {
7722328e
RK
4765 /* As above, we ignore unconditional jumps and call-insns and
4766 ignore the result of apply_change_group. */
7afe21cc
RK
4767 if (GET_CODE (SET_SRC (y)) == CALL)
4768 {
4769 canon_reg (SET_SRC (y), insn);
77fa0940 4770 apply_change_group ();
7afe21cc 4771 fold_rtx (SET_SRC (y), insn);
bb4034b3 4772 invalidate (SET_DEST (y), VOIDmode);
7afe21cc
RK
4773 }
4774 else if (SET_DEST (y) == pc_rtx
4775 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4776 ;
4777 else
4778 sets[n_sets++].rtl = y;
4779 }
4780 else if (GET_CODE (y) == CLOBBER)
4781 {
9ae8ffe7 4782 /* If we clobber memory, canon the address.
7afe21cc
RK
4783 This does nothing when a register is clobbered
4784 because we have already invalidated the reg. */
3c0cb5de 4785 if (MEM_P (XEXP (y, 0)))
9ae8ffe7 4786 canon_reg (XEXP (y, 0), NULL_RTX);
7afe21cc
RK
4787 }
4788 else if (GET_CODE (y) == USE
f8cfc6aa 4789 && ! (REG_P (XEXP (y, 0))
7afe21cc 4790 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
906c4e36 4791 canon_reg (y, NULL_RTX);
7afe21cc
RK
4792 else if (GET_CODE (y) == CALL)
4793 {
7722328e
RK
4794 /* The result of apply_change_group can be ignored; see
4795 canon_reg. */
7afe21cc 4796 canon_reg (y, insn);
77fa0940 4797 apply_change_group ();
7afe21cc
RK
4798 fold_rtx (y, insn);
4799 }
4800 }
4801 }
4802 else if (GET_CODE (x) == CLOBBER)
4803 {
3c0cb5de 4804 if (MEM_P (XEXP (x, 0)))
9ae8ffe7 4805 canon_reg (XEXP (x, 0), NULL_RTX);
7afe21cc
RK
4806 }
4807
4808 /* Canonicalize a USE of a pseudo register or memory location. */
4809 else if (GET_CODE (x) == USE
f8cfc6aa 4810 && ! (REG_P (XEXP (x, 0))
7afe21cc 4811 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
906c4e36 4812 canon_reg (XEXP (x, 0), NULL_RTX);
7afe21cc
RK
4813 else if (GET_CODE (x) == CALL)
4814 {
7722328e 4815 /* The result of apply_change_group can be ignored; see canon_reg. */
7afe21cc 4816 canon_reg (x, insn);
77fa0940 4817 apply_change_group ();
7afe21cc
RK
4818 fold_rtx (x, insn);
4819 }
4820
7b3ab05e
JW
4821 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4822 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4823 is handled specially for this case, and if it isn't set, then there will
9faa82d8 4824 be no equivalence for the destination. */
92f9aa51
RK
4825 if (n_sets == 1 && REG_NOTES (insn) != 0
4826 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
7b3ab05e
JW
4827 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4828 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
7b668f9e
JJ
4829 {
4830 src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4831 XEXP (tem, 0) = src_eqv;
4832 }
7afe21cc
RK
4833
4834 /* Canonicalize sources and addresses of destinations.
4835 We do this in a separate pass to avoid problems when a MATCH_DUP is
4836 present in the insn pattern. In that case, we want to ensure that
4837 we don't break the duplicate nature of the pattern. So we will replace
4838 both operands at the same time. Otherwise, we would fail to find an
4839 equivalent substitution in the loop calling validate_change below.
7afe21cc
RK
4840
4841 We used to suppress canonicalization of DEST if it appears in SRC,
77fa0940 4842 but we don't do this any more. */
7afe21cc
RK
4843
4844 for (i = 0; i < n_sets; i++)
4845 {
4846 rtx dest = SET_DEST (sets[i].rtl);
4847 rtx src = SET_SRC (sets[i].rtl);
4848 rtx new = canon_reg (src, insn);
58873255 4849 int insn_code;
7afe21cc 4850
47841d1b 4851 sets[i].orig_src = src;
f8cfc6aa 4852 if ((REG_P (new) && REG_P (src)
77fa0940
RK
4853 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4854 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
58873255 4855 || (insn_code = recog_memoized (insn)) < 0
a995e389 4856 || insn_data[insn_code].n_dups > 0)
77fa0940 4857 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
7afe21cc
RK
4858 else
4859 SET_SRC (sets[i].rtl) = new;
4860
46d096a3 4861 if (GET_CODE (dest) == ZERO_EXTRACT)
7afe21cc
RK
4862 {
4863 validate_change (insn, &XEXP (dest, 1),
77fa0940 4864 canon_reg (XEXP (dest, 1), insn), 1);
7afe21cc 4865 validate_change (insn, &XEXP (dest, 2),
77fa0940 4866 canon_reg (XEXP (dest, 2), insn), 1);
7afe21cc
RK
4867 }
4868
46d096a3 4869 while (GET_CODE (dest) == SUBREG
7afe21cc 4870 || GET_CODE (dest) == ZERO_EXTRACT
46d096a3 4871 || GET_CODE (dest) == STRICT_LOW_PART)
7afe21cc
RK
4872 dest = XEXP (dest, 0);
4873
3c0cb5de 4874 if (MEM_P (dest))
7afe21cc
RK
4875 canon_reg (dest, insn);
4876 }
4877
77fa0940
RK
4878 /* Now that we have done all the replacements, we can apply the change
4879 group and see if they all work. Note that this will cause some
4880 canonicalizations that would have worked individually not to be applied
4881 because some other canonicalization didn't work, but this should not
278a83b2 4882 occur often.
7722328e
RK
4883
4884 The result of apply_change_group can be ignored; see canon_reg. */
77fa0940
RK
4885
4886 apply_change_group ();
4887
7afe21cc
RK
4888 /* Set sets[i].src_elt to the class each source belongs to.
4889 Detect assignments from or to volatile things
4890 and set set[i] to zero so they will be ignored
4891 in the rest of this function.
4892
4893 Nothing in this loop changes the hash table or the register chains. */
4894
4895 for (i = 0; i < n_sets; i++)
4896 {
b3694847
SS
4897 rtx src, dest;
4898 rtx src_folded;
4899 struct table_elt *elt = 0, *p;
7afe21cc
RK
4900 enum machine_mode mode;
4901 rtx src_eqv_here;
4902 rtx src_const = 0;
4903 rtx src_related = 0;
4904 struct table_elt *src_const_elt = 0;
99a9c946
GS
4905 int src_cost = MAX_COST;
4906 int src_eqv_cost = MAX_COST;
4907 int src_folded_cost = MAX_COST;
4908 int src_related_cost = MAX_COST;
4909 int src_elt_cost = MAX_COST;
4910 int src_regcost = MAX_COST;
4911 int src_eqv_regcost = MAX_COST;
4912 int src_folded_regcost = MAX_COST;
4913 int src_related_regcost = MAX_COST;
4914 int src_elt_regcost = MAX_COST;
da7d8304 4915 /* Set nonzero if we need to call force_const_mem on with the
7afe21cc
RK
4916 contents of src_folded before using it. */
4917 int src_folded_force_flag = 0;
4918
4919 dest = SET_DEST (sets[i].rtl);
4920 src = SET_SRC (sets[i].rtl);
4921
4922 /* If SRC is a constant that has no machine mode,
4923 hash it with the destination's machine mode.
4924 This way we can keep different modes separate. */
4925
4926 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4927 sets[i].mode = mode;
4928
4929 if (src_eqv)
4930 {
4931 enum machine_mode eqvmode = mode;
4932 if (GET_CODE (dest) == STRICT_LOW_PART)
4933 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4934 do_not_record = 0;
4935 hash_arg_in_memory = 0;
2197a88a 4936 src_eqv_hash = HASH (src_eqv, eqvmode);
7afe21cc
RK
4937
4938 /* Find the equivalence class for the equivalent expression. */
4939
4940 if (!do_not_record)
2197a88a 4941 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
7afe21cc
RK
4942
4943 src_eqv_volatile = do_not_record;
4944 src_eqv_in_memory = hash_arg_in_memory;
7afe21cc
RK
4945 }
4946
4947 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4948 value of the INNER register, not the destination. So it is not
3826a3da 4949 a valid substitution for the source. But save it for later. */
7afe21cc
RK
4950 if (GET_CODE (dest) == STRICT_LOW_PART)
4951 src_eqv_here = 0;
4952 else
4953 src_eqv_here = src_eqv;
4954
4955 /* Simplify and foldable subexpressions in SRC. Then get the fully-
4956 simplified result, which may not necessarily be valid. */
4957 src_folded = fold_rtx (src, insn);
4958
e6a125a0
RK
4959#if 0
4960 /* ??? This caused bad code to be generated for the m68k port with -O2.
4961 Suppose src is (CONST_INT -1), and that after truncation src_folded
4962 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4963 At the end we will add src and src_const to the same equivalence
4964 class. We now have 3 and -1 on the same equivalence class. This
4965 causes later instructions to be mis-optimized. */
7afe21cc
RK
4966 /* If storing a constant in a bitfield, pre-truncate the constant
4967 so we will be able to record it later. */
46d096a3 4968 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
7afe21cc
RK
4969 {
4970 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4971
4972 if (GET_CODE (src) == CONST_INT
4973 && GET_CODE (width) == CONST_INT
906c4e36
RK
4974 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4975 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4976 src_folded
4977 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4978 << INTVAL (width)) - 1));
7afe21cc 4979 }
e6a125a0 4980#endif
7afe21cc
RK
4981
4982 /* Compute SRC's hash code, and also notice if it
4983 should not be recorded at all. In that case,
4984 prevent any further processing of this assignment. */
4985 do_not_record = 0;
4986 hash_arg_in_memory = 0;
7afe21cc
RK
4987
4988 sets[i].src = src;
2197a88a 4989 sets[i].src_hash = HASH (src, mode);
7afe21cc
RK
4990 sets[i].src_volatile = do_not_record;
4991 sets[i].src_in_memory = hash_arg_in_memory;
7afe21cc 4992
50196afa 4993 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
43e72072
JJ
4994 a pseudo, do not record SRC. Using SRC as a replacement for
4995 anything else will be incorrect in that situation. Note that
4996 this usually occurs only for stack slots, in which case all the
4997 RTL would be referring to SRC, so we don't lose any optimization
4998 opportunities by not having SRC in the hash table. */
50196afa 4999
3c0cb5de 5000 if (MEM_P (src)
43e72072 5001 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
f8cfc6aa 5002 && REG_P (dest)
43e72072 5003 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
50196afa
RK
5004 sets[i].src_volatile = 1;
5005
0dadecf6
RK
5006#if 0
5007 /* It is no longer clear why we used to do this, but it doesn't
5008 appear to still be needed. So let's try without it since this
5009 code hurts cse'ing widened ops. */
9a5a17f3 5010 /* If source is a paradoxical subreg (such as QI treated as an SI),
7afe21cc
RK
5011 treat it as volatile. It may do the work of an SI in one context
5012 where the extra bits are not being used, but cannot replace an SI
5013 in general. */
5014 if (GET_CODE (src) == SUBREG
5015 && (GET_MODE_SIZE (GET_MODE (src))
5016 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5017 sets[i].src_volatile = 1;
0dadecf6 5018#endif
7afe21cc
RK
5019
5020 /* Locate all possible equivalent forms for SRC. Try to replace
5021 SRC in the insn with each cheaper equivalent.
5022
5023 We have the following types of equivalents: SRC itself, a folded
5024 version, a value given in a REG_EQUAL note, or a value related
5025 to a constant.
5026
5027 Each of these equivalents may be part of an additional class
5028 of equivalents (if more than one is in the table, they must be in
5029 the same class; we check for this).
5030
5031 If the source is volatile, we don't do any table lookups.
5032
5033 We note any constant equivalent for possible later use in a
5034 REG_NOTE. */
5035
5036 if (!sets[i].src_volatile)
2197a88a 5037 elt = lookup (src, sets[i].src_hash, mode);
7afe21cc
RK
5038
5039 sets[i].src_elt = elt;
5040
5041 if (elt && src_eqv_here && src_eqv_elt)
278a83b2
KH
5042 {
5043 if (elt->first_same_value != src_eqv_elt->first_same_value)
7afe21cc
RK
5044 {
5045 /* The REG_EQUAL is indicating that two formerly distinct
5046 classes are now equivalent. So merge them. */
5047 merge_equiv_classes (elt, src_eqv_elt);
2197a88a
RK
5048 src_eqv_hash = HASH (src_eqv, elt->mode);
5049 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
7afe21cc
RK
5050 }
5051
278a83b2
KH
5052 src_eqv_here = 0;
5053 }
7afe21cc
RK
5054
5055 else if (src_eqv_elt)
278a83b2 5056 elt = src_eqv_elt;
7afe21cc
RK
5057
5058 /* Try to find a constant somewhere and record it in `src_const'.
5059 Record its table element, if any, in `src_const_elt'. Look in
5060 any known equivalences first. (If the constant is not in the
2197a88a 5061 table, also set `sets[i].src_const_hash'). */
7afe21cc 5062 if (elt)
278a83b2 5063 for (p = elt->first_same_value; p; p = p->next_same_value)
7afe21cc
RK
5064 if (p->is_const)
5065 {
5066 src_const = p->exp;
5067 src_const_elt = elt;
5068 break;
5069 }
5070
5071 if (src_const == 0
5072 && (CONSTANT_P (src_folded)
278a83b2 5073 /* Consider (minus (label_ref L1) (label_ref L2)) as
7afe21cc
RK
5074 "constant" here so we will record it. This allows us
5075 to fold switch statements when an ADDR_DIFF_VEC is used. */
5076 || (GET_CODE (src_folded) == MINUS
5077 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5078 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5079 src_const = src_folded, src_const_elt = elt;
5080 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5081 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5082
5083 /* If we don't know if the constant is in the table, get its
5084 hash code and look it up. */
5085 if (src_const && src_const_elt == 0)
5086 {
2197a88a
RK
5087 sets[i].src_const_hash = HASH (src_const, mode);
5088 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
7afe21cc
RK
5089 }
5090
5091 sets[i].src_const = src_const;
5092 sets[i].src_const_elt = src_const_elt;
5093
5094 /* If the constant and our source are both in the table, mark them as
5095 equivalent. Otherwise, if a constant is in the table but the source
5096 isn't, set ELT to it. */
5097 if (src_const_elt && elt
5098 && src_const_elt->first_same_value != elt->first_same_value)
5099 merge_equiv_classes (elt, src_const_elt);
5100 else if (src_const_elt && elt == 0)
5101 elt = src_const_elt;
5102
5103 /* See if there is a register linearly related to a constant
5104 equivalent of SRC. */
5105 if (src_const
5106 && (GET_CODE (src_const) == CONST
5107 || (src_const_elt && src_const_elt->related_value != 0)))
278a83b2
KH
5108 {
5109 src_related = use_related_value (src_const, src_const_elt);
5110 if (src_related)
5111 {
7afe21cc 5112 struct table_elt *src_related_elt
278a83b2 5113 = lookup (src_related, HASH (src_related, mode), mode);
7afe21cc 5114 if (src_related_elt && elt)
278a83b2 5115 {
7afe21cc
RK
5116 if (elt->first_same_value
5117 != src_related_elt->first_same_value)
278a83b2 5118 /* This can occur when we previously saw a CONST
7afe21cc
RK
5119 involving a SYMBOL_REF and then see the SYMBOL_REF
5120 twice. Merge the involved classes. */
5121 merge_equiv_classes (elt, src_related_elt);
5122
278a83b2 5123 src_related = 0;
7afe21cc 5124 src_related_elt = 0;
278a83b2
KH
5125 }
5126 else if (src_related_elt && elt == 0)
5127 elt = src_related_elt;
7afe21cc 5128 }
278a83b2 5129 }
7afe21cc 5130
e4600702
RK
5131 /* See if we have a CONST_INT that is already in a register in a
5132 wider mode. */
5133
5134 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5135 && GET_MODE_CLASS (mode) == MODE_INT
5136 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5137 {
5138 enum machine_mode wider_mode;
5139
5140 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5141 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5142 && src_related == 0;
5143 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5144 {
5145 struct table_elt *const_elt
5146 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5147
5148 if (const_elt == 0)
5149 continue;
5150
5151 for (const_elt = const_elt->first_same_value;
5152 const_elt; const_elt = const_elt->next_same_value)
f8cfc6aa 5153 if (REG_P (const_elt->exp))
e4600702 5154 {
4de249d9 5155 src_related = gen_lowpart (mode,
e4600702
RK
5156 const_elt->exp);
5157 break;
5158 }
5159 }
5160 }
5161
d45cf215
RS
5162 /* Another possibility is that we have an AND with a constant in
5163 a mode narrower than a word. If so, it might have been generated
5164 as part of an "if" which would narrow the AND. If we already
5165 have done the AND in a wider mode, we can use a SUBREG of that
5166 value. */
5167
5168 if (flag_expensive_optimizations && ! src_related
5169 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5170 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5171 {
5172 enum machine_mode tmode;
38a448ca 5173 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
d45cf215
RS
5174
5175 for (tmode = GET_MODE_WIDER_MODE (mode);
5176 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5177 tmode = GET_MODE_WIDER_MODE (tmode))
5178 {
4de249d9 5179 rtx inner = gen_lowpart (tmode, XEXP (src, 0));
d45cf215
RS
5180 struct table_elt *larger_elt;
5181
5182 if (inner)
5183 {
5184 PUT_MODE (new_and, tmode);
5185 XEXP (new_and, 0) = inner;
5186 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5187 if (larger_elt == 0)
5188 continue;
5189
5190 for (larger_elt = larger_elt->first_same_value;
5191 larger_elt; larger_elt = larger_elt->next_same_value)
f8cfc6aa 5192 if (REG_P (larger_elt->exp))
d45cf215
RS
5193 {
5194 src_related
4de249d9 5195 = gen_lowpart (mode, larger_elt->exp);
d45cf215
RS
5196 break;
5197 }
5198
5199 if (src_related)
5200 break;
5201 }
5202 }
5203 }
7bac1be0
RK
5204
5205#ifdef LOAD_EXTEND_OP
5206 /* See if a MEM has already been loaded with a widening operation;
5207 if it has, we can use a subreg of that. Many CISC machines
5208 also have such operations, but this is only likely to be
71cc389b 5209 beneficial on these machines. */
278a83b2 5210
ddc356e8 5211 if (flag_expensive_optimizations && src_related == 0
7bac1be0
RK
5212 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5213 && GET_MODE_CLASS (mode) == MODE_INT
3c0cb5de 5214 && MEM_P (src) && ! do_not_record
f822d252 5215 && LOAD_EXTEND_OP (mode) != UNKNOWN)
7bac1be0 5216 {
9d80ef7c
RH
5217 struct rtx_def memory_extend_buf;
5218 rtx memory_extend_rtx = &memory_extend_buf;
7bac1be0 5219 enum machine_mode tmode;
278a83b2 5220
7bac1be0
RK
5221 /* Set what we are trying to extend and the operation it might
5222 have been extended with. */
9d80ef7c 5223 memset (memory_extend_rtx, 0, sizeof(*memory_extend_rtx));
7bac1be0
RK
5224 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5225 XEXP (memory_extend_rtx, 0) = src;
278a83b2 5226
7bac1be0
RK
5227 for (tmode = GET_MODE_WIDER_MODE (mode);
5228 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5229 tmode = GET_MODE_WIDER_MODE (tmode))
5230 {
5231 struct table_elt *larger_elt;
278a83b2 5232
7bac1be0 5233 PUT_MODE (memory_extend_rtx, tmode);
278a83b2 5234 larger_elt = lookup (memory_extend_rtx,
7bac1be0
RK
5235 HASH (memory_extend_rtx, tmode), tmode);
5236 if (larger_elt == 0)
5237 continue;
278a83b2 5238
7bac1be0
RK
5239 for (larger_elt = larger_elt->first_same_value;
5240 larger_elt; larger_elt = larger_elt->next_same_value)
f8cfc6aa 5241 if (REG_P (larger_elt->exp))
7bac1be0 5242 {
4de249d9 5243 src_related = gen_lowpart (mode,
7bac1be0
RK
5244 larger_elt->exp);
5245 break;
5246 }
278a83b2 5247
7bac1be0
RK
5248 if (src_related)
5249 break;
5250 }
5251 }
5252#endif /* LOAD_EXTEND_OP */
278a83b2 5253
7afe21cc 5254 if (src == src_folded)
278a83b2 5255 src_folded = 0;
7afe21cc 5256
da7d8304 5257 /* At this point, ELT, if nonzero, points to a class of expressions
7afe21cc 5258 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
da7d8304 5259 and SRC_RELATED, if nonzero, each contain additional equivalent
7afe21cc
RK
5260 expressions. Prune these latter expressions by deleting expressions
5261 already in the equivalence class.
5262
5263 Check for an equivalent identical to the destination. If found,
5264 this is the preferred equivalent since it will likely lead to
5265 elimination of the insn. Indicate this by placing it in
5266 `src_related'. */
5267
278a83b2
KH
5268 if (elt)
5269 elt = elt->first_same_value;
7afe21cc 5270 for (p = elt; p; p = p->next_same_value)
278a83b2 5271 {
7afe21cc
RK
5272 enum rtx_code code = GET_CODE (p->exp);
5273
5274 /* If the expression is not valid, ignore it. Then we do not
5275 have to check for validity below. In most cases, we can use
5276 `rtx_equal_p', since canonicalization has already been done. */
0516f6fe 5277 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
7afe21cc
RK
5278 continue;
5279
5a03c8c4
RK
5280 /* Also skip paradoxical subregs, unless that's what we're
5281 looking for. */
5282 if (code == SUBREG
5283 && (GET_MODE_SIZE (GET_MODE (p->exp))
5284 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5285 && ! (src != 0
5286 && GET_CODE (src) == SUBREG
5287 && GET_MODE (src) == GET_MODE (p->exp)
5288 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5289 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5290 continue;
5291
278a83b2 5292 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
7afe21cc 5293 src = 0;
278a83b2 5294 else if (src_folded && GET_CODE (src_folded) == code
7afe21cc
RK
5295 && rtx_equal_p (src_folded, p->exp))
5296 src_folded = 0;
278a83b2 5297 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
7afe21cc
RK
5298 && rtx_equal_p (src_eqv_here, p->exp))
5299 src_eqv_here = 0;
278a83b2 5300 else if (src_related && GET_CODE (src_related) == code
7afe21cc
RK
5301 && rtx_equal_p (src_related, p->exp))
5302 src_related = 0;
5303
5304 /* This is the same as the destination of the insns, we want
5305 to prefer it. Copy it to src_related. The code below will
5306 then give it a negative cost. */
5307 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5308 src_related = dest;
278a83b2 5309 }
7afe21cc
RK
5310
5311 /* Find the cheapest valid equivalent, trying all the available
5312 possibilities. Prefer items not in the hash table to ones
5313 that are when they are equal cost. Note that we can never
5314 worsen an insn as the current contents will also succeed.
05c33dd8 5315 If we find an equivalent identical to the destination, use it as best,
0f41302f 5316 since this insn will probably be eliminated in that case. */
7afe21cc
RK
5317 if (src)
5318 {
5319 if (rtx_equal_p (src, dest))
f1c1dfc3 5320 src_cost = src_regcost = -1;
7afe21cc 5321 else
630c79be
BS
5322 {
5323 src_cost = COST (src);
5324 src_regcost = approx_reg_cost (src);
5325 }
7afe21cc
RK
5326 }
5327
5328 if (src_eqv_here)
5329 {
5330 if (rtx_equal_p (src_eqv_here, dest))
f1c1dfc3 5331 src_eqv_cost = src_eqv_regcost = -1;
7afe21cc 5332 else
630c79be
BS
5333 {
5334 src_eqv_cost = COST (src_eqv_here);
5335 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5336 }
7afe21cc
RK
5337 }
5338
5339 if (src_folded)
5340 {
5341 if (rtx_equal_p (src_folded, dest))
f1c1dfc3 5342 src_folded_cost = src_folded_regcost = -1;
7afe21cc 5343 else
630c79be
BS
5344 {
5345 src_folded_cost = COST (src_folded);
5346 src_folded_regcost = approx_reg_cost (src_folded);
5347 }
7afe21cc
RK
5348 }
5349
5350 if (src_related)
5351 {
5352 if (rtx_equal_p (src_related, dest))
f1c1dfc3 5353 src_related_cost = src_related_regcost = -1;
7afe21cc 5354 else
630c79be
BS
5355 {
5356 src_related_cost = COST (src_related);
5357 src_related_regcost = approx_reg_cost (src_related);
5358 }
7afe21cc
RK
5359 }
5360
5361 /* If this was an indirect jump insn, a known label will really be
5362 cheaper even though it looks more expensive. */
5363 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
99a9c946 5364 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
278a83b2 5365
7afe21cc
RK
5366 /* Terminate loop when replacement made. This must terminate since
5367 the current contents will be tested and will always be valid. */
5368 while (1)
278a83b2
KH
5369 {
5370 rtx trial;
7afe21cc 5371
278a83b2 5372 /* Skip invalid entries. */
f8cfc6aa 5373 while (elt && !REG_P (elt->exp)
0516f6fe 5374 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
278a83b2 5375 elt = elt->next_same_value;
5a03c8c4
RK
5376
5377 /* A paradoxical subreg would be bad here: it'll be the right
5378 size, but later may be adjusted so that the upper bits aren't
5379 what we want. So reject it. */
5380 if (elt != 0
5381 && GET_CODE (elt->exp) == SUBREG
5382 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5383 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5384 /* It is okay, though, if the rtx we're trying to match
5385 will ignore any of the bits we can't predict. */
5386 && ! (src != 0
5387 && GET_CODE (src) == SUBREG
5388 && GET_MODE (src) == GET_MODE (elt->exp)
5389 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5390 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5391 {
5392 elt = elt->next_same_value;
5393 continue;
5394 }
278a83b2 5395
68252e27 5396 if (elt)
630c79be
BS
5397 {
5398 src_elt_cost = elt->cost;
5399 src_elt_regcost = elt->regcost;
5400 }
7afe21cc 5401
68252e27 5402 /* Find cheapest and skip it for the next time. For items
7afe21cc
RK
5403 of equal cost, use this order:
5404 src_folded, src, src_eqv, src_related and hash table entry. */
99a9c946 5405 if (src_folded
56ae04af
KH
5406 && preferable (src_folded_cost, src_folded_regcost,
5407 src_cost, src_regcost) <= 0
5408 && preferable (src_folded_cost, src_folded_regcost,
5409 src_eqv_cost, src_eqv_regcost) <= 0
5410 && preferable (src_folded_cost, src_folded_regcost,
5411 src_related_cost, src_related_regcost) <= 0
5412 && preferable (src_folded_cost, src_folded_regcost,
5413 src_elt_cost, src_elt_regcost) <= 0)
7afe21cc 5414 {
f1c1dfc3 5415 trial = src_folded, src_folded_cost = MAX_COST;
7afe21cc 5416 if (src_folded_force_flag)
9d8de1de
EB
5417 {
5418 rtx forced = force_const_mem (mode, trial);
5419 if (forced)
5420 trial = forced;
5421 }
7afe21cc 5422 }
99a9c946 5423 else if (src
56ae04af
KH
5424 && preferable (src_cost, src_regcost,
5425 src_eqv_cost, src_eqv_regcost) <= 0
5426 && preferable (src_cost, src_regcost,
5427 src_related_cost, src_related_regcost) <= 0
5428 && preferable (src_cost, src_regcost,
5429 src_elt_cost, src_elt_regcost) <= 0)
f1c1dfc3 5430 trial = src, src_cost = MAX_COST;
99a9c946 5431 else if (src_eqv_here
56ae04af
KH
5432 && preferable (src_eqv_cost, src_eqv_regcost,
5433 src_related_cost, src_related_regcost) <= 0
5434 && preferable (src_eqv_cost, src_eqv_regcost,
5435 src_elt_cost, src_elt_regcost) <= 0)
f1c1dfc3 5436 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
99a9c946 5437 else if (src_related
56ae04af
KH
5438 && preferable (src_related_cost, src_related_regcost,
5439 src_elt_cost, src_elt_regcost) <= 0)
68252e27 5440 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
278a83b2 5441 else
7afe21cc 5442 {
05c33dd8 5443 trial = copy_rtx (elt->exp);
7afe21cc 5444 elt = elt->next_same_value;
f1c1dfc3 5445 src_elt_cost = MAX_COST;
7afe21cc
RK
5446 }
5447
5448 /* We don't normally have an insn matching (set (pc) (pc)), so
5449 check for this separately here. We will delete such an
5450 insn below.
5451
d466c016
JL
5452 For other cases such as a table jump or conditional jump
5453 where we know the ultimate target, go ahead and replace the
5454 operand. While that may not make a valid insn, we will
5455 reemit the jump below (and also insert any necessary
5456 barriers). */
7afe21cc
RK
5457 if (n_sets == 1 && dest == pc_rtx
5458 && (trial == pc_rtx
5459 || (GET_CODE (trial) == LABEL_REF
5460 && ! condjump_p (insn))))
5461 {
2f39b6ca
UW
5462 /* Don't substitute non-local labels, this confuses CFG. */
5463 if (GET_CODE (trial) == LABEL_REF
5464 && LABEL_REF_NONLOCAL_P (trial))
5465 continue;
5466
d466c016 5467 SET_SRC (sets[i].rtl) = trial;
602c4c0d 5468 cse_jumps_altered = 1;
7afe21cc
RK
5469 break;
5470 }
278a83b2 5471
7afe21cc 5472 /* Look for a substitution that makes a valid insn. */
ddc356e8 5473 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
05c33dd8 5474 {
dbaff908
RS
5475 rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
5476
7bd8b2a8
JL
5477 /* If we just made a substitution inside a libcall, then we
5478 need to make the same substitution in any notes attached
5479 to the RETVAL insn. */
1ed0205e 5480 if (libcall_insn
f8cfc6aa 5481 && (REG_P (sets[i].orig_src)
47841d1b 5482 || GET_CODE (sets[i].orig_src) == SUBREG
3c0cb5de 5483 || MEM_P (sets[i].orig_src)))
d8b7ec41
RS
5484 {
5485 rtx note = find_reg_equal_equiv_note (libcall_insn);
5486 if (note != 0)
5487 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0),
5488 sets[i].orig_src,
5489 copy_rtx (new));
5490 }
7bd8b2a8 5491
7722328e
RK
5492 /* The result of apply_change_group can be ignored; see
5493 canon_reg. */
5494
dbaff908 5495 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
6702af89 5496 apply_change_group ();
05c33dd8
RK
5497 break;
5498 }
7afe21cc 5499
278a83b2 5500 /* If we previously found constant pool entries for
7afe21cc
RK
5501 constants and this is a constant, try making a
5502 pool entry. Put it in src_folded unless we already have done
5503 this since that is where it likely came from. */
5504
5505 else if (constant_pool_entries_cost
5506 && CONSTANT_P (trial)
d51ff7cb
JW
5507 /* Reject cases that will abort in decode_rtx_const.
5508 On the alpha when simplifying a switch, we get
5509 (const (truncate (minus (label_ref) (label_ref)))). */
1bbd065b
RK
5510 && ! (GET_CODE (trial) == CONST
5511 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
d51ff7cb
JW
5512 /* Likewise on IA-64, except without the truncate. */
5513 && ! (GET_CODE (trial) == CONST
5514 && GET_CODE (XEXP (trial, 0)) == MINUS
5515 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5516 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
1bbd065b 5517 && (src_folded == 0
3c0cb5de 5518 || (!MEM_P (src_folded)
1bbd065b 5519 && ! src_folded_force_flag))
9ae8ffe7
JL
5520 && GET_MODE_CLASS (mode) != MODE_CC
5521 && mode != VOIDmode)
7afe21cc
RK
5522 {
5523 src_folded_force_flag = 1;
5524 src_folded = trial;
5525 src_folded_cost = constant_pool_entries_cost;
dd0ba281 5526 src_folded_regcost = constant_pool_entries_regcost;
7afe21cc 5527 }
278a83b2 5528 }
7afe21cc
RK
5529
5530 src = SET_SRC (sets[i].rtl);
5531
5532 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5533 However, there is an important exception: If both are registers
5534 that are not the head of their equivalence class, replace SET_SRC
5535 with the head of the class. If we do not do this, we will have
5536 both registers live over a portion of the basic block. This way,
5537 their lifetimes will likely abut instead of overlapping. */
f8cfc6aa 5538 if (REG_P (dest)
1bb98cec 5539 && REGNO_QTY_VALID_P (REGNO (dest)))
7afe21cc 5540 {
1bb98cec
DM
5541 int dest_q = REG_QTY (REGNO (dest));
5542 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5543
5544 if (dest_ent->mode == GET_MODE (dest)
5545 && dest_ent->first_reg != REGNO (dest)
f8cfc6aa 5546 && REG_P (src) && REGNO (src) == REGNO (dest)
1bb98cec
DM
5547 /* Don't do this if the original insn had a hard reg as
5548 SET_SRC or SET_DEST. */
f8cfc6aa 5549 && (!REG_P (sets[i].src)
1bb98cec 5550 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
f8cfc6aa 5551 && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
1bb98cec
DM
5552 /* We can't call canon_reg here because it won't do anything if
5553 SRC is a hard register. */
759bd8b7 5554 {
1bb98cec
DM
5555 int src_q = REG_QTY (REGNO (src));
5556 struct qty_table_elem *src_ent = &qty_table[src_q];
5557 int first = src_ent->first_reg;
5558 rtx new_src
5559 = (first >= FIRST_PSEUDO_REGISTER
5560 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5561
5562 /* We must use validate-change even for this, because this
5563 might be a special no-op instruction, suitable only to
5564 tag notes onto. */
5565 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5566 {
5567 src = new_src;
5568 /* If we had a constant that is cheaper than what we are now
5569 setting SRC to, use that constant. We ignored it when we
5570 thought we could make this into a no-op. */
5571 if (src_const && COST (src_const) < COST (src)
278a83b2
KH
5572 && validate_change (insn, &SET_SRC (sets[i].rtl),
5573 src_const, 0))
1bb98cec
DM
5574 src = src_const;
5575 }
759bd8b7 5576 }
7afe21cc
RK
5577 }
5578
5579 /* If we made a change, recompute SRC values. */
5580 if (src != sets[i].src)
278a83b2 5581 {
4eadede7 5582 cse_altered = 1;
278a83b2
KH
5583 do_not_record = 0;
5584 hash_arg_in_memory = 0;
7afe21cc 5585 sets[i].src = src;
278a83b2
KH
5586 sets[i].src_hash = HASH (src, mode);
5587 sets[i].src_volatile = do_not_record;
5588 sets[i].src_in_memory = hash_arg_in_memory;
5589 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5590 }
7afe21cc
RK
5591
5592 /* If this is a single SET, we are setting a register, and we have an
5593 equivalent constant, we want to add a REG_NOTE. We don't want
5594 to write a REG_EQUAL note for a constant pseudo since verifying that
d45cf215 5595 that pseudo hasn't been eliminated is a pain. Such a note also
278a83b2 5596 won't help anything.
ac7ef8d5
FS
5597
5598 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5599 which can be created for a reference to a compile time computable
5600 entry in a jump table. */
5601
f8cfc6aa
JQ
5602 if (n_sets == 1 && src_const && REG_P (dest)
5603 && !REG_P (src_const)
ac7ef8d5
FS
5604 && ! (GET_CODE (src_const) == CONST
5605 && GET_CODE (XEXP (src_const, 0)) == MINUS
5606 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5607 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
7afe21cc 5608 {
a77b7e32
RS
5609 /* We only want a REG_EQUAL note if src_const != src. */
5610 if (! rtx_equal_p (src, src_const))
5611 {
5612 /* Make sure that the rtx is not shared. */
5613 src_const = copy_rtx (src_const);
51e2a951 5614
a77b7e32
RS
5615 /* Record the actual constant value in a REG_EQUAL note,
5616 making a new one if one does not already exist. */
5617 set_unique_reg_note (insn, REG_EQUAL, src_const);
5618 }
7afe21cc
RK
5619 }
5620
5621 /* Now deal with the destination. */
5622 do_not_record = 0;
7afe21cc 5623
46d096a3
SB
5624 /* Look within any ZERO_EXTRACT to the MEM or REG within it. */
5625 while (GET_CODE (dest) == SUBREG
7afe21cc 5626 || GET_CODE (dest) == ZERO_EXTRACT
7afe21cc 5627 || GET_CODE (dest) == STRICT_LOW_PART)
0339ce7e 5628 dest = XEXP (dest, 0);
7afe21cc
RK
5629
5630 sets[i].inner_dest = dest;
5631
3c0cb5de 5632 if (MEM_P (dest))
7afe21cc 5633 {
9ae8ffe7
JL
5634#ifdef PUSH_ROUNDING
5635 /* Stack pushes invalidate the stack pointer. */
5636 rtx addr = XEXP (dest, 0);
ec8e098d 5637 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
9ae8ffe7
JL
5638 && XEXP (addr, 0) == stack_pointer_rtx)
5639 invalidate (stack_pointer_rtx, Pmode);
5640#endif
7afe21cc 5641 dest = fold_rtx (dest, insn);
7afe21cc
RK
5642 }
5643
5644 /* Compute the hash code of the destination now,
5645 before the effects of this instruction are recorded,
5646 since the register values used in the address computation
5647 are those before this instruction. */
2197a88a 5648 sets[i].dest_hash = HASH (dest, mode);
7afe21cc
RK
5649
5650 /* Don't enter a bit-field in the hash table
5651 because the value in it after the store
5652 may not equal what was stored, due to truncation. */
5653
46d096a3 5654 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
7afe21cc
RK
5655 {
5656 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5657
5658 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5659 && GET_CODE (width) == CONST_INT
906c4e36
RK
5660 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5661 && ! (INTVAL (src_const)
5662 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
7afe21cc
RK
5663 /* Exception: if the value is constant,
5664 and it won't be truncated, record it. */
5665 ;
5666 else
5667 {
5668 /* This is chosen so that the destination will be invalidated
5669 but no new value will be recorded.
5670 We must invalidate because sometimes constant
5671 values can be recorded for bitfields. */
5672 sets[i].src_elt = 0;
5673 sets[i].src_volatile = 1;
5674 src_eqv = 0;
5675 src_eqv_elt = 0;
5676 }
5677 }
5678
5679 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5680 the insn. */
5681 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5682 {
ef178af3 5683 /* One less use of the label this insn used to jump to. */
49ce134f 5684 delete_insn (insn);
7afe21cc 5685 cse_jumps_altered = 1;
7afe21cc
RK
5686 /* No more processing for this set. */
5687 sets[i].rtl = 0;
5688 }
5689
5690 /* If this SET is now setting PC to a label, we know it used to
d466c016 5691 be a conditional or computed branch. */
8f235343
JH
5692 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5693 && !LABEL_REF_NONLOCAL_P (src))
7afe21cc 5694 {
8fb1e50e
GS
5695 /* Now emit a BARRIER after the unconditional jump. */
5696 if (NEXT_INSN (insn) == 0
4b4bf941 5697 || !BARRIER_P (NEXT_INSN (insn)))
8fb1e50e
GS
5698 emit_barrier_after (insn);
5699
d466c016
JL
5700 /* We reemit the jump in as many cases as possible just in
5701 case the form of an unconditional jump is significantly
5702 different than a computed jump or conditional jump.
5703
5704 If this insn has multiple sets, then reemitting the
5705 jump is nontrivial. So instead we just force rerecognition
5706 and hope for the best. */
5707 if (n_sets == 1)
7afe21cc 5708 {
9dcb4381 5709 rtx new, note;
8fb1e50e 5710
9dcb4381 5711 new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
7afe21cc
RK
5712 JUMP_LABEL (new) = XEXP (src, 0);
5713 LABEL_NUSES (XEXP (src, 0))++;
9dcb4381
RH
5714
5715 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5716 note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5717 if (note)
5718 {
5719 XEXP (note, 1) = NULL_RTX;
5720 REG_NOTES (new) = note;
5721 }
5722
38c1593d 5723 delete_insn (insn);
7afe21cc 5724 insn = new;
8fb1e50e
GS
5725
5726 /* Now emit a BARRIER after the unconditional jump. */
5727 if (NEXT_INSN (insn) == 0
4b4bf941 5728 || !BARRIER_P (NEXT_INSN (insn)))
8fb1e50e 5729 emit_barrier_after (insn);
7afe21cc 5730 }
31dcf83f 5731 else
31dcf83f 5732 INSN_CODE (insn) = -1;
7afe21cc 5733
8fb1e50e
GS
5734 /* Do not bother deleting any unreachable code,
5735 let jump/flow do that. */
7afe21cc
RK
5736
5737 cse_jumps_altered = 1;
5738 sets[i].rtl = 0;
5739 }
5740
c2a47e48
RK
5741 /* If destination is volatile, invalidate it and then do no further
5742 processing for this assignment. */
7afe21cc
RK
5743
5744 else if (do_not_record)
c2a47e48 5745 {
f8cfc6aa 5746 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
bb4034b3 5747 invalidate (dest, VOIDmode);
3c0cb5de 5748 else if (MEM_P (dest))
32fab725 5749 invalidate (dest, VOIDmode);
2708da92
RS
5750 else if (GET_CODE (dest) == STRICT_LOW_PART
5751 || GET_CODE (dest) == ZERO_EXTRACT)
bb4034b3 5752 invalidate (XEXP (dest, 0), GET_MODE (dest));
c2a47e48
RK
5753 sets[i].rtl = 0;
5754 }
7afe21cc
RK
5755
5756 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
2197a88a 5757 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
7afe21cc
RK
5758
5759#ifdef HAVE_cc0
5760 /* If setting CC0, record what it was set to, or a constant, if it
5761 is equivalent to a constant. If it is being set to a floating-point
5762 value, make a COMPARE with the appropriate constant of 0. If we
5763 don't do this, later code can interpret this as a test against
5764 const0_rtx, which can cause problems if we try to put it into an
5765 insn as a floating-point operand. */
5766 if (dest == cc0_rtx)
5767 {
5768 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5769 this_insn_cc0_mode = mode;
cbf6a543 5770 if (FLOAT_MODE_P (mode))
38a448ca
RH
5771 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5772 CONST0_RTX (mode));
7afe21cc
RK
5773 }
5774#endif
5775 }
5776
5777 /* Now enter all non-volatile source expressions in the hash table
5778 if they are not already present.
5779 Record their equivalence classes in src_elt.
5780 This way we can insert the corresponding destinations into
5781 the same classes even if the actual sources are no longer in them
5782 (having been invalidated). */
5783
5784 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5785 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5786 {
b3694847
SS
5787 struct table_elt *elt;
5788 struct table_elt *classp = sets[0].src_elt;
7afe21cc
RK
5789 rtx dest = SET_DEST (sets[0].rtl);
5790 enum machine_mode eqvmode = GET_MODE (dest);
5791
5792 if (GET_CODE (dest) == STRICT_LOW_PART)
5793 {
5794 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5795 classp = 0;
5796 }
5797 if (insert_regs (src_eqv, classp, 0))
8ae2b8f6
JW
5798 {
5799 rehash_using_reg (src_eqv);
5800 src_eqv_hash = HASH (src_eqv, eqvmode);
5801 }
2197a88a 5802 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7afe21cc 5803 elt->in_memory = src_eqv_in_memory;
7afe21cc 5804 src_eqv_elt = elt;
f7911249
JW
5805
5806 /* Check to see if src_eqv_elt is the same as a set source which
5807 does not yet have an elt, and if so set the elt of the set source
5808 to src_eqv_elt. */
5809 for (i = 0; i < n_sets; i++)
26132f71
JW
5810 if (sets[i].rtl && sets[i].src_elt == 0
5811 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
f7911249 5812 sets[i].src_elt = src_eqv_elt;
7afe21cc
RK
5813 }
5814
5815 for (i = 0; i < n_sets; i++)
5816 if (sets[i].rtl && ! sets[i].src_volatile
5817 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5818 {
5819 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5820 {
5821 /* REG_EQUAL in setting a STRICT_LOW_PART
5822 gives an equivalent for the entire destination register,
5823 not just for the subreg being stored in now.
5824 This is a more interesting equivalence, so we arrange later
5825 to treat the entire reg as the destination. */
5826 sets[i].src_elt = src_eqv_elt;
2197a88a 5827 sets[i].src_hash = src_eqv_hash;
7afe21cc
RK
5828 }
5829 else
5830 {
5831 /* Insert source and constant equivalent into hash table, if not
5832 already present. */
b3694847
SS
5833 struct table_elt *classp = src_eqv_elt;
5834 rtx src = sets[i].src;
5835 rtx dest = SET_DEST (sets[i].rtl);
7afe21cc
RK
5836 enum machine_mode mode
5837 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5838
1fcc57f1
AM
5839 /* It's possible that we have a source value known to be
5840 constant but don't have a REG_EQUAL note on the insn.
5841 Lack of a note will mean src_eqv_elt will be NULL. This
5842 can happen where we've generated a SUBREG to access a
5843 CONST_INT that is already in a register in a wider mode.
5844 Ensure that the source expression is put in the proper
5845 constant class. */
5846 if (!classp)
5847 classp = sets[i].src_const_elt;
5848
26132f71 5849 if (sets[i].src_elt == 0)
7afe21cc 5850 {
26132f71
JW
5851 /* Don't put a hard register source into the table if this is
5852 the last insn of a libcall. In this case, we only need
5853 to put src_eqv_elt in src_elt. */
db4a8254 5854 if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
8ae2b8f6 5855 {
b3694847 5856 struct table_elt *elt;
26132f71
JW
5857
5858 /* Note that these insert_regs calls cannot remove
5859 any of the src_elt's, because they would have failed to
5860 match if not still valid. */
5861 if (insert_regs (src, classp, 0))
5862 {
5863 rehash_using_reg (src);
5864 sets[i].src_hash = HASH (src, mode);
5865 }
5866 elt = insert (src, classp, sets[i].src_hash, mode);
5867 elt->in_memory = sets[i].src_in_memory;
26132f71 5868 sets[i].src_elt = classp = elt;
8ae2b8f6 5869 }
26132f71
JW
5870 else
5871 sets[i].src_elt = classp;
7afe21cc 5872 }
7afe21cc
RK
5873 if (sets[i].src_const && sets[i].src_const_elt == 0
5874 && src != sets[i].src_const
5875 && ! rtx_equal_p (sets[i].src_const, src))
5876 sets[i].src_elt = insert (sets[i].src_const, classp,
2197a88a 5877 sets[i].src_const_hash, mode);
7afe21cc
RK
5878 }
5879 }
5880 else if (sets[i].src_elt == 0)
5881 /* If we did not insert the source into the hash table (e.g., it was
5882 volatile), note the equivalence class for the REG_EQUAL value, if any,
5883 so that the destination goes into that class. */
5884 sets[i].src_elt = src_eqv_elt;
5885
9ae8ffe7 5886 invalidate_from_clobbers (x);
77fa0940 5887
278a83b2 5888 /* Some registers are invalidated by subroutine calls. Memory is
77fa0940
RK
5889 invalidated by non-constant calls. */
5890
4b4bf941 5891 if (CALL_P (insn))
7afe21cc 5892 {
24a28584 5893 if (! CONST_OR_PURE_CALL_P (insn))
9ae8ffe7 5894 invalidate_memory ();
7afe21cc
RK
5895 invalidate_for_call ();
5896 }
5897
5898 /* Now invalidate everything set by this instruction.
5899 If a SUBREG or other funny destination is being set,
5900 sets[i].rtl is still nonzero, so here we invalidate the reg
5901 a part of which is being set. */
5902
5903 for (i = 0; i < n_sets; i++)
5904 if (sets[i].rtl)
5905 {
bb4034b3
JW
5906 /* We can't use the inner dest, because the mode associated with
5907 a ZERO_EXTRACT is significant. */
b3694847 5908 rtx dest = SET_DEST (sets[i].rtl);
7afe21cc
RK
5909
5910 /* Needed for registers to remove the register from its
5911 previous quantity's chain.
5912 Needed for memory if this is a nonvarying address, unless
5913 we have just done an invalidate_memory that covers even those. */
f8cfc6aa 5914 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
bb4034b3 5915 invalidate (dest, VOIDmode);
3c0cb5de 5916 else if (MEM_P (dest))
32fab725 5917 invalidate (dest, VOIDmode);
2708da92
RS
5918 else if (GET_CODE (dest) == STRICT_LOW_PART
5919 || GET_CODE (dest) == ZERO_EXTRACT)
bb4034b3 5920 invalidate (XEXP (dest, 0), GET_MODE (dest));
7afe21cc
RK
5921 }
5922
01e752d3 5923 /* A volatile ASM invalidates everything. */
4b4bf941 5924 if (NONJUMP_INSN_P (insn)
01e752d3
JL
5925 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5926 && MEM_VOLATILE_P (PATTERN (insn)))
5927 flush_hash_table ();
5928
7afe21cc
RK
5929 /* Make sure registers mentioned in destinations
5930 are safe for use in an expression to be inserted.
5931 This removes from the hash table
5932 any invalid entry that refers to one of these registers.
5933
5934 We don't care about the return value from mention_regs because
5935 we are going to hash the SET_DEST values unconditionally. */
5936
5937 for (i = 0; i < n_sets; i++)
34c73909
R
5938 {
5939 if (sets[i].rtl)
5940 {
5941 rtx x = SET_DEST (sets[i].rtl);
5942
f8cfc6aa 5943 if (!REG_P (x))
34c73909
R
5944 mention_regs (x);
5945 else
5946 {
5947 /* We used to rely on all references to a register becoming
5948 inaccessible when a register changes to a new quantity,
5949 since that changes the hash code. However, that is not
9b1549b8 5950 safe, since after HASH_SIZE new quantities we get a
34c73909
R
5951 hash 'collision' of a register with its own invalid
5952 entries. And since SUBREGs have been changed not to
5953 change their hash code with the hash code of the register,
5954 it wouldn't work any longer at all. So we have to check
5955 for any invalid references lying around now.
5956 This code is similar to the REG case in mention_regs,
5957 but it knows that reg_tick has been incremented, and
5958 it leaves reg_in_table as -1 . */
770ae6cc
RK
5959 unsigned int regno = REGNO (x);
5960 unsigned int endregno
34c73909 5961 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
66fd46b6 5962 : hard_regno_nregs[regno][GET_MODE (x)]);
770ae6cc 5963 unsigned int i;
34c73909
R
5964
5965 for (i = regno; i < endregno; i++)
5966 {
30f72379 5967 if (REG_IN_TABLE (i) >= 0)
34c73909
R
5968 {
5969 remove_invalid_refs (i);
30f72379 5970 REG_IN_TABLE (i) = -1;
34c73909
R
5971 }
5972 }
5973 }
5974 }
5975 }
7afe21cc
RK
5976
5977 /* We may have just removed some of the src_elt's from the hash table.
5978 So replace each one with the current head of the same class. */
5979
5980 for (i = 0; i < n_sets; i++)
5981 if (sets[i].rtl)
5982 {
5983 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5984 /* If elt was removed, find current head of same class,
5985 or 0 if nothing remains of that class. */
5986 {
b3694847 5987 struct table_elt *elt = sets[i].src_elt;
7afe21cc
RK
5988
5989 while (elt && elt->prev_same_value)
5990 elt = elt->prev_same_value;
5991
5992 while (elt && elt->first_same_value == 0)
5993 elt = elt->next_same_value;
5994 sets[i].src_elt = elt ? elt->first_same_value : 0;
5995 }
5996 }
5997
5998 /* Now insert the destinations into their equivalence classes. */
5999
6000 for (i = 0; i < n_sets; i++)
6001 if (sets[i].rtl)
6002 {
b3694847 6003 rtx dest = SET_DEST (sets[i].rtl);
b3694847 6004 struct table_elt *elt;
7afe21cc
RK
6005
6006 /* Don't record value if we are not supposed to risk allocating
6007 floating-point values in registers that might be wider than
6008 memory. */
6009 if ((flag_float_store
3c0cb5de 6010 && MEM_P (dest)
cbf6a543 6011 && FLOAT_MODE_P (GET_MODE (dest)))
bc4ddc77
JW
6012 /* Don't record BLKmode values, because we don't know the
6013 size of it, and can't be sure that other BLKmode values
6014 have the same or smaller size. */
6015 || GET_MODE (dest) == BLKmode
7afe21cc
RK
6016 /* Don't record values of destinations set inside a libcall block
6017 since we might delete the libcall. Things should have been set
6018 up so we won't want to reuse such a value, but we play it safe
6019 here. */
7bd8b2a8 6020 || libcall_insn
7afe21cc
RK
6021 /* If we didn't put a REG_EQUAL value or a source into the hash
6022 table, there is no point is recording DEST. */
1a8e9a8e
RK
6023 || sets[i].src_elt == 0
6024 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6025 or SIGN_EXTEND, don't record DEST since it can cause
6026 some tracking to be wrong.
6027
6028 ??? Think about this more later. */
6029 || (GET_CODE (dest) == SUBREG
6030 && (GET_MODE_SIZE (GET_MODE (dest))
6031 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6032 && (GET_CODE (sets[i].src) == SIGN_EXTEND
6033 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
7afe21cc
RK
6034 continue;
6035
6036 /* STRICT_LOW_PART isn't part of the value BEING set,
6037 and neither is the SUBREG inside it.
6038 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6039 if (GET_CODE (dest) == STRICT_LOW_PART)
6040 dest = SUBREG_REG (XEXP (dest, 0));
6041
f8cfc6aa 6042 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
7afe21cc
RK
6043 /* Registers must also be inserted into chains for quantities. */
6044 if (insert_regs (dest, sets[i].src_elt, 1))
8ae2b8f6
JW
6045 {
6046 /* If `insert_regs' changes something, the hash code must be
6047 recalculated. */
6048 rehash_using_reg (dest);
6049 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6050 }
7afe21cc 6051
8fff4fc1
RH
6052 elt = insert (dest, sets[i].src_elt,
6053 sets[i].dest_hash, GET_MODE (dest));
9de2c71a 6054
3c0cb5de 6055 elt->in_memory = (MEM_P (sets[i].inner_dest)
389fdba0 6056 && !MEM_READONLY_P (sets[i].inner_dest));
c256df0b 6057
fc3ffe83
RK
6058 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6059 narrower than M2, and both M1 and M2 are the same number of words,
6060 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6061 make that equivalence as well.
7afe21cc 6062
4de249d9
PB
6063 However, BAR may have equivalences for which gen_lowpart
6064 will produce a simpler value than gen_lowpart applied to
7afe21cc 6065 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
278a83b2 6066 BAR's equivalences. If we don't get a simplified form, make
7afe21cc
RK
6067 the SUBREG. It will not be used in an equivalence, but will
6068 cause two similar assignments to be detected.
6069
6070 Note the loop below will find SUBREG_REG (DEST) since we have
6071 already entered SRC and DEST of the SET in the table. */
6072
6073 if (GET_CODE (dest) == SUBREG
6cdbaec4
RK
6074 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6075 / UNITS_PER_WORD)
278a83b2 6076 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
7afe21cc
RK
6077 && (GET_MODE_SIZE (GET_MODE (dest))
6078 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6079 && sets[i].src_elt != 0)
6080 {
6081 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6082 struct table_elt *elt, *classp = 0;
6083
6084 for (elt = sets[i].src_elt->first_same_value; elt;
6085 elt = elt->next_same_value)
6086 {
6087 rtx new_src = 0;
2197a88a 6088 unsigned src_hash;
7afe21cc 6089 struct table_elt *src_elt;
ff27a429 6090 int byte = 0;
7afe21cc
RK
6091
6092 /* Ignore invalid entries. */
f8cfc6aa 6093 if (!REG_P (elt->exp)
0516f6fe 6094 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
7afe21cc
RK
6095 continue;
6096
9beb7d20
RH
6097 /* We may have already been playing subreg games. If the
6098 mode is already correct for the destination, use it. */
6099 if (GET_MODE (elt->exp) == new_mode)
6100 new_src = elt->exp;
6101 else
6102 {
6103 /* Calculate big endian correction for the SUBREG_BYTE.
6104 We have already checked that M1 (GET_MODE (dest))
6105 is not narrower than M2 (new_mode). */
6106 if (BYTES_BIG_ENDIAN)
6107 byte = (GET_MODE_SIZE (GET_MODE (dest))
6108 - GET_MODE_SIZE (new_mode));
6109
6110 new_src = simplify_gen_subreg (new_mode, elt->exp,
6111 GET_MODE (dest), byte);
6112 }
6113
ff27a429
R
6114 /* The call to simplify_gen_subreg fails if the value
6115 is VOIDmode, yet we can't do any simplification, e.g.
6116 for EXPR_LISTs denoting function call results.
6117 It is invalid to construct a SUBREG with a VOIDmode
6118 SUBREG_REG, hence a zero new_src means we can't do
6119 this substitution. */
6120 if (! new_src)
6121 continue;
7afe21cc
RK
6122
6123 src_hash = HASH (new_src, new_mode);
6124 src_elt = lookup (new_src, src_hash, new_mode);
6125
6126 /* Put the new source in the hash table is if isn't
6127 already. */
6128 if (src_elt == 0)
6129 {
6130 if (insert_regs (new_src, classp, 0))
8ae2b8f6
JW
6131 {
6132 rehash_using_reg (new_src);
6133 src_hash = HASH (new_src, new_mode);
6134 }
7afe21cc
RK
6135 src_elt = insert (new_src, classp, src_hash, new_mode);
6136 src_elt->in_memory = elt->in_memory;
7afe21cc
RK
6137 }
6138 else if (classp && classp != src_elt->first_same_value)
278a83b2 6139 /* Show that two things that we've seen before are
7afe21cc
RK
6140 actually the same. */
6141 merge_equiv_classes (src_elt, classp);
6142
6143 classp = src_elt->first_same_value;
da932f04
JL
6144 /* Ignore invalid entries. */
6145 while (classp
f8cfc6aa 6146 && !REG_P (classp->exp)
0516f6fe 6147 && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
da932f04 6148 classp = classp->next_same_value;
7afe21cc
RK
6149 }
6150 }
6151 }
6152
403e25d0
RK
6153 /* Special handling for (set REG0 REG1) where REG0 is the
6154 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6155 be used in the sequel, so (if easily done) change this insn to
6156 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6157 that computed their value. Then REG1 will become a dead store
6158 and won't cloud the situation for later optimizations.
7afe21cc
RK
6159
6160 Do not make this change if REG1 is a hard register, because it will
6161 then be used in the sequel and we may be changing a two-operand insn
6162 into a three-operand insn.
6163
50270076
R
6164 Also do not do this if we are operating on a copy of INSN.
6165
6166 Also don't do this if INSN ends a libcall; this would cause an unrelated
6167 register to be set in the middle of a libcall, and we then get bad code
6168 if the libcall is deleted. */
7afe21cc 6169
f8cfc6aa 6170 if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
7afe21cc 6171 && NEXT_INSN (PREV_INSN (insn)) == insn
f8cfc6aa 6172 && REG_P (SET_SRC (sets[0].rtl))
7afe21cc 6173 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
1bb98cec 6174 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
7afe21cc 6175 {
1bb98cec
DM
6176 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6177 struct qty_table_elem *src_ent = &qty_table[src_q];
7afe21cc 6178
1bb98cec
DM
6179 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6180 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
7afe21cc 6181 {
3e25353e
AH
6182 rtx prev = insn;
6183 /* Scan for the previous nonnote insn, but stop at a basic
6184 block boundary. */
6185 do
6186 {
6187 prev = PREV_INSN (prev);
6188 }
4b4bf941 6189 while (prev && NOTE_P (prev)
3e25353e 6190 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
7080f735 6191
58ecb5e2
RS
6192 /* Do not swap the registers around if the previous instruction
6193 attaches a REG_EQUIV note to REG1.
6194
6195 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6196 from the pseudo that originally shadowed an incoming argument
6197 to another register. Some uses of REG_EQUIV might rely on it
6198 being attached to REG1 rather than REG2.
6199
6200 This section previously turned the REG_EQUIV into a REG_EQUAL
6201 note. We cannot do that because REG_EQUIV may provide an
4912a07c 6202 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
58ecb5e2 6203
4b4bf941 6204 if (prev != 0 && NONJUMP_INSN_P (prev)
403e25d0 6205 && GET_CODE (PATTERN (prev)) == SET
58ecb5e2
RS
6206 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6207 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
1bb98cec
DM
6208 {
6209 rtx dest = SET_DEST (sets[0].rtl);
403e25d0 6210 rtx src = SET_SRC (sets[0].rtl);
58ecb5e2 6211 rtx note;
7afe21cc 6212
278a83b2
KH
6213 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6214 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6215 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
1bb98cec 6216 apply_change_group ();
7afe21cc 6217
403e25d0
RK
6218 /* If INSN has a REG_EQUAL note, and this note mentions
6219 REG0, then we must delete it, because the value in
6220 REG0 has changed. If the note's value is REG1, we must
6221 also delete it because that is now this insn's dest. */
1bb98cec 6222 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
403e25d0
RK
6223 if (note != 0
6224 && (reg_mentioned_p (dest, XEXP (note, 0))
6225 || rtx_equal_p (src, XEXP (note, 0))))
1bb98cec
DM
6226 remove_note (insn, note);
6227 }
7afe21cc
RK
6228 }
6229 }
6230
6231 /* If this is a conditional jump insn, record any known equivalences due to
6232 the condition being tested. */
6233
4b4bf941 6234 if (JUMP_P (insn)
7afe21cc
RK
6235 && n_sets == 1 && GET_CODE (x) == SET
6236 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6237 record_jump_equiv (insn, 0);
6238
6239#ifdef HAVE_cc0
6240 /* If the previous insn set CC0 and this insn no longer references CC0,
6241 delete the previous insn. Here we use the fact that nothing expects CC0
6242 to be valid over an insn, which is true until the final pass. */
4b4bf941 6243 if (prev_insn && NONJUMP_INSN_P (prev_insn)
7afe21cc
RK
6244 && (tem = single_set (prev_insn)) != 0
6245 && SET_DEST (tem) == cc0_rtx
6246 && ! reg_mentioned_p (cc0_rtx, x))
6dee7384 6247 delete_insn (prev_insn);
7afe21cc
RK
6248
6249 prev_insn_cc0 = this_insn_cc0;
6250 prev_insn_cc0_mode = this_insn_cc0_mode;
7afe21cc 6251 prev_insn = insn;
4977bab6 6252#endif
7afe21cc
RK
6253}
6254\f
a4c6502a 6255/* Remove from the hash table all expressions that reference memory. */
14a774a9 6256
7afe21cc 6257static void
7080f735 6258invalidate_memory (void)
7afe21cc 6259{
b3694847
SS
6260 int i;
6261 struct table_elt *p, *next;
7afe21cc 6262
9b1549b8 6263 for (i = 0; i < HASH_SIZE; i++)
9ae8ffe7
JL
6264 for (p = table[i]; p; p = next)
6265 {
6266 next = p->next_same_hash;
6267 if (p->in_memory)
6268 remove_from_table (p, i);
6269 }
6270}
6271
14a774a9
RK
6272/* If ADDR is an address that implicitly affects the stack pointer, return
6273 1 and update the register tables to show the effect. Else, return 0. */
6274
9ae8ffe7 6275static int
7080f735 6276addr_affects_sp_p (rtx addr)
9ae8ffe7 6277{
ec8e098d 6278 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
f8cfc6aa 6279 && REG_P (XEXP (addr, 0))
9ae8ffe7 6280 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7afe21cc 6281 {
30f72379 6282 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
46081bb3
SH
6283 {
6284 REG_TICK (STACK_POINTER_REGNUM)++;
6285 /* Is it possible to use a subreg of SP? */
6286 SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6287 }
9ae8ffe7
JL
6288
6289 /* This should be *very* rare. */
6290 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6291 invalidate (stack_pointer_rtx, VOIDmode);
14a774a9 6292
9ae8ffe7 6293 return 1;
7afe21cc 6294 }
14a774a9 6295
9ae8ffe7 6296 return 0;
7afe21cc
RK
6297}
6298
6299/* Perform invalidation on the basis of everything about an insn
6300 except for invalidating the actual places that are SET in it.
6301 This includes the places CLOBBERed, and anything that might
6302 alias with something that is SET or CLOBBERed.
6303
7afe21cc
RK
6304 X is the pattern of the insn. */
6305
6306static void
7080f735 6307invalidate_from_clobbers (rtx x)
7afe21cc 6308{
7afe21cc
RK
6309 if (GET_CODE (x) == CLOBBER)
6310 {
6311 rtx ref = XEXP (x, 0);
9ae8ffe7
JL
6312 if (ref)
6313 {
f8cfc6aa 6314 if (REG_P (ref) || GET_CODE (ref) == SUBREG
3c0cb5de 6315 || MEM_P (ref))
9ae8ffe7
JL
6316 invalidate (ref, VOIDmode);
6317 else if (GET_CODE (ref) == STRICT_LOW_PART
6318 || GET_CODE (ref) == ZERO_EXTRACT)
6319 invalidate (XEXP (ref, 0), GET_MODE (ref));
6320 }
7afe21cc
RK
6321 }
6322 else if (GET_CODE (x) == PARALLEL)
6323 {
b3694847 6324 int i;
7afe21cc
RK
6325 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6326 {
b3694847 6327 rtx y = XVECEXP (x, 0, i);
7afe21cc
RK
6328 if (GET_CODE (y) == CLOBBER)
6329 {
6330 rtx ref = XEXP (y, 0);
f8cfc6aa 6331 if (REG_P (ref) || GET_CODE (ref) == SUBREG
3c0cb5de 6332 || MEM_P (ref))
9ae8ffe7
JL
6333 invalidate (ref, VOIDmode);
6334 else if (GET_CODE (ref) == STRICT_LOW_PART
6335 || GET_CODE (ref) == ZERO_EXTRACT)
6336 invalidate (XEXP (ref, 0), GET_MODE (ref));
7afe21cc
RK
6337 }
6338 }
6339 }
6340}
6341\f
/* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
   and replace any registers in them with either an equivalent constant
   or the canonical form of the register.  If we are inside an address,
   only do this if the address remains valid.

   OBJECT is 0 except when within a MEM in which case it is the MEM.

   Return the replacement for X.  */

static rtx
cse_process_notes (rtx x, rtx object)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i;

  switch (code)
    {
    /* Constants and other leaf rtxes cannot contain registers;
       return them unchanged.  */
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case CC0:
    case LO_SUM:
      return x;

    case MEM:
      /* Recurse into the address, passing X as OBJECT so replacements
	 are only kept if the containing address stays valid.  */
      validate_change (x, &XEXP (x, 0),
		       cse_process_notes (XEXP (x, 0), x), 0);
      return x;

    case EXPR_LIST:
    case INSN_LIST:
      /* Walk the note list; only the value of a REG_EQUAL note itself
	 is rewritten, but the rest of the list is still traversed.  */
      if (REG_NOTE_KIND (x) == REG_EQUAL)
	XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
      if (XEXP (x, 1))
	XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
      return x;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case SUBREG:
      {
	rtx new = cse_process_notes (XEXP (x, 0), object);
	/* We don't substitute VOIDmode constants into these rtx,
	   since they would impede folding.  */
	if (GET_MODE (new) != VOIDmode)
	  validate_change (object, &XEXP (x, 0), new, 0);
	return x;
      }

    case REG:
      i = REG_QTY (REGNO (x));

      /* Return a constant or a constant register.  */
      if (REGNO_QTY_VALID_P (REGNO (x)))
	{
	  struct qty_table_elem *ent = &qty_table[i];

	  if (ent->const_rtx != NULL_RTX
	      && (CONSTANT_P (ent->const_rtx)
		  || REG_P (ent->const_rtx)))
	    {
	      /* gen_lowpart may fail (return 0), e.g. for a mode
		 mismatch; fall through to canonicalization then.  */
	      rtx new = gen_lowpart (GET_MODE (x), ent->const_rtx);
	      if (new)
		return new;
	    }
	}

      /* Otherwise, canonicalize this register.  */
      return canon_reg (x, NULL_RTX);

    default:
      break;
    }

  /* Generic case: recurse into every rtx operand of X.  */
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      validate_change (object, &XEXP (x, i),
		       cse_process_notes (XEXP (x, i), object), 0);

  return x;
}
6428\f
8b3686ed
RK
/* Process one SET of an insn that was skipped.  We ignore CLOBBERs
   since they are done elsewhere.  This function is called via note_stores.

   NOTE: the order of the tests below matters: addr_affects_sp_p has the
   side effect of bumping REG_TICK for the stack pointer, so it must run
   for MEM destinations even when the store turns out to be ignorable.  */

static void
invalidate_skipped_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
{
  enum rtx_code code = GET_CODE (dest);

  if (code == MEM
      && ! addr_affects_sp_p (dest)	/* If this is not a stack push ...  */
      /* There are times when an address can appear varying and be a PLUS
	 during this scan when it would be a fixed address were we to know
	 the proper equivalences.  So invalidate all memory if there is
	 a BLKmode or nonscalar memory reference or a reference to a
	 variable address.  */
      && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
	  || cse_rtx_varies_p (XEXP (dest, 0), 0)))
    {
      invalidate_memory ();
      return;
    }

  /* CLOBBERs are handled elsewhere; cc0 and pc need no table entry.  */
  if (GET_CODE (set) == CLOBBER
      || CC0_P (dest)
      || dest == pc_rtx)
    return;

  if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
    invalidate (XEXP (dest, 0), GET_MODE (dest));
  else if (code == REG || code == SUBREG || code == MEM)
    invalidate (dest, VOIDmode);
}
6461
6462/* Invalidate all insns from START up to the end of the function or the
6463 next label. This called when we wish to CSE around a block that is
6464 conditionally executed. */
6465
6466static void
7080f735 6467invalidate_skipped_block (rtx start)
8b3686ed
RK
6468{
6469 rtx insn;
8b3686ed 6470
4b4bf941 6471 for (insn = start; insn && !LABEL_P (insn);
8b3686ed
RK
6472 insn = NEXT_INSN (insn))
6473 {
2c3c49de 6474 if (! INSN_P (insn))
8b3686ed
RK
6475 continue;
6476
4b4bf941 6477 if (CALL_P (insn))
8b3686ed 6478 {
24a28584 6479 if (! CONST_OR_PURE_CALL_P (insn))
9ae8ffe7 6480 invalidate_memory ();
8b3686ed 6481 invalidate_for_call ();
8b3686ed
RK
6482 }
6483
97577254 6484 invalidate_from_clobbers (PATTERN (insn));
84832317 6485 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
8b3686ed
RK
6486 }
6487}
6488\f
7afe21cc
RK
/* Find the end of INSN's basic block and return its range,
   the total number of SETs in all the insns of the block, the last insn of the
   block, and the branch path.

   The branch path indicates which branches should be followed.  If a nonzero
   path size is specified, the block should be rescanned and a different set
   of branches will be taken.  The branch path is only used if
   FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.

   DATA is a pointer to a struct cse_basic_block_data, defined below, that is
   used to describe the block.  It is filled in with the information about
   the current block.  The incoming structure's branch path, if any, is used
   to construct the output branch path.  */

static void
cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
			int follow_jumps, int skip_blocks)
{
  rtx p = insn, q;
  int nsets = 0;
  int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
  rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
  int path_size = data->path_size;
  int path_entry = 0;
  int i;

  /* Update the previous branch path, if any.  If the last branch was
     previously PATH_TAKEN, mark it PATH_NOT_TAKEN.
     If it was previously PATH_NOT_TAKEN,
     shorten the path by one and look at the previous branch.  We know that
     at least one branch must have been taken if PATH_SIZE is nonzero.  */
  while (path_size > 0)
    {
      if (data->path[path_size - 1].status != PATH_NOT_TAKEN)
	{
	  data->path[path_size - 1].status = PATH_NOT_TAKEN;
	  break;
	}
      else
	path_size--;
    }

  /* If the first instruction is marked with QImode, that means we've
     already processed this block.  Our caller will look at DATA->LAST
     to figure out where to go next.  We want to return the next block
     in the instruction stream, not some branched-to block somewhere
     else.  We accomplish this by pretending our caller forbade us to
     follow jumps or skip blocks.  */
  if (GET_MODE (insn) == QImode)
    follow_jumps = skip_blocks = 0;

  /* Scan to end of this basic block.  */
  while (p && !LABEL_P (p))
    {
      /* Don't cse over a call to setjmp; on some machines (eg VAX)
	 the regs restored by the longjmp come from
	 a later time than the setjmp.  */
      if (PREV_INSN (p) && CALL_P (PREV_INSN (p))
	  && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
	break;

      /* A PARALLEL can have lots of SETs in it,
	 especially if it is really an ASM_OPERANDS.  */
      if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
	nsets += XVECLEN (PATTERN (p), 0);
      else if (!NOTE_P (p))
	nsets += 1;

      /* Ignore insns made by CSE; they cannot affect the boundaries of
	 the basic block.  */

      if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
	high_cuid = INSN_CUID (p);
      if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
	low_cuid = INSN_CUID (p);

      /* See if this insn is in our branch path.  If it is and we are to
	 take it, do so.  */
      if (path_entry < path_size && data->path[path_entry].branch == p)
	{
	  if (data->path[path_entry].status != PATH_NOT_TAKEN)
	    p = JUMP_LABEL (p);

	  /* Point to next entry in path, if any.  */
	  path_entry++;
	}

      /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
	 was specified, we haven't reached our maximum path length, there are
	 insns following the target of the jump, this is the only use of the
	 jump label, and the target label is preceded by a BARRIER.

	 Alternatively, we can follow the jump if it branches around a
	 block of code and there are no other branches into the block.
	 In this case invalidate_skipped_block will be called to invalidate any
	 registers set in the block when following the jump.  */

      else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
	       && JUMP_P (p)
	       && GET_CODE (PATTERN (p)) == SET
	       && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
	       && JUMP_LABEL (p) != 0
	       && LABEL_NUSES (JUMP_LABEL (p)) == 1
	       && NEXT_INSN (JUMP_LABEL (p)) != 0)
	{
	  /* Scan backwards from just before the jump target, looking
	     for the insn (a BARRIER, or the start of the skipped block)
	     that tells us which of the two cases below applies.  */
	  for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
	    if ((!NOTE_P (q)
		 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
		 || (PREV_INSN (q) && CALL_P (PREV_INSN (q))
		     && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
		&& (!LABEL_P (q) || LABEL_NUSES (q) != 0))
	      break;

	  /* If we ran into a BARRIER, this code is an extension of the
	     basic block when the branch is taken.  */
	  if (follow_jumps && q != 0 && BARRIER_P (q))
	    {
	      /* Don't allow ourself to keep walking around an
		 always-executed loop.  */
	      if (next_real_insn (q) == next)
		{
		  p = NEXT_INSN (p);
		  continue;
		}

	      /* Similarly, don't put a branch in our path more than once.  */
	      for (i = 0; i < path_entry; i++)
		if (data->path[i].branch == p)
		  break;

	      if (i != path_entry)
		break;

	      data->path[path_entry].branch = p;
	      data->path[path_entry++].status = PATH_TAKEN;

	      /* This branch now ends our path.  It was possible that we
		 didn't see this branch the last time around (when the
		 insn in front of the target was a JUMP_INSN that was
		 turned into a no-op).  */
	      path_size = path_entry;

	      p = JUMP_LABEL (p);
	      /* Mark block so we won't scan it again later.  */
	      PUT_MODE (NEXT_INSN (p), QImode);
	    }
	  /* Detect a branch around a block of code.  */
	  else if (skip_blocks && q != 0 && !LABEL_P (q))
	    {
	      rtx tmp;

	      if (next_real_insn (q) == next)
		{
		  p = NEXT_INSN (p);
		  continue;
		}

	      for (i = 0; i < path_entry; i++)
		if (data->path[i].branch == p)
		  break;

	      if (i != path_entry)
		break;

	      /* This is no_labels_between_p (p, q) with an added check for
		 reaching the end of a function (in case Q precedes P).  */
	      for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
		if (LABEL_P (tmp))
		  break;

	      if (tmp == q)
		{
		  data->path[path_entry].branch = p;
		  data->path[path_entry++].status = PATH_AROUND;

		  path_size = path_entry;

		  p = JUMP_LABEL (p);
		  /* Mark block so we won't scan it again later.  */
		  PUT_MODE (NEXT_INSN (p), QImode);
		}
	    }
	}
      p = NEXT_INSN (p);
    }

  data->low_cuid = low_cuid;
  data->high_cuid = high_cuid;
  data->nsets = nsets;
  data->last = p;

  /* If all jumps in the path are not taken, set our path length to zero
     so a rescan won't be done.  */
  for (i = path_size - 1; i >= 0; i--)
    if (data->path[i].status != PATH_NOT_TAKEN)
      break;

  if (i == -1)
    data->path_size = 0;
  else
    data->path_size = path_size;

  /* End the current branch path.  */
  data->path[path_size].branch = 0;
}
6694\f
7afe21cc
RK
/* Perform cse on the instructions of a function.
   F is the first instruction.
   NREGS is one plus the highest pseudo-reg number used in the instruction.
   FILE, if nonzero, receives a dump of the blocks processed.

   Returns 1 if jump_optimize should be redone due to simplifications
   in conditional jump instructions.  */

int
cse_main (rtx f, int nregs, FILE *file)
{
  struct cse_basic_block_data val;
  rtx insn = f;
  int i;

  init_cse_reg_info (nregs);

  /* One shared path buffer is reused for every block in the loop below.  */
  val.path = xmalloc (sizeof (struct branch_path)
		      * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));

  cse_jumps_altered = 0;
  recorded_label_ref = 0;
  constant_pool_entries_cost = 0;
  constant_pool_entries_regcost = 0;
  val.path_size = 0;
  rtl_hooks = cse_rtl_hooks;

  init_recog ();
  init_alias_analysis ();

  reg_eqv_table = xmalloc (nregs * sizeof (struct reg_eqv_elem));

  /* Find the largest uid.  */

  max_uid = get_max_uid ();
  uid_cuid = xcalloc (max_uid + 1, sizeof (int));

  /* Compute the mapping from uids to cuids.
     CUIDs are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.
     Don't assign cuids to line-number NOTEs, so that the distance in cuids
     between two insns is not affected by -g.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (!NOTE_P (insn)
	  || NOTE_LINE_NUMBER (insn) < 0)
	INSN_CUID (insn) = ++i;
      else
	/* Give a line number note the same cuid as preceding insn.  */
	INSN_CUID (insn) = i;
    }

  /* Loop over basic blocks.
     Compute the maximum number of qty's needed for each basic block
     (which is 2 for each SET).  */
  insn = f;
  while (insn)
    {
      cse_altered = 0;
      cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps,
			      flag_cse_skip_blocks);

      /* If this basic block was already processed or has no sets, skip it.  */
      if (val.nsets == 0 || GET_MODE (insn) == QImode)
	{
	  PUT_MODE (insn, VOIDmode);
	  insn = (val.last ? NEXT_INSN (val.last) : 0);
	  val.path_size = 0;
	  continue;
	}

      cse_basic_block_start = val.low_cuid;
      cse_basic_block_end = val.high_cuid;
      max_qty = val.nsets * 2;

      if (file)
	fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
		 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
		 val.nsets);

      /* Make MAX_QTY bigger to give us room to optimize
	 past the end of this basic block, if that should prove useful.  */
      if (max_qty < 500)
	max_qty = 500;

      /* If this basic block is being extended by following certain jumps,
	 (see `cse_end_of_basic_block'), we reprocess the code from the start.
	 Otherwise, we start after this basic block.  */
      if (val.path_size > 0)
	cse_basic_block (insn, val.last, val.path);
      else
	{
	  int old_cse_jumps_altered = cse_jumps_altered;
	  rtx temp;

	  /* When cse changes a conditional jump to an unconditional
	     jump, we want to reprocess the block, since it will give
	     us a new branch path to investigate.  */
	  cse_jumps_altered = 0;
	  temp = cse_basic_block (insn, val.last, val.path);
	  if (cse_jumps_altered == 0
	      || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
	    insn = temp;

	  cse_jumps_altered |= old_cse_jumps_altered;
	}

      /* Collect garbage only if this pass actually created new rtl.  */
      if (cse_altered)
	ggc_collect ();

#ifdef USE_C_ALLOCA
      alloca (0);
#endif
    }

  /* Clean up.  */
  end_alias_analysis ();
  free (uid_cuid);
  free (reg_eqv_table);
  free (val.path);
  rtl_hooks = general_rtl_hooks;

  return cse_jumps_altered || recorded_label_ref;
}
6819
/* Process a single basic block.  FROM and TO are the limits of the basic
   block.  NEXT_BRANCH points to the branch path when following jumps or
   a null path when not following jumps.

   Returns the insn at which scanning should resume, or 0 if the scan
   reached the end of the function.  */

static rtx
cse_basic_block (rtx from, rtx to, struct branch_path *next_branch)
{
  rtx insn;
  int to_usage = 0;
  rtx libcall_insn = NULL_RTX;
  int num_insns = 0;
  /* State for REG_NO_CONFLICT block tracking: 1 inside such a block,
     -1 on its closing REG_RETVAL insn, 0 otherwise.  */
  int no_conflict = 0;

  /* Allocate the space needed by qty_table.  */
  qty_table = xmalloc (max_qty * sizeof (struct qty_table_elem));

  new_basic_block ();

  /* TO might be a label.  If so, protect it from being deleted.  */
  if (to != 0 && LABEL_P (to))
    ++LABEL_NUSES (to);

  for (insn = from; insn != to; insn = NEXT_INSN (insn))
    {
      enum rtx_code code = GET_CODE (insn);

      /* If we have processed 1,000 insns, flush the hash table to
	 avoid extreme quadratic behavior.  We must not include NOTEs
	 in the count since there may be more of them when generating
	 debugging information.  If we clear the table at different
	 times, code generated with -g -O might be different than code
	 generated with -O but not -g.

	 ??? This is a real kludge and needs to be done some other way.
	 Perhaps for 2.9.  */
      if (code != NOTE && num_insns++ > 1000)
	{
	  flush_hash_table ();
	  num_insns = 0;
	}

      /* See if this is a branch that is part of the path.  If so, and it is
	 to be taken, do so.  */
      if (next_branch->branch == insn)
	{
	  enum taken status = next_branch++->status;
	  if (status != PATH_NOT_TAKEN)
	    {
	      if (status == PATH_TAKEN)
		record_jump_equiv (insn, 1);
	      else
		invalidate_skipped_block (NEXT_INSN (insn));

	      /* Set the last insn as the jump insn; it doesn't affect cc0.
		 Then follow this branch.  */
#ifdef HAVE_cc0
	      prev_insn_cc0 = 0;
	      prev_insn = insn;
#endif
	      insn = JUMP_LABEL (insn);
	      continue;
	    }
	}

      if (GET_MODE (insn) == QImode)
	PUT_MODE (insn, VOIDmode);

      if (GET_RTX_CLASS (code) == RTX_INSN)
	{
	  rtx p;

	  /* Process notes first so we have all notes in canonical forms when
	     looking for duplicate operations.  */

	  if (REG_NOTES (insn))
	    REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);

	  /* Track when we are inside a LIBCALL block.  Inside such a block,
	     we do not want to record destinations.  The last insn of a
	     LIBCALL block is not considered to be part of the block, since
	     its destination is the result of the block and hence should be
	     recorded.  */

	  if (REG_NOTES (insn) != 0)
	    {
	      if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
		libcall_insn = XEXP (p, 0);
	      else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
		{
		  /* Keep libcall_insn for the last SET insn of a no-conflict
		     block to prevent changing the destination.  */
		  if (! no_conflict)
		    libcall_insn = 0;
		  else
		    no_conflict = -1;
		}
	      else if (find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
		no_conflict = 1;
	    }

	  cse_insn (insn, libcall_insn);

	  if (no_conflict == -1)
	    {
	      libcall_insn = 0;
	      no_conflict = 0;
	    }

	  /* If we haven't already found an insn where we added a LABEL_REF,
	     check this one.  */
	  if (NONJUMP_INSN_P (insn) && ! recorded_label_ref
	      && for_each_rtx (&PATTERN (insn), check_for_label_ref,
			       (void *) insn))
	    recorded_label_ref = 1;
	}

      /* If INSN is now an unconditional jump, skip to the end of our
	 basic block by pretending that we just did the last insn in the
	 basic block.  If we are jumping to the end of our block, show
	 that we can have one usage of TO.  */

      if (any_uncondjump_p (insn))
	{
	  if (to == 0)
	    {
	      free (qty_table);
	      return 0;
	    }

	  if (JUMP_LABEL (insn) == to)
	    to_usage = 1;

	  /* Maybe TO was deleted because the jump is unconditional.
	     If so, there is nothing left in this basic block.  */
	  /* ??? Perhaps it would be smarter to set TO
	     to whatever follows this insn,
	     and pretend the basic block had always ended here.  */
	  if (INSN_DELETED_P (to))
	    break;

	  insn = PREV_INSN (to);
	}

      /* See if it is ok to keep on going past the label
	 which used to end our basic block.  Remember that we incremented
	 the count of that label, so we decrement it here.  If we made
	 a jump unconditional, TO_USAGE will be one; in that case, we don't
	 want to count the use in that jump.  */

      if (to != 0 && NEXT_INSN (insn) == to
	  && LABEL_P (to) && --LABEL_NUSES (to) == to_usage)
	{
	  struct cse_basic_block_data val;
	  rtx prev;

	  insn = NEXT_INSN (to);

	  /* If TO was the last insn in the function, we are done.  */
	  if (insn == 0)
	    {
	      free (qty_table);
	      return 0;
	    }

	  /* If TO was preceded by a BARRIER we are done with this block
	     because it has no continuation.  */
	  prev = prev_nonnote_insn (to);
	  if (prev && BARRIER_P (prev))
	    {
	      free (qty_table);
	      return insn;
	    }

	  /* Find the end of the following block.  Note that we won't be
	     following branches in this case.  */
	  to_usage = 0;
	  val.path_size = 0;
	  val.path = xmalloc (sizeof (struct branch_path)
			      * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
	  cse_end_of_basic_block (insn, &val, 0, 0);
	  free (val.path);

	  /* If the tables we allocated have enough space left
	     to handle all the SETs in the next basic block,
	     continue through it.  Otherwise, return,
	     and that block will be scanned individually.  */
	  if (val.nsets * 2 + next_qty > max_qty)
	    break;

	  cse_basic_block_start = val.low_cuid;
	  cse_basic_block_end = val.high_cuid;
	  to = val.last;

	  /* Prevent TO from being deleted if it is a label.  */
	  if (to != 0 && LABEL_P (to))
	    ++LABEL_NUSES (to);

	  /* Back up so we process the first insn in the extension.  */
	  insn = PREV_INSN (insn);
	}
    }

  gcc_assert (next_qty <= max_qty);

  free (qty_table);

  return to ? NEXT_INSN (to) : 0;
}
7028\f
be8ac49a 7029/* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
45c23566 7030 there isn't a REG_LABEL note. Return one if so. DATA is the insn. */
be8ac49a
RK
7031
7032static int
7080f735 7033check_for_label_ref (rtx *rtl, void *data)
be8ac49a
RK
7034{
7035 rtx insn = (rtx) data;
7036
7037 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7038 we must rerun jump since it needs to place the note. If this is a
7039 LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
ec5c56db 7040 since no REG_LABEL will be added. */
be8ac49a 7041 return (GET_CODE (*rtl) == LABEL_REF
45c23566 7042 && ! LABEL_REF_NONLOCAL_P (*rtl)
4838c5ee 7043 && LABEL_P (XEXP (*rtl, 0))
be8ac49a
RK
7044 && INSN_UID (XEXP (*rtl, 0)) != 0
7045 && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7046}
7047\f
7afe21cc
RK
/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count, INCR is how much
   we count each register usage.

   COUNTS is indexed by register number; callers size it to cover every
   register that can appear in X.  INCR may be negative, which is how a
   caller retracts the counts for an insn it is about to delete or
   modify.  */

static void
count_reg_usage (rtx x, int *counts, int incr)
{
  enum rtx_code code;
  rtx note;
  const char *fmt;
  int i, j;

  if (x == 0)
    return;

  switch (code = GET_CODE (x))
    {
    case REG:
      counts[REGNO (x)] += incr;
      return;

    /* Constants and other leaves contain no register uses.  */
    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  The clobbered location itself is a set, not a
	 use, so a clobbered REG is deliberately not counted.  */
      if (MEM_P (XEXP (x, 0)))
	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, incr);
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.  */
      if (!REG_P (SET_DEST (x)))
	count_reg_usage (SET_DEST (x), counts, incr);
      count_reg_usage (SET_SRC (x), counts, incr);
      return;

    case CALL_INSN:
      /* A call also uses whatever CALL_INSN_FUNCTION_USAGE records.  */
      count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, incr);
      /* Fall through.  */

    case INSN:
    case JUMP_INSN:
      count_reg_usage (PATTERN (x), counts, incr);

      /* Things used in a REG_EQUAL note aren't dead since loop may try to
	 use them.  */

      note = find_reg_equal_equiv_note (x);
      if (note)
	{
	  rtx eqv = XEXP (note, 0);

	  if (GET_CODE (eqv) == EXPR_LIST)
	  /* This REG_EQUAL note describes the result of a function call.
	     Process all the arguments.  */
	    do
	      {
		count_reg_usage (XEXP (eqv, 0), counts, incr);
		eqv = XEXP (eqv, 1);
	      }
	    while (eqv && GET_CODE (eqv) == EXPR_LIST);
	  else
	    count_reg_usage (eqv, counts, incr);
	}
      return;

    case EXPR_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL
	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
	  /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
	     involving registers in the address.  */
	  || GET_CODE (XEXP (x, 0)) == CLOBBER)
	count_reg_usage (XEXP (x, 0), counts, incr);

      /* Always walk the rest of the list.  */
      count_reg_usage (XEXP (x, 1), counts, incr);
      return;

    case ASM_OPERANDS:
      /* Iterate over just the inputs, not the constraints as well.  */
      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, incr);
      return;

    case INSN_LIST:
      gcc_unreachable ();

    default:
      break;
    }

  /* Generic traversal for any code not handled above: recurse into
     every rtx ('e') and rtx vector ('E') operand.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	count_reg_usage (XEXP (x, i), counts, incr);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  count_reg_usage (XVECEXP (x, i, j), counts, incr);
    }
}
7157\f
4793dca1
JH
/* Return true if set is live.  A SET is dead when it is a no-op, or
   (with cc0) when it sets cc0 that no following insn reads, or when it
   sets an unused pseudo without side effects; COUNTS gives the number
   of remaining uses of each register (as computed by count_reg_usage).  */
static bool
set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
	    int *counts)
{
#ifdef HAVE_cc0
  rtx tem;
#endif

  /* A no-op set (dest := dest) is never live; fall through to the
     final "return false".  */
  if (set_noop_p (set))
    ;

#ifdef HAVE_cc0
  /* A set of cc0 is dead if the next real insn doesn't look at cc0.  */
  else if (GET_CODE (SET_DEST (set)) == CC0
	   && !side_effects_p (SET_SRC (set))
	   && ((tem = next_nonnote_insn (insn)) == 0
	       || !INSN_P (tem)
	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
    return false;
#endif
  /* Live if it sets something other than a pseudo register, if the
     destination still has uses, or if the source has side effects.  */
  else if (!REG_P (SET_DEST (set))
	   || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
	   || counts[REGNO (SET_DEST (set))] != 0
	   || side_effects_p (SET_SRC (set)))
    return true;
  return false;
}
7185
7186/* Return true if insn is live. */
7187
7188static bool
7080f735 7189insn_live_p (rtx insn, int *counts)
4793dca1
JH
7190{
7191 int i;
a3745024 7192 if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
a646f6cc
AH
7193 return true;
7194 else if (GET_CODE (PATTERN (insn)) == SET)
0021de69 7195 return set_live_p (PATTERN (insn), insn, counts);
4793dca1 7196 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
0021de69
DB
7197 {
7198 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7199 {
7200 rtx elt = XVECEXP (PATTERN (insn), 0, i);
4793dca1 7201
0021de69
DB
7202 if (GET_CODE (elt) == SET)
7203 {
7204 if (set_live_p (elt, insn, counts))
7205 return true;
7206 }
7207 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7208 return true;
7209 }
7210 return false;
7211 }
4793dca1
JH
7212 else
7213 return true;
7214}
7215
/* Return true if libcall is dead as a whole.  INSN is the insn carrying
   the REG_RETVAL note that ends the libcall block; COUNTS is the
   register-use array maintained by count_reg_usage.  On success the
   libcall result copy has been rewritten to use the REG_EQUAL value
   directly, making the rest of the block deletable.  */

static bool
dead_libcall_p (rtx insn, int *counts)
{
  rtx note, set, new;

  /* See if there's a REG_EQUAL note on this insn and try to
     replace the source with the REG_EQUAL expression.

     We assume that insns with REG_RETVALs can only be reg->reg
     copies at this point.  */
  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
  if (!note)
    return false;

  set = single_set (insn);
  if (!set)
    return false;

  /* Prefer a simplified form of the note's value, if any.  */
  new = simplify_rtx (XEXP (note, 0));
  if (!new)
    new = XEXP (note, 0);

  /* While changing insn, we must update the counts accordingly.  */
  count_reg_usage (insn, counts, -1);

  if (validate_change (insn, &SET_SRC (set), new, 0))
    {
      /* Re-count uses for the rewritten insn, then detach it from the
	 libcall block by dropping REG_RETVAL and the REG_EQUAL note.  */
      count_reg_usage (insn, counts, 1);
      remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
      remove_note (insn, note);
      return true;
    }

  /* If the constant can't be substituted directly, try it from a
     constant-pool MEM instead.  */
  if (CONSTANT_P (new))
    {
      new = force_const_mem (GET_MODE (SET_DEST (set)), new);
      if (new && validate_change (insn, &SET_SRC (set), new, 0))
	{
	  count_reg_usage (insn, counts, 1);
	  remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
	  remove_note (insn, note);
	  return true;
	}
    }

  /* Substitution failed; restore the counts we retracted above.  */
  count_reg_usage (insn, counts, 1);
  return false;
}
7266
7afe21cc
RK
/* Scan all the insns and delete any that are dead; i.e., they store a register
   that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in loop since it won't try to
   move dead invariants out of loops or make givs for dead quantities.  The
   remaining passes of the compilation are also sped up.

   INSNS is the insn chain to scan; NREG is the number of registers,
   i.e. one more than the largest register number in use.  Returns the
   number of insns deleted.  */

int
delete_trivially_dead_insns (rtx insns, int nreg)
{
  int *counts;			/* Uses per register, indexed by regno.  */
  rtx insn, prev;
  int in_libcall = 0, dead_libcall = 0;
  int ndead = 0;

  timevar_push (TV_DELETE_TRIVIALLY_DEAD);
  /* First count the number of times each register is used.  */
  counts = xcalloc (nreg, sizeof (int));
  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
    count_reg_usage (insn, counts, 1);

  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.

     The first jump optimization pass may leave a real insn as the last
     insn in the function.   We must not skip that insn or we may end
     up deleting code that is not really dead.  */
  insn = get_last_insn ();
  if (! INSN_P (insn))
    insn = prev_real_insn (insn);

  for (; insn; insn = prev)
    {
      int live_insn = 0;

      /* Fetch the predecessor now, before we possibly delete INSN.  */
      prev = prev_real_insn (insn);

      /* Don't delete any insns that are part of a libcall block unless
	 we can delete the whole libcall block.

	 Flow or loop might get confused if we did that.  Remember
	 that we are scanning backwards.  */
      if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
	{
	  /* REG_RETVAL marks the (last) insn of a libcall block; since
	     we scan backwards, we enter the block here.  */
	  in_libcall = 1;
	  live_insn = 1;
	  dead_libcall = dead_libcall_p (insn, counts);
	}
      else if (in_libcall)
	live_insn = ! dead_libcall;
      else
	live_insn = insn_live_p (insn, counts);

      /* If this is a dead insn, delete it and show registers in it aren't
	 being used.  */

      if (! live_insn)
	{
	  count_reg_usage (insn, counts, -1);
	  delete_insn_and_edges (insn);
	  ndead++;
	}

      /* REG_LIBCALL marks the first insn of the block, so (scanning
	 backwards) we leave libcall mode here.  */
      if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	{
	  in_libcall = 0;
	  dead_libcall = 0;
	}
    }

  if (dump_file && ndead)
    fprintf (dump_file, "Deleted %i trivially dead insns\n",
	     ndead);
  /* Clean up.  */
  free (counts);
  timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
  return ndead;
}
e129d93a
ILT
7347
7348/* This function is called via for_each_rtx. The argument, NEWREG, is
7349 a condition code register with the desired mode. If we are looking
7350 at the same register in a different mode, replace it with
7351 NEWREG. */
7352
7353static int
7354cse_change_cc_mode (rtx *loc, void *data)
7355{
fc188d37 7356 struct change_cc_mode_args* args = (struct change_cc_mode_args*)data;
e129d93a
ILT
7357
7358 if (*loc
f8cfc6aa 7359 && REG_P (*loc)
fc188d37
AK
7360 && REGNO (*loc) == REGNO (args->newreg)
7361 && GET_MODE (*loc) != GET_MODE (args->newreg))
e129d93a 7362 {
fc188d37
AK
7363 validate_change (args->insn, loc, args->newreg, 1);
7364
e129d93a
ILT
7365 return -1;
7366 }
7367 return 0;
7368}
7369
fc188d37
AK
7370/* Change the mode of any reference to the register REGNO (NEWREG) to
7371 GET_MODE (NEWREG) in INSN. */
7372
7373static void
7374cse_change_cc_mode_insn (rtx insn, rtx newreg)
7375{
7376 struct change_cc_mode_args args;
7377 int success;
7378
7379 if (!INSN_P (insn))
7380 return;
7381
7382 args.insn = insn;
7383 args.newreg = newreg;
7384
7385 for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
7386 for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, &args);
7387
7388 /* If the following assertion was triggered, there is most probably
7389 something wrong with the cc_modes_compatible back end function.
7390 CC modes only can be considered compatible if the insn - with the mode
7391 replaced by any of the compatible modes - can still be recognized. */
7392 success = apply_change_group ();
7393 gcc_assert (success);
7394}
7395
e129d93a
ILT
7396/* Change the mode of any reference to the register REGNO (NEWREG) to
7397 GET_MODE (NEWREG), starting at START. Stop before END. Stop at
2e802a6f 7398 any instruction which modifies NEWREG. */
e129d93a
ILT
7399
7400static void
7401cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
7402{
7403 rtx insn;
7404
7405 for (insn = start; insn != end; insn = NEXT_INSN (insn))
7406 {
7407 if (! INSN_P (insn))
7408 continue;
7409
2e802a6f 7410 if (reg_set_p (newreg, insn))
e129d93a
ILT
7411 return;
7412
fc188d37 7413 cse_change_cc_mode_insn (insn, newreg);
e129d93a
ILT
7414 }
7415}
7416
/* BB is a basic block which finishes with CC_REG as a condition code
   register which is set to CC_SRC.  Look through the successors of BB
   to find blocks which have a single predecessor (i.e., this one),
   and look through those blocks for an assignment to CC_REG which is
   equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
   permitted to change the mode of CC_SRC to a compatible mode.  This
   returns VOIDmode if no equivalent assignments were found.
   Otherwise it returns the mode which CC_SRC should wind up with.

   The main complexity in this function is handling the mode issues.
   We may have more than one duplicate which we can eliminate, and we
   try to find a mode which will work for multiple duplicates.  */

static enum machine_mode
cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
{
  bool found_equiv;		/* Whether any equivalent set was found.  */
  enum machine_mode mode;	/* Best mode found so far for CC_SRC.  */
  unsigned int insn_count;	/* Number of entries in INSNS/MODES.  */
  edge e;
  rtx insns[2];			/* Duplicate insns pending deletion.  */
  enum machine_mode modes[2];	/* Their original comparison modes.  */
  rtx last_insns[2];		/* End-of-block sentinels for each.  */
  unsigned int i;
  rtx newreg;
  edge_iterator ei;

  /* We expect to have two successors.  Look at both before picking
     the final mode for the comparison.  If we have more successors
     (i.e., some sort of table jump, although that seems unlikely),
     then we require all beyond the first two to use the same
     mode.  */

  found_equiv = false;
  mode = GET_MODE (cc_src);
  insn_count = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      rtx insn;
      rtx end;

      if (e->flags & EDGE_COMPLEX)
	continue;

      /* Only look at successors this block dominates trivially, i.e.
	 those with a single predecessor.  */
      if (EDGE_COUNT (e->dest->preds) != 1
	  || e->dest == EXIT_BLOCK_PTR)
	continue;

      end = NEXT_INSN (BB_END (e->dest));
      for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;

	  /* If CC_SRC is modified, we have to stop looking for
	     something which uses it.  */
	  if (modified_in_p (cc_src, insn))
	    break;

	  /* Check whether INSN sets CC_REG to CC_SRC.  */
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      bool found;
	      enum machine_mode set_mode;
	      enum machine_mode comp_mode;

	      found = false;
	      set_mode = GET_MODE (SET_SRC (set));
	      comp_mode = set_mode;
	      if (rtx_equal_p (cc_src, SET_SRC (set)))
		found = true;
	      else if (GET_CODE (cc_src) == COMPARE
		       && GET_CODE (SET_SRC (set)) == COMPARE
		       && mode != set_mode
		       && rtx_equal_p (XEXP (cc_src, 0),
				       XEXP (SET_SRC (set), 0))
		       && rtx_equal_p (XEXP (cc_src, 1),
				       XEXP (SET_SRC (set), 1)))

		{
		  /* Same comparison in a different mode: ask the back
		     end whether one mode can serve both.  */
		  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
		  if (comp_mode != VOIDmode
		      && (can_change_mode || comp_mode == mode))
		    found = true;
		}

	      if (found)
		{
		  found_equiv = true;
		  if (insn_count < ARRAY_SIZE (insns))
		    {
		      insns[insn_count] = insn;
		      modes[insn_count] = set_mode;
		      last_insns[insn_count] = end;
		      ++insn_count;

		      if (mode != comp_mode)
			{
			  gcc_assert (can_change_mode);
			  mode = comp_mode;

			  /* The modified insn will be re-recognized later.  */
			  PUT_MODE (cc_src, mode);
			}
		    }
		  else
		    {
		      if (set_mode != mode)
			{
			  /* We found a matching expression in the
			     wrong mode, but we don't have room to
			     store it in the array.  Punt.  This case
			     should be rare.  */
			  break;
			}
		      /* INSN sets CC_REG to a value equal to CC_SRC
			 with the right mode.  We can simply delete
			 it.  */
		      delete_insn (insn);
		    }

		  /* We found an instruction to delete.  Keep looking,
		     in the hopes of finding a three-way jump.  */
		  continue;
		}

	      /* We found an instruction which sets the condition
		 code, so don't look any farther.  */
	      break;
	    }

	  /* If INSN sets CC_REG in some other way, don't look any
	     farther.  */
	  if (reg_set_p (cc_reg, insn))
	    break;
	}

      /* If we fell off the bottom of the block, we can keep looking
	 through successors.  We pass CAN_CHANGE_MODE as false because
	 we aren't prepared to handle compatibility between the
	 further blocks and this block.  */
      if (insn == end)
	{
	  enum machine_mode submode;

	  submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
	  if (submode != VOIDmode)
	    {
	      gcc_assert (submode == mode);
	      found_equiv = true;
	      can_change_mode = false;
	    }
	}
    }

  if (! found_equiv)
    return VOIDmode;

  /* Now INSN_COUNT is the number of instructions we found which set
     CC_REG to a value equivalent to CC_SRC.  The instructions are in
     INSNS.  The modes used by those instructions are in MODES.  */

  newreg = NULL_RTX;
  for (i = 0; i < insn_count; ++i)
    {
      if (modes[i] != mode)
	{
	  /* We need to change the mode of CC_REG in INSNS[i] and
	     subsequent instructions.  */
	  if (! newreg)
	    {
	      if (GET_MODE (cc_reg) == mode)
		newreg = cc_reg;
	      else
		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
	    }
	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
				    newreg);
	}

      delete_insn (insns[i]);
    }

  return mode;
}
7607
/* If we have a fixed condition code register (or two), walk through
   the instructions and try to eliminate duplicate assignments.

   For each basic block ending in a conditional jump on a fixed CC
   register, find the insn that sets the register and call cse_cc_succs
   to delete equivalent settings in single-predecessor successors,
   adjusting the comparison mode when the target allows it.  */

void
cse_condition_code_reg (void)
{
  unsigned int cc_regno_1;
  unsigned int cc_regno_2;
  rtx cc_reg_1;
  rtx cc_reg_2;
  basic_block bb;

  /* Nothing to do unless the target declares fixed CC registers.  */
  if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
    return;

  cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
  if (cc_regno_2 != INVALID_REGNUM)
    cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
  else
    cc_reg_2 = NULL_RTX;

  FOR_EACH_BB (bb)
    {
      rtx last_insn;
      rtx cc_reg;		/* Which fixed CC register the jump uses.  */
      rtx insn;
      rtx cc_src_insn;		/* The insn that sets CC_REG.  */
      rtx cc_src;		/* The value CC_REG is set to.  */
      enum machine_mode mode;
      enum machine_mode orig_mode;

      /* Look for blocks which end with a conditional jump based on a
	 condition code register.  Then look for the instruction which
	 sets the condition code register.  Then look through the
	 successor blocks for instructions which set the condition
	 code register to the same value.  There are other possible
	 uses of the condition code register, but these are by far the
	 most common and the ones which we are most likely to be able
	 to optimize.  */

      last_insn = BB_END (bb);
      if (!JUMP_P (last_insn))
	continue;

      if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
	cc_reg = cc_reg_1;
      else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
	cc_reg = cc_reg_2;
      else
	continue;

      /* Scan backwards within the block for the single_set that sets
	 CC_REG; give up if it is set some other way first.  */
      cc_src_insn = NULL_RTX;
      cc_src = NULL_RTX;
      for (insn = PREV_INSN (last_insn);
	   insn && insn != PREV_INSN (BB_HEAD (bb));
	   insn = PREV_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      cc_src_insn = insn;
	      cc_src = SET_SRC (set);
	      break;
	    }
	  else if (reg_set_p (cc_reg, insn))
	    break;
	}

      if (! cc_src_insn)
	continue;

      /* CC_SRC must still have its value at the end of the block.  */
      if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
	continue;

      /* Now CC_REG is a condition code register used for a
	 conditional jump at the end of the block, and CC_SRC, in
	 CC_SRC_INSN, is the value to which that condition code
	 register is set, and CC_SRC is still meaningful at the end of
	 the basic block.  */

      orig_mode = GET_MODE (cc_src);
      mode = cse_cc_succs (bb, cc_reg, cc_src, true);
      if (mode != VOIDmode)
	{
	  /* cse_cc_succs may have changed CC_SRC's mode in place; it
	     must report that same mode back to us.  */
	  gcc_assert (mode == GET_MODE (cc_src));
	  if (mode != orig_mode)
	    {
	      rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));

	      cse_change_cc_mode_insn (cc_src_insn, newreg);

	      /* Do the same in the following insns that use the
		 current value of CC_REG within BB.  */
	      cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
					NEXT_INSN (last_insn),
					newreg);
	    }
	}
    }
}