]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cse.c
i386.md ("*movtf_internal): Penalize moves to and from integer registers.
[thirdparty/gcc.git] / gcc / cse.c
CommitLineData
7afe21cc 1/* Common subexpression elimination for GNU compiler.
5e7b4e25 2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
7e7e28c7
AO
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
7afe21cc 5
1322177d 6This file is part of GCC.
7afe21cc 7
1322177d
LB
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 2, or (at your option) any later
11version.
7afe21cc 12
1322177d
LB
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
7afe21cc
RK
17
18You should have received a copy of the GNU General Public License
1322177d 19along with GCC; see the file COPYING. If not, write to the Free
366ccddb
KC
20Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
2102110-1301, USA. */
7afe21cc 22
7afe21cc 23#include "config.h"
670ee920
KG
24/* stdio.h must precede rtl.h for FFS. */
25#include "system.h"
4977bab6
ZW
26#include "coretypes.h"
27#include "tm.h"
7afe21cc 28#include "rtl.h"
6baf1cc8 29#include "tm_p.h"
7afe21cc 30#include "hard-reg-set.h"
7932a3db 31#include "regs.h"
630c79be 32#include "basic-block.h"
7afe21cc
RK
33#include "flags.h"
34#include "real.h"
35#include "insn-config.h"
36#include "recog.h"
49ad7cfa 37#include "function.h"
956d6950 38#include "expr.h"
50b2596f
KG
39#include "toplev.h"
40#include "output.h"
1497faf6 41#include "ggc.h"
3dec4024 42#include "timevar.h"
26771da7 43#include "except.h"
3c50106f 44#include "target.h"
9bf8cfbf 45#include "params.h"
2f93eea8 46#include "rtlhooks-def.h"
ef330312 47#include "tree-pass.h"
7afe21cc
RK
48
49/* The basic idea of common subexpression elimination is to go
50 through the code, keeping a record of expressions that would
51 have the same value at the current scan point, and replacing
52 expressions encountered with the cheapest equivalent expression.
53
54 It is too complicated to keep track of the different possibilities
e48a7fbe
JL
55 when control paths merge in this code; so, at each label, we forget all
56 that is known and start fresh. This can be described as processing each
57 extended basic block separately. We have a separate pass to perform
58 global CSE.
59
60 Note CSE can turn a conditional or computed jump into a nop or
61 an unconditional jump. When this occurs we arrange to run the jump
62 optimizer after CSE to delete the unreachable code.
7afe21cc
RK
63
64 We use two data structures to record the equivalent expressions:
1bb98cec
DM
65 a hash table for most expressions, and a vector of "quantity
66 numbers" to record equivalent (pseudo) registers.
7afe21cc
RK
67
68 The use of the special data structure for registers is desirable
69 because it is faster. It is possible because registers references
70 contain a fairly small number, the register number, taken from
71 a contiguously allocated series, and two register references are
72 identical if they have the same number. General expressions
73 do not have any such thing, so the only way to retrieve the
74 information recorded on an expression other than a register
75 is to keep it in a hash table.
76
77Registers and "quantity numbers":
278a83b2 78
7afe21cc
RK
79 At the start of each basic block, all of the (hardware and pseudo)
80 registers used in the function are given distinct quantity
81 numbers to indicate their contents. During scan, when the code
82 copies one register into another, we copy the quantity number.
83 When a register is loaded in any other way, we allocate a new
84 quantity number to describe the value generated by this operation.
459281be 85 `REG_QTY (N)' records what quantity register N is currently thought
7afe21cc
RK
86 of as containing.
87
08a69267 88 All real quantity numbers are greater than or equal to zero.
459281be 89 If register N has not been assigned a quantity, `REG_QTY (N)' will
08a69267 90 equal -N - 1, which is always negative.
7afe21cc 91
08a69267
RS
92 Quantity numbers below zero do not exist and none of the `qty_table'
93 entries should be referenced with a negative index.
7afe21cc
RK
94
95 We also maintain a bidirectional chain of registers for each
1bb98cec
DM
96 quantity number. The `qty_table` members `first_reg' and `last_reg',
97 and `reg_eqv_table' members `next' and `prev' hold these chains.
7afe21cc
RK
98
99 The first register in a chain is the one whose lifespan is least local.
100 Among equals, it is the one that was seen first.
101 We replace any equivalent register with that one.
102
103 If two registers have the same quantity number, it must be true that
1bb98cec 104 REG expressions with qty_table `mode' must be in the hash table for both
7afe21cc
RK
105 registers and must be in the same class.
106
107 The converse is not true. Since hard registers may be referenced in
108 any mode, two REG expressions might be equivalent in the hash table
109 but not have the same quantity number if the quantity number of one
110 of the registers is not the same mode as those expressions.
278a83b2 111
7afe21cc
RK
112Constants and quantity numbers
113
114 When a quantity has a known constant value, that value is stored
1bb98cec 115 in the appropriate qty_table `const_rtx'. This is in addition to
7afe21cc
RK
116 putting the constant in the hash table as is usual for non-regs.
117
d45cf215 118 Whether a reg or a constant is preferred is determined by the configuration
7afe21cc
RK
119 macro CONST_COSTS and will often depend on the constant value. In any
120 event, expressions containing constants can be simplified, by fold_rtx.
121
122 When a quantity has a known nearly constant value (such as an address
1bb98cec
DM
123 of a stack slot), that value is stored in the appropriate qty_table
124 `const_rtx'.
7afe21cc
RK
125
126 Integer constants don't have a machine mode. However, cse
127 determines the intended machine mode from the destination
128 of the instruction that moves the constant. The machine mode
129 is recorded in the hash table along with the actual RTL
130 constant expression so that different modes are kept separate.
131
132Other expressions:
133
134 To record known equivalences among expressions in general
135 we use a hash table called `table'. It has a fixed number of buckets
136 that contain chains of `struct table_elt' elements for expressions.
137 These chains connect the elements whose expressions have the same
138 hash codes.
139
140 Other chains through the same elements connect the elements which
141 currently have equivalent values.
142
143 Register references in an expression are canonicalized before hashing
1bb98cec 144 the expression. This is done using `reg_qty' and qty_table `first_reg'.
7afe21cc
RK
145 The hash code of a register reference is computed using the quantity
146 number, not the register number.
147
148 When the value of an expression changes, it is necessary to remove from the
149 hash table not just that expression but all expressions whose values
150 could be different as a result.
151
152 1. If the value changing is in memory, except in special cases
153 ANYTHING referring to memory could be changed. That is because
154 nobody knows where a pointer does not point.
155 The function `invalidate_memory' removes what is necessary.
156
157 The special cases are when the address is constant or is
158 a constant plus a fixed register such as the frame pointer
159 or a static chain pointer. When such addresses are stored in,
160 we can tell exactly which other such addresses must be invalidated
161 due to overlap. `invalidate' does this.
162 All expressions that refer to non-constant
163 memory addresses are also invalidated. `invalidate_memory' does this.
164
165 2. If the value changing is a register, all expressions
166 containing references to that register, and only those,
167 must be removed.
168
169 Because searching the entire hash table for expressions that contain
170 a register is very slow, we try to figure out when it isn't necessary.
171 Precisely, this is necessary only when expressions have been
172 entered in the hash table using this register, and then the value has
173 changed, and then another expression wants to be added to refer to
174 the register's new value. This sequence of circumstances is rare
175 within any one basic block.
176
459281be
KH
177 `REG_TICK' and `REG_IN_TABLE', accessors for members of
178 cse_reg_info, are used to detect this case. REG_TICK (i) is
179 incremented whenever a value is stored in register i.
180 REG_IN_TABLE (i) holds -1 if no references to register i have been
181 entered in the table; otherwise, it contains the value REG_TICK (i)
182 had when the references were entered. If we want to enter a
183 reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
184 remove old references. Until we want to enter a new entry, the
185 mere fact that the two vectors don't match makes the entries be
186 ignored if anyone tries to match them.
7afe21cc
RK
187
188 Registers themselves are entered in the hash table as well as in
459281be
KH
189 the equivalent-register chains. However, `REG_TICK' and
190 `REG_IN_TABLE' do not apply to expressions which are simple
7afe21cc
RK
191 register references. These expressions are removed from the table
192 immediately when they become invalid, and this can be done even if
193 we do not immediately search for all the expressions that refer to
194 the register.
195
196 A CLOBBER rtx in an instruction invalidates its operand for further
197 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
198 invalidates everything that resides in memory.
199
200Related expressions:
201
202 Constant expressions that differ only by an additive integer
203 are called related. When a constant expression is put in
204 the table, the related expression with no constant term
205 is also entered. These are made to point at each other
206 so that it is possible to find out if there exists any
207 register equivalent to an expression related to a given expression. */
278a83b2 208
1bb98cec
DM
209/* Length of qty_table vector. We know in advance we will not need
210 a quantity number this big. */
7afe21cc
RK
211
212static int max_qty;
213
214/* Next quantity number to be allocated.
215 This is 1 + the largest number needed so far. */
216
217static int next_qty;
218
1bb98cec 219/* Per-qty information tracking.
7afe21cc 220
1bb98cec
DM
221 `first_reg' and `last_reg' track the head and tail of the
222 chain of registers which currently contain this quantity.
7afe21cc 223
1bb98cec 224 `mode' contains the machine mode of this quantity.
7afe21cc 225
1bb98cec
DM
226 `const_rtx' holds the rtx of the constant value of this
227 quantity, if known. A summations of the frame/arg pointer
228 and a constant can also be entered here. When this holds
229 a known value, `const_insn' is the insn which stored the
230 constant value.
7afe21cc 231
1bb98cec
DM
232 `comparison_{code,const,qty}' are used to track when a
233 comparison between a quantity and some constant or register has
234 been passed. In such a case, we know the results of the comparison
235 in case we see it again. These members record a comparison that
236 is known to be true. `comparison_code' holds the rtx code of such
237 a comparison, else it is set to UNKNOWN and the other two
238 comparison members are undefined. `comparison_const' holds
239 the constant being compared against, or zero if the comparison
240 is not against a constant. `comparison_qty' holds the quantity
241 being compared against when the result is known. If the comparison
242 is not with a register, `comparison_qty' is -1. */
7afe21cc 243
1bb98cec
DM
244struct qty_table_elem
245{
246 rtx const_rtx;
247 rtx const_insn;
248 rtx comparison_const;
249 int comparison_qty;
770ae6cc 250 unsigned int first_reg, last_reg;
496324d0
DN
251 /* The sizes of these fields should match the sizes of the
252 code and mode fields of struct rtx_def (see rtl.h). */
253 ENUM_BITFIELD(rtx_code) comparison_code : 16;
254 ENUM_BITFIELD(machine_mode) mode : 8;
1bb98cec 255};
7afe21cc 256
1bb98cec
DM
257/* The table of all qtys, indexed by qty number. */
258static struct qty_table_elem *qty_table;
7afe21cc 259
fc188d37
AK
260/* Structure used to pass arguments via for_each_rtx to function
261 cse_change_cc_mode. */
262struct change_cc_mode_args
263{
264 rtx insn;
265 rtx newreg;
266};
267
7afe21cc
RK
268#ifdef HAVE_cc0
269/* For machines that have a CC0, we do not record its value in the hash
270 table since its use is guaranteed to be the insn immediately following
271 its definition and any other insn is presumed to invalidate it.
272
96fb470d
SB
273 Instead, we store below the current and last value assigned to CC0.
274 If it should happen to be a constant, it is stored in preference
275 to the actual assigned value. In case it is a constant, we store
276 the mode in which the constant should be interpreted. */
7afe21cc 277
96fb470d
SB
278static rtx this_insn_cc0, prev_insn_cc0;
279static enum machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;
4977bab6 280#endif
7afe21cc
RK
281
282/* Insn being scanned. */
283
284static rtx this_insn;
285
71d306d1
DE
286/* Index by register number, gives the number of the next (or
287 previous) register in the chain of registers sharing the same
7afe21cc
RK
288 value.
289
290 Or -1 if this register is at the end of the chain.
291
459281be 292 If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined. */
1bb98cec
DM
293
/* Per-register equivalence chain.  NEXT and PREV are register numbers of
   the next/previous register sharing the same value, or -1 at the ends of
   the chain.  If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is
   undefined.  */
struct reg_eqv_elem
{
  int next, prev;
};
7afe21cc 299
1bb98cec
DM
300/* The table of all register equivalence chains. */
301static struct reg_eqv_elem *reg_eqv_table;
7afe21cc 302
14a774a9
RK
struct cse_reg_info
{
  /* The timestamp at which this register is initialized.  */
  unsigned int timestamp;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};
7afe21cc 325
bc5e3b54 326/* A table of cse_reg_info indexed by register numbers. */
f00822b2 327static struct cse_reg_info *cse_reg_info_table;
c1edba58 328
bc5e3b54
KH
329/* The size of the above table. */
330static unsigned int cse_reg_info_table_size;
9b1549b8 331
bc5e3b54
KH
332/* The index of the first entry that has not been initialized. */
333static unsigned int cse_reg_info_table_first_uninitialized;
7afe21cc 334
bc5e3b54 335/* The timestamp at the beginning of the current run of
932ad4d9
SB
336 cse_extended_basic_block. We increment this variable at the beginning of
337 the current run of cse_extended_basic_block. The timestamp field of a
bc5e3b54
KH
338 cse_reg_info entry matches the value of this variable if and only
339 if the entry has been initialized during the current run of
932ad4d9 340 cse_extended_basic_block. */
bc5e3b54 341static unsigned int cse_reg_info_timestamp;
7afe21cc 342
278a83b2 343/* A HARD_REG_SET containing all the hard registers for which there is
7afe21cc
RK
344 currently a REG expression in the hash table. Note the difference
345 from the above variables, which indicate if the REG is mentioned in some
346 expression in the table. */
347
348static HARD_REG_SET hard_regs_in_table;
349
7afe21cc
RK
350/* CUID of insn that starts the basic block currently being cse-processed. */
351
352static int cse_basic_block_start;
353
354/* CUID of insn that ends the basic block currently being cse-processed. */
355
356static int cse_basic_block_end;
357
358/* Vector mapping INSN_UIDs to cuids.
d45cf215 359 The cuids are like uids but increase monotonically always.
7afe21cc
RK
360 We use them to see whether a reg is used outside a given basic block. */
361
906c4e36 362static int *uid_cuid;
7afe21cc 363
164c8956
RK
364/* Highest UID in UID_CUID. */
365static int max_uid;
366
7afe21cc
RK
367/* Get the cuid of an insn. */
368
369#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
370
371/* Nonzero if cse has altered conditional jump insns
372 in such a way that jump optimization should be redone. */
373
374static int cse_jumps_altered;
375
f85cc4cb
RK
376/* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
377 REG_LABEL, we have to rerun jump after CSE to put in the note. */
a5dfb4ee
RK
378static int recorded_label_ref;
379
7afe21cc
RK
380/* canon_hash stores 1 in do_not_record
381 if it notices a reference to CC0, PC, or some other volatile
382 subexpression. */
383
384static int do_not_record;
385
386/* canon_hash stores 1 in hash_arg_in_memory
387 if it notices a reference to memory within the expression being hashed. */
388
389static int hash_arg_in_memory;
390
7afe21cc
RK
391/* The hash table contains buckets which are chains of `struct table_elt's,
392 each recording one expression's information.
393 That expression is in the `exp' field.
394
db048faf
MM
395 The canon_exp field contains a canonical (from the point of view of
396 alias analysis) version of the `exp' field.
397
7afe21cc
RK
398 Those elements with the same hash code are chained in both directions
399 through the `next_same_hash' and `prev_same_hash' fields.
400
401 Each set of expressions with equivalent values
402 are on a two-way chain through the `next_same_value'
403 and `prev_same_value' fields, and all point with
404 the `first_same_value' field at the first element in
405 that chain. The chain is in order of increasing cost.
406 Each element's cost value is in its `cost' field.
407
408 The `in_memory' field is nonzero for elements that
409 involve any reference to memory. These elements are removed
410 whenever a write is done to an unidentified location in memory.
411 To be safe, we assume that a memory address is unidentified unless
412 the address is either a symbol constant or a constant plus
413 the frame pointer or argument pointer.
414
7afe21cc
RK
415 The `related_value' field is used to connect related expressions
416 (that differ by adding an integer).
417 The related expressions are chained in a circular fashion.
418 `related_value' is zero for expressions for which this
419 chain is not useful.
420
421 The `cost' field stores the cost of this element's expression.
630c79be
BS
422 The `regcost' field stores the value returned by approx_reg_cost for
423 this element's expression.
7afe21cc
RK
424
425 The `is_const' flag is set if the element is a constant (including
426 a fixed address).
427
428 The `flag' field is used as a temporary during some search routines.
429
430 The `mode' field is usually the same as GET_MODE (`exp'), but
431 if `exp' is a CONST_INT and has no machine mode then the `mode'
432 field is the mode it was being used as. Each constant is
433 recorded separately for each mode it is used with. */
434
7afe21cc
RK
435struct table_elt
436{
437 rtx exp;
db048faf 438 rtx canon_exp;
7afe21cc
RK
439 struct table_elt *next_same_hash;
440 struct table_elt *prev_same_hash;
441 struct table_elt *next_same_value;
442 struct table_elt *prev_same_value;
443 struct table_elt *first_same_value;
444 struct table_elt *related_value;
445 int cost;
630c79be 446 int regcost;
496324d0
DN
447 /* The size of this field should match the size
448 of the mode field of struct rtx_def (see rtl.h). */
449 ENUM_BITFIELD(machine_mode) mode : 8;
7afe21cc 450 char in_memory;
7afe21cc
RK
451 char is_const;
452 char flag;
453};
454
7afe21cc
RK
455/* We don't want a lot of buckets, because we rarely have very many
456 things stored in the hash table, and a lot of buckets slows
457 down a lot of loops that happen frequently. */
9b1549b8
DM
458#define HASH_SHIFT 5
459#define HASH_SIZE (1 << HASH_SHIFT)
460#define HASH_MASK (HASH_SIZE - 1)
7afe21cc
RK
461
462/* Compute hash code of X in mode M. Special-case case where X is a pseudo
463 register (hard registers may require `do_not_record' to be set). */
464
465#define HASH(X, M) \
f8cfc6aa 466 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
9b1549b8
DM
467 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
468 : canon_hash (X, M)) & HASH_MASK)
7afe21cc 469
0516f6fe
SB
470/* Like HASH, but without side-effects. */
471#define SAFE_HASH(X, M) \
472 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
473 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
474 : safe_hash (X, M)) & HASH_MASK)
475
630c79be
BS
476/* Determine whether register number N is considered a fixed register for the
477 purpose of approximating register costs.
7afe21cc
RK
478 It is desirable to replace other regs with fixed regs, to reduce need for
479 non-fixed hard regs.
553687c9 480 A reg wins if it is either the frame pointer or designated as fixed. */
7afe21cc 481#define FIXED_REGNO_P(N) \
8bc169f2 482 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
6ab832bc 483 || fixed_regs[N] || global_regs[N])
7afe21cc
RK
484
485/* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
ac07e066
RK
486 hard registers and pointers into the frame are the cheapest with a cost
487 of 0. Next come pseudos with a cost of one and other hard registers with
488 a cost of 2. Aside from these special cases, call `rtx_cost'. */
489
d67fb775
SB
490#define CHEAP_REGNO(N) \
491 (REGNO_PTR_FRAME_P(N) \
492 || (HARD_REGISTER_NUM_P (N) \
e7bb59fa 493 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
7afe21cc 494
f8cfc6aa
JQ
495#define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
496#define COST_IN(X,OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))
7afe21cc 497
30f72379
MM
498/* Get the number of times this register has been updated in this
499 basic block. */
500
bc5e3b54 501#define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)
30f72379
MM
502
503/* Get the point at which REG was recorded in the table. */
504
bc5e3b54 505#define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)
30f72379 506
46081bb3
SH
507/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
508 SUBREG). */
509
bc5e3b54 510#define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)
46081bb3 511
30f72379
MM
512/* Get the quantity number for REG. */
513
bc5e3b54 514#define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)
30f72379 515
7afe21cc 516/* Determine if the quantity number for register X represents a valid index
1bb98cec 517 into the qty_table. */
7afe21cc 518
08a69267 519#define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
7afe21cc 520
9b1549b8 521static struct table_elt *table[HASH_SIZE];
7afe21cc
RK
522
523/* Chain of `struct table_elt's made so far for this function
524 but currently removed from the table. */
525
526static struct table_elt *free_element_chain;
527
7afe21cc
RK
528/* Set to the cost of a constant pool reference if one was found for a
529 symbolic constant. If this was found, it means we should try to
530 convert constants into constant pool entries if they don't fit in
531 the insn. */
532
533static int constant_pool_entries_cost;
dd0ba281 534static int constant_pool_entries_regcost;
7afe21cc 535
932ad4d9
SB
536/* This data describes a block that will be processed by
537 cse_extended_basic_block. */
6cd4575e 538
14a774a9
RK
539struct cse_basic_block_data
540{
6cd4575e
RK
541 /* Lowest CUID value of insns in block. */
542 int low_cuid;
543 /* Highest CUID value of insns in block. */
544 int high_cuid;
545 /* Total number of SETs in block. */
546 int nsets;
6cd4575e
RK
547 /* Size of current branch path, if any. */
548 int path_size;
932ad4d9 549 /* Current path, indicating which basic_blocks will be processed. */
14a774a9
RK
550 struct branch_path
551 {
932ad4d9
SB
552 /* The basic block for this path entry. */
553 basic_block bb;
9bf8cfbf 554 } *path;
6cd4575e
RK
555};
556
932ad4d9
SB
557/* A simple bitmap to track which basic blocks have been visited
558 already as part of an already processed extended basic block. */
559static sbitmap cse_visited_basic_blocks;
560
7080f735
AJ
561static bool fixed_base_plus_p (rtx x);
562static int notreg_cost (rtx, enum rtx_code);
563static int approx_reg_cost_1 (rtx *, void *);
564static int approx_reg_cost (rtx);
56ae04af 565static int preferable (int, int, int, int);
7080f735
AJ
566static void new_basic_block (void);
567static void make_new_qty (unsigned int, enum machine_mode);
568static void make_regs_eqv (unsigned int, unsigned int);
569static void delete_reg_equiv (unsigned int);
570static int mention_regs (rtx);
571static int insert_regs (rtx, struct table_elt *, int);
572static void remove_from_table (struct table_elt *, unsigned);
573static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
574static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
575static rtx lookup_as_function (rtx, enum rtx_code);
576static struct table_elt *insert (rtx, struct table_elt *, unsigned,
577 enum machine_mode);
578static void merge_equiv_classes (struct table_elt *, struct table_elt *);
579static void invalidate (rtx, enum machine_mode);
580static int cse_rtx_varies_p (rtx, int);
581static void remove_invalid_refs (unsigned int);
582static void remove_invalid_subreg_refs (unsigned int, unsigned int,
583 enum machine_mode);
584static void rehash_using_reg (rtx);
585static void invalidate_memory (void);
586static void invalidate_for_call (void);
587static rtx use_related_value (rtx, struct table_elt *);
0516f6fe
SB
588
589static inline unsigned canon_hash (rtx, enum machine_mode);
590static inline unsigned safe_hash (rtx, enum machine_mode);
591static unsigned hash_rtx_string (const char *);
592
7080f735 593static rtx canon_reg (rtx, rtx);
7080f735
AJ
594static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
595 enum machine_mode *,
596 enum machine_mode *);
597static rtx fold_rtx (rtx, rtx);
598static rtx equiv_constant (rtx);
0129d079 599static void record_jump_equiv (rtx, bool);
7080f735
AJ
600static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
601 int);
602static void cse_insn (rtx, rtx);
932ad4d9 603static void cse_prescan_path (struct cse_basic_block_data *);
7080f735
AJ
604static void invalidate_from_clobbers (rtx);
605static rtx cse_process_notes (rtx, rtx);
932ad4d9 606static void cse_extended_basic_block (struct cse_basic_block_data *);
b92ba6ff 607static void count_reg_usage (rtx, int *, rtx, int);
7080f735
AJ
608static int check_for_label_ref (rtx *, void *);
609extern void dump_class (struct table_elt*);
bc5e3b54
KH
610static void get_cse_reg_info_1 (unsigned int regno);
611static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
7080f735
AJ
612static int check_dependence (rtx *, void *);
613
614static void flush_hash_table (void);
615static bool insn_live_p (rtx, int *);
616static bool set_live_p (rtx, rtx, int *);
617static bool dead_libcall_p (rtx, int *);
e129d93a 618static int cse_change_cc_mode (rtx *, void *);
fc188d37 619static void cse_change_cc_mode_insn (rtx, rtx);
e129d93a
ILT
620static void cse_change_cc_mode_insns (rtx, rtx, rtx);
621static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);
7afe21cc 622\f
2f93eea8
PB
623
624#undef RTL_HOOKS_GEN_LOWPART
625#define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible
626
627static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
628\f
4977bab6
ZW
629/* Nonzero if X has the form (PLUS frame-pointer integer). We check for
630 virtual regs here because the simplify_*_operation routines are called
631 by integrate.c, which is called before virtual register instantiation. */
632
633static bool
7080f735 634fixed_base_plus_p (rtx x)
4977bab6
ZW
635{
636 switch (GET_CODE (x))
637 {
638 case REG:
639 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
640 return true;
641 if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
642 return true;
643 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
644 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
645 return true;
646 return false;
647
648 case PLUS:
649 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
650 return false;
651 return fixed_base_plus_p (XEXP (x, 0));
652
4977bab6
ZW
653 default:
654 return false;
655 }
656}
657
a4c6502a
MM
658/* Dump the expressions in the equivalence class indicated by CLASSP.
659 This function is used only for debugging. */
a0153051 660void
7080f735 661dump_class (struct table_elt *classp)
a4c6502a
MM
662{
663 struct table_elt *elt;
664
665 fprintf (stderr, "Equivalence chain for ");
666 print_rtl (stderr, classp->exp);
667 fprintf (stderr, ": \n");
278a83b2 668
a4c6502a
MM
669 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
670 {
671 print_rtl (stderr, elt->exp);
672 fprintf (stderr, "\n");
673 }
674}
675
630c79be 676/* Subroutine of approx_reg_cost; called through for_each_rtx. */
be8ac49a 677
630c79be 678static int
7080f735 679approx_reg_cost_1 (rtx *xp, void *data)
630c79be
BS
680{
681 rtx x = *xp;
c863f8c2 682 int *cost_p = data;
630c79be 683
f8cfc6aa 684 if (x && REG_P (x))
c863f8c2
DM
685 {
686 unsigned int regno = REGNO (x);
687
688 if (! CHEAP_REGNO (regno))
689 {
690 if (regno < FIRST_PSEUDO_REGISTER)
691 {
692 if (SMALL_REGISTER_CLASSES)
693 return 1;
694 *cost_p += 2;
695 }
696 else
697 *cost_p += 1;
698 }
699 }
700
630c79be
BS
701 return 0;
702}
703
704/* Return an estimate of the cost of the registers used in an rtx.
705 This is mostly the number of different REG expressions in the rtx;
a1f300c0 706 however for some exceptions like fixed registers we use a cost of
f1c1dfc3 707 0. If any other hard register reference occurs, return MAX_COST. */
630c79be
BS
708
709static int
7080f735 710approx_reg_cost (rtx x)
630c79be 711{
630c79be 712 int cost = 0;
f1c1dfc3 713
c863f8c2
DM
714 if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
715 return MAX_COST;
630c79be 716
c863f8c2 717 return cost;
630c79be
BS
718}
719
720/* Return a negative value if an rtx A, whose costs are given by COST_A
721 and REGCOST_A, is more desirable than an rtx B.
722 Return a positive value if A is less desirable, or 0 if the two are
723 equally good. */
724static int
56ae04af 725preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
630c79be 726{
423adbb9 727 /* First, get rid of cases involving expressions that are entirely
f1c1dfc3
BS
728 unwanted. */
729 if (cost_a != cost_b)
730 {
731 if (cost_a == MAX_COST)
732 return 1;
733 if (cost_b == MAX_COST)
734 return -1;
735 }
736
737 /* Avoid extending lifetimes of hardregs. */
738 if (regcost_a != regcost_b)
739 {
740 if (regcost_a == MAX_COST)
741 return 1;
742 if (regcost_b == MAX_COST)
743 return -1;
744 }
745
746 /* Normal operation costs take precedence. */
630c79be
BS
747 if (cost_a != cost_b)
748 return cost_a - cost_b;
f1c1dfc3 749 /* Only if these are identical consider effects on register pressure. */
630c79be
BS
750 if (regcost_a != regcost_b)
751 return regcost_a - regcost_b;
752 return 0;
753}
754
954a5693
RK
755/* Internal function, to compute cost when X is not a register; called
756 from COST macro to keep it simple. */
757
758static int
7080f735 759notreg_cost (rtx x, enum rtx_code outer)
954a5693
RK
760{
761 return ((GET_CODE (x) == SUBREG
f8cfc6aa 762 && REG_P (SUBREG_REG (x))
954a5693
RK
763 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
764 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
765 && (GET_MODE_SIZE (GET_MODE (x))
766 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
767 && subreg_lowpart_p (x)
768 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
769 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
630c79be 770 ? 0
f2fa288f 771 : rtx_cost (x, outer) * 2);
954a5693
RK
772}
773
01329426 774\f
/* Initialize CSE_REG_INFO_TABLE so that at least the first NREGS
   entries are usable, growing the table if needed.  Entries are only
   timestamp-marked here; the real per-register initialization is done
   lazily by get_cse_reg_info_1.  */

static void
init_cse_reg_info (unsigned int nregs)
{
  /* Do we need to grow the table?  */
  if (nregs > cse_reg_info_table_size)
    {
      unsigned int new_size;

      if (cse_reg_info_table_size < 2048)
	{
	  /* Compute a new size that is a power of 2 and no smaller
	     than the larger of NREGS and 64.  */
	  new_size = (cse_reg_info_table_size
		      ? cse_reg_info_table_size : 64);

	  while (new_size < nregs)
	    new_size *= 2;
	}
      else
	{
	  /* If we need a big table, allocate just enough to hold
	     NREGS registers.  */
	  new_size = nregs;
	}

      /* Reallocate the table with NEW_SIZE entries.  The old contents
	 are discarded, which is safe because resetting
	 cse_reg_info_table_first_uninitialized below forces every
	 entry to be re-timestamped before use.  */
      if (cse_reg_info_table)
	free (cse_reg_info_table);
      cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
      cse_reg_info_table_size = new_size;
      cse_reg_info_table_first_uninitialized = 0;
    }

  /* Do we have all of the first NREGS entries initialized?  */
  if (cse_reg_info_table_first_uninitialized < nregs)
    {
      unsigned int old_timestamp = cse_reg_info_timestamp - 1;
      unsigned int i;

      /* Put the old timestamp on newly allocated entries so that they
	 will all be considered out of date.  We do not touch those
	 entries beyond the first NREGS entries to be nice to the
	 virtual memory.  */
      for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
	cse_reg_info_table[i].timestamp = old_timestamp;

      cse_reg_info_table_first_uninitialized = nregs;
    }
}
826
/* Given REGNO, initialize the cse_reg_info entry for REGNO.  Called
   lazily from get_cse_reg_info when the entry's timestamp is stale.  */

static void
get_cse_reg_info_1 (unsigned int regno)
{
  /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
     entry will be considered to have been initialized.  */
  cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;

  /* Initialize the rest of the entry.  Negative reg_in_table and
     subreg_ticked mean "not in the hash table"; the negative reg_qty
     encoding marks the register as having no valid quantity yet.  */
  cse_reg_info_table[regno].reg_tick = 1;
  cse_reg_info_table[regno].reg_in_table = -1;
  cse_reg_info_table[regno].subreg_ticked = -1;
  cse_reg_info_table[regno].reg_qty = -regno - 1;
}
842
/* Find a cse_reg_info entry for REGNO.  Entries are initialized
   lazily: a timestamp mismatch means the entry is stale (e.g. after
   new_basic_block bumped cse_reg_info_timestamp) and must be reset.  */

static inline struct cse_reg_info *
get_cse_reg_info (unsigned int regno)
{
  struct cse_reg_info *p = &cse_reg_info_table[regno];

  /* If this entry has not been initialized, go ahead and initialize
     it.  */
  if (p->timestamp != cse_reg_info_timestamp)
    get_cse_reg_info_1 (regno);

  return p;
}
857
7afe21cc
RK
/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block (void)
{
  int i;

  next_qty = 0;

  /* Invalidate cse_reg_info_table.  Bumping the timestamp makes every
     entry stale at once, without touching the table itself.  */
  cse_reg_info_timestamp++;

  /* Clear out hash table state for this pass.  */
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this entire hash chain into the free element
	     list, so the table_elt structs can be recycled by later
	     calls to insert rather than reallocated.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}
903
1bb98cec
DM
/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  REG becomes both the
   first and last register of the new quantity's equivalence chain.  */

static void
make_new_qty (unsigned int reg, enum machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  gcc_assert (next_qty < max_qty);

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  /* No constant value or defining insn is known for the new quantity
     yet.  */
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  /* REG is the only element of the chain: no neighbors (-1 means
     "none").  */
  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}
927
/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.

   NEW is linked into OLD's quantity chain, either at the head (when it
   is a better canonical representative) or near the tail.  The head of
   the chain is the canonical register used when rewriting references
   to this quantity.  */

static void
make_regs_eqv (unsigned int new, unsigned int old)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  gcc_assert (REGNO_QTY_VALID_P (old));

  REG_QTY (new) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      /* Link NEW in as the new head of the chain.  */
      reg_eqv_table[firstr].prev = new;
      reg_eqv_table[new].next = firstr;
      reg_eqv_table[new].prev = -1;
      ent->first_reg = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      /* Splice NEW in after LASTR; update the quantity's last_reg when
	 NEW becomes the new tail.  */
      reg_eqv_table[new].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
      else
	qty_table[q].last_reg = new;
      reg_eqv_table[lastr].next = new;
      reg_eqv_table[new].prev = lastr;
    }
}
990
/* Remove REG from its equivalence class, unlinking it from the
   quantity's doubly-linked register chain and marking its quantity
   number invalid.  */

static void
delete_reg_equiv (unsigned int reg)
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (! REGNO_QTY_VALID_P (reg))
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  /* Unlink REG; fix up the quantity's first_reg/last_reg when REG was
     at either end of the chain (-1 means no neighbor).  */
  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  /* Same "no valid quantity" encoding used by get_cse_reg_info_1.  */
  REG_QTY (reg) = -reg - 1;
}
1020
/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = END_REGNO (x);
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  /* A REG_IN_TABLE/REG_TICK mismatch means the register was
	     invalidated since its expressions were entered; purge
	     them.  */
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	  SUBREG_TICKED (i) = -1;
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && REG_P (SUBREG_REG (x))
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
	     the last store to this register really stored into this
	     subreg, then remove the memory of this subreg.
	     Otherwise, remove any memory of the entire register and
	     all its subregs from the table.  */
	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || COMPARISON_P (x))
    {
      if (REG_P (XEXP (x, 0))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (REG_P (XEXP (x, 1))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  /* Recurse over all subexpressions and vector elements of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}
1129
/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (rtx x, struct table_elt *classp, int modified)
{
  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  /* Try to join an existing quantity: find a register of the
	     same mode in CLASSP's value class.  */
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (REG_P (classp->exp)
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  unsigned c_regno = REGNO (classp->exp);

		  gcc_assert (REGNO_QTY_VALID_P (c_regno));

		  /* Suppose that 5 is hard reg and 100 and 101 are
		     pseudos.  Consider

		     (set (reg:si 100) (reg:si 5))
		     (set (reg:si 5) (reg:si 100))
		     (set (reg:di 101) (reg:di 5))

		     We would now set REG_QTY (101) = REG_QTY (5), but the
		     entry for 5 is in SImode.  When we use this later in
		     copy propagation, we get the register in wrong mode.  */
		  if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
		    continue;

		  make_regs_eqv (regno, c_regno);
		  return 1;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}
1226\f
1227/* Look in or update the hash table. */
1228
7afe21cc
RK
/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.

   ELT is not deallocated; it is pushed onto free_element_chain for
   reuse by `insert'.  */

static void
remove_from_table (struct table_elt *elt, unsigned int hash)
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	/* ELT was the class head: the remaining elements must all
	   point at the new head.  */
	struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      /* A one-element circular chain means no related values remain.  */
      if (p->related_value == p)
	p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}
1307
1308/* Look up X in the hash table and return its table element,
1309 or 0 if X is not in the table.
1310
1311 MODE is the machine-mode of X, or if X is an integer constant
1312 with VOIDmode then MODE is the mode with which X will be used.
1313
1314 Here we are satisfied to find an expression whose tree structure
1315 looks like X. */
1316
1317static struct table_elt *
7080f735 1318lookup (rtx x, unsigned int hash, enum machine_mode mode)
7afe21cc 1319{
b3694847 1320 struct table_elt *p;
7afe21cc
RK
1321
1322 for (p = table[hash]; p; p = p->next_same_hash)
f8cfc6aa 1323 if (mode == p->mode && ((x == p->exp && REG_P (x))
0516f6fe 1324 || exp_equiv_p (x, p->exp, !REG_P (x), false)))
7afe21cc
RK
1325 return p;
1326
1327 return 0;
1328}
1329
/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;

  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (REG_P (p->exp)
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      /* Note the third argument 0 to exp_equiv_p: entries that mention
	 invalidated registers still match, unlike in `lookup'.  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode
	    && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
	  return p;
    }

  return 0;
}
1359
/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (rtx x, enum rtx_code code)
{
  struct table_elt *p
    = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));

  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      /* Copy X before changing its mode so the caller's rtx is not
	 clobbered.  */
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, SAFE_HASH (x, VOIDmode), word_mode);
    }

  if (p == 0)
    return 0;

  /* Scan the value class for an entry of the requested code, skipping
     entries invalidated since insertion.  */
  for (p = p->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == code
	/* Make sure this is a valid entry in the table.  */
	&& exp_equiv_p (p->exp, p->exp, 1, false))
      return p->exp;

  return 0;
}
1391
/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

/* True when X is cheaper than Y under the `preferable' ordering.  */
#define CHEAPER(X, Y) \
 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)

static struct table_elt *
insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));

  /* If X is a hard register, show it is being put in the table.  */
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    add_to_hard_reg_set (&hard_regs_in_table, GET_MODE (x), REGNO (x));

  /* Put an element for X into the right hash bucket.  Reuse a recycled
     table_elt from the free chain if one is available.  */

  elt = free_element_chain;
  if (elt)
    free_element_chain = elt->next_same_hash;
  else
    elt = XNEW (struct table_elt);

  elt->exp = x;
  elt->canon_exp = NULL_RTX;
  elt->cost = COST (x);
  elt->regcost = approx_reg_cost (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  /* All members must point at the new class head.  */
	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  struct table_elt *p, *next;

	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next);

	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;

	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update the qtys `const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && REG_P (classp->exp)
      && !REG_P (x))
    {
      int exp_q = REG_QTY (REGNO (classp->exp));
      struct qty_table_elem *exp_ent = &qty_table[exp_q];

      exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
      exp_ent->const_insn = this_insn;
    }

  else if (REG_P (x)
	   && classp
	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
	   && ! elt->is_const)
    {
      struct table_elt *p;

      /* Scan the class for an existing constant member.  */
      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const && !REG_P (p->exp))
	    {
	      int x_q = REG_QTY (REGNO (x));
	      struct qty_table_elem *x_ent = &qty_table[x_q];

	      x_ent->const_rtx
		= gen_lowpart (GET_MODE (x), p->exp);
	      x_ent->const_insn = this_insn;
	      break;
	    }
	}
    }

  else if (REG_P (x)
	   && qty_table[REG_QTY (REGNO (x))].const_rtx
	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = SAFE_HASH (subexp, mode);
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}
1578\f
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned int hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      /* Save the successor now; ELT is removed from the class below.  */
      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
	{
	  bool need_rehash = false;

	  hash_arg_in_memory = 0;
	  hash = HASH (exp, mode);

	  if (REG_P (exp))
	    {
	      /* Deleting the register's quantity changes hash codes
		 that involve it, so the table must be rehashed below.  */
	      need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
	      delete_reg_equiv (REGNO (exp));
	    }

	  remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0) || need_rehash)
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	}
    }
}
1637\f
01e752d3
JL
/* Flush the entire hash table.  */

static void
flush_hash_table (void)
{
  int i;
  struct table_elt *p;

  /* The inner loop deliberately reloads table[i] each iteration
     instead of following p->next_same_hash: invalidate below can
     remove other elements from the chain.  */
  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = table[i])
      {
	/* Note that invalidate can remove elements
	   after P in the current hash chain.  */
	if (REG_P (p->exp))
	  invalidate (p->exp, VOIDmode);
	else
	  remove_from_table (p, i);
      }
}
14a774a9 1657\f
2ce6dc2f
JH
/* Function called for each rtx to check whether a true dependence
   exists.  Used as a for_each_rtx callback; DATA carries the
   expression, its mode, and its canonicalized address.  */
struct check_dependence_data
{
  enum machine_mode mode;   /* Mode of the expression being tested.  */
  rtx exp;                  /* The memory expression to test against.  */
  rtx addr;                 /* Canonical address of EXP.  */
};

static int
check_dependence (rtx *x, void *data)
{
  struct check_dependence_data *d = (struct check_dependence_data *) data;
  /* Only MEMs can carry a true dependence on the stored location;
     everything else is ignored.  */
  if (*x && MEM_P (*x))
    return canon_true_dependence (d->exp, d->mode, d->addr, *x,
				  cse_rtx_varies_p);
  else
    return 0;
}
1676\f
14a774a9
RK
/* Remove from the hash table, or mark as invalid, all expressions whose
   values could be altered by storing in X.  X is a register, a subreg, or
   a memory reference with nonvarying address (because, when a memory
   reference with a varying address is stored in, all memory references are
   removed by invalidate_memory so specific invalidation is superfluous).
   FULL_MODE, if not VOIDmode, indicates that this much should be
   invalidated instead of just the amount indicated by the mode of X.  This
   is only used for bitfield stores into memory.

   A nonvarying address may be just a register or just a symbol reference,
   or it may be either of those plus a numeric offset.  */

static void
invalidate (rtx x, enum machine_mode full_mode)
{
  int i;
  struct table_elt *p;
  rtx addr;

  switch (GET_CODE (x))
    {
    case REG:
      {
	/* If X is a register, dependencies on its contents are recorded
	   through the qty number mechanism.  Just change the qty number of
	   the register, mark it as invalid for expressions that refer to it,
	   and remove it itself.  */
	unsigned int regno = REGNO (x);
	unsigned int hash = HASH (x, GET_MODE (x));

	/* Remove REGNO from any quantity list it might be on and indicate
	   that its value might have changed.  If it is a pseudo, remove its
	   entry from the hash table.

	   For a hard register, we do the first two actions above for any
	   additional hard registers corresponding to X.  Then, if any of these
	   registers are in the table, we must remove any REG entries that
	   overlap these registers.  */

	delete_reg_equiv (regno);
	/* Bumping REG_TICK makes every cached hash-table reference to
	   this register stale (see exp_equiv_p's REG_IN_TABLE check).  */
	REG_TICK (regno)++;
	SUBREG_TICKED (regno) = -1;

	if (regno >= FIRST_PSEUDO_REGISTER)
	  {
	    /* Because a register can be referenced in more than one mode,
	       we might have to remove more than one table entry.  */
	    struct table_elt *elt;

	    while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
	      remove_from_table (elt, hash);
	  }
	else
	  {
	    HOST_WIDE_INT in_table
	      = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
	    unsigned int endregno = END_HARD_REGNO (x);
	    unsigned int tregno, tendregno, rn;
	    struct table_elt *p, *next;

	    CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

	    /* Invalidate every additional hard register covered by X,
	       remembering whether any of them is currently in the table.  */
	    for (rn = regno + 1; rn < endregno; rn++)
	      {
		in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
		CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
		delete_reg_equiv (rn);
		REG_TICK (rn)++;
		SUBREG_TICKED (rn) = -1;
	      }

	    /* Only if a clobbered hard register is actually in the table
	       do we need the full scan for overlapping REG entries.  */
	    if (in_table)
	      for (hash = 0; hash < HASH_SIZE; hash++)
		for (p = table[hash]; p; p = next)
		  {
		    next = p->next_same_hash;

		    if (!REG_P (p->exp)
			|| REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
		      continue;

		    tregno = REGNO (p->exp);
		    tendregno = END_HARD_REGNO (p->exp);
		    /* Standard half-open interval overlap test between
		       [tregno, tendregno) and [regno, endregno).  */
		    if (tendregno > regno && tregno < endregno)
		      remove_from_table (p, hash);
		  }
	  }
      }
      return;

    case SUBREG:
      invalidate (SUBREG_REG (x), VOIDmode);
      return;

    case PARALLEL:
      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
	invalidate (XVECEXP (x, 0, i), VOIDmode);
      return;

    case EXPR_LIST:
      /* This is part of a disjoint return value; extract the location in
	 question ignoring the offset.  */
      invalidate (XEXP (x, 0), VOIDmode);
      return;

    case MEM:
      addr = canon_rtx (get_addr (XEXP (x, 0)));
      /* Calculate the canonical version of X here so that
	 true_dependence doesn't generate new RTL for X on each call.  */
      x = canon_rtx (x);

      /* Remove all hash table elements that refer to overlapping pieces of
	 memory.  */
      if (full_mode == VOIDmode)
	full_mode = GET_MODE (x);

      for (i = 0; i < HASH_SIZE; i++)
	{
	  struct table_elt *next;

	  for (p = table[i]; p; p = next)
	    {
	      next = p->next_same_hash;
	      if (p->in_memory)
		{
		  struct check_dependence_data d;

		  /* Just canonicalize the expression once;
		     otherwise each time we call invalidate
		     true_dependence will canonicalize the
		     expression again.  */
		  if (!p->canon_exp)
		    p->canon_exp = canon_rtx (p->exp);
		  d.exp = x;
		  d.addr = addr;
		  d.mode = full_mode;
		  if (for_each_rtx (&p->canon_exp, check_dependence, &d))
		    remove_from_table (p, i);
		}
	    }
	}
      return;

    default:
      gcc_unreachable ();
    }
}
14a774a9 1824\f
7afe21cc
RK
1825/* Remove all expressions that refer to register REGNO,
1826 since they are already invalid, and we are about to
1827 mark that register valid again and don't want the old
1828 expressions to reappear as valid. */
1829
1830static void
7080f735 1831remove_invalid_refs (unsigned int regno)
7afe21cc 1832{
770ae6cc
RK
1833 unsigned int i;
1834 struct table_elt *p, *next;
7afe21cc 1835
9b1549b8 1836 for (i = 0; i < HASH_SIZE; i++)
7afe21cc
RK
1837 for (p = table[i]; p; p = next)
1838 {
1839 next = p->next_same_hash;
f8cfc6aa 1840 if (!REG_P (p->exp)
68252e27 1841 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
7afe21cc
RK
1842 remove_from_table (p, i);
1843 }
1844}
34c73909 1845
ddef6bc7
JJ
/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
   and mode MODE.  Expressions that refer to REGNO only through a
   SUBREG lying entirely outside bytes [OFFSET, OFFSET + size - 1]
   are kept; everything else mentioning REGNO is removed.  */
static void
remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
			    enum machine_mode mode)
{
  unsigned int i;
  struct table_elt *p, *next;
  /* Last byte of the stored range, inclusive.  */
  unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
	rtx exp = p->exp;
	next = p->next_same_hash;

	/* The parenthesized SUBREG_BYTE condition is an inclusive
	   byte-range overlap test between the entry's subreg and
	   [OFFSET, END]: keep the entry only when it is a SUBREG of
	   REGNO that does NOT overlap the stored bytes.  */
	if (!REG_P (exp)
	    && (GET_CODE (exp) != SUBREG
		|| !REG_P (SUBREG_REG (exp))
		|| REGNO (SUBREG_REG (exp)) != regno
		|| (((SUBREG_BYTE (exp)
		      + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
		    && SUBREG_BYTE (exp) <= end))
	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
	  remove_from_table (p, i);
      }
}
7afe21cc
RK
1873\f
/* Recompute the hash codes of any valid entries in the hash table that
   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.

   This is called when we make a jump equivalence.  */

static void
rehash_using_reg (rtx x)
{
  unsigned int i;
  struct table_elt *p, *next;
  unsigned hash;

  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* If X is not a register or if the register is known not to be in any
     valid entries in the table, we have no work to do.  */

  if (!REG_P (x)
      || REG_IN_TABLE (REGNO (x)) < 0
      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
    return;

  /* Scan all hash chains looking for valid entries that mention X.
     If we find one and it is in the wrong hash chain, move it.  */

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	/* SAFE_HASH never sets do_not_record, so HASH is always valid
	   here; only relink when the entry's chain changed.  */
	if (reg_mentioned_p (x, p->exp)
	    && exp_equiv_p (p->exp, p->exp, 1, false)
	    && i != (hash = SAFE_HASH (p->exp, p->mode)))
	  {
	    /* Unlink P from chain I ...  */
	    if (p->next_same_hash)
	      p->next_same_hash->prev_same_hash = p->prev_same_hash;

	    if (p->prev_same_hash)
	      p->prev_same_hash->next_same_hash = p->next_same_hash;
	    else
	      table[i] = p->next_same_hash;

	    /* ... and push it on the front of chain HASH.  */
	    p->next_same_hash = table[hash];
	    p->prev_same_hash = 0;
	    if (table[hash])
	      table[hash]->prev_same_hash = p;
	    table[hash] = p;
	  }
      }
}
1924\f
7afe21cc
RK
/* Remove from the hash table any expression that is a call-clobbered
   register.  Also update their TICK values.  */

static void
invalidate_for_call (void)
{
  unsigned int regno, endregno;
  unsigned int i;
  unsigned hash;
  struct table_elt *p, *next;
  int in_table = 0;

  /* Go through all the hard registers.  For each that is clobbered in
     a CALL_INSN, remove the register from quantity chains and update
     reg_tick if defined.  Also see if any of these registers is currently
     in the table.  */

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
      {
	delete_reg_equiv (regno);
	if (REG_TICK (regno) >= 0)
	  {
	    REG_TICK (regno)++;
	    SUBREG_TICKED (regno) = -1;
	  }

	in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
      }

  /* In the case where we have no call-clobbered hard registers in the
     table, we are done.  Otherwise, scan the table and remove any
     entry that overlaps a call-clobbered register.  */

  if (in_table)
    for (hash = 0; hash < HASH_SIZE; hash++)
      for (p = table[hash]; p; p = next)
	{
	  next = p->next_same_hash;

	  /* Only hard-register REG entries can be clobbered by a call.  */
	  if (!REG_P (p->exp)
	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
	    continue;

	  regno = REGNO (p->exp);
	  endregno = END_HARD_REGNO (p->exp);

	  /* Remove the entry if ANY register it spans is clobbered.  */
	  for (i = regno; i < endregno; i++)
	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
	      {
		remove_from_table (p, hash);
		break;
	      }
	}
}
1980\f
/* Given an expression X of type CONST,
   and ELT which is its table entry (or 0 if it
   is not in the hash table),
   return an alternate expression for X as a register plus integer.
   If none can be found, return 0.  */

static rtx
use_related_value (rtx x, struct table_elt *elt)
{
  struct table_elt *relt = 0;
  struct table_elt *p, *q;
  HOST_WIDE_INT offset;

  /* First, is there anything related known?
     If we have a table element, we can tell from that.
     Otherwise, must look it up.  */

  if (elt != 0 && elt->related_value != 0)
    relt = elt;
  else if (elt == 0 && GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      if (subexp != 0)
	relt = lookup (subexp,
		       SAFE_HASH (subexp, GET_MODE (subexp)),
		       GET_MODE (subexp));
    }

  if (relt == 0)
    return 0;

  /* Search all related table entries for one that has an
     equivalent register.  */

  p = relt;
  while (1)
    {
      /* This loop is strange in that it is executed in two different cases.
	 The first is when X is already in the table.  Then it is searching
	 the RELATED_VALUE list of X's class (RELT).  The second case is when
	 X is not in the table.  Then RELT points to a class for the related
	 value.

	 Ensure that, whatever case we are in, that we ignore classes that have
	 the same value as X.  */

      if (rtx_equal_p (x, p->exp))
	q = 0;
      else
	for (q = p->first_same_value; q; q = q->next_same_value)
	  if (REG_P (q->exp))
	    break;

      if (q)
	break;

      p = p->related_value;

      /* We went all the way around, so there is nothing to be found.
	 Alternatively, perhaps RELT was in the table for some other reason
	 and it has no related values recorded.  */
      if (p == relt || p == 0)
	break;
    }

  if (q == 0)
    return 0;

  offset = (get_integer_term (x) - get_integer_term (p->exp));
  /* Note: OFFSET may be 0 if P->xexp and X are related by commutativity.  */
  return plus_constant (q->exp, offset);
}
2053\f
6462bb43
AO
/* Hash a string by summing its bytes.  A null pointer hashes to 0.  */
static inline unsigned
hash_rtx_string (const char *ps)
{
  unsigned acc = 0;

  if (ps != 0)
    {
      const unsigned char *cp;

      for (cp = (const unsigned char *) ps; *cp != 0; cp++)
	acc += *cp;
    }

  return acc;
}
2067
7afe21cc
RK
/* Hash an rtx.  We are careful to make sure the value is never negative.
   Equivalent registers hash identically.
   MODE is used in hashing for CONST_INTs only;
   otherwise the mode of X is used.

   Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.

   If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
   a MEM rtx which is not MEM_READONLY_P.

   Note that cse_insn knows that the hash code of a MEM expression
   is just (int) MEM plus the hash code of the address.  */

unsigned
hash_rtx (rtx x, enum machine_mode mode, int *do_not_record_p,
	  int *hash_arg_in_memory_p, bool have_reg_qty)
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  const char *fmt;

  /* Used to turn recursion into iteration.  We can't rely on GCC's
     tail-recursion elimination since we need to keep accumulating values
     in HASH.  */
 repeat:
  if (x == 0)
    return hash;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	unsigned int regno = REGNO (x);

	if (!reload_completed)
	  {
	    /* On some machines, we can't record any non-fixed hard register,
	       because extending its life will cause reload problems.  We
	       consider ap, fp, sp, gp to be fixed for this purpose.

	       We also consider CCmode registers to be fixed for this purpose;
	       failure to do so leads to failure to simplify 0<100 type of
	       conditionals.

	       On all machines, we can't record any global registers.
	       Nor should we record any register that is in a small
	       class, as defined by CLASS_LIKELY_SPILLED_P.  */
	    bool record;

	    if (regno >= FIRST_PSEUDO_REGISTER)
	      record = true;
	    else if (x == frame_pointer_rtx
		     || x == hard_frame_pointer_rtx
		     || x == arg_pointer_rtx
		     || x == stack_pointer_rtx
		     || x == pic_offset_table_rtx)
	      record = true;
	    else if (global_regs[regno])
	      record = false;
	    else if (fixed_regs[regno])
	      record = true;
	    else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
	      record = true;
	    else if (SMALL_REGISTER_CLASSES)
	      record = false;
	    else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
	      record = false;
	    else
	      record = true;

	    if (!record)
	      {
		*do_not_record_p = 1;
		return 0;
	      }
	  }

	/* Hash by quantity number when available so that equivalent
	   registers (same qty) hash identically.  */
	hash += ((unsigned int) REG << 7);
	hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
	return hash;
      }

    /* We handle SUBREG of a REG specially because the underlying
       reg changes its hash value with every value change; we don't
       want to have to forget unrelated subregs when one subreg changes.  */
    case SUBREG:
      {
	if (REG_P (SUBREG_REG (x)))
	  {
	    hash += (((unsigned int) SUBREG << 7)
		     + REGNO (SUBREG_REG (x))
		     + (SUBREG_BYTE (x) / UNITS_PER_WORD));
	    return hash;
	  }
	break;
      }

    case CONST_INT:
      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
	       + (unsigned int) INTVAL (x));
      return hash;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      else
	hash += ((unsigned int) CONST_DOUBLE_LOW (x)
		 + (unsigned int) CONST_DOUBLE_HIGH (x));
      return hash;

    case CONST_VECTOR:
      {
	int units;
	rtx elt;

	units = CONST_VECTOR_NUNITS (x);

	/* Fold in the hash of every element.  */
	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ELT (x, i);
	    hash += hash_rtx (elt, GET_MODE (elt), do_not_record_p,
			      hash_arg_in_memory_p, have_reg_qty);
	  }

	return hash;
      }

    /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
	 differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
	       + CODE_LABEL_NUMBER (XEXP (x, 0)));
      return hash;

    case SYMBOL_REF:
      {
	/* Don't hash on the symbol's address to avoid bootstrap differences.
	   Different hash values may cause expressions to be recorded in
	   different orders and thus different registers to be used in the
	   final assembler.  This also avoids differences in the dump files
	   between various stages.  */
	unsigned int h = 0;
	const unsigned char *p = (const unsigned char *) XSTR (x, 0);

	while (*p)
	  h += (h << 7) + *p++; /* ??? revisit */

	hash += ((unsigned int) SYMBOL_REF << 7) + h;
	return hash;
      }

    case MEM:
      /* We don't record if marked volatile or if BLKmode since we don't
	 know the size of the move.  */
      if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
	{
	  *do_not_record_p = 1;
	  return 0;
	}
      if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
	*hash_arg_in_memory_p = 1;

      /* Now that we have already found this special case,
	 might as well speed it up as much as possible.  */
      hash += (unsigned) MEM;
      x = XEXP (x, 0);
      goto repeat;

    case USE:
      /* A USE that mentions non-volatile memory needs special
	 handling since the MEM may be BLKmode which normally
	 prevents an entry from being made.  Pure calls are
	 marked by a USE which mentions BLKmode memory.
	 See calls.c:emit_call_1.  */
      if (MEM_P (XEXP (x, 0))
	  && ! MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  hash += (unsigned) USE;
	  x = XEXP (x, 0);

	  if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
	    *hash_arg_in_memory_p = 1;

	  /* Now that we have already found this special case,
	     might as well speed it up as much as possible.  */
	  hash += (unsigned) MEM;
	  x = XEXP (x, 0);
	  goto repeat;
	}
      break;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      /* Side-effecting or unique rtxes must never be entered in the
	 table.  */
      *do_not_record_p = 1;
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	{
	  *do_not_record_p = 1;
	  return 0;
	}
      else
	{
	  /* We don't want to take the filename and line into account.  */
	  hash += (unsigned) code + (unsigned) GET_MODE (x)
	    + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
	    + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);

	  if (ASM_OPERANDS_INPUT_LENGTH (x))
	    {
	      /* Input 0 is handled by the tail `goto repeat' below;
		 fold in the rest here.  */
	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
		{
		  hash += (hash_rtx (ASM_OPERANDS_INPUT (x, i),
				     GET_MODE (ASM_OPERANDS_INPUT (x, i)),
				     do_not_record_p, hash_arg_in_memory_p,
				     have_reg_qty)
			   + hash_rtx_string
			   (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
		}

	      hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
	      x = ASM_OPERANDS_INPUT (x, 0);
	      mode = GET_MODE (x);
	      goto repeat;
	    }

	  return hash;
	}
      break;

    default:
      break;
    }

  /* Generic case: hash the code, the mode, and every operand according
     to the rtx format string.  */
  i = GET_RTX_LENGTH (code) - 1;
  hash += (unsigned) code + (unsigned) GET_MODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }

	  hash += hash_rtx (XEXP (x, i), 0, do_not_record_p,
			    hash_arg_in_memory_p, have_reg_qty);
	  break;

	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    hash += hash_rtx (XVECEXP (x, i, j), 0, do_not_record_p,
			      hash_arg_in_memory_p, have_reg_qty);
	  break;

	case 's':
	  hash += hash_rtx_string (XSTR (x, i));
	  break;

	case 'i':
	  hash += (unsigned int) XINT (x, i);
	  break;

	case '0': case 't':
	  /* Unused.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  return hash;
}
2364
0516f6fe
SB
2365/* Hash an rtx X for cse via hash_rtx.
2366 Stores 1 in do_not_record if any subexpression is volatile.
2367 Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2368 does not have the RTX_UNCHANGING_P bit set. */
2369
2370static inline unsigned
2371canon_hash (rtx x, enum machine_mode mode)
2372{
2373 return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2374}
2375
2376/* Like canon_hash but with no side effects, i.e. do_not_record
2377 and hash_arg_in_memory are not changed. */
7afe21cc 2378
0516f6fe 2379static inline unsigned
7080f735 2380safe_hash (rtx x, enum machine_mode mode)
7afe21cc 2381{
0516f6fe
SB
2382 int dummy_do_not_record;
2383 return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
7afe21cc
RK
2384}
2385\f
/* Return 1 iff X and Y would canonicalize into the same thing,
   without actually constructing the canonicalization of either one.
   If VALIDATE is nonzero,
   we assume X is an expression being processed from the rtl
   and Y was found in the hash table.  We check register refs
   in Y for being marked as valid.

   If FOR_GCSE is true, we compare X and Y for equivalence for GCSE.  */

int
exp_equiv_p (rtx x, rtx y, int validate, bool for_gcse)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  /* Note: it is incorrect to assume an expression is equivalent to itself
     if VALIDATE is nonzero.  */
  if (x == y && !validate)
    return 1;

  if (x == 0 || y == 0)
    return x == y;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
      /* These rtxes are shared, so pointer identity suffices.  */
      return x == y;

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      /* Symbol names are shared strings, so pointer comparison works.  */
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      if (for_gcse)
	return REGNO (x) == REGNO (y);
      else
	{
	  unsigned int regno = REGNO (y);
	  unsigned int i;
	  unsigned int endregno = END_REGNO (y);

	  /* If the quantities are not the same, the expressions are not
	     equivalent.  If there are and we are not to validate, they
	     are equivalent.  Otherwise, ensure all regs are up-to-date.  */

	  if (REG_QTY (REGNO (x)) != REG_QTY (regno))
	    return 0;

	  if (! validate)
	    return 1;

	  /* A hard register may span several regs; all must be valid.  */
	  for (i = regno; i < endregno; i++)
	    if (REG_IN_TABLE (i) != REG_TICK (i))
	      return 0;

	  return 1;
	}

    case MEM:
      if (for_gcse)
	{
	  /* A volatile mem should not be considered equivalent to any
	     other.  */
	  if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	    return 0;

	  /* Can't merge two expressions in different alias sets, since we
	     can decide that the expression is transparent in a block when
	     it isn't, due to it being set with the different alias set.

	     Also, can't merge two expressions with different MEM_ATTRS.
	     They could e.g. be two different entities allocated into the
	     same space on the stack (see e.g. PR25130).  In that case, the
	     MEM addresses can be the same, even though the two MEMs are
	     absolutely not equivalent.

	     But because really all MEM attributes should be the same for
	     equivalent MEMs, we just use the invariant that MEMs that have
	     the same attributes share the same mem_attrs data structure.  */
	  if (MEM_ATTRS (x) != MEM_ATTRS (y))
	    return 0;
	}
      break;

    /* For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
			    validate, for_gcse)
	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
			       validate, for_gcse))
	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
			       validate, for_gcse)
		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
				  validate, for_gcse)));

    case ASM_OPERANDS:
      /* We don't use the generic code below because we want to
	 disregard filename and line numbers.  */

      /* A volatile asm isn't equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      if (GET_MODE (x) != GET_MODE (y)
	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
	return 0;

      if (ASM_OPERANDS_INPUT_LENGTH (x))
	{
	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	    if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
			       ASM_OPERANDS_INPUT (y, i),
			       validate, for_gcse)
		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
	      return 0;
	}

      return 1;

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
			     validate, for_gcse))
	    return 0;
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
			       validate, for_gcse))
	      return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case '0':
	case 't':
	  /* These fields carry no operand data to compare.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  return 1;
}
2583\f
9ae8ffe7
JL
/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.  */

static int
cse_rtx_varies_p (rtx x, int from_alias)
{
  /* We need not check for X and the equivalence class being of the same
     mode because if X is equivalent to a constant in some mode, it
     doesn't vary in any mode.  */

  /* (reg) whose quantity currently has a known constant value.  */
  if (REG_P (x)
      && REGNO_QTY_VALID_P (REGNO (x)))
    {
      int x_q = REG_QTY (REGNO (x));
      struct qty_table_elem *x_ent = &qty_table[x_q];

      if (GET_MODE (x) == x_ent->mode
	  && x_ent->const_rtx != NULL_RTX)
	return 0;
    }

  /* (plus (reg) (const_int)) with a constant-valued register.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && REG_P (XEXP (x, 0))
      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
    {
      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
      struct qty_table_elem *x0_ent = &qty_table[x0_q];

      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
	  && x0_ent->const_rtx != NULL_RTX)
	return 0;
    }

  /* This can happen as the result of virtual register instantiation, if
     the initial constant is too large to be a valid address.  This gives
     us a three instruction sequence, load large offset into a register,
     load fp minus a constant into a register, then a MEM which is the
     sum of the two `constant' registers.  */
  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && REG_P (XEXP (x, 1))
      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
    {
      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
      int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
      struct qty_table_elem *x0_ent = &qty_table[x0_q];
      struct qty_table_elem *x1_ent = &qty_table[x1_q];

      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
	  && x0_ent->const_rtx != NULL_RTX
	  && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
	  && x1_ent->const_rtx != NULL_RTX)
	return 0;
    }

  /* No CSE-specific knowledge applies; fall back to the generic test.  */
  return rtx_varies_p (x, from_alias);
}
2644\f
eef3c949
RS
2645/* Subroutine of canon_reg. Pass *XLOC through canon_reg, and validate
2646 the result if necessary. INSN is as for canon_reg. */
2647
2648static void
2649validate_canon_reg (rtx *xloc, rtx insn)
2650{
2651 rtx new = canon_reg (*xloc, insn);
eef3c949
RS
2652
2653 /* If replacing pseudo with hard reg or vice versa, ensure the
2654 insn remains valid. Likewise if the insn has MATCH_DUPs. */
67e0a632 2655 if (insn != 0 && new != 0)
eef3c949
RS
2656 validate_change (insn, xloc, new, 1);
2657 else
2658 *xloc = new;
2659}
2660
7afe21cc
RK
/* Canonicalize an expression:
   replace each register reference inside it
   with the "oldest" equivalent register.

   If INSN is nonzero validate_change is used to ensure that INSN remains valid
   after we make our substitution.  The calls are made with IN_GROUP nonzero
   so apply_change_group must be called upon the outermost return from this
   function (unless INSN is zero).  The result of apply_change_group can
   generally be discarded since the changes we are making are optional.  */

static rtx
canon_reg (rtx x, rtx insn)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return x;

  code = GET_CODE (x);
  switch (code)
    {
    /* Leaf rtxes with no register references: return unchanged.  */
    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return x;

    case REG:
      {
	int first;
	int q;
	struct qty_table_elem *ent;

	/* Never replace a hard reg, because hard regs can appear
	   in more than one machine mode, and we must preserve the mode
	   of each occurrence.  Also, some hard regs appear in
	   MEMs that are shared and mustn't be altered.  Don't try to
	   replace any reg that maps to a reg of class NO_REGS.  */
	if (REGNO (x) < FIRST_PSEUDO_REGISTER
	    || ! REGNO_QTY_VALID_P (REGNO (x)))
	  return x;

	q = REG_QTY (REGNO (x));
	ent = &qty_table[q];
	first = ent->first_reg;
	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
		: REGNO_REG_CLASS (first) == NO_REGS ? x
		: gen_rtx_REG (ent->mode, first));
      }

    default:
      break;
    }

  /* Composite rtx: canonicalize every operand in place.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      if (fmt[i] == 'e')
	validate_canon_reg (&XEXP (x, i), insn);
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  validate_canon_reg (&XVECEXP (x, i, j), insn);
    }

  return x;
}
2737\f
bca05d20
RK
/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
   operation (EQ, NE, GT, etc.), follow it back through the hash table and
   what values are being compared.

   *PARG1 and *PARG2 are updated to contain the rtx representing the values
   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
   compared to produce cc0.

   *PMODE1 and *PMODE2 receive the modes of those values as seen BEFORE the
   final fold_rtx calls (folding may turn them into VOIDmode CONST_INTs).

   The return value is the comparison operator and is either the code of
   A or the code corresponding to the inverse of the comparison.  */

static enum rtx_code
find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
		      enum machine_mode *pmode1, enum machine_mode *pmode2)
{
  rtx arg1, arg2;

  arg1 = *parg1, arg2 = *parg2;

  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  Each iteration
     either rewrites (ARG1, ARG2, CODE) to an equivalent earlier comparison
     or breaks out when no further equivalence is found.  */

  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
    {
      /* Set nonzero when we find something of interest.  */
      rtx x = 0;
      int reverse_code = 0;
      struct table_elt *p = 0;

      /* If arg1 is a COMPARE, extract the comparison arguments from it.
	 On machines with CC0, this is the only case that can occur, since
	 fold_rtx will return the COMPARE or item being compared with zero
	 when given CC0.  */

      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
	x = arg1;

      /* If ARG1 is a comparison operator and CODE is testing for
	 STORE_FLAG_VALUE, get the inner arguments.  */

      else if (COMPARISON_P (arg1))
	{
#ifdef FLOAT_STORE_FLAG_VALUE
	  REAL_VALUE_TYPE fsfv;
#endif

	  if (code == NE
	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
		  && code == LT && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
	      || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
		  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
		      REAL_VALUE_NEGATIVE (fsfv)))
#endif
	      )
	    x = arg1;
	  else if (code == EQ
		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
		       && code == GE && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
		   || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			   REAL_VALUE_NEGATIVE (fsfv)))
#endif
		   )
	    x = arg1, reverse_code = 1;
	}

      /* ??? We could also check for

	 (ne (and (eq (...) (const_int 1))) (const_int 0))

	 and related forms, but let's wait until we see them occurring.  */

      if (x == 0)
	/* Look up ARG1 in the hash table and see if it has an equivalence
	   that lets us see what is being compared.  */
	p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
      if (p)
	{
	  p = p->first_same_value;

	  /* If what we compare is already known to be constant, that is as
	     good as it gets.
	     We need to break the loop in this case, because otherwise we
	     can have an infinite loop when looking at a reg that is known
	     to be a constant which is the same as a comparison of a reg
	     against zero which appears later in the insn stream, which in
	     turn is constant and the same as the comparison of the first reg
	     against zero...  */
	  if (p->is_const)
	    break;
	}

      for (; p; p = p->next_same_value)
	{
	  enum machine_mode inner_mode = GET_MODE (p->exp);
#ifdef FLOAT_STORE_FLAG_VALUE
	  REAL_VALUE_TYPE fsfv;
#endif

	  /* If the entry isn't valid, skip it.  */
	  if (! exp_equiv_p (p->exp, p->exp, 1, false))
	    continue;

	  if (GET_CODE (p->exp) == COMPARE
	      /* Another possibility is that this machine has a compare insn
		 that includes the comparison code.  In that case, ARG1 would
		 be equivalent to a comparison operation that would set ARG1 to
		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
		 ORIG_CODE is the actual comparison being done; if it is an EQ,
		 we must reverse ORIG_CODE.  On machine with a negative value
		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
	      || ((code == NE
		   || (code == LT
		       && GET_MODE_CLASS (inner_mode) == MODE_INT
		       && (GET_MODE_BITSIZE (inner_mode)
			   <= HOST_BITS_PER_WIDE_INT)
		       && (STORE_FLAG_VALUE
			   & ((HOST_WIDE_INT) 1
			      << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
		   || (code == LT
		       && SCALAR_FLOAT_MODE_P (inner_mode)
		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			   REAL_VALUE_NEGATIVE (fsfv)))
#endif
		   )
		  && COMPARISON_P (p->exp)))
	    {
	      x = p->exp;
	      break;
	    }
	  else if ((code == EQ
		    || (code == GE
			&& GET_MODE_CLASS (inner_mode) == MODE_INT
			&& (GET_MODE_BITSIZE (inner_mode)
			    <= HOST_BITS_PER_WIDE_INT)
			&& (STORE_FLAG_VALUE
			    & ((HOST_WIDE_INT) 1
			       << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
		    || (code == GE
			&& SCALAR_FLOAT_MODE_P (inner_mode)
			&& (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			    REAL_VALUE_NEGATIVE (fsfv)))
#endif
		    )
		   && COMPARISON_P (p->exp))
	    {
	      reverse_code = 1;
	      x = p->exp;
	      break;
	    }

	  /* If this is a non-trapping address, e.g. fp + constant, the
	     equivalent is a better operand since it may let us predict
	     the value of the comparison.  */
	  else if (!rtx_addr_can_trap_p (p->exp))
	    {
	      arg1 = p->exp;
	      continue;
	    }
	}

      /* If we didn't find a useful equivalence for ARG1, we are done.
	 Otherwise, set up for the next iteration.  */
      if (x == 0)
	break;

      /* If we need to reverse the comparison, make sure that that is
	 possible -- we can't necessarily infer the value of GE from LT
	 with floating-point operands.  */
      if (reverse_code)
	{
	  enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
	  if (reversed == UNKNOWN)
	    break;
	  else
	    code = reversed;
	}
      else if (COMPARISON_P (x))
	code = GET_CODE (x);
      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
    }

  /* Return our results.  Return the modes from before fold_rtx
     because fold_rtx might produce const_int, and then it's too late.  */
  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);

  return code;
}
2931\f
a52b023a
PB
/* If X is a nontrivial arithmetic operation on an argument for which
   a constant value can be determined, return the result of operating
   on that value, as a constant.  Otherwise, return X, possibly with
   one or more operands changed to a forward-propagated constant.

   If X is a register whose contents are known, we do NOT return
   those contents here; equiv_constant is called to perform that task.
   For SUBREGs and MEMs, we do that both here and in equiv_constant.

   INSN is the insn that we may be modifying.  If it is 0, make a copy
   of X before modifying it.  */

static rtx
fold_rtx (rtx x, rtx insn)
{
  enum rtx_code code;
  enum machine_mode mode;
  const char *fmt;
  int i;
  rtx new = 0;
  /* Nonzero once we have queued at least one operand replacement in X
     (and, when INSN is 0, have made our private copy of X).  */
  int changed = 0;

  /* Operands of X.  */
  rtx folded_arg0;
  rtx folded_arg1;

  /* Constant equivalents of first three operands of X;
     0 when no such equivalent is known.  */
  rtx const_arg0;
  rtx const_arg1;
  rtx const_arg2;

  /* The mode of the first operand of X.  We need this for sign and zero
     extends.  */
  enum machine_mode mode_arg0;

  if (x == 0)
    return x;

  /* Try to perform some initial simplifications on X.  */
  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
    case SUBREG:
      if ((new = equiv_constant (x)) != NULL_RTX)
	return new;
      return x;

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case REG:
    case PC:
      /* No use simplifying an EXPR_LIST
	 since they are used only for lists of args
	 in a function call's REG_EQUAL note.  */
    case EXPR_LIST:
      return x;

#ifdef HAVE_cc0
    case CC0:
      return prev_insn_cc0;
#endif

    case ASM_OPERANDS:
      /* Fold each asm input in place; these changes are applied
	 immediately (IN_GROUP == 0), not queued.  */
      if (insn)
	{
	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	    validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
			     fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
	}
      return x;

#ifdef NO_FUNCTION_CSE
    case CALL:
      /* On targets defining NO_FUNCTION_CSE, don't fold through calls
	 to constant addresses.  */
      if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
	return x;
      break;
#endif

      /* Anything else goes through the loop below.  */
    default:
      break;
    }

  mode = GET_MODE (x);
  const_arg0 = 0;
  const_arg1 = 0;
  const_arg2 = 0;
  mode_arg0 = VOIDmode;

  /* Try folding our operands.
     Then see which ones have constant values known.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
	rtx folded_arg = XEXP (x, i), const_arg;
	enum machine_mode mode_arg = GET_MODE (folded_arg);

	switch (GET_CODE (folded_arg))
	  {
	  case MEM:
	  case REG:
	  case SUBREG:
	    const_arg = equiv_constant (folded_arg);
	    break;

	  case CONST:
	  case CONST_INT:
	  case SYMBOL_REF:
	  case LABEL_REF:
	  case CONST_DOUBLE:
	  case CONST_VECTOR:
	    const_arg = folded_arg;
	    break;

#ifdef HAVE_cc0
	  case CC0:
	    folded_arg = prev_insn_cc0;
	    mode_arg = prev_insn_cc0_mode;
	    const_arg = equiv_constant (folded_arg);
	    break;
#endif

	  default:
	    folded_arg = fold_rtx (folded_arg, insn);
	    const_arg = equiv_constant (folded_arg);
	    break;
	  }

	/* For the first three operands, see if the operand
	   is constant or equivalent to a constant.  */
	switch (i)
	  {
	  case 0:
	    folded_arg0 = folded_arg;
	    const_arg0 = const_arg;
	    mode_arg0 = mode_arg;
	    break;
	  case 1:
	    folded_arg1 = folded_arg;
	    const_arg1 = const_arg;
	    break;
	  case 2:
	    const_arg2 = const_arg;
	    break;
	  }

	/* Pick the least expensive of the argument and an equivalent constant
	   argument.  */
	if (const_arg != 0
	    && const_arg != folded_arg
	    && COST_IN (const_arg, code) <= COST_IN (folded_arg, code)

	    /* It's not safe to substitute the operand of a conversion
	       operator with a constant, as the conversion's identity
	       depends upon the mode of its operand.  This optimization
	       is handled by the call to simplify_unary_operation.  */
	    && (GET_RTX_CLASS (code) != RTX_UNARY
		|| GET_MODE (const_arg) == mode_arg0
		|| (code != ZERO_EXTEND
		    && code != SIGN_EXTEND
		    && code != TRUNCATE
		    && code != FLOAT_TRUNCATE
		    && code != FLOAT_EXTEND
		    && code != FLOAT
		    && code != FIX
		    && code != UNSIGNED_FLOAT
		    && code != UNSIGNED_FIX)))
	  folded_arg = const_arg;

	if (folded_arg == XEXP (x, i))
	  continue;

	/* With no insn to validate against, work on a private copy of X
	   so the original is never mutated.  */
	if (insn == NULL_RTX && !changed)
	  x = copy_rtx (x);
	changed = 1;
	validate_change (insn, &XEXP (x, i), folded_arg, 1);
      }

  if (changed)
    {
      /* Canonicalize X if necessary, and keep const_argN and folded_argN
	 consistent with the order in X.  */
      if (canonicalize_change_group (insn, x))
	{
	  rtx tem;
	  tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
	  tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
	}

      apply_change_group ();
    }

  /* If X is an arithmetic operation, see if we can simplify it.  */

  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      {
	int is_const = 0;

	/* We can't simplify extension ops unless we know the
	   original mode.  */
	if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
	    && mode_arg0 == VOIDmode)
	  break;

	/* If we had a CONST, strip it off and put it back later if we
	   fold.  */
	if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
	  is_const = 1, const_arg0 = XEXP (const_arg0, 0);

	new = simplify_unary_operation (code, mode,
					const_arg0 ? const_arg0 : folded_arg0,
					mode_arg0);
	/* NEG of PLUS could be converted into MINUS, but that causes
	   expressions of the form
	   (CONST (MINUS (CONST_INT) (SYMBOL_REF)))
	   which many ports mistakenly treat as LEGITIMATE_CONSTANT_P.
	   FIXME: those ports should be fixed.  */
	if (new != 0 && is_const
	    && GET_CODE (new) == PLUS
	    && (GET_CODE (XEXP (new, 0)) == SYMBOL_REF
		|| GET_CODE (XEXP (new, 0)) == LABEL_REF)
	    && GET_CODE (XEXP (new, 1)) == CONST_INT)
	  new = gen_rtx_CONST (mode, new);
      }
      break;

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      /* See what items are actually being compared and set FOLDED_ARG[01]
	 to those values and CODE to the actual comparison code.  If any are
	 constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
	 do anything if both operands are already known to be constant.  */

      /* ??? Vector mode comparisons are not supported yet.  */
      if (VECTOR_MODE_P (mode))
	break;

      if (const_arg0 == 0 || const_arg1 == 0)
	{
	  struct table_elt *p0, *p1;
	  rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
	  enum machine_mode mode_arg1;

#ifdef FLOAT_STORE_FLAG_VALUE
	  if (SCALAR_FLOAT_MODE_P (mode))
	    {
	      true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
			  (FLOAT_STORE_FLAG_VALUE (mode), mode));
	      false_rtx = CONST0_RTX (mode);
	    }
#endif

	  code = find_comparison_args (code, &folded_arg0, &folded_arg1,
				       &mode_arg0, &mode_arg1);

	  /* If the mode is VOIDmode or a MODE_CC mode, we don't know
	     what kinds of things are being compared, so we can't do
	     anything with this comparison.  */

	  if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
	    break;

	  const_arg0 = equiv_constant (folded_arg0);
	  const_arg1 = equiv_constant (folded_arg1);

	  /* If we do not now have two constants being compared, see
	     if we can nevertheless deduce some things about the
	     comparison.  */
	  if (const_arg0 == 0 || const_arg1 == 0)
	    {
	      if (const_arg1 != NULL)
		{
		  rtx cheapest_simplification;
		  int cheapest_cost;
		  rtx simp_result;
		  struct table_elt *p;

		  /* See if we can find an equivalent of folded_arg0
		     that gets us a cheaper expression, possibly a
		     constant through simplifications.  */
		  p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0),
			      mode_arg0);

		  if (p != NULL)
		    {
		      cheapest_simplification = x;
		      cheapest_cost = COST (x);

		      for (p = p->first_same_value; p != NULL; p = p->next_same_value)
			{
			  int cost;

			  /* If the entry isn't valid, skip it.  */
			  if (! exp_equiv_p (p->exp, p->exp, 1, false))
			    continue;

			  /* Try to simplify using this equivalence.  */
			  simp_result
			    = simplify_relational_operation (code, mode,
							     mode_arg0,
							     p->exp,
							     const_arg1);

			  if (simp_result == NULL)
			    continue;

			  cost = COST (simp_result);
			  if (cost < cheapest_cost)
			    {
			      cheapest_cost = cost;
			      cheapest_simplification = simp_result;
			    }
			}

		      /* If we have a cheaper expression now, use that
			 and try folding it further, from the top.  */
		      if (cheapest_simplification != x)
			return fold_rtx (cheapest_simplification, insn);
		    }
		}

	      /* Some addresses are known to be nonzero.  We don't know
		 their sign, but equality comparisons are known.  */
	      if (const_arg1 == const0_rtx
		  && nonzero_address_p (folded_arg0))
		{
		  if (code == EQ)
		    return false_rtx;
		  else if (code == NE)
		    return true_rtx;
		}

	      /* See if the two operands are the same.  */

	      if (folded_arg0 == folded_arg1
		  || (REG_P (folded_arg0)
		      && REG_P (folded_arg1)
		      && (REG_QTY (REGNO (folded_arg0))
			  == REG_QTY (REGNO (folded_arg1))))
		  || ((p0 = lookup (folded_arg0,
				    SAFE_HASH (folded_arg0, mode_arg0),
				    mode_arg0))
		      && (p1 = lookup (folded_arg1,
				       SAFE_HASH (folded_arg1, mode_arg0),
				       mode_arg0))
		      && p0->first_same_value == p1->first_same_value))
		{
		  /* Sadly two equal NaNs are not equivalent.  */
		  if (!HONOR_NANS (mode_arg0))
		    return ((code == EQ || code == LE || code == GE
			     || code == LEU || code == GEU || code == UNEQ
			     || code == UNLE || code == UNGE
			     || code == ORDERED)
			    ? true_rtx : false_rtx);
		  /* Take care for the FP compares we can resolve.  */
		  if (code == UNEQ || code == UNLE || code == UNGE)
		    return true_rtx;
		  if (code == LTGT || code == LT || code == GT)
		    return false_rtx;
		}

	      /* If FOLDED_ARG0 is a register, see if the comparison we are
		 doing now is either the same as we did before or the reverse
		 (we only check the reverse if not floating-point).  */
	      else if (REG_P (folded_arg0))
		{
		  int qty = REG_QTY (REGNO (folded_arg0));

		  if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
		    {
		      struct qty_table_elem *ent = &qty_table[qty];

		      if ((comparison_dominates_p (ent->comparison_code, code)
			   || (! FLOAT_MODE_P (mode_arg0)
			       && comparison_dominates_p (ent->comparison_code,
							  reverse_condition (code))))
			  && (rtx_equal_p (ent->comparison_const, folded_arg1)
			      || (const_arg1
				  && rtx_equal_p (ent->comparison_const,
						  const_arg1))
			      || (REG_P (folded_arg1)
				  && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
			return (comparison_dominates_p (ent->comparison_code, code)
				? true_rtx : false_rtx);
		    }
		}
	    }
	}

      /* If we are comparing against zero, see if the first operand is
	 equivalent to an IOR with a constant.  If so, we may be able to
	 determine the result of this comparison.  */

      if (const_arg1 == const0_rtx)
	{
	  rtx y = lookup_as_function (folded_arg0, IOR);
	  rtx inner_const;

	  if (y != 0
	      && (inner_const = equiv_constant (XEXP (y, 1))) != 0
	      && GET_CODE (inner_const) == CONST_INT
	      && INTVAL (inner_const) != 0)
	    {
	      int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
	      int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
			      && (INTVAL (inner_const)
				  & ((HOST_WIDE_INT) 1 << sign_bitnum)));
	      rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;

#ifdef FLOAT_STORE_FLAG_VALUE
	      if (SCALAR_FLOAT_MODE_P (mode))
		{
		  true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
			      (FLOAT_STORE_FLAG_VALUE (mode), mode));
		  false_rtx = CONST0_RTX (mode);
		}
#endif

	      /* An IOR with a nonzero constant is known nonzero; if it
		 also sets the sign bit, the sign of the result is known.  */
	      switch (code)
		{
		case EQ:
		  return false_rtx;
		case NE:
		  return true_rtx;
		case LT:  case LE:
		  if (has_sign)
		    return true_rtx;
		  break;
		case GT:  case GE:
		  if (has_sign)
		    return false_rtx;
		  break;
		default:
		  break;
		}
	    }
	}

      {
	rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
	rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
	new = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
      }
      break;

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      switch (code)
	{
	case PLUS:
	  /* If the second operand is a LABEL_REF, see if the first is a MINUS
	     with that LABEL_REF as its second operand.  If so, the result is
	     the first operand of that MINUS.  This handles switches with an
	     ADDR_DIFF_VEC table.  */
	  if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
	    {
	      rtx y
		= GET_CODE (folded_arg0) == MINUS ? folded_arg0
		: lookup_as_function (folded_arg0, MINUS);

	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
		return XEXP (y, 0);

	      /* Now try for a CONST of a MINUS like the above.  */
	      if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
			: lookup_as_function (folded_arg0, CONST))) != 0
		  && GET_CODE (XEXP (y, 0)) == MINUS
		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
		return XEXP (XEXP (y, 0), 0);
	    }

	  /* Likewise if the operands are in the other order.  */
	  if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
	    {
	      rtx y
		= GET_CODE (folded_arg1) == MINUS ? folded_arg1
		: lookup_as_function (folded_arg1, MINUS);

	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
		return XEXP (y, 0);

	      /* Now try for a CONST of a MINUS like the above.  */
	      if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
			: lookup_as_function (folded_arg1, CONST))) != 0
		  && GET_CODE (XEXP (y, 0)) == MINUS
		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
		return XEXP (XEXP (y, 0), 0);
	    }

	  /* If second operand is a register equivalent to a negative
	     CONST_INT, see if we can find a register equivalent to the
	     positive constant.  Make a MINUS if so.  Don't do this for
	     a non-negative constant since we might then alternate between
	     choosing positive and negative constants.  Having the positive
	     constant previously-used is the more common case.  Be sure
	     the resulting constant is non-negative; if const_arg1 were
	     the smallest negative number this would overflow: depending
	     on the mode, this would either just be the same value (and
	     hence not save anything) or be incorrect.  */
	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
	      && INTVAL (const_arg1) < 0
	      /* This used to test

		 -INTVAL (const_arg1) >= 0

		 But The Sun V5.0 compilers mis-compiled that test.  So
		 instead we test for the problematic value in a more direct
		 manner and hope the Sun compilers get it correct.  */
	      && INTVAL (const_arg1) !=
		((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
	      && REG_P (folded_arg1))
	    {
	      rtx new_const = GEN_INT (-INTVAL (const_arg1));
	      struct table_elt *p
		= lookup (new_const, SAFE_HASH (new_const, mode), mode);

	      if (p)
		for (p = p->first_same_value; p; p = p->next_same_value)
		  if (REG_P (p->exp))
		    return simplify_gen_binary (MINUS, mode, folded_arg0,
						canon_reg (p->exp, NULL_RTX));
	    }
	  goto from_plus;

	case MINUS:
	  /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
	     If so, produce (PLUS Z C2-C).  */
	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
	    {
	      rtx y = lookup_as_function (XEXP (x, 0), PLUS);
	      if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
		return fold_rtx (plus_constant (copy_rtx (y),
						-INTVAL (const_arg1)),
				 NULL_RTX);
	    }

	  /* Fall through.  */

	from_plus:
	case SMIN:  case SMAX:  case UMIN:  case UMAX:
	case IOR:  case AND:  case XOR:
	case MULT:
	case ASHIFT:  case LSHIFTRT:  case ASHIFTRT:
	  /* If we have (<op> <reg> <const_int>) for an associative OP and REG
	     is known to be of similar form, we may be able to replace the
	     operation with a combined operation.  This may eliminate the
	     intermediate operation if every use is simplified in this way.
	     Note that the similar optimization done by combine.c only works
	     if the intermediate operation's result has only one reference.  */

	  if (REG_P (folded_arg0)
	      && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
	    {
	      int is_shift
		= (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
	      rtx y, inner_const, new_const;
	      enum rtx_code associate_code;

	      /* Normalize an out-of-range shift count when the target
		 truncates shift counts; otherwise give up.  */
	      if (is_shift
		  && (INTVAL (const_arg1) >= GET_MODE_BITSIZE (mode)
		      || INTVAL (const_arg1) < 0))
		{
		  if (SHIFT_COUNT_TRUNCATED)
		    const_arg1 = GEN_INT (INTVAL (const_arg1)
					  & (GET_MODE_BITSIZE (mode) - 1));
		  else
		    break;
		}

	      y = lookup_as_function (folded_arg0, code);
	      if (y == 0)
		break;

	      /* If we have compiled a statement like
		 "if (x == (x & mask1))", and now are looking at
		 "x & mask2", we will have a case where the first operand
		 of Y is the same as our first operand.  Unless we detect
		 this case, an infinite loop will result.  */
	      if (XEXP (y, 0) == folded_arg0)
		break;

	      inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
	      if (!inner_const || GET_CODE (inner_const) != CONST_INT)
		break;

	      /* Don't associate these operations if they are a PLUS with the
		 same constant and it is a power of two.  These might be doable
		 with a pre- or post-increment.  Similarly for two subtracts of
		 identical powers of two with post decrement.  */

	      if (code == PLUS && const_arg1 == inner_const
		  && ((HAVE_PRE_INCREMENT
		       && exact_log2 (INTVAL (const_arg1)) >= 0)
		      || (HAVE_POST_INCREMENT
			  && exact_log2 (INTVAL (const_arg1)) >= 0)
		      || (HAVE_PRE_DECREMENT
			  && exact_log2 (- INTVAL (const_arg1)) >= 0)
		      || (HAVE_POST_DECREMENT
			  && exact_log2 (- INTVAL (const_arg1)) >= 0)))
		break;

	      /* Same out-of-range normalization for the inner shift count.  */
	      if (is_shift
		  && (INTVAL (inner_const) >= GET_MODE_BITSIZE (mode)
		      || INTVAL (inner_const) < 0))
		{
		  if (SHIFT_COUNT_TRUNCATED)
		    inner_const = GEN_INT (INTVAL (inner_const)
					   & (GET_MODE_BITSIZE (mode) - 1));
		  else
		    break;
		}

	      /* Compute the code used to compose the constants.  For example,
		 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS.  */

	      associate_code = (is_shift || code == MINUS ? PLUS : code);

	      new_const = simplify_binary_operation (associate_code, mode,
						     const_arg1, inner_const);

	      if (new_const == 0)
		break;

	      /* If we are associating shift operations, don't let this
		 produce a shift of the size of the object or larger.
		 This could occur when we follow a sign-extend by a right
		 shift on a machine that does a sign-extend as a pair
		 of shifts.  */

	      if (is_shift
		  && GET_CODE (new_const) == CONST_INT
		  && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
		{
		  /* As an exception, we can turn an ASHIFTRT of this
		     form into a shift of the number of bits - 1.  */
		  if (code == ASHIFTRT)
		    new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
		  else if (!side_effects_p (XEXP (y, 0)))
		    return CONST0_RTX (mode);
		  else
		    break;
		}

	      y = copy_rtx (XEXP (y, 0));

	      /* If Y contains our first operand (the most common way this
		 can happen is if Y is a MEM), we would go into an infinite
		 loop if we tried to fold it.  So don't in that case.  */

	      if (! reg_mentioned_p (folded_arg0, y))
		y = fold_rtx (y, insn);

	      return simplify_gen_binary (code, mode, y, new_const);
	    }
	  break;

	case DIV:  case UDIV:
	  /* ??? The associative optimization performed immediately above is
	     also possible for DIV and UDIV using associate_code of MULT.
	     However, we would need extra code to verify that the
	     multiplication does not overflow, that is, there is no overflow
	     in the calculation of new_const.  */
	  break;

	default:
	  break;
	}

      new = simplify_binary_operation (code, mode,
				       const_arg0 ? const_arg0 : folded_arg0,
				       const_arg1 ? const_arg1 : folded_arg1);
      break;

    case RTX_OBJ:
      /* (lo_sum (high X) X) is simply X.  */
      if (code == LO_SUM && const_arg0 != 0
	  && GET_CODE (const_arg0) == HIGH
	  && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
	return const_arg1;
      break;

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      new = simplify_ternary_operation (code, mode, mode_arg0,
					const_arg0 ? const_arg0 : folded_arg0,
					const_arg1 ? const_arg1 : folded_arg1,
					const_arg2 ? const_arg2 : XEXP (x, 2));
      break;

    default:
      break;
    }

  return new ? new : x;
}
3641\f
3642/* Return a constant value currently equivalent to X.
3643 Return 0 if we don't know one. */
3644
3645static rtx
7080f735 3646equiv_constant (rtx x)
7afe21cc 3647{
f8cfc6aa 3648 if (REG_P (x)
1bb98cec
DM
3649 && REGNO_QTY_VALID_P (REGNO (x)))
3650 {
3651 int x_q = REG_QTY (REGNO (x));
3652 struct qty_table_elem *x_ent = &qty_table[x_q];
3653
3654 if (x_ent->const_rtx)
4de249d9 3655 x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
1bb98cec 3656 }
7afe21cc 3657
2ce5e1b4 3658 if (x == 0 || CONSTANT_P (x))
7afe21cc
RK
3659 return x;
3660
a52b023a
PB
3661 if (GET_CODE (x) == SUBREG)
3662 {
3663 rtx new;
3664
3665 /* See if we previously assigned a constant value to this SUBREG. */
3666 if ((new = lookup_as_function (x, CONST_INT)) != 0
3667 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3668 return new;
3669
3670 if (REG_P (SUBREG_REG (x))
3671 && (new = equiv_constant (SUBREG_REG (x))) != 0)
3672 return simplify_subreg (GET_MODE (x), SUBREG_REG (x),
3673 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3674
3675 return 0;
3676 }
3677
3678 /* If X is a MEM, see if it is a constant-pool reference, or look it up in
3679 the hash table in case its value was seen before. */
fc3ffe83 3680
3c0cb5de 3681 if (MEM_P (x))
fc3ffe83
RK
3682 {
3683 struct table_elt *elt;
3684
a52b023a 3685 x = avoid_constant_pool_reference (x);
fc3ffe83
RK
3686 if (CONSTANT_P (x))
3687 return x;
3688
0516f6fe 3689 elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
fc3ffe83
RK
3690 if (elt == 0)
3691 return 0;
3692
3693 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3694 if (elt->is_const && CONSTANT_P (elt->exp))
3695 return elt->exp;
3696 }
3697
7afe21cc
RK
3698 return 0;
3699}
3700\f
0129d079
SB
3701/* Given INSN, a jump insn, TAKEN indicates if we are following the
3702 "taken" branch.
7afe21cc
RK
3703
3704 In certain cases, this can cause us to add an equivalence. For example,
278a83b2 3705 if we are following the taken case of
7080f735 3706 if (i == 2)
7afe21cc
RK
3707 we can add the fact that `i' and '2' are now equivalent.
3708
3709 In any case, we can record that this comparison was passed. If the same
3710 comparison is seen later, we will know its value. */
3711
3712static void
0129d079 3713record_jump_equiv (rtx insn, bool taken)
7afe21cc
RK
3714{
3715 int cond_known_true;
3716 rtx op0, op1;
7f1c097d 3717 rtx set;
13c9910f 3718 enum machine_mode mode, mode0, mode1;
7afe21cc
RK
3719 int reversed_nonequality = 0;
3720 enum rtx_code code;
3721
3722 /* Ensure this is the right kind of insn. */
0129d079
SB
3723 gcc_assert (any_condjump_p (insn));
3724
7f1c097d 3725 set = pc_set (insn);
7afe21cc
RK
3726
3727 /* See if this jump condition is known true or false. */
3728 if (taken)
7f1c097d 3729 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
7afe21cc 3730 else
7f1c097d 3731 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
7afe21cc
RK
3732
3733 /* Get the type of comparison being done and the operands being compared.
3734 If we had to reverse a non-equality condition, record that fact so we
3735 know that it isn't valid for floating-point. */
7f1c097d
JH
3736 code = GET_CODE (XEXP (SET_SRC (set), 0));
3737 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
3738 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
7afe21cc 3739
13c9910f 3740 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
7afe21cc
RK
3741 if (! cond_known_true)
3742 {
261efdef 3743 code = reversed_comparison_code_parts (code, op0, op1, insn);
1eb8759b
RH
3744
3745 /* Don't remember if we can't find the inverse. */
3746 if (code == UNKNOWN)
3747 return;
7afe21cc
RK
3748 }
3749
3750 /* The mode is the mode of the non-constant. */
13c9910f
RS
3751 mode = mode0;
3752 if (mode1 != VOIDmode)
3753 mode = mode1;
7afe21cc
RK
3754
3755 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
3756}
3757
794693c0
RH
3758/* Yet another form of subreg creation. In this case, we want something in
3759 MODE, and we should assume OP has MODE iff it is naturally modeless. */
3760
3761static rtx
3762record_jump_cond_subreg (enum machine_mode mode, rtx op)
3763{
3764 enum machine_mode op_mode = GET_MODE (op);
3765 if (op_mode == mode || op_mode == VOIDmode)
3766 return op;
3767 return lowpart_subreg (mode, op, op_mode);
3768}
3769
7afe21cc
RK
3770/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
3771 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
3772 Make any useful entries we can with that information. Called from
3773 above function and called recursively. */
3774
3775static void
7080f735
AJ
3776record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
3777 rtx op1, int reversed_nonequality)
7afe21cc 3778{
2197a88a 3779 unsigned op0_hash, op1_hash;
e428d738 3780 int op0_in_memory, op1_in_memory;
7afe21cc
RK
3781 struct table_elt *op0_elt, *op1_elt;
3782
3783 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
3784 we know that they are also equal in the smaller mode (this is also
3785 true for all smaller modes whether or not there is a SUBREG, but
ac7ef8d5 3786 is not worth testing for with no SUBREG). */
7afe21cc 3787
2e794ee8 3788 /* Note that GET_MODE (op0) may not equal MODE. */
7afe21cc 3789 if (code == EQ && GET_CODE (op0) == SUBREG
2e794ee8
RS
3790 && (GET_MODE_SIZE (GET_MODE (op0))
3791 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
7afe21cc
RK
3792 {
3793 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
794693c0
RH
3794 rtx tem = record_jump_cond_subreg (inner_mode, op1);
3795 if (tem)
3796 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
3797 reversed_nonequality);
7afe21cc
RK
3798 }
3799
3800 if (code == EQ && GET_CODE (op1) == SUBREG
2e794ee8
RS
3801 && (GET_MODE_SIZE (GET_MODE (op1))
3802 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
7afe21cc
RK
3803 {
3804 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
794693c0
RH
3805 rtx tem = record_jump_cond_subreg (inner_mode, op0);
3806 if (tem)
3807 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
3808 reversed_nonequality);
7afe21cc
RK
3809 }
3810
278a83b2 3811 /* Similarly, if this is an NE comparison, and either is a SUBREG
7afe21cc
RK
3812 making a smaller mode, we know the whole thing is also NE. */
3813
2e794ee8
RS
3814 /* Note that GET_MODE (op0) may not equal MODE;
3815 if we test MODE instead, we can get an infinite recursion
3816 alternating between two modes each wider than MODE. */
3817
7afe21cc
RK
3818 if (code == NE && GET_CODE (op0) == SUBREG
3819 && subreg_lowpart_p (op0)
2e794ee8
RS
3820 && (GET_MODE_SIZE (GET_MODE (op0))
3821 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
7afe21cc
RK
3822 {
3823 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
794693c0
RH
3824 rtx tem = record_jump_cond_subreg (inner_mode, op1);
3825 if (tem)
3826 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
3827 reversed_nonequality);
7afe21cc
RK
3828 }
3829
3830 if (code == NE && GET_CODE (op1) == SUBREG
3831 && subreg_lowpart_p (op1)
2e794ee8
RS
3832 && (GET_MODE_SIZE (GET_MODE (op1))
3833 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
7afe21cc
RK
3834 {
3835 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
794693c0
RH
3836 rtx tem = record_jump_cond_subreg (inner_mode, op0);
3837 if (tem)
3838 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
3839 reversed_nonequality);
7afe21cc
RK
3840 }
3841
3842 /* Hash both operands. */
3843
3844 do_not_record = 0;
3845 hash_arg_in_memory = 0;
2197a88a 3846 op0_hash = HASH (op0, mode);
7afe21cc 3847 op0_in_memory = hash_arg_in_memory;
7afe21cc
RK
3848
3849 if (do_not_record)
3850 return;
3851
3852 do_not_record = 0;
3853 hash_arg_in_memory = 0;
2197a88a 3854 op1_hash = HASH (op1, mode);
7afe21cc 3855 op1_in_memory = hash_arg_in_memory;
278a83b2 3856
7afe21cc
RK
3857 if (do_not_record)
3858 return;
3859
3860 /* Look up both operands. */
2197a88a
RK
3861 op0_elt = lookup (op0, op0_hash, mode);
3862 op1_elt = lookup (op1, op1_hash, mode);
7afe21cc 3863
af3869c1
RK
3864 /* If both operands are already equivalent or if they are not in the
3865 table but are identical, do nothing. */
3866 if ((op0_elt != 0 && op1_elt != 0
3867 && op0_elt->first_same_value == op1_elt->first_same_value)
3868 || op0 == op1 || rtx_equal_p (op0, op1))
3869 return;
3870
7afe21cc 3871 /* If we aren't setting two things equal all we can do is save this
b2796a4b
RK
3872 comparison. Similarly if this is floating-point. In the latter
3873 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
3874 If we record the equality, we might inadvertently delete code
3875 whose intent was to change -0 to +0. */
3876
cbf6a543 3877 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
7afe21cc 3878 {
1bb98cec
DM
3879 struct qty_table_elem *ent;
3880 int qty;
3881
7afe21cc
RK
3882 /* If we reversed a floating-point comparison, if OP0 is not a
3883 register, or if OP1 is neither a register or constant, we can't
3884 do anything. */
3885
f8cfc6aa 3886 if (!REG_P (op1))
7afe21cc
RK
3887 op1 = equiv_constant (op1);
3888
cbf6a543 3889 if ((reversed_nonequality && FLOAT_MODE_P (mode))
f8cfc6aa 3890 || !REG_P (op0) || op1 == 0)
7afe21cc
RK
3891 return;
3892
3893 /* Put OP0 in the hash table if it isn't already. This gives it a
3894 new quantity number. */
3895 if (op0_elt == 0)
3896 {
9714cf43 3897 if (insert_regs (op0, NULL, 0))
7afe21cc
RK
3898 {
3899 rehash_using_reg (op0);
2197a88a 3900 op0_hash = HASH (op0, mode);
2bb81c86
RK
3901
3902 /* If OP0 is contained in OP1, this changes its hash code
3903 as well. Faster to rehash than to check, except
3904 for the simple case of a constant. */
3905 if (! CONSTANT_P (op1))
2197a88a 3906 op1_hash = HASH (op1,mode);
7afe21cc
RK
3907 }
3908
9714cf43 3909 op0_elt = insert (op0, NULL, op0_hash, mode);
7afe21cc 3910 op0_elt->in_memory = op0_in_memory;
7afe21cc
RK
3911 }
3912
1bb98cec
DM
3913 qty = REG_QTY (REGNO (op0));
3914 ent = &qty_table[qty];
3915
3916 ent->comparison_code = code;
f8cfc6aa 3917 if (REG_P (op1))
7afe21cc 3918 {
5d5ea909 3919 /* Look it up again--in case op0 and op1 are the same. */
2197a88a 3920 op1_elt = lookup (op1, op1_hash, mode);
5d5ea909 3921
7afe21cc
RK
3922 /* Put OP1 in the hash table so it gets a new quantity number. */
3923 if (op1_elt == 0)
3924 {
9714cf43 3925 if (insert_regs (op1, NULL, 0))
7afe21cc
RK
3926 {
3927 rehash_using_reg (op1);
2197a88a 3928 op1_hash = HASH (op1, mode);
7afe21cc
RK
3929 }
3930
9714cf43 3931 op1_elt = insert (op1, NULL, op1_hash, mode);
7afe21cc 3932 op1_elt->in_memory = op1_in_memory;
7afe21cc
RK
3933 }
3934
1bb98cec
DM
3935 ent->comparison_const = NULL_RTX;
3936 ent->comparison_qty = REG_QTY (REGNO (op1));
7afe21cc
RK
3937 }
3938 else
3939 {
1bb98cec
DM
3940 ent->comparison_const = op1;
3941 ent->comparison_qty = -1;
7afe21cc
RK
3942 }
3943
3944 return;
3945 }
3946
eb5ad42a
RS
3947 /* If either side is still missing an equivalence, make it now,
3948 then merge the equivalences. */
7afe21cc 3949
7afe21cc
RK
3950 if (op0_elt == 0)
3951 {
9714cf43 3952 if (insert_regs (op0, NULL, 0))
7afe21cc
RK
3953 {
3954 rehash_using_reg (op0);
2197a88a 3955 op0_hash = HASH (op0, mode);
7afe21cc
RK
3956 }
3957
9714cf43 3958 op0_elt = insert (op0, NULL, op0_hash, mode);
7afe21cc 3959 op0_elt->in_memory = op0_in_memory;
7afe21cc
RK
3960 }
3961
3962 if (op1_elt == 0)
3963 {
9714cf43 3964 if (insert_regs (op1, NULL, 0))
7afe21cc
RK
3965 {
3966 rehash_using_reg (op1);
2197a88a 3967 op1_hash = HASH (op1, mode);
7afe21cc
RK
3968 }
3969
9714cf43 3970 op1_elt = insert (op1, NULL, op1_hash, mode);
7afe21cc 3971 op1_elt->in_memory = op1_in_memory;
7afe21cc 3972 }
eb5ad42a
RS
3973
3974 merge_equiv_classes (op0_elt, op1_elt);
7afe21cc
RK
3975}
3976\f
3977/* CSE processing for one instruction.
3978 First simplify sources and addresses of all assignments
3979 in the instruction, using previously-computed equivalents values.
3980 Then install the new sources and destinations in the table
278a83b2 3981 of available values.
7afe21cc 3982
1ed0205e
VM
3983 If LIBCALL_INSN is nonzero, don't record any equivalence made in
3984 the insn. It means that INSN is inside libcall block. In this
ddc356e8 3985 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
7afe21cc
RK
3986
3987/* Data on one SET contained in the instruction. */
3988
3989struct set
3990{
3991 /* The SET rtx itself. */
3992 rtx rtl;
3993 /* The SET_SRC of the rtx (the original value, if it is changing). */
3994 rtx src;
3995 /* The hash-table element for the SET_SRC of the SET. */
3996 struct table_elt *src_elt;
2197a88a
RK
3997 /* Hash value for the SET_SRC. */
3998 unsigned src_hash;
3999 /* Hash value for the SET_DEST. */
4000 unsigned dest_hash;
7afe21cc
RK
4001 /* The SET_DEST, with SUBREG, etc., stripped. */
4002 rtx inner_dest;
278a83b2 4003 /* Nonzero if the SET_SRC is in memory. */
7afe21cc 4004 char src_in_memory;
7afe21cc
RK
4005 /* Nonzero if the SET_SRC contains something
4006 whose value cannot be predicted and understood. */
4007 char src_volatile;
496324d0
DN
4008 /* Original machine mode, in case it becomes a CONST_INT.
4009 The size of this field should match the size of the mode
4010 field of struct rtx_def (see rtl.h). */
4011 ENUM_BITFIELD(machine_mode) mode : 8;
7afe21cc
RK
4012 /* A constant equivalent for SET_SRC, if any. */
4013 rtx src_const;
47841d1b
JJ
4014 /* Original SET_SRC value used for libcall notes. */
4015 rtx orig_src;
2197a88a
RK
4016 /* Hash value of constant equivalent for SET_SRC. */
4017 unsigned src_const_hash;
7afe21cc
RK
4018 /* Table entry for constant equivalent for SET_SRC, if any. */
4019 struct table_elt *src_const_elt;
05c433f3
PB
4020 /* Table entry for the destination address. */
4021 struct table_elt *dest_addr_elt;
7afe21cc
RK
4022};
4023
4024static void
7080f735 4025cse_insn (rtx insn, rtx libcall_insn)
7afe21cc 4026{
b3694847
SS
4027 rtx x = PATTERN (insn);
4028 int i;
92f9aa51 4029 rtx tem;
b3694847 4030 int n_sets = 0;
7afe21cc 4031
7afe21cc
RK
4032 rtx src_eqv = 0;
4033 struct table_elt *src_eqv_elt = 0;
6a651371
KG
4034 int src_eqv_volatile = 0;
4035 int src_eqv_in_memory = 0;
6a651371 4036 unsigned src_eqv_hash = 0;
7afe21cc 4037
9714cf43 4038 struct set *sets = (struct set *) 0;
7afe21cc
RK
4039
4040 this_insn = insn;
192232e2
RS
4041#ifdef HAVE_cc0
4042 /* Records what this insn does to set CC0. */
4043 this_insn_cc0 = 0;
4044 this_insn_cc0_mode = VOIDmode;
4045#endif
7afe21cc
RK
4046
4047 /* Find all the SETs and CLOBBERs in this instruction.
4048 Record all the SETs in the array `set' and count them.
4049 Also determine whether there is a CLOBBER that invalidates
4050 all memory references, or all references at varying addresses. */
4051
4b4bf941 4052 if (CALL_P (insn))
f1e7c95f
RK
4053 {
4054 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
f474c6f8
AO
4055 {
4056 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4057 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4058 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4059 }
f1e7c95f
RK
4060 }
4061
7afe21cc
RK
4062 if (GET_CODE (x) == SET)
4063 {
703ad42b 4064 sets = alloca (sizeof (struct set));
7afe21cc
RK
4065 sets[0].rtl = x;
4066
4067 /* Ignore SETs that are unconditional jumps.
4068 They never need cse processing, so this does not hurt.
4069 The reason is not efficiency but rather
4070 so that we can test at the end for instructions
4071 that have been simplified to unconditional jumps
4072 and not be misled by unchanged instructions
4073 that were unconditional jumps to begin with. */
4074 if (SET_DEST (x) == pc_rtx
4075 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4076 ;
4077
4078 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4079 The hard function value register is used only once, to copy to
4080 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4081 Ensure we invalidate the destination register. On the 80386 no
7722328e 4082 other code would invalidate it since it is a fixed_reg.
0f41302f 4083 We need not check the return of apply_change_group; see canon_reg. */
7afe21cc
RK
4084
4085 else if (GET_CODE (SET_SRC (x)) == CALL)
4086 {
4087 canon_reg (SET_SRC (x), insn);
77fa0940 4088 apply_change_group ();
7afe21cc 4089 fold_rtx (SET_SRC (x), insn);
bb4034b3 4090 invalidate (SET_DEST (x), VOIDmode);
7afe21cc
RK
4091 }
4092 else
4093 n_sets = 1;
4094 }
4095 else if (GET_CODE (x) == PARALLEL)
4096 {
b3694847 4097 int lim = XVECLEN (x, 0);
7afe21cc 4098
703ad42b 4099 sets = alloca (lim * sizeof (struct set));
7afe21cc
RK
4100
4101 /* Find all regs explicitly clobbered in this insn,
4102 and ensure they are not replaced with any other regs
4103 elsewhere in this insn.
4104 When a reg that is clobbered is also used for input,
4105 we should presume that that is for a reason,
4106 and we should not substitute some other register
4107 which is not supposed to be clobbered.
4108 Therefore, this loop cannot be merged into the one below
830a38ee 4109 because a CALL may precede a CLOBBER and refer to the
7afe21cc
RK
4110 value clobbered. We must not let a canonicalization do
4111 anything in that case. */
4112 for (i = 0; i < lim; i++)
4113 {
b3694847 4114 rtx y = XVECEXP (x, 0, i);
2708da92
RS
4115 if (GET_CODE (y) == CLOBBER)
4116 {
4117 rtx clobbered = XEXP (y, 0);
4118
f8cfc6aa 4119 if (REG_P (clobbered)
2708da92 4120 || GET_CODE (clobbered) == SUBREG)
bb4034b3 4121 invalidate (clobbered, VOIDmode);
2708da92
RS
4122 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4123 || GET_CODE (clobbered) == ZERO_EXTRACT)
bb4034b3 4124 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
2708da92 4125 }
7afe21cc 4126 }
278a83b2 4127
7afe21cc
RK
4128 for (i = 0; i < lim; i++)
4129 {
b3694847 4130 rtx y = XVECEXP (x, 0, i);
7afe21cc
RK
4131 if (GET_CODE (y) == SET)
4132 {
7722328e
RK
4133 /* As above, we ignore unconditional jumps and call-insns and
4134 ignore the result of apply_change_group. */
7afe21cc
RK
4135 if (GET_CODE (SET_SRC (y)) == CALL)
4136 {
4137 canon_reg (SET_SRC (y), insn);
77fa0940 4138 apply_change_group ();
7afe21cc 4139 fold_rtx (SET_SRC (y), insn);
bb4034b3 4140 invalidate (SET_DEST (y), VOIDmode);
7afe21cc
RK
4141 }
4142 else if (SET_DEST (y) == pc_rtx
4143 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4144 ;
4145 else
4146 sets[n_sets++].rtl = y;
4147 }
4148 else if (GET_CODE (y) == CLOBBER)
4149 {
9ae8ffe7 4150 /* If we clobber memory, canon the address.
7afe21cc
RK
4151 This does nothing when a register is clobbered
4152 because we have already invalidated the reg. */
3c0cb5de 4153 if (MEM_P (XEXP (y, 0)))
9ae8ffe7 4154 canon_reg (XEXP (y, 0), NULL_RTX);
7afe21cc
RK
4155 }
4156 else if (GET_CODE (y) == USE
f8cfc6aa 4157 && ! (REG_P (XEXP (y, 0))
7afe21cc 4158 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
906c4e36 4159 canon_reg (y, NULL_RTX);
7afe21cc
RK
4160 else if (GET_CODE (y) == CALL)
4161 {
7722328e
RK
4162 /* The result of apply_change_group can be ignored; see
4163 canon_reg. */
7afe21cc 4164 canon_reg (y, insn);
77fa0940 4165 apply_change_group ();
7afe21cc
RK
4166 fold_rtx (y, insn);
4167 }
4168 }
4169 }
4170 else if (GET_CODE (x) == CLOBBER)
4171 {
3c0cb5de 4172 if (MEM_P (XEXP (x, 0)))
9ae8ffe7 4173 canon_reg (XEXP (x, 0), NULL_RTX);
7afe21cc
RK
4174 }
4175
4176 /* Canonicalize a USE of a pseudo register or memory location. */
4177 else if (GET_CODE (x) == USE
f8cfc6aa 4178 && ! (REG_P (XEXP (x, 0))
7afe21cc 4179 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
906c4e36 4180 canon_reg (XEXP (x, 0), NULL_RTX);
7afe21cc
RK
4181 else if (GET_CODE (x) == CALL)
4182 {
7722328e 4183 /* The result of apply_change_group can be ignored; see canon_reg. */
7afe21cc 4184 canon_reg (x, insn);
77fa0940 4185 apply_change_group ();
7afe21cc
RK
4186 fold_rtx (x, insn);
4187 }
4188
7b3ab05e
JW
4189 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4190 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4191 is handled specially for this case, and if it isn't set, then there will
9faa82d8 4192 be no equivalence for the destination. */
92f9aa51
RK
4193 if (n_sets == 1 && REG_NOTES (insn) != 0
4194 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
7b3ab05e
JW
4195 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4196 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
7b668f9e
JJ
4197 {
4198 src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4199 XEXP (tem, 0) = src_eqv;
4200 }
7afe21cc
RK
4201
4202 /* Canonicalize sources and addresses of destinations.
4203 We do this in a separate pass to avoid problems when a MATCH_DUP is
4204 present in the insn pattern. In that case, we want to ensure that
4205 we don't break the duplicate nature of the pattern. So we will replace
4206 both operands at the same time. Otherwise, we would fail to find an
4207 equivalent substitution in the loop calling validate_change below.
7afe21cc
RK
4208
4209 We used to suppress canonicalization of DEST if it appears in SRC,
77fa0940 4210 but we don't do this any more. */
7afe21cc
RK
4211
4212 for (i = 0; i < n_sets; i++)
4213 {
4214 rtx dest = SET_DEST (sets[i].rtl);
4215 rtx src = SET_SRC (sets[i].rtl);
4216 rtx new = canon_reg (src, insn);
4217
47841d1b 4218 sets[i].orig_src = src;
67e0a632 4219 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
7afe21cc 4220
46d096a3 4221 if (GET_CODE (dest) == ZERO_EXTRACT)
7afe21cc
RK
4222 {
4223 validate_change (insn, &XEXP (dest, 1),
77fa0940 4224 canon_reg (XEXP (dest, 1), insn), 1);
7afe21cc 4225 validate_change (insn, &XEXP (dest, 2),
77fa0940 4226 canon_reg (XEXP (dest, 2), insn), 1);
7afe21cc
RK
4227 }
4228
46d096a3 4229 while (GET_CODE (dest) == SUBREG
7afe21cc 4230 || GET_CODE (dest) == ZERO_EXTRACT
46d096a3 4231 || GET_CODE (dest) == STRICT_LOW_PART)
7afe21cc
RK
4232 dest = XEXP (dest, 0);
4233
3c0cb5de 4234 if (MEM_P (dest))
7afe21cc
RK
4235 canon_reg (dest, insn);
4236 }
4237
77fa0940
RK
4238 /* Now that we have done all the replacements, we can apply the change
4239 group and see if they all work. Note that this will cause some
4240 canonicalizations that would have worked individually not to be applied
4241 because some other canonicalization didn't work, but this should not
278a83b2 4242 occur often.
7722328e
RK
4243
4244 The result of apply_change_group can be ignored; see canon_reg. */
77fa0940
RK
4245
4246 apply_change_group ();
4247
7afe21cc
RK
4248 /* Set sets[i].src_elt to the class each source belongs to.
4249 Detect assignments from or to volatile things
4250 and set set[i] to zero so they will be ignored
4251 in the rest of this function.
4252
4253 Nothing in this loop changes the hash table or the register chains. */
4254
4255 for (i = 0; i < n_sets; i++)
4256 {
b3694847
SS
4257 rtx src, dest;
4258 rtx src_folded;
4259 struct table_elt *elt = 0, *p;
7afe21cc
RK
4260 enum machine_mode mode;
4261 rtx src_eqv_here;
4262 rtx src_const = 0;
4263 rtx src_related = 0;
4264 struct table_elt *src_const_elt = 0;
99a9c946
GS
4265 int src_cost = MAX_COST;
4266 int src_eqv_cost = MAX_COST;
4267 int src_folded_cost = MAX_COST;
4268 int src_related_cost = MAX_COST;
4269 int src_elt_cost = MAX_COST;
4270 int src_regcost = MAX_COST;
4271 int src_eqv_regcost = MAX_COST;
4272 int src_folded_regcost = MAX_COST;
4273 int src_related_regcost = MAX_COST;
4274 int src_elt_regcost = MAX_COST;
da7d8304 4275 /* Set nonzero if we need to call force_const_mem on with the
7afe21cc
RK
4276 contents of src_folded before using it. */
4277 int src_folded_force_flag = 0;
4278
4279 dest = SET_DEST (sets[i].rtl);
4280 src = SET_SRC (sets[i].rtl);
4281
4282 /* If SRC is a constant that has no machine mode,
4283 hash it with the destination's machine mode.
4284 This way we can keep different modes separate. */
4285
4286 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4287 sets[i].mode = mode;
4288
4289 if (src_eqv)
4290 {
4291 enum machine_mode eqvmode = mode;
4292 if (GET_CODE (dest) == STRICT_LOW_PART)
4293 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4294 do_not_record = 0;
4295 hash_arg_in_memory = 0;
2197a88a 4296 src_eqv_hash = HASH (src_eqv, eqvmode);
7afe21cc
RK
4297
4298 /* Find the equivalence class for the equivalent expression. */
4299
4300 if (!do_not_record)
2197a88a 4301 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
7afe21cc
RK
4302
4303 src_eqv_volatile = do_not_record;
4304 src_eqv_in_memory = hash_arg_in_memory;
7afe21cc
RK
4305 }
4306
4307 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4308 value of the INNER register, not the destination. So it is not
3826a3da 4309 a valid substitution for the source. But save it for later. */
7afe21cc
RK
4310 if (GET_CODE (dest) == STRICT_LOW_PART)
4311 src_eqv_here = 0;
4312 else
4313 src_eqv_here = src_eqv;
4314
4315 /* Simplify and foldable subexpressions in SRC. Then get the fully-
4316 simplified result, which may not necessarily be valid. */
4317 src_folded = fold_rtx (src, insn);
4318
e6a125a0
RK
4319#if 0
4320 /* ??? This caused bad code to be generated for the m68k port with -O2.
4321 Suppose src is (CONST_INT -1), and that after truncation src_folded
4322 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4323 At the end we will add src and src_const to the same equivalence
4324 class. We now have 3 and -1 on the same equivalence class. This
4325 causes later instructions to be mis-optimized. */
7afe21cc
RK
4326 /* If storing a constant in a bitfield, pre-truncate the constant
4327 so we will be able to record it later. */
46d096a3 4328 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
7afe21cc
RK
4329 {
4330 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4331
4332 if (GET_CODE (src) == CONST_INT
4333 && GET_CODE (width) == CONST_INT
906c4e36
RK
4334 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4335 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4336 src_folded
4337 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4338 << INTVAL (width)) - 1));
7afe21cc 4339 }
e6a125a0 4340#endif
7afe21cc
RK
4341
4342 /* Compute SRC's hash code, and also notice if it
4343 should not be recorded at all. In that case,
4344 prevent any further processing of this assignment. */
4345 do_not_record = 0;
4346 hash_arg_in_memory = 0;
7afe21cc
RK
4347
4348 sets[i].src = src;
2197a88a 4349 sets[i].src_hash = HASH (src, mode);
7afe21cc
RK
4350 sets[i].src_volatile = do_not_record;
4351 sets[i].src_in_memory = hash_arg_in_memory;
7afe21cc 4352
50196afa 4353 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
43e72072
JJ
4354 a pseudo, do not record SRC. Using SRC as a replacement for
4355 anything else will be incorrect in that situation. Note that
4356 this usually occurs only for stack slots, in which case all the
4357 RTL would be referring to SRC, so we don't lose any optimization
4358 opportunities by not having SRC in the hash table. */
50196afa 4359
3c0cb5de 4360 if (MEM_P (src)
43e72072 4361 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
f8cfc6aa 4362 && REG_P (dest)
43e72072 4363 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
50196afa
RK
4364 sets[i].src_volatile = 1;
4365
0dadecf6
RK
4366#if 0
4367 /* It is no longer clear why we used to do this, but it doesn't
4368 appear to still be needed. So let's try without it since this
4369 code hurts cse'ing widened ops. */
9a5a17f3 4370 /* If source is a paradoxical subreg (such as QI treated as an SI),
7afe21cc
RK
4371 treat it as volatile. It may do the work of an SI in one context
4372 where the extra bits are not being used, but cannot replace an SI
4373 in general. */
4374 if (GET_CODE (src) == SUBREG
4375 && (GET_MODE_SIZE (GET_MODE (src))
4376 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
4377 sets[i].src_volatile = 1;
0dadecf6 4378#endif
7afe21cc
RK
4379
4380 /* Locate all possible equivalent forms for SRC. Try to replace
4381 SRC in the insn with each cheaper equivalent.
4382
4383 We have the following types of equivalents: SRC itself, a folded
4384 version, a value given in a REG_EQUAL note, or a value related
4385 to a constant.
4386
4387 Each of these equivalents may be part of an additional class
4388 of equivalents (if more than one is in the table, they must be in
4389 the same class; we check for this).
4390
4391 If the source is volatile, we don't do any table lookups.
4392
4393 We note any constant equivalent for possible later use in a
4394 REG_NOTE. */
4395
4396 if (!sets[i].src_volatile)
2197a88a 4397 elt = lookup (src, sets[i].src_hash, mode);
7afe21cc
RK
4398
4399 sets[i].src_elt = elt;
4400
4401 if (elt && src_eqv_here && src_eqv_elt)
278a83b2
KH
4402 {
4403 if (elt->first_same_value != src_eqv_elt->first_same_value)
7afe21cc
RK
4404 {
4405 /* The REG_EQUAL is indicating that two formerly distinct
4406 classes are now equivalent. So merge them. */
4407 merge_equiv_classes (elt, src_eqv_elt);
2197a88a
RK
4408 src_eqv_hash = HASH (src_eqv, elt->mode);
4409 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
7afe21cc
RK
4410 }
4411
278a83b2
KH
4412 src_eqv_here = 0;
4413 }
7afe21cc
RK
4414
4415 else if (src_eqv_elt)
278a83b2 4416 elt = src_eqv_elt;
7afe21cc
RK
4417
4418 /* Try to find a constant somewhere and record it in `src_const'.
4419 Record its table element, if any, in `src_const_elt'. Look in
4420 any known equivalences first. (If the constant is not in the
2197a88a 4421 table, also set `sets[i].src_const_hash'). */
7afe21cc 4422 if (elt)
278a83b2 4423 for (p = elt->first_same_value; p; p = p->next_same_value)
7afe21cc
RK
4424 if (p->is_const)
4425 {
4426 src_const = p->exp;
4427 src_const_elt = elt;
4428 break;
4429 }
4430
4431 if (src_const == 0
4432 && (CONSTANT_P (src_folded)
278a83b2 4433 /* Consider (minus (label_ref L1) (label_ref L2)) as
7afe21cc
RK
4434 "constant" here so we will record it. This allows us
4435 to fold switch statements when an ADDR_DIFF_VEC is used. */
4436 || (GET_CODE (src_folded) == MINUS
4437 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
4438 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
4439 src_const = src_folded, src_const_elt = elt;
4440 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
4441 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
4442
4443 /* If we don't know if the constant is in the table, get its
4444 hash code and look it up. */
4445 if (src_const && src_const_elt == 0)
4446 {
2197a88a
RK
4447 sets[i].src_const_hash = HASH (src_const, mode);
4448 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
7afe21cc
RK
4449 }
4450
4451 sets[i].src_const = src_const;
4452 sets[i].src_const_elt = src_const_elt;
4453
4454 /* If the constant and our source are both in the table, mark them as
4455 equivalent. Otherwise, if a constant is in the table but the source
4456 isn't, set ELT to it. */
4457 if (src_const_elt && elt
4458 && src_const_elt->first_same_value != elt->first_same_value)
4459 merge_equiv_classes (elt, src_const_elt);
4460 else if (src_const_elt && elt == 0)
4461 elt = src_const_elt;
4462
4463 /* See if there is a register linearly related to a constant
4464 equivalent of SRC. */
4465 if (src_const
4466 && (GET_CODE (src_const) == CONST
4467 || (src_const_elt && src_const_elt->related_value != 0)))
278a83b2
KH
4468 {
4469 src_related = use_related_value (src_const, src_const_elt);
4470 if (src_related)
4471 {
7afe21cc 4472 struct table_elt *src_related_elt
278a83b2 4473 = lookup (src_related, HASH (src_related, mode), mode);
7afe21cc 4474 if (src_related_elt && elt)
278a83b2 4475 {
7afe21cc
RK
4476 if (elt->first_same_value
4477 != src_related_elt->first_same_value)
278a83b2 4478 /* This can occur when we previously saw a CONST
7afe21cc
RK
4479 involving a SYMBOL_REF and then see the SYMBOL_REF
4480 twice. Merge the involved classes. */
4481 merge_equiv_classes (elt, src_related_elt);
4482
278a83b2 4483 src_related = 0;
7afe21cc 4484 src_related_elt = 0;
278a83b2
KH
4485 }
4486 else if (src_related_elt && elt == 0)
4487 elt = src_related_elt;
7afe21cc 4488 }
278a83b2 4489 }
7afe21cc 4490
e4600702
RK
4491 /* See if we have a CONST_INT that is already in a register in a
4492 wider mode. */
4493
4494 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
4495 && GET_MODE_CLASS (mode) == MODE_INT
4496 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
4497 {
4498 enum machine_mode wider_mode;
4499
4500 for (wider_mode = GET_MODE_WIDER_MODE (mode);
4501 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
4502 && src_related == 0;
4503 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4504 {
4505 struct table_elt *const_elt
4506 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
4507
4508 if (const_elt == 0)
4509 continue;
4510
4511 for (const_elt = const_elt->first_same_value;
4512 const_elt; const_elt = const_elt->next_same_value)
f8cfc6aa 4513 if (REG_P (const_elt->exp))
e4600702 4514 {
73ca11ed 4515 src_related = gen_lowpart (mode, const_elt->exp);
e4600702
RK
4516 break;
4517 }
4518 }
4519 }
4520
d45cf215
RS
4521 /* Another possibility is that we have an AND with a constant in
4522 a mode narrower than a word. If so, it might have been generated
4523 as part of an "if" which would narrow the AND. If we already
4524 have done the AND in a wider mode, we can use a SUBREG of that
4525 value. */
4526
4527 if (flag_expensive_optimizations && ! src_related
4528 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
4529 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4530 {
4531 enum machine_mode tmode;
38a448ca 4532 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
d45cf215
RS
4533
4534 for (tmode = GET_MODE_WIDER_MODE (mode);
4535 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4536 tmode = GET_MODE_WIDER_MODE (tmode))
4537 {
4de249d9 4538 rtx inner = gen_lowpart (tmode, XEXP (src, 0));
d45cf215
RS
4539 struct table_elt *larger_elt;
4540
4541 if (inner)
4542 {
4543 PUT_MODE (new_and, tmode);
4544 XEXP (new_and, 0) = inner;
4545 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
4546 if (larger_elt == 0)
4547 continue;
4548
4549 for (larger_elt = larger_elt->first_same_value;
4550 larger_elt; larger_elt = larger_elt->next_same_value)
f8cfc6aa 4551 if (REG_P (larger_elt->exp))
d45cf215
RS
4552 {
4553 src_related
4de249d9 4554 = gen_lowpart (mode, larger_elt->exp);
d45cf215
RS
4555 break;
4556 }
4557
4558 if (src_related)
4559 break;
4560 }
4561 }
4562 }
7bac1be0
RK
4563
4564#ifdef LOAD_EXTEND_OP
4565 /* See if a MEM has already been loaded with a widening operation;
4566 if it has, we can use a subreg of that. Many CISC machines
4567 also have such operations, but this is only likely to be
71cc389b 4568 beneficial on these machines. */
278a83b2 4569
ddc356e8 4570 if (flag_expensive_optimizations && src_related == 0
7bac1be0
RK
4571 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4572 && GET_MODE_CLASS (mode) == MODE_INT
3c0cb5de 4573 && MEM_P (src) && ! do_not_record
f822d252 4574 && LOAD_EXTEND_OP (mode) != UNKNOWN)
7bac1be0 4575 {
9d80ef7c
RH
4576 struct rtx_def memory_extend_buf;
4577 rtx memory_extend_rtx = &memory_extend_buf;
7bac1be0 4578 enum machine_mode tmode;
278a83b2 4579
7bac1be0
RK
4580 /* Set what we are trying to extend and the operation it might
4581 have been extended with. */
9d80ef7c 4582 memset (memory_extend_rtx, 0, sizeof(*memory_extend_rtx));
7bac1be0
RK
4583 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
4584 XEXP (memory_extend_rtx, 0) = src;
278a83b2 4585
7bac1be0
RK
4586 for (tmode = GET_MODE_WIDER_MODE (mode);
4587 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4588 tmode = GET_MODE_WIDER_MODE (tmode))
4589 {
4590 struct table_elt *larger_elt;
278a83b2 4591
7bac1be0 4592 PUT_MODE (memory_extend_rtx, tmode);
278a83b2 4593 larger_elt = lookup (memory_extend_rtx,
7bac1be0
RK
4594 HASH (memory_extend_rtx, tmode), tmode);
4595 if (larger_elt == 0)
4596 continue;
278a83b2 4597
7bac1be0
RK
4598 for (larger_elt = larger_elt->first_same_value;
4599 larger_elt; larger_elt = larger_elt->next_same_value)
f8cfc6aa 4600 if (REG_P (larger_elt->exp))
7bac1be0 4601 {
73ca11ed 4602 src_related = gen_lowpart (mode, larger_elt->exp);
7bac1be0
RK
4603 break;
4604 }
278a83b2 4605
7bac1be0
RK
4606 if (src_related)
4607 break;
4608 }
4609 }
4610#endif /* LOAD_EXTEND_OP */
278a83b2 4611
7afe21cc 4612 if (src == src_folded)
278a83b2 4613 src_folded = 0;
7afe21cc 4614
da7d8304 4615 /* At this point, ELT, if nonzero, points to a class of expressions
7afe21cc 4616 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
da7d8304 4617 and SRC_RELATED, if nonzero, each contain additional equivalent
7afe21cc
RK
4618 expressions. Prune these latter expressions by deleting expressions
4619 already in the equivalence class.
4620
4621 Check for an equivalent identical to the destination. If found,
4622 this is the preferred equivalent since it will likely lead to
4623 elimination of the insn. Indicate this by placing it in
4624 `src_related'. */
4625
278a83b2
KH
4626 if (elt)
4627 elt = elt->first_same_value;
7afe21cc 4628 for (p = elt; p; p = p->next_same_value)
278a83b2 4629 {
7afe21cc
RK
4630 enum rtx_code code = GET_CODE (p->exp);
4631
4632 /* If the expression is not valid, ignore it. Then we do not
4633 have to check for validity below. In most cases, we can use
4634 `rtx_equal_p', since canonicalization has already been done. */
0516f6fe 4635 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
7afe21cc
RK
4636 continue;
4637
5a03c8c4
RK
4638 /* Also skip paradoxical subregs, unless that's what we're
4639 looking for. */
4640 if (code == SUBREG
4641 && (GET_MODE_SIZE (GET_MODE (p->exp))
4642 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
4643 && ! (src != 0
4644 && GET_CODE (src) == SUBREG
4645 && GET_MODE (src) == GET_MODE (p->exp)
4646 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4647 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
4648 continue;
4649
278a83b2 4650 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
7afe21cc 4651 src = 0;
278a83b2 4652 else if (src_folded && GET_CODE (src_folded) == code
7afe21cc
RK
4653 && rtx_equal_p (src_folded, p->exp))
4654 src_folded = 0;
278a83b2 4655 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
7afe21cc
RK
4656 && rtx_equal_p (src_eqv_here, p->exp))
4657 src_eqv_here = 0;
278a83b2 4658 else if (src_related && GET_CODE (src_related) == code
7afe21cc
RK
4659 && rtx_equal_p (src_related, p->exp))
4660 src_related = 0;
4661
4662 /* This is the same as the destination of the insns, we want
4663 to prefer it. Copy it to src_related. The code below will
4664 then give it a negative cost. */
4665 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
4666 src_related = dest;
278a83b2 4667 }
7afe21cc
RK
4668
4669 /* Find the cheapest valid equivalent, trying all the available
4670 possibilities. Prefer items not in the hash table to ones
4671 that are when they are equal cost. Note that we can never
4672 worsen an insn as the current contents will also succeed.
05c33dd8 4673 If we find an equivalent identical to the destination, use it as best,
0f41302f 4674 since this insn will probably be eliminated in that case. */
7afe21cc
RK
4675 if (src)
4676 {
4677 if (rtx_equal_p (src, dest))
f1c1dfc3 4678 src_cost = src_regcost = -1;
7afe21cc 4679 else
630c79be
BS
4680 {
4681 src_cost = COST (src);
4682 src_regcost = approx_reg_cost (src);
4683 }
7afe21cc
RK
4684 }
4685
4686 if (src_eqv_here)
4687 {
4688 if (rtx_equal_p (src_eqv_here, dest))
f1c1dfc3 4689 src_eqv_cost = src_eqv_regcost = -1;
7afe21cc 4690 else
630c79be
BS
4691 {
4692 src_eqv_cost = COST (src_eqv_here);
4693 src_eqv_regcost = approx_reg_cost (src_eqv_here);
4694 }
7afe21cc
RK
4695 }
4696
4697 if (src_folded)
4698 {
4699 if (rtx_equal_p (src_folded, dest))
f1c1dfc3 4700 src_folded_cost = src_folded_regcost = -1;
7afe21cc 4701 else
630c79be
BS
4702 {
4703 src_folded_cost = COST (src_folded);
4704 src_folded_regcost = approx_reg_cost (src_folded);
4705 }
7afe21cc
RK
4706 }
4707
4708 if (src_related)
4709 {
4710 if (rtx_equal_p (src_related, dest))
f1c1dfc3 4711 src_related_cost = src_related_regcost = -1;
7afe21cc 4712 else
630c79be
BS
4713 {
4714 src_related_cost = COST (src_related);
4715 src_related_regcost = approx_reg_cost (src_related);
4716 }
7afe21cc
RK
4717 }
4718
4719 /* If this was an indirect jump insn, a known label will really be
4720 cheaper even though it looks more expensive. */
4721 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
99a9c946 4722 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
278a83b2 4723
7afe21cc
RK
4724 /* Terminate loop when replacement made. This must terminate since
4725 the current contents will be tested and will always be valid. */
4726 while (1)
278a83b2
KH
4727 {
4728 rtx trial;
7afe21cc 4729
278a83b2 4730 /* Skip invalid entries. */
f8cfc6aa 4731 while (elt && !REG_P (elt->exp)
0516f6fe 4732 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
278a83b2 4733 elt = elt->next_same_value;
5a03c8c4
RK
4734
4735 /* A paradoxical subreg would be bad here: it'll be the right
4736 size, but later may be adjusted so that the upper bits aren't
4737 what we want. So reject it. */
4738 if (elt != 0
4739 && GET_CODE (elt->exp) == SUBREG
4740 && (GET_MODE_SIZE (GET_MODE (elt->exp))
4741 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
4742 /* It is okay, though, if the rtx we're trying to match
4743 will ignore any of the bits we can't predict. */
4744 && ! (src != 0
4745 && GET_CODE (src) == SUBREG
4746 && GET_MODE (src) == GET_MODE (elt->exp)
4747 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4748 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
4749 {
4750 elt = elt->next_same_value;
4751 continue;
4752 }
278a83b2 4753
68252e27 4754 if (elt)
630c79be
BS
4755 {
4756 src_elt_cost = elt->cost;
4757 src_elt_regcost = elt->regcost;
4758 }
7afe21cc 4759
68252e27 4760 /* Find cheapest and skip it for the next time. For items
7afe21cc
RK
4761 of equal cost, use this order:
4762 src_folded, src, src_eqv, src_related and hash table entry. */
99a9c946 4763 if (src_folded
56ae04af
KH
4764 && preferable (src_folded_cost, src_folded_regcost,
4765 src_cost, src_regcost) <= 0
4766 && preferable (src_folded_cost, src_folded_regcost,
4767 src_eqv_cost, src_eqv_regcost) <= 0
4768 && preferable (src_folded_cost, src_folded_regcost,
4769 src_related_cost, src_related_regcost) <= 0
4770 && preferable (src_folded_cost, src_folded_regcost,
4771 src_elt_cost, src_elt_regcost) <= 0)
7afe21cc 4772 {
f1c1dfc3 4773 trial = src_folded, src_folded_cost = MAX_COST;
7afe21cc 4774 if (src_folded_force_flag)
9d8de1de
EB
4775 {
4776 rtx forced = force_const_mem (mode, trial);
4777 if (forced)
4778 trial = forced;
4779 }
7afe21cc 4780 }
99a9c946 4781 else if (src
56ae04af
KH
4782 && preferable (src_cost, src_regcost,
4783 src_eqv_cost, src_eqv_regcost) <= 0
4784 && preferable (src_cost, src_regcost,
4785 src_related_cost, src_related_regcost) <= 0
4786 && preferable (src_cost, src_regcost,
4787 src_elt_cost, src_elt_regcost) <= 0)
f1c1dfc3 4788 trial = src, src_cost = MAX_COST;
99a9c946 4789 else if (src_eqv_here
56ae04af
KH
4790 && preferable (src_eqv_cost, src_eqv_regcost,
4791 src_related_cost, src_related_regcost) <= 0
4792 && preferable (src_eqv_cost, src_eqv_regcost,
4793 src_elt_cost, src_elt_regcost) <= 0)
f1c1dfc3 4794 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
99a9c946 4795 else if (src_related
56ae04af
KH
4796 && preferable (src_related_cost, src_related_regcost,
4797 src_elt_cost, src_elt_regcost) <= 0)
68252e27 4798 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
278a83b2 4799 else
7afe21cc 4800 {
05c33dd8 4801 trial = copy_rtx (elt->exp);
7afe21cc 4802 elt = elt->next_same_value;
f1c1dfc3 4803 src_elt_cost = MAX_COST;
7afe21cc
RK
4804 }
4805
4806 /* We don't normally have an insn matching (set (pc) (pc)), so
4807 check for this separately here. We will delete such an
4808 insn below.
4809
d466c016
JL
4810 For other cases such as a table jump or conditional jump
4811 where we know the ultimate target, go ahead and replace the
4812 operand. While that may not make a valid insn, we will
4813 reemit the jump below (and also insert any necessary
4814 barriers). */
7afe21cc
RK
4815 if (n_sets == 1 && dest == pc_rtx
4816 && (trial == pc_rtx
4817 || (GET_CODE (trial) == LABEL_REF
4818 && ! condjump_p (insn))))
4819 {
2f39b6ca
UW
4820 /* Don't substitute non-local labels, this confuses CFG. */
4821 if (GET_CODE (trial) == LABEL_REF
4822 && LABEL_REF_NONLOCAL_P (trial))
4823 continue;
4824
d466c016 4825 SET_SRC (sets[i].rtl) = trial;
602c4c0d 4826 cse_jumps_altered = 1;
7afe21cc
RK
4827 break;
4828 }
278a83b2 4829
1ef6855c
KH
4830 /* Reject certain invalid forms of CONST that we create. */
4831 else if (CONSTANT_P (trial)
4832 && GET_CODE (trial) == CONST
4833 /* Reject cases that will cause decode_rtx_const to
4834 die. On the alpha when simplifying a switch, we
4835 get (const (truncate (minus (label_ref)
4836 (label_ref)))). */
4837 && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
4838 /* Likewise on IA-64, except without the
4839 truncate. */
4840 || (GET_CODE (XEXP (trial, 0)) == MINUS
4841 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
4842 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
4843 /* Do nothing for this case. */
4844 ;
4845
7afe21cc 4846 /* Look for a substitution that makes a valid insn. */
ddc356e8 4847 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
05c33dd8 4848 {
dbaff908
RS
4849 rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
4850
7bd8b2a8
JL
4851 /* If we just made a substitution inside a libcall, then we
4852 need to make the same substitution in any notes attached
4853 to the RETVAL insn. */
1ed0205e 4854 if (libcall_insn
f8cfc6aa 4855 && (REG_P (sets[i].orig_src)
47841d1b 4856 || GET_CODE (sets[i].orig_src) == SUBREG
3c0cb5de 4857 || MEM_P (sets[i].orig_src)))
d8b7ec41
RS
4858 {
4859 rtx note = find_reg_equal_equiv_note (libcall_insn);
4860 if (note != 0)
4861 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0),
4862 sets[i].orig_src,
4863 copy_rtx (new));
4864 }
7bd8b2a8 4865
7722328e
RK
4866 /* The result of apply_change_group can be ignored; see
4867 canon_reg. */
4868
dbaff908 4869 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
6702af89 4870 apply_change_group ();
932ad4d9 4871
05c33dd8
RK
4872 break;
4873 }
7afe21cc 4874
278a83b2 4875 /* If we previously found constant pool entries for
7afe21cc
RK
4876 constants and this is a constant, try making a
4877 pool entry. Put it in src_folded unless we already have done
4878 this since that is where it likely came from. */
4879
4880 else if (constant_pool_entries_cost
4881 && CONSTANT_P (trial)
1bbd065b 4882 && (src_folded == 0
3c0cb5de 4883 || (!MEM_P (src_folded)
1bbd065b 4884 && ! src_folded_force_flag))
9ae8ffe7
JL
4885 && GET_MODE_CLASS (mode) != MODE_CC
4886 && mode != VOIDmode)
7afe21cc
RK
4887 {
4888 src_folded_force_flag = 1;
4889 src_folded = trial;
4890 src_folded_cost = constant_pool_entries_cost;
dd0ba281 4891 src_folded_regcost = constant_pool_entries_regcost;
7afe21cc 4892 }
278a83b2 4893 }
7afe21cc
RK
4894
4895 src = SET_SRC (sets[i].rtl);
4896
4897 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
4898 However, there is an important exception: If both are registers
4899 that are not the head of their equivalence class, replace SET_SRC
4900 with the head of the class. If we do not do this, we will have
4901 both registers live over a portion of the basic block. This way,
4902 their lifetimes will likely abut instead of overlapping. */
f8cfc6aa 4903 if (REG_P (dest)
1bb98cec 4904 && REGNO_QTY_VALID_P (REGNO (dest)))
7afe21cc 4905 {
1bb98cec
DM
4906 int dest_q = REG_QTY (REGNO (dest));
4907 struct qty_table_elem *dest_ent = &qty_table[dest_q];
4908
4909 if (dest_ent->mode == GET_MODE (dest)
4910 && dest_ent->first_reg != REGNO (dest)
f8cfc6aa 4911 && REG_P (src) && REGNO (src) == REGNO (dest)
1bb98cec
DM
4912 /* Don't do this if the original insn had a hard reg as
4913 SET_SRC or SET_DEST. */
f8cfc6aa 4914 && (!REG_P (sets[i].src)
1bb98cec 4915 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
f8cfc6aa 4916 && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
1bb98cec
DM
4917 /* We can't call canon_reg here because it won't do anything if
4918 SRC is a hard register. */
759bd8b7 4919 {
1bb98cec
DM
4920 int src_q = REG_QTY (REGNO (src));
4921 struct qty_table_elem *src_ent = &qty_table[src_q];
4922 int first = src_ent->first_reg;
4923 rtx new_src
4924 = (first >= FIRST_PSEUDO_REGISTER
4925 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
4926
4927 /* We must use validate-change even for this, because this
4928 might be a special no-op instruction, suitable only to
4929 tag notes onto. */
4930 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
4931 {
4932 src = new_src;
4933 /* If we had a constant that is cheaper than what we are now
4934 setting SRC to, use that constant. We ignored it when we
4935 thought we could make this into a no-op. */
4936 if (src_const && COST (src_const) < COST (src)
278a83b2
KH
4937 && validate_change (insn, &SET_SRC (sets[i].rtl),
4938 src_const, 0))
1bb98cec
DM
4939 src = src_const;
4940 }
759bd8b7 4941 }
7afe21cc
RK
4942 }
4943
4944 /* If we made a change, recompute SRC values. */
4945 if (src != sets[i].src)
278a83b2 4946 {
278a83b2
KH
4947 do_not_record = 0;
4948 hash_arg_in_memory = 0;
7afe21cc 4949 sets[i].src = src;
278a83b2
KH
4950 sets[i].src_hash = HASH (src, mode);
4951 sets[i].src_volatile = do_not_record;
4952 sets[i].src_in_memory = hash_arg_in_memory;
4953 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
4954 }
7afe21cc
RK
4955
4956 /* If this is a single SET, we are setting a register, and we have an
4957 equivalent constant, we want to add a REG_NOTE. We don't want
4958 to write a REG_EQUAL note for a constant pseudo since verifying that
d45cf215 4959 that pseudo hasn't been eliminated is a pain. Such a note also
278a83b2 4960 won't help anything.
ac7ef8d5
FS
4961
4962 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
4963 which can be created for a reference to a compile time computable
4964 entry in a jump table. */
4965
f8cfc6aa
JQ
4966 if (n_sets == 1 && src_const && REG_P (dest)
4967 && !REG_P (src_const)
ac7ef8d5
FS
4968 && ! (GET_CODE (src_const) == CONST
4969 && GET_CODE (XEXP (src_const, 0)) == MINUS
4970 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
4971 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
7afe21cc 4972 {
a77b7e32
RS
4973 /* We only want a REG_EQUAL note if src_const != src. */
4974 if (! rtx_equal_p (src, src_const))
4975 {
4976 /* Make sure that the rtx is not shared. */
4977 src_const = copy_rtx (src_const);
51e2a951 4978
a77b7e32
RS
4979 /* Record the actual constant value in a REG_EQUAL note,
4980 making a new one if one does not already exist. */
4981 set_unique_reg_note (insn, REG_EQUAL, src_const);
4982 }
7afe21cc
RK
4983 }
4984
4985 /* Now deal with the destination. */
4986 do_not_record = 0;
7afe21cc 4987
46d096a3
SB
4988 /* Look within any ZERO_EXTRACT to the MEM or REG within it. */
4989 while (GET_CODE (dest) == SUBREG
7afe21cc 4990 || GET_CODE (dest) == ZERO_EXTRACT
7afe21cc 4991 || GET_CODE (dest) == STRICT_LOW_PART)
0339ce7e 4992 dest = XEXP (dest, 0);
7afe21cc
RK
4993
4994 sets[i].inner_dest = dest;
4995
3c0cb5de 4996 if (MEM_P (dest))
7afe21cc 4997 {
9ae8ffe7
JL
4998#ifdef PUSH_ROUNDING
4999 /* Stack pushes invalidate the stack pointer. */
5000 rtx addr = XEXP (dest, 0);
ec8e098d 5001 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
9ae8ffe7 5002 && XEXP (addr, 0) == stack_pointer_rtx)
524e3576 5003 invalidate (stack_pointer_rtx, VOIDmode);
9ae8ffe7 5004#endif
7afe21cc 5005 dest = fold_rtx (dest, insn);
7afe21cc
RK
5006 }
5007
5008 /* Compute the hash code of the destination now,
5009 before the effects of this instruction are recorded,
5010 since the register values used in the address computation
5011 are those before this instruction. */
2197a88a 5012 sets[i].dest_hash = HASH (dest, mode);
7afe21cc
RK
5013
5014 /* Don't enter a bit-field in the hash table
5015 because the value in it after the store
5016 may not equal what was stored, due to truncation. */
5017
46d096a3 5018 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
7afe21cc
RK
5019 {
5020 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5021
5022 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5023 && GET_CODE (width) == CONST_INT
906c4e36
RK
5024 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5025 && ! (INTVAL (src_const)
5026 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
7afe21cc
RK
5027 /* Exception: if the value is constant,
5028 and it won't be truncated, record it. */
5029 ;
5030 else
5031 {
5032 /* This is chosen so that the destination will be invalidated
5033 but no new value will be recorded.
5034 We must invalidate because sometimes constant
5035 values can be recorded for bitfields. */
5036 sets[i].src_elt = 0;
5037 sets[i].src_volatile = 1;
5038 src_eqv = 0;
5039 src_eqv_elt = 0;
5040 }
5041 }
5042
5043 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5044 the insn. */
5045 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5046 {
ef178af3 5047 /* One less use of the label this insn used to jump to. */
0129d079 5048 delete_insn_and_edges (insn);
7afe21cc 5049 cse_jumps_altered = 1;
7afe21cc
RK
5050 /* No more processing for this set. */
5051 sets[i].rtl = 0;
5052 }
5053
5054 /* If this SET is now setting PC to a label, we know it used to
d466c016 5055 be a conditional or computed branch. */
8f235343
JH
5056 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5057 && !LABEL_REF_NONLOCAL_P (src))
7afe21cc 5058 {
8fb1e50e
GS
5059 /* Now emit a BARRIER after the unconditional jump. */
5060 if (NEXT_INSN (insn) == 0
4b4bf941 5061 || !BARRIER_P (NEXT_INSN (insn)))
8fb1e50e
GS
5062 emit_barrier_after (insn);
5063
d466c016
JL
5064 /* We reemit the jump in as many cases as possible just in
5065 case the form of an unconditional jump is significantly
5066 different than a computed jump or conditional jump.
5067
5068 If this insn has multiple sets, then reemitting the
5069 jump is nontrivial. So instead we just force rerecognition
5070 and hope for the best. */
5071 if (n_sets == 1)
7afe21cc 5072 {
9dcb4381 5073 rtx new, note;
8fb1e50e 5074
0129d079 5075 new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
7afe21cc
RK
5076 JUMP_LABEL (new) = XEXP (src, 0);
5077 LABEL_NUSES (XEXP (src, 0))++;
9dcb4381
RH
5078
5079 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5080 note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5081 if (note)
5082 {
5083 XEXP (note, 1) = NULL_RTX;
5084 REG_NOTES (new) = note;
5085 }
5086
0129d079 5087 delete_insn_and_edges (insn);
7afe21cc 5088 insn = new;
8fb1e50e
GS
5089
5090 /* Now emit a BARRIER after the unconditional jump. */
5091 if (NEXT_INSN (insn) == 0
4b4bf941 5092 || !BARRIER_P (NEXT_INSN (insn)))
8fb1e50e 5093 emit_barrier_after (insn);
7afe21cc 5094 }
31dcf83f 5095 else
31dcf83f 5096 INSN_CODE (insn) = -1;
7afe21cc 5097
8fb1e50e
GS
5098 /* Do not bother deleting any unreachable code,
5099 let jump/flow do that. */
7afe21cc
RK
5100
5101 cse_jumps_altered = 1;
5102 sets[i].rtl = 0;
5103 }
5104
c2a47e48
RK
5105 /* If destination is volatile, invalidate it and then do no further
5106 processing for this assignment. */
7afe21cc
RK
5107
5108 else if (do_not_record)
c2a47e48 5109 {
f8cfc6aa 5110 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
bb4034b3 5111 invalidate (dest, VOIDmode);
3c0cb5de 5112 else if (MEM_P (dest))
32fab725 5113 invalidate (dest, VOIDmode);
2708da92
RS
5114 else if (GET_CODE (dest) == STRICT_LOW_PART
5115 || GET_CODE (dest) == ZERO_EXTRACT)
bb4034b3 5116 invalidate (XEXP (dest, 0), GET_MODE (dest));
c2a47e48
RK
5117 sets[i].rtl = 0;
5118 }
7afe21cc
RK
5119
5120 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
2197a88a 5121 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
7afe21cc
RK
5122
5123#ifdef HAVE_cc0
5124 /* If setting CC0, record what it was set to, or a constant, if it
5125 is equivalent to a constant. If it is being set to a floating-point
5126 value, make a COMPARE with the appropriate constant of 0. If we
5127 don't do this, later code can interpret this as a test against
5128 const0_rtx, which can cause problems if we try to put it into an
5129 insn as a floating-point operand. */
5130 if (dest == cc0_rtx)
5131 {
5132 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5133 this_insn_cc0_mode = mode;
cbf6a543 5134 if (FLOAT_MODE_P (mode))
38a448ca
RH
5135 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5136 CONST0_RTX (mode));
7afe21cc
RK
5137 }
5138#endif
5139 }
5140
5141 /* Now enter all non-volatile source expressions in the hash table
5142 if they are not already present.
5143 Record their equivalence classes in src_elt.
5144 This way we can insert the corresponding destinations into
5145 the same classes even if the actual sources are no longer in them
5146 (having been invalidated). */
5147
5148 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5149 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5150 {
b3694847
SS
5151 struct table_elt *elt;
5152 struct table_elt *classp = sets[0].src_elt;
7afe21cc
RK
5153 rtx dest = SET_DEST (sets[0].rtl);
5154 enum machine_mode eqvmode = GET_MODE (dest);
5155
5156 if (GET_CODE (dest) == STRICT_LOW_PART)
5157 {
5158 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5159 classp = 0;
5160 }
5161 if (insert_regs (src_eqv, classp, 0))
8ae2b8f6
JW
5162 {
5163 rehash_using_reg (src_eqv);
5164 src_eqv_hash = HASH (src_eqv, eqvmode);
5165 }
2197a88a 5166 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7afe21cc 5167 elt->in_memory = src_eqv_in_memory;
7afe21cc 5168 src_eqv_elt = elt;
f7911249
JW
5169
5170 /* Check to see if src_eqv_elt is the same as a set source which
5171 does not yet have an elt, and if so set the elt of the set source
5172 to src_eqv_elt. */
5173 for (i = 0; i < n_sets; i++)
26132f71
JW
5174 if (sets[i].rtl && sets[i].src_elt == 0
5175 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
f7911249 5176 sets[i].src_elt = src_eqv_elt;
7afe21cc
RK
5177 }
5178
5179 for (i = 0; i < n_sets; i++)
5180 if (sets[i].rtl && ! sets[i].src_volatile
5181 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5182 {
5183 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5184 {
5185 /* REG_EQUAL in setting a STRICT_LOW_PART
5186 gives an equivalent for the entire destination register,
5187 not just for the subreg being stored in now.
5188 This is a more interesting equivalence, so we arrange later
5189 to treat the entire reg as the destination. */
5190 sets[i].src_elt = src_eqv_elt;
2197a88a 5191 sets[i].src_hash = src_eqv_hash;
7afe21cc
RK
5192 }
5193 else
5194 {
5195 /* Insert source and constant equivalent into hash table, if not
5196 already present. */
b3694847
SS
5197 struct table_elt *classp = src_eqv_elt;
5198 rtx src = sets[i].src;
5199 rtx dest = SET_DEST (sets[i].rtl);
7afe21cc
RK
5200 enum machine_mode mode
5201 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5202
1fcc57f1
AM
5203 /* It's possible that we have a source value known to be
5204 constant but don't have a REG_EQUAL note on the insn.
5205 Lack of a note will mean src_eqv_elt will be NULL. This
5206 can happen where we've generated a SUBREG to access a
5207 CONST_INT that is already in a register in a wider mode.
5208 Ensure that the source expression is put in the proper
5209 constant class. */
5210 if (!classp)
5211 classp = sets[i].src_const_elt;
5212
26132f71 5213 if (sets[i].src_elt == 0)
7afe21cc 5214 {
26132f71
JW
5215 /* Don't put a hard register source into the table if this is
5216 the last insn of a libcall. In this case, we only need
5217 to put src_eqv_elt in src_elt. */
db4a8254 5218 if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
8ae2b8f6 5219 {
b3694847 5220 struct table_elt *elt;
26132f71
JW
5221
5222 /* Note that these insert_regs calls cannot remove
5223 any of the src_elt's, because they would have failed to
5224 match if not still valid. */
5225 if (insert_regs (src, classp, 0))
5226 {
5227 rehash_using_reg (src);
5228 sets[i].src_hash = HASH (src, mode);
5229 }
5230 elt = insert (src, classp, sets[i].src_hash, mode);
5231 elt->in_memory = sets[i].src_in_memory;
26132f71 5232 sets[i].src_elt = classp = elt;
8ae2b8f6 5233 }
26132f71
JW
5234 else
5235 sets[i].src_elt = classp;
7afe21cc 5236 }
7afe21cc
RK
5237 if (sets[i].src_const && sets[i].src_const_elt == 0
5238 && src != sets[i].src_const
5239 && ! rtx_equal_p (sets[i].src_const, src))
5240 sets[i].src_elt = insert (sets[i].src_const, classp,
2197a88a 5241 sets[i].src_const_hash, mode);
7afe21cc
RK
5242 }
5243 }
5244 else if (sets[i].src_elt == 0)
5245 /* If we did not insert the source into the hash table (e.g., it was
5246 volatile), note the equivalence class for the REG_EQUAL value, if any,
5247 so that the destination goes into that class. */
5248 sets[i].src_elt = src_eqv_elt;
5249
05c433f3
PB
5250 /* Record destination addresses in the hash table. This allows us to
5251 check if they are invalidated by other sets. */
5252 for (i = 0; i < n_sets; i++)
5253 {
5254 if (sets[i].rtl)
5255 {
5256 rtx x = sets[i].inner_dest;
5257 struct table_elt *elt;
5258 enum machine_mode mode;
5259 unsigned hash;
5260
5261 if (MEM_P (x))
5262 {
5263 x = XEXP (x, 0);
5264 mode = GET_MODE (x);
5265 hash = HASH (x, mode);
5266 elt = lookup (x, hash, mode);
5267 if (!elt)
5268 {
5269 if (insert_regs (x, NULL, 0))
5270 {
7e7e28c7
AO
5271 rtx dest = SET_DEST (sets[i].rtl);
5272
05c433f3
PB
5273 rehash_using_reg (x);
5274 hash = HASH (x, mode);
7e7e28c7 5275 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
05c433f3
PB
5276 }
5277 elt = insert (x, NULL, hash, mode);
5278 }
5279
5280 sets[i].dest_addr_elt = elt;
5281 }
5282 else
5283 sets[i].dest_addr_elt = NULL;
5284 }
5285 }
5286
9ae8ffe7 5287 invalidate_from_clobbers (x);
77fa0940 5288
278a83b2 5289 /* Some registers are invalidated by subroutine calls. Memory is
77fa0940
RK
5290 invalidated by non-constant calls. */
5291
4b4bf941 5292 if (CALL_P (insn))
7afe21cc 5293 {
24a28584 5294 if (! CONST_OR_PURE_CALL_P (insn))
9ae8ffe7 5295 invalidate_memory ();
7afe21cc
RK
5296 invalidate_for_call ();
5297 }
5298
5299 /* Now invalidate everything set by this instruction.
5300 If a SUBREG or other funny destination is being set,
5301 sets[i].rtl is still nonzero, so here we invalidate the reg
5302 a part of which is being set. */
5303
5304 for (i = 0; i < n_sets; i++)
5305 if (sets[i].rtl)
5306 {
bb4034b3
JW
5307 /* We can't use the inner dest, because the mode associated with
5308 a ZERO_EXTRACT is significant. */
b3694847 5309 rtx dest = SET_DEST (sets[i].rtl);
7afe21cc
RK
5310
5311 /* Needed for registers to remove the register from its
5312 previous quantity's chain.
5313 Needed for memory if this is a nonvarying address, unless
5314 we have just done an invalidate_memory that covers even those. */
f8cfc6aa 5315 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
bb4034b3 5316 invalidate (dest, VOIDmode);
3c0cb5de 5317 else if (MEM_P (dest))
32fab725 5318 invalidate (dest, VOIDmode);
2708da92
RS
5319 else if (GET_CODE (dest) == STRICT_LOW_PART
5320 || GET_CODE (dest) == ZERO_EXTRACT)
bb4034b3 5321 invalidate (XEXP (dest, 0), GET_MODE (dest));
7afe21cc
RK
5322 }
5323
01e752d3 5324 /* A volatile ASM invalidates everything. */
4b4bf941 5325 if (NONJUMP_INSN_P (insn)
01e752d3
JL
5326 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5327 && MEM_VOLATILE_P (PATTERN (insn)))
5328 flush_hash_table ();
5329
932ad4d9
SB
5330 /* Don't cse over a call to setjmp; on some machines (eg VAX)
5331 the regs restored by the longjmp come from a later time
5332 than the setjmp. */
5333 if (CALL_P (insn) && find_reg_note (insn, REG_SETJMP, NULL))
5334 {
5335 flush_hash_table ();
5336 goto done;
5337 }
5338
7afe21cc
RK
5339 /* Make sure registers mentioned in destinations
5340 are safe for use in an expression to be inserted.
5341 This removes from the hash table
5342 any invalid entry that refers to one of these registers.
5343
5344 We don't care about the return value from mention_regs because
5345 we are going to hash the SET_DEST values unconditionally. */
5346
5347 for (i = 0; i < n_sets; i++)
34c73909
R
5348 {
5349 if (sets[i].rtl)
5350 {
5351 rtx x = SET_DEST (sets[i].rtl);
5352
f8cfc6aa 5353 if (!REG_P (x))
34c73909
R
5354 mention_regs (x);
5355 else
5356 {
5357 /* We used to rely on all references to a register becoming
5358 inaccessible when a register changes to a new quantity,
5359 since that changes the hash code. However, that is not
9b1549b8 5360 safe, since after HASH_SIZE new quantities we get a
34c73909
R
5361 hash 'collision' of a register with its own invalid
5362 entries. And since SUBREGs have been changed not to
5363 change their hash code with the hash code of the register,
5364 it wouldn't work any longer at all. So we have to check
5365 for any invalid references lying around now.
5366 This code is similar to the REG case in mention_regs,
5367 but it knows that reg_tick has been incremented, and
5368 it leaves reg_in_table as -1 . */
770ae6cc 5369 unsigned int regno = REGNO (x);
09e18274 5370 unsigned int endregno = END_REGNO (x);
770ae6cc 5371 unsigned int i;
34c73909
R
5372
5373 for (i = regno; i < endregno; i++)
5374 {
30f72379 5375 if (REG_IN_TABLE (i) >= 0)
34c73909
R
5376 {
5377 remove_invalid_refs (i);
30f72379 5378 REG_IN_TABLE (i) = -1;
34c73909
R
5379 }
5380 }
5381 }
5382 }
5383 }
7afe21cc
RK
5384
5385 /* We may have just removed some of the src_elt's from the hash table.
05c433f3
PB
5386 So replace each one with the current head of the same class.
5387 Also check if destination addresses have been removed. */
7afe21cc
RK
5388
5389 for (i = 0; i < n_sets; i++)
5390 if (sets[i].rtl)
5391 {
05c433f3
PB
5392 if (sets[i].dest_addr_elt
5393 && sets[i].dest_addr_elt->first_same_value == 0)
5394 {
e67b81d1 5395 /* The elt was removed, which means this destination is not
05c433f3
PB
5396 valid after this instruction. */
5397 sets[i].rtl = NULL_RTX;
5398 }
5399 else if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
7afe21cc
RK
5400 /* If elt was removed, find current head of same class,
5401 or 0 if nothing remains of that class. */
5402 {
b3694847 5403 struct table_elt *elt = sets[i].src_elt;
7afe21cc
RK
5404
5405 while (elt && elt->prev_same_value)
5406 elt = elt->prev_same_value;
5407
5408 while (elt && elt->first_same_value == 0)
5409 elt = elt->next_same_value;
5410 sets[i].src_elt = elt ? elt->first_same_value : 0;
5411 }
5412 }
5413
5414 /* Now insert the destinations into their equivalence classes. */
5415
5416 for (i = 0; i < n_sets; i++)
5417 if (sets[i].rtl)
5418 {
b3694847 5419 rtx dest = SET_DEST (sets[i].rtl);
b3694847 5420 struct table_elt *elt;
7afe21cc
RK
5421
5422 /* Don't record value if we are not supposed to risk allocating
5423 floating-point values in registers that might be wider than
5424 memory. */
5425 if ((flag_float_store
3c0cb5de 5426 && MEM_P (dest)
cbf6a543 5427 && FLOAT_MODE_P (GET_MODE (dest)))
bc4ddc77
JW
5428 /* Don't record BLKmode values, because we don't know the
5429 size of it, and can't be sure that other BLKmode values
5430 have the same or smaller size. */
5431 || GET_MODE (dest) == BLKmode
7afe21cc
RK
5432 /* Don't record values of destinations set inside a libcall block
5433 since we might delete the libcall. Things should have been set
5434 up so we won't want to reuse such a value, but we play it safe
5435 here. */
7bd8b2a8 5436 || libcall_insn
7afe21cc
RK
5437 /* If we didn't put a REG_EQUAL value or a source into the hash
5438 table, there is no point is recording DEST. */
1a8e9a8e
RK
5439 || sets[i].src_elt == 0
5440 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
5441 or SIGN_EXTEND, don't record DEST since it can cause
5442 some tracking to be wrong.
5443
5444 ??? Think about this more later. */
5445 || (GET_CODE (dest) == SUBREG
5446 && (GET_MODE_SIZE (GET_MODE (dest))
5447 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5448 && (GET_CODE (sets[i].src) == SIGN_EXTEND
5449 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
7afe21cc
RK
5450 continue;
5451
5452 /* STRICT_LOW_PART isn't part of the value BEING set,
5453 and neither is the SUBREG inside it.
5454 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
5455 if (GET_CODE (dest) == STRICT_LOW_PART)
5456 dest = SUBREG_REG (XEXP (dest, 0));
5457
f8cfc6aa 5458 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
7afe21cc
RK
5459 /* Registers must also be inserted into chains for quantities. */
5460 if (insert_regs (dest, sets[i].src_elt, 1))
8ae2b8f6
JW
5461 {
5462 /* If `insert_regs' changes something, the hash code must be
5463 recalculated. */
5464 rehash_using_reg (dest);
5465 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5466 }
7afe21cc 5467
8fff4fc1
RH
5468 elt = insert (dest, sets[i].src_elt,
5469 sets[i].dest_hash, GET_MODE (dest));
9de2c71a 5470
3c0cb5de 5471 elt->in_memory = (MEM_P (sets[i].inner_dest)
389fdba0 5472 && !MEM_READONLY_P (sets[i].inner_dest));
c256df0b 5473
fc3ffe83
RK
5474 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
5475 narrower than M2, and both M1 and M2 are the same number of words,
5476 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
5477 make that equivalence as well.
7afe21cc 5478
4de249d9
PB
5479 However, BAR may have equivalences for which gen_lowpart
5480 will produce a simpler value than gen_lowpart applied to
7afe21cc 5481 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
278a83b2 5482 BAR's equivalences. If we don't get a simplified form, make
7afe21cc
RK
5483 the SUBREG. It will not be used in an equivalence, but will
5484 cause two similar assignments to be detected.
5485
5486 Note the loop below will find SUBREG_REG (DEST) since we have
5487 already entered SRC and DEST of the SET in the table. */
5488
5489 if (GET_CODE (dest) == SUBREG
6cdbaec4
RK
5490 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
5491 / UNITS_PER_WORD)
278a83b2 5492 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
7afe21cc
RK
5493 && (GET_MODE_SIZE (GET_MODE (dest))
5494 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5495 && sets[i].src_elt != 0)
5496 {
5497 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
5498 struct table_elt *elt, *classp = 0;
5499
5500 for (elt = sets[i].src_elt->first_same_value; elt;
5501 elt = elt->next_same_value)
5502 {
5503 rtx new_src = 0;
2197a88a 5504 unsigned src_hash;
7afe21cc 5505 struct table_elt *src_elt;
ff27a429 5506 int byte = 0;
7afe21cc
RK
5507
5508 /* Ignore invalid entries. */
f8cfc6aa 5509 if (!REG_P (elt->exp)
0516f6fe 5510 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
7afe21cc
RK
5511 continue;
5512
9beb7d20
RH
5513 /* We may have already been playing subreg games. If the
5514 mode is already correct for the destination, use it. */
5515 if (GET_MODE (elt->exp) == new_mode)
5516 new_src = elt->exp;
5517 else
5518 {
5519 /* Calculate big endian correction for the SUBREG_BYTE.
5520 We have already checked that M1 (GET_MODE (dest))
5521 is not narrower than M2 (new_mode). */
5522 if (BYTES_BIG_ENDIAN)
5523 byte = (GET_MODE_SIZE (GET_MODE (dest))
5524 - GET_MODE_SIZE (new_mode));
5525
5526 new_src = simplify_gen_subreg (new_mode, elt->exp,
5527 GET_MODE (dest), byte);
5528 }
5529
ff27a429
R
5530 /* The call to simplify_gen_subreg fails if the value
5531 is VOIDmode, yet we can't do any simplification, e.g.
5532 for EXPR_LISTs denoting function call results.
5533 It is invalid to construct a SUBREG with a VOIDmode
5534 SUBREG_REG, hence a zero new_src means we can't do
5535 this substitution. */
5536 if (! new_src)
5537 continue;
7afe21cc
RK
5538
5539 src_hash = HASH (new_src, new_mode);
5540 src_elt = lookup (new_src, src_hash, new_mode);
5541
5542 /* Put the new source in the hash table is if isn't
5543 already. */
5544 if (src_elt == 0)
5545 {
5546 if (insert_regs (new_src, classp, 0))
8ae2b8f6
JW
5547 {
5548 rehash_using_reg (new_src);
5549 src_hash = HASH (new_src, new_mode);
5550 }
7afe21cc
RK
5551 src_elt = insert (new_src, classp, src_hash, new_mode);
5552 src_elt->in_memory = elt->in_memory;
7afe21cc
RK
5553 }
5554 else if (classp && classp != src_elt->first_same_value)
278a83b2 5555 /* Show that two things that we've seen before are
7afe21cc
RK
5556 actually the same. */
5557 merge_equiv_classes (src_elt, classp);
5558
5559 classp = src_elt->first_same_value;
da932f04
JL
5560 /* Ignore invalid entries. */
5561 while (classp
f8cfc6aa 5562 && !REG_P (classp->exp)
0516f6fe 5563 && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
da932f04 5564 classp = classp->next_same_value;
7afe21cc
RK
5565 }
5566 }
5567 }
5568
403e25d0
RK
5569 /* Special handling for (set REG0 REG1) where REG0 is the
5570 "cheapest", cheaper than REG1. After cse, REG1 will probably not
5571 be used in the sequel, so (if easily done) change this insn to
5572 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
5573 that computed their value. Then REG1 will become a dead store
5574 and won't cloud the situation for later optimizations.
7afe21cc
RK
5575
5576 Do not make this change if REG1 is a hard register, because it will
5577 then be used in the sequel and we may be changing a two-operand insn
5578 into a three-operand insn.
5579
50270076
R
5580 Also do not do this if we are operating on a copy of INSN.
5581
5582 Also don't do this if INSN ends a libcall; this would cause an unrelated
5583 register to be set in the middle of a libcall, and we then get bad code
5584 if the libcall is deleted. */
7afe21cc 5585
f8cfc6aa 5586 if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
7afe21cc 5587 && NEXT_INSN (PREV_INSN (insn)) == insn
f8cfc6aa 5588 && REG_P (SET_SRC (sets[0].rtl))
7afe21cc 5589 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
1bb98cec 5590 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
7afe21cc 5591 {
1bb98cec
DM
5592 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
5593 struct qty_table_elem *src_ent = &qty_table[src_q];
7afe21cc 5594
1bb98cec
DM
5595 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
5596 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
7afe21cc 5597 {
3e25353e
AH
5598 /* Scan for the previous nonnote insn, but stop at a basic
5599 block boundary. */
932ad4d9
SB
5600 rtx prev = insn;
5601 rtx bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
3e25353e
AH
5602 do
5603 {
5604 prev = PREV_INSN (prev);
5605 }
932ad4d9 5606 while (prev != bb_head && NOTE_P (prev));
7080f735 5607
58ecb5e2
RS
5608 /* Do not swap the registers around if the previous instruction
5609 attaches a REG_EQUIV note to REG1.
5610
5611 ??? It's not entirely clear whether we can transfer a REG_EQUIV
5612 from the pseudo that originally shadowed an incoming argument
5613 to another register. Some uses of REG_EQUIV might rely on it
5614 being attached to REG1 rather than REG2.
5615
5616 This section previously turned the REG_EQUIV into a REG_EQUAL
5617 note. We cannot do that because REG_EQUIV may provide an
4912a07c 5618 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
932ad4d9 5619 if (NONJUMP_INSN_P (prev)
403e25d0 5620 && GET_CODE (PATTERN (prev)) == SET
58ecb5e2
RS
5621 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
5622 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
1bb98cec
DM
5623 {
5624 rtx dest = SET_DEST (sets[0].rtl);
403e25d0 5625 rtx src = SET_SRC (sets[0].rtl);
58ecb5e2 5626 rtx note;
7afe21cc 5627
278a83b2
KH
5628 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
5629 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
5630 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
1bb98cec 5631 apply_change_group ();
7afe21cc 5632
403e25d0
RK
5633 /* If INSN has a REG_EQUAL note, and this note mentions
5634 REG0, then we must delete it, because the value in
5635 REG0 has changed. If the note's value is REG1, we must
5636 also delete it because that is now this insn's dest. */
1bb98cec 5637 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
403e25d0
RK
5638 if (note != 0
5639 && (reg_mentioned_p (dest, XEXP (note, 0))
5640 || rtx_equal_p (src, XEXP (note, 0))))
1bb98cec
DM
5641 remove_note (insn, note);
5642 }
7afe21cc
RK
5643 }
5644 }
5645
932ad4d9 5646done:;
7afe21cc
RK
5647}
5648\f
a4c6502a 5649/* Remove from the hash table all expressions that reference memory. */
14a774a9 5650
7afe21cc 5651static void
7080f735 5652invalidate_memory (void)
7afe21cc 5653{
b3694847
SS
5654 int i;
5655 struct table_elt *p, *next;
7afe21cc 5656
9b1549b8 5657 for (i = 0; i < HASH_SIZE; i++)
9ae8ffe7
JL
5658 for (p = table[i]; p; p = next)
5659 {
5660 next = p->next_same_hash;
5661 if (p->in_memory)
5662 remove_from_table (p, i);
5663 }
5664}
5665
7afe21cc
RK
5666/* Perform invalidation on the basis of everything about an insn
5667 except for invalidating the actual places that are SET in it.
5668 This includes the places CLOBBERed, and anything that might
5669 alias with something that is SET or CLOBBERed.
5670
7afe21cc
RK
5671 X is the pattern of the insn. */
5672
5673static void
7080f735 5674invalidate_from_clobbers (rtx x)
7afe21cc 5675{
7afe21cc
RK
5676 if (GET_CODE (x) == CLOBBER)
5677 {
5678 rtx ref = XEXP (x, 0);
9ae8ffe7
JL
5679 if (ref)
5680 {
f8cfc6aa 5681 if (REG_P (ref) || GET_CODE (ref) == SUBREG
3c0cb5de 5682 || MEM_P (ref))
9ae8ffe7
JL
5683 invalidate (ref, VOIDmode);
5684 else if (GET_CODE (ref) == STRICT_LOW_PART
5685 || GET_CODE (ref) == ZERO_EXTRACT)
5686 invalidate (XEXP (ref, 0), GET_MODE (ref));
5687 }
7afe21cc
RK
5688 }
5689 else if (GET_CODE (x) == PARALLEL)
5690 {
b3694847 5691 int i;
7afe21cc
RK
5692 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5693 {
b3694847 5694 rtx y = XVECEXP (x, 0, i);
7afe21cc
RK
5695 if (GET_CODE (y) == CLOBBER)
5696 {
5697 rtx ref = XEXP (y, 0);
f8cfc6aa 5698 if (REG_P (ref) || GET_CODE (ref) == SUBREG
3c0cb5de 5699 || MEM_P (ref))
9ae8ffe7
JL
5700 invalidate (ref, VOIDmode);
5701 else if (GET_CODE (ref) == STRICT_LOW_PART
5702 || GET_CODE (ref) == ZERO_EXTRACT)
5703 invalidate (XEXP (ref, 0), GET_MODE (ref));
7afe21cc
RK
5704 }
5705 }
5706 }
5707}
5708\f
5709/* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
5710 and replace any registers in them with either an equivalent constant
5711 or the canonical form of the register. If we are inside an address,
5712 only do this if the address remains valid.
5713
5714 OBJECT is 0 except when within a MEM in which case it is the MEM.
5715
5716 Return the replacement for X. */
5717
5718static rtx
7080f735 5719cse_process_notes (rtx x, rtx object)
7afe21cc
RK
5720{
5721 enum rtx_code code = GET_CODE (x);
6f7d635c 5722 const char *fmt = GET_RTX_FORMAT (code);
7afe21cc
RK
5723 int i;
5724
5725 switch (code)
5726 {
5727 case CONST_INT:
5728 case CONST:
5729 case SYMBOL_REF:
5730 case LABEL_REF:
5731 case CONST_DOUBLE:
69ef87e2 5732 case CONST_VECTOR:
7afe21cc
RK
5733 case PC:
5734 case CC0:
5735 case LO_SUM:
5736 return x;
5737
5738 case MEM:
c96208fa
DC
5739 validate_change (x, &XEXP (x, 0),
5740 cse_process_notes (XEXP (x, 0), x), 0);
7afe21cc
RK
5741 return x;
5742
5743 case EXPR_LIST:
5744 case INSN_LIST:
5745 if (REG_NOTE_KIND (x) == REG_EQUAL)
906c4e36 5746 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
7afe21cc 5747 if (XEXP (x, 1))
906c4e36 5748 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
7afe21cc
RK
5749 return x;
5750
e4890d45
RS
5751 case SIGN_EXTEND:
5752 case ZERO_EXTEND:
0b0ee36c 5753 case SUBREG:
e4890d45
RS
5754 {
5755 rtx new = cse_process_notes (XEXP (x, 0), object);
5756 /* We don't substitute VOIDmode constants into these rtx,
5757 since they would impede folding. */
5758 if (GET_MODE (new) != VOIDmode)
5759 validate_change (object, &XEXP (x, 0), new, 0);
5760 return x;
5761 }
5762
7afe21cc 5763 case REG:
30f72379 5764 i = REG_QTY (REGNO (x));
7afe21cc
RK
5765
5766 /* Return a constant or a constant register. */
1bb98cec 5767 if (REGNO_QTY_VALID_P (REGNO (x)))
7afe21cc 5768 {
1bb98cec
DM
5769 struct qty_table_elem *ent = &qty_table[i];
5770
5771 if (ent->const_rtx != NULL_RTX
5772 && (CONSTANT_P (ent->const_rtx)
f8cfc6aa 5773 || REG_P (ent->const_rtx)))
1bb98cec 5774 {
4de249d9 5775 rtx new = gen_lowpart (GET_MODE (x), ent->const_rtx);
1bb98cec 5776 if (new)
3e916873 5777 return copy_rtx (new);
1bb98cec 5778 }
7afe21cc
RK
5779 }
5780
5781 /* Otherwise, canonicalize this register. */
906c4e36 5782 return canon_reg (x, NULL_RTX);
278a83b2 5783
e9a25f70
JL
5784 default:
5785 break;
7afe21cc
RK
5786 }
5787
5788 for (i = 0; i < GET_RTX_LENGTH (code); i++)
5789 if (fmt[i] == 'e')
5790 validate_change (object, &XEXP (x, i),
7fe34fdf 5791 cse_process_notes (XEXP (x, i), object), 0);
7afe21cc
RK
5792
5793 return x;
5794}
5795\f
932ad4d9 5796/* Find a path in the CFG, starting with FIRST_BB to perform CSE on.
7afe21cc 5797
932ad4d9
SB
5798 DATA is a pointer to a struct cse_basic_block_data, that is used to
5799 describe the path.
5800 It is filled with a queue of basic blocks, starting with FIRST_BB
5801 and following a trace through the CFG.
5802
5803 If all paths starting at FIRST_BB have been followed, or no new path
5804 starting at FIRST_BB can be constructed, this function returns FALSE.
5805 Otherwise, DATA->path is filled and the function returns TRUE indicating
5806 that a path to follow was found.
7afe21cc 5807
2e226e66 5808 If FOLLOW_JUMPS is false, the maximum path length is 1 and the only
932ad4d9 5809 block in the path will be FIRST_BB. */
7afe21cc 5810
932ad4d9
SB
5811static bool
5812cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
5813 int follow_jumps)
7afe21cc 5814{
932ad4d9
SB
5815 basic_block bb;
5816 edge e;
5817 int path_size;
5818
5819 SET_BIT (cse_visited_basic_blocks, first_bb->index);
7afe21cc 5820
932ad4d9
SB
5821 /* See if there is a previous path. */
5822 path_size = data->path_size;
5823
5824 /* There is a previous path. Make sure it started with FIRST_BB. */
5825 if (path_size)
5826 gcc_assert (data->path[0].bb == first_bb);
5827
5828 /* There was only one basic block in the last path. Clear the path and
5829 return, so that paths starting at another basic block can be tried. */
5830 if (path_size == 1)
5831 {
5832 path_size = 0;
5833 goto done;
5834 }
5835
5836 /* If the path was empty from the beginning, construct a new path. */
5837 if (path_size == 0)
5838 data->path[path_size++].bb = first_bb;
5839 else
7afe21cc 5840 {
932ad4d9
SB
5841 /* Otherwise, path_size must be equal to or greater than 2, because
5842 a previous path exists that is at least two basic blocks long.
5843
5844 Update the previous branch path, if any. If the last branch was
5845 previously along the branch edge, take the fallthrough edge now. */
5846 while (path_size >= 2)
7afe21cc 5847 {
932ad4d9
SB
5848 basic_block last_bb_in_path, previous_bb_in_path;
5849 edge e;
5850
5851 --path_size;
5852 last_bb_in_path = data->path[path_size].bb;
5853 previous_bb_in_path = data->path[path_size - 1].bb;
5854
5855 /* If we previously followed a path along the branch edge, try
5856 the fallthru edge now. */
5857 if (EDGE_COUNT (previous_bb_in_path->succs) == 2
5858 && any_condjump_p (BB_END (previous_bb_in_path))
5859 && (e = find_edge (previous_bb_in_path, last_bb_in_path))
5860 && e == BRANCH_EDGE (previous_bb_in_path))
5861 {
5862 bb = FALLTHRU_EDGE (previous_bb_in_path)->dest;
5863 if (bb != EXIT_BLOCK_PTR
481e0a49
AO
5864 && single_pred_p (bb)
5865 /* We used to assert here that we would only see blocks
5866 that we have not visited yet. But we may end up
5867 visiting basic blocks twice if the CFG has changed
5868 in this run of cse_main, because when the CFG changes
5869 the topological sort of the CFG also changes. A basic
5870 blocks that previously had more than two predecessors
5871 may now have a single predecessor, and become part of
5872 a path that starts at another basic block.
5873
5874 We still want to visit each basic block only once, so
5875 halt the path here if we have already visited BB. */
5876 && !TEST_BIT (cse_visited_basic_blocks, bb->index))
932ad4d9 5877 {
932ad4d9
SB
5878 SET_BIT (cse_visited_basic_blocks, bb->index);
5879 data->path[path_size++].bb = bb;
5880 break;
5881 }
5882 }
5883
5884 data->path[path_size].bb = NULL;
5885 }
5886
5887 /* If only one block remains in the path, bail. */
5888 if (path_size == 1)
5889 {
5890 path_size = 0;
5891 goto done;
7afe21cc 5892 }
7afe21cc
RK
5893 }
5894
932ad4d9
SB
5895 /* Extend the path if possible. */
5896 if (follow_jumps)
7afe21cc 5897 {
932ad4d9
SB
5898 bb = data->path[path_size - 1].bb;
5899 while (bb && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH))
5900 {
5901 if (single_succ_p (bb))
5902 e = single_succ_edge (bb);
5903 else if (EDGE_COUNT (bb->succs) == 2
5904 && any_condjump_p (BB_END (bb)))
5905 {
5906 /* First try to follow the branch. If that doesn't lead
5907 to a useful path, follow the fallthru edge. */
5908 e = BRANCH_EDGE (bb);
5909 if (!single_pred_p (e->dest))
5910 e = FALLTHRU_EDGE (bb);
5911 }
5912 else
5913 e = NULL;
7afe21cc 5914
932ad4d9 5915 if (e && e->dest != EXIT_BLOCK_PTR
481e0a49
AO
5916 && single_pred_p (e->dest)
5917 /* Avoid visiting basic blocks twice. The large comment
5918 above explains why this can happen. */
5919 && !TEST_BIT (cse_visited_basic_blocks, e->dest->index))
932ad4d9
SB
5920 {
5921 basic_block bb2 = e->dest;
932ad4d9
SB
5922 SET_BIT (cse_visited_basic_blocks, bb2->index);
5923 data->path[path_size++].bb = bb2;
5924 bb = bb2;
5925 }
5926 else
5927 bb = NULL;
5928 }
5929 }
5930
5931done:
5932 data->path_size = path_size;
5933 return path_size != 0;
5934}
5935\f
5936/* Dump the path in DATA to file F. NSETS is the number of sets
5937 in the path. */
5938
5939static void
5940cse_dump_path (struct cse_basic_block_data *data, int nsets, FILE *f)
5941{
5942 int path_entry;
5943
5944 fprintf (f, ";; Following path with %d sets: ", nsets);
5945 for (path_entry = 0; path_entry < data->path_size; path_entry++)
5946 fprintf (f, "%d ", (data->path[path_entry].bb)->index);
5947 fputc ('\n', dump_file);
5948 fflush (f);
5949}
5950
a7582f7c
SB
5951\f
5952/* Return true if BB has exception handling successor edges. */
5953
5954static bool
5955have_eh_succ_edges (basic_block bb)
5956{
5957 edge e;
5958 edge_iterator ei;
5959
5960 FOR_EACH_EDGE (e, ei, bb->succs)
5961 if (e->flags & EDGE_EH)
5962 return true;
5963
5964 return false;
5965}
5966
932ad4d9
SB
5967\f
5968/* Scan to the end of the path described by DATA. Return an estimate of
5969 the total number of SETs, and the lowest and highest insn CUID, of all
5970 insns in the path. */
5971
5972static void
5973cse_prescan_path (struct cse_basic_block_data *data)
5974{
5975 int nsets = 0;
5976 int low_cuid = -1, high_cuid = -1; /* FIXME low_cuid not computed correctly */
5977 int path_size = data->path_size;
5978 int path_entry;
5979
5980 /* Scan to end of each basic block in the path. */
5981 for (path_entry = 0; path_entry < path_size; path_entry++)
5982 {
5983 basic_block bb;
5984 rtx insn;
164c8956 5985
932ad4d9 5986 bb = data->path[path_entry].bb;
7afe21cc 5987
932ad4d9 5988 FOR_BB_INSNS (bb, insn)
7afe21cc 5989 {
932ad4d9
SB
5990 if (!INSN_P (insn))
5991 continue;
278a83b2 5992
932ad4d9
SB
5993 /* A PARALLEL can have lots of SETs in it,
5994 especially if it is really an ASM_OPERANDS. */
5995 if (GET_CODE (PATTERN (insn)) == PARALLEL)
5996 nsets += XVECLEN (PATTERN (insn), 0);
5997 else
5998 nsets += 1;
5999
6000 /* Ignore insns made by CSE in a previous traversal of this
6001 basic block. They cannot affect the boundaries of the
6002 basic block.
6003 FIXME: When we only visit each basic block at most once,
6004 this can go away. */
6005 if (INSN_UID (insn) <= max_uid && INSN_CUID (insn) > high_cuid)
6006 high_cuid = INSN_CUID (insn);
6007 if (INSN_UID (insn) <= max_uid && INSN_CUID (insn) < low_cuid)
6008 low_cuid = INSN_CUID (insn);
7afe21cc 6009 }
932ad4d9
SB
6010 }
6011
6012 data->low_cuid = low_cuid;
6013 data->high_cuid = high_cuid;
6014 data->nsets = nsets;
6015}
6016\f
6017/* Process a single extended basic block described by EBB_DATA. */
7afe21cc 6018
932ad4d9
SB
6019static void
6020cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
6021{
6022 int path_size = ebb_data->path_size;
6023 int path_entry;
6024 int num_insns = 0;
6025
6026 /* Allocate the space needed by qty_table. */
6027 qty_table = XNEWVEC (struct qty_table_elem, max_qty);
6028
6029 new_basic_block ();
6030 for (path_entry = 0; path_entry < path_size; path_entry++)
6031 {
6032 basic_block bb;
6033 rtx insn;
6034 rtx libcall_insn = NULL_RTX;
6035 int no_conflict = 0;
6036
6037 bb = ebb_data->path[path_entry].bb;
6038 FOR_BB_INSNS (bb, insn)
7afe21cc 6039 {
932ad4d9
SB
6040 /* If we have processed 1,000 insns, flush the hash table to
6041 avoid extreme quadratic behavior. We must not include NOTEs
6042 in the count since there may be more of them when generating
6043 debugging information. If we clear the table at different
6044 times, code generated with -g -O might be different than code
6045 generated with -O but not -g.
6046
6047 FIXME: This is a real kludge and needs to be done some other
6048 way. */
6049 if (INSN_P (insn)
6050 && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
6051 {
6052 flush_hash_table ();
6053 num_insns = 0;
6054 }
7afe21cc 6055
932ad4d9 6056 if (INSN_P (insn))
7afe21cc 6057 {
932ad4d9
SB
6058 /* Process notes first so we have all notes in canonical forms
6059 when looking for duplicate operations. */
6060 if (REG_NOTES (insn))
6061 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn),
6062 NULL_RTX);
6063
6064 /* Track when we are inside in LIBCALL block. Inside such
6065 a block we do not want to record destinations. The last
6066 insn of a LIBCALL block is not considered to be part of
6067 the block, since its destination is the result of the
6068 block and hence should be recorded. */
6069 if (REG_NOTES (insn) != 0)
fc3ffe83 6070 {
932ad4d9
SB
6071 rtx p;
6072
6073 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
6074 libcall_insn = XEXP (p, 0);
6075 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
6076 {
6077 /* Keep libcall_insn for the last SET insn of
6078 a no-conflict block to prevent changing the
6079 destination. */
6080 if (!no_conflict)
6081 libcall_insn = NULL_RTX;
6082 else
6083 no_conflict = -1;
6084 }
6085 else if (find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
6086 no_conflict = 1;
fc3ffe83 6087 }
7afe21cc 6088
932ad4d9 6089 cse_insn (insn, libcall_insn);
7afe21cc 6090
932ad4d9
SB
6091 /* If we kept libcall_insn for a no-conflict bock,
6092 clear it here. */
6093 if (no_conflict == -1)
6094 {
6095 libcall_insn = NULL_RTX;
6096 no_conflict = 0;
6097 }
6098
6099 /* If we haven't already found an insn where we added a LABEL_REF,
6100 check this one. */
6101 if (NONJUMP_INSN_P (insn) && ! recorded_label_ref
6102 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
6103 (void *) insn))
6104 recorded_label_ref = 1;
96fb470d
SB
6105
6106#ifdef HAVE_cc0
6107 /* If the previous insn set CC0 and this insn no longer
6108 references CC0, delete the previous insn. Here we use
6109 fact that nothing expects CC0 to be valid over an insn,
6110 which is true until the final pass. */
6111 {
6112 rtx prev_insn, tem;
6113
6114 prev_insn = PREV_INSN (insn);
6115 if (prev_insn && NONJUMP_INSN_P (prev_insn)
6116 && (tem = single_set (prev_insn)) != 0
6117 && SET_DEST (tem) == cc0_rtx
6118 && ! reg_mentioned_p (cc0_rtx, PATTERN (insn)))
6119 delete_insn (prev_insn);
6120 }
6121
6122 /* If this insn is not the last insn in the basic block,
6123 it will be PREV_INSN(insn) in the next iteration. If
6124 we recorded any CC0-related information for this insn,
6125 remember it. */
6126 if (insn != BB_END (bb))
6127 {
6128 prev_insn_cc0 = this_insn_cc0;
6129 prev_insn_cc0_mode = this_insn_cc0_mode;
6130 }
6131#endif
932ad4d9
SB
6132 }
6133 }
7afe21cc 6134
932ad4d9
SB
6135 /* Make sure that libcalls don't span multiple basic blocks. */
6136 gcc_assert (libcall_insn == NULL_RTX);
7afe21cc 6137
a7582f7c
SB
6138 /* With non-call exceptions, we are not always able to update
6139 the CFG properly inside cse_insn. So clean up possibly
6140 redundant EH edges here. */
6141 if (flag_non_call_exceptions && have_eh_succ_edges (bb))
6142 purge_dead_edges (bb);
6143
932ad4d9
SB
6144 /* If we changed a conditional jump, we may have terminated
6145 the path we are following. Check that by verifying that
6146 the edge we would take still exists. If the edge does
6147 not exist anymore, purge the remainder of the path.
6148 Note that this will cause us to return to the caller. */
6149 if (path_entry < path_size - 1)
6150 {
6151 basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6152 if (!find_edge (bb, next_bb))
27511c65
SB
6153 {
6154 do
6155 {
6156 path_size--;
6157
6158 /* If we truncate the path, we must also reset the
6159 visited bit on the remaining blocks in the path,
6160 or we will never visit them at all. */
6161 RESET_BIT (cse_visited_basic_blocks,
6162 ebb_data->path[path_size].bb->index);
6163 ebb_data->path[path_size].bb = NULL;
6164 }
6165 while (path_size - 1 != path_entry);
6166 ebb_data->path_size = path_size;
6167 }
7afe21cc 6168 }
7afe21cc 6169
932ad4d9
SB
6170 /* If this is a conditional jump insn, record any known
6171 equivalences due to the condition being tested. */
6172 insn = BB_END (bb);
6173 if (path_entry < path_size - 1
6174 && JUMP_P (insn)
6175 && single_set (insn)
6176 && any_condjump_p (insn))
6177 {
6178 basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6179 bool taken = (next_bb == BRANCH_EDGE (bb)->dest);
6180 record_jump_equiv (insn, taken);
6181 }
96fb470d
SB
6182
6183#ifdef HAVE_cc0
6184 /* Clear the CC0-tracking related insns, they can't provide
6185 useful information across basic block boundaries. */
6186 prev_insn_cc0 = 0;
6187#endif
932ad4d9 6188 }
7afe21cc 6189
932ad4d9 6190 gcc_assert (next_qty <= max_qty);
7afe21cc 6191
932ad4d9 6192 free (qty_table);
7afe21cc
RK
6193}
6194\f
7afe21cc
RK
/* Perform cse on the instructions of a function.
   F is the first instruction.
   NREGS is one plus the highest pseudo-reg number used in the instruction.

   Returns 1 if jump_optimize should be redone due to simplifications
   in conditional jump instructions.  */

int
cse_main (rtx f ATTRIBUTE_UNUSED, int nregs)
{
  struct cse_basic_block_data ebb_data;
  basic_block bb;
  /* Reverse postorder of the CFG; drives the order in which extended
     basic block paths are formed below.  */
  int *rc_order = XNEWVEC (int, last_basic_block);
  int i, n_blocks;

  init_cse_reg_info (nregs);

  ebb_data.path = XNEWVEC (struct branch_path,
			   PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));

  /* Reset the per-invocation result flags and path state.  */
  cse_jumps_altered = 0;
  recorded_label_ref = 0;
  constant_pool_entries_cost = 0;
  constant_pool_entries_regcost = 0;
  ebb_data.path_size = 0;
  ebb_data.nsets = 0;
  rtl_hooks = cse_rtl_hooks;

  init_recog ();
  init_alias_analysis ();

  reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);

  /* Set up the table of already visited basic blocks.  */
  cse_visited_basic_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (cse_visited_basic_blocks);

  /* Compute the mapping from uids to cuids.
     CUIDs are numbers assigned to insns, like uids, except that
     cuids increase monotonically through the code.  */
  max_uid = get_max_uid ();
  uid_cuid = XCNEWVEC (int, max_uid + 1);
  i = 0;
  FOR_EACH_BB (bb)
    {
      rtx insn;
      FOR_BB_INSNS (bb, insn)
	INSN_CUID (insn) = ++i;
    }

  /* Loop over basic blocks in reverse completion order (RPO),
     excluding the ENTRY and EXIT blocks.  */
  n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);
  i = 0;
  while (i < n_blocks)
    {
      /* Find the first block in the RPO queue that we have not yet
	 processed before.  */
      do
	{
	  bb = BASIC_BLOCK (rc_order[i++]);
	}
      while (TEST_BIT (cse_visited_basic_blocks, bb->index)
	     && i < n_blocks);

      /* Find all paths starting with BB, and process them.  */
      while (cse_find_path (bb, &ebb_data, flag_cse_follow_jumps))
	{
	  /* Pre-scan the path.  */
	  cse_prescan_path (&ebb_data);

	  /* If this basic block has no sets, skip it.  */
	  if (ebb_data.nsets == 0)
	    continue;

	  /* Get a reasonable estimate for the maximum number of qty's
	     needed for this path.  For this, we take the number of sets
	     and multiply that by MAX_RECOG_OPERANDS.  */
	  max_qty = ebb_data.nsets * MAX_RECOG_OPERANDS;
	  cse_basic_block_start = ebb_data.low_cuid;
	  cse_basic_block_end = ebb_data.high_cuid;

	  /* Dump the path we're about to process.  */
	  if (dump_file)
	    cse_dump_path (&ebb_data, ebb_data.nsets, dump_file);

	  cse_extended_basic_block (&ebb_data);
	}
    }

  /* Clean up.  */
  end_alias_analysis ();
  free (uid_cuid);
  free (reg_eqv_table);
  free (ebb_data.path);
  sbitmap_free (cse_visited_basic_blocks);
  free (rc_order);
  rtl_hooks = general_rtl_hooks;

  return cse_jumps_altered || recorded_label_ref;
}
6296\f
be8ac49a 6297/* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
45c23566 6298 there isn't a REG_LABEL note. Return one if so. DATA is the insn. */
be8ac49a
RK
6299
6300static int
7080f735 6301check_for_label_ref (rtx *rtl, void *data)
be8ac49a
RK
6302{
6303 rtx insn = (rtx) data;
6304
6305 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
6306 we must rerun jump since it needs to place the note. If this is a
6307 LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
ec5c56db 6308 since no REG_LABEL will be added. */
be8ac49a 6309 return (GET_CODE (*rtl) == LABEL_REF
45c23566 6310 && ! LABEL_REF_NONLOCAL_P (*rtl)
4838c5ee 6311 && LABEL_P (XEXP (*rtl, 0))
be8ac49a
RK
6312 && INSN_UID (XEXP (*rtl, 0)) != 0
6313 && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
6314}
6315\f
7afe21cc
RK
/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count, INCR is how much
   we count each register usage.  (INCR may be negative, which is how
   counts are removed again when an insn is deleted.)

   Don't count a usage of DEST, which is the SET_DEST of a SET which
   contains X in its SET_SRC.  This is because such a SET does not
   modify the liveness of DEST.
   DEST is set to pc_rtx for a trapping insn, which means that we must count
   uses of a SET_DEST regardless because the insn can't be deleted here.  */

static void
count_reg_usage (rtx x, int *counts, rtx dest, int incr)
{
  enum rtx_code code;
  rtx note;
  const char *fmt;
  int i, j;

  if (x == 0)
    return;

  switch (code = GET_CODE (x))
    {
    case REG:
      /* A use of DEST itself does not keep DEST live; see above.  */
      if (x != dest)
	counts[REGNO (x)] += incr;
      return;

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      /* Constants and labels contain no register uses.  */
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (MEM_P (XEXP (x, 0)))
	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.  */
      if (!REG_P (SET_DEST (x)))
	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
      count_reg_usage (SET_SRC (x), counts,
		       dest ? dest : SET_DEST (x),
		       incr);
      return;

    case CALL_INSN:
    case INSN:
    case JUMP_INSN:
      /* We expect dest to be NULL_RTX here.  If the insn may trap, mark
	 this fact by setting DEST to pc_rtx.  */
      if (flag_non_call_exceptions && may_trap_p (PATTERN (x)))
	dest = pc_rtx;
      if (code == CALL_INSN)
	count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
      count_reg_usage (PATTERN (x), counts, dest, incr);

      /* Things used in a REG_EQUAL note aren't dead since loop may try to
	 use them.  */

      note = find_reg_equal_equiv_note (x);
      if (note)
	{
	  rtx eqv = XEXP (note, 0);

	  if (GET_CODE (eqv) == EXPR_LIST)
	  /* This REG_EQUAL note describes the result of a function call.
	     Process all the arguments.  */
	    do
	      {
		count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
		eqv = XEXP (eqv, 1);
	      }
	    while (eqv && GET_CODE (eqv) == EXPR_LIST);
	  else
	    count_reg_usage (eqv, counts, dest, incr);
	}
      return;

    case EXPR_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL
	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
	  /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
	     involving registers in the address.  */
	  || GET_CODE (XEXP (x, 0)) == CLOBBER)
	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);

      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
      return;

    case ASM_OPERANDS:
      /* If the asm is volatile, then this insn cannot be deleted,
	 and so the inputs *must* be live.  */
      if (MEM_VOLATILE_P (x))
	dest = NULL_RTX;
      /* Iterate over just the inputs, not the constraints as well.  */
      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
      return;

    case INSN_LIST:
      gcc_unreachable ();

    default:
      break;
    }

  /* Generic fallback: recurse into every rtx subexpression and every
     vector of subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	count_reg_usage (XEXP (x, i), counts, dest, incr);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
    }
}
6441\f
4793dca1
JH
/* Return true if SET is live, i.e. must be preserved.  COUNTS is the
   register use-count array built by count_reg_usage: COUNTS[R] == 0
   means pseudo R is never used and a plain set of it is dead.  */
static bool
set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
	    int *counts)
{
#ifdef HAVE_cc0
  rtx tem;
#endif

  /* A no-op set is dead; fall through to the final "return false".  */
  if (set_noop_p (set))
    ;

#ifdef HAVE_cc0
  /* A set of cc0 is dead if the next real insn does not look at cc0.  */
  else if (GET_CODE (SET_DEST (set)) == CC0
	   && !side_effects_p (SET_SRC (set))
	   && ((tem = next_nonnote_insn (insn)) == 0
	       || !INSN_P (tem)
	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
    return false;
#endif
  /* Live if it sets something other than an unused pseudo, or if the
     source has side effects.  Hard-register sets are always live.  */
  else if (!REG_P (SET_DEST (set))
	   || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
	   || counts[REGNO (SET_DEST (set))] != 0
	   || side_effects_p (SET_SRC (set)))
    return true;
  return false;
}
6469
6470/* Return true if insn is live. */
6471
6472static bool
7080f735 6473insn_live_p (rtx insn, int *counts)
4793dca1
JH
6474{
6475 int i;
a3745024 6476 if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
a646f6cc
AH
6477 return true;
6478 else if (GET_CODE (PATTERN (insn)) == SET)
0021de69 6479 return set_live_p (PATTERN (insn), insn, counts);
4793dca1 6480 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
0021de69
DB
6481 {
6482 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6483 {
6484 rtx elt = XVECEXP (PATTERN (insn), 0, i);
4793dca1 6485
0021de69
DB
6486 if (GET_CODE (elt) == SET)
6487 {
6488 if (set_live_p (elt, insn, counts))
6489 return true;
6490 }
6491 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
6492 return true;
6493 }
6494 return false;
6495 }
4793dca1
JH
6496 else
6497 return true;
6498}
6499
/* Return true if libcall is dead as a whole.  INSN is the REG_RETVAL
   insn of the libcall sequence; COUNTS is the register use-count array
   and is kept up to date across any insn rewriting done here.  */

static bool
dead_libcall_p (rtx insn, int *counts)
{
  rtx note, set, new;

  /* See if there's a REG_EQUAL note on this insn and try to
     replace the source with the REG_EQUAL expression.

     We assume that insns with REG_RETVALs can only be reg->reg
     copies at this point.  */
  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
  if (!note)
    return false;

  set = single_set (insn);
  if (!set)
    return false;

  new = simplify_rtx (XEXP (note, 0));
  if (!new)
    new = XEXP (note, 0);

  /* While changing insn, we must update the counts accordingly.
     Remove the old uses first; they are re-added below whether or not
     the replacement succeeds.  */
  count_reg_usage (insn, counts, NULL_RTX, -1);

  if (validate_change (insn, &SET_SRC (set), new, 0))
    {
      /* The libcall result is now computed directly; detach the insn
	 from the libcall region so the rest of it can be deleted.  */
      count_reg_usage (insn, counts, NULL_RTX, 1);
      remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
      remove_note (insn, note);
      return true;
    }

  if (CONSTANT_P (new))
    {
      /* The simplified value didn't match the insn directly; try it
	 again as a constant-pool reference.  */
      new = force_const_mem (GET_MODE (SET_DEST (set)), new);
      if (new && validate_change (insn, &SET_SRC (set), new, 0))
	{
	  count_reg_usage (insn, counts, NULL_RTX, 1);
	  remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
	  remove_note (insn, note);
	  return true;
	}
    }

  /* Replacement failed; restore the use counts for the unchanged insn.  */
  count_reg_usage (insn, counts, NULL_RTX, 1);
  return false;
}
6550
7afe21cc
RK
/* Scan all the insns and delete any that are dead; i.e., they store a register
   that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in loop since it won't try to
   move dead invariants out of loops or make givs for dead quantities.  The
   remaining passes of the compilation are also sped up.

   INSNS is the insn chain; NREG is one plus the highest pseudo-reg
   number used.  Returns the number of insns deleted.  */

int
delete_trivially_dead_insns (rtx insns, int nreg)
{
  int *counts;
  rtx insn, prev;
  /* Tracks whether the backwards scan is currently inside a libcall
     region (between REG_RETVAL and REG_LIBCALL notes) and whether that
     whole region was found to be dead.  */
  int in_libcall = 0, dead_libcall = 0;
  int ndead = 0;

  timevar_push (TV_DELETE_TRIVIALLY_DEAD);
  /* First count the number of times each register is used.  */
  counts = XCNEWVEC (int, nreg);
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      count_reg_usage (insn, counts, NULL_RTX, 1);

  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.

     The first jump optimization pass may leave a real insn as the last
     insn in the function.   We must not skip that insn or we may end
     up deleting code that is not really dead.  */
  for (insn = get_last_insn (); insn; insn = prev)
    {
      int live_insn = 0;

      prev = PREV_INSN (insn);
      if (!INSN_P (insn))
	continue;

      /* Don't delete any insns that are part of a libcall block unless
	 we can delete the whole libcall block.

	 Flow or loop might get confused if we did that.  Remember
	 that we are scanning backwards.  */
      if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
	{
	  in_libcall = 1;
	  live_insn = 1;
	  dead_libcall = dead_libcall_p (insn, counts);
	}
      else if (in_libcall)
	live_insn = ! dead_libcall;
      else
	live_insn = insn_live_p (insn, counts);

      /* If this is a dead insn, delete it and show registers in it aren't
	 being used.  */

      if (! live_insn)
	{
	  count_reg_usage (insn, counts, NULL_RTX, -1);
	  delete_insn_and_edges (insn);
	  ndead++;
	}

      /* Leaving the libcall region (its first insn, scanning backwards).  */
      if (in_libcall && find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	{
	  in_libcall = 0;
	  dead_libcall = 0;
	}
    }

  if (dump_file && ndead)
    fprintf (dump_file, "Deleted %i trivially dead insns\n",
	     ndead);
  /* Clean up.  */
  free (counts);
  timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
  return ndead;
}
e129d93a
ILT
6630
6631/* This function is called via for_each_rtx. The argument, NEWREG, is
6632 a condition code register with the desired mode. If we are looking
6633 at the same register in a different mode, replace it with
6634 NEWREG. */
6635
6636static int
6637cse_change_cc_mode (rtx *loc, void *data)
6638{
fc188d37 6639 struct change_cc_mode_args* args = (struct change_cc_mode_args*)data;
e129d93a
ILT
6640
6641 if (*loc
f8cfc6aa 6642 && REG_P (*loc)
fc188d37
AK
6643 && REGNO (*loc) == REGNO (args->newreg)
6644 && GET_MODE (*loc) != GET_MODE (args->newreg))
e129d93a 6645 {
fc188d37
AK
6646 validate_change (args->insn, loc, args->newreg, 1);
6647
e129d93a
ILT
6648 return -1;
6649 }
6650 return 0;
6651}
6652
fc188d37
AK
6653/* Change the mode of any reference to the register REGNO (NEWREG) to
6654 GET_MODE (NEWREG) in INSN. */
6655
6656static void
6657cse_change_cc_mode_insn (rtx insn, rtx newreg)
6658{
6659 struct change_cc_mode_args args;
6660 int success;
6661
6662 if (!INSN_P (insn))
6663 return;
6664
6665 args.insn = insn;
6666 args.newreg = newreg;
6667
6668 for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
6669 for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, &args);
6670
6671 /* If the following assertion was triggered, there is most probably
6672 something wrong with the cc_modes_compatible back end function.
6673 CC modes only can be considered compatible if the insn - with the mode
6674 replaced by any of the compatible modes - can still be recognized. */
6675 success = apply_change_group ();
6676 gcc_assert (success);
6677}
6678
e129d93a
ILT
6679/* Change the mode of any reference to the register REGNO (NEWREG) to
6680 GET_MODE (NEWREG), starting at START. Stop before END. Stop at
2e802a6f 6681 any instruction which modifies NEWREG. */
e129d93a
ILT
6682
6683static void
6684cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
6685{
6686 rtx insn;
6687
6688 for (insn = start; insn != end; insn = NEXT_INSN (insn))
6689 {
6690 if (! INSN_P (insn))
6691 continue;
6692
2e802a6f 6693 if (reg_set_p (newreg, insn))
e129d93a
ILT
6694 return;
6695
fc188d37 6696 cse_change_cc_mode_insn (insn, newreg);
e129d93a
ILT
6697 }
6698}
6699
/* BB is a basic block which finishes with CC_REG as a condition code
   register which is set to CC_SRC.  Look through the successors of BB
   to find blocks which have a single predecessor (i.e., this one),
   and look through those blocks for an assignment to CC_REG which is
   equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
   permitted to change the mode of CC_SRC to a compatible mode.  This
   returns VOIDmode if no equivalent assignments were found.
   Otherwise it returns the mode which CC_SRC should wind up with.

   The main complexity in this function is handling the mode issues.
   We may have more than one duplicate which we can eliminate, and we
   try to find a mode which will work for multiple duplicates.  */

static enum machine_mode
cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
{
  bool found_equiv;
  enum machine_mode mode;
  unsigned int insn_count;
  edge e;
  /* Duplicate setters found so far whose mode still needs fixing up,
     the mode each one used, and the end of each one's block.  */
  rtx insns[2];
  enum machine_mode modes[2];
  rtx last_insns[2];
  unsigned int i;
  rtx newreg;
  edge_iterator ei;

  /* We expect to have two successors.  Look at both before picking
     the final mode for the comparison.  If we have more successors
     (i.e., some sort of table jump, although that seems unlikely),
     then we require all beyond the first two to use the same
     mode.  */

  found_equiv = false;
  mode = GET_MODE (cc_src);
  insn_count = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      rtx insn;
      rtx end;

      if (e->flags & EDGE_COMPLEX)
	continue;

      /* Only look into blocks dominated by this edge.  */
      if (EDGE_COUNT (e->dest->preds) != 1
	  || e->dest == EXIT_BLOCK_PTR)
	continue;

      end = NEXT_INSN (BB_END (e->dest));
      for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;

	  /* If CC_SRC is modified, we have to stop looking for
	     something which uses it.  */
	  if (modified_in_p (cc_src, insn))
	    break;

	  /* Check whether INSN sets CC_REG to CC_SRC.  */
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      bool found;
	      enum machine_mode set_mode;
	      enum machine_mode comp_mode;

	      found = false;
	      set_mode = GET_MODE (SET_SRC (set));
	      comp_mode = set_mode;
	      if (rtx_equal_p (cc_src, SET_SRC (set)))
		found = true;
	      else if (GET_CODE (cc_src) == COMPARE
		       && GET_CODE (SET_SRC (set)) == COMPARE
		       && mode != set_mode
		       && rtx_equal_p (XEXP (cc_src, 0),
				       XEXP (SET_SRC (set), 0))
		       && rtx_equal_p (XEXP (cc_src, 1),
				       XEXP (SET_SRC (set), 1)))

		{
		  /* Same comparison, different CC mode; ask the target
		     whether one mode subsumes both.  */
		  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
		  if (comp_mode != VOIDmode
		      && (can_change_mode || comp_mode == mode))
		    found = true;
		}

	      if (found)
		{
		  found_equiv = true;
		  if (insn_count < ARRAY_SIZE (insns))
		    {
		      insns[insn_count] = insn;
		      modes[insn_count] = set_mode;
		      last_insns[insn_count] = end;
		      ++insn_count;

		      if (mode != comp_mode)
			{
			  gcc_assert (can_change_mode);
			  mode = comp_mode;

			  /* The modified insn will be re-recognized later.  */
			  PUT_MODE (cc_src, mode);
			}
		    }
		  else
		    {
		      if (set_mode != mode)
			{
			  /* We found a matching expression in the
			     wrong mode, but we don't have room to
			     store it in the array.  Punt.  This case
			     should be rare.  */
			  break;
			}
		      /* INSN sets CC_REG to a value equal to CC_SRC
			 with the right mode.  We can simply delete
			 it.  */
		      delete_insn (insn);
		    }

		  /* We found an instruction to delete.  Keep looking,
		     in the hopes of finding a three-way jump.  */
		  continue;
		}

	      /* We found an instruction which sets the condition
		 code, so don't look any farther.  */
	      break;
	    }

	  /* If INSN sets CC_REG in some other way, don't look any
	     farther.  */
	  if (reg_set_p (cc_reg, insn))
	    break;
	}

      /* If we fell off the bottom of the block, we can keep looking
	 through successors.  We pass CAN_CHANGE_MODE as false because
	 we aren't prepared to handle compatibility between the
	 further blocks and this block.  */
      if (insn == end)
	{
	  enum machine_mode submode;

	  submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
	  if (submode != VOIDmode)
	    {
	      gcc_assert (submode == mode);
	      found_equiv = true;
	      can_change_mode = false;
	    }
	}
    }

  if (! found_equiv)
    return VOIDmode;

  /* Now INSN_COUNT is the number of instructions we found which set
     CC_REG to a value equivalent to CC_SRC.  The instructions are in
     INSNS.  The modes used by those instructions are in MODES.  */

  newreg = NULL_RTX;
  for (i = 0; i < insn_count; ++i)
    {
      if (modes[i] != mode)
	{
	  /* We need to change the mode of CC_REG in INSNS[i] and
	     subsequent instructions.  */
	  if (! newreg)
	    {
	      if (GET_MODE (cc_reg) == mode)
		newreg = cc_reg;
	      else
		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
	    }
	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
				    newreg);
	}

      delete_insn (insns[i]);
    }

  return mode;
}
6890
/* If we have a fixed condition code register (or two), walk through
   the instructions and try to eliminate duplicate assignments.  */

static void
cse_condition_code_reg (void)
{
  unsigned int cc_regno_1;
  unsigned int cc_regno_2;
  rtx cc_reg_1;
  rtx cc_reg_2;
  basic_block bb;

  /* Nothing to do on targets without fixed CC registers.  */
  if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
    return;

  cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
  if (cc_regno_2 != INVALID_REGNUM)
    cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
  else
    cc_reg_2 = NULL_RTX;

  FOR_EACH_BB (bb)
    {
      rtx last_insn;
      rtx cc_reg;
      rtx insn;
      rtx cc_src_insn;
      rtx cc_src;
      enum machine_mode mode;
      enum machine_mode orig_mode;

      /* Look for blocks which end with a conditional jump based on a
	 condition code register.  Then look for the instruction which
	 sets the condition code register.  Then look through the
	 successor blocks for instructions which set the condition
	 code register to the same value.  There are other possible
	 uses of the condition code register, but these are by far the
	 most common and the ones which we are most likely to be able
	 to optimize.  */

      last_insn = BB_END (bb);
      if (!JUMP_P (last_insn))
	continue;

      if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
	cc_reg = cc_reg_1;
      else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
	cc_reg = cc_reg_2;
      else
	continue;

      /* Scan backwards from the jump for the insn that sets CC_REG.  */
      cc_src_insn = NULL_RTX;
      cc_src = NULL_RTX;
      for (insn = PREV_INSN (last_insn);
	   insn && insn != PREV_INSN (BB_HEAD (bb));
	   insn = PREV_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      cc_src_insn = insn;
	      cc_src = SET_SRC (set);
	      break;
	    }
	  else if (reg_set_p (cc_reg, insn))
	    break;
	}

      if (! cc_src_insn)
	continue;

      if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
	continue;

      /* Now CC_REG is a condition code register used for a
	 conditional jump at the end of the block, and CC_SRC, in
	 CC_SRC_INSN, is the value to which that condition code
	 register is set, and CC_SRC is still meaningful at the end of
	 the basic block.  */

      orig_mode = GET_MODE (cc_src);
      mode = cse_cc_succs (bb, cc_reg, cc_src, true);
      if (mode != VOIDmode)
	{
	  gcc_assert (mode == GET_MODE (cc_src));
	  if (mode != orig_mode)
	    {
	      /* cse_cc_succs widened/changed the CC mode; propagate the
		 new mode to the setter and all users within BB.  */
	      rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));

	      cse_change_cc_mode_insn (cc_src_insn, newreg);

	      /* Do the same in the following insns that use the
		 current value of CC_REG within BB.  */
	      cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
					NEXT_INSN (last_insn),
					newreg);
	    }
	}
    }
}
ef330312
PB
6997\f
6998
6999/* Perform common subexpression elimination. Nonzero value from
7000 `cse_main' means that jumps were simplified and some code may now
7001 be unreachable, so do jump optimization again. */
7002static bool
7003gate_handle_cse (void)
7004{
7005 return optimize > 0;
7006}
7007
/* Execute function for the first CSE pass.  Runs cse_main and, if any
   jumps were simplified, rebuilds jump labels and cleans up the CFG.
   Always returns 0 (no extra TODO flags).  */
static unsigned int
rest_of_handle_cse (void)
{
  int tem;
  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  reg_scan (get_insns (), max_reg_num ());

  /* TEM is nonzero if cse simplified conditional jumps.  */
  tem = cse_main (get_insns (), max_reg_num ());

  /* If we are not running more CSE passes, then we are no longer
     expecting CSE to be run.  But always rerun it in a cheap mode.  */
  cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;

  if (tem)
    rebuild_jump_labels (get_insns ());

  if (tem || optimize > 1)
    cleanup_cfg (CLEANUP_EXPENSIVE);

  return 0;
}
7031
/* Pass descriptor for the first CSE pass ("cse1").  */
struct tree_opt_pass pass_cse =
{
  "cse1",                               /* name */
  gate_handle_cse,                      /* gate */
  rest_of_handle_cse,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CSE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_ggc_collect |
  TODO_verify_flow,                     /* todo_flags_finish */
  's'                                   /* letter */
};
7050
7051
7052static bool
7053gate_handle_cse2 (void)
7054{
7055 return optimize > 0 && flag_rerun_cse_after_loop;
7056}
7057
/* Run second CSE pass after loop optimizations.  Also eliminates
   duplicate condition-code assignments and trivially dead insns.
   Always returns 0 (no extra TODO flags).  */
static unsigned int
rest_of_handle_cse2 (void)
{
  int tem;

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  /* TEM is nonzero if cse simplified conditional jumps.  */
  tem = cse_main (get_insns (), max_reg_num ());

  /* Run a pass to eliminate duplicated assignments to condition code
     registers.  We have to run this after bypass_jumps, because it
     makes it harder for that pass to determine whether a jump can be
     bypassed safely.  */
  cse_condition_code_reg ();

  delete_trivially_dead_insns (get_insns (), max_reg_num ());

  if (tem)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      delete_dead_jumptables ();
      cleanup_cfg (CLEANUP_EXPENSIVE);
      timevar_pop (TV_JUMP);
    }
  reg_scan (get_insns (), max_reg_num ());
  /* No further CSE passes are expected after this one.  */
  cse_not_expected = 1;
  return 0;
}
7089
7090
/* Pass descriptor for the second CSE pass ("cse2"), run after loop
   optimizations.  */
struct tree_opt_pass pass_cse2 =
{
  "cse2",                               /* name */
  gate_handle_cse2,                     /* gate */
  rest_of_handle_cse2,                  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CSE2,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_ggc_collect |
  TODO_verify_flow,                     /* todo_flags_finish */
  't'                                   /* letter */
};
7109