]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cse.c
always define HAVE_cc0
[thirdparty/gcc.git] / gcc / cse.c
CommitLineData
7afe21cc 1/* Common subexpression elimination for GNU compiler.
5624e564 2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
7afe21cc 3
1322177d 4This file is part of GCC.
7afe21cc 5
1322177d
LB
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
1322177d 9version.
7afe21cc 10
1322177d
LB
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
7afe21cc
RK
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
7afe21cc 19
7afe21cc 20#include "config.h"
670ee920 21#include "system.h"
4977bab6
ZW
22#include "coretypes.h"
23#include "tm.h"
7afe21cc 24#include "rtl.h"
6baf1cc8 25#include "tm_p.h"
7afe21cc 26#include "hard-reg-set.h"
7932a3db 27#include "regs.h"
60393bbc
AM
28#include "predict.h"
29#include "vec.h"
83685514
AM
30#include "hashtab.h"
31#include "hash-set.h"
83685514
AM
32#include "machmode.h"
33#include "input.h"
49ad7cfa 34#include "function.h"
60393bbc
AM
35#include "dominance.h"
36#include "cfg.h"
37#include "cfgrtl.h"
38#include "cfganal.h"
39#include "cfgcleanup.h"
40#include "basic-block.h"
41#include "flags.h"
42#include "insn-config.h"
43#include "recog.h"
40e23961 44#include "symtab.h"
36566b39
PK
45#include "statistics.h"
46#include "double-int.h"
47#include "real.h"
48#include "fixed-value.h"
49#include "alias.h"
50#include "wide-int.h"
51#include "inchash.h"
52#include "tree.h"
53#include "expmed.h"
54#include "dojump.h"
55#include "explow.h"
56#include "calls.h"
57#include "emit-rtl.h"
58#include "varasm.h"
59#include "stmt.h"
956d6950 60#include "expr.h"
718f9c0f 61#include "diagnostic-core.h"
50b2596f 62#include "toplev.h"
1497faf6 63#include "ggc.h"
26771da7 64#include "except.h"
3c50106f 65#include "target.h"
9bf8cfbf 66#include "params.h"
2f93eea8 67#include "rtlhooks-def.h"
ef330312 68#include "tree-pass.h"
6fb5fa3c
DB
69#include "df.h"
70#include "dbgcnt.h"
e89b312e 71#include "rtl-iter.h"
7afe21cc
RK
72
73/* The basic idea of common subexpression elimination is to go
74 through the code, keeping a record of expressions that would
75 have the same value at the current scan point, and replacing
76 expressions encountered with the cheapest equivalent expression.
77
78 It is too complicated to keep track of the different possibilities
e48a7fbe
JL
79 when control paths merge in this code; so, at each label, we forget all
80 that is known and start fresh. This can be described as processing each
81 extended basic block separately. We have a separate pass to perform
82 global CSE.
83
84 Note CSE can turn a conditional or computed jump into a nop or
85 an unconditional jump. When this occurs we arrange to run the jump
86 optimizer after CSE to delete the unreachable code.
7afe21cc
RK
87
88 We use two data structures to record the equivalent expressions:
1bb98cec
DM
89 a hash table for most expressions, and a vector of "quantity
90 numbers" to record equivalent (pseudo) registers.
7afe21cc
RK
91
92 The use of the special data structure for registers is desirable
93 because it is faster. It is possible because registers references
94 contain a fairly small number, the register number, taken from
95 a contiguously allocated series, and two register references are
96 identical if they have the same number. General expressions
97 do not have any such thing, so the only way to retrieve the
98 information recorded on an expression other than a register
99 is to keep it in a hash table.
100
101Registers and "quantity numbers":
278a83b2 102
7afe21cc
RK
103 At the start of each basic block, all of the (hardware and pseudo)
104 registers used in the function are given distinct quantity
105 numbers to indicate their contents. During scan, when the code
106 copies one register into another, we copy the quantity number.
107 When a register is loaded in any other way, we allocate a new
108 quantity number to describe the value generated by this operation.
459281be 109 `REG_QTY (N)' records what quantity register N is currently thought
7afe21cc
RK
110 of as containing.
111
08a69267 112 All real quantity numbers are greater than or equal to zero.
459281be 113 If register N has not been assigned a quantity, `REG_QTY (N)' will
08a69267 114 equal -N - 1, which is always negative.
7afe21cc 115
08a69267
RS
116 Quantity numbers below zero do not exist and none of the `qty_table'
117 entries should be referenced with a negative index.
7afe21cc
RK
118
119 We also maintain a bidirectional chain of registers for each
1bb98cec
DM
120 quantity number. The `qty_table` members `first_reg' and `last_reg',
121 and `reg_eqv_table' members `next' and `prev' hold these chains.
7afe21cc
RK
122
123 The first register in a chain is the one whose lifespan is least local.
124 Among equals, it is the one that was seen first.
125 We replace any equivalent register with that one.
126
127 If two registers have the same quantity number, it must be true that
1bb98cec 128 REG expressions with qty_table `mode' must be in the hash table for both
7afe21cc
RK
129 registers and must be in the same class.
130
131 The converse is not true. Since hard registers may be referenced in
132 any mode, two REG expressions might be equivalent in the hash table
133 but not have the same quantity number if the quantity number of one
134 of the registers is not the same mode as those expressions.
278a83b2 135
7afe21cc
RK
136Constants and quantity numbers
137
138 When a quantity has a known constant value, that value is stored
1bb98cec 139 in the appropriate qty_table `const_rtx'. This is in addition to
7afe21cc
RK
140 putting the constant in the hash table as is usual for non-regs.
141
d45cf215 142 Whether a reg or a constant is preferred is determined by the configuration
7afe21cc
RK
143 macro CONST_COSTS and will often depend on the constant value. In any
144 event, expressions containing constants can be simplified, by fold_rtx.
145
146 When a quantity has a known nearly constant value (such as an address
1bb98cec
DM
147 of a stack slot), that value is stored in the appropriate qty_table
148 `const_rtx'.
7afe21cc
RK
149
150 Integer constants don't have a machine mode. However, cse
151 determines the intended machine mode from the destination
152 of the instruction that moves the constant. The machine mode
153 is recorded in the hash table along with the actual RTL
154 constant expression so that different modes are kept separate.
155
156Other expressions:
157
158 To record known equivalences among expressions in general
159 we use a hash table called `table'. It has a fixed number of buckets
160 that contain chains of `struct table_elt' elements for expressions.
161 These chains connect the elements whose expressions have the same
162 hash codes.
163
164 Other chains through the same elements connect the elements which
165 currently have equivalent values.
166
167 Register references in an expression are canonicalized before hashing
1bb98cec 168 the expression. This is done using `reg_qty' and qty_table `first_reg'.
7afe21cc
RK
169 The hash code of a register reference is computed using the quantity
170 number, not the register number.
171
172 When the value of an expression changes, it is necessary to remove from the
173 hash table not just that expression but all expressions whose values
174 could be different as a result.
175
176 1. If the value changing is in memory, except in special cases
177 ANYTHING referring to memory could be changed. That is because
178 nobody knows where a pointer does not point.
179 The function `invalidate_memory' removes what is necessary.
180
181 The special cases are when the address is constant or is
182 a constant plus a fixed register such as the frame pointer
183 or a static chain pointer. When such addresses are stored in,
184 we can tell exactly which other such addresses must be invalidated
185 due to overlap. `invalidate' does this.
186 All expressions that refer to non-constant
187 memory addresses are also invalidated. `invalidate_memory' does this.
188
189 2. If the value changing is a register, all expressions
190 containing references to that register, and only those,
191 must be removed.
192
193 Because searching the entire hash table for expressions that contain
194 a register is very slow, we try to figure out when it isn't necessary.
195 Precisely, this is necessary only when expressions have been
196 entered in the hash table using this register, and then the value has
197 changed, and then another expression wants to be added to refer to
198 the register's new value. This sequence of circumstances is rare
199 within any one basic block.
200
459281be
KH
201 `REG_TICK' and `REG_IN_TABLE', accessors for members of
202 cse_reg_info, are used to detect this case. REG_TICK (i) is
203 incremented whenever a value is stored in register i.
204 REG_IN_TABLE (i) holds -1 if no references to register i have been
205 entered in the table; otherwise, it contains the value REG_TICK (i)
206 had when the references were entered. If we want to enter a
207 reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
208 remove old references. Until we want to enter a new entry, the
209 mere fact that the two vectors don't match makes the entries be
210 ignored if anyone tries to match them.
7afe21cc
RK
211
212 Registers themselves are entered in the hash table as well as in
459281be
KH
213 the equivalent-register chains. However, `REG_TICK' and
214 `REG_IN_TABLE' do not apply to expressions which are simple
7afe21cc
RK
215 register references. These expressions are removed from the table
216 immediately when they become invalid, and this can be done even if
217 we do not immediately search for all the expressions that refer to
218 the register.
219
220 A CLOBBER rtx in an instruction invalidates its operand for further
221 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
222 invalidates everything that resides in memory.
223
224Related expressions:
225
226 Constant expressions that differ only by an additive integer
227 are called related. When a constant expression is put in
228 the table, the related expression with no constant term
229 is also entered. These are made to point at each other
230 so that it is possible to find out if there exists any
231 register equivalent to an expression related to a given expression. */
278a83b2 232
1bb98cec
DM
233/* Length of qty_table vector. We know in advance we will not need
234 a quantity number this big. */
7afe21cc
RK
235
236static int max_qty;
237
238/* Next quantity number to be allocated.
239 This is 1 + the largest number needed so far. */
240
241static int next_qty;
242
1bb98cec 243/* Per-qty information tracking.
7afe21cc 244
1bb98cec
DM
245 `first_reg' and `last_reg' track the head and tail of the
246 chain of registers which currently contain this quantity.
7afe21cc 247
1bb98cec 248 `mode' contains the machine mode of this quantity.
7afe21cc 249
1bb98cec
DM
250 `const_rtx' holds the rtx of the constant value of this
251 quantity, if known. A summations of the frame/arg pointer
252 and a constant can also be entered here. When this holds
253 a known value, `const_insn' is the insn which stored the
254 constant value.
7afe21cc 255
1bb98cec
DM
256 `comparison_{code,const,qty}' are used to track when a
257 comparison between a quantity and some constant or register has
258 been passed. In such a case, we know the results of the comparison
259 in case we see it again. These members record a comparison that
260 is known to be true. `comparison_code' holds the rtx code of such
261 a comparison, else it is set to UNKNOWN and the other two
262 comparison members are undefined. `comparison_const' holds
263 the constant being compared against, or zero if the comparison
264 is not against a constant. `comparison_qty' holds the quantity
265 being compared against when the result is known. If the comparison
266 is not with a register, `comparison_qty' is -1. */
7afe21cc 267
1bb98cec
DM
struct qty_table_elem
{
  rtx const_rtx;		/* Known constant value of this quantity, or
				   a frame/arg-pointer-plus-constant sum;
				   zero if none is known.  */
  rtx_insn *const_insn;		/* Insn that stored CONST_RTX, when it holds
				   a known value.  */
  rtx comparison_const;		/* Constant this quantity is known to have
				   been compared against, or zero if the
				   comparison was not against a constant.  */
  int comparison_qty;		/* Quantity compared against when the result
				   is known; -1 if not with a register.  */
  unsigned int first_reg, last_reg;  /* Head and tail of the chain of
					registers currently holding this
					quantity.  */
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;	/* Code of a known-true
						   comparison, else UNKNOWN.  */
  ENUM_BITFIELD(machine_mode) mode : 8;		/* Machine mode of this
						   quantity.  */
};
7afe21cc 280
1bb98cec
DM
281/* The table of all qtys, indexed by qty number. */
282static struct qty_table_elem *qty_table;
7afe21cc 283
7afe21cc
RK
284/* For machines that have a CC0, we do not record its value in the hash
285 table since its use is guaranteed to be the insn immediately following
286 its definition and any other insn is presumed to invalidate it.
287
96fb470d
SB
288 Instead, we store below the current and last value assigned to CC0.
289 If it should happen to be a constant, it is stored in preference
290 to the actual assigned value. In case it is a constant, we store
291 the mode in which the constant should be interpreted. */
7afe21cc 292
96fb470d 293static rtx this_insn_cc0, prev_insn_cc0;
ef4bddc2 294static machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;
7afe21cc
RK
295
296/* Insn being scanned. */
297
20468884 298static rtx_insn *this_insn;
f40751dd 299static bool optimize_this_for_speed_p;
7afe21cc 300
71d306d1
DE
301/* Index by register number, gives the number of the next (or
302 previous) register in the chain of registers sharing the same
7afe21cc
RK
303 value.
304
305 Or -1 if this register is at the end of the chain.
306
459281be 307 If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined. */
1bb98cec
DM
308
/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;	/* Register numbers of the next/previous register in
			   the chain sharing the same value, or -1 at either
			   end of the chain (see commentary above).  */
};
7afe21cc 314
1bb98cec
DM
315/* The table of all register equivalence chains. */
316static struct reg_eqv_elem *reg_eqv_table;
7afe21cc 317
14a774a9
RK
/* Per-register bookkeeping used by CSE; see also the REG_TICK,
   REG_IN_TABLE, SUBREG_TICKED and REG_QTY accessor macros below.  */
struct cse_reg_info
{
  /* The timestamp at which this register is initialized.  */
  unsigned int timestamp;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};
7afe21cc 340
bc5e3b54 341/* A table of cse_reg_info indexed by register numbers. */
f00822b2 342static struct cse_reg_info *cse_reg_info_table;
c1edba58 343
bc5e3b54
KH
344/* The size of the above table. */
345static unsigned int cse_reg_info_table_size;
9b1549b8 346
bc5e3b54
KH
347/* The index of the first entry that has not been initialized. */
348static unsigned int cse_reg_info_table_first_uninitialized;
7afe21cc 349
bc5e3b54 350/* The timestamp at the beginning of the current run of
932ad4d9
SB
351 cse_extended_basic_block. We increment this variable at the beginning of
352 the current run of cse_extended_basic_block. The timestamp field of a
bc5e3b54
KH
353 cse_reg_info entry matches the value of this variable if and only
354 if the entry has been initialized during the current run of
932ad4d9 355 cse_extended_basic_block. */
bc5e3b54 356static unsigned int cse_reg_info_timestamp;
7afe21cc 357
278a83b2 358/* A HARD_REG_SET containing all the hard registers for which there is
7afe21cc
RK
359 currently a REG expression in the hash table. Note the difference
360 from the above variables, which indicate if the REG is mentioned in some
361 expression in the table. */
362
363static HARD_REG_SET hard_regs_in_table;
364
2aac3a01
EB
365/* True if CSE has altered the CFG. */
366static bool cse_cfg_altered;
7afe21cc 367
2aac3a01
EB
368/* True if CSE has altered conditional jump insns in such a way
369 that jump optimization should be redone. */
370static bool cse_jumps_altered;
7afe21cc 371
2aac3a01
EB
372/* True if we put a LABEL_REF into the hash table for an INSN
373 without a REG_LABEL_OPERAND, we have to rerun jump after CSE
374 to put in the note. */
375static bool recorded_label_ref;
a5dfb4ee 376
7afe21cc
RK
377/* canon_hash stores 1 in do_not_record
378 if it notices a reference to CC0, PC, or some other volatile
379 subexpression. */
380
381static int do_not_record;
382
383/* canon_hash stores 1 in hash_arg_in_memory
384 if it notices a reference to memory within the expression being hashed. */
385
386static int hash_arg_in_memory;
387
7afe21cc
RK
388/* The hash table contains buckets which are chains of `struct table_elt's,
389 each recording one expression's information.
390 That expression is in the `exp' field.
391
db048faf
MM
392 The canon_exp field contains a canonical (from the point of view of
393 alias analysis) version of the `exp' field.
394
7afe21cc
RK
395 Those elements with the same hash code are chained in both directions
396 through the `next_same_hash' and `prev_same_hash' fields.
397
398 Each set of expressions with equivalent values
399 are on a two-way chain through the `next_same_value'
400 and `prev_same_value' fields, and all point with
401 the `first_same_value' field at the first element in
402 that chain. The chain is in order of increasing cost.
403 Each element's cost value is in its `cost' field.
404
405 The `in_memory' field is nonzero for elements that
406 involve any reference to memory. These elements are removed
407 whenever a write is done to an unidentified location in memory.
408 To be safe, we assume that a memory address is unidentified unless
409 the address is either a symbol constant or a constant plus
410 the frame pointer or argument pointer.
411
7afe21cc
RK
412 The `related_value' field is used to connect related expressions
413 (that differ by adding an integer).
414 The related expressions are chained in a circular fashion.
415 `related_value' is zero for expressions for which this
416 chain is not useful.
417
418 The `cost' field stores the cost of this element's expression.
630c79be
BS
419 The `regcost' field stores the value returned by approx_reg_cost for
420 this element's expression.
7afe21cc
RK
421
422 The `is_const' flag is set if the element is a constant (including
423 a fixed address).
424
425 The `flag' field is used as a temporary during some search routines.
426
427 The `mode' field is usually the same as GET_MODE (`exp'), but
428 if `exp' is a CONST_INT and has no machine mode then the `mode'
429 field is the mode it was being used as. Each constant is
430 recorded separately for each mode it is used with. */
431
7afe21cc
RK
/* One entry in the expression hash table; see the commentary above for
   how the chains are linked.  */
struct table_elt
{
  rtx exp;			/* The expression this element records.  */
  rtx canon_exp;		/* Canonical (alias-analysis) form of EXP.  */
  struct table_elt *next_same_hash;	/* Two-way chain of elements with the
					   same hash code.  */
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;	/* Two-way chain of elements with
					   equivalent values, ordered by
					   increasing cost.  */
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;	/* First element of the value chain.  */
  struct table_elt *related_value;	/* Circular chain of expressions that
					   differ by an additive integer; zero
					   when not useful.  */
  int cost;			/* Cost of EXP (see the COST macro).  */
  int regcost;			/* Value returned by approx_reg_cost for
				   EXP.  */
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;	/* Usually GET_MODE (exp); for a
					   CONST_INT, the mode it was being
					   used as.  */
  char in_memory;		/* Nonzero if EXP involves a memory
				   reference.  */
  char is_const;		/* Nonzero if EXP is a constant (including a
				   fixed address).  */
  char flag;			/* Temporary used during some search
				   routines.  */
};
451
7afe21cc
RK
452/* We don't want a lot of buckets, because we rarely have very many
453 things stored in the hash table, and a lot of buckets slows
454 down a lot of loops that happen frequently. */
9b1549b8
DM
455#define HASH_SHIFT 5
456#define HASH_SIZE (1 << HASH_SHIFT)
457#define HASH_MASK (HASH_SIZE - 1)
7afe21cc
RK
458
459/* Compute hash code of X in mode M. Special-case case where X is a pseudo
460 register (hard registers may require `do_not_record' to be set). */
461
462#define HASH(X, M) \
f8cfc6aa 463 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
9b1549b8
DM
464 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
465 : canon_hash (X, M)) & HASH_MASK)
7afe21cc 466
0516f6fe
SB
467/* Like HASH, but without side-effects. */
468#define SAFE_HASH(X, M) \
469 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
470 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
471 : safe_hash (X, M)) & HASH_MASK)
472
630c79be
BS
473/* Determine whether register number N is considered a fixed register for the
474 purpose of approximating register costs.
7afe21cc
RK
475 It is desirable to replace other regs with fixed regs, to reduce need for
476 non-fixed hard regs.
553687c9 477 A reg wins if it is either the frame pointer or designated as fixed. */
7afe21cc 478#define FIXED_REGNO_P(N) \
8bc169f2 479 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
6ab832bc 480 || fixed_regs[N] || global_regs[N])
7afe21cc
RK
481
482/* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
ac07e066
RK
483 hard registers and pointers into the frame are the cheapest with a cost
484 of 0. Next come pseudos with a cost of one and other hard registers with
485 a cost of 2. Aside from these special cases, call `rtx_cost'. */
486
d67fb775 487#define CHEAP_REGNO(N) \
c3284718 488 (REGNO_PTR_FRAME_P (N) \
d67fb775 489 || (HARD_REGISTER_NUM_P (N) \
e7bb59fa 490 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
7afe21cc 491
68f932c4
RS
492#define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET, 1))
493#define COST_IN(X, OUTER, OPNO) (REG_P (X) ? 0 : notreg_cost (X, OUTER, OPNO))
7afe21cc 494
30f72379
MM
495/* Get the number of times this register has been updated in this
496 basic block. */
497
bc5e3b54 498#define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)
30f72379
MM
499
500/* Get the point at which REG was recorded in the table. */
501
bc5e3b54 502#define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)
30f72379 503
46081bb3
SH
504/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
505 SUBREG). */
506
bc5e3b54 507#define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)
46081bb3 508
30f72379
MM
509/* Get the quantity number for REG. */
510
bc5e3b54 511#define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)
30f72379 512
7afe21cc 513/* Determine if the quantity number for register X represents a valid index
1bb98cec 514 into the qty_table. */
7afe21cc 515
08a69267 516#define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
7afe21cc 517
2c5bfdf7
AN
518/* Compare table_elt X and Y and return true iff X is cheaper than Y. */
519
520#define CHEAPER(X, Y) \
521 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
522
9b1549b8 523static struct table_elt *table[HASH_SIZE];
7afe21cc
RK
524
525/* Chain of `struct table_elt's made so far for this function
526 but currently removed from the table. */
527
528static struct table_elt *free_element_chain;
529
7afe21cc
RK
530/* Set to the cost of a constant pool reference if one was found for a
531 symbolic constant. If this was found, it means we should try to
532 convert constants into constant pool entries if they don't fit in
533 the insn. */
534
535static int constant_pool_entries_cost;
dd0ba281 536static int constant_pool_entries_regcost;
7afe21cc 537
24b97832
ILT
/* Trace a path through the CFG.  */

struct branch_path
{
  /* The basic block for this path entry.  */
  basic_block bb;
};
545
932ad4d9
SB
546/* This data describes a block that will be processed by
547 cse_extended_basic_block. */
6cd4575e 548
14a774a9
RK
/* This data describes a block that will be processed by
   cse_extended_basic_block.  */
struct cse_basic_block_data
{
  /* Total number of SETs in block.  */
  int nsets;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current path, indicating which basic_blocks will be processed.  */
  struct branch_path *path;
};
558
6fb5fa3c
DB
559
560/* Pointers to the live in/live out bitmaps for the boundaries of the
561 current EBB. */
562static bitmap cse_ebb_live_in, cse_ebb_live_out;
563
932ad4d9
SB
564/* A simple bitmap to track which basic blocks have been visited
565 already as part of an already processed extended basic block. */
566static sbitmap cse_visited_basic_blocks;
567
7080f735 568static bool fixed_base_plus_p (rtx x);
68f932c4 569static int notreg_cost (rtx, enum rtx_code, int);
56ae04af 570static int preferable (int, int, int, int);
7080f735 571static void new_basic_block (void);
ef4bddc2 572static void make_new_qty (unsigned int, machine_mode);
7080f735
AJ
573static void make_regs_eqv (unsigned int, unsigned int);
574static void delete_reg_equiv (unsigned int);
575static int mention_regs (rtx);
576static int insert_regs (rtx, struct table_elt *, int);
577static void remove_from_table (struct table_elt *, unsigned);
d556d181 578static void remove_pseudo_from_table (rtx, unsigned);
ef4bddc2
RS
579static struct table_elt *lookup (rtx, unsigned, machine_mode);
580static struct table_elt *lookup_for_remove (rtx, unsigned, machine_mode);
7080f735 581static rtx lookup_as_function (rtx, enum rtx_code);
2c5bfdf7 582static struct table_elt *insert_with_costs (rtx, struct table_elt *, unsigned,
ef4bddc2 583 machine_mode, int, int);
7080f735 584static struct table_elt *insert (rtx, struct table_elt *, unsigned,
ef4bddc2 585 machine_mode);
7080f735 586static void merge_equiv_classes (struct table_elt *, struct table_elt *);
ef4bddc2 587static void invalidate (rtx, machine_mode);
7080f735
AJ
588static void remove_invalid_refs (unsigned int);
589static void remove_invalid_subreg_refs (unsigned int, unsigned int,
ef4bddc2 590 machine_mode);
7080f735
AJ
591static void rehash_using_reg (rtx);
592static void invalidate_memory (void);
593static void invalidate_for_call (void);
594static rtx use_related_value (rtx, struct table_elt *);
0516f6fe 595
ef4bddc2
RS
596static inline unsigned canon_hash (rtx, machine_mode);
597static inline unsigned safe_hash (rtx, machine_mode);
e855c69d 598static inline unsigned hash_rtx_string (const char *);
0516f6fe 599
20468884 600static rtx canon_reg (rtx, rtx_insn *);
7080f735 601static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
ef4bddc2
RS
602 machine_mode *,
603 machine_mode *);
20468884 604static rtx fold_rtx (rtx, rtx_insn *);
7080f735 605static rtx equiv_constant (rtx);
20468884 606static void record_jump_equiv (rtx_insn *, bool);
ef4bddc2 607static void record_jump_cond (enum rtx_code, machine_mode, rtx, rtx,
7080f735 608 int);
20468884 609static void cse_insn (rtx_insn *);
932ad4d9 610static void cse_prescan_path (struct cse_basic_block_data *);
20468884
DM
611static void invalidate_from_clobbers (rtx_insn *);
612static void invalidate_from_sets_and_clobbers (rtx_insn *);
6fb5fa3c 613static rtx cse_process_notes (rtx, rtx, bool *);
932ad4d9 614static void cse_extended_basic_block (struct cse_basic_block_data *);
7080f735 615extern void dump_class (struct table_elt*);
bc5e3b54
KH
616static void get_cse_reg_info_1 (unsigned int regno);
617static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
7080f735
AJ
618
619static void flush_hash_table (void);
20468884
DM
620static bool insn_live_p (rtx_insn *, int *);
621static bool set_live_p (rtx, rtx_insn *, int *);
20468884
DM
622static void cse_change_cc_mode_insn (rtx_insn *, rtx);
623static void cse_change_cc_mode_insns (rtx_insn *, rtx_insn *, rtx);
ef4bddc2 624static machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
31e9ebaf 625 bool);
7afe21cc 626\f
2f93eea8
PB
627
628#undef RTL_HOOKS_GEN_LOWPART
629#define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible
630
631static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
632\f
6399c0ab 633/* Nonzero if X has the form (PLUS frame-pointer integer). */
4977bab6
ZW
634
635static bool
7080f735 636fixed_base_plus_p (rtx x)
4977bab6
ZW
637{
638 switch (GET_CODE (x))
639 {
640 case REG:
641 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
642 return true;
643 if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
644 return true;
4977bab6
ZW
645 return false;
646
647 case PLUS:
481683e1 648 if (!CONST_INT_P (XEXP (x, 1)))
4977bab6
ZW
649 return false;
650 return fixed_base_plus_p (XEXP (x, 0));
651
4977bab6
ZW
652 default:
653 return false;
654 }
655}
656
a4c6502a
MM
657/* Dump the expressions in the equivalence class indicated by CLASSP.
658 This function is used only for debugging. */
711417cd 659DEBUG_FUNCTION void
7080f735 660dump_class (struct table_elt *classp)
a4c6502a
MM
661{
662 struct table_elt *elt;
663
664 fprintf (stderr, "Equivalence chain for ");
665 print_rtl (stderr, classp->exp);
666 fprintf (stderr, ": \n");
278a83b2 667
a4c6502a
MM
668 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
669 {
670 print_rtl (stderr, elt->exp);
671 fprintf (stderr, "\n");
672 }
673}
674
e89b312e
RS
675/* Return an estimate of the cost of the registers used in an rtx.
676 This is mostly the number of different REG expressions in the rtx;
677 however for some exceptions like fixed registers we use a cost of
678 0. If any other hard register reference occurs, return MAX_COST. */
be8ac49a 679
630c79be 680static int
e89b312e 681approx_reg_cost (const_rtx x)
630c79be 682{
e89b312e
RS
683 int cost = 0;
684 subrtx_iterator::array_type array;
685 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
c863f8c2 686 {
e89b312e
RS
687 const_rtx x = *iter;
688 if (REG_P (x))
c863f8c2 689 {
e89b312e
RS
690 unsigned int regno = REGNO (x);
691 if (!CHEAP_REGNO (regno))
c863f8c2 692 {
e89b312e
RS
693 if (regno < FIRST_PSEUDO_REGISTER)
694 {
695 if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
696 return MAX_COST;
697 cost += 2;
698 }
699 else
700 cost += 1;
c863f8c2 701 }
c863f8c2
DM
702 }
703 }
c863f8c2 704 return cost;
630c79be
BS
705}
706
707/* Return a negative value if an rtx A, whose costs are given by COST_A
708 and REGCOST_A, is more desirable than an rtx B.
709 Return a positive value if A is less desirable, or 0 if the two are
710 equally good. */
711static int
56ae04af 712preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
630c79be 713{
423adbb9 714 /* First, get rid of cases involving expressions that are entirely
f1c1dfc3
BS
715 unwanted. */
716 if (cost_a != cost_b)
717 {
718 if (cost_a == MAX_COST)
719 return 1;
720 if (cost_b == MAX_COST)
721 return -1;
722 }
723
724 /* Avoid extending lifetimes of hardregs. */
725 if (regcost_a != regcost_b)
726 {
727 if (regcost_a == MAX_COST)
728 return 1;
729 if (regcost_b == MAX_COST)
730 return -1;
731 }
732
733 /* Normal operation costs take precedence. */
630c79be
BS
734 if (cost_a != cost_b)
735 return cost_a - cost_b;
f1c1dfc3 736 /* Only if these are identical consider effects on register pressure. */
630c79be
BS
737 if (regcost_a != regcost_b)
738 return regcost_a - regcost_b;
739 return 0;
740}
741
954a5693
RK
742/* Internal function, to compute cost when X is not a register; called
743 from COST macro to keep it simple. */
744
745static int
68f932c4 746notreg_cost (rtx x, enum rtx_code outer, int opno)
954a5693
RK
747{
748 return ((GET_CODE (x) == SUBREG
f8cfc6aa 749 && REG_P (SUBREG_REG (x))
954a5693
RK
750 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
751 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
752 && (GET_MODE_SIZE (GET_MODE (x))
753 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
754 && subreg_lowpart_p (x)
d0edd768
BS
755 && TRULY_NOOP_TRUNCATION_MODES_P (GET_MODE (x),
756 GET_MODE (SUBREG_REG (x))))
630c79be 757 ? 0
68f932c4 758 : rtx_cost (x, outer, opno, optimize_this_for_speed_p) * 2);
954a5693
RK
759}
760
01329426 761\f
/* Initialize CSE_REG_INFO_TABLE: make sure it can hold at least NREGS
   entries and that the first NREGS entries carry a stale timestamp so
   they will be (re)initialized lazily by get_cse_reg_info.  */

static void
init_cse_reg_info (unsigned int nregs)
{
  /* Do we need to grow the table?  */
  if (nregs > cse_reg_info_table_size)
    {
      unsigned int new_size;

      if (cse_reg_info_table_size < 2048)
	{
	  /* Compute a new size that is a power of 2 and no smaller
	     than the larger of NREGS and 64.  */
	  new_size = (cse_reg_info_table_size
		      ? cse_reg_info_table_size : 64);

	  while (new_size < nregs)
	    new_size *= 2;
	}
      else
	{
	  /* If we need a big table, allocate just enough to hold
	     NREGS registers.  */
	  new_size = nregs;
	}

      /* Reallocate the table with NEW_SIZE entries.  The old contents
	 need not be preserved; all entries are marked uninitialized.  */
      free (cse_reg_info_table);
      cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
      cse_reg_info_table_size = new_size;
      cse_reg_info_table_first_uninitialized = 0;
    }

  /* Do we have all of the first NREGS entries initialized?  */
  if (cse_reg_info_table_first_uninitialized < nregs)
    {
      unsigned int old_timestamp = cse_reg_info_timestamp - 1;
      unsigned int i;

      /* Put the old timestamp on newly allocated entries so that they
	 will all be considered out of date.  We do not touch those
	 entries beyond the first NREGS entries to be nice to the
	 virtual memory.  */
      for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
	cse_reg_info_table[i].timestamp = old_timestamp;

      cse_reg_info_table_first_uninitialized = nregs;
    }
}
812
a52aff23 813/* Given REGNO, initialize the cse_reg_info entry for REGNO. */
bc5e3b54
KH
814
815static void
816get_cse_reg_info_1 (unsigned int regno)
817{
818 /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
819 entry will be considered to have been initialized. */
820 cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;
821
822 /* Initialize the rest of the entry. */
823 cse_reg_info_table[regno].reg_tick = 1;
824 cse_reg_info_table[regno].reg_in_table = -1;
825 cse_reg_info_table[regno].subreg_ticked = -1;
826 cse_reg_info_table[regno].reg_qty = -regno - 1;
827}
828
829/* Find a cse_reg_info entry for REGNO. */
30f72379 830
bc5e3b54
KH
831static inline struct cse_reg_info *
832get_cse_reg_info (unsigned int regno)
833{
834 struct cse_reg_info *p = &cse_reg_info_table[regno];
835
782c0a3e
KH
836 /* If this entry has not been initialized, go ahead and initialize
837 it. */
bc5e3b54
KH
838 if (p->timestamp != cse_reg_info_timestamp)
839 get_cse_reg_info_1 (regno);
30f72379 840
9b1549b8 841 return p;
30f72379
MM
842}
843
7afe21cc
RK
844/* Clear the hash table and initialize each register with its own quantity,
845 for a new basic block. */
846
847static void
7080f735 848new_basic_block (void)
7afe21cc 849{
b3694847 850 int i;
7afe21cc 851
08a69267 852 next_qty = 0;
7afe21cc 853
a52aff23 854 /* Invalidate cse_reg_info_table. */
bc5e3b54 855 cse_reg_info_timestamp++;
7afe21cc 856
bc5e3b54 857 /* Clear out hash table state for this pass. */
7afe21cc
RK
858 CLEAR_HARD_REG_SET (hard_regs_in_table);
859
860 /* The per-quantity values used to be initialized here, but it is
861 much faster to initialize each as it is made in `make_new_qty'. */
862
9b1549b8 863 for (i = 0; i < HASH_SIZE; i++)
7afe21cc 864 {
9b1549b8
DM
865 struct table_elt *first;
866
867 first = table[i];
868 if (first != NULL)
7afe21cc 869 {
9b1549b8
DM
870 struct table_elt *last = first;
871
872 table[i] = NULL;
873
874 while (last->next_same_hash != NULL)
875 last = last->next_same_hash;
876
877 /* Now relink this hash entire chain into
878 the free element list. */
879
880 last->next_same_hash = free_element_chain;
881 free_element_chain = first;
7afe21cc
RK
882 }
883 }
884
7afe21cc 885 prev_insn_cc0 = 0;
7afe21cc
RK
886}
887
1bb98cec
DM
888/* Say that register REG contains a quantity in mode MODE not in any
889 register before and initialize that quantity. */
7afe21cc
RK
890
891static void
ef4bddc2 892make_new_qty (unsigned int reg, machine_mode mode)
7afe21cc 893{
b3694847
SS
894 int q;
895 struct qty_table_elem *ent;
896 struct reg_eqv_elem *eqv;
7afe21cc 897
341c100f 898 gcc_assert (next_qty < max_qty);
7afe21cc 899
30f72379 900 q = REG_QTY (reg) = next_qty++;
1bb98cec
DM
901 ent = &qty_table[q];
902 ent->first_reg = reg;
903 ent->last_reg = reg;
904 ent->mode = mode;
20468884 905 ent->const_rtx = ent->const_insn = NULL;
1bb98cec
DM
906 ent->comparison_code = UNKNOWN;
907
908 eqv = &reg_eqv_table[reg];
909 eqv->next = eqv->prev = -1;
7afe21cc
RK
910}
911
/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (unsigned int new_reg, unsigned int old_reg)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old_reg);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  gcc_assert (REGNO_QTY_VALID_P (old_reg));

  REG_QTY (new_reg) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new_reg >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new_reg) != NO_REGS)
      && ((new_reg < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new_reg))
	  || (new_reg >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || (bitmap_bit_p (cse_ebb_live_out, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_out, firstr))
		  || (bitmap_bit_p (cse_ebb_live_in, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_in, firstr))))))
    {
      /* NEW_REG becomes the new canonical head of the class: link it in
	 front of the old head FIRSTR.  */
      reg_eqv_table[firstr].prev = new_reg;
      reg_eqv_table[new_reg].next = firstr;
      reg_eqv_table[new_reg].prev = -1;
      ent->first_reg = new_reg;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new_reg >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      /* Splice NEW_REG into the doubly linked list directly after LASTR,
	 updating the class's last_reg if LASTR was the tail.  */
      reg_eqv_table[new_reg].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new_reg;
      else
	qty_table[q].last_reg = new_reg;
      reg_eqv_table[lastr].next = new_reg;
      reg_eqv_table[new_reg].prev = lastr;
    }
}
973
974/* Remove REG from its equivalence class. */
975
976static void
7080f735 977delete_reg_equiv (unsigned int reg)
7afe21cc 978{
b3694847
SS
979 struct qty_table_elem *ent;
980 int q = REG_QTY (reg);
981 int p, n;
7afe21cc 982
a4e262bc 983 /* If invalid, do nothing. */
08a69267 984 if (! REGNO_QTY_VALID_P (reg))
7afe21cc
RK
985 return;
986
1bb98cec
DM
987 ent = &qty_table[q];
988
989 p = reg_eqv_table[reg].prev;
990 n = reg_eqv_table[reg].next;
a4e262bc 991
7afe21cc 992 if (n != -1)
1bb98cec 993 reg_eqv_table[n].prev = p;
7afe21cc 994 else
1bb98cec 995 ent->last_reg = p;
7afe21cc 996 if (p != -1)
1bb98cec 997 reg_eqv_table[p].next = n;
7afe21cc 998 else
1bb98cec 999 ent->first_reg = n;
7afe21cc 1000
08a69267 1001 REG_QTY (reg) = -reg - 1;
7afe21cc
RK
1002}
1003
/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = END_REGNO (x);
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  /* REG_IN_TABLE lagging REG_TICK means table entries that
	     mention this register are stale; purge them.  */
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	  SUBREG_TICKED (i) = -1;
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && REG_P (SUBREG_REG (x))
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
	     the last store to this register really stored into this
	     subreg, then remove the memory of this subreg.
	     Otherwise, remove any memory of the entire register and
	     all its subregs from the table.  */
	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || COMPARISON_P (x))
    {
      if (REG_P (XEXP (x, 0))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (REG_P (XEXP (x, 1))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  /* Recurse into every rtx ('e') and rtx-vector ('E') operand of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}
1112
/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (rtx x, struct table_elt *classp, int modified)
{
  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  /* First try to join an existing equivalence class that
	     already contains a register of the same mode.  */
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (REG_P (classp->exp)
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  unsigned c_regno = REGNO (classp->exp);

		  gcc_assert (REGNO_QTY_VALID_P (c_regno));

		  /* Suppose that 5 is hard reg and 100 and 101 are
		     pseudos.  Consider

		     (set (reg:si 100) (reg:si 5))
		     (set (reg:si 5) (reg:si 100))
		     (set (reg:di 101) (reg:di 5))

		     We would now set REG_QTY (101) = REG_QTY (5), but the
		     entry for 5 is in SImode.  When we use this later in
		     copy propagation, we get the register in wrong mode.  */
		  if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
		    continue;

		  make_regs_eqv (regno, c_regno);
		  return 1;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}
1209\f
2c5bfdf7
AN
1210
1211/* Compute upper and lower anchors for CST. Also compute the offset of CST
1212 from these anchors/bases such that *_BASE + *_OFFS = CST. Return false iff
1213 CST is equal to an anchor. */
1214
1215static bool
1216compute_const_anchors (rtx cst,
1217 HOST_WIDE_INT *lower_base, HOST_WIDE_INT *lower_offs,
1218 HOST_WIDE_INT *upper_base, HOST_WIDE_INT *upper_offs)
1219{
1220 HOST_WIDE_INT n = INTVAL (cst);
1221
1222 *lower_base = n & ~(targetm.const_anchor - 1);
1223 if (*lower_base == n)
1224 return false;
1225
1226 *upper_base =
1227 (n + (targetm.const_anchor - 1)) & ~(targetm.const_anchor - 1);
1228 *upper_offs = n - *upper_base;
1229 *lower_offs = n - *lower_base;
1230 return true;
1231}
1232
/* Insert the equivalence between ANCHOR and (REG + OFF) in mode MODE.  */

static void
insert_const_anchor (HOST_WIDE_INT anchor, rtx reg, HOST_WIDE_INT offs,
		     machine_mode mode)
{
  struct table_elt *elt;
  unsigned hash;
  rtx anchor_exp;
  rtx exp;

  /* Make sure the anchor constant itself has a hash table entry to
     attach the reg+offset expression to.  */
  anchor_exp = GEN_INT (anchor);
  hash = HASH (anchor_exp, mode);
  elt = lookup (anchor_exp, hash, mode);
  if (!elt)
    elt = insert (anchor_exp, NULL, hash, mode);

  exp = plus_constant (mode, reg, offs);
  /* REG has just been inserted and the hash codes recomputed.  */
  mention_regs (exp);
  hash = HASH (exp, mode);

  /* Use the cost of the register rather than the whole expression.  When
     looking up constant anchors we will further offset the corresponding
     expression therefore it does not make sense to prefer REGs over
     reg-immediate additions.  Prefer instead the oldest expression.  Also
     don't prefer pseudos over hard regs so that we derive constants in
     argument registers from other argument registers rather than from the
     original pseudo that was used to synthesize the constant.  */
  insert_with_costs (exp, elt, hash, mode, COST (reg), 1);
}
1264
1265/* The constant CST is equivalent to the register REG. Create
1266 equivalences between the two anchors of CST and the corresponding
1267 register-offset expressions using REG. */
1268
1269static void
ef4bddc2 1270insert_const_anchors (rtx reg, rtx cst, machine_mode mode)
2c5bfdf7
AN
1271{
1272 HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
1273
1274 if (!compute_const_anchors (cst, &lower_base, &lower_offs,
1275 &upper_base, &upper_offs))
1276 return;
1277
1278 /* Ignore anchors of value 0. Constants accessible from zero are
1279 simple. */
1280 if (lower_base != 0)
1281 insert_const_anchor (lower_base, reg, -lower_offs, mode);
1282
1283 if (upper_base != 0)
1284 insert_const_anchor (upper_base, reg, -upper_offs, mode);
1285}
1286
/* We need to express ANCHOR_ELT->exp + OFFS.  Walk the equivalence list of
   ANCHOR_ELT and see if offsetting any of the entries by OFFS would create a
   valid expression.  Return the cheapest and oldest of such expressions.  In
   *OLD, return how old the resulting expression is compared to the other
   equivalent expressions.  */

static rtx
find_reg_offset_for_const (struct table_elt *anchor_elt, HOST_WIDE_INT offs,
			   unsigned *old)
{
  struct table_elt *elt;
  unsigned idx;
  struct table_elt *match_elt;
  rtx match;

  /* Find the cheapest and *oldest* expression to maximize the chance of
     reusing the same pseudo.  */

  match_elt = NULL;
  match = NULL_RTX;
  for (elt = anchor_elt->first_same_value, idx = 0;
       elt;
       elt = elt->next_same_value, idx++)
    {
      /* The class is kept cheapest-first, so once the held match is
	 strictly cheaper than ELT no later element can win.  */
      if (match_elt && CHEAPER (match_elt, elt))
	return match;

      if (REG_P (elt->exp)
	  || (GET_CODE (elt->exp) == PLUS
	      && REG_P (XEXP (elt->exp, 0))
	      && GET_CODE (XEXP (elt->exp, 1)) == CONST_INT))
	{
	  rtx x;

	  /* Ignore expressions that are no longer valid.  */
	  if (!REG_P (elt->exp) && !exp_equiv_p (elt->exp, elt->exp, 1, false))
	    continue;

	  x = plus_constant (GET_MODE (elt->exp), elt->exp, offs);
	  /* Accept a bare register, or a reg+offset whose offset stays
	     within one anchor distance of zero.  */
	  if (REG_P (x)
	      || (GET_CODE (x) == PLUS
		  && IN_RANGE (INTVAL (XEXP (x, 1)),
			       -targetm.const_anchor,
			       targetm.const_anchor - 1)))
	    {
	      match = x;
	      match_elt = elt;
	      *old = idx;
	    }
	}
    }

  return match;
}
1341
1342/* Try to express the constant SRC_CONST using a register+offset expression
1343 derived from a constant anchor. Return it if successful or NULL_RTX,
1344 otherwise. */
1345
1346static rtx
ef4bddc2 1347try_const_anchors (rtx src_const, machine_mode mode)
2c5bfdf7
AN
1348{
1349 struct table_elt *lower_elt, *upper_elt;
1350 HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
1351 rtx lower_anchor_rtx, upper_anchor_rtx;
1352 rtx lower_exp = NULL_RTX, upper_exp = NULL_RTX;
1353 unsigned lower_old, upper_old;
1354
40dbb05c
RS
1355 /* CONST_INT is used for CC modes, but we should leave those alone. */
1356 if (GET_MODE_CLASS (mode) == MODE_CC)
1357 return NULL_RTX;
1358
1359 gcc_assert (SCALAR_INT_MODE_P (mode));
2c5bfdf7
AN
1360 if (!compute_const_anchors (src_const, &lower_base, &lower_offs,
1361 &upper_base, &upper_offs))
1362 return NULL_RTX;
1363
1364 lower_anchor_rtx = GEN_INT (lower_base);
1365 upper_anchor_rtx = GEN_INT (upper_base);
1366 lower_elt = lookup (lower_anchor_rtx, HASH (lower_anchor_rtx, mode), mode);
1367 upper_elt = lookup (upper_anchor_rtx, HASH (upper_anchor_rtx, mode), mode);
1368
1369 if (lower_elt)
1370 lower_exp = find_reg_offset_for_const (lower_elt, lower_offs, &lower_old);
1371 if (upper_elt)
1372 upper_exp = find_reg_offset_for_const (upper_elt, upper_offs, &upper_old);
1373
1374 if (!lower_exp)
1375 return upper_exp;
1376 if (!upper_exp)
1377 return lower_exp;
1378
1379 /* Return the older expression. */
1380 return (upper_old > lower_old ? upper_exp : lower_exp);
1381}
1382\f
7afe21cc
RK
1383/* Look in or update the hash table. */
1384
7afe21cc
RK
/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (struct table_elt *elt, unsigned int hash)
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	/* ELT was the head of its class; every remaining member must be
	   retargeted at the new head (the element after ELT).  */
	struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}
1463
d556d181
EB
1464/* Same as above, but X is a pseudo-register. */
1465
1466static void
1467remove_pseudo_from_table (rtx x, unsigned int hash)
1468{
1469 struct table_elt *elt;
1470
1471 /* Because a pseudo-register can be referenced in more than one
1472 mode, we might have to remove more than one table entry. */
1473 while ((elt = lookup_for_remove (x, hash, VOIDmode)))
1474 remove_from_table (elt, hash);
1475}
1476
7afe21cc
RK
1477/* Look up X in the hash table and return its table element,
1478 or 0 if X is not in the table.
1479
1480 MODE is the machine-mode of X, or if X is an integer constant
1481 with VOIDmode then MODE is the mode with which X will be used.
1482
1483 Here we are satisfied to find an expression whose tree structure
1484 looks like X. */
1485
1486static struct table_elt *
ef4bddc2 1487lookup (rtx x, unsigned int hash, machine_mode mode)
7afe21cc 1488{
b3694847 1489 struct table_elt *p;
7afe21cc
RK
1490
1491 for (p = table[hash]; p; p = p->next_same_hash)
f8cfc6aa 1492 if (mode == p->mode && ((x == p->exp && REG_P (x))
0516f6fe 1493 || exp_equiv_p (x, p->exp, !REG_P (x), false)))
7afe21cc
RK
1494 return p;
1495
1496 return 0;
1497}
1498
1499/* Like `lookup' but don't care whether the table element uses invalid regs.
1500 Also ignore discrepancies in the machine mode of a register. */
1501
1502static struct table_elt *
ef4bddc2 1503lookup_for_remove (rtx x, unsigned int hash, machine_mode mode)
7afe21cc 1504{
b3694847 1505 struct table_elt *p;
7afe21cc 1506
f8cfc6aa 1507 if (REG_P (x))
7afe21cc 1508 {
770ae6cc
RK
1509 unsigned int regno = REGNO (x);
1510
7afe21cc
RK
1511 /* Don't check the machine mode when comparing registers;
1512 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1513 for (p = table[hash]; p; p = p->next_same_hash)
f8cfc6aa 1514 if (REG_P (p->exp)
7afe21cc
RK
1515 && REGNO (p->exp) == regno)
1516 return p;
1517 }
1518 else
1519 {
1520 for (p = table[hash]; p; p = p->next_same_hash)
0516f6fe
SB
1521 if (mode == p->mode
1522 && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
7afe21cc
RK
1523 return p;
1524 }
1525
1526 return 0;
1527}
1528
1529/* Look for an expression equivalent to X and with code CODE.
1530 If one is found, return that expression. */
1531
1532static rtx
7080f735 1533lookup_as_function (rtx x, enum rtx_code code)
7afe21cc 1534{
b3694847 1535 struct table_elt *p
0516f6fe 1536 = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));
770ae6cc 1537
7afe21cc
RK
1538 if (p == 0)
1539 return 0;
1540
1541 for (p = p->first_same_value; p; p = p->next_same_value)
770ae6cc
RK
1542 if (GET_CODE (p->exp) == code
1543 /* Make sure this is a valid entry in the table. */
0516f6fe 1544 && exp_equiv_p (p->exp, p->exp, 1, false))
770ae6cc 1545 return p->exp;
278a83b2 1546
7afe21cc
RK
1547 return 0;
1548}
1549
/* Insert X in the hash table, assuming HASH is its hash code and
   CLASSP is an element of the class it should go in (or 0 if a new
   class should be made).  COST is the cost of X and REG_COST is the
   cost of registers in X.  It is inserted at the proper position to
   keep the class in the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

static struct table_elt *
insert_with_costs (rtx x, struct table_elt *classp, unsigned int hash,
		   machine_mode mode, int cost, int reg_cost)
{
  struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));

  /* If X is a hard register, show it is being put in the table.  */
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    add_to_hard_reg_set (&hard_regs_in_table, GET_MODE (x), REGNO (x));

  /* Put an element for X into the right hash bucket.  */

  /* Reuse a previously freed element if one is available.  */
  elt = free_element_chain;
  if (elt)
    free_element_chain = elt->next_same_hash;
  else
    elt = XNEW (struct table_elt);

  elt->exp = x;
  elt->canon_exp = NULL_RTX;
  elt->cost = cost;
  elt->regcost = reg_cost;
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  /* All members must now point at the new head.  */
	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  struct table_elt *p, *next;

	  for (p = classp;
	       (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next)
	    ;

	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;

	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update the qtys `const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && REG_P (classp->exp)
      && !REG_P (x))
    {
      int exp_q = REG_QTY (REGNO (classp->exp));
      struct qty_table_elem *exp_ent = &qty_table[exp_q];

      exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
      exp_ent->const_insn = this_insn;
    }

  else if (REG_P (x)
	   && classp
	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
	   && ! elt->is_const)
    {
      struct table_elt *p;

      /* Search the class for a non-register constant to record.  */
      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const && !REG_P (p->exp))
	    {
	      int x_q = REG_QTY (REGNO (x));
	      struct qty_table_elem *x_ent = &qty_table[x_q];

	      x_ent->const_rtx
		= gen_lowpart (GET_MODE (x), p->exp);
	      x_ent->const_insn = this_insn;
	      break;
	    }
	}
    }

  else if (REG_P (x)
	   && qty_table[REG_QTY (REGNO (x))].const_rtx
	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = SAFE_HASH (subexp, mode);
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}
2c5bfdf7
AN
1736
1737/* Wrap insert_with_costs by passing the default costs. */
1738
1739static struct table_elt *
1740insert (rtx x, struct table_elt *classp, unsigned int hash,
ef4bddc2 1741 machine_mode mode)
2c5bfdf7
AN
1742{
1743 return
1744 insert_with_costs (x, classp, hash, mode, COST (x), approx_reg_cost (x));
1745}
1746
7afe21cc
RK
1747\f
1748/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1749 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1750 the two classes equivalent.
1751
1752 CLASS1 will be the surviving class; CLASS2 should not be used after this
1753 call.
1754
1755 Any invalid entries in CLASS2 will not be copied. */
1756
1757static void
7080f735 1758merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
7afe21cc 1759{
32e9fa48 1760 struct table_elt *elt, *next, *new_elt;
7afe21cc
RK
1761
1762 /* Ensure we start with the head of the classes. */
1763 class1 = class1->first_same_value;
1764 class2 = class2->first_same_value;
1765
1766 /* If they were already equal, forget it. */
1767 if (class1 == class2)
1768 return;
1769
1770 for (elt = class2; elt; elt = next)
1771 {
770ae6cc 1772 unsigned int hash;
7afe21cc 1773 rtx exp = elt->exp;
ef4bddc2 1774 machine_mode mode = elt->mode;
7afe21cc
RK
1775
1776 next = elt->next_same_value;
1777
1778 /* Remove old entry, make a new one in CLASS1's class.
1779 Don't do this for invalid entries as we cannot find their
0f41302f 1780 hash code (it also isn't necessary). */
0516f6fe 1781 if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
7afe21cc 1782 {
a90fc8e0
RH
1783 bool need_rehash = false;
1784
7afe21cc 1785 hash_arg_in_memory = 0;
7afe21cc 1786 hash = HASH (exp, mode);
278a83b2 1787
f8cfc6aa 1788 if (REG_P (exp))
a90fc8e0 1789 {
08a69267 1790 need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
a90fc8e0
RH
1791 delete_reg_equiv (REGNO (exp));
1792 }
278a83b2 1793
d556d181
EB
1794 if (REG_P (exp) && REGNO (exp) >= FIRST_PSEUDO_REGISTER)
1795 remove_pseudo_from_table (exp, hash);
1796 else
1797 remove_from_table (elt, hash);
7afe21cc 1798
a90fc8e0 1799 if (insert_regs (exp, class1, 0) || need_rehash)
8ae2b8f6
JW
1800 {
1801 rehash_using_reg (exp);
1802 hash = HASH (exp, mode);
1803 }
32e9fa48
KG
1804 new_elt = insert (exp, class1, hash, mode);
1805 new_elt->in_memory = hash_arg_in_memory;
6c4d60f8
JJ
1806 if (GET_CODE (exp) == ASM_OPERANDS && elt->cost == MAX_COST)
1807 new_elt->cost = MAX_COST;
7afe21cc
RK
1808 }
1809 }
1810}
1811\f
01e752d3
JL
1812/* Flush the entire hash table. */
1813
1814static void
7080f735 1815flush_hash_table (void)
01e752d3
JL
1816{
1817 int i;
1818 struct table_elt *p;
1819
9b1549b8 1820 for (i = 0; i < HASH_SIZE; i++)
01e752d3
JL
1821 for (p = table[i]; p; p = table[i])
1822 {
1823 /* Note that invalidate can remove elements
1824 after P in the current hash chain. */
f8cfc6aa 1825 if (REG_P (p->exp))
524e3576 1826 invalidate (p->exp, VOIDmode);
01e752d3
JL
1827 else
1828 remove_from_table (p, i);
1829 }
1830}
14a774a9 1831\f
c992c066
RS
1832/* Check whether an anti dependence exists between X and EXP. MODE and
1833 ADDR are as for canon_anti_dependence. */
be8ac49a 1834
c992c066 1835static bool
ef4bddc2 1836check_dependence (const_rtx x, rtx exp, machine_mode mode, rtx addr)
2ce6dc2f 1837{
c992c066
RS
1838 subrtx_iterator::array_type array;
1839 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
1840 {
1841 const_rtx x = *iter;
1842 if (MEM_P (x) && canon_anti_dependence (x, true, exp, mode, addr))
1843 return true;
1844 }
1845 return false;
2ce6dc2f
JH
1846}
1847\f
14a774a9
RK
1848/* Remove from the hash table, or mark as invalid, all expressions whose
1849 values could be altered by storing in X. X is a register, a subreg, or
1850 a memory reference with nonvarying address (because, when a memory
1851 reference with a varying address is stored in, all memory references are
1852 removed by invalidate_memory so specific invalidation is superfluous).
1853 FULL_MODE, if not VOIDmode, indicates that this much should be
1854 invalidated instead of just the amount indicated by the mode of X. This
1855 is only used for bitfield stores into memory.
1856
1857 A nonvarying address may be just a register or just a symbol reference,
1858 or it may be either of those plus a numeric offset. */
7afe21cc
RK
1859
1860static void
ef4bddc2 1861invalidate (rtx x, machine_mode full_mode)
7afe21cc 1862{
b3694847
SS
1863 int i;
1864 struct table_elt *p;
9ddb66ca 1865 rtx addr;
7afe21cc 1866
14a774a9 1867 switch (GET_CODE (x))
7afe21cc 1868 {
14a774a9
RK
1869 case REG:
1870 {
1871 /* If X is a register, dependencies on its contents are recorded
1872 through the qty number mechanism. Just change the qty number of
1873 the register, mark it as invalid for expressions that refer to it,
1874 and remove it itself. */
770ae6cc
RK
1875 unsigned int regno = REGNO (x);
1876 unsigned int hash = HASH (x, GET_MODE (x));
7afe21cc 1877
14a774a9
RK
1878 /* Remove REGNO from any quantity list it might be on and indicate
1879 that its value might have changed. If it is a pseudo, remove its
1880 entry from the hash table.
7afe21cc 1881
14a774a9
RK
1882 For a hard register, we do the first two actions above for any
1883 additional hard registers corresponding to X. Then, if any of these
1884 registers are in the table, we must remove any REG entries that
1885 overlap these registers. */
7afe21cc 1886
14a774a9
RK
1887 delete_reg_equiv (regno);
1888 REG_TICK (regno)++;
46081bb3 1889 SUBREG_TICKED (regno) = -1;
85e4d983 1890
14a774a9 1891 if (regno >= FIRST_PSEUDO_REGISTER)
d556d181 1892 remove_pseudo_from_table (x, hash);
14a774a9
RK
1893 else
1894 {
1895 HOST_WIDE_INT in_table
1896 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
09e18274 1897 unsigned int endregno = END_HARD_REGNO (x);
770ae6cc 1898 unsigned int tregno, tendregno, rn;
b3694847 1899 struct table_elt *p, *next;
7afe21cc 1900
14a774a9 1901 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
7afe21cc 1902
770ae6cc 1903 for (rn = regno + 1; rn < endregno; rn++)
14a774a9 1904 {
770ae6cc
RK
1905 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1906 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1907 delete_reg_equiv (rn);
1908 REG_TICK (rn)++;
46081bb3 1909 SUBREG_TICKED (rn) = -1;
14a774a9 1910 }
7afe21cc 1911
14a774a9 1912 if (in_table)
9b1549b8 1913 for (hash = 0; hash < HASH_SIZE; hash++)
14a774a9
RK
1914 for (p = table[hash]; p; p = next)
1915 {
1916 next = p->next_same_hash;
7afe21cc 1917
f8cfc6aa 1918 if (!REG_P (p->exp)
278a83b2
KH
1919 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1920 continue;
1921
14a774a9 1922 tregno = REGNO (p->exp);
09e18274 1923 tendregno = END_HARD_REGNO (p->exp);
14a774a9
RK
1924 if (tendregno > regno && tregno < endregno)
1925 remove_from_table (p, hash);
1926 }
1927 }
1928 }
7afe21cc 1929 return;
7afe21cc 1930
14a774a9 1931 case SUBREG:
bb4034b3 1932 invalidate (SUBREG_REG (x), VOIDmode);
7afe21cc 1933 return;
aac5cc16 1934
14a774a9 1935 case PARALLEL:
278a83b2 1936 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
aac5cc16
RH
1937 invalidate (XVECEXP (x, 0, i), VOIDmode);
1938 return;
aac5cc16 1939
14a774a9
RK
1940 case EXPR_LIST:
1941 /* This is part of a disjoint return value; extract the location in
1942 question ignoring the offset. */
aac5cc16
RH
1943 invalidate (XEXP (x, 0), VOIDmode);
1944 return;
7afe21cc 1945
14a774a9 1946 case MEM:
9ddb66ca 1947 addr = canon_rtx (get_addr (XEXP (x, 0)));
db048faf
MM
1948 /* Calculate the canonical version of X here so that
1949 true_dependence doesn't generate new RTL for X on each call. */
1950 x = canon_rtx (x);
1951
14a774a9
RK
1952 /* Remove all hash table elements that refer to overlapping pieces of
1953 memory. */
1954 if (full_mode == VOIDmode)
1955 full_mode = GET_MODE (x);
bb4034b3 1956
9b1549b8 1957 for (i = 0; i < HASH_SIZE; i++)
7afe21cc 1958 {
b3694847 1959 struct table_elt *next;
14a774a9
RK
1960
1961 for (p = table[i]; p; p = next)
1962 {
1963 next = p->next_same_hash;
db048faf
MM
1964 if (p->in_memory)
1965 {
2ce6dc2f
JH
1966 /* Just canonicalize the expression once;
1967 otherwise each time we call invalidate
1968 true_dependence will canonicalize the
1969 expression again. */
1970 if (!p->canon_exp)
1971 p->canon_exp = canon_rtx (p->exp);
c992c066 1972 if (check_dependence (p->canon_exp, x, full_mode, addr))
db048faf 1973 remove_from_table (p, i);
db048faf 1974 }
14a774a9 1975 }
7afe21cc 1976 }
14a774a9
RK
1977 return;
1978
1979 default:
341c100f 1980 gcc_unreachable ();
7afe21cc
RK
1981 }
1982}
2a1d78d8
JJ
1983
1984/* Invalidate DEST. Used when DEST is not going to be added
1985 into the hash table for some reason, e.g. do_not_record
1986 flagged on it. */
1987
1988static void
1989invalidate_dest (rtx dest)
1990{
1991 if (REG_P (dest)
1992 || GET_CODE (dest) == SUBREG
1993 || MEM_P (dest))
1994 invalidate (dest, VOIDmode);
1995 else if (GET_CODE (dest) == STRICT_LOW_PART
1996 || GET_CODE (dest) == ZERO_EXTRACT)
1997 invalidate (XEXP (dest, 0), GET_MODE (dest));
1998}
14a774a9 1999\f
7afe21cc
RK
2000/* Remove all expressions that refer to register REGNO,
2001 since they are already invalid, and we are about to
2002 mark that register valid again and don't want the old
2003 expressions to reappear as valid. */
2004
2005static void
7080f735 2006remove_invalid_refs (unsigned int regno)
7afe21cc 2007{
770ae6cc
RK
2008 unsigned int i;
2009 struct table_elt *p, *next;
7afe21cc 2010
9b1549b8 2011 for (i = 0; i < HASH_SIZE; i++)
7afe21cc
RK
2012 for (p = table[i]; p; p = next)
2013 {
2014 next = p->next_same_hash;
c9bd6bcd 2015 if (!REG_P (p->exp) && refers_to_regno_p (regno, p->exp))
7afe21cc
RK
2016 remove_from_table (p, i);
2017 }
2018}
34c73909 2019
ddef6bc7
JJ
2020/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
2021 and mode MODE. */
34c73909 2022static void
7080f735 2023remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
ef4bddc2 2024 machine_mode mode)
34c73909 2025{
770ae6cc
RK
2026 unsigned int i;
2027 struct table_elt *p, *next;
ddef6bc7 2028 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
34c73909 2029
9b1549b8 2030 for (i = 0; i < HASH_SIZE; i++)
34c73909
R
2031 for (p = table[i]; p; p = next)
2032 {
ddef6bc7 2033 rtx exp = p->exp;
34c73909 2034 next = p->next_same_hash;
278a83b2 2035
f8cfc6aa 2036 if (!REG_P (exp)
34c73909 2037 && (GET_CODE (exp) != SUBREG
f8cfc6aa 2038 || !REG_P (SUBREG_REG (exp))
34c73909 2039 || REGNO (SUBREG_REG (exp)) != regno
ddef6bc7
JJ
2040 || (((SUBREG_BYTE (exp)
2041 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
2042 && SUBREG_BYTE (exp) <= end))
c9bd6bcd 2043 && refers_to_regno_p (regno, p->exp))
34c73909
R
2044 remove_from_table (p, i);
2045 }
2046}
7afe21cc
RK
2047\f
2048/* Recompute the hash codes of any valid entries in the hash table that
2049 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2050
2051 This is called when we make a jump equivalence. */
2052
2053static void
7080f735 2054rehash_using_reg (rtx x)
7afe21cc 2055{
973838fd 2056 unsigned int i;
7afe21cc 2057 struct table_elt *p, *next;
2197a88a 2058 unsigned hash;
7afe21cc
RK
2059
2060 if (GET_CODE (x) == SUBREG)
2061 x = SUBREG_REG (x);
2062
2063 /* If X is not a register or if the register is known not to be in any
2064 valid entries in the table, we have no work to do. */
2065
f8cfc6aa 2066 if (!REG_P (x)
30f72379
MM
2067 || REG_IN_TABLE (REGNO (x)) < 0
2068 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
7afe21cc
RK
2069 return;
2070
2071 /* Scan all hash chains looking for valid entries that mention X.
a90fc8e0 2072 If we find one and it is in the wrong hash chain, move it. */
7afe21cc 2073
9b1549b8 2074 for (i = 0; i < HASH_SIZE; i++)
7afe21cc
RK
2075 for (p = table[i]; p; p = next)
2076 {
2077 next = p->next_same_hash;
a90fc8e0 2078 if (reg_mentioned_p (x, p->exp)
0516f6fe
SB
2079 && exp_equiv_p (p->exp, p->exp, 1, false)
2080 && i != (hash = SAFE_HASH (p->exp, p->mode)))
7afe21cc
RK
2081 {
2082 if (p->next_same_hash)
2083 p->next_same_hash->prev_same_hash = p->prev_same_hash;
2084
2085 if (p->prev_same_hash)
2086 p->prev_same_hash->next_same_hash = p->next_same_hash;
2087 else
2088 table[i] = p->next_same_hash;
2089
2090 p->next_same_hash = table[hash];
2091 p->prev_same_hash = 0;
2092 if (table[hash])
2093 table[hash]->prev_same_hash = p;
2094 table[hash] = p;
2095 }
2096 }
2097}
2098\f
7afe21cc
RK
2099/* Remove from the hash table any expression that is a call-clobbered
2100 register. Also update their TICK values. */
2101
2102static void
7080f735 2103invalidate_for_call (void)
7afe21cc 2104{
770ae6cc
RK
2105 unsigned int regno, endregno;
2106 unsigned int i;
2197a88a 2107 unsigned hash;
7afe21cc
RK
2108 struct table_elt *p, *next;
2109 int in_table = 0;
c7fb4c7a 2110 hard_reg_set_iterator hrsi;
7afe21cc
RK
2111
2112 /* Go through all the hard registers. For each that is clobbered in
2113 a CALL_INSN, remove the register from quantity chains and update
2114 reg_tick if defined. Also see if any of these registers is currently
2115 in the table. */
c7fb4c7a
SB
2116 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, regno, hrsi)
2117 {
2118 delete_reg_equiv (regno);
2119 if (REG_TICK (regno) >= 0)
2120 {
2121 REG_TICK (regno)++;
2122 SUBREG_TICKED (regno) = -1;
2123 }
2124 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2125 }
7afe21cc
RK
2126
2127 /* In the case where we have no call-clobbered hard registers in the
2128 table, we are done. Otherwise, scan the table and remove any
2129 entry that overlaps a call-clobbered register. */
2130
2131 if (in_table)
9b1549b8 2132 for (hash = 0; hash < HASH_SIZE; hash++)
7afe21cc
RK
2133 for (p = table[hash]; p; p = next)
2134 {
2135 next = p->next_same_hash;
2136
f8cfc6aa 2137 if (!REG_P (p->exp)
7afe21cc
RK
2138 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2139 continue;
2140
2141 regno = REGNO (p->exp);
09e18274 2142 endregno = END_HARD_REGNO (p->exp);
7afe21cc
RK
2143
2144 for (i = regno; i < endregno; i++)
2145 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2146 {
2147 remove_from_table (p, hash);
2148 break;
2149 }
2150 }
2151}
2152\f
2153/* Given an expression X of type CONST,
2154 and ELT which is its table entry (or 0 if it
2155 is not in the hash table),
2156 return an alternate expression for X as a register plus integer.
2157 If none can be found, return 0. */
2158
2159static rtx
7080f735 2160use_related_value (rtx x, struct table_elt *elt)
7afe21cc 2161{
b3694847
SS
2162 struct table_elt *relt = 0;
2163 struct table_elt *p, *q;
906c4e36 2164 HOST_WIDE_INT offset;
7afe21cc
RK
2165
2166 /* First, is there anything related known?
2167 If we have a table element, we can tell from that.
2168 Otherwise, must look it up. */
2169
2170 if (elt != 0 && elt->related_value != 0)
2171 relt = elt;
2172 else if (elt == 0 && GET_CODE (x) == CONST)
2173 {
2174 rtx subexp = get_related_value (x);
2175 if (subexp != 0)
2176 relt = lookup (subexp,
0516f6fe 2177 SAFE_HASH (subexp, GET_MODE (subexp)),
7afe21cc
RK
2178 GET_MODE (subexp));
2179 }
2180
2181 if (relt == 0)
2182 return 0;
2183
2184 /* Search all related table entries for one that has an
2185 equivalent register. */
2186
2187 p = relt;
2188 while (1)
2189 {
2190 /* This loop is strange in that it is executed in two different cases.
2191 The first is when X is already in the table. Then it is searching
2192 the RELATED_VALUE list of X's class (RELT). The second case is when
2193 X is not in the table. Then RELT points to a class for the related
2194 value.
2195
2196 Ensure that, whatever case we are in, that we ignore classes that have
2197 the same value as X. */
2198
2199 if (rtx_equal_p (x, p->exp))
2200 q = 0;
2201 else
2202 for (q = p->first_same_value; q; q = q->next_same_value)
f8cfc6aa 2203 if (REG_P (q->exp))
7afe21cc
RK
2204 break;
2205
2206 if (q)
2207 break;
2208
2209 p = p->related_value;
2210
2211 /* We went all the way around, so there is nothing to be found.
2212 Alternatively, perhaps RELT was in the table for some other reason
2213 and it has no related values recorded. */
2214 if (p == relt || p == 0)
2215 break;
2216 }
2217
2218 if (q == 0)
2219 return 0;
2220
2221 offset = (get_integer_term (x) - get_integer_term (p->exp));
2222 /* Note: OFFSET may be 0 if P->xexp and X are related by commutativity. */
0a81f074 2223 return plus_constant (q->mode, q->exp, offset);
7afe21cc
RK
2224}
2225\f
e855c69d 2226
6462bb43
AO
/* Hash a string.  Just add its bytes up.  A null pointer hashes to 0.  */
static inline unsigned
hash_rtx_string (const char *ps)
{
  unsigned hash = 0;

  if (ps)
    for (const unsigned char *p = (const unsigned char *) ps; *p; p++)
      hash += *p;

  return hash;
}
2240
b8698a0f 2241/* Same as hash_rtx, but call CB on each rtx if it is not NULL.
e855c69d 2242 When the callback returns true, we continue with the new rtx. */
7afe21cc 2243
0516f6fe 2244unsigned
ef4bddc2 2245hash_rtx_cb (const_rtx x, machine_mode mode,
e855c69d
AB
2246 int *do_not_record_p, int *hash_arg_in_memory_p,
2247 bool have_reg_qty, hash_rtx_callback_function cb)
7afe21cc 2248{
b3694847
SS
2249 int i, j;
2250 unsigned hash = 0;
2251 enum rtx_code code;
2252 const char *fmt;
ef4bddc2 2253 machine_mode newmode;
e855c69d 2254 rtx newx;
7afe21cc 2255
0516f6fe
SB
2256 /* Used to turn recursion into iteration. We can't rely on GCC's
2257 tail-recursion elimination since we need to keep accumulating values
2258 in HASH. */
7afe21cc
RK
2259 repeat:
2260 if (x == 0)
2261 return hash;
2262
e855c69d 2263 /* Invoke the callback first. */
b8698a0f 2264 if (cb != NULL
e855c69d
AB
2265 && ((*cb) (x, mode, &newx, &newmode)))
2266 {
2267 hash += hash_rtx_cb (newx, newmode, do_not_record_p,
2268 hash_arg_in_memory_p, have_reg_qty, cb);
2269 return hash;
2270 }
2271
7afe21cc
RK
2272 code = GET_CODE (x);
2273 switch (code)
2274 {
2275 case REG:
2276 {
770ae6cc 2277 unsigned int regno = REGNO (x);
7afe21cc 2278
e855c69d 2279 if (do_not_record_p && !reload_completed)
7afe21cc 2280 {
0516f6fe
SB
2281 /* On some machines, we can't record any non-fixed hard register,
2282 because extending its life will cause reload problems. We
2283 consider ap, fp, sp, gp to be fixed for this purpose.
2284
2285 We also consider CCmode registers to be fixed for this purpose;
2286 failure to do so leads to failure to simplify 0<100 type of
2287 conditionals.
2288
2289 On all machines, we can't record any global registers.
2290 Nor should we record any register that is in a small
07b8f0a8 2291 class, as defined by TARGET_CLASS_LIKELY_SPILLED_P. */
0516f6fe
SB
2292 bool record;
2293
2294 if (regno >= FIRST_PSEUDO_REGISTER)
2295 record = true;
2296 else if (x == frame_pointer_rtx
2297 || x == hard_frame_pointer_rtx
2298 || x == arg_pointer_rtx
2299 || x == stack_pointer_rtx
2300 || x == pic_offset_table_rtx)
2301 record = true;
2302 else if (global_regs[regno])
2303 record = false;
2304 else if (fixed_regs[regno])
2305 record = true;
2306 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2307 record = true;
42db504c 2308 else if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
0516f6fe 2309 record = false;
07b8f0a8 2310 else if (targetm.class_likely_spilled_p (REGNO_REG_CLASS (regno)))
0516f6fe
SB
2311 record = false;
2312 else
2313 record = true;
2314
2315 if (!record)
2316 {
2317 *do_not_record_p = 1;
2318 return 0;
2319 }
7afe21cc 2320 }
770ae6cc 2321
0516f6fe
SB
2322 hash += ((unsigned int) REG << 7);
2323 hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2197a88a 2324 return hash;
7afe21cc
RK
2325 }
2326
34c73909
R
2327 /* We handle SUBREG of a REG specially because the underlying
2328 reg changes its hash value with every value change; we don't
2329 want to have to forget unrelated subregs when one subreg changes. */
2330 case SUBREG:
2331 {
f8cfc6aa 2332 if (REG_P (SUBREG_REG (x)))
34c73909 2333 {
0516f6fe 2334 hash += (((unsigned int) SUBREG << 7)
ddef6bc7
JJ
2335 + REGNO (SUBREG_REG (x))
2336 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
34c73909
R
2337 return hash;
2338 }
2339 break;
2340 }
2341
7afe21cc 2342 case CONST_INT:
0516f6fe
SB
2343 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2344 + (unsigned int) INTVAL (x));
2345 return hash;
7afe21cc 2346
807e902e
KZ
2347 case CONST_WIDE_INT:
2348 for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
2349 hash += CONST_WIDE_INT_ELT (x, i);
2350 return hash;
2351
7afe21cc
RK
2352 case CONST_DOUBLE:
2353 /* This is like the general case, except that it only counts
2354 the integers representing the constant. */
0516f6fe 2355 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
807e902e 2356 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
0516f6fe
SB
2357 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2358 + (unsigned int) CONST_DOUBLE_HIGH (x));
807e902e
KZ
2359 else
2360 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
7afe21cc
RK
2361 return hash;
2362
091a3ac7
CF
2363 case CONST_FIXED:
2364 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2365 hash += fixed_hash (CONST_FIXED_VALUE (x));
2366 return hash;
2367
69ef87e2
AH
2368 case CONST_VECTOR:
2369 {
2370 int units;
2371 rtx elt;
2372
2373 units = CONST_VECTOR_NUNITS (x);
2374
2375 for (i = 0; i < units; ++i)
2376 {
2377 elt = CONST_VECTOR_ELT (x, i);
e855c69d 2378 hash += hash_rtx_cb (elt, GET_MODE (elt),
b8698a0f 2379 do_not_record_p, hash_arg_in_memory_p,
e855c69d 2380 have_reg_qty, cb);
69ef87e2
AH
2381 }
2382
2383 return hash;
2384 }
2385
7afe21cc
RK
2386 /* Assume there is only one rtx object for any given label. */
2387 case LABEL_REF:
0516f6fe
SB
2388 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2389 differences and differences between each stage's debugging dumps. */
2390 hash += (((unsigned int) LABEL_REF << 7)
a827d9b1 2391 + CODE_LABEL_NUMBER (LABEL_REF_LABEL (x)));
2197a88a 2392 return hash;
7afe21cc
RK
2393
2394 case SYMBOL_REF:
0516f6fe
SB
2395 {
2396 /* Don't hash on the symbol's address to avoid bootstrap differences.
2397 Different hash values may cause expressions to be recorded in
2398 different orders and thus different registers to be used in the
2399 final assembler. This also avoids differences in the dump files
2400 between various stages. */
2401 unsigned int h = 0;
2402 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2403
2404 while (*p)
2405 h += (h << 7) + *p++; /* ??? revisit */
2406
2407 hash += ((unsigned int) SYMBOL_REF << 7) + h;
2408 return hash;
2409 }
7afe21cc
RK
2410
2411 case MEM:
14a774a9
RK
2412 /* We don't record if marked volatile or if BLKmode since we don't
2413 know the size of the move. */
e855c69d 2414 if (do_not_record_p && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
7afe21cc 2415 {
0516f6fe 2416 *do_not_record_p = 1;
7afe21cc
RK
2417 return 0;
2418 }
0516f6fe
SB
2419 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2420 *hash_arg_in_memory_p = 1;
4977bab6 2421
7afe21cc
RK
2422 /* Now that we have already found this special case,
2423 might as well speed it up as much as possible. */
2197a88a 2424 hash += (unsigned) MEM;
7afe21cc
RK
2425 x = XEXP (x, 0);
2426 goto repeat;
2427
bb07060a
JW
2428 case USE:
2429 /* A USE that mentions non-volatile memory needs special
2430 handling since the MEM may be BLKmode which normally
2431 prevents an entry from being made. Pure calls are
0516f6fe
SB
2432 marked by a USE which mentions BLKmode memory.
2433 See calls.c:emit_call_1. */
3c0cb5de 2434 if (MEM_P (XEXP (x, 0))
bb07060a
JW
2435 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2436 {
68252e27 2437 hash += (unsigned) USE;
bb07060a
JW
2438 x = XEXP (x, 0);
2439
0516f6fe
SB
2440 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2441 *hash_arg_in_memory_p = 1;
bb07060a
JW
2442
2443 /* Now that we have already found this special case,
2444 might as well speed it up as much as possible. */
2445 hash += (unsigned) MEM;
2446 x = XEXP (x, 0);
2447 goto repeat;
2448 }
2449 break;
2450
7afe21cc
RK
2451 case PRE_DEC:
2452 case PRE_INC:
2453 case POST_DEC:
2454 case POST_INC:
4b983fdc
RH
2455 case PRE_MODIFY:
2456 case POST_MODIFY:
7afe21cc
RK
2457 case PC:
2458 case CC0:
2459 case CALL:
2460 case UNSPEC_VOLATILE:
e855c69d
AB
2461 if (do_not_record_p) {
2462 *do_not_record_p = 1;
2463 return 0;
2464 }
2465 else
2466 return hash;
2467 break;
7afe21cc
RK
2468
2469 case ASM_OPERANDS:
e855c69d 2470 if (do_not_record_p && MEM_VOLATILE_P (x))
7afe21cc 2471 {
0516f6fe 2472 *do_not_record_p = 1;
7afe21cc
RK
2473 return 0;
2474 }
6462bb43
AO
2475 else
2476 {
2477 /* We don't want to take the filename and line into account. */
2478 hash += (unsigned) code + (unsigned) GET_MODE (x)
0516f6fe
SB
2479 + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2480 + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
6462bb43
AO
2481 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2482
2483 if (ASM_OPERANDS_INPUT_LENGTH (x))
2484 {
2485 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2486 {
e855c69d
AB
2487 hash += (hash_rtx_cb (ASM_OPERANDS_INPUT (x, i),
2488 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2489 do_not_record_p, hash_arg_in_memory_p,
2490 have_reg_qty, cb)
0516f6fe 2491 + hash_rtx_string
e855c69d 2492 (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
6462bb43
AO
2493 }
2494
0516f6fe 2495 hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
6462bb43
AO
2496 x = ASM_OPERANDS_INPUT (x, 0);
2497 mode = GET_MODE (x);
2498 goto repeat;
2499 }
2500
2501 return hash;
2502 }
e9a25f70 2503 break;
278a83b2 2504
e9a25f70
JL
2505 default:
2506 break;
7afe21cc
RK
2507 }
2508
2509 i = GET_RTX_LENGTH (code) - 1;
2197a88a 2510 hash += (unsigned) code + (unsigned) GET_MODE (x);
7afe21cc
RK
2511 fmt = GET_RTX_FORMAT (code);
2512 for (; i >= 0; i--)
2513 {
341c100f 2514 switch (fmt[i])
7afe21cc 2515 {
341c100f 2516 case 'e':
7afe21cc
RK
2517 /* If we are about to do the last recursive call
2518 needed at this level, change it into iteration.
2519 This function is called enough to be worth it. */
2520 if (i == 0)
2521 {
0516f6fe 2522 x = XEXP (x, i);
7afe21cc
RK
2523 goto repeat;
2524 }
b8698a0f 2525
bbbbb16a 2526 hash += hash_rtx_cb (XEXP (x, i), VOIDmode, do_not_record_p,
e855c69d
AB
2527 hash_arg_in_memory_p,
2528 have_reg_qty, cb);
341c100f 2529 break;
0516f6fe 2530
341c100f
NS
2531 case 'E':
2532 for (j = 0; j < XVECLEN (x, i); j++)
bbbbb16a 2533 hash += hash_rtx_cb (XVECEXP (x, i, j), VOIDmode, do_not_record_p,
e855c69d
AB
2534 hash_arg_in_memory_p,
2535 have_reg_qty, cb);
341c100f 2536 break;
0516f6fe 2537
341c100f
NS
2538 case 's':
2539 hash += hash_rtx_string (XSTR (x, i));
2540 break;
2541
2542 case 'i':
2543 hash += (unsigned int) XINT (x, i);
2544 break;
2545
2546 case '0': case 't':
2547 /* Unused. */
2548 break;
2549
2550 default:
2551 gcc_unreachable ();
2552 }
7afe21cc 2553 }
0516f6fe 2554
7afe21cc
RK
2555 return hash;
2556}
2557
e855c69d
AB
2558/* Hash an rtx. We are careful to make sure the value is never negative.
2559 Equivalent registers hash identically.
2560 MODE is used in hashing for CONST_INTs only;
2561 otherwise the mode of X is used.
2562
2563 Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2564
2565 If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
3e55d79b 2566 a MEM rtx which does not have the MEM_READONLY_P flag set.
e855c69d
AB
2567
2568 Note that cse_insn knows that the hash code of a MEM expression
2569 is just (int) MEM plus the hash code of the address. */
2570
2571unsigned
ef4bddc2 2572hash_rtx (const_rtx x, machine_mode mode, int *do_not_record_p,
e855c69d
AB
2573 int *hash_arg_in_memory_p, bool have_reg_qty)
2574{
2575 return hash_rtx_cb (x, mode, do_not_record_p,
2576 hash_arg_in_memory_p, have_reg_qty, NULL);
2577}
2578
0516f6fe
SB
2579/* Hash an rtx X for cse via hash_rtx.
2580 Stores 1 in do_not_record if any subexpression is volatile.
2581 Stores 1 in hash_arg_in_memory if X contains a mem rtx which
3e55d79b 2582 does not have the MEM_READONLY_P flag set. */
0516f6fe
SB
2583
2584static inline unsigned
ef4bddc2 2585canon_hash (rtx x, machine_mode mode)
0516f6fe
SB
2586{
2587 return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2588}
2589
2590/* Like canon_hash but with no side effects, i.e. do_not_record
2591 and hash_arg_in_memory are not changed. */
7afe21cc 2592
0516f6fe 2593static inline unsigned
ef4bddc2 2594safe_hash (rtx x, machine_mode mode)
7afe21cc 2595{
0516f6fe
SB
2596 int dummy_do_not_record;
2597 return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
7afe21cc
RK
2598}
2599\f
/* Return 1 iff X and Y would canonicalize into the same thing,
   without actually constructing the canonicalization of either one.
   If VALIDATE is nonzero,
   we assume X is an expression being processed from the rtl
   and Y was found in the hash table.  We check register refs
   in Y for being marked as valid.

   If FOR_GCSE is true, we compare X and Y for equivalence for GCSE:
   registers are compared only by number, and MEMs get extra checks
   (volatility, attributes, trapping status) instead of quantity-based
   register comparison.  */

int
exp_equiv_p (const_rtx x, const_rtx y, int validate, bool for_gcse)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  /* Note: it is incorrect to assume an expression is equivalent to itself
     if VALIDATE is nonzero.  */
  if (x == y && !validate)
    return 1;

  if (x == 0 || y == 0)
    return x == y;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address space are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    CASE_CONST_UNIQUE:
      /* These rtx kinds are shared/unique, so pointer identity decides.  */
      return x == y;

    case LABEL_REF:
      return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);

    case SYMBOL_REF:
      /* Symbol name strings are interned, so pointer comparison works.  */
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      if (for_gcse)
	return REGNO (x) == REGNO (y);
      else
	{
	  unsigned int regno = REGNO (y);
	  unsigned int i;
	  unsigned int endregno = END_REGNO (y);

	  /* If the quantities are not the same, the expressions are not
	     equivalent.  If they are and we are not to validate, they
	     are equivalent.  Otherwise, ensure all regs are up-to-date.  */

	  if (REG_QTY (REGNO (x)) != REG_QTY (regno))
	    return 0;

	  if (! validate)
	    return 1;

	  /* Check every hard reg covered by Y for staleness.  */
	  for (i = regno; i < endregno; i++)
	    if (REG_IN_TABLE (i) != REG_TICK (i))
	      return 0;

	  return 1;
	}

    case MEM:
      if (for_gcse)
	{
	  /* A volatile mem should not be considered equivalent to any
	     other.  */
	  if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	    return 0;

	  /* Can't merge two expressions in different alias sets, since we
	     can decide that the expression is transparent in a block when
	     it isn't, due to it being set with the different alias set.

	     Also, can't merge two expressions with different MEM_ATTRS.
	     They could e.g. be two different entities allocated into the
	     same space on the stack (see e.g. PR25130).  In that case, the
	     MEM addresses can be the same, even though the two MEMs are
	     absolutely not equivalent.

	     But because really all MEM attributes should be the same for
	     equivalent MEMs, we just use the invariant that MEMs that have
	     the same attributes share the same mem_attrs data structure.  */
	  if (!mem_attrs_eq_p (MEM_ATTRS (x), MEM_ATTRS (y)))
	    return 0;

	  /* If we are handling exceptions, we cannot consider two expressions
	     with different trapping status as equivalent, because simple_mem
	     might accept one and reject the other.  */
	  if (cfun->can_throw_non_call_exceptions
	      && (MEM_NOTRAP_P (x) != MEM_NOTRAP_P (y)))
	    return 0;
	}
      /* Fall through to the generic element-wise comparison below.  */
      break;

    /* For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
			    validate, for_gcse)
	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
			       validate, for_gcse))
	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
			       validate, for_gcse)
		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
				  validate, for_gcse)));

    case ASM_OPERANDS:
      /* We don't use the generic code below because we want to
	 disregard filename and line numbers.  */

      /* A volatile asm isn't equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      if (GET_MODE (x) != GET_MODE (y)
	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
	return 0;

      if (ASM_OPERANDS_INPUT_LENGTH (x))
	{
	  /* Compare each input operand and its constraint string.  */
	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	    if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
			       ASM_OPERANDS_INPUT (y, i),
			       validate, for_gcse)
		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
	      return 0;
	}

      return 1;

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
			     validate, for_gcse))
	    return 0;
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
			       validate, for_gcse))
	      return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case '0':
	case 't':
	  /* These fields carry no semantic payload for equivalence.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  return 1;
}
2807\f
eef3c949
RS
2808/* Subroutine of canon_reg. Pass *XLOC through canon_reg, and validate
2809 the result if necessary. INSN is as for canon_reg. */
2810
2811static void
20468884 2812validate_canon_reg (rtx *xloc, rtx_insn *insn)
eef3c949 2813{
6fb5fa3c
DB
2814 if (*xloc)
2815 {
32e9fa48 2816 rtx new_rtx = canon_reg (*xloc, insn);
eef3c949 2817
6fb5fa3c
DB
2818 /* If replacing pseudo with hard reg or vice versa, ensure the
2819 insn remains valid. Likewise if the insn has MATCH_DUPs. */
32e9fa48
KG
2820 gcc_assert (insn && new_rtx);
2821 validate_change (insn, xloc, new_rtx, 1);
6fb5fa3c 2822 }
eef3c949
RS
2823}
2824
7afe21cc
RK
/* Canonicalize an expression:
   replace each register reference inside it
   with the "oldest" equivalent register.

   If INSN is nonzero validate_change is used to ensure that INSN remains valid
   after we make our substitution.  The calls are made with IN_GROUP nonzero
   so apply_change_group must be called upon the outermost return from this
   function (unless INSN is zero).  The result of apply_change_group can
   generally be discarded since the changes we are making are optional.  */

static rtx
canon_reg (rtx x, rtx_insn *insn)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return x;

  code = GET_CODE (x);
  switch (code)
    {
    /* Leaf and constant forms contain no registers to canonicalize.  */
    case PC:
    case CC0:
    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return x;

    case REG:
      {
	int first;
	int q;
	struct qty_table_elem *ent;

	/* Never replace a hard reg, because hard regs can appear
	   in more than one machine mode, and we must preserve the mode
	   of each occurrence.  Also, some hard regs appear in
	   MEMs that are shared and mustn't be altered.  Don't try to
	   replace any reg that maps to a reg of class NO_REGS.  */
	if (REGNO (x) < FIRST_PSEUDO_REGISTER
	    || ! REGNO_QTY_VALID_P (REGNO (x)))
	  return x;

	/* Look up the quantity this register belongs to and return the
	   oldest (first) register recorded for it, in X's mode.  */
	q = REG_QTY (REGNO (x));
	ent = &qty_table[q];
	first = ent->first_reg;
	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
		: REGNO_REG_CLASS (first) == NO_REGS ? x
		: gen_rtx_REG (ent->mode, first));
      }

    default:
      break;
    }

  /* Recurse over the operands, validating each replacement so INSN
     (if any) stays recognizable.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      if (fmt[i] == 'e')
	validate_canon_reg (&XEXP (x, i), insn);
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  validate_canon_reg (&XVECEXP (x, i, j), insn);
    }

  return x;
}
2899\f
bca05d20
RK
/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
   operation (EQ, NE, GT, etc.), follow it back through the hash table to
   find out what values are being compared.

   *PARG1 and *PARG2 are updated to contain the rtx representing the values
   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
   compared to produce cc0.  *PMODE1 and *PMODE2 receive the modes of those
   values as they were before folding.

   The return value is the comparison operator: either CODE itself or the
   code corresponding to the inverse of the comparison.  */

static enum rtx_code
find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
		      machine_mode *pmode1, machine_mode *pmode2)
{
  rtx arg1, arg2;
  /* Comparisons already walked through, to avoid infinite cycles in the
     equivalence chains.  Allocated lazily on the first revisit risk.  */
  hash_set<rtx> *visited = NULL;
  /* Set nonzero when we find something of interest.  */
  rtx x = NULL;

  arg1 = *parg1, arg2 = *parg2;

  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */

  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
    {
      /* Set if the equivalence we find expresses the reversed comparison.  */
      int reverse_code = 0;
      struct table_elt *p = 0;

      /* Remember state from previous iteration.  */
      if (x)
	{
	  if (!visited)
	    visited = new hash_set<rtx>;
	  visited->add (x);
	  x = 0;
	}

      /* If arg1 is a COMPARE, extract the comparison arguments from it.
	 On machines with CC0, this is the only case that can occur, since
	 fold_rtx will return the COMPARE or item being compared with zero
	 when given CC0.  */

      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
	x = arg1;

      /* If ARG1 is a comparison operator and CODE is testing for
	 STORE_FLAG_VALUE, get the inner arguments.  */

      else if (COMPARISON_P (arg1))
	{
#ifdef FLOAT_STORE_FLAG_VALUE
	  REAL_VALUE_TYPE fsfv;
#endif

	  if (code == NE
	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
		  && code == LT && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
	      || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
		  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
		      REAL_VALUE_NEGATIVE (fsfv)))
#endif
	      )
	    x = arg1;
	  else if (code == EQ
		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
		       && code == GE && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
		   || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			   REAL_VALUE_NEGATIVE (fsfv)))
#endif
		   )
	    x = arg1, reverse_code = 1;
	}

      /* ??? We could also check for

	 (ne (and (eq (...) (const_int 1))) (const_int 0))

	 and related forms, but let's wait until we see them occurring.  */

      if (x == 0)
	/* Look up ARG1 in the hash table and see if it has an equivalence
	   that lets us see what is being compared.  */
	p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
      if (p)
	{
	  p = p->first_same_value;

	  /* If what we compare is already known to be constant, that is as
	     good as it gets.
	     We need to break the loop in this case, because otherwise we
	     can have an infinite loop when looking at a reg that is known
	     to be a constant which is the same as a comparison of a reg
	     against zero which appears later in the insn stream, which in
	     turn is constant and the same as the comparison of the first reg
	     against zero...  */
	  if (p->is_const)
	    break;
	}

      for (; p; p = p->next_same_value)
	{
	  machine_mode inner_mode = GET_MODE (p->exp);
#ifdef FLOAT_STORE_FLAG_VALUE
	  REAL_VALUE_TYPE fsfv;
#endif

	  /* If the entry isn't valid, skip it.  */
	  if (! exp_equiv_p (p->exp, p->exp, 1, false))
	    continue;

	  /* If it's a comparison we've used before, skip it.  */
	  if (visited && visited->contains (p->exp))
	    continue;

	  if (GET_CODE (p->exp) == COMPARE
	      /* Another possibility is that this machine has a compare insn
		 that includes the comparison code.  In that case, ARG1 would
		 be equivalent to a comparison operation that would set ARG1 to
		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
		 ORIG_CODE is the actual comparison being done; if it is an EQ,
		 we must reverse ORIG_CODE.  On machine with a negative value
		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
	      || ((code == NE
		   || (code == LT
		       && val_signbit_known_set_p (inner_mode,
						   STORE_FLAG_VALUE))
#ifdef FLOAT_STORE_FLAG_VALUE
		   || (code == LT
		       && SCALAR_FLOAT_MODE_P (inner_mode)
		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			   REAL_VALUE_NEGATIVE (fsfv)))
#endif
		   )
		  && COMPARISON_P (p->exp)))
	    {
	      x = p->exp;
	      break;
	    }
	  else if ((code == EQ
		    || (code == GE
			&& val_signbit_known_set_p (inner_mode,
						    STORE_FLAG_VALUE))
#ifdef FLOAT_STORE_FLAG_VALUE
		    || (code == GE
			&& SCALAR_FLOAT_MODE_P (inner_mode)
			&& (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			    REAL_VALUE_NEGATIVE (fsfv)))
#endif
		    )
		   && COMPARISON_P (p->exp))
	    {
	      reverse_code = 1;
	      x = p->exp;
	      break;
	    }

	  /* If this is a non-trapping address, e.g. fp + constant, the
	     equivalent is a better operand since it may let us predict
	     the value of the comparison.  */
	  else if (!rtx_addr_can_trap_p (p->exp))
	    {
	      arg1 = p->exp;
	      continue;
	    }
	}

      /* If we didn't find a useful equivalence for ARG1, we are done.
	 Otherwise, set up for the next iteration.  */
      if (x == 0)
	break;

      /* If we need to reverse the comparison, make sure that that is
	 possible -- we can't necessarily infer the value of GE from LT
	 with floating-point operands.  */
      if (reverse_code)
	{
	  enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
	  if (reversed == UNKNOWN)
	    break;
	  else
	    code = reversed;
	}
      else if (COMPARISON_P (x))
	code = GET_CODE (x);
      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
    }

  /* Return our results.  Return the modes from before fold_rtx
     because fold_rtx might produce const_int, and then it's too late.  */
  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);

  if (visited)
    delete visited;
  return code;
}
3101\f
a52b023a
PB
3102/* If X is a nontrivial arithmetic operation on an argument for which
3103 a constant value can be determined, return the result of operating
3104 on that value, as a constant. Otherwise, return X, possibly with
3105 one or more operands changed to a forward-propagated constant.
25910ca4 3106
a52b023a
PB
3107 If X is a register whose contents are known, we do NOT return
3108 those contents here; equiv_constant is called to perform that task.
3109 For SUBREGs and MEMs, we do that both here and in equiv_constant.
7afe21cc
RK
3110
3111 INSN is the insn that we may be modifying. If it is 0, make a copy
3112 of X before modifying it. */
3113
3114static rtx
20468884 3115fold_rtx (rtx x, rtx_insn *insn)
7afe21cc 3116{
b3694847 3117 enum rtx_code code;
ef4bddc2 3118 machine_mode mode;
b3694847
SS
3119 const char *fmt;
3120 int i;
32e9fa48 3121 rtx new_rtx = 0;
a52b023a 3122 int changed = 0;
7afe21cc 3123
a52b023a 3124 /* Operands of X. */
e54bd4ab
JJ
3125 /* Workaround -Wmaybe-uninitialized false positive during
3126 profiledbootstrap by initializing them. */
3127 rtx folded_arg0 = NULL_RTX;
3128 rtx folded_arg1 = NULL_RTX;
7afe21cc
RK
3129
3130 /* Constant equivalents of first three operands of X;
3131 0 when no such equivalent is known. */
3132 rtx const_arg0;
3133 rtx const_arg1;
3134 rtx const_arg2;
3135
3136 /* The mode of the first operand of X. We need this for sign and zero
3137 extends. */
ef4bddc2 3138 machine_mode mode_arg0;
7afe21cc
RK
3139
3140 if (x == 0)
3141 return x;
3142
a52b023a 3143 /* Try to perform some initial simplifications on X. */
7afe21cc
RK
3144 code = GET_CODE (x);
3145 switch (code)
3146 {
a52b023a
PB
3147 case MEM:
3148 case SUBREG:
5141ed42
JL
3149 /* The first operand of a SIGN/ZERO_EXTRACT has a different meaning
3150 than it would in other contexts. Basically its mode does not
3151 signify the size of the object read. That information is carried
3152 by size operand. If we happen to have a MEM of the appropriate
3153 mode in our tables with a constant value we could simplify the
3154 extraction incorrectly if we allowed substitution of that value
3155 for the MEM. */
3156 case ZERO_EXTRACT:
3157 case SIGN_EXTRACT:
32e9fa48
KG
3158 if ((new_rtx = equiv_constant (x)) != NULL_RTX)
3159 return new_rtx;
a52b023a
PB
3160 return x;
3161
7afe21cc 3162 case CONST:
d8116890 3163 CASE_CONST_ANY:
7afe21cc
RK
3164 case SYMBOL_REF:
3165 case LABEL_REF:
3166 case REG:
01aa1d43 3167 case PC:
7afe21cc
RK
3168 /* No use simplifying an EXPR_LIST
3169 since they are used only for lists of args
3170 in a function call's REG_EQUAL note. */
3171 case EXPR_LIST:
3172 return x;
3173
7afe21cc
RK
3174 case CC0:
3175 return prev_insn_cc0;
7afe21cc 3176
9255709c 3177 case ASM_OPERANDS:
6c667859
AB
3178 if (insn)
3179 {
3180 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3181 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3182 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3183 }
a52b023a
PB
3184 return x;
3185
3186#ifdef NO_FUNCTION_CSE
3187 case CALL:
3188 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3189 return x;
9255709c 3190 break;
a52b023a 3191#endif
278a83b2 3192
a52b023a 3193 /* Anything else goes through the loop below. */
e9a25f70
JL
3194 default:
3195 break;
7afe21cc
RK
3196 }
3197
a52b023a 3198 mode = GET_MODE (x);
7afe21cc
RK
3199 const_arg0 = 0;
3200 const_arg1 = 0;
3201 const_arg2 = 0;
3202 mode_arg0 = VOIDmode;
3203
3204 /* Try folding our operands.
3205 Then see which ones have constant values known. */
3206
3207 fmt = GET_RTX_FORMAT (code);
3208 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3209 if (fmt[i] == 'e')
3210 {
a52b023a 3211 rtx folded_arg = XEXP (x, i), const_arg;
ef4bddc2 3212 machine_mode mode_arg = GET_MODE (folded_arg);
7e7e28c7
AO
3213
3214 switch (GET_CODE (folded_arg))
3215 {
3216 case MEM:
3217 case REG:
3218 case SUBREG:
3219 const_arg = equiv_constant (folded_arg);
3220 break;
3221
3222 case CONST:
d8116890 3223 CASE_CONST_ANY:
7e7e28c7
AO
3224 case SYMBOL_REF:
3225 case LABEL_REF:
7e7e28c7
AO
3226 const_arg = folded_arg;
3227 break;
3228
7e7e28c7 3229 case CC0:
728acca0
MP
3230 /* The cc0-user and cc0-setter may be in different blocks if
3231 the cc0-setter potentially traps. In that case PREV_INSN_CC0
3232 will have been cleared as we exited the block with the
3233 setter.
3234
3235 While we could potentially track cc0 in this case, it just
3236 doesn't seem to be worth it given that cc0 targets are not
3237 terribly common or important these days and trapping math
3238 is rarely used. The combination of those two conditions
3239 necessary to trip this situation is exceedingly rare in the
3240 real world. */
3241 if (!prev_insn_cc0)
3242 {
3243 const_arg = NULL_RTX;
3244 }
3245 else
3246 {
3247 folded_arg = prev_insn_cc0;
3248 mode_arg = prev_insn_cc0_mode;
3249 const_arg = equiv_constant (folded_arg);
3250 }
7e7e28c7 3251 break;
7e7e28c7
AO
3252
3253 default:
3254 folded_arg = fold_rtx (folded_arg, insn);
3255 const_arg = equiv_constant (folded_arg);
3256 break;
3257 }
7afe21cc
RK
3258
3259 /* For the first three operands, see if the operand
3260 is constant or equivalent to a constant. */
3261 switch (i)
3262 {
3263 case 0:
3264 folded_arg0 = folded_arg;
3265 const_arg0 = const_arg;
3266 mode_arg0 = mode_arg;
3267 break;
3268 case 1:
3269 folded_arg1 = folded_arg;
3270 const_arg1 = const_arg;
3271 break;
3272 case 2:
3273 const_arg2 = const_arg;
3274 break;
3275 }
3276
a52b023a
PB
3277 /* Pick the least expensive of the argument and an equivalent constant
3278 argument. */
3279 if (const_arg != 0
3280 && const_arg != folded_arg
68f932c4 3281 && COST_IN (const_arg, code, i) <= COST_IN (folded_arg, code, i)
f2fa288f 3282
8cce3d04
RS
3283 /* It's not safe to substitute the operand of a conversion
3284 operator with a constant, as the conversion's identity
f652d14b 3285 depends upon the mode of its operand. This optimization
8cce3d04 3286 is handled by the call to simplify_unary_operation. */
a52b023a
PB
3287 && (GET_RTX_CLASS (code) != RTX_UNARY
3288 || GET_MODE (const_arg) == mode_arg0
3289 || (code != ZERO_EXTEND
3290 && code != SIGN_EXTEND
3291 && code != TRUNCATE
3292 && code != FLOAT_TRUNCATE
3293 && code != FLOAT_EXTEND
3294 && code != FLOAT
3295 && code != FIX
3296 && code != UNSIGNED_FLOAT
3297 && code != UNSIGNED_FIX)))
3298 folded_arg = const_arg;
3299
3300 if (folded_arg == XEXP (x, i))
3301 continue;
7afe21cc 3302
a52b023a
PB
3303 if (insn == NULL_RTX && !changed)
3304 x = copy_rtx (x);
3305 changed = 1;
b8b89e7c 3306 validate_unshare_change (insn, &XEXP (x, i), folded_arg, 1);
2d8b0f3a 3307 }
7afe21cc 3308
a52b023a 3309 if (changed)
7afe21cc 3310 {
a52b023a
PB
3311 /* Canonicalize X if necessary, and keep const_argN and folded_argN
3312 consistent with the order in X. */
3313 if (canonicalize_change_group (insn, x))
7afe21cc 3314 {
a52b023a
PB
3315 rtx tem;
3316 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3317 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
7afe21cc 3318 }
a52b023a
PB
3319
3320 apply_change_group ();
7afe21cc
RK
3321 }
3322
3323 /* If X is an arithmetic operation, see if we can simplify it. */
3324
3325 switch (GET_RTX_CLASS (code))
3326 {
ec8e098d 3327 case RTX_UNARY:
67a37737 3328 {
67a37737
RK
3329 /* We can't simplify extension ops unless we know the
3330 original mode. */
3331 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3332 && mode_arg0 == VOIDmode)
3333 break;
3334
32e9fa48 3335 new_rtx = simplify_unary_operation (code, mode,
696d76a5
MS
3336 const_arg0 ? const_arg0 : folded_arg0,
3337 mode_arg0);
67a37737 3338 }
7afe21cc 3339 break;
278a83b2 3340
ec8e098d
PB
3341 case RTX_COMPARE:
3342 case RTX_COMM_COMPARE:
7afe21cc
RK
3343 /* See what items are actually being compared and set FOLDED_ARG[01]
3344 to those values and CODE to the actual comparison code. If any are
3345 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3346 do anything if both operands are already known to be constant. */
3347
21e5076a
UB
3348 /* ??? Vector mode comparisons are not supported yet. */
3349 if (VECTOR_MODE_P (mode))
3350 break;
3351
7afe21cc
RK
3352 if (const_arg0 == 0 || const_arg1 == 0)
3353 {
3354 struct table_elt *p0, *p1;
9e6a14a4 3355 rtx true_rtx, false_rtx;
ef4bddc2 3356 machine_mode mode_arg1;
c610adec 3357
9b92bf04 3358 if (SCALAR_FLOAT_MODE_P (mode))
c610adec 3359 {
9e6a14a4 3360#ifdef FLOAT_STORE_FLAG_VALUE
d6edb99e 3361 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
68252e27 3362 (FLOAT_STORE_FLAG_VALUE (mode), mode));
9e6a14a4
L
3363#else
3364 true_rtx = NULL_RTX;
3365#endif
d6edb99e 3366 false_rtx = CONST0_RTX (mode);
c610adec 3367 }
9e6a14a4
L
3368 else
3369 {
3370 true_rtx = const_true_rtx;
3371 false_rtx = const0_rtx;
3372 }
7afe21cc 3373
13c9910f
RS
3374 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3375 &mode_arg0, &mode_arg1);
7afe21cc 3376
13c9910f
RS
3377 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3378 what kinds of things are being compared, so we can't do
3379 anything with this comparison. */
7afe21cc
RK
3380
3381 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3382 break;
3383
75335440
KH
3384 const_arg0 = equiv_constant (folded_arg0);
3385 const_arg1 = equiv_constant (folded_arg1);
3386
0f41302f
MS
3387 /* If we do not now have two constants being compared, see
3388 if we can nevertheless deduce some things about the
3389 comparison. */
7afe21cc
RK
3390 if (const_arg0 == 0 || const_arg1 == 0)
3391 {
08678f51
HPN
3392 if (const_arg1 != NULL)
3393 {
3394 rtx cheapest_simplification;
3395 int cheapest_cost;
3396 rtx simp_result;
3397 struct table_elt *p;
3398
3399 /* See if we can find an equivalent of folded_arg0
3400 that gets us a cheaper expression, possibly a
3401 constant through simplifications. */
3402 p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0),
3403 mode_arg0);
b8698a0f 3404
08678f51
HPN
3405 if (p != NULL)
3406 {
3407 cheapest_simplification = x;
3408 cheapest_cost = COST (x);
3409
3410 for (p = p->first_same_value; p != NULL; p = p->next_same_value)
3411 {
3412 int cost;
3413
3414 /* If the entry isn't valid, skip it. */
3415 if (! exp_equiv_p (p->exp, p->exp, 1, false))
3416 continue;
3417
3418 /* Try to simplify using this equivalence. */
3419 simp_result
3420 = simplify_relational_operation (code, mode,
3421 mode_arg0,
3422 p->exp,
3423 const_arg1);
3424
3425 if (simp_result == NULL)
3426 continue;
3427
3428 cost = COST (simp_result);
3429 if (cost < cheapest_cost)
3430 {
3431 cheapest_cost = cost;
3432 cheapest_simplification = simp_result;
3433 }
3434 }
3435
3436 /* If we have a cheaper expression now, use that
3437 and try folding it further, from the top. */
3438 if (cheapest_simplification != x)
7903b3e5
JH
3439 return fold_rtx (copy_rtx (cheapest_simplification),
3440 insn);
08678f51
HPN
3441 }
3442 }
3443
fd13313f
JH
3444 /* See if the two operands are the same. */
3445
39641489
PB
3446 if ((REG_P (folded_arg0)
3447 && REG_P (folded_arg1)
3448 && (REG_QTY (REGNO (folded_arg0))
3449 == REG_QTY (REGNO (folded_arg1))))
fd13313f 3450 || ((p0 = lookup (folded_arg0,
0516f6fe
SB
3451 SAFE_HASH (folded_arg0, mode_arg0),
3452 mode_arg0))
fd13313f 3453 && (p1 = lookup (folded_arg1,
0516f6fe
SB
3454 SAFE_HASH (folded_arg1, mode_arg0),
3455 mode_arg0))
fd13313f 3456 && p0->first_same_value == p1->first_same_value))
39641489 3457 folded_arg1 = folded_arg0;
7afe21cc
RK
3458
3459 /* If FOLDED_ARG0 is a register, see if the comparison we are
3460 doing now is either the same as we did before or the reverse
3461 (we only check the reverse if not floating-point). */
f8cfc6aa 3462 else if (REG_P (folded_arg0))
7afe21cc 3463 {
30f72379 3464 int qty = REG_QTY (REGNO (folded_arg0));
7afe21cc 3465
1bb98cec
DM
3466 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3467 {
3468 struct qty_table_elem *ent = &qty_table[qty];
3469
3470 if ((comparison_dominates_p (ent->comparison_code, code)
1eb8759b
RH
3471 || (! FLOAT_MODE_P (mode_arg0)
3472 && comparison_dominates_p (ent->comparison_code,
3473 reverse_condition (code))))
1bb98cec
DM
3474 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3475 || (const_arg1
3476 && rtx_equal_p (ent->comparison_const,
3477 const_arg1))
f8cfc6aa 3478 || (REG_P (folded_arg1)
1bb98cec 3479 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
9e6a14a4
L
3480 {
3481 if (comparison_dominates_p (ent->comparison_code, code))
3482 {
3483 if (true_rtx)
3484 return true_rtx;
3485 else
3486 break;
3487 }
3488 else
3489 return false_rtx;
3490 }
1bb98cec 3491 }
7afe21cc
RK
3492 }
3493 }
3494 }
3495
3496 /* If we are comparing against zero, see if the first operand is
3497 equivalent to an IOR with a constant. If so, we may be able to
3498 determine the result of this comparison. */
39641489 3499 if (const_arg1 == const0_rtx && !const_arg0)
7afe21cc
RK
3500 {
3501 rtx y = lookup_as_function (folded_arg0, IOR);
3502 rtx inner_const;
3503
3504 if (y != 0
3505 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
481683e1 3506 && CONST_INT_P (inner_const)
7afe21cc 3507 && INTVAL (inner_const) != 0)
39641489 3508 folded_arg0 = gen_rtx_IOR (mode_arg0, XEXP (y, 0), inner_const);
7afe21cc
RK
3509 }
3510
c6fb08ad 3511 {
bca3cc97
JJ
3512 rtx op0 = const_arg0 ? const_arg0 : copy_rtx (folded_arg0);
3513 rtx op1 = const_arg1 ? const_arg1 : copy_rtx (folded_arg1);
3514 new_rtx = simplify_relational_operation (code, mode, mode_arg0,
3515 op0, op1);
c6fb08ad 3516 }
7afe21cc
RK
3517 break;
3518
ec8e098d
PB
3519 case RTX_BIN_ARITH:
3520 case RTX_COMM_ARITH:
7afe21cc
RK
3521 switch (code)
3522 {
3523 case PLUS:
3524 /* If the second operand is a LABEL_REF, see if the first is a MINUS
3525 with that LABEL_REF as its second operand. If so, the result is
3526 the first operand of that MINUS. This handles switches with an
3527 ADDR_DIFF_VEC table. */
3528 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
3529 {
e650cbda
RK
3530 rtx y
3531 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
ddc356e8 3532 : lookup_as_function (folded_arg0, MINUS);
7afe21cc
RK
3533
3534 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
a827d9b1 3535 && LABEL_REF_LABEL (XEXP (y, 1)) == LABEL_REF_LABEL (const_arg1))
7afe21cc 3536 return XEXP (y, 0);
67a37737
RK
3537
3538 /* Now try for a CONST of a MINUS like the above. */
e650cbda
RK
3539 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
3540 : lookup_as_function (folded_arg0, CONST))) != 0
67a37737
RK
3541 && GET_CODE (XEXP (y, 0)) == MINUS
3542 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
a827d9b1 3543 && LABEL_REF_LABEL (XEXP (XEXP (y, 0), 1)) == LABEL_REF_LABEL (const_arg1))
67a37737 3544 return XEXP (XEXP (y, 0), 0);
7afe21cc 3545 }
c2cc0778 3546
e650cbda
RK
3547 /* Likewise if the operands are in the other order. */
3548 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
3549 {
3550 rtx y
3551 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
ddc356e8 3552 : lookup_as_function (folded_arg1, MINUS);
e650cbda
RK
3553
3554 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
a827d9b1 3555 && LABEL_REF_LABEL (XEXP (y, 1)) == LABEL_REF_LABEL (const_arg0))
e650cbda
RK
3556 return XEXP (y, 0);
3557
3558 /* Now try for a CONST of a MINUS like the above. */
3559 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
3560 : lookup_as_function (folded_arg1, CONST))) != 0
3561 && GET_CODE (XEXP (y, 0)) == MINUS
3562 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
a827d9b1 3563 && LABEL_REF_LABEL (XEXP (XEXP (y, 0), 1)) == LABEL_REF_LABEL (const_arg0))
e650cbda
RK
3564 return XEXP (XEXP (y, 0), 0);
3565 }
3566
c2cc0778
RK
3567 /* If second operand is a register equivalent to a negative
3568 CONST_INT, see if we can find a register equivalent to the
3569 positive constant. Make a MINUS if so. Don't do this for
5d595063 3570 a non-negative constant since we might then alternate between
a1f300c0 3571 choosing positive and negative constants. Having the positive
5d595063
RK
3572 constant previously-used is the more common case. Be sure
3573 the resulting constant is non-negative; if const_arg1 were
3574 the smallest negative number this would overflow: depending
3575 on the mode, this would either just be the same value (and
3576 hence not save anything) or be incorrect. */
481683e1 3577 if (const_arg1 != 0 && CONST_INT_P (const_arg1)
5d595063 3578 && INTVAL (const_arg1) < 0
4741f6ad
JL
3579 /* This used to test
3580
ddc356e8 3581 -INTVAL (const_arg1) >= 0
4741f6ad
JL
3582
3583 But The Sun V5.0 compilers mis-compiled that test. So
3584 instead we test for the problematic value in a more direct
3585 manner and hope the Sun compilers get it correct. */
5c45a8ac
KG
3586 && INTVAL (const_arg1) !=
3587 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
f8cfc6aa 3588 && REG_P (folded_arg1))
c2cc0778 3589 {
ddc356e8 3590 rtx new_const = GEN_INT (-INTVAL (const_arg1));
c2cc0778 3591 struct table_elt *p
0516f6fe 3592 = lookup (new_const, SAFE_HASH (new_const, mode), mode);
c2cc0778
RK
3593
3594 if (p)
3595 for (p = p->first_same_value; p; p = p->next_same_value)
f8cfc6aa 3596 if (REG_P (p->exp))
0cedb36c 3597 return simplify_gen_binary (MINUS, mode, folded_arg0,
20468884 3598 canon_reg (p->exp, NULL));
c2cc0778 3599 }
13c9910f
RS
3600 goto from_plus;
3601
3602 case MINUS:
3603 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
3604 If so, produce (PLUS Z C2-C). */
481683e1 3605 if (const_arg1 != 0 && CONST_INT_P (const_arg1))
13c9910f
RS
3606 {
3607 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
481683e1 3608 if (y && CONST_INT_P (XEXP (y, 1)))
0a81f074 3609 return fold_rtx (plus_constant (mode, copy_rtx (y),
f3becefd 3610 -INTVAL (const_arg1)),
20468884 3611 NULL);
13c9910f 3612 }
7afe21cc 3613
ddc356e8 3614 /* Fall through. */
7afe21cc 3615
13c9910f 3616 from_plus:
7afe21cc
RK
3617 case SMIN: case SMAX: case UMIN: case UMAX:
3618 case IOR: case AND: case XOR:
f930bfd0 3619 case MULT:
7afe21cc
RK
3620 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
3621 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
3622 is known to be of similar form, we may be able to replace the
3623 operation with a combined operation. This may eliminate the
3624 intermediate operation if every use is simplified in this way.
3625 Note that the similar optimization done by combine.c only works
3626 if the intermediate operation's result has only one reference. */
3627
f8cfc6aa 3628 if (REG_P (folded_arg0)
481683e1 3629 && const_arg1 && CONST_INT_P (const_arg1))
7afe21cc
RK
3630 {
3631 int is_shift
3632 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5bb51e1d 3633 rtx y, inner_const, new_const;
39b2ac74 3634 rtx canon_const_arg1 = const_arg1;
7afe21cc 3635 enum rtx_code associate_code;
7afe21cc 3636
824a4527 3637 if (is_shift
5511bc5a 3638 && (INTVAL (const_arg1) >= GET_MODE_PRECISION (mode)
824a4527
JDA
3639 || INTVAL (const_arg1) < 0))
3640 {
3641 if (SHIFT_COUNT_TRUNCATED)
39b2ac74
JJ
3642 canon_const_arg1 = GEN_INT (INTVAL (const_arg1)
3643 & (GET_MODE_BITSIZE (mode)
3644 - 1));
824a4527
JDA
3645 else
3646 break;
3647 }
3648
5bb51e1d 3649 y = lookup_as_function (folded_arg0, code);
824a4527
JDA
3650 if (y == 0)
3651 break;
824a4527
JDA
3652
3653 /* If we have compiled a statement like
3654 "if (x == (x & mask1))", and now are looking at
3655 "x & mask2", we will have a case where the first operand
3656 of Y is the same as our first operand. Unless we detect
3657 this case, an infinite loop will result. */
3658 if (XEXP (y, 0) == folded_arg0)
7afe21cc
RK
3659 break;
3660
5bb51e1d 3661 inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
481683e1 3662 if (!inner_const || !CONST_INT_P (inner_const))
5bb51e1d
EB
3663 break;
3664
7afe21cc
RK
3665 /* Don't associate these operations if they are a PLUS with the
3666 same constant and it is a power of two. These might be doable
3667 with a pre- or post-increment. Similarly for two subtracts of
3668 identical powers of two with post decrement. */
3669
213d5fbc 3670 if (code == PLUS && const_arg1 == inner_const
940da324
JL
3671 && ((HAVE_PRE_INCREMENT
3672 && exact_log2 (INTVAL (const_arg1)) >= 0)
3673 || (HAVE_POST_INCREMENT
3674 && exact_log2 (INTVAL (const_arg1)) >= 0)
3675 || (HAVE_PRE_DECREMENT
3676 && exact_log2 (- INTVAL (const_arg1)) >= 0)
3677 || (HAVE_POST_DECREMENT
3678 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
7afe21cc
RK
3679 break;
3680
88057dc8
UB
3681 /* ??? Vector mode shifts by scalar
3682 shift operand are not supported yet. */
3683 if (is_shift && VECTOR_MODE_P (mode))
3684 break;
3685
824a4527 3686 if (is_shift
5511bc5a 3687 && (INTVAL (inner_const) >= GET_MODE_PRECISION (mode)
824a4527
JDA
3688 || INTVAL (inner_const) < 0))
3689 {
3690 if (SHIFT_COUNT_TRUNCATED)
3691 inner_const = GEN_INT (INTVAL (inner_const)
3692 & (GET_MODE_BITSIZE (mode) - 1));
3693 else
3694 break;
3695 }
3696
7afe21cc 3697 /* Compute the code used to compose the constants. For example,
f930bfd0 3698 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
7afe21cc 3699
f930bfd0 3700 associate_code = (is_shift || code == MINUS ? PLUS : code);
7afe21cc
RK
3701
3702 new_const = simplify_binary_operation (associate_code, mode,
39b2ac74
JJ
3703 canon_const_arg1,
3704 inner_const);
7afe21cc
RK
3705
3706 if (new_const == 0)
3707 break;
3708
3709 /* If we are associating shift operations, don't let this
4908e508
RS
3710 produce a shift of the size of the object or larger.
3711 This could occur when we follow a sign-extend by a right
3712 shift on a machine that does a sign-extend as a pair
3713 of shifts. */
7afe21cc 3714
824a4527 3715 if (is_shift
481683e1 3716 && CONST_INT_P (new_const)
5511bc5a 3717 && INTVAL (new_const) >= GET_MODE_PRECISION (mode))
4908e508
RS
3718 {
3719 /* As an exception, we can turn an ASHIFTRT of this
3720 form into a shift of the number of bits - 1. */
3721 if (code == ASHIFTRT)
3722 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
824a4527
JDA
3723 else if (!side_effects_p (XEXP (y, 0)))
3724 return CONST0_RTX (mode);
4908e508
RS
3725 else
3726 break;
3727 }
7afe21cc
RK
3728
3729 y = copy_rtx (XEXP (y, 0));
3730
3731 /* If Y contains our first operand (the most common way this
3732 can happen is if Y is a MEM), we would do into an infinite
3733 loop if we tried to fold it. So don't in that case. */
3734
3735 if (! reg_mentioned_p (folded_arg0, y))
3736 y = fold_rtx (y, insn);
3737
0cedb36c 3738 return simplify_gen_binary (code, mode, y, new_const);
7afe21cc 3739 }
e9a25f70
JL
3740 break;
3741
f930bfd0
JW
3742 case DIV: case UDIV:
3743 /* ??? The associative optimization performed immediately above is
3744 also possible for DIV and UDIV using associate_code of MULT.
3745 However, we would need extra code to verify that the
3746 multiplication does not overflow, that is, there is no overflow
3747 in the calculation of new_const. */
3748 break;
3749
e9a25f70
JL
3750 default:
3751 break;
7afe21cc
RK
3752 }
3753
32e9fa48 3754 new_rtx = simplify_binary_operation (code, mode,
7afe21cc
RK
3755 const_arg0 ? const_arg0 : folded_arg0,
3756 const_arg1 ? const_arg1 : folded_arg1);
3757 break;
3758
ec8e098d 3759 case RTX_OBJ:
7afe21cc
RK
3760 /* (lo_sum (high X) X) is simply X. */
3761 if (code == LO_SUM && const_arg0 != 0
3762 && GET_CODE (const_arg0) == HIGH
3763 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
3764 return const_arg1;
3765 break;
3766
ec8e098d
PB
3767 case RTX_TERNARY:
3768 case RTX_BITFIELD_OPS:
32e9fa48 3769 new_rtx = simplify_ternary_operation (code, mode, mode_arg0,
7afe21cc
RK
3770 const_arg0 ? const_arg0 : folded_arg0,
3771 const_arg1 ? const_arg1 : folded_arg1,
3772 const_arg2 ? const_arg2 : XEXP (x, 2));
3773 break;
ee5332b8 3774
ec8e098d
PB
3775 default:
3776 break;
7afe21cc
RK
3777 }
3778
32e9fa48 3779 return new_rtx ? new_rtx : x;
7afe21cc
RK
3780}
3781\f
3782/* Return a constant value currently equivalent to X.
3783 Return 0 if we don't know one. */
3784
3785static rtx
7080f735 3786equiv_constant (rtx x)
7afe21cc 3787{
f8cfc6aa 3788 if (REG_P (x)
1bb98cec
DM
3789 && REGNO_QTY_VALID_P (REGNO (x)))
3790 {
3791 int x_q = REG_QTY (REGNO (x));
3792 struct qty_table_elem *x_ent = &qty_table[x_q];
3793
3794 if (x_ent->const_rtx)
4de249d9 3795 x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
1bb98cec 3796 }
7afe21cc 3797
2ce5e1b4 3798 if (x == 0 || CONSTANT_P (x))
7afe21cc
RK
3799 return x;
3800
a52b023a
PB
3801 if (GET_CODE (x) == SUBREG)
3802 {
ef4bddc2
RS
3803 machine_mode mode = GET_MODE (x);
3804 machine_mode imode = GET_MODE (SUBREG_REG (x));
32e9fa48 3805 rtx new_rtx;
a52b023a
PB
3806
3807 /* See if we previously assigned a constant value to this SUBREG. */
32e9fa48 3808 if ((new_rtx = lookup_as_function (x, CONST_INT)) != 0
807e902e 3809 || (new_rtx = lookup_as_function (x, CONST_WIDE_INT)) != 0
32e9fa48
KG
3810 || (new_rtx = lookup_as_function (x, CONST_DOUBLE)) != 0
3811 || (new_rtx = lookup_as_function (x, CONST_FIXED)) != 0)
3812 return new_rtx;
a52b023a 3813
f5f8d79d
EB
3814 /* If we didn't and if doing so makes sense, see if we previously
3815 assigned a constant value to the enclosing word mode SUBREG. */
3816 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode)
3817 && GET_MODE_SIZE (word_mode) < GET_MODE_SIZE (imode))
3818 {
3819 int byte = SUBREG_BYTE (x) - subreg_lowpart_offset (mode, word_mode);
3820 if (byte >= 0 && (byte % UNITS_PER_WORD) == 0)
3821 {
3822 rtx y = gen_rtx_SUBREG (word_mode, SUBREG_REG (x), byte);
3823 new_rtx = lookup_as_function (y, CONST_INT);
3824 if (new_rtx)
3825 return gen_lowpart (mode, new_rtx);
3826 }
3827 }
3828
7cb6668a
MI
3829 /* Otherwise see if we already have a constant for the inner REG,
3830 and if that is enough to calculate an equivalent constant for
3831 the subreg. Note that the upper bits of paradoxical subregs
3832 are undefined, so they cannot be said to equal anything. */
a52b023a 3833 if (REG_P (SUBREG_REG (x))
7cb6668a 3834 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (imode)
32e9fa48 3835 && (new_rtx = equiv_constant (SUBREG_REG (x))) != 0)
f5f8d79d 3836 return simplify_subreg (mode, new_rtx, imode, SUBREG_BYTE (x));
a52b023a
PB
3837
3838 return 0;
3839 }
3840
3841 /* If X is a MEM, see if it is a constant-pool reference, or look it up in
3842 the hash table in case its value was seen before. */
fc3ffe83 3843
3c0cb5de 3844 if (MEM_P (x))
fc3ffe83
RK
3845 {
3846 struct table_elt *elt;
3847
a52b023a 3848 x = avoid_constant_pool_reference (x);
fc3ffe83
RK
3849 if (CONSTANT_P (x))
3850 return x;
3851
0516f6fe 3852 elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
fc3ffe83
RK
3853 if (elt == 0)
3854 return 0;
3855
3856 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3857 if (elt->is_const && CONSTANT_P (elt->exp))
3858 return elt->exp;
3859 }
3860
7afe21cc
RK
3861 return 0;
3862}
3863\f
0129d079
SB
3864/* Given INSN, a jump insn, TAKEN indicates if we are following the
3865 "taken" branch.
7afe21cc
RK
3866
3867 In certain cases, this can cause us to add an equivalence. For example,
278a83b2 3868 if we are following the taken case of
7080f735 3869 if (i == 2)
7afe21cc
RK
3870 we can add the fact that `i' and '2' are now equivalent.
3871
3872 In any case, we can record that this comparison was passed. If the same
3873 comparison is seen later, we will know its value. */
3874
3875static void
20468884 3876record_jump_equiv (rtx_insn *insn, bool taken)
7afe21cc
RK
3877{
3878 int cond_known_true;
3879 rtx op0, op1;
7f1c097d 3880 rtx set;
ef4bddc2 3881 machine_mode mode, mode0, mode1;
7afe21cc
RK
3882 int reversed_nonequality = 0;
3883 enum rtx_code code;
3884
3885 /* Ensure this is the right kind of insn. */
0129d079
SB
3886 gcc_assert (any_condjump_p (insn));
3887
7f1c097d 3888 set = pc_set (insn);
7afe21cc
RK
3889
3890 /* See if this jump condition is known true or false. */
3891 if (taken)
7f1c097d 3892 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
7afe21cc 3893 else
7f1c097d 3894 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
7afe21cc
RK
3895
3896 /* Get the type of comparison being done and the operands being compared.
3897 If we had to reverse a non-equality condition, record that fact so we
3898 know that it isn't valid for floating-point. */
7f1c097d
JH
3899 code = GET_CODE (XEXP (SET_SRC (set), 0));
3900 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
3901 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
7afe21cc 3902
13c9910f 3903 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
7afe21cc
RK
3904 if (! cond_known_true)
3905 {
261efdef 3906 code = reversed_comparison_code_parts (code, op0, op1, insn);
1eb8759b
RH
3907
3908 /* Don't remember if we can't find the inverse. */
3909 if (code == UNKNOWN)
3910 return;
7afe21cc
RK
3911 }
3912
3913 /* The mode is the mode of the non-constant. */
13c9910f
RS
3914 mode = mode0;
3915 if (mode1 != VOIDmode)
3916 mode = mode1;
7afe21cc
RK
3917
3918 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
3919}
3920
794693c0
RH
3921/* Yet another form of subreg creation. In this case, we want something in
3922 MODE, and we should assume OP has MODE iff it is naturally modeless. */
3923
3924static rtx
ef4bddc2 3925record_jump_cond_subreg (machine_mode mode, rtx op)
794693c0 3926{
ef4bddc2 3927 machine_mode op_mode = GET_MODE (op);
794693c0
RH
3928 if (op_mode == mode || op_mode == VOIDmode)
3929 return op;
3930 return lowpart_subreg (mode, op, op_mode);
3931}
3932
7afe21cc
RK
3933/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
3934 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
3935 Make any useful entries we can with that information. Called from
3936 above function and called recursively. */
3937
3938static void
ef4bddc2 3939record_jump_cond (enum rtx_code code, machine_mode mode, rtx op0,
7080f735 3940 rtx op1, int reversed_nonequality)
7afe21cc 3941{
2197a88a 3942 unsigned op0_hash, op1_hash;
e428d738 3943 int op0_in_memory, op1_in_memory;
7afe21cc
RK
3944 struct table_elt *op0_elt, *op1_elt;
3945
3946 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
3947 we know that they are also equal in the smaller mode (this is also
3948 true for all smaller modes whether or not there is a SUBREG, but
ac7ef8d5 3949 is not worth testing for with no SUBREG). */
7afe21cc 3950
2e794ee8 3951 /* Note that GET_MODE (op0) may not equal MODE. */
6a4bdc79 3952 if (code == EQ && paradoxical_subreg_p (op0))
7afe21cc 3953 {
ef4bddc2 3954 machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
794693c0
RH
3955 rtx tem = record_jump_cond_subreg (inner_mode, op1);
3956 if (tem)
3957 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
3958 reversed_nonequality);
7afe21cc
RK
3959 }
3960
6a4bdc79 3961 if (code == EQ && paradoxical_subreg_p (op1))
7afe21cc 3962 {
ef4bddc2 3963 machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
794693c0
RH
3964 rtx tem = record_jump_cond_subreg (inner_mode, op0);
3965 if (tem)
3966 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
3967 reversed_nonequality);
7afe21cc
RK
3968 }
3969
278a83b2 3970 /* Similarly, if this is an NE comparison, and either is a SUBREG
7afe21cc
RK
3971 making a smaller mode, we know the whole thing is also NE. */
3972
2e794ee8
RS
3973 /* Note that GET_MODE (op0) may not equal MODE;
3974 if we test MODE instead, we can get an infinite recursion
3975 alternating between two modes each wider than MODE. */
3976
7afe21cc
RK
3977 if (code == NE && GET_CODE (op0) == SUBREG
3978 && subreg_lowpart_p (op0)
2e794ee8
RS
3979 && (GET_MODE_SIZE (GET_MODE (op0))
3980 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
7afe21cc 3981 {
ef4bddc2 3982 machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
794693c0
RH
3983 rtx tem = record_jump_cond_subreg (inner_mode, op1);
3984 if (tem)
3985 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
3986 reversed_nonequality);
7afe21cc
RK
3987 }
3988
3989 if (code == NE && GET_CODE (op1) == SUBREG
3990 && subreg_lowpart_p (op1)
2e794ee8
RS
3991 && (GET_MODE_SIZE (GET_MODE (op1))
3992 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
7afe21cc 3993 {
ef4bddc2 3994 machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
794693c0
RH
3995 rtx tem = record_jump_cond_subreg (inner_mode, op0);
3996 if (tem)
3997 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
3998 reversed_nonequality);
7afe21cc
RK
3999 }
4000
4001 /* Hash both operands. */
4002
4003 do_not_record = 0;
4004 hash_arg_in_memory = 0;
2197a88a 4005 op0_hash = HASH (op0, mode);
7afe21cc 4006 op0_in_memory = hash_arg_in_memory;
7afe21cc
RK
4007
4008 if (do_not_record)
4009 return;
4010
4011 do_not_record = 0;
4012 hash_arg_in_memory = 0;
2197a88a 4013 op1_hash = HASH (op1, mode);
7afe21cc 4014 op1_in_memory = hash_arg_in_memory;
278a83b2 4015
7afe21cc
RK
4016 if (do_not_record)
4017 return;
4018
4019 /* Look up both operands. */
2197a88a
RK
4020 op0_elt = lookup (op0, op0_hash, mode);
4021 op1_elt = lookup (op1, op1_hash, mode);
7afe21cc 4022
af3869c1
RK
4023 /* If both operands are already equivalent or if they are not in the
4024 table but are identical, do nothing. */
4025 if ((op0_elt != 0 && op1_elt != 0
4026 && op0_elt->first_same_value == op1_elt->first_same_value)
4027 || op0 == op1 || rtx_equal_p (op0, op1))
4028 return;
4029
7afe21cc 4030 /* If we aren't setting two things equal all we can do is save this
b2796a4b
RK
4031 comparison. Similarly if this is floating-point. In the latter
4032 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4033 If we record the equality, we might inadvertently delete code
4034 whose intent was to change -0 to +0. */
4035
cbf6a543 4036 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
7afe21cc 4037 {
1bb98cec
DM
4038 struct qty_table_elem *ent;
4039 int qty;
4040
7afe21cc
RK
4041 /* If we reversed a floating-point comparison, if OP0 is not a
4042 register, or if OP1 is neither a register or constant, we can't
4043 do anything. */
4044
f8cfc6aa 4045 if (!REG_P (op1))
7afe21cc
RK
4046 op1 = equiv_constant (op1);
4047
cbf6a543 4048 if ((reversed_nonequality && FLOAT_MODE_P (mode))
f8cfc6aa 4049 || !REG_P (op0) || op1 == 0)
7afe21cc
RK
4050 return;
4051
4052 /* Put OP0 in the hash table if it isn't already. This gives it a
4053 new quantity number. */
4054 if (op0_elt == 0)
4055 {
9714cf43 4056 if (insert_regs (op0, NULL, 0))
7afe21cc
RK
4057 {
4058 rehash_using_reg (op0);
2197a88a 4059 op0_hash = HASH (op0, mode);
2bb81c86
RK
4060
4061 /* If OP0 is contained in OP1, this changes its hash code
4062 as well. Faster to rehash than to check, except
4063 for the simple case of a constant. */
4064 if (! CONSTANT_P (op1))
2197a88a 4065 op1_hash = HASH (op1,mode);
7afe21cc
RK
4066 }
4067
9714cf43 4068 op0_elt = insert (op0, NULL, op0_hash, mode);
7afe21cc 4069 op0_elt->in_memory = op0_in_memory;
7afe21cc
RK
4070 }
4071
1bb98cec
DM
4072 qty = REG_QTY (REGNO (op0));
4073 ent = &qty_table[qty];
4074
4075 ent->comparison_code = code;
f8cfc6aa 4076 if (REG_P (op1))
7afe21cc 4077 {
5d5ea909 4078 /* Look it up again--in case op0 and op1 are the same. */
2197a88a 4079 op1_elt = lookup (op1, op1_hash, mode);
5d5ea909 4080
7afe21cc
RK
4081 /* Put OP1 in the hash table so it gets a new quantity number. */
4082 if (op1_elt == 0)
4083 {
9714cf43 4084 if (insert_regs (op1, NULL, 0))
7afe21cc
RK
4085 {
4086 rehash_using_reg (op1);
2197a88a 4087 op1_hash = HASH (op1, mode);
7afe21cc
RK
4088 }
4089
9714cf43 4090 op1_elt = insert (op1, NULL, op1_hash, mode);
7afe21cc 4091 op1_elt->in_memory = op1_in_memory;
7afe21cc
RK
4092 }
4093
1bb98cec
DM
4094 ent->comparison_const = NULL_RTX;
4095 ent->comparison_qty = REG_QTY (REGNO (op1));
7afe21cc
RK
4096 }
4097 else
4098 {
1bb98cec
DM
4099 ent->comparison_const = op1;
4100 ent->comparison_qty = -1;
7afe21cc
RK
4101 }
4102
4103 return;
4104 }
4105
eb5ad42a
RS
4106 /* If either side is still missing an equivalence, make it now,
4107 then merge the equivalences. */
7afe21cc 4108
7afe21cc
RK
4109 if (op0_elt == 0)
4110 {
9714cf43 4111 if (insert_regs (op0, NULL, 0))
7afe21cc
RK
4112 {
4113 rehash_using_reg (op0);
2197a88a 4114 op0_hash = HASH (op0, mode);
7afe21cc
RK
4115 }
4116
9714cf43 4117 op0_elt = insert (op0, NULL, op0_hash, mode);
7afe21cc 4118 op0_elt->in_memory = op0_in_memory;
7afe21cc
RK
4119 }
4120
4121 if (op1_elt == 0)
4122 {
9714cf43 4123 if (insert_regs (op1, NULL, 0))
7afe21cc
RK
4124 {
4125 rehash_using_reg (op1);
2197a88a 4126 op1_hash = HASH (op1, mode);
7afe21cc
RK
4127 }
4128
9714cf43 4129 op1_elt = insert (op1, NULL, op1_hash, mode);
7afe21cc 4130 op1_elt->in_memory = op1_in_memory;
7afe21cc 4131 }
eb5ad42a
RS
4132
4133 merge_equiv_classes (op0_elt, op1_elt);
7afe21cc
RK
4134}
4135\f
4136/* CSE processing for one instruction.
7b02f4e0
SB
4137
4138 Most "true" common subexpressions are mostly optimized away in GIMPLE,
4139 but the few that "leak through" are cleaned up by cse_insn, and complex
4140 addressing modes are often formed here.
4141
4142 The main function is cse_insn, and between here and that function
4143 a couple of helper functions is defined to keep the size of cse_insn
4144 within reasonable proportions.
4145
4146 Data is shared between the main and helper functions via STRUCT SET,
4147 that contains all data related for every set in the instruction that
4148 is being processed.
4149
4150 Note that cse_main processes all sets in the instruction. Most
4151 passes in GCC only process simple SET insns or single_set insns, but
4152 CSE processes insns with multiple sets as well. */
7afe21cc
RK
4153
4154/* Data on one SET contained in the instruction. */
4155
4156struct set
4157{
4158 /* The SET rtx itself. */
4159 rtx rtl;
4160 /* The SET_SRC of the rtx (the original value, if it is changing). */
4161 rtx src;
4162 /* The hash-table element for the SET_SRC of the SET. */
4163 struct table_elt *src_elt;
2197a88a
RK
4164 /* Hash value for the SET_SRC. */
4165 unsigned src_hash;
4166 /* Hash value for the SET_DEST. */
4167 unsigned dest_hash;
7afe21cc
RK
4168 /* The SET_DEST, with SUBREG, etc., stripped. */
4169 rtx inner_dest;
278a83b2 4170 /* Nonzero if the SET_SRC is in memory. */
7afe21cc 4171 char src_in_memory;
7afe21cc
RK
4172 /* Nonzero if the SET_SRC contains something
4173 whose value cannot be predicted and understood. */
4174 char src_volatile;
496324d0
DN
4175 /* Original machine mode, in case it becomes a CONST_INT.
4176 The size of this field should match the size of the mode
4177 field of struct rtx_def (see rtl.h). */
4178 ENUM_BITFIELD(machine_mode) mode : 8;
7afe21cc
RK
4179 /* A constant equivalent for SET_SRC, if any. */
4180 rtx src_const;
2197a88a
RK
4181 /* Hash value of constant equivalent for SET_SRC. */
4182 unsigned src_const_hash;
7afe21cc
RK
4183 /* Table entry for constant equivalent for SET_SRC, if any. */
4184 struct table_elt *src_const_elt;
05c433f3
PB
4185 /* Table entry for the destination address. */
4186 struct table_elt *dest_addr_elt;
7afe21cc 4187};
7b02f4e0
SB
4188\f
4189/* Special handling for (set REG0 REG1) where REG0 is the
4190 "cheapest", cheaper than REG1. After cse, REG1 will probably not
4191 be used in the sequel, so (if easily done) change this insn to
4192 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
4193 that computed their value. Then REG1 will become a dead store
4194 and won't cloud the situation for later optimizations.
4195
4196 Do not make this change if REG1 is a hard register, because it will
4197 then be used in the sequel and we may be changing a two-operand insn
4198 into a three-operand insn.
4199
4200 This is the last transformation that cse_insn will try to do. */
7afe21cc
RK
4201
4202static void
20468884 4203try_back_substitute_reg (rtx set, rtx_insn *insn)
7afe21cc 4204{
7b02f4e0
SB
4205 rtx dest = SET_DEST (set);
4206 rtx src = SET_SRC (set);
7afe21cc 4207
7b02f4e0
SB
4208 if (REG_P (dest)
4209 && REG_P (src) && ! HARD_REGISTER_P (src)
4210 && REGNO_QTY_VALID_P (REGNO (src)))
4211 {
4212 int src_q = REG_QTY (REGNO (src));
4213 struct qty_table_elem *src_ent = &qty_table[src_q];
7afe21cc 4214
7b02f4e0
SB
4215 if (src_ent->first_reg == REGNO (dest))
4216 {
4217 /* Scan for the previous nonnote insn, but stop at a basic
4218 block boundary. */
20468884
DM
4219 rtx_insn *prev = insn;
4220 rtx_insn *bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
7b02f4e0
SB
4221 do
4222 {
4223 prev = PREV_INSN (prev);
4224 }
4225 while (prev != bb_head && (NOTE_P (prev) || DEBUG_INSN_P (prev)));
7afe21cc 4226
7b02f4e0
SB
4227 /* Do not swap the registers around if the previous instruction
4228 attaches a REG_EQUIV note to REG1.
7afe21cc 4229
7b02f4e0
SB
4230 ??? It's not entirely clear whether we can transfer a REG_EQUIV
4231 from the pseudo that originally shadowed an incoming argument
4232 to another register. Some uses of REG_EQUIV might rely on it
4233 being attached to REG1 rather than REG2.
7afe21cc 4234
7b02f4e0
SB
4235 This section previously turned the REG_EQUIV into a REG_EQUAL
4236 note. We cannot do that because REG_EQUIV may provide an
4237 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
4238 if (NONJUMP_INSN_P (prev)
4239 && GET_CODE (PATTERN (prev)) == SET
4240 && SET_DEST (PATTERN (prev)) == src
4241 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
4242 {
4243 rtx note;
4244
4245 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
4246 validate_change (insn, &SET_DEST (set), src, 1);
4247 validate_change (insn, &SET_SRC (set), dest, 1);
4248 apply_change_group ();
4249
4250 /* If INSN has a REG_EQUAL note, and this note mentions
4251 REG0, then we must delete it, because the value in
4252 REG0 has changed. If the note's value is REG1, we must
4253 also delete it because that is now this insn's dest. */
4254 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
4255 if (note != 0
4256 && (reg_mentioned_p (dest, XEXP (note, 0))
4257 || rtx_equal_p (src, XEXP (note, 0))))
4258 remove_note (insn, note);
4259 }
f474c6f8 4260 }
f1e7c95f 4261 }
7b02f4e0
SB
4262}
4263\f
4264/* Record all the SETs in this instruction into SETS_PTR,
4265 and return the number of recorded sets. */
4266static int
20468884 4267find_sets_in_insn (rtx_insn *insn, struct set **psets)
7b02f4e0
SB
4268{
4269 struct set *sets = *psets;
4270 int n_sets = 0;
4271 rtx x = PATTERN (insn);
f1e7c95f 4272
7afe21cc
RK
4273 if (GET_CODE (x) == SET)
4274 {
7afe21cc
RK
4275 /* Ignore SETs that are unconditional jumps.
4276 They never need cse processing, so this does not hurt.
4277 The reason is not efficiency but rather
4278 so that we can test at the end for instructions
4279 that have been simplified to unconditional jumps
4280 and not be misled by unchanged instructions
4281 that were unconditional jumps to begin with. */
4282 if (SET_DEST (x) == pc_rtx
4283 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4284 ;
7afe21cc
RK
4285 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4286 The hard function value register is used only once, to copy to
7b02f4e0 4287 someplace else, so it isn't worth cse'ing. */
7afe21cc 4288 else if (GET_CODE (SET_SRC (x)) == CALL)
7b02f4e0 4289 ;
7afe21cc 4290 else
7b02f4e0 4291 sets[n_sets++].rtl = x;
7afe21cc
RK
4292 }
4293 else if (GET_CODE (x) == PARALLEL)
4294 {
7b02f4e0 4295 int i, lim = XVECLEN (x, 0);
278a83b2 4296
6c4d60f8 4297 /* Go over the expressions of the PARALLEL in forward order, to
7b02f4e0 4298 put them in the same order in the SETS array. */
7afe21cc
RK
4299 for (i = 0; i < lim; i++)
4300 {
b3694847 4301 rtx y = XVECEXP (x, 0, i);
7afe21cc
RK
4302 if (GET_CODE (y) == SET)
4303 {
7722328e
RK
4304 /* As above, we ignore unconditional jumps and call-insns and
4305 ignore the result of apply_change_group. */
7b02f4e0
SB
4306 if (SET_DEST (y) == pc_rtx
4307 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4308 ;
4309 else if (GET_CODE (SET_SRC (y)) == CALL)
7afe21cc
RK
4310 ;
4311 else
4312 sets[n_sets++].rtl = y;
4313 }
7afe21cc
RK
4314 }
4315 }
7b02f4e0
SB
4316
4317 return n_sets;
4318}
4319\f
4320/* Where possible, substitute every register reference in the N_SETS
4321 number of SETS in INSN with the the canonical register.
4322
4323 Register canonicalization propagatest the earliest register (i.e.
4324 one that is set before INSN) with the same value. This is a very
4325 useful, simple form of CSE, to clean up warts from expanding GIMPLE
4326 to RTL. For instance, a CONST for an address is usually expanded
4327 multiple times to loads into different registers, thus creating many
4328 subexpressions of the form:
4329
4330 (set (reg1) (some_const))
4331 (set (mem (... reg1 ...) (thing)))
4332 (set (reg2) (some_const))
4333 (set (mem (... reg2 ...) (thing)))
4334
4335 After canonicalizing, the code takes the following form:
4336
4337 (set (reg1) (some_const))
4338 (set (mem (... reg1 ...) (thing)))
4339 (set (reg2) (some_const))
4340 (set (mem (... reg1 ...) (thing)))
4341
4342 The set to reg2 is now trivially dead, and the memory reference (or
4343 address, or whatever) may be a candidate for further CSEing.
4344
4345 In this function, the result of apply_change_group can be ignored;
4346 see canon_reg. */
4347
4348static void
20468884 4349canonicalize_insn (rtx_insn *insn, struct set **psets, int n_sets)
7b02f4e0
SB
4350{
4351 struct set *sets = *psets;
4352 rtx tem;
4353 rtx x = PATTERN (insn);
4354 int i;
4355
4356 if (CALL_P (insn))
4357 {
4358 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
e384e6b5
BS
4359 if (GET_CODE (XEXP (tem, 0)) != SET)
4360 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
7b02f4e0
SB
4361 }
4362
4363 if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
4364 {
4365 canon_reg (SET_SRC (x), insn);
4366 apply_change_group ();
4367 fold_rtx (SET_SRC (x), insn);
4368 }
7afe21cc
RK
4369 else if (GET_CODE (x) == CLOBBER)
4370 {
7b02f4e0
SB
4371 /* If we clobber memory, canon the address.
4372 This does nothing when a register is clobbered
4373 because we have already invalidated the reg. */
3c0cb5de 4374 if (MEM_P (XEXP (x, 0)))
6fb5fa3c 4375 canon_reg (XEXP (x, 0), insn);
7afe21cc 4376 }
7afe21cc 4377 else if (GET_CODE (x) == USE
f8cfc6aa 4378 && ! (REG_P (XEXP (x, 0))
7afe21cc 4379 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
7b02f4e0 4380 /* Canonicalize a USE of a pseudo register or memory location. */
b1ba284c
EB
4381 canon_reg (x, insn);
4382 else if (GET_CODE (x) == ASM_OPERANDS)
4383 {
4384 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
4385 {
4386 rtx input = ASM_OPERANDS_INPUT (x, i);
4387 if (!(REG_P (input) && REGNO (input) < FIRST_PSEUDO_REGISTER))
4388 {
4389 input = canon_reg (input, insn);
4390 validate_change (insn, &ASM_OPERANDS_INPUT (x, i), input, 1);
4391 }
4392 }
4393 }
7afe21cc
RK
4394 else if (GET_CODE (x) == CALL)
4395 {
4396 canon_reg (x, insn);
77fa0940 4397 apply_change_group ();
7afe21cc
RK
4398 fold_rtx (x, insn);
4399 }
b5b8b0ac
AO
4400 else if (DEBUG_INSN_P (insn))
4401 canon_reg (PATTERN (insn), insn);
7b02f4e0
SB
4402 else if (GET_CODE (x) == PARALLEL)
4403 {
4404 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4405 {
4406 rtx y = XVECEXP (x, 0, i);
4407 if (GET_CODE (y) == SET && GET_CODE (SET_SRC (y)) == CALL)
4408 {
4409 canon_reg (SET_SRC (y), insn);
4410 apply_change_group ();
4411 fold_rtx (SET_SRC (y), insn);
4412 }
4413 else if (GET_CODE (y) == CLOBBER)
4414 {
4415 if (MEM_P (XEXP (y, 0)))
4416 canon_reg (XEXP (y, 0), insn);
4417 }
4418 else if (GET_CODE (y) == USE
4419 && ! (REG_P (XEXP (y, 0))
4420 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4421 canon_reg (y, insn);
4422 else if (GET_CODE (y) == CALL)
4423 {
4424 canon_reg (y, insn);
4425 apply_change_group ();
4426 fold_rtx (y, insn);
4427 }
4428 }
4429 }
7afe21cc 4430
92f9aa51 4431 if (n_sets == 1 && REG_NOTES (insn) != 0
7b02f4e0 4432 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
7b668f9e 4433 {
7b02f4e0
SB
4434 /* We potentially will process this insn many times. Therefore,
4435 drop the REG_EQUAL note if it is equal to the SET_SRC of the
4436 unique set in INSN.
4437
4438 Do not do so if the REG_EQUAL note is for a STRICT_LOW_PART,
4439 because cse_insn handles those specially. */
4440 if (GET_CODE (SET_DEST (sets[0].rtl)) != STRICT_LOW_PART
4441 && rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl)))
4442 remove_note (insn, tem);
4443 else
4444 {
4445 canon_reg (XEXP (tem, 0), insn);
4446 apply_change_group ();
4447 XEXP (tem, 0) = fold_rtx (XEXP (tem, 0), insn);
4448 df_notes_rescan (insn);
4449 }
7b668f9e 4450 }
7afe21cc
RK
4451
4452 /* Canonicalize sources and addresses of destinations.
4453 We do this in a separate pass to avoid problems when a MATCH_DUP is
4454 present in the insn pattern. In that case, we want to ensure that
4455 we don't break the duplicate nature of the pattern. So we will replace
4456 both operands at the same time. Otherwise, we would fail to find an
4457 equivalent substitution in the loop calling validate_change below.
7afe21cc
RK
4458
4459 We used to suppress canonicalization of DEST if it appears in SRC,
77fa0940 4460 but we don't do this any more. */
7afe21cc
RK
4461
4462 for (i = 0; i < n_sets; i++)
4463 {
4464 rtx dest = SET_DEST (sets[i].rtl);
4465 rtx src = SET_SRC (sets[i].rtl);
32e9fa48 4466 rtx new_rtx = canon_reg (src, insn);
7afe21cc 4467
32e9fa48 4468 validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
7afe21cc 4469
46d096a3 4470 if (GET_CODE (dest) == ZERO_EXTRACT)
7afe21cc
RK
4471 {
4472 validate_change (insn, &XEXP (dest, 1),
77fa0940 4473 canon_reg (XEXP (dest, 1), insn), 1);
7afe21cc 4474 validate_change (insn, &XEXP (dest, 2),
77fa0940 4475 canon_reg (XEXP (dest, 2), insn), 1);
7afe21cc
RK
4476 }
4477
46d096a3 4478 while (GET_CODE (dest) == SUBREG
7afe21cc 4479 || GET_CODE (dest) == ZERO_EXTRACT
46d096a3 4480 || GET_CODE (dest) == STRICT_LOW_PART)
7afe21cc
RK
4481 dest = XEXP (dest, 0);
4482
3c0cb5de 4483 if (MEM_P (dest))
7afe21cc
RK
4484 canon_reg (dest, insn);
4485 }
4486
77fa0940
RK
4487 /* Now that we have done all the replacements, we can apply the change
4488 group and see if they all work. Note that this will cause some
4489 canonicalizations that would have worked individually not to be applied
4490 because some other canonicalization didn't work, but this should not
278a83b2 4491 occur often.
7722328e
RK
4492
4493 The result of apply_change_group can be ignored; see canon_reg. */
77fa0940
RK
4494
4495 apply_change_group ();
7b02f4e0
SB
4496}
4497\f
4498/* Main function of CSE.
4499 First simplify sources and addresses of all assignments
4500 in the instruction, using previously-computed equivalents values.
4501 Then install the new sources and destinations in the table
4502 of available values. */
4503
4504static void
20468884 4505cse_insn (rtx_insn *insn)
7b02f4e0
SB
4506{
4507 rtx x = PATTERN (insn);
4508 int i;
4509 rtx tem;
4510 int n_sets = 0;
4511
4512 rtx src_eqv = 0;
4513 struct table_elt *src_eqv_elt = 0;
4514 int src_eqv_volatile = 0;
4515 int src_eqv_in_memory = 0;
4516 unsigned src_eqv_hash = 0;
4517
4518 struct set *sets = (struct set *) 0;
4519
4520 if (GET_CODE (x) == SET)
4521 sets = XALLOCA (struct set);
4522 else if (GET_CODE (x) == PARALLEL)
4523 sets = XALLOCAVEC (struct set, XVECLEN (x, 0));
4524
4525 this_insn = insn;
7b02f4e0
SB
4526 /* Records what this insn does to set CC0. */
4527 this_insn_cc0 = 0;
4528 this_insn_cc0_mode = VOIDmode;
7b02f4e0
SB
4529
4530 /* Find all regs explicitly clobbered in this insn,
4531 to ensure they are not replaced with any other regs
4532 elsewhere in this insn. */
4533 invalidate_from_sets_and_clobbers (insn);
4534
4535 /* Record all the SETs in this instruction. */
4536 n_sets = find_sets_in_insn (insn, &sets);
4537
4538 /* Substitute the canonical register where possible. */
4539 canonicalize_insn (insn, &sets, n_sets);
4540
4541 /* If this insn has a REG_EQUAL note, store the equivalent value in SRC_EQV,
4542 if different, or if the DEST is a STRICT_LOW_PART. The latter condition
4543 is necessary because SRC_EQV is handled specially for this case, and if
4544 it isn't set, then there will be no equivalence for the destination. */
4545 if (n_sets == 1 && REG_NOTES (insn) != 0
4546 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4547 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4548 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4549 src_eqv = copy_rtx (XEXP (tem, 0));
77fa0940 4550
7afe21cc
RK
4551 /* Set sets[i].src_elt to the class each source belongs to.
4552 Detect assignments from or to volatile things
4553 and set set[i] to zero so they will be ignored
4554 in the rest of this function.
4555
4556 Nothing in this loop changes the hash table or the register chains. */
4557
4558 for (i = 0; i < n_sets; i++)
4559 {
b4ab701f 4560 bool repeat = false;
b3694847
SS
4561 rtx src, dest;
4562 rtx src_folded;
4563 struct table_elt *elt = 0, *p;
ef4bddc2 4564 machine_mode mode;
7afe21cc
RK
4565 rtx src_eqv_here;
4566 rtx src_const = 0;
4567 rtx src_related = 0;
2c5bfdf7 4568 bool src_related_is_const_anchor = false;
7afe21cc 4569 struct table_elt *src_const_elt = 0;
99a9c946
GS
4570 int src_cost = MAX_COST;
4571 int src_eqv_cost = MAX_COST;
4572 int src_folded_cost = MAX_COST;
4573 int src_related_cost = MAX_COST;
4574 int src_elt_cost = MAX_COST;
4575 int src_regcost = MAX_COST;
4576 int src_eqv_regcost = MAX_COST;
4577 int src_folded_regcost = MAX_COST;
4578 int src_related_regcost = MAX_COST;
4579 int src_elt_regcost = MAX_COST;
da7d8304 4580 /* Set nonzero if we need to call force_const_mem on with the
7afe21cc
RK
4581 contents of src_folded before using it. */
4582 int src_folded_force_flag = 0;
4583
4584 dest = SET_DEST (sets[i].rtl);
4585 src = SET_SRC (sets[i].rtl);
4586
4587 /* If SRC is a constant that has no machine mode,
4588 hash it with the destination's machine mode.
4589 This way we can keep different modes separate. */
4590
4591 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4592 sets[i].mode = mode;
4593
4594 if (src_eqv)
4595 {
ef4bddc2 4596 machine_mode eqvmode = mode;
7afe21cc
RK
4597 if (GET_CODE (dest) == STRICT_LOW_PART)
4598 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4599 do_not_record = 0;
4600 hash_arg_in_memory = 0;
2197a88a 4601 src_eqv_hash = HASH (src_eqv, eqvmode);
7afe21cc
RK
4602
4603 /* Find the equivalence class for the equivalent expression. */
4604
4605 if (!do_not_record)
2197a88a 4606 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
7afe21cc
RK
4607
4608 src_eqv_volatile = do_not_record;
4609 src_eqv_in_memory = hash_arg_in_memory;
7afe21cc
RK
4610 }
4611
4612 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4613 value of the INNER register, not the destination. So it is not
3826a3da 4614 a valid substitution for the source. But save it for later. */
7afe21cc
RK
4615 if (GET_CODE (dest) == STRICT_LOW_PART)
4616 src_eqv_here = 0;
4617 else
4618 src_eqv_here = src_eqv;
4619
4620 /* Simplify and foldable subexpressions in SRC. Then get the fully-
4621 simplified result, which may not necessarily be valid. */
4622 src_folded = fold_rtx (src, insn);
4623
e6a125a0
RK
4624#if 0
4625 /* ??? This caused bad code to be generated for the m68k port with -O2.
4626 Suppose src is (CONST_INT -1), and that after truncation src_folded
4627 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4628 At the end we will add src and src_const to the same equivalence
4629 class. We now have 3 and -1 on the same equivalence class. This
4630 causes later instructions to be mis-optimized. */
7afe21cc
RK
4631 /* If storing a constant in a bitfield, pre-truncate the constant
4632 so we will be able to record it later. */
46d096a3 4633 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
7afe21cc
RK
4634 {
4635 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4636
481683e1
SZ
4637 if (CONST_INT_P (src)
4638 && CONST_INT_P (width)
906c4e36
RK
4639 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4640 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4641 src_folded
4642 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4643 << INTVAL (width)) - 1));
7afe21cc 4644 }
e6a125a0 4645#endif
7afe21cc
RK
4646
4647 /* Compute SRC's hash code, and also notice if it
4648 should not be recorded at all. In that case,
4649 prevent any further processing of this assignment. */
4650 do_not_record = 0;
4651 hash_arg_in_memory = 0;
7afe21cc
RK
4652
4653 sets[i].src = src;
2197a88a 4654 sets[i].src_hash = HASH (src, mode);
7afe21cc
RK
4655 sets[i].src_volatile = do_not_record;
4656 sets[i].src_in_memory = hash_arg_in_memory;
7afe21cc 4657
50196afa 4658 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
43e72072
JJ
4659 a pseudo, do not record SRC. Using SRC as a replacement for
4660 anything else will be incorrect in that situation. Note that
4661 this usually occurs only for stack slots, in which case all the
4662 RTL would be referring to SRC, so we don't lose any optimization
4663 opportunities by not having SRC in the hash table. */
50196afa 4664
3c0cb5de 4665 if (MEM_P (src)
43e72072 4666 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
f8cfc6aa 4667 && REG_P (dest)
43e72072 4668 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
50196afa
RK
4669 sets[i].src_volatile = 1;
4670
d8d6ea53
JJ
4671 else if (GET_CODE (src) == ASM_OPERANDS
4672 && GET_CODE (x) == PARALLEL)
6c4d60f8
JJ
4673 {
4674 /* Do not record result of a non-volatile inline asm with
4675 more than one result. */
4676 if (n_sets > 1)
4677 sets[i].src_volatile = 1;
4678
4679 int j, lim = XVECLEN (x, 0);
4680 for (j = 0; j < lim; j++)
4681 {
4682 rtx y = XVECEXP (x, 0, j);
4683 /* And do not record result of a non-volatile inline asm
4684 with "memory" clobber. */
4685 if (GET_CODE (y) == CLOBBER && MEM_P (XEXP (y, 0)))
4686 {
4687 sets[i].src_volatile = 1;
4688 break;
4689 }
4690 }
4691 }
d8d6ea53 4692
0dadecf6
RK
4693#if 0
4694 /* It is no longer clear why we used to do this, but it doesn't
4695 appear to still be needed. So let's try without it since this
4696 code hurts cse'ing widened ops. */
9a5a17f3 4697 /* If source is a paradoxical subreg (such as QI treated as an SI),
7afe21cc
RK
4698 treat it as volatile. It may do the work of an SI in one context
4699 where the extra bits are not being used, but cannot replace an SI
4700 in general. */
6a4bdc79 4701 if (paradoxical_subreg_p (src))
7afe21cc 4702 sets[i].src_volatile = 1;
0dadecf6 4703#endif
7afe21cc
RK
4704
4705 /* Locate all possible equivalent forms for SRC. Try to replace
4706 SRC in the insn with each cheaper equivalent.
4707
4708 We have the following types of equivalents: SRC itself, a folded
4709 version, a value given in a REG_EQUAL note, or a value related
4710 to a constant.
4711
4712 Each of these equivalents may be part of an additional class
4713 of equivalents (if more than one is in the table, they must be in
4714 the same class; we check for this).
4715
4716 If the source is volatile, we don't do any table lookups.
4717
4718 We note any constant equivalent for possible later use in a
4719 REG_NOTE. */
4720
4721 if (!sets[i].src_volatile)
2197a88a 4722 elt = lookup (src, sets[i].src_hash, mode);
7afe21cc
RK
4723
4724 sets[i].src_elt = elt;
4725
4726 if (elt && src_eqv_here && src_eqv_elt)
278a83b2
KH
4727 {
4728 if (elt->first_same_value != src_eqv_elt->first_same_value)
7afe21cc
RK
4729 {
4730 /* The REG_EQUAL is indicating that two formerly distinct
4731 classes are now equivalent. So merge them. */
4732 merge_equiv_classes (elt, src_eqv_elt);
2197a88a
RK
4733 src_eqv_hash = HASH (src_eqv, elt->mode);
4734 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
7afe21cc
RK
4735 }
4736
278a83b2
KH
4737 src_eqv_here = 0;
4738 }
7afe21cc
RK
4739
4740 else if (src_eqv_elt)
278a83b2 4741 elt = src_eqv_elt;
7afe21cc
RK
4742
4743 /* Try to find a constant somewhere and record it in `src_const'.
4744 Record its table element, if any, in `src_const_elt'. Look in
4745 any known equivalences first. (If the constant is not in the
2197a88a 4746 table, also set `sets[i].src_const_hash'). */
7afe21cc 4747 if (elt)
278a83b2 4748 for (p = elt->first_same_value; p; p = p->next_same_value)
7afe21cc
RK
4749 if (p->is_const)
4750 {
4751 src_const = p->exp;
4752 src_const_elt = elt;
4753 break;
4754 }
4755
4756 if (src_const == 0
4757 && (CONSTANT_P (src_folded)
278a83b2 4758 /* Consider (minus (label_ref L1) (label_ref L2)) as
7afe21cc
RK
4759 "constant" here so we will record it. This allows us
4760 to fold switch statements when an ADDR_DIFF_VEC is used. */
4761 || (GET_CODE (src_folded) == MINUS
4762 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
4763 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
4764 src_const = src_folded, src_const_elt = elt;
4765 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
4766 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
4767
4768 /* If we don't know if the constant is in the table, get its
4769 hash code and look it up. */
4770 if (src_const && src_const_elt == 0)
4771 {
2197a88a
RK
4772 sets[i].src_const_hash = HASH (src_const, mode);
4773 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
7afe21cc
RK
4774 }
4775
4776 sets[i].src_const = src_const;
4777 sets[i].src_const_elt = src_const_elt;
4778
4779 /* If the constant and our source are both in the table, mark them as
4780 equivalent. Otherwise, if a constant is in the table but the source
4781 isn't, set ELT to it. */
4782 if (src_const_elt && elt
4783 && src_const_elt->first_same_value != elt->first_same_value)
4784 merge_equiv_classes (elt, src_const_elt);
4785 else if (src_const_elt && elt == 0)
4786 elt = src_const_elt;
4787
4788 /* See if there is a register linearly related to a constant
4789 equivalent of SRC. */
4790 if (src_const
4791 && (GET_CODE (src_const) == CONST
4792 || (src_const_elt && src_const_elt->related_value != 0)))
278a83b2
KH
4793 {
4794 src_related = use_related_value (src_const, src_const_elt);
4795 if (src_related)
4796 {
7afe21cc 4797 struct table_elt *src_related_elt
278a83b2 4798 = lookup (src_related, HASH (src_related, mode), mode);
7afe21cc 4799 if (src_related_elt && elt)
278a83b2 4800 {
7afe21cc
RK
4801 if (elt->first_same_value
4802 != src_related_elt->first_same_value)
278a83b2 4803 /* This can occur when we previously saw a CONST
7afe21cc
RK
4804 involving a SYMBOL_REF and then see the SYMBOL_REF
4805 twice. Merge the involved classes. */
4806 merge_equiv_classes (elt, src_related_elt);
4807
278a83b2 4808 src_related = 0;
7afe21cc 4809 src_related_elt = 0;
278a83b2
KH
4810 }
4811 else if (src_related_elt && elt == 0)
4812 elt = src_related_elt;
7afe21cc 4813 }
278a83b2 4814 }
7afe21cc 4815
e4600702
RK
4816 /* See if we have a CONST_INT that is already in a register in a
4817 wider mode. */
4818
481683e1 4819 if (src_const && src_related == 0 && CONST_INT_P (src_const)
e4600702 4820 && GET_MODE_CLASS (mode) == MODE_INT
5511bc5a 4821 && GET_MODE_PRECISION (mode) < BITS_PER_WORD)
e4600702 4822 {
ef4bddc2 4823 machine_mode wider_mode;
e4600702
RK
4824
4825 for (wider_mode = GET_MODE_WIDER_MODE (mode);
1f3ad3f0 4826 wider_mode != VOIDmode
5511bc5a 4827 && GET_MODE_PRECISION (wider_mode) <= BITS_PER_WORD
e4600702
RK
4828 && src_related == 0;
4829 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4830 {
4831 struct table_elt *const_elt
4832 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
4833
4834 if (const_elt == 0)
4835 continue;
4836
4837 for (const_elt = const_elt->first_same_value;
4838 const_elt; const_elt = const_elt->next_same_value)
f8cfc6aa 4839 if (REG_P (const_elt->exp))
e4600702 4840 {
73ca11ed 4841 src_related = gen_lowpart (mode, const_elt->exp);
e4600702
RK
4842 break;
4843 }
4844 }
4845 }
4846
d45cf215
RS
4847 /* Another possibility is that we have an AND with a constant in
4848 a mode narrower than a word. If so, it might have been generated
4849 as part of an "if" which would narrow the AND. If we already
4850 have done the AND in a wider mode, we can use a SUBREG of that
4851 value. */
4852
4853 if (flag_expensive_optimizations && ! src_related
481683e1 4854 && GET_CODE (src) == AND && CONST_INT_P (XEXP (src, 1))
d45cf215
RS
4855 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4856 {
ef4bddc2 4857 machine_mode tmode;
38a448ca 4858 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
d45cf215
RS
4859
4860 for (tmode = GET_MODE_WIDER_MODE (mode);
4861 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4862 tmode = GET_MODE_WIDER_MODE (tmode))
4863 {
4de249d9 4864 rtx inner = gen_lowpart (tmode, XEXP (src, 0));
d45cf215
RS
4865 struct table_elt *larger_elt;
4866
4867 if (inner)
4868 {
4869 PUT_MODE (new_and, tmode);
4870 XEXP (new_and, 0) = inner;
4871 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
4872 if (larger_elt == 0)
4873 continue;
4874
4875 for (larger_elt = larger_elt->first_same_value;
4876 larger_elt; larger_elt = larger_elt->next_same_value)
f8cfc6aa 4877 if (REG_P (larger_elt->exp))
d45cf215
RS
4878 {
4879 src_related
4de249d9 4880 = gen_lowpart (mode, larger_elt->exp);
d45cf215
RS
4881 break;
4882 }
4883
4884 if (src_related)
4885 break;
4886 }
4887 }
4888 }
7bac1be0
RK
4889
4890#ifdef LOAD_EXTEND_OP
4891 /* See if a MEM has already been loaded with a widening operation;
4892 if it has, we can use a subreg of that. Many CISC machines
4893 also have such operations, but this is only likely to be
71cc389b 4894 beneficial on these machines. */
278a83b2 4895
ddc356e8 4896 if (flag_expensive_optimizations && src_related == 0
7bac1be0
RK
4897 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4898 && GET_MODE_CLASS (mode) == MODE_INT
3c0cb5de 4899 && MEM_P (src) && ! do_not_record
f822d252 4900 && LOAD_EXTEND_OP (mode) != UNKNOWN)
7bac1be0 4901 {
9d80ef7c
RH
4902 struct rtx_def memory_extend_buf;
4903 rtx memory_extend_rtx = &memory_extend_buf;
ef4bddc2 4904 machine_mode tmode;
278a83b2 4905
7bac1be0
RK
4906 /* Set what we are trying to extend and the operation it might
4907 have been extended with. */
c3284718 4908 memset (memory_extend_rtx, 0, sizeof (*memory_extend_rtx));
7bac1be0
RK
4909 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
4910 XEXP (memory_extend_rtx, 0) = src;
278a83b2 4911
7bac1be0
RK
4912 for (tmode = GET_MODE_WIDER_MODE (mode);
4913 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4914 tmode = GET_MODE_WIDER_MODE (tmode))
4915 {
4916 struct table_elt *larger_elt;
278a83b2 4917
7bac1be0 4918 PUT_MODE (memory_extend_rtx, tmode);
278a83b2 4919 larger_elt = lookup (memory_extend_rtx,
7bac1be0
RK
4920 HASH (memory_extend_rtx, tmode), tmode);
4921 if (larger_elt == 0)
4922 continue;
278a83b2 4923
7bac1be0
RK
4924 for (larger_elt = larger_elt->first_same_value;
4925 larger_elt; larger_elt = larger_elt->next_same_value)
f8cfc6aa 4926 if (REG_P (larger_elt->exp))
7bac1be0 4927 {
73ca11ed 4928 src_related = gen_lowpart (mode, larger_elt->exp);
7bac1be0
RK
4929 break;
4930 }
278a83b2 4931
7bac1be0
RK
4932 if (src_related)
4933 break;
4934 }
4935 }
4936#endif /* LOAD_EXTEND_OP */
278a83b2 4937
2c5bfdf7
AN
4938 /* Try to express the constant using a register+offset expression
4939 derived from a constant anchor. */
4940
4941 if (targetm.const_anchor
4942 && !src_related
4943 && src_const
4944 && GET_CODE (src_const) == CONST_INT)
4945 {
4946 src_related = try_const_anchors (src_const, mode);
4947 src_related_is_const_anchor = src_related != NULL_RTX;
4948 }
4949
4950
7afe21cc 4951 if (src == src_folded)
278a83b2 4952 src_folded = 0;
7afe21cc 4953
da7d8304 4954 /* At this point, ELT, if nonzero, points to a class of expressions
7afe21cc 4955 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
da7d8304 4956 and SRC_RELATED, if nonzero, each contain additional equivalent
7afe21cc
RK
4957 expressions. Prune these latter expressions by deleting expressions
4958 already in the equivalence class.
4959
4960 Check for an equivalent identical to the destination. If found,
4961 this is the preferred equivalent since it will likely lead to
4962 elimination of the insn. Indicate this by placing it in
4963 `src_related'. */
4964
278a83b2
KH
4965 if (elt)
4966 elt = elt->first_same_value;
7afe21cc 4967 for (p = elt; p; p = p->next_same_value)
278a83b2 4968 {
7afe21cc
RK
4969 enum rtx_code code = GET_CODE (p->exp);
4970
4971 /* If the expression is not valid, ignore it. Then we do not
4972 have to check for validity below. In most cases, we can use
4973 `rtx_equal_p', since canonicalization has already been done. */
0516f6fe 4974 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
7afe21cc
RK
4975 continue;
4976
5a03c8c4
RK
4977 /* Also skip paradoxical subregs, unless that's what we're
4978 looking for. */
6a4bdc79 4979 if (paradoxical_subreg_p (p->exp)
5a03c8c4
RK
4980 && ! (src != 0
4981 && GET_CODE (src) == SUBREG
4982 && GET_MODE (src) == GET_MODE (p->exp)
4983 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4984 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
4985 continue;
4986
278a83b2 4987 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
7afe21cc 4988 src = 0;
278a83b2 4989 else if (src_folded && GET_CODE (src_folded) == code
7afe21cc
RK
4990 && rtx_equal_p (src_folded, p->exp))
4991 src_folded = 0;
278a83b2 4992 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
7afe21cc
RK
4993 && rtx_equal_p (src_eqv_here, p->exp))
4994 src_eqv_here = 0;
278a83b2 4995 else if (src_related && GET_CODE (src_related) == code
7afe21cc
RK
4996 && rtx_equal_p (src_related, p->exp))
4997 src_related = 0;
4998
4999 /* This is the same as the destination of the insns, we want
5000 to prefer it. Copy it to src_related. The code below will
5001 then give it a negative cost. */
5002 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5003 src_related = dest;
278a83b2 5004 }
7afe21cc
RK
5005
5006 /* Find the cheapest valid equivalent, trying all the available
5007 possibilities. Prefer items not in the hash table to ones
5008 that are when they are equal cost. Note that we can never
5009 worsen an insn as the current contents will also succeed.
05c33dd8 5010 If we find an equivalent identical to the destination, use it as best,
0f41302f 5011 since this insn will probably be eliminated in that case. */
7afe21cc
RK
5012 if (src)
5013 {
5014 if (rtx_equal_p (src, dest))
f1c1dfc3 5015 src_cost = src_regcost = -1;
7afe21cc 5016 else
630c79be
BS
5017 {
5018 src_cost = COST (src);
5019 src_regcost = approx_reg_cost (src);
5020 }
7afe21cc
RK
5021 }
5022
5023 if (src_eqv_here)
5024 {
5025 if (rtx_equal_p (src_eqv_here, dest))
f1c1dfc3 5026 src_eqv_cost = src_eqv_regcost = -1;
7afe21cc 5027 else
630c79be
BS
5028 {
5029 src_eqv_cost = COST (src_eqv_here);
5030 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5031 }
7afe21cc
RK
5032 }
5033
5034 if (src_folded)
5035 {
5036 if (rtx_equal_p (src_folded, dest))
f1c1dfc3 5037 src_folded_cost = src_folded_regcost = -1;
7afe21cc 5038 else
630c79be
BS
5039 {
5040 src_folded_cost = COST (src_folded);
5041 src_folded_regcost = approx_reg_cost (src_folded);
5042 }
7afe21cc
RK
5043 }
5044
5045 if (src_related)
5046 {
5047 if (rtx_equal_p (src_related, dest))
f1c1dfc3 5048 src_related_cost = src_related_regcost = -1;
7afe21cc 5049 else
630c79be
BS
5050 {
5051 src_related_cost = COST (src_related);
5052 src_related_regcost = approx_reg_cost (src_related);
2c5bfdf7
AN
5053
5054 /* If a const-anchor is used to synthesize a constant that
5055 normally requires multiple instructions then slightly prefer
5056 it over the original sequence. These instructions are likely
5057 to become redundant now. We can't compare against the cost
5058 of src_eqv_here because, on MIPS for example, multi-insn
5059 constants have zero cost; they are assumed to be hoisted from
5060 loops. */
5061 if (src_related_is_const_anchor
5062 && src_related_cost == src_cost
5063 && src_eqv_here)
5064 src_related_cost--;
630c79be 5065 }
7afe21cc
RK
5066 }
5067
5068 /* If this was an indirect jump insn, a known label will really be
5069 cheaper even though it looks more expensive. */
5070 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
99a9c946 5071 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
278a83b2 5072
7afe21cc
RK
5073 /* Terminate loop when replacement made. This must terminate since
5074 the current contents will be tested and will always be valid. */
5075 while (1)
278a83b2
KH
5076 {
5077 rtx trial;
7afe21cc 5078
278a83b2 5079 /* Skip invalid entries. */
f8cfc6aa 5080 while (elt && !REG_P (elt->exp)
0516f6fe 5081 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
278a83b2 5082 elt = elt->next_same_value;
5a03c8c4
RK
5083
5084 /* A paradoxical subreg would be bad here: it'll be the right
5085 size, but later may be adjusted so that the upper bits aren't
5086 what we want. So reject it. */
5087 if (elt != 0
6a4bdc79 5088 && paradoxical_subreg_p (elt->exp)
5a03c8c4
RK
5089 /* It is okay, though, if the rtx we're trying to match
5090 will ignore any of the bits we can't predict. */
5091 && ! (src != 0
5092 && GET_CODE (src) == SUBREG
5093 && GET_MODE (src) == GET_MODE (elt->exp)
5094 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5095 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5096 {
5097 elt = elt->next_same_value;
5098 continue;
5099 }
278a83b2 5100
68252e27 5101 if (elt)
630c79be
BS
5102 {
5103 src_elt_cost = elt->cost;
5104 src_elt_regcost = elt->regcost;
5105 }
7afe21cc 5106
68252e27 5107 /* Find cheapest and skip it for the next time. For items
7afe21cc
RK
5108 of equal cost, use this order:
5109 src_folded, src, src_eqv, src_related and hash table entry. */
99a9c946 5110 if (src_folded
56ae04af
KH
5111 && preferable (src_folded_cost, src_folded_regcost,
5112 src_cost, src_regcost) <= 0
5113 && preferable (src_folded_cost, src_folded_regcost,
5114 src_eqv_cost, src_eqv_regcost) <= 0
5115 && preferable (src_folded_cost, src_folded_regcost,
5116 src_related_cost, src_related_regcost) <= 0
5117 && preferable (src_folded_cost, src_folded_regcost,
5118 src_elt_cost, src_elt_regcost) <= 0)
7afe21cc 5119 {
f1c1dfc3 5120 trial = src_folded, src_folded_cost = MAX_COST;
7afe21cc 5121 if (src_folded_force_flag)
9d8de1de
EB
5122 {
5123 rtx forced = force_const_mem (mode, trial);
5124 if (forced)
5125 trial = forced;
5126 }
7afe21cc 5127 }
99a9c946 5128 else if (src
56ae04af
KH
5129 && preferable (src_cost, src_regcost,
5130 src_eqv_cost, src_eqv_regcost) <= 0
5131 && preferable (src_cost, src_regcost,
5132 src_related_cost, src_related_regcost) <= 0
5133 && preferable (src_cost, src_regcost,
5134 src_elt_cost, src_elt_regcost) <= 0)
f1c1dfc3 5135 trial = src, src_cost = MAX_COST;
99a9c946 5136 else if (src_eqv_here
56ae04af
KH
5137 && preferable (src_eqv_cost, src_eqv_regcost,
5138 src_related_cost, src_related_regcost) <= 0
5139 && preferable (src_eqv_cost, src_eqv_regcost,
5140 src_elt_cost, src_elt_regcost) <= 0)
a36b8a1e 5141 trial = src_eqv_here, src_eqv_cost = MAX_COST;
99a9c946 5142 else if (src_related
56ae04af
KH
5143 && preferable (src_related_cost, src_related_regcost,
5144 src_elt_cost, src_elt_regcost) <= 0)
a36b8a1e 5145 trial = src_related, src_related_cost = MAX_COST;
278a83b2 5146 else
7afe21cc 5147 {
a36b8a1e 5148 trial = elt->exp;
7afe21cc 5149 elt = elt->next_same_value;
f1c1dfc3 5150 src_elt_cost = MAX_COST;
7afe21cc
RK
5151 }
5152
2e4e39f6
AK
5153 /* Avoid creation of overlapping memory moves. */
5154 if (MEM_P (trial) && MEM_P (SET_DEST (sets[i].rtl)))
5155 {
5156 rtx src, dest;
5157
5158 /* BLKmode moves are not handled by cse anyway. */
5159 if (GET_MODE (trial) == BLKmode)
5160 break;
5161
5162 src = canon_rtx (trial);
5163 dest = canon_rtx (SET_DEST (sets[i].rtl));
5164
5165 if (!MEM_P (src) || !MEM_P (dest)
c6ea834c 5166 || !nonoverlapping_memrefs_p (src, dest, false))
2e4e39f6
AK
5167 break;
5168 }
5169
b4ab701f
JJ
5170 /* Try to optimize
5171 (set (reg:M N) (const_int A))
5172 (set (reg:M2 O) (const_int B))
5173 (set (zero_extract:M2 (reg:M N) (const_int C) (const_int D))
5174 (reg:M2 O)). */
5175 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5176 && CONST_INT_P (trial)
5177 && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 1))
5178 && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 2))
5179 && REG_P (XEXP (SET_DEST (sets[i].rtl), 0))
5511bc5a 5180 && (GET_MODE_PRECISION (GET_MODE (SET_DEST (sets[i].rtl)))
b4ab701f
JJ
5181 >= INTVAL (XEXP (SET_DEST (sets[i].rtl), 1)))
5182 && ((unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))
5183 + (unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 2))
5184 <= HOST_BITS_PER_WIDE_INT))
5185 {
5186 rtx dest_reg = XEXP (SET_DEST (sets[i].rtl), 0);
5187 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5188 rtx pos = XEXP (SET_DEST (sets[i].rtl), 2);
5189 unsigned int dest_hash = HASH (dest_reg, GET_MODE (dest_reg));
5190 struct table_elt *dest_elt
5191 = lookup (dest_reg, dest_hash, GET_MODE (dest_reg));
5192 rtx dest_cst = NULL;
5193
5194 if (dest_elt)
5195 for (p = dest_elt->first_same_value; p; p = p->next_same_value)
5196 if (p->is_const && CONST_INT_P (p->exp))
5197 {
5198 dest_cst = p->exp;
5199 break;
5200 }
5201 if (dest_cst)
5202 {
5203 HOST_WIDE_INT val = INTVAL (dest_cst);
5204 HOST_WIDE_INT mask;
5205 unsigned int shift;
5206 if (BITS_BIG_ENDIAN)
5511bc5a 5207 shift = GET_MODE_PRECISION (GET_MODE (dest_reg))
b4ab701f
JJ
5208 - INTVAL (pos) - INTVAL (width);
5209 else
5210 shift = INTVAL (pos);
5211 if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
5212 mask = ~(HOST_WIDE_INT) 0;
5213 else
5214 mask = ((HOST_WIDE_INT) 1 << INTVAL (width)) - 1;
5215 val &= ~(mask << shift);
5216 val |= (INTVAL (trial) & mask) << shift;
5217 val = trunc_int_for_mode (val, GET_MODE (dest_reg));
5218 validate_unshare_change (insn, &SET_DEST (sets[i].rtl),
5219 dest_reg, 1);
5220 validate_unshare_change (insn, &SET_SRC (sets[i].rtl),
5221 GEN_INT (val), 1);
5222 if (apply_change_group ())
5223 {
5224 rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5225 if (note)
5226 {
5227 remove_note (insn, note);
5228 df_notes_rescan (insn);
5229 }
5230 src_eqv = NULL_RTX;
5231 src_eqv_elt = NULL;
5232 src_eqv_volatile = 0;
5233 src_eqv_in_memory = 0;
5234 src_eqv_hash = 0;
5235 repeat = true;
5236 break;
5237 }
5238 }
5239 }
5240
7afe21cc
RK
5241 /* We don't normally have an insn matching (set (pc) (pc)), so
5242 check for this separately here. We will delete such an
5243 insn below.
5244
d466c016
JL
5245 For other cases such as a table jump or conditional jump
5246 where we know the ultimate target, go ahead and replace the
5247 operand. While that may not make a valid insn, we will
5248 reemit the jump below (and also insert any necessary
5249 barriers). */
7afe21cc
RK
5250 if (n_sets == 1 && dest == pc_rtx
5251 && (trial == pc_rtx
5252 || (GET_CODE (trial) == LABEL_REF
5253 && ! condjump_p (insn))))
5254 {
2f39b6ca
UW
5255 /* Don't substitute non-local labels, this confuses CFG. */
5256 if (GET_CODE (trial) == LABEL_REF
5257 && LABEL_REF_NONLOCAL_P (trial))
5258 continue;
5259
d466c016 5260 SET_SRC (sets[i].rtl) = trial;
2aac3a01 5261 cse_jumps_altered = true;
7afe21cc
RK
5262 break;
5263 }
278a83b2 5264
1ef6855c
KH
5265 /* Reject certain invalid forms of CONST that we create. */
5266 else if (CONSTANT_P (trial)
5267 && GET_CODE (trial) == CONST
5268 /* Reject cases that will cause decode_rtx_const to
5269 die. On the alpha when simplifying a switch, we
5270 get (const (truncate (minus (label_ref)
5271 (label_ref)))). */
5272 && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
5273 /* Likewise on IA-64, except without the
5274 truncate. */
5275 || (GET_CODE (XEXP (trial, 0)) == MINUS
5276 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5277 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
5278 /* Do nothing for this case. */
5279 ;
5280
7afe21cc 5281 /* Look for a substitution that makes a valid insn. */
6c4d60f8
JJ
5282 else if (validate_unshare_change (insn, &SET_SRC (sets[i].rtl),
5283 trial, 0))
05c33dd8 5284 {
32e9fa48 5285 rtx new_rtx = canon_reg (SET_SRC (sets[i].rtl), insn);
dbaff908 5286
7722328e
RK
5287 /* The result of apply_change_group can be ignored; see
5288 canon_reg. */
5289
32e9fa48 5290 validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
6702af89 5291 apply_change_group ();
932ad4d9 5292
05c33dd8
RK
5293 break;
5294 }
7afe21cc 5295
278a83b2 5296 /* If we previously found constant pool entries for
7afe21cc
RK
5297 constants and this is a constant, try making a
5298 pool entry. Put it in src_folded unless we already have done
5299 this since that is where it likely came from. */
5300
5301 else if (constant_pool_entries_cost
5302 && CONSTANT_P (trial)
1bbd065b 5303 && (src_folded == 0
3c0cb5de 5304 || (!MEM_P (src_folded)
1bbd065b 5305 && ! src_folded_force_flag))
9ae8ffe7
JL
5306 && GET_MODE_CLASS (mode) != MODE_CC
5307 && mode != VOIDmode)
7afe21cc
RK
5308 {
5309 src_folded_force_flag = 1;
5310 src_folded = trial;
5311 src_folded_cost = constant_pool_entries_cost;
dd0ba281 5312 src_folded_regcost = constant_pool_entries_regcost;
7afe21cc 5313 }
278a83b2 5314 }
7afe21cc 5315
b4ab701f
JJ
5316 /* If we changed the insn too much, handle this set from scratch. */
5317 if (repeat)
5318 {
5319 i--;
5320 continue;
5321 }
5322
7afe21cc
RK
5323 src = SET_SRC (sets[i].rtl);
5324
5325 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5326 However, there is an important exception: If both are registers
5327 that are not the head of their equivalence class, replace SET_SRC
5328 with the head of the class. If we do not do this, we will have
5329 both registers live over a portion of the basic block. This way,
5330 their lifetimes will likely abut instead of overlapping. */
f8cfc6aa 5331 if (REG_P (dest)
1bb98cec 5332 && REGNO_QTY_VALID_P (REGNO (dest)))
7afe21cc 5333 {
1bb98cec
DM
5334 int dest_q = REG_QTY (REGNO (dest));
5335 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5336
5337 if (dest_ent->mode == GET_MODE (dest)
5338 && dest_ent->first_reg != REGNO (dest)
f8cfc6aa 5339 && REG_P (src) && REGNO (src) == REGNO (dest)
1bb98cec
DM
5340 /* Don't do this if the original insn had a hard reg as
5341 SET_SRC or SET_DEST. */
f8cfc6aa 5342 && (!REG_P (sets[i].src)
1bb98cec 5343 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
f8cfc6aa 5344 && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
1bb98cec
DM
5345 /* We can't call canon_reg here because it won't do anything if
5346 SRC is a hard register. */
759bd8b7 5347 {
1bb98cec
DM
5348 int src_q = REG_QTY (REGNO (src));
5349 struct qty_table_elem *src_ent = &qty_table[src_q];
5350 int first = src_ent->first_reg;
5351 rtx new_src
5352 = (first >= FIRST_PSEUDO_REGISTER
5353 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5354
5355 /* We must use validate-change even for this, because this
5356 might be a special no-op instruction, suitable only to
5357 tag notes onto. */
5358 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5359 {
5360 src = new_src;
5361 /* If we had a constant that is cheaper than what we are now
5362 setting SRC to, use that constant. We ignored it when we
5363 thought we could make this into a no-op. */
5364 if (src_const && COST (src_const) < COST (src)
278a83b2
KH
5365 && validate_change (insn, &SET_SRC (sets[i].rtl),
5366 src_const, 0))
1bb98cec
DM
5367 src = src_const;
5368 }
759bd8b7 5369 }
7afe21cc
RK
5370 }
5371
5372 /* If we made a change, recompute SRC values. */
5373 if (src != sets[i].src)
278a83b2 5374 {
278a83b2
KH
5375 do_not_record = 0;
5376 hash_arg_in_memory = 0;
7afe21cc 5377 sets[i].src = src;
278a83b2
KH
5378 sets[i].src_hash = HASH (src, mode);
5379 sets[i].src_volatile = do_not_record;
5380 sets[i].src_in_memory = hash_arg_in_memory;
5381 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5382 }
7afe21cc
RK
5383
5384 /* If this is a single SET, we are setting a register, and we have an
73dd3123
EB
5385 equivalent constant, we want to add a REG_EQUAL note if the constant
5386 is different from the source. We don't want to do it for a constant
5387 pseudo since verifying that this pseudo hasn't been eliminated is a
5388 pain; moreover such a note won't help anything.
ac7ef8d5
FS
5389
5390 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5391 which can be created for a reference to a compile time computable
5392 entry in a jump table. */
73dd3123
EB
5393 if (n_sets == 1
5394 && REG_P (dest)
5395 && src_const
f8cfc6aa 5396 && !REG_P (src_const)
73dd3123
EB
5397 && !(GET_CODE (src_const) == SUBREG
5398 && REG_P (SUBREG_REG (src_const)))
5399 && !(GET_CODE (src_const) == CONST
5400 && GET_CODE (XEXP (src_const, 0)) == MINUS
5401 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5402 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF)
5403 && !rtx_equal_p (src, src_const))
7afe21cc 5404 {
73dd3123
EB
5405 /* Make sure that the rtx is not shared. */
5406 src_const = copy_rtx (src_const);
51e2a951 5407
73dd3123
EB
5408 /* Record the actual constant value in a REG_EQUAL note,
5409 making a new one if one does not already exist. */
5410 set_unique_reg_note (insn, REG_EQUAL, src_const);
5411 df_notes_rescan (insn);
7afe21cc
RK
5412 }
5413
5414 /* Now deal with the destination. */
5415 do_not_record = 0;
7afe21cc 5416
46d096a3
SB
5417 /* Look within any ZERO_EXTRACT to the MEM or REG within it. */
5418 while (GET_CODE (dest) == SUBREG
7afe21cc 5419 || GET_CODE (dest) == ZERO_EXTRACT
7afe21cc 5420 || GET_CODE (dest) == STRICT_LOW_PART)
0339ce7e 5421 dest = XEXP (dest, 0);
7afe21cc
RK
5422
5423 sets[i].inner_dest = dest;
5424
3c0cb5de 5425 if (MEM_P (dest))
7afe21cc 5426 {
9ae8ffe7
JL
5427#ifdef PUSH_ROUNDING
5428 /* Stack pushes invalidate the stack pointer. */
5429 rtx addr = XEXP (dest, 0);
ec8e098d 5430 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
9ae8ffe7 5431 && XEXP (addr, 0) == stack_pointer_rtx)
524e3576 5432 invalidate (stack_pointer_rtx, VOIDmode);
9ae8ffe7 5433#endif
7afe21cc 5434 dest = fold_rtx (dest, insn);
7afe21cc
RK
5435 }
5436
5437 /* Compute the hash code of the destination now,
5438 before the effects of this instruction are recorded,
5439 since the register values used in the address computation
5440 are those before this instruction. */
2197a88a 5441 sets[i].dest_hash = HASH (dest, mode);
7afe21cc
RK
5442
5443 /* Don't enter a bit-field in the hash table
5444 because the value in it after the store
5445 may not equal what was stored, due to truncation. */
5446
46d096a3 5447 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
7afe21cc
RK
5448 {
5449 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5450
481683e1
SZ
5451 if (src_const != 0 && CONST_INT_P (src_const)
5452 && CONST_INT_P (width)
906c4e36
RK
5453 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5454 && ! (INTVAL (src_const)
0cadbfaa 5455 & (HOST_WIDE_INT_M1U << INTVAL (width))))
7afe21cc
RK
5456 /* Exception: if the value is constant,
5457 and it won't be truncated, record it. */
5458 ;
5459 else
5460 {
5461 /* This is chosen so that the destination will be invalidated
5462 but no new value will be recorded.
5463 We must invalidate because sometimes constant
5464 values can be recorded for bitfields. */
5465 sets[i].src_elt = 0;
5466 sets[i].src_volatile = 1;
5467 src_eqv = 0;
5468 src_eqv_elt = 0;
5469 }
5470 }
5471
5472 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5473 the insn. */
5474 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5475 {
ef178af3 5476 /* One less use of the label this insn used to jump to. */
0129d079 5477 delete_insn_and_edges (insn);
2aac3a01 5478 cse_jumps_altered = true;
7afe21cc
RK
5479 /* No more processing for this set. */
5480 sets[i].rtl = 0;
5481 }
5482
5483 /* If this SET is now setting PC to a label, we know it used to
d466c016 5484 be a conditional or computed branch. */
8f235343
JH
5485 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5486 && !LABEL_REF_NONLOCAL_P (src))
7afe21cc 5487 {
d466c016
JL
5488 /* We reemit the jump in as many cases as possible just in
5489 case the form of an unconditional jump is significantly
5490 different than a computed jump or conditional jump.
5491
5492 If this insn has multiple sets, then reemitting the
5493 jump is nontrivial. So instead we just force rerecognition
5494 and hope for the best. */
5495 if (n_sets == 1)
7afe21cc 5496 {
49506606
DM
5497 rtx_insn *new_rtx;
5498 rtx note;
8fb1e50e 5499
32e9fa48
KG
5500 new_rtx = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
5501 JUMP_LABEL (new_rtx) = XEXP (src, 0);
7afe21cc 5502 LABEL_NUSES (XEXP (src, 0))++;
9dcb4381
RH
5503
5504 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5505 note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5506 if (note)
5507 {
5508 XEXP (note, 1) = NULL_RTX;
32e9fa48 5509 REG_NOTES (new_rtx) = note;
9dcb4381
RH
5510 }
5511
0129d079 5512 delete_insn_and_edges (insn);
49506606 5513 insn = new_rtx;
7afe21cc 5514 }
31dcf83f 5515 else
31dcf83f 5516 INSN_CODE (insn) = -1;
7afe21cc 5517
2aac3a01
EB
5518 /* Do not bother deleting any unreachable code, let jump do it. */
5519 cse_jumps_altered = true;
7afe21cc
RK
5520 sets[i].rtl = 0;
5521 }
5522
c2a47e48
RK
5523 /* If destination is volatile, invalidate it and then do no further
5524 processing for this assignment. */
7afe21cc
RK
5525
5526 else if (do_not_record)
c2a47e48 5527 {
2a1d78d8 5528 invalidate_dest (dest);
c2a47e48
RK
5529 sets[i].rtl = 0;
5530 }
7afe21cc
RK
5531
5532 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
2a1d78d8
JJ
5533 {
5534 do_not_record = 0;
5535 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5536 if (do_not_record)
5537 {
5538 invalidate_dest (SET_DEST (sets[i].rtl));
5539 sets[i].rtl = 0;
5540 }
5541 }
7afe21cc 5542
7afe21cc
RK
5543 /* If setting CC0, record what it was set to, or a constant, if it
5544 is equivalent to a constant. If it is being set to a floating-point
5545 value, make a COMPARE with the appropriate constant of 0. If we
5546 don't do this, later code can interpret this as a test against
5547 const0_rtx, which can cause problems if we try to put it into an
5548 insn as a floating-point operand. */
5549 if (dest == cc0_rtx)
5550 {
5551 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5552 this_insn_cc0_mode = mode;
cbf6a543 5553 if (FLOAT_MODE_P (mode))
38a448ca
RH
5554 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5555 CONST0_RTX (mode));
7afe21cc 5556 }
7afe21cc
RK
5557 }
5558
5559 /* Now enter all non-volatile source expressions in the hash table
5560 if they are not already present.
5561 Record their equivalence classes in src_elt.
5562 This way we can insert the corresponding destinations into
5563 the same classes even if the actual sources are no longer in them
5564 (having been invalidated). */
5565
5566 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5567 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5568 {
b3694847
SS
5569 struct table_elt *elt;
5570 struct table_elt *classp = sets[0].src_elt;
7afe21cc 5571 rtx dest = SET_DEST (sets[0].rtl);
ef4bddc2 5572 machine_mode eqvmode = GET_MODE (dest);
7afe21cc
RK
5573
5574 if (GET_CODE (dest) == STRICT_LOW_PART)
5575 {
5576 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5577 classp = 0;
5578 }
5579 if (insert_regs (src_eqv, classp, 0))
8ae2b8f6
JW
5580 {
5581 rehash_using_reg (src_eqv);
5582 src_eqv_hash = HASH (src_eqv, eqvmode);
5583 }
2197a88a 5584 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7afe21cc 5585 elt->in_memory = src_eqv_in_memory;
7afe21cc 5586 src_eqv_elt = elt;
f7911249
JW
5587
5588 /* Check to see if src_eqv_elt is the same as a set source which
5589 does not yet have an elt, and if so set the elt of the set source
5590 to src_eqv_elt. */
5591 for (i = 0; i < n_sets; i++)
26132f71
JW
5592 if (sets[i].rtl && sets[i].src_elt == 0
5593 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
f7911249 5594 sets[i].src_elt = src_eqv_elt;
7afe21cc
RK
5595 }
5596
5597 for (i = 0; i < n_sets; i++)
5598 if (sets[i].rtl && ! sets[i].src_volatile
5599 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5600 {
5601 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5602 {
5603 /* REG_EQUAL in setting a STRICT_LOW_PART
5604 gives an equivalent for the entire destination register,
5605 not just for the subreg being stored in now.
5606 This is a more interesting equivalence, so we arrange later
5607 to treat the entire reg as the destination. */
5608 sets[i].src_elt = src_eqv_elt;
2197a88a 5609 sets[i].src_hash = src_eqv_hash;
7afe21cc
RK
5610 }
5611 else
5612 {
5613 /* Insert source and constant equivalent into hash table, if not
5614 already present. */
b3694847
SS
5615 struct table_elt *classp = src_eqv_elt;
5616 rtx src = sets[i].src;
5617 rtx dest = SET_DEST (sets[i].rtl);
ef4bddc2 5618 machine_mode mode
7afe21cc
RK
5619 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5620
1fcc57f1
AM
5621 /* It's possible that we have a source value known to be
5622 constant but don't have a REG_EQUAL note on the insn.
5623 Lack of a note will mean src_eqv_elt will be NULL. This
5624 can happen where we've generated a SUBREG to access a
5625 CONST_INT that is already in a register in a wider mode.
5626 Ensure that the source expression is put in the proper
5627 constant class. */
5628 if (!classp)
5629 classp = sets[i].src_const_elt;
5630
26132f71 5631 if (sets[i].src_elt == 0)
7afe21cc 5632 {
4a8cae83 5633 struct table_elt *elt;
26132f71 5634
4a8cae83
SB
5635 /* Note that these insert_regs calls cannot remove
5636 any of the src_elt's, because they would have failed to
5637 match if not still valid. */
5638 if (insert_regs (src, classp, 0))
5639 {
5640 rehash_using_reg (src);
5641 sets[i].src_hash = HASH (src, mode);
8ae2b8f6 5642 }
4a8cae83
SB
5643 elt = insert (src, classp, sets[i].src_hash, mode);
5644 elt->in_memory = sets[i].src_in_memory;
6c4d60f8
JJ
5645 /* If inline asm has any clobbers, ensure we only reuse
5646 existing inline asms and never try to put the ASM_OPERANDS
5647 into an insn that isn't inline asm. */
5648 if (GET_CODE (src) == ASM_OPERANDS
5649 && GET_CODE (x) == PARALLEL)
5650 elt->cost = MAX_COST;
4a8cae83 5651 sets[i].src_elt = classp = elt;
7afe21cc 5652 }
7afe21cc
RK
5653 if (sets[i].src_const && sets[i].src_const_elt == 0
5654 && src != sets[i].src_const
5655 && ! rtx_equal_p (sets[i].src_const, src))
5656 sets[i].src_elt = insert (sets[i].src_const, classp,
2197a88a 5657 sets[i].src_const_hash, mode);
7afe21cc
RK
5658 }
5659 }
5660 else if (sets[i].src_elt == 0)
5661 /* If we did not insert the source into the hash table (e.g., it was
5662 volatile), note the equivalence class for the REG_EQUAL value, if any,
5663 so that the destination goes into that class. */
5664 sets[i].src_elt = src_eqv_elt;
5665
05c433f3
PB
5666 /* Record destination addresses in the hash table. This allows us to
5667 check if they are invalidated by other sets. */
5668 for (i = 0; i < n_sets; i++)
5669 {
5670 if (sets[i].rtl)
5671 {
5672 rtx x = sets[i].inner_dest;
5673 struct table_elt *elt;
ef4bddc2 5674 machine_mode mode;
05c433f3
PB
5675 unsigned hash;
5676
5677 if (MEM_P (x))
5678 {
5679 x = XEXP (x, 0);
5680 mode = GET_MODE (x);
5681 hash = HASH (x, mode);
5682 elt = lookup (x, hash, mode);
5683 if (!elt)
5684 {
5685 if (insert_regs (x, NULL, 0))
5686 {
7e7e28c7
AO
5687 rtx dest = SET_DEST (sets[i].rtl);
5688
05c433f3
PB
5689 rehash_using_reg (x);
5690 hash = HASH (x, mode);
7e7e28c7 5691 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
05c433f3
PB
5692 }
5693 elt = insert (x, NULL, hash, mode);
5694 }
5695
5696 sets[i].dest_addr_elt = elt;
5697 }
5698 else
5699 sets[i].dest_addr_elt = NULL;
5700 }
5701 }
5702
7b02f4e0 5703 invalidate_from_clobbers (insn);
77fa0940 5704
278a83b2 5705 /* Some registers are invalidated by subroutine calls. Memory is
77fa0940
RK
5706 invalidated by non-constant calls. */
5707
4b4bf941 5708 if (CALL_P (insn))
7afe21cc 5709 {
becfd6e5 5710 if (!(RTL_CONST_OR_PURE_CALL_P (insn)))
9ae8ffe7 5711 invalidate_memory ();
7afe21cc
RK
5712 invalidate_for_call ();
5713 }
5714
5715 /* Now invalidate everything set by this instruction.
5716 If a SUBREG or other funny destination is being set,
5717 sets[i].rtl is still nonzero, so here we invalidate the reg
5718 a part of which is being set. */
5719
5720 for (i = 0; i < n_sets; i++)
5721 if (sets[i].rtl)
5722 {
bb4034b3
JW
5723 /* We can't use the inner dest, because the mode associated with
5724 a ZERO_EXTRACT is significant. */
b3694847 5725 rtx dest = SET_DEST (sets[i].rtl);
7afe21cc
RK
5726
5727 /* Needed for registers to remove the register from its
5728 previous quantity's chain.
5729 Needed for memory if this is a nonvarying address, unless
5730 we have just done an invalidate_memory that covers even those. */
f8cfc6aa 5731 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
bb4034b3 5732 invalidate (dest, VOIDmode);
3c0cb5de 5733 else if (MEM_P (dest))
32fab725 5734 invalidate (dest, VOIDmode);
2708da92
RS
5735 else if (GET_CODE (dest) == STRICT_LOW_PART
5736 || GET_CODE (dest) == ZERO_EXTRACT)
bb4034b3 5737 invalidate (XEXP (dest, 0), GET_MODE (dest));
7afe21cc
RK
5738 }
5739
932ad4d9
SB
5740 /* Don't cse over a call to setjmp; on some machines (eg VAX)
5741 the regs restored by the longjmp come from a later time
5742 than the setjmp. */
5743 if (CALL_P (insn) && find_reg_note (insn, REG_SETJMP, NULL))
5744 {
5745 flush_hash_table ();
5746 goto done;
5747 }
5748
7afe21cc
RK
5749 /* Make sure registers mentioned in destinations
5750 are safe for use in an expression to be inserted.
5751 This removes from the hash table
5752 any invalid entry that refers to one of these registers.
5753
5754 We don't care about the return value from mention_regs because
5755 we are going to hash the SET_DEST values unconditionally. */
5756
5757 for (i = 0; i < n_sets; i++)
34c73909
R
5758 {
5759 if (sets[i].rtl)
5760 {
5761 rtx x = SET_DEST (sets[i].rtl);
5762
f8cfc6aa 5763 if (!REG_P (x))
34c73909
R
5764 mention_regs (x);
5765 else
5766 {
5767 /* We used to rely on all references to a register becoming
5768 inaccessible when a register changes to a new quantity,
5769 since that changes the hash code. However, that is not
9b1549b8 5770 safe, since after HASH_SIZE new quantities we get a
34c73909
R
5771 hash 'collision' of a register with its own invalid
5772 entries. And since SUBREGs have been changed not to
5773 change their hash code with the hash code of the register,
5774 it wouldn't work any longer at all. So we have to check
5775 for any invalid references lying around now.
5776 This code is similar to the REG case in mention_regs,
5777 but it knows that reg_tick has been incremented, and
5778 it leaves reg_in_table as -1 . */
770ae6cc 5779 unsigned int regno = REGNO (x);
09e18274 5780 unsigned int endregno = END_REGNO (x);
770ae6cc 5781 unsigned int i;
34c73909
R
5782
5783 for (i = regno; i < endregno; i++)
5784 {
30f72379 5785 if (REG_IN_TABLE (i) >= 0)
34c73909
R
5786 {
5787 remove_invalid_refs (i);
30f72379 5788 REG_IN_TABLE (i) = -1;
34c73909
R
5789 }
5790 }
5791 }
5792 }
5793 }
7afe21cc
RK
5794
5795 /* We may have just removed some of the src_elt's from the hash table.
05c433f3
PB
5796 So replace each one with the current head of the same class.
5797 Also check if destination addresses have been removed. */
7afe21cc
RK
5798
5799 for (i = 0; i < n_sets; i++)
5800 if (sets[i].rtl)
5801 {
05c433f3
PB
5802 if (sets[i].dest_addr_elt
5803 && sets[i].dest_addr_elt->first_same_value == 0)
5804 {
e67b81d1 5805 /* The elt was removed, which means this destination is not
05c433f3
PB
5806 valid after this instruction. */
5807 sets[i].rtl = NULL_RTX;
5808 }
5809 else if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
7afe21cc
RK
5810 /* If elt was removed, find current head of same class,
5811 or 0 if nothing remains of that class. */
5812 {
b3694847 5813 struct table_elt *elt = sets[i].src_elt;
7afe21cc
RK
5814
5815 while (elt && elt->prev_same_value)
5816 elt = elt->prev_same_value;
5817
5818 while (elt && elt->first_same_value == 0)
5819 elt = elt->next_same_value;
5820 sets[i].src_elt = elt ? elt->first_same_value : 0;
5821 }
5822 }
5823
5824 /* Now insert the destinations into their equivalence classes. */
5825
5826 for (i = 0; i < n_sets; i++)
5827 if (sets[i].rtl)
5828 {
b3694847 5829 rtx dest = SET_DEST (sets[i].rtl);
b3694847 5830 struct table_elt *elt;
7afe21cc
RK
5831
5832 /* Don't record value if we are not supposed to risk allocating
5833 floating-point values in registers that might be wider than
5834 memory. */
5835 if ((flag_float_store
3c0cb5de 5836 && MEM_P (dest)
cbf6a543 5837 && FLOAT_MODE_P (GET_MODE (dest)))
bc4ddc77
JW
5838 /* Don't record BLKmode values, because we don't know the
5839 size of it, and can't be sure that other BLKmode values
5840 have the same or smaller size. */
5841 || GET_MODE (dest) == BLKmode
7afe21cc
RK
5842 /* If we didn't put a REG_EQUAL value or a source into the hash
5843 table, there is no point is recording DEST. */
1a8e9a8e
RK
5844 || sets[i].src_elt == 0
5845 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
5846 or SIGN_EXTEND, don't record DEST since it can cause
5847 some tracking to be wrong.
5848
5849 ??? Think about this more later. */
6a4bdc79 5850 || (paradoxical_subreg_p (dest)
1a8e9a8e
RK
5851 && (GET_CODE (sets[i].src) == SIGN_EXTEND
5852 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
7afe21cc
RK
5853 continue;
5854
5855 /* STRICT_LOW_PART isn't part of the value BEING set,
5856 and neither is the SUBREG inside it.
5857 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
5858 if (GET_CODE (dest) == STRICT_LOW_PART)
5859 dest = SUBREG_REG (XEXP (dest, 0));
5860
f8cfc6aa 5861 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
7afe21cc
RK
5862 /* Registers must also be inserted into chains for quantities. */
5863 if (insert_regs (dest, sets[i].src_elt, 1))
8ae2b8f6
JW
5864 {
5865 /* If `insert_regs' changes something, the hash code must be
5866 recalculated. */
5867 rehash_using_reg (dest);
5868 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5869 }
7afe21cc 5870
8fff4fc1
RH
5871 elt = insert (dest, sets[i].src_elt,
5872 sets[i].dest_hash, GET_MODE (dest));
9de2c71a 5873
2c5bfdf7
AN
5874 /* If this is a constant, insert the constant anchors with the
5875 equivalent register-offset expressions using register DEST. */
5876 if (targetm.const_anchor
5877 && REG_P (dest)
5878 && SCALAR_INT_MODE_P (GET_MODE (dest))
5879 && GET_CODE (sets[i].src_elt->exp) == CONST_INT)
5880 insert_const_anchors (dest, sets[i].src_elt->exp, GET_MODE (dest));
5881
3c0cb5de 5882 elt->in_memory = (MEM_P (sets[i].inner_dest)
389fdba0 5883 && !MEM_READONLY_P (sets[i].inner_dest));
c256df0b 5884
fc3ffe83
RK
5885 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
5886 narrower than M2, and both M1 and M2 are the same number of words,
5887 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
5888 make that equivalence as well.
7afe21cc 5889
4de249d9
PB
5890 However, BAR may have equivalences for which gen_lowpart
5891 will produce a simpler value than gen_lowpart applied to
7afe21cc 5892 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
278a83b2 5893 BAR's equivalences. If we don't get a simplified form, make
7afe21cc
RK
5894 the SUBREG. It will not be used in an equivalence, but will
5895 cause two similar assignments to be detected.
5896
5897 Note the loop below will find SUBREG_REG (DEST) since we have
5898 already entered SRC and DEST of the SET in the table. */
5899
5900 if (GET_CODE (dest) == SUBREG
6cdbaec4
RK
5901 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
5902 / UNITS_PER_WORD)
278a83b2 5903 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
7afe21cc
RK
5904 && (GET_MODE_SIZE (GET_MODE (dest))
5905 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5906 && sets[i].src_elt != 0)
5907 {
ef4bddc2 5908 machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
7afe21cc
RK
5909 struct table_elt *elt, *classp = 0;
5910
5911 for (elt = sets[i].src_elt->first_same_value; elt;
5912 elt = elt->next_same_value)
5913 {
5914 rtx new_src = 0;
2197a88a 5915 unsigned src_hash;
7afe21cc 5916 struct table_elt *src_elt;
ff27a429 5917 int byte = 0;
7afe21cc
RK
5918
5919 /* Ignore invalid entries. */
f8cfc6aa 5920 if (!REG_P (elt->exp)
0516f6fe 5921 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
7afe21cc
RK
5922 continue;
5923
9beb7d20
RH
5924 /* We may have already been playing subreg games. If the
5925 mode is already correct for the destination, use it. */
5926 if (GET_MODE (elt->exp) == new_mode)
5927 new_src = elt->exp;
5928 else
5929 {
5930 /* Calculate big endian correction for the SUBREG_BYTE.
5931 We have already checked that M1 (GET_MODE (dest))
5932 is not narrower than M2 (new_mode). */
5933 if (BYTES_BIG_ENDIAN)
5934 byte = (GET_MODE_SIZE (GET_MODE (dest))
5935 - GET_MODE_SIZE (new_mode));
5936
5937 new_src = simplify_gen_subreg (new_mode, elt->exp,
5938 GET_MODE (dest), byte);
5939 }
5940
ff27a429
R
5941 /* The call to simplify_gen_subreg fails if the value
5942 is VOIDmode, yet we can't do any simplification, e.g.
5943 for EXPR_LISTs denoting function call results.
5944 It is invalid to construct a SUBREG with a VOIDmode
5945 SUBREG_REG, hence a zero new_src means we can't do
5946 this substitution. */
5947 if (! new_src)
5948 continue;
7afe21cc
RK
5949
5950 src_hash = HASH (new_src, new_mode);
5951 src_elt = lookup (new_src, src_hash, new_mode);
5952
5953 /* Put the new source in the hash table is if isn't
5954 already. */
5955 if (src_elt == 0)
5956 {
5957 if (insert_regs (new_src, classp, 0))
8ae2b8f6
JW
5958 {
5959 rehash_using_reg (new_src);
5960 src_hash = HASH (new_src, new_mode);
5961 }
7afe21cc
RK
5962 src_elt = insert (new_src, classp, src_hash, new_mode);
5963 src_elt->in_memory = elt->in_memory;
6c4d60f8
JJ
5964 if (GET_CODE (new_src) == ASM_OPERANDS
5965 && elt->cost == MAX_COST)
5966 src_elt->cost = MAX_COST;
7afe21cc
RK
5967 }
5968 else if (classp && classp != src_elt->first_same_value)
278a83b2 5969 /* Show that two things that we've seen before are
7afe21cc
RK
5970 actually the same. */
5971 merge_equiv_classes (src_elt, classp);
5972
5973 classp = src_elt->first_same_value;
da932f04
JL
5974 /* Ignore invalid entries. */
5975 while (classp
f8cfc6aa 5976 && !REG_P (classp->exp)
0516f6fe 5977 && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
da932f04 5978 classp = classp->next_same_value;
7afe21cc
RK
5979 }
5980 }
5981 }
5982
403e25d0
RK
5983 /* Special handling for (set REG0 REG1) where REG0 is the
5984 "cheapest", cheaper than REG1. After cse, REG1 will probably not
5985 be used in the sequel, so (if easily done) change this insn to
5986 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
5987 that computed their value. Then REG1 will become a dead store
5988 and won't cloud the situation for later optimizations.
7afe21cc
RK
5989
5990 Do not make this change if REG1 is a hard register, because it will
5991 then be used in the sequel and we may be changing a two-operand insn
5992 into a three-operand insn.
5993
4a8cae83 5994 Also do not do this if we are operating on a copy of INSN. */
7afe21cc 5995
7b02f4e0
SB
5996 if (n_sets == 1 && sets[0].rtl)
5997 try_back_substitute_reg (sets[0].rtl, insn);
7afe21cc 5998
932ad4d9 5999done:;
7afe21cc
RK
6000}
6001\f
a4c6502a 6002/* Remove from the hash table all expressions that reference memory. */
14a774a9 6003
7afe21cc 6004static void
7080f735 6005invalidate_memory (void)
7afe21cc 6006{
b3694847
SS
6007 int i;
6008 struct table_elt *p, *next;
7afe21cc 6009
9b1549b8 6010 for (i = 0; i < HASH_SIZE; i++)
9ae8ffe7
JL
6011 for (p = table[i]; p; p = next)
6012 {
6013 next = p->next_same_hash;
6014 if (p->in_memory)
6015 remove_from_table (p, i);
6016 }
6017}
6018
7b02f4e0 6019/* Perform invalidation on the basis of everything about INSN,
7afe21cc
RK
6020 except for invalidating the actual places that are SET in it.
6021 This includes the places CLOBBERed, and anything that might
7b02f4e0 6022 alias with something that is SET or CLOBBERed. */
7afe21cc
RK
6023
6024static void
20468884 6025invalidate_from_clobbers (rtx_insn *insn)
7afe21cc 6026{
7b02f4e0
SB
6027 rtx x = PATTERN (insn);
6028
7afe21cc
RK
6029 if (GET_CODE (x) == CLOBBER)
6030 {
6031 rtx ref = XEXP (x, 0);
9ae8ffe7
JL
6032 if (ref)
6033 {
f8cfc6aa 6034 if (REG_P (ref) || GET_CODE (ref) == SUBREG
3c0cb5de 6035 || MEM_P (ref))
9ae8ffe7
JL
6036 invalidate (ref, VOIDmode);
6037 else if (GET_CODE (ref) == STRICT_LOW_PART
6038 || GET_CODE (ref) == ZERO_EXTRACT)
6039 invalidate (XEXP (ref, 0), GET_MODE (ref));
6040 }
7afe21cc
RK
6041 }
6042 else if (GET_CODE (x) == PARALLEL)
6043 {
b3694847 6044 int i;
7afe21cc
RK
6045 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6046 {
b3694847 6047 rtx y = XVECEXP (x, 0, i);
7afe21cc
RK
6048 if (GET_CODE (y) == CLOBBER)
6049 {
6050 rtx ref = XEXP (y, 0);
f8cfc6aa 6051 if (REG_P (ref) || GET_CODE (ref) == SUBREG
3c0cb5de 6052 || MEM_P (ref))
9ae8ffe7
JL
6053 invalidate (ref, VOIDmode);
6054 else if (GET_CODE (ref) == STRICT_LOW_PART
6055 || GET_CODE (ref) == ZERO_EXTRACT)
6056 invalidate (XEXP (ref, 0), GET_MODE (ref));
7afe21cc
RK
6057 }
6058 }
6059 }
6060}
6061\f
7b02f4e0
SB
6062/* Perform invalidation on the basis of everything about INSN.
6063 This includes the places CLOBBERed, and anything that might
6064 alias with something that is SET or CLOBBERed. */
6065
6066static void
20468884 6067invalidate_from_sets_and_clobbers (rtx_insn *insn)
7b02f4e0
SB
6068{
6069 rtx tem;
6070 rtx x = PATTERN (insn);
6071
6072 if (CALL_P (insn))
6073 {
6074 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6075 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
6076 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
6077 }
6078
6079 /* Ensure we invalidate the destination register of a CALL insn.
6080 This is necessary for machines where this register is a fixed_reg,
6081 because no other code would invalidate it. */
6082 if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
6083 invalidate (SET_DEST (x), VOIDmode);
6084
6085 else if (GET_CODE (x) == PARALLEL)
6086 {
6087 int i;
6088
6089 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6090 {
6091 rtx y = XVECEXP (x, 0, i);
6092 if (GET_CODE (y) == CLOBBER)
6093 {
6094 rtx clobbered = XEXP (y, 0);
6095
6096 if (REG_P (clobbered)
6097 || GET_CODE (clobbered) == SUBREG)
6098 invalidate (clobbered, VOIDmode);
6099 else if (GET_CODE (clobbered) == STRICT_LOW_PART
6100 || GET_CODE (clobbered) == ZERO_EXTRACT)
6101 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
6102 }
6103 else if (GET_CODE (y) == SET && GET_CODE (SET_SRC (y)) == CALL)
6104 invalidate (SET_DEST (y), VOIDmode);
6105 }
6106 }
6107}
6108\f
7afe21cc
RK
6109/* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6110 and replace any registers in them with either an equivalent constant
6111 or the canonical form of the register. If we are inside an address,
6112 only do this if the address remains valid.
6113
6114 OBJECT is 0 except when within a MEM in which case it is the MEM.
6115
6116 Return the replacement for X. */
6117
6118static rtx
6fb5fa3c 6119cse_process_notes_1 (rtx x, rtx object, bool *changed)
7afe21cc
RK
6120{
6121 enum rtx_code code = GET_CODE (x);
6f7d635c 6122 const char *fmt = GET_RTX_FORMAT (code);
7afe21cc
RK
6123 int i;
6124
6125 switch (code)
6126 {
7afe21cc
RK
6127 case CONST:
6128 case SYMBOL_REF:
6129 case LABEL_REF:
d8116890 6130 CASE_CONST_ANY:
7afe21cc
RK
6131 case PC:
6132 case CC0:
6133 case LO_SUM:
6134 return x;
6135
6136 case MEM:
c96208fa 6137 validate_change (x, &XEXP (x, 0),
6fb5fa3c 6138 cse_process_notes (XEXP (x, 0), x, changed), 0);
7afe21cc
RK
6139 return x;
6140
6141 case EXPR_LIST:
7afe21cc 6142 if (REG_NOTE_KIND (x) == REG_EQUAL)
6fb5fa3c 6143 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX, changed);
e5af9ddd
RS
6144 /* Fall through. */
6145
6146 case INSN_LIST:
6147 case INT_LIST:
7afe21cc 6148 if (XEXP (x, 1))
6fb5fa3c 6149 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX, changed);
7afe21cc
RK
6150 return x;
6151
e4890d45
RS
6152 case SIGN_EXTEND:
6153 case ZERO_EXTEND:
0b0ee36c 6154 case SUBREG:
e4890d45 6155 {
32e9fa48 6156 rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
e4890d45
RS
6157 /* We don't substitute VOIDmode constants into these rtx,
6158 since they would impede folding. */
32e9fa48
KG
6159 if (GET_MODE (new_rtx) != VOIDmode)
6160 validate_change (object, &XEXP (x, 0), new_rtx, 0);
e4890d45
RS
6161 return x;
6162 }
6163
dfebbdc6
JJ
6164 case UNSIGNED_FLOAT:
6165 {
6166 rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
6167 /* We don't substitute negative VOIDmode constants into these rtx,
6168 since they would impede folding. */
6169 if (GET_MODE (new_rtx) != VOIDmode
6170 || (CONST_INT_P (new_rtx) && INTVAL (new_rtx) >= 0)
6171 || (CONST_DOUBLE_P (new_rtx) && CONST_DOUBLE_HIGH (new_rtx) >= 0))
6172 validate_change (object, &XEXP (x, 0), new_rtx, 0);
6173 return x;
6174 }
6175
7afe21cc 6176 case REG:
30f72379 6177 i = REG_QTY (REGNO (x));
7afe21cc
RK
6178
6179 /* Return a constant or a constant register. */
1bb98cec 6180 if (REGNO_QTY_VALID_P (REGNO (x)))
7afe21cc 6181 {
1bb98cec
DM
6182 struct qty_table_elem *ent = &qty_table[i];
6183
6184 if (ent->const_rtx != NULL_RTX
6185 && (CONSTANT_P (ent->const_rtx)
f8cfc6aa 6186 || REG_P (ent->const_rtx)))
1bb98cec 6187 {
32e9fa48
KG
6188 rtx new_rtx = gen_lowpart (GET_MODE (x), ent->const_rtx);
6189 if (new_rtx)
6190 return copy_rtx (new_rtx);
1bb98cec 6191 }
7afe21cc
RK
6192 }
6193
6194 /* Otherwise, canonicalize this register. */
20468884 6195 return canon_reg (x, NULL);
278a83b2 6196
e9a25f70
JL
6197 default:
6198 break;
7afe21cc
RK
6199 }
6200
6201 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6202 if (fmt[i] == 'e')
6203 validate_change (object, &XEXP (x, i),
6fb5fa3c 6204 cse_process_notes (XEXP (x, i), object, changed), 0);
7afe21cc
RK
6205
6206 return x;
6207}
6fb5fa3c
DB
6208
6209static rtx
6210cse_process_notes (rtx x, rtx object, bool *changed)
6211{
32e9fa48
KG
6212 rtx new_rtx = cse_process_notes_1 (x, object, changed);
6213 if (new_rtx != x)
6fb5fa3c 6214 *changed = true;
32e9fa48 6215 return new_rtx;
6fb5fa3c
DB
6216}
6217
7afe21cc 6218\f
932ad4d9 6219/* Find a path in the CFG, starting with FIRST_BB to perform CSE on.
7afe21cc 6220
932ad4d9
SB
6221 DATA is a pointer to a struct cse_basic_block_data, that is used to
6222 describe the path.
6223 It is filled with a queue of basic blocks, starting with FIRST_BB
6224 and following a trace through the CFG.
b8698a0f 6225
932ad4d9
SB
6226 If all paths starting at FIRST_BB have been followed, or no new path
6227 starting at FIRST_BB can be constructed, this function returns FALSE.
6228 Otherwise, DATA->path is filled and the function returns TRUE indicating
6229 that a path to follow was found.
7afe21cc 6230
2e226e66 6231 If FOLLOW_JUMPS is false, the maximum path length is 1 and the only
932ad4d9 6232 block in the path will be FIRST_BB. */
7afe21cc 6233
932ad4d9
SB
6234static bool
6235cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
6236 int follow_jumps)
7afe21cc 6237{
932ad4d9
SB
6238 basic_block bb;
6239 edge e;
6240 int path_size;
b8698a0f 6241
d7c028c0 6242 bitmap_set_bit (cse_visited_basic_blocks, first_bb->index);
7afe21cc 6243
932ad4d9
SB
6244 /* See if there is a previous path. */
6245 path_size = data->path_size;
6246
6247 /* There is a previous path. Make sure it started with FIRST_BB. */
6248 if (path_size)
6249 gcc_assert (data->path[0].bb == first_bb);
6250
6251 /* There was only one basic block in the last path. Clear the path and
6252 return, so that paths starting at another basic block can be tried. */
6253 if (path_size == 1)
6254 {
6255 path_size = 0;
6256 goto done;
6257 }
6258
6259 /* If the path was empty from the beginning, construct a new path. */
6260 if (path_size == 0)
6261 data->path[path_size++].bb = first_bb;
6262 else
7afe21cc 6263 {
932ad4d9
SB
6264 /* Otherwise, path_size must be equal to or greater than 2, because
6265 a previous path exists that is at least two basic blocks long.
6266
6267 Update the previous branch path, if any. If the last branch was
6268 previously along the branch edge, take the fallthrough edge now. */
6269 while (path_size >= 2)
7afe21cc 6270 {
932ad4d9
SB
6271 basic_block last_bb_in_path, previous_bb_in_path;
6272 edge e;
6273
6274 --path_size;
6275 last_bb_in_path = data->path[path_size].bb;
6276 previous_bb_in_path = data->path[path_size - 1].bb;
6277
6278 /* If we previously followed a path along the branch edge, try
6279 the fallthru edge now. */
6280 if (EDGE_COUNT (previous_bb_in_path->succs) == 2
6281 && any_condjump_p (BB_END (previous_bb_in_path))
6282 && (e = find_edge (previous_bb_in_path, last_bb_in_path))
6283 && e == BRANCH_EDGE (previous_bb_in_path))
6284 {
6285 bb = FALLTHRU_EDGE (previous_bb_in_path)->dest;
fefa31b5 6286 if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
481e0a49
AO
6287 && single_pred_p (bb)
6288 /* We used to assert here that we would only see blocks
6289 that we have not visited yet. But we may end up
6290 visiting basic blocks twice if the CFG has changed
6291 in this run of cse_main, because when the CFG changes
6292 the topological sort of the CFG also changes. A basic
6293 blocks that previously had more than two predecessors
6294 may now have a single predecessor, and become part of
6295 a path that starts at another basic block.
6296
6297 We still want to visit each basic block only once, so
6298 halt the path here if we have already visited BB. */
d7c028c0 6299 && !bitmap_bit_p (cse_visited_basic_blocks, bb->index))
932ad4d9 6300 {
d7c028c0 6301 bitmap_set_bit (cse_visited_basic_blocks, bb->index);
932ad4d9
SB
6302 data->path[path_size++].bb = bb;
6303 break;
6304 }
6305 }
6306
6307 data->path[path_size].bb = NULL;
6308 }
6309
6310 /* If only one block remains in the path, bail. */
6311 if (path_size == 1)
6312 {
6313 path_size = 0;
6314 goto done;
7afe21cc 6315 }
7afe21cc
RK
6316 }
6317
932ad4d9
SB
6318 /* Extend the path if possible. */
6319 if (follow_jumps)
7afe21cc 6320 {
932ad4d9
SB
6321 bb = data->path[path_size - 1].bb;
6322 while (bb && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH))
6323 {
6324 if (single_succ_p (bb))
6325 e = single_succ_edge (bb);
6326 else if (EDGE_COUNT (bb->succs) == 2
6327 && any_condjump_p (BB_END (bb)))
6328 {
6329 /* First try to follow the branch. If that doesn't lead
6330 to a useful path, follow the fallthru edge. */
6331 e = BRANCH_EDGE (bb);
6332 if (!single_pred_p (e->dest))
6333 e = FALLTHRU_EDGE (bb);
6334 }
6335 else
6336 e = NULL;
7afe21cc 6337
bc6d3f91 6338 if (e
76015c34 6339 && !((e->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
fefa31b5 6340 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
481e0a49
AO
6341 && single_pred_p (e->dest)
6342 /* Avoid visiting basic blocks twice. The large comment
6343 above explains why this can happen. */
d7c028c0 6344 && !bitmap_bit_p (cse_visited_basic_blocks, e->dest->index))
932ad4d9
SB
6345 {
6346 basic_block bb2 = e->dest;
d7c028c0 6347 bitmap_set_bit (cse_visited_basic_blocks, bb2->index);
932ad4d9
SB
6348 data->path[path_size++].bb = bb2;
6349 bb = bb2;
6350 }
6351 else
6352 bb = NULL;
6353 }
6354 }
6355
6356done:
6357 data->path_size = path_size;
6358 return path_size != 0;
6359}
6360\f
6361/* Dump the path in DATA to file F. NSETS is the number of sets
6362 in the path. */
6363
6364static void
6365cse_dump_path (struct cse_basic_block_data *data, int nsets, FILE *f)
6366{
6367 int path_entry;
6368
6369 fprintf (f, ";; Following path with %d sets: ", nsets);
6370 for (path_entry = 0; path_entry < data->path_size; path_entry++)
6371 fprintf (f, "%d ", (data->path[path_entry].bb)->index);
6372 fputc ('\n', dump_file);
6373 fflush (f);
6374}
6375
a7582f7c
SB
6376\f
6377/* Return true if BB has exception handling successor edges. */
6378
6379static bool
6380have_eh_succ_edges (basic_block bb)
6381{
6382 edge e;
6383 edge_iterator ei;
6384
6385 FOR_EACH_EDGE (e, ei, bb->succs)
6386 if (e->flags & EDGE_EH)
6387 return true;
6388
6389 return false;
6390}
6391
932ad4d9
SB
6392\f
6393/* Scan to the end of the path described by DATA. Return an estimate of
6fb5fa3c 6394 the total number of SETs of all insns in the path. */
932ad4d9
SB
6395
6396static void
6397cse_prescan_path (struct cse_basic_block_data *data)
6398{
6399 int nsets = 0;
932ad4d9
SB
6400 int path_size = data->path_size;
6401 int path_entry;
6402
6403 /* Scan to end of each basic block in the path. */
b8698a0f 6404 for (path_entry = 0; path_entry < path_size; path_entry++)
932ad4d9
SB
6405 {
6406 basic_block bb;
20468884 6407 rtx_insn *insn;
164c8956 6408
932ad4d9 6409 bb = data->path[path_entry].bb;
7afe21cc 6410
932ad4d9 6411 FOR_BB_INSNS (bb, insn)
7afe21cc 6412 {
932ad4d9
SB
6413 if (!INSN_P (insn))
6414 continue;
278a83b2 6415
932ad4d9
SB
6416 /* A PARALLEL can have lots of SETs in it,
6417 especially if it is really an ASM_OPERANDS. */
6418 if (GET_CODE (PATTERN (insn)) == PARALLEL)
6419 nsets += XVECLEN (PATTERN (insn), 0);
6420 else
6421 nsets += 1;
7afe21cc 6422 }
932ad4d9
SB
6423 }
6424
932ad4d9
SB
6425 data->nsets = nsets;
6426}
6427\f
f0002948
RS
6428/* Return true if the pattern of INSN uses a LABEL_REF for which
6429 there isn't a REG_LABEL_OPERAND note. */
6430
6431static bool
6432check_for_label_ref (rtx_insn *insn)
6433{
6434 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL_OPERAND
6435 note for it, we must rerun jump since it needs to place the note. If
6436 this is a LABEL_REF for a CODE_LABEL that isn't in the insn chain,
6437 don't do this since no REG_LABEL_OPERAND will be added. */
6438 subrtx_iterator::array_type array;
6439 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
6440 {
6441 const_rtx x = *iter;
6442 if (GET_CODE (x) == LABEL_REF
6443 && !LABEL_REF_NONLOCAL_P (x)
6444 && (!JUMP_P (insn)
a827d9b1
DM
6445 || !label_is_jump_target_p (LABEL_REF_LABEL (x), insn))
6446 && LABEL_P (LABEL_REF_LABEL (x))
6447 && INSN_UID (LABEL_REF_LABEL (x)) != 0
6448 && !find_reg_note (insn, REG_LABEL_OPERAND, LABEL_REF_LABEL (x)))
f0002948
RS
6449 return true;
6450 }
6451 return false;
6452}
6453
932ad4d9 6454/* Process a single extended basic block described by EBB_DATA. */
7afe21cc 6455
932ad4d9
SB
6456static void
6457cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
6458{
6459 int path_size = ebb_data->path_size;
6460 int path_entry;
6461 int num_insns = 0;
6462
6463 /* Allocate the space needed by qty_table. */
6464 qty_table = XNEWVEC (struct qty_table_elem, max_qty);
6465
6466 new_basic_block ();
89a95777
KZ
6467 cse_ebb_live_in = df_get_live_in (ebb_data->path[0].bb);
6468 cse_ebb_live_out = df_get_live_out (ebb_data->path[path_size - 1].bb);
932ad4d9
SB
6469 for (path_entry = 0; path_entry < path_size; path_entry++)
6470 {
6471 basic_block bb;
20468884 6472 rtx_insn *insn;
932ad4d9
SB
6473
6474 bb = ebb_data->path[path_entry].bb;
e186ff69
AK
6475
6476 /* Invalidate recorded information for eh regs if there is an EH
6477 edge pointing to that bb. */
6478 if (bb_has_eh_pred (bb))
6479 {
292321a5 6480 df_ref def;
e186ff69 6481
292321a5
RS
6482 FOR_EACH_ARTIFICIAL_DEF (def, bb->index)
6483 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
6484 invalidate (DF_REF_REG (def), GET_MODE (DF_REF_REG (def)));
e186ff69
AK
6485 }
6486
9fcb01de 6487 optimize_this_for_speed_p = optimize_bb_for_speed_p (bb);
ba4807a0 6488 FOR_BB_INSNS (bb, insn)
7afe21cc 6489 {
932ad4d9
SB
6490 /* If we have processed 1,000 insns, flush the hash table to
6491 avoid extreme quadratic behavior. We must not include NOTEs
6492 in the count since there may be more of them when generating
6493 debugging information. If we clear the table at different
6494 times, code generated with -g -O might be different than code
6495 generated with -O but not -g.
6496
6497 FIXME: This is a real kludge and needs to be done some other
6498 way. */
b5b8b0ac 6499 if (NONDEBUG_INSN_P (insn)
932ad4d9
SB
6500 && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
6501 {
6502 flush_hash_table ();
6503 num_insns = 0;
6504 }
7afe21cc 6505
932ad4d9 6506 if (INSN_P (insn))
7afe21cc 6507 {
932ad4d9
SB
6508 /* Process notes first so we have all notes in canonical forms
6509 when looking for duplicate operations. */
6510 if (REG_NOTES (insn))
6fb5fa3c
DB
6511 {
6512 bool changed = false;
6513 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn),
6514 NULL_RTX, &changed);
6515 if (changed)
6516 df_notes_rescan (insn);
6517 }
932ad4d9 6518
4a8cae83 6519 cse_insn (insn);
932ad4d9 6520
932ad4d9
SB
6521 /* If we haven't already found an insn where we added a LABEL_REF,
6522 check this one. */
2aac3a01 6523 if (INSN_P (insn) && !recorded_label_ref
f0002948 6524 && check_for_label_ref (insn))
2aac3a01 6525 recorded_label_ref = true;
96fb470d 6526
f1e52ed6 6527#if HAVE_cc0
5f262d13 6528 if (NONDEBUG_INSN_P (insn))
96fb470d 6529 {
5f262d13
AO
6530 /* If the previous insn sets CC0 and this insn no
6531 longer references CC0, delete the previous insn.
6532 Here we use fact that nothing expects CC0 to be
6533 valid over an insn, which is true until the final
6534 pass. */
20468884
DM
6535 rtx_insn *prev_insn;
6536 rtx tem;
5f262d13
AO
6537
6538 prev_insn = prev_nonnote_nondebug_insn (insn);
6539 if (prev_insn && NONJUMP_INSN_P (prev_insn)
6540 && (tem = single_set (prev_insn)) != NULL_RTX
6541 && SET_DEST (tem) == cc0_rtx
6542 && ! reg_mentioned_p (cc0_rtx, PATTERN (insn)))
6543 delete_insn (prev_insn);
6544
6545 /* If this insn is not the last insn in the basic
6546 block, it will be PREV_INSN(insn) in the next
6547 iteration. If we recorded any CC0-related
6548 information for this insn, remember it. */
6549 if (insn != BB_END (bb))
6550 {
6551 prev_insn_cc0 = this_insn_cc0;
6552 prev_insn_cc0_mode = this_insn_cc0_mode;
6553 }
96fb470d
SB
6554 }
6555#endif
932ad4d9
SB
6556 }
6557 }
7afe21cc 6558
a7582f7c
SB
6559 /* With non-call exceptions, we are not always able to update
6560 the CFG properly inside cse_insn. So clean up possibly
6561 redundant EH edges here. */
8f4f502f 6562 if (cfun->can_throw_non_call_exceptions && have_eh_succ_edges (bb))
2aac3a01 6563 cse_cfg_altered |= purge_dead_edges (bb);
a7582f7c 6564
932ad4d9
SB
6565 /* If we changed a conditional jump, we may have terminated
6566 the path we are following. Check that by verifying that
6567 the edge we would take still exists. If the edge does
6568 not exist anymore, purge the remainder of the path.
6569 Note that this will cause us to return to the caller. */
6570 if (path_entry < path_size - 1)
6571 {
6572 basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6573 if (!find_edge (bb, next_bb))
27511c65
SB
6574 {
6575 do
6576 {
6577 path_size--;
6578
6579 /* If we truncate the path, we must also reset the
6580 visited bit on the remaining blocks in the path,
6581 or we will never visit them at all. */
d7c028c0 6582 bitmap_clear_bit (cse_visited_basic_blocks,
27511c65
SB
6583 ebb_data->path[path_size].bb->index);
6584 ebb_data->path[path_size].bb = NULL;
6585 }
6586 while (path_size - 1 != path_entry);
6587 ebb_data->path_size = path_size;
6588 }
7afe21cc 6589 }
7afe21cc 6590
932ad4d9
SB
6591 /* If this is a conditional jump insn, record any known
6592 equivalences due to the condition being tested. */
6593 insn = BB_END (bb);
6594 if (path_entry < path_size - 1
6595 && JUMP_P (insn)
6596 && single_set (insn)
6597 && any_condjump_p (insn))
6598 {
6599 basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6600 bool taken = (next_bb == BRANCH_EDGE (bb)->dest);
6601 record_jump_equiv (insn, taken);
6602 }
96fb470d 6603
96fb470d
SB
6604 /* Clear the CC0-tracking related insns, they can't provide
6605 useful information across basic block boundaries. */
6606 prev_insn_cc0 = 0;
932ad4d9 6607 }
7afe21cc 6608
932ad4d9 6609 gcc_assert (next_qty <= max_qty);
7afe21cc 6610
932ad4d9 6611 free (qty_table);
7afe21cc 6612}
6fb5fa3c 6613
7afe21cc 6614\f
7afe21cc
RK
6615/* Perform cse on the instructions of a function.
6616 F is the first instruction.
6617 NREGS is one plus the highest pseudo-reg number used in the instruction.
6618
2aac3a01
EB
6619 Return 2 if jump optimizations should be redone due to simplifications
6620 in conditional jump instructions.
6621 Return 1 if the CFG should be cleaned up because it has been modified.
6622 Return 0 otherwise. */
7afe21cc 6623
711417cd 6624static int
20468884 6625cse_main (rtx_insn *f ATTRIBUTE_UNUSED, int nregs)
7afe21cc 6626{
932ad4d9
SB
6627 struct cse_basic_block_data ebb_data;
6628 basic_block bb;
8b1c6fd7 6629 int *rc_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
932ad4d9 6630 int i, n_blocks;
7afe21cc 6631
6fb5fa3c 6632 df_set_flags (DF_LR_RUN_DCE);
dca3da7a 6633 df_note_add_problem ();
6fb5fa3c
DB
6634 df_analyze ();
6635 df_set_flags (DF_DEFER_INSN_RESCAN);
6636
6637 reg_scan (get_insns (), max_reg_num ());
bc5e3b54
KH
6638 init_cse_reg_info (nregs);
6639
932ad4d9
SB
6640 ebb_data.path = XNEWVEC (struct branch_path,
6641 PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
9bf8cfbf 6642
2aac3a01
EB
6643 cse_cfg_altered = false;
6644 cse_jumps_altered = false;
6645 recorded_label_ref = false;
7afe21cc 6646 constant_pool_entries_cost = 0;
dd0ba281 6647 constant_pool_entries_regcost = 0;
932ad4d9
SB
6648 ebb_data.path_size = 0;
6649 ebb_data.nsets = 0;
2f93eea8 6650 rtl_hooks = cse_rtl_hooks;
7afe21cc
RK
6651
6652 init_recog ();
9ae8ffe7 6653 init_alias_analysis ();
7afe21cc 6654
5ed6ace5 6655 reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);
7afe21cc 6656
932ad4d9 6657 /* Set up the table of already visited basic blocks. */
8b1c6fd7 6658 cse_visited_basic_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
f61e445a 6659 bitmap_clear (cse_visited_basic_blocks);
7afe21cc 6660
a7582f7c 6661 /* Loop over basic blocks in reverse completion order (RPO),
932ad4d9 6662 excluding the ENTRY and EXIT blocks. */
27511c65 6663 n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);
932ad4d9
SB
6664 i = 0;
6665 while (i < n_blocks)
7afe21cc 6666 {
a7582f7c 6667 /* Find the first block in the RPO queue that we have not yet
932ad4d9
SB
6668 processed before. */
6669 do
e9a25f70 6670 {
06e28de2 6671 bb = BASIC_BLOCK_FOR_FN (cfun, rc_order[i++]);
e9a25f70 6672 }
d7c028c0 6673 while (bitmap_bit_p (cse_visited_basic_blocks, bb->index)
932ad4d9 6674 && i < n_blocks);
7afe21cc 6675
932ad4d9
SB
6676 /* Find all paths starting with BB, and process them. */
6677 while (cse_find_path (bb, &ebb_data, flag_cse_follow_jumps))
7afe21cc 6678 {
932ad4d9
SB
6679 /* Pre-scan the path. */
6680 cse_prescan_path (&ebb_data);
7afe21cc 6681
932ad4d9
SB
6682 /* If this basic block has no sets, skip it. */
6683 if (ebb_data.nsets == 0)
6684 continue;
7afe21cc 6685
2e226e66 6686 /* Get a reasonable estimate for the maximum number of qty's
932ad4d9
SB
6687 needed for this path. For this, we take the number of sets
6688 and multiply that by MAX_RECOG_OPERANDS. */
6689 max_qty = ebb_data.nsets * MAX_RECOG_OPERANDS;
7afe21cc 6690
932ad4d9
SB
6691 /* Dump the path we're about to process. */
6692 if (dump_file)
6693 cse_dump_path (&ebb_data, ebb_data.nsets, dump_file);
6a5293dc 6694
932ad4d9 6695 cse_extended_basic_block (&ebb_data);
7afe21cc 6696 }
7afe21cc
RK
6697 }
6698
932ad4d9
SB
6699 /* Clean up. */
6700 end_alias_analysis ();
932ad4d9
SB
6701 free (reg_eqv_table);
6702 free (ebb_data.path);
6703 sbitmap_free (cse_visited_basic_blocks);
27511c65 6704 free (rc_order);
932ad4d9 6705 rtl_hooks = general_rtl_hooks;
75c6bd46 6706
2aac3a01
EB
6707 if (cse_jumps_altered || recorded_label_ref)
6708 return 2;
6709 else if (cse_cfg_altered)
6710 return 1;
6711 else
6712 return 0;
7afe21cc
RK
6713}
6714\f
6715/* Count the number of times registers are used (not set) in X.
6716 COUNTS is an array in which we accumulate the count, INCR is how much
b92ba6ff
R
6717 we count each register usage.
6718
6719 Don't count a usage of DEST, which is the SET_DEST of a SET which
6720 contains X in its SET_SRC. This is because such a SET does not
6721 modify the liveness of DEST.
34161e98
RS
6722 DEST is set to pc_rtx for a trapping insn, or for an insn with side effects.
6723 We must then count uses of a SET_DEST regardless, because the insn can't be
6724 deleted here. */
7afe21cc
RK
6725
6726static void
b92ba6ff 6727count_reg_usage (rtx x, int *counts, rtx dest, int incr)
7afe21cc 6728{
f1e7c95f 6729 enum rtx_code code;
b17d5d7c 6730 rtx note;
6f7d635c 6731 const char *fmt;
7afe21cc
RK
6732 int i, j;
6733
f1e7c95f
RK
6734 if (x == 0)
6735 return;
6736
6737 switch (code = GET_CODE (x))
7afe21cc
RK
6738 {
6739 case REG:
b92ba6ff
R
6740 if (x != dest)
6741 counts[REGNO (x)] += incr;
7afe21cc
RK
6742 return;
6743
6744 case PC:
6745 case CC0:
6746 case CONST:
d8116890 6747 CASE_CONST_ANY:
7afe21cc
RK
6748 case SYMBOL_REF:
6749 case LABEL_REF:
02e39abc
JL
6750 return;
6751
278a83b2 6752 case CLOBBER:
02e39abc
JL
6753 /* If we are clobbering a MEM, mark any registers inside the address
6754 as being used. */
3c0cb5de 6755 if (MEM_P (XEXP (x, 0)))
b92ba6ff 6756 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7afe21cc
RK
6757 return;
6758
6759 case SET:
6760 /* Unless we are setting a REG, count everything in SET_DEST. */
f8cfc6aa 6761 if (!REG_P (SET_DEST (x)))
b92ba6ff
R
6762 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
6763 count_reg_usage (SET_SRC (x), counts,
6764 dest ? dest : SET_DEST (x),
6765 incr);
7afe21cc
RK
6766 return;
6767
b5b8b0ac
AO
6768 case DEBUG_INSN:
6769 return;
6770
f1e7c95f 6771 case CALL_INSN:
7afe21cc
RK
6772 case INSN:
6773 case JUMP_INSN:
2da02156 6774 /* We expect dest to be NULL_RTX here. If the insn may throw,
34161e98
RS
6775 or if it cannot be deleted due to side-effects, mark this fact
6776 by setting DEST to pc_rtx. */
2da02156
EB
6777 if ((!cfun->can_delete_dead_exceptions && !insn_nothrow_p (x))
6778 || side_effects_p (PATTERN (x)))
b92ba6ff
R
6779 dest = pc_rtx;
6780 if (code == CALL_INSN)
6781 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
6782 count_reg_usage (PATTERN (x), counts, dest, incr);
7afe21cc
RK
6783
6784 /* Things used in a REG_EQUAL note aren't dead since loop may try to
6785 use them. */
6786
b17d5d7c
ZD
6787 note = find_reg_equal_equiv_note (x);
6788 if (note)
839844be
R
6789 {
6790 rtx eqv = XEXP (note, 0);
6791
6792 if (GET_CODE (eqv) == EXPR_LIST)
6793 /* This REG_EQUAL note describes the result of a function call.
6794 Process all the arguments. */
6795 do
6796 {
b92ba6ff 6797 count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
839844be
R
6798 eqv = XEXP (eqv, 1);
6799 }
6800 while (eqv && GET_CODE (eqv) == EXPR_LIST);
6801 else
b92ba6ff 6802 count_reg_usage (eqv, counts, dest, incr);
839844be 6803 }
7afe21cc
RK
6804 return;
6805
ee960939
OH
6806 case EXPR_LIST:
6807 if (REG_NOTE_KIND (x) == REG_EQUAL
6808 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
6809 /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
6810 involving registers in the address. */
6811 || GET_CODE (XEXP (x, 0)) == CLOBBER)
b92ba6ff 6812 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
ee960939 6813
b92ba6ff 6814 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
ee960939
OH
6815 return;
6816
a6c14a64 6817 case ASM_OPERANDS:
a6c14a64
RH
6818 /* Iterate over just the inputs, not the constraints as well. */
6819 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
b92ba6ff 6820 count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
a6c14a64
RH
6821 return;
6822
7afe21cc 6823 case INSN_LIST:
f91aec98 6824 case INT_LIST:
341c100f 6825 gcc_unreachable ();
278a83b2 6826
e9a25f70
JL
6827 default:
6828 break;
7afe21cc
RK
6829 }
6830
6831 fmt = GET_RTX_FORMAT (code);
6832 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6833 {
6834 if (fmt[i] == 'e')
b92ba6ff 6835 count_reg_usage (XEXP (x, i), counts, dest, incr);
7afe21cc
RK
6836 else if (fmt[i] == 'E')
6837 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
b92ba6ff 6838 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
7afe21cc
RK
6839 }
6840}
6841\f
6699b754 6842/* Return true if X is a dead register. */
b5b8b0ac 6843
6699b754 6844static inline int
a5b9bc17 6845is_dead_reg (const_rtx x, int *counts)
b5b8b0ac 6846{
b5b8b0ac
AO
6847 return (REG_P (x)
6848 && REGNO (x) >= FIRST_PSEUDO_REGISTER
6849 && counts[REGNO (x)] == 0);
6850}
6851
4793dca1
JH
6852/* Return true if set is live. */
6853static bool
20468884 6854set_live_p (rtx set, rtx_insn *insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0. */
7080f735 6855 int *counts)
4793dca1 6856{
4793dca1 6857 rtx tem;
4793dca1
JH
6858
6859 if (set_noop_p (set))
6860 ;
6861
4793dca1
JH
6862 else if (GET_CODE (SET_DEST (set)) == CC0
6863 && !side_effects_p (SET_SRC (set))
5f262d13 6864 && ((tem = next_nonnote_nondebug_insn (insn)) == NULL_RTX
4793dca1
JH
6865 || !INSN_P (tem)
6866 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
6867 return false;
6699b754 6868 else if (!is_dead_reg (SET_DEST (set), counts)
8fff4fc1 6869 || side_effects_p (SET_SRC (set)))
4793dca1
JH
6870 return true;
6871 return false;
6872}
6873
6874/* Return true if insn is live. */
6875
6876static bool
20468884 6877insn_live_p (rtx_insn *insn, int *counts)
4793dca1
JH
6878{
6879 int i;
2da02156 6880 if (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
a646f6cc
AH
6881 return true;
6882 else if (GET_CODE (PATTERN (insn)) == SET)
0021de69 6883 return set_live_p (PATTERN (insn), insn, counts);
4793dca1 6884 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
0021de69
DB
6885 {
6886 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6887 {
6888 rtx elt = XVECEXP (PATTERN (insn), 0, i);
4793dca1 6889
0021de69
DB
6890 if (GET_CODE (elt) == SET)
6891 {
6892 if (set_live_p (elt, insn, counts))
6893 return true;
6894 }
6895 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
6896 return true;
6897 }
6898 return false;
6899 }
b5b8b0ac
AO
6900 else if (DEBUG_INSN_P (insn))
6901 {
20468884 6902 rtx_insn *next;
b5b8b0ac
AO
6903
6904 for (next = NEXT_INSN (insn); next; next = NEXT_INSN (next))
6905 if (NOTE_P (next))
6906 continue;
6907 else if (!DEBUG_INSN_P (next))
6908 return true;
6909 else if (INSN_VAR_LOCATION_DECL (insn) == INSN_VAR_LOCATION_DECL (next))
6910 return false;
6911
b5b8b0ac
AO
6912 return true;
6913 }
4793dca1
JH
6914 else
6915 return true;
6916}
6917
6699b754
JJ
6918/* Count the number of stores into pseudo. Callback for note_stores. */
6919
6920static void
6921count_stores (rtx x, const_rtx set ATTRIBUTE_UNUSED, void *data)
6922{
6923 int *counts = (int *) data;
6924 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
6925 counts[REGNO (x)]++;
6926}
6927
a5b9bc17
RS
6928/* Return if DEBUG_INSN pattern PAT needs to be reset because some dead
6929 pseudo doesn't have a replacement. COUNTS[X] is zero if register X
6930 is dead and REPLACEMENTS[X] is null if it has no replacemenet.
6931 Set *SEEN_REPL to true if we see a dead register that does have
6932 a replacement. */
6699b754 6933
a5b9bc17
RS
6934static bool
6935is_dead_debug_insn (const_rtx pat, int *counts, rtx *replacements,
6936 bool *seen_repl)
6699b754 6937{
a5b9bc17
RS
6938 subrtx_iterator::array_type array;
6939 FOR_EACH_SUBRTX (iter, array, pat, NONCONST)
6699b754 6940 {
a5b9bc17
RS
6941 const_rtx x = *iter;
6942 if (is_dead_reg (x, counts))
6943 {
6944 if (replacements && replacements[REGNO (x)] != NULL_RTX)
6945 *seen_repl = true;
6946 else
6947 return true;
6948 }
6699b754 6949 }
a5b9bc17 6950 return false;
6699b754
JJ
6951}
6952
6953/* Replace a dead pseudo in a DEBUG_INSN with replacement DEBUG_EXPR.
6954 Callback for simplify_replace_fn_rtx. */
6955
6956static rtx
6957replace_dead_reg (rtx x, const_rtx old_rtx ATTRIBUTE_UNUSED, void *data)
6958{
6959 rtx *replacements = (rtx *) data;
6960
6961 if (REG_P (x)
6962 && REGNO (x) >= FIRST_PSEUDO_REGISTER
6963 && replacements[REGNO (x)] != NULL_RTX)
6964 {
6965 if (GET_MODE (x) == GET_MODE (replacements[REGNO (x)]))
6966 return replacements[REGNO (x)];
6967 return lowpart_subreg (GET_MODE (x), replacements[REGNO (x)],
6968 GET_MODE (replacements[REGNO (x)]));
6969 }
6970 return NULL_RTX;
6971}
6972
7afe21cc
RK
6973/* Scan all the insns and delete any that are dead; i.e., they store a register
6974 that is never used or they copy a register to itself.
6975
c6a26dc4
JL
6976 This is used to remove insns made obviously dead by cse, loop or other
6977 optimizations. It improves the heuristics in loop since it won't try to
6978 move dead invariants out of loops or make givs for dead quantities. The
6979 remaining passes of the compilation are also sped up. */
7afe21cc 6980
3dec4024 6981int
169d13f5 6982delete_trivially_dead_insns (rtx_insn *insns, int nreg)
7afe21cc 6983{
4da896b2 6984 int *counts;
169d13f5 6985 rtx_insn *insn, *prev;
6699b754 6986 rtx *replacements = NULL;
65e9fa10 6987 int ndead = 0;
7afe21cc 6988
3dec4024 6989 timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7afe21cc 6990 /* First count the number of times each register is used. */
6699b754
JJ
6991 if (MAY_HAVE_DEBUG_INSNS)
6992 {
6993 counts = XCNEWVEC (int, nreg * 3);
6994 for (insn = insns; insn; insn = NEXT_INSN (insn))
6995 if (DEBUG_INSN_P (insn))
6996 count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
6997 NULL_RTX, 1);
6998 else if (INSN_P (insn))
6999 {
7000 count_reg_usage (insn, counts, NULL_RTX, 1);
7001 note_stores (PATTERN (insn), count_stores, counts + nreg * 2);
7002 }
7003 /* If there can be debug insns, COUNTS are 3 consecutive arrays.
7004 First one counts how many times each pseudo is used outside
7005 of debug insns, second counts how many times each pseudo is
7006 used in debug insns and third counts how many times a pseudo
7007 is stored. */
7008 }
7009 else
7010 {
7011 counts = XCNEWVEC (int, nreg);
7012 for (insn = insns; insn; insn = NEXT_INSN (insn))
7013 if (INSN_P (insn))
7014 count_reg_usage (insn, counts, NULL_RTX, 1);
7015 /* If no debug insns can be present, COUNTS is just an array
7016 which counts how many times each pseudo is used. */
7017 }
56873e13
ES
7018 /* Pseudo PIC register should be considered as used due to possible
7019 new usages generated. */
7020 if (!reload_completed
7021 && pic_offset_table_rtx
7022 && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
7023 counts[REGNO (pic_offset_table_rtx)]++;
65e9fa10
KH
7024 /* Go from the last insn to the first and delete insns that only set unused
7025 registers or copy a register to itself. As we delete an insn, remove
7026 usage counts for registers it uses.
0cedb36c 7027
65e9fa10
KH
7028 The first jump optimization pass may leave a real insn as the last
7029 insn in the function. We must not skip that insn or we may end
6699b754
JJ
7030 up deleting code that is not really dead.
7031
7032 If some otherwise unused register is only used in DEBUG_INSNs,
7033 try to create a DEBUG_EXPR temporary and emit a DEBUG_INSN before
7034 the setter. Then go through DEBUG_INSNs and if a DEBUG_EXPR
7035 has been created for the unused register, replace it with
7036 the DEBUG_EXPR, otherwise reset the DEBUG_INSN. */
03ce14db 7037 for (insn = get_last_insn (); insn; insn = prev)
65e9fa10
KH
7038 {
7039 int live_insn = 0;
7afe21cc 7040
03ce14db
KH
7041 prev = PREV_INSN (insn);
7042 if (!INSN_P (insn))
7043 continue;
7afe21cc 7044
4a8cae83 7045 live_insn = insn_live_p (insn, counts);
7afe21cc 7046
65e9fa10
KH
7047 /* If this is a dead insn, delete it and show registers in it aren't
7048 being used. */
7afe21cc 7049
6fb5fa3c 7050 if (! live_insn && dbg_cnt (delete_trivial_dead))
65e9fa10 7051 {
6699b754
JJ
7052 if (DEBUG_INSN_P (insn))
7053 count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
7054 NULL_RTX, -1);
7055 else
7056 {
7057 rtx set;
7058 if (MAY_HAVE_DEBUG_INSNS
7059 && (set = single_set (insn)) != NULL_RTX
7060 && is_dead_reg (SET_DEST (set), counts)
7061 /* Used at least once in some DEBUG_INSN. */
7062 && counts[REGNO (SET_DEST (set)) + nreg] > 0
7063 /* And set exactly once. */
7064 && counts[REGNO (SET_DEST (set)) + nreg * 2] == 1
7065 && !side_effects_p (SET_SRC (set))
7066 && asm_noperands (PATTERN (insn)) < 0)
7067 {
b2908ba6
DM
7068 rtx dval, bind_var_loc;
7069 rtx_insn *bind;
6699b754
JJ
7070
7071 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
7072 dval = make_debug_expr_from_rtl (SET_DEST (set));
7073
7074 /* Emit a debug bind insn before the insn in which
7075 reg dies. */
b2908ba6
DM
7076 bind_var_loc =
7077 gen_rtx_VAR_LOCATION (GET_MODE (SET_DEST (set)),
7078 DEBUG_EXPR_TREE_DECL (dval),
7079 SET_SRC (set),
7080 VAR_INIT_STATUS_INITIALIZED);
7081 count_reg_usage (bind_var_loc, counts + nreg, NULL_RTX, 1);
7082
7083 bind = emit_debug_insn_before (bind_var_loc, insn);
6699b754
JJ
7084 df_insn_rescan (bind);
7085
7086 if (replacements == NULL)
7087 replacements = XCNEWVEC (rtx, nreg);
7088 replacements[REGNO (SET_DEST (set))] = dval;
7089 }
7090
7091 count_reg_usage (insn, counts, NULL_RTX, -1);
7092 ndead++;
7093 }
65e9fa10 7094 delete_insn_and_edges (insn);
65e9fa10 7095 }
68252e27 7096 }
4da896b2 7097
6699b754
JJ
7098 if (MAY_HAVE_DEBUG_INSNS)
7099 {
6699b754
JJ
7100 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
7101 if (DEBUG_INSN_P (insn))
7102 {
7103 /* If this debug insn references a dead register that wasn't replaced
7104 with an DEBUG_EXPR, reset the DEBUG_INSN. */
a5b9bc17
RS
7105 bool seen_repl = false;
7106 if (is_dead_debug_insn (INSN_VAR_LOCATION_LOC (insn),
7107 counts, replacements, &seen_repl))
6699b754
JJ
7108 {
7109 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
7110 df_insn_rescan (insn);
7111 }
a5b9bc17 7112 else if (seen_repl)
6699b754
JJ
7113 {
7114 INSN_VAR_LOCATION_LOC (insn)
7115 = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn),
7116 NULL_RTX, replace_dead_reg,
7117 replacements);
7118 df_insn_rescan (insn);
7119 }
7120 }
04695783 7121 free (replacements);
6699b754
JJ
7122 }
7123
c263766c 7124 if (dump_file && ndead)
65e9fa10
KH
7125 fprintf (dump_file, "Deleted %i trivially dead insns\n",
7126 ndead);
4da896b2
MM
7127 /* Clean up. */
7128 free (counts);
3dec4024
JH
7129 timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7130 return ndead;
7afe21cc 7131}
e129d93a 7132
a9052a40
RS
7133/* If LOC contains references to NEWREG in a different mode, change them
7134 to use NEWREG instead. */
e129d93a 7135
a9052a40
RS
7136static void
7137cse_change_cc_mode (subrtx_ptr_iterator::array_type &array,
7138 rtx *loc, rtx insn, rtx newreg)
e129d93a 7139{
a9052a40 7140 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
e129d93a 7141 {
a9052a40
RS
7142 rtx *loc = *iter;
7143 rtx x = *loc;
7144 if (x
7145 && REG_P (x)
7146 && REGNO (x) == REGNO (newreg)
7147 && GET_MODE (x) != GET_MODE (newreg))
7148 {
7149 validate_change (insn, loc, newreg, 1);
7150 iter.skip_subrtxes ();
7151 }
e129d93a 7152 }
e129d93a
ILT
7153}
7154
fc188d37
AK
7155/* Change the mode of any reference to the register REGNO (NEWREG) to
7156 GET_MODE (NEWREG) in INSN. */
7157
7158static void
20468884 7159cse_change_cc_mode_insn (rtx_insn *insn, rtx newreg)
fc188d37 7160{
fc188d37
AK
7161 int success;
7162
7163 if (!INSN_P (insn))
7164 return;
7165
a9052a40
RS
7166 subrtx_ptr_iterator::array_type array;
7167 cse_change_cc_mode (array, &PATTERN (insn), insn, newreg);
7168 cse_change_cc_mode (array, &REG_NOTES (insn), insn, newreg);
b8698a0f 7169
fc188d37
AK
7170 /* If the following assertion was triggered, there is most probably
7171 something wrong with the cc_modes_compatible back end function.
7172 CC modes only can be considered compatible if the insn - with the mode
7173 replaced by any of the compatible modes - can still be recognized. */
7174 success = apply_change_group ();
7175 gcc_assert (success);
7176}
7177
e129d93a
ILT
7178/* Change the mode of any reference to the register REGNO (NEWREG) to
7179 GET_MODE (NEWREG), starting at START. Stop before END. Stop at
2e802a6f 7180 any instruction which modifies NEWREG. */
e129d93a
ILT
7181
7182static void
20468884 7183cse_change_cc_mode_insns (rtx_insn *start, rtx_insn *end, rtx newreg)
e129d93a 7184{
20468884 7185 rtx_insn *insn;
e129d93a
ILT
7186
7187 for (insn = start; insn != end; insn = NEXT_INSN (insn))
7188 {
7189 if (! INSN_P (insn))
7190 continue;
7191
2e802a6f 7192 if (reg_set_p (newreg, insn))
e129d93a
ILT
7193 return;
7194
fc188d37 7195 cse_change_cc_mode_insn (insn, newreg);
e129d93a
ILT
7196 }
7197}
7198
/* BB is a basic block which finishes with CC_REG as a condition code
   register which is set to CC_SRC.  Look through the successors of BB
   to find blocks which have a single predecessor (i.e., this one),
   and look through those blocks for an assignment to CC_REG which is
   equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
   permitted to change the mode of CC_SRC to a compatible mode.  This
   returns VOIDmode if no equivalent assignments were found.
   Otherwise it returns the mode which CC_SRC should wind up with.
   ORIG_BB should be the same as BB in the outermost cse_cc_succs call,
   but is passed unmodified down to recursive calls in order to prevent
   endless recursion.

   The main complexity in this function is handling the mode issues.
   We may have more than one duplicate which we can eliminate, and we
   try to find a mode which will work for multiple duplicates.  */

static machine_mode
cse_cc_succs (basic_block bb, basic_block orig_bb, rtx cc_reg, rtx cc_src,
	      bool can_change_mode)
{
  bool found_equiv;
  machine_mode mode;
  unsigned int insn_count;
  edge e;
  /* Room for two duplicate setters (the common two-successor case);
     any further duplicates must already use the final mode.  */
  rtx_insn *insns[2];
  machine_mode modes[2];
  rtx_insn *last_insns[2];
  unsigned int i;
  rtx newreg;
  edge_iterator ei;

  /* We expect to have two successors.  Look at both before picking
     the final mode for the comparison.  If we have more successors
     (i.e., some sort of table jump, although that seems unlikely),
     then we require all beyond the first two to use the same
     mode.  */

  found_equiv = false;
  mode = GET_MODE (cc_src);
  insn_count = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      rtx_insn *insn;
      rtx_insn *end;

      if (e->flags & EDGE_COMPLEX)
	continue;

      /* Only successors with BB as their sole predecessor are safe to
	 rewrite; skip the exit block and the recursion origin.  */
      if (EDGE_COUNT (e->dest->preds) != 1
	  || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  /* Avoid endless recursion on unreachable blocks.  */
	  || e->dest == orig_bb)
	continue;

      end = NEXT_INSN (BB_END (e->dest));
      for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;

	  /* If CC_SRC is modified, we have to stop looking for
	     something which uses it.  */
	  if (modified_in_p (cc_src, insn))
	    break;

	  /* Check whether INSN sets CC_REG to CC_SRC.  */
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      bool found;
	      machine_mode set_mode;
	      machine_mode comp_mode;

	      found = false;
	      set_mode = GET_MODE (SET_SRC (set));
	      comp_mode = set_mode;
	      if (rtx_equal_p (cc_src, SET_SRC (set)))
		found = true;
	      else if (GET_CODE (cc_src) == COMPARE
		       && GET_CODE (SET_SRC (set)) == COMPARE
		       && mode != set_mode
		       && rtx_equal_p (XEXP (cc_src, 0),
				       XEXP (SET_SRC (set), 0))
		       && rtx_equal_p (XEXP (cc_src, 1),
				       XEXP (SET_SRC (set), 1)))

		{
		  /* Same comparison operands but a different CC mode:
		     ask the target whether the two modes can be merged.  */
		  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
		  if (comp_mode != VOIDmode
		      && (can_change_mode || comp_mode == mode))
		    found = true;
		}

	      if (found)
		{
		  found_equiv = true;
		  if (insn_count < ARRAY_SIZE (insns))
		    {
		      insns[insn_count] = insn;
		      modes[insn_count] = set_mode;
		      last_insns[insn_count] = end;
		      ++insn_count;

		      if (mode != comp_mode)
			{
			  gcc_assert (can_change_mode);
			  mode = comp_mode;

			  /* The modified insn will be re-recognized later.  */
			  PUT_MODE (cc_src, mode);
			}
		    }
		  else
		    {
		      if (set_mode != mode)
			{
			  /* We found a matching expression in the
			     wrong mode, but we don't have room to
			     store it in the array.  Punt.  This case
			     should be rare.  */
			  break;
			}
		      /* INSN sets CC_REG to a value equal to CC_SRC
			 with the right mode.  We can simply delete
			 it.  */
		      delete_insn (insn);
		    }

		  /* We found an instruction to delete.  Keep looking,
		     in the hopes of finding a three-way jump.  */
		  continue;
		}

	      /* We found an instruction which sets the condition
		 code, so don't look any farther.  */
	      break;
	    }

	  /* If INSN sets CC_REG in some other way, don't look any
	     farther.  */
	  if (reg_set_p (cc_reg, insn))
	    break;
	}

      /* If we fell off the bottom of the block, we can keep looking
	 through successors.  We pass CAN_CHANGE_MODE as false because
	 we aren't prepared to handle compatibility between the
	 further blocks and this block.  */
      if (insn == end)
	{
	  machine_mode submode;

	  submode = cse_cc_succs (e->dest, orig_bb, cc_reg, cc_src, false);
	  if (submode != VOIDmode)
	    {
	      gcc_assert (submode == mode);
	      found_equiv = true;
	      can_change_mode = false;
	    }
	}
    }

  if (! found_equiv)
    return VOIDmode;

  /* Now INSN_COUNT is the number of instructions we found which set
     CC_REG to a value equivalent to CC_SRC.  The instructions are in
     INSNS.  The modes used by those instructions are in MODES.  */

  newreg = NULL_RTX;
  for (i = 0; i < insn_count; ++i)
    {
      if (modes[i] != mode)
	{
	  /* We need to change the mode of CC_REG in INSNS[i] and
	     subsequent instructions.  */
	  if (! newreg)
	    {
	      if (GET_MODE (cc_reg) == mode)
		newreg = cc_reg;
	      else
		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
	    }
	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
				    newreg);
	}

      delete_insn_and_edges (insns[i]);
    }

  return mode;
}
7395
/* If we have a fixed condition code register (or two), walk through
   the instructions and try to eliminate duplicate assignments.  */

static void
cse_condition_code_reg (void)
{
  unsigned int cc_regno_1;
  unsigned int cc_regno_2;
  rtx cc_reg_1;
  rtx cc_reg_2;
  basic_block bb;

  /* Nothing to do unless the target declares fixed CC registers.  */
  if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
    return;

  cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
  if (cc_regno_2 != INVALID_REGNUM)
    cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
  else
    cc_reg_2 = NULL_RTX;

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *last_insn;
      rtx cc_reg;
      rtx_insn *insn;
      rtx_insn *cc_src_insn;
      rtx cc_src;
      machine_mode mode;
      machine_mode orig_mode;

      /* Look for blocks which end with a conditional jump based on a
	 condition code register.  Then look for the instruction which
	 sets the condition code register.  Then look through the
	 successor blocks for instructions which set the condition
	 code register to the same value.  There are other possible
	 uses of the condition code register, but these are by far the
	 most common and the ones which we are most likely to be able
	 to optimize.  */

      last_insn = BB_END (bb);
      if (!JUMP_P (last_insn))
	continue;

      if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
	cc_reg = cc_reg_1;
      else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
	cc_reg = cc_reg_2;
      else
	continue;

      /* Scan backwards from the jump for the single_set that defines
	 CC_REG; give up if CC_REG is set some other way first.  */
      cc_src_insn = NULL;
      cc_src = NULL_RTX;
      for (insn = PREV_INSN (last_insn);
	   insn && insn != PREV_INSN (BB_HEAD (bb));
	   insn = PREV_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      cc_src_insn = insn;
	      cc_src = SET_SRC (set);
	      break;
	    }
	  else if (reg_set_p (cc_reg, insn))
	    break;
	}

      if (! cc_src_insn)
	continue;

      /* CC_SRC must still have its value at the jump, otherwise the
	 duplicates in successor blocks are not really equivalent.  */
      if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
	continue;

      /* Now CC_REG is a condition code register used for a
	 conditional jump at the end of the block, and CC_SRC, in
	 CC_SRC_INSN, is the value to which that condition code
	 register is set, and CC_SRC is still meaningful at the end of
	 the basic block.  */

      orig_mode = GET_MODE (cc_src);
      mode = cse_cc_succs (bb, bb, cc_reg, cc_src, true);
      if (mode != VOIDmode)
	{
	  /* cse_cc_succs may have changed GET_MODE (cc_src) via
	     PUT_MODE; its return value must agree with that.  */
	  gcc_assert (mode == GET_MODE (cc_src));
	  if (mode != orig_mode)
	    {
	      rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));

	      cse_change_cc_mode_insn (cc_src_insn, newreg);

	      /* Do the same in the following insns that use the
		 current value of CC_REG within BB.  */
	      cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
					NEXT_INSN (last_insn),
					newreg);
	    }
	}
    }
}
ef330312
PB
7502\f
7503
7504/* Perform common subexpression elimination. Nonzero value from
7505 `cse_main' means that jumps were simplified and some code may now
7506 be unreachable, so do jump optimization again. */
c2924966 7507static unsigned int
ef330312
PB
7508rest_of_handle_cse (void)
7509{
7510 int tem;
6fb5fa3c 7511
ef330312 7512 if (dump_file)
5b4fdb20 7513 dump_flow_info (dump_file, dump_flags);
ef330312 7514
10d22567 7515 tem = cse_main (get_insns (), max_reg_num ());
ef330312
PB
7516
7517 /* If we are not running more CSE passes, then we are no longer
7518 expecting CSE to be run. But always rerun it in a cheap mode. */
7519 cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
7520
2aac3a01
EB
7521 if (tem == 2)
7522 {
7523 timevar_push (TV_JUMP);
7524 rebuild_jump_labels (get_insns ());
7d776ee2 7525 cleanup_cfg (CLEANUP_CFG_CHANGED);
2aac3a01
EB
7526 timevar_pop (TV_JUMP);
7527 }
7528 else if (tem == 1 || optimize > 1)
6fb5fa3c 7529 cleanup_cfg (0);
932ad4d9 7530
c2924966 7531 return 0;
ef330312
PB
7532}
7533
27a4cd48
DM
/* Pass registration for the first, full CSE pass ("cse1").  */

namespace {

const pass_data pass_data_cse =
{
  RTL_PASS, /* type */
  "cse1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CSE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_cse : public rtl_opt_pass
{
public:
  pass_cse (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cse, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return optimize > 0; }
  virtual unsigned int execute (function *) { return rest_of_handle_cse (); }

}; // class pass_cse

} // anon namespace

/* Factory used by the pass manager to instantiate the cse1 pass.  */

rtl_opt_pass *
make_pass_cse (gcc::context *ctxt)
{
  return new pass_cse (ctxt);
}
7569
ef330312 7570
ef330312 7571/* Run second CSE pass after loop optimizations. */
c2924966 7572static unsigned int
ef330312
PB
7573rest_of_handle_cse2 (void)
7574{
7575 int tem;
7576
7577 if (dump_file)
5b4fdb20 7578 dump_flow_info (dump_file, dump_flags);
ef330312 7579
10d22567 7580 tem = cse_main (get_insns (), max_reg_num ());
ef330312
PB
7581
7582 /* Run a pass to eliminate duplicated assignments to condition code
7583 registers. We have to run this after bypass_jumps, because it
7584 makes it harder for that pass to determine whether a jump can be
7585 bypassed safely. */
7586 cse_condition_code_reg ();
7587
ef330312
PB
7588 delete_trivially_dead_insns (get_insns (), max_reg_num ());
7589
2aac3a01 7590 if (tem == 2)
ef330312
PB
7591 {
7592 timevar_push (TV_JUMP);
7593 rebuild_jump_labels (get_insns ());
7d776ee2 7594 cleanup_cfg (CLEANUP_CFG_CHANGED);
ef330312
PB
7595 timevar_pop (TV_JUMP);
7596 }
2aac3a01
EB
7597 else if (tem == 1)
7598 cleanup_cfg (0);
7599
ef330312 7600 cse_not_expected = 1;
c2924966 7601 return 0;
ef330312
PB
7602}
7603
7604
27a4cd48
DM
/* Pass registration for the second CSE pass ("cse2"), run after loop
   optimizations when -frerun-cse-after-loop is enabled.  */

namespace {

const pass_data pass_data_cse2 =
{
  RTL_PASS, /* type */
  "cse2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CSE2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_cse2 : public rtl_opt_pass
{
public:
  pass_cse2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cse2, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return optimize > 0 && flag_rerun_cse_after_loop;
  }

  virtual unsigned int execute (function *) { return rest_of_handle_cse2 (); }

}; // class pass_cse2

} // anon namespace

/* Factory used by the pass manager to instantiate the cse2 pass.  */

rtl_opt_pass *
make_pass_cse2 (gcc::context *ctxt)
{
  return new pass_cse2 (ctxt);
}
7644
5f39ad47
SB
/* Run a local CSE pass after global optimizations (GCSE etc.).  Only
   block-local redundancies are targeted, so jumps are not followed.
   (The old comment here, "Run second CSE pass after loop
   optimizations", was a copy-paste from rest_of_handle_cse2.)  */
static unsigned int
rest_of_handle_cse_after_global_opts (void)
{
  int save_cfj;
  int tem;

  /* We only want to do local CSE, so don't follow jumps.  */
  save_cfj = flag_cse_follow_jumps;
  flag_cse_follow_jumps = 0;

  rebuild_jump_labels (get_insns ());
  tem = cse_main (get_insns (), max_reg_num ());
  purge_all_dead_edges ();
  delete_trivially_dead_insns (get_insns (), max_reg_num ());

  cse_not_expected = !flag_rerun_cse_after_loop;

  /* If cse altered any jumps, rerun jump opts to clean things up.  */
  if (tem == 2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (CLEANUP_CFG_CHANGED);
      timevar_pop (TV_JUMP);
    }
  else if (tem == 1)
    cleanup_cfg (0);

  /* Restore the caller's -fcse-follow-jumps setting.  */
  flag_cse_follow_jumps = save_cfj;
  return 0;
}
7677
27a4cd48
DM
/* Pass registration for the local CSE pass run after global
   optimizations ("cse_local"), gated on
   -frerun-cse-after-global-opts.  */

namespace {

const pass_data pass_data_cse_after_global_opts =
{
  RTL_PASS, /* type */
  "cse_local", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CSE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_cse_after_global_opts : public rtl_opt_pass
{
public:
  pass_cse_after_global_opts (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cse_after_global_opts, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return optimize > 0 && flag_rerun_cse_after_global_opts;
  }

  virtual unsigned int execute (function *)
  {
    return rest_of_handle_cse_after_global_opts ();
  }

}; // class pass_cse_after_global_opts

} // anon namespace

/* Factory used by the pass manager to instantiate the cse_local pass.  */

rtl_opt_pass *
make_pass_cse_after_global_opts (gcc::context *ctxt)
{
  return new pass_cse_after_global_opts (ctxt);
}