]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cse.c
Daily bump.
[thirdparty/gcc.git] / gcc / cse.c
CommitLineData
7afe21cc 1/* Common subexpression elimination for GNU compiler.
747215f1 2 Copyright (C) 1987, 88, 89, 92-7, 1998, 1999 Free Software Foundation, Inc.
7afe21cc
RK
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
940d9d63
RK
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
7afe21cc
RK
20
21
22#include "config.h"
670ee920
KG
23/* stdio.h must precede rtl.h for FFS. */
24#include "system.h"
50b2596f 25#include <setjmp.h>
9c3b4c8b 26
7afe21cc
RK
27#include "rtl.h"
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "flags.h"
31#include "real.h"
32#include "insn-config.h"
33#include "recog.h"
956d6950 34#include "expr.h"
50b2596f
KG
35#include "toplev.h"
36#include "output.h"
7afe21cc
RK
37
38/* The basic idea of common subexpression elimination is to go
39 through the code, keeping a record of expressions that would
40 have the same value at the current scan point, and replacing
41 expressions encountered with the cheapest equivalent expression.
42
43 It is too complicated to keep track of the different possibilities
44 when control paths merge; so, at each label, we forget all that is
45 known and start fresh. This can be described as processing each
46 basic block separately. Note, however, that these are not quite
47 the same as the basic blocks found by a later pass and used for
48 data flow analysis and register packing. We do not need to start fresh
49 after a conditional jump instruction if there is no label there.
50
51 We use two data structures to record the equivalent expressions:
52 a hash table for most expressions, and several vectors together
53 with "quantity numbers" to record equivalent (pseudo) registers.
54
55 The use of the special data structure for registers is desirable
 56 because it is faster. It is possible because register references
57 contain a fairly small number, the register number, taken from
58 a contiguously allocated series, and two register references are
59 identical if they have the same number. General expressions
60 do not have any such thing, so the only way to retrieve the
61 information recorded on an expression other than a register
62 is to keep it in a hash table.
63
64Registers and "quantity numbers":
65
66 At the start of each basic block, all of the (hardware and pseudo)
67 registers used in the function are given distinct quantity
68 numbers to indicate their contents. During scan, when the code
69 copies one register into another, we copy the quantity number.
70 When a register is loaded in any other way, we allocate a new
71 quantity number to describe the value generated by this operation.
72 `reg_qty' records what quantity a register is currently thought
73 of as containing.
74
75 All real quantity numbers are greater than or equal to `max_reg'.
76 If register N has not been assigned a quantity, reg_qty[N] will equal N.
77
78 Quantity numbers below `max_reg' do not exist and none of the `qty_...'
79 variables should be referenced with an index below `max_reg'.
80
81 We also maintain a bidirectional chain of registers for each
82 quantity number. `qty_first_reg', `qty_last_reg',
83 `reg_next_eqv' and `reg_prev_eqv' hold these chains.
84
85 The first register in a chain is the one whose lifespan is least local.
86 Among equals, it is the one that was seen first.
87 We replace any equivalent register with that one.
88
89 If two registers have the same quantity number, it must be true that
90 REG expressions with `qty_mode' must be in the hash table for both
91 registers and must be in the same class.
92
93 The converse is not true. Since hard registers may be referenced in
94 any mode, two REG expressions might be equivalent in the hash table
95 but not have the same quantity number if the quantity number of one
96 of the registers is not the same mode as those expressions.
97
98Constants and quantity numbers
99
100 When a quantity has a known constant value, that value is stored
101 in the appropriate element of qty_const. This is in addition to
102 putting the constant in the hash table as is usual for non-regs.
103
d45cf215 104 Whether a reg or a constant is preferred is determined by the configuration
7afe21cc
RK
105 macro CONST_COSTS and will often depend on the constant value. In any
106 event, expressions containing constants can be simplified, by fold_rtx.
107
108 When a quantity has a known nearly constant value (such as an address
109 of a stack slot), that value is stored in the appropriate element
110 of qty_const.
111
112 Integer constants don't have a machine mode. However, cse
113 determines the intended machine mode from the destination
114 of the instruction that moves the constant. The machine mode
115 is recorded in the hash table along with the actual RTL
116 constant expression so that different modes are kept separate.
117
118Other expressions:
119
120 To record known equivalences among expressions in general
121 we use a hash table called `table'. It has a fixed number of buckets
122 that contain chains of `struct table_elt' elements for expressions.
123 These chains connect the elements whose expressions have the same
124 hash codes.
125
126 Other chains through the same elements connect the elements which
127 currently have equivalent values.
128
129 Register references in an expression are canonicalized before hashing
130 the expression. This is done using `reg_qty' and `qty_first_reg'.
131 The hash code of a register reference is computed using the quantity
132 number, not the register number.
133
134 When the value of an expression changes, it is necessary to remove from the
135 hash table not just that expression but all expressions whose values
136 could be different as a result.
137
138 1. If the value changing is in memory, except in special cases
139 ANYTHING referring to memory could be changed. That is because
140 nobody knows where a pointer does not point.
141 The function `invalidate_memory' removes what is necessary.
142
143 The special cases are when the address is constant or is
144 a constant plus a fixed register such as the frame pointer
145 or a static chain pointer. When such addresses are stored in,
146 we can tell exactly which other such addresses must be invalidated
147 due to overlap. `invalidate' does this.
148 All expressions that refer to non-constant
149 memory addresses are also invalidated. `invalidate_memory' does this.
150
151 2. If the value changing is a register, all expressions
152 containing references to that register, and only those,
153 must be removed.
154
155 Because searching the entire hash table for expressions that contain
156 a register is very slow, we try to figure out when it isn't necessary.
157 Precisely, this is necessary only when expressions have been
158 entered in the hash table using this register, and then the value has
159 changed, and then another expression wants to be added to refer to
160 the register's new value. This sequence of circumstances is rare
161 within any one basic block.
162
163 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
164 reg_tick[i] is incremented whenever a value is stored in register i.
165 reg_in_table[i] holds -1 if no references to register i have been
166 entered in the table; otherwise, it contains the value reg_tick[i] had
167 when the references were entered. If we want to enter a reference
168 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
169 Until we want to enter a new entry, the mere fact that the two vectors
170 don't match makes the entries be ignored if anyone tries to match them.
171
172 Registers themselves are entered in the hash table as well as in
173 the equivalent-register chains. However, the vectors `reg_tick'
174 and `reg_in_table' do not apply to expressions which are simple
175 register references. These expressions are removed from the table
176 immediately when they become invalid, and this can be done even if
177 we do not immediately search for all the expressions that refer to
178 the register.
179
180 A CLOBBER rtx in an instruction invalidates its operand for further
181 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
182 invalidates everything that resides in memory.
183
184Related expressions:
185
186 Constant expressions that differ only by an additive integer
187 are called related. When a constant expression is put in
188 the table, the related expression with no constant term
189 is also entered. These are made to point at each other
190 so that it is possible to find out if there exists any
191 register equivalent to an expression related to a given expression. */
192
193/* One plus largest register number used in this function. */
194
195static int max_reg;
196
556c714b
JW
197/* One plus largest instruction UID used in this function at time of
198 cse_main call. */
199
200static int max_insn_uid;
201
7afe21cc
RK
202/* Length of vectors indexed by quantity number.
203 We know in advance we will not need a quantity number this big. */
204
205static int max_qty;
206
207/* Next quantity number to be allocated.
208 This is 1 + the largest number needed so far. */
209
210static int next_qty;
211
71d306d1 212/* Indexed by quantity number, gives the first (or last) register
7afe21cc
RK
213 in the chain of registers that currently contain this quantity. */
214
215static int *qty_first_reg;
216static int *qty_last_reg;
217
218/* Index by quantity number, gives the mode of the quantity. */
219
220static enum machine_mode *qty_mode;
221
222/* Indexed by quantity number, gives the rtx of the constant value of the
223 quantity, or zero if it does not have a known value.
224 A sum of the frame pointer (or arg pointer) plus a constant
225 can also be entered here. */
226
227static rtx *qty_const;
228
229/* Indexed by qty number, gives the insn that stored the constant value
230 recorded in `qty_const'. */
231
232static rtx *qty_const_insn;
233
234/* The next three variables are used to track when a comparison between a
235 quantity and some constant or register has been passed. In that case, we
236 know the results of the comparison in case we see it again. These variables
237 record a comparison that is known to be true. */
238
239/* Indexed by qty number, gives the rtx code of a comparison with a known
240 result involving this quantity. If none, it is UNKNOWN. */
241static enum rtx_code *qty_comparison_code;
242
243/* Indexed by qty number, gives the constant being compared against in a
244 comparison of known result. If no such comparison, it is undefined.
245 If the comparison is not with a constant, it is zero. */
246
247static rtx *qty_comparison_const;
248
249/* Indexed by qty number, gives the quantity being compared against in a
 250 comparison of known result. If no such comparison, it is undefined.
251 If the comparison is not with a register, it is -1. */
252
253static int *qty_comparison_qty;
254
255#ifdef HAVE_cc0
256/* For machines that have a CC0, we do not record its value in the hash
257 table since its use is guaranteed to be the insn immediately following
258 its definition and any other insn is presumed to invalidate it.
259
260 Instead, we store below the value last assigned to CC0. If it should
261 happen to be a constant, it is stored in preference to the actual
262 assigned value. In case it is a constant, we store the mode in which
263 the constant should be interpreted. */
264
265static rtx prev_insn_cc0;
266static enum machine_mode prev_insn_cc0_mode;
267#endif
268
269/* Previous actual insn. 0 if at first insn of basic block. */
270
271static rtx prev_insn;
272
273/* Insn being scanned. */
274
275static rtx this_insn;
276
71d306d1 277/* Index by register number, gives the quantity number
7afe21cc
RK
278 of the register's current contents. */
279
280static int *reg_qty;
281
71d306d1
DE
282/* Index by register number, gives the number of the next (or
283 previous) register in the chain of registers sharing the same
7afe21cc
RK
284 value.
285
286 Or -1 if this register is at the end of the chain.
287
288 If reg_qty[N] == N, reg_next_eqv[N] is undefined. */
289
290static int *reg_next_eqv;
291static int *reg_prev_eqv;
292
71d306d1 293/* Index by register number, gives the number of times
7afe21cc
RK
294 that register has been altered in the current basic block. */
295
296static int *reg_tick;
297
71d306d1 298/* Index by register number, gives the reg_tick value at which
7afe21cc
RK
299 rtx's containing this register are valid in the hash table.
300 If this does not equal the current reg_tick value, such expressions
301 existing in the hash table are invalid.
302 If this is -1, no expressions containing this register have been
303 entered in the table. */
304
305static int *reg_in_table;
306
307/* A HARD_REG_SET containing all the hard registers for which there is
308 currently a REG expression in the hash table. Note the difference
309 from the above variables, which indicate if the REG is mentioned in some
310 expression in the table. */
311
312static HARD_REG_SET hard_regs_in_table;
313
314/* A HARD_REG_SET containing all the hard registers that are invalidated
315 by a CALL_INSN. */
316
317static HARD_REG_SET regs_invalidated_by_call;
318
319/* Two vectors of ints:
320 one containing max_reg -1's; the other max_reg + 500 (an approximation
321 for max_qty) elements where element i contains i.
322 These are used to initialize various other vectors fast. */
323
324static int *all_minus_one;
325static int *consec_ints;
326
327/* CUID of insn that starts the basic block currently being cse-processed. */
328
329static int cse_basic_block_start;
330
331/* CUID of insn that ends the basic block currently being cse-processed. */
332
333static int cse_basic_block_end;
334
335/* Vector mapping INSN_UIDs to cuids.
d45cf215 336 The cuids are like uids but increase monotonically always.
7afe21cc
RK
337 We use them to see whether a reg is used outside a given basic block. */
338
906c4e36 339static int *uid_cuid;
7afe21cc 340
164c8956
RK
341/* Highest UID in UID_CUID. */
342static int max_uid;
343
7afe21cc
RK
344/* Get the cuid of an insn. */
345
346#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
347
348/* Nonzero if cse has altered conditional jump insns
349 in such a way that jump optimization should be redone. */
350
351static int cse_jumps_altered;
352
a5dfb4ee
RK
353/* Nonzero if we put a LABEL_REF into the hash table. Since we may have put
354 it into an INSN without a REG_LABEL, we have to rerun jump after CSE
355 to put in the note. */
356static int recorded_label_ref;
357
7afe21cc
RK
358/* canon_hash stores 1 in do_not_record
359 if it notices a reference to CC0, PC, or some other volatile
360 subexpression. */
361
362static int do_not_record;
363
7bac1be0
RK
364#ifdef LOAD_EXTEND_OP
365
366/* Scratch rtl used when looking for load-extended copy of a MEM. */
367static rtx memory_extend_rtx;
368#endif
369
7afe21cc
RK
370/* canon_hash stores 1 in hash_arg_in_memory
371 if it notices a reference to memory within the expression being hashed. */
372
373static int hash_arg_in_memory;
374
375/* canon_hash stores 1 in hash_arg_in_struct
376 if it notices a reference to memory that's part of a structure. */
377
378static int hash_arg_in_struct;
379
380/* The hash table contains buckets which are chains of `struct table_elt's,
381 each recording one expression's information.
382 That expression is in the `exp' field.
383
384 Those elements with the same hash code are chained in both directions
385 through the `next_same_hash' and `prev_same_hash' fields.
386
387 Each set of expressions with equivalent values
388 are on a two-way chain through the `next_same_value'
389 and `prev_same_value' fields, and all point with
390 the `first_same_value' field at the first element in
391 that chain. The chain is in order of increasing cost.
392 Each element's cost value is in its `cost' field.
393
394 The `in_memory' field is nonzero for elements that
395 involve any reference to memory. These elements are removed
396 whenever a write is done to an unidentified location in memory.
397 To be safe, we assume that a memory address is unidentified unless
398 the address is either a symbol constant or a constant plus
399 the frame pointer or argument pointer.
400
401 The `in_struct' field is nonzero for elements that
402 involve any reference to memory inside a structure or array.
403
404 The `related_value' field is used to connect related expressions
405 (that differ by adding an integer).
406 The related expressions are chained in a circular fashion.
407 `related_value' is zero for expressions for which this
408 chain is not useful.
409
410 The `cost' field stores the cost of this element's expression.
411
412 The `is_const' flag is set if the element is a constant (including
413 a fixed address).
414
415 The `flag' field is used as a temporary during some search routines.
416
417 The `mode' field is usually the same as GET_MODE (`exp'), but
418 if `exp' is a CONST_INT and has no machine mode then the `mode'
419 field is the mode it was being used as. Each constant is
420 recorded separately for each mode it is used with. */
421
422
/* One entry in the expression hash table.  See the long comment above
   for the invariants connecting these fields.  */
struct table_elt
{
  rtx exp;				/* The expression recorded here.  */
  struct table_elt *next_same_hash;	/* Two-way chain of entries whose
  					   expressions have the same hash.  */
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;	/* Two-way chain of entries currently
  					   known to have equivalent values,
  					   in order of increasing cost.  */
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;	/* Head of this value chain.  */
  struct table_elt *related_value;	/* Circular chain of expressions that
  					   differ only by an added integer;
  					   zero if the chain is not useful.  */
  int cost;				/* Cost of `exp', as computed by COST.  */
  enum machine_mode mode;		/* Usually GET_MODE (exp); for a
  					   CONST_INT, the mode it is used in.  */
  char in_memory;			/* Nonzero if `exp' references memory.  */
  char in_struct;			/* Nonzero if that memory is within a
  					   structure or array.  */
  char is_const;			/* Nonzero if `exp' is a constant
  					   (including a fixed address).  */
  char flag;				/* Temporary used by search routines.  */
};
439
7afe21cc
RK
440/* We don't want a lot of buckets, because we rarely have very many
441 things stored in the hash table, and a lot of buckets slows
442 down a lot of loops that happen frequently. */
443#define NBUCKETS 31
444
445/* Compute hash code of X in mode M. Special-case case where X is a pseudo
446 register (hard registers may require `do_not_record' to be set). */
447
448#define HASH(X, M) \
449 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
2197a88a 450 ? (((unsigned) REG << 7) + (unsigned) reg_qty[REGNO (X)]) % NBUCKETS \
7afe21cc
RK
451 : canon_hash (X, M) % NBUCKETS)
452
453/* Determine whether register number N is considered a fixed register for CSE.
454 It is desirable to replace other regs with fixed regs, to reduce need for
455 non-fixed hard regs.
456 A reg wins if it is either the frame pointer or designated as fixed,
457 but not if it is an overlapping register. */
458#ifdef OVERLAPPING_REGNO_P
459#define FIXED_REGNO_P(N) \
8bc169f2 460 (((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
6ab832bc 461 || fixed_regs[N] || global_regs[N]) \
7afe21cc
RK
462 && ! OVERLAPPING_REGNO_P ((N)))
463#else
464#define FIXED_REGNO_P(N) \
8bc169f2 465 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
6ab832bc 466 || fixed_regs[N] || global_regs[N])
7afe21cc
RK
467#endif
468
469/* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
ac07e066
RK
470 hard registers and pointers into the frame are the cheapest with a cost
471 of 0. Next come pseudos with a cost of one and other hard registers with
472 a cost of 2. Aside from these special cases, call `rtx_cost'. */
473
6ab832bc 474#define CHEAP_REGNO(N) \
8bc169f2
DE
475 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
476 || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
477 || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
478 || ((N) < FIRST_PSEUDO_REGISTER \
e7bb59fa 479 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
7afe21cc 480
6ab832bc
RK
481/* A register is cheap if it is a user variable assigned to the register
482 or if its register number always corresponds to a cheap register. */
483
484#define CHEAP_REG(N) \
485 ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER) \
486 || CHEAP_REGNO (REGNO (N)))
487
38734e55
ILT
488#define COST(X) \
489 (GET_CODE (X) == REG \
490 ? (CHEAP_REG (X) ? 0 \
491 : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1 \
492 : 2) \
954a5693 493 : notreg_cost(X))
7afe21cc
RK
494
495/* Determine if the quantity number for register X represents a valid index
496 into the `qty_...' variables. */
497
498#define REGNO_QTY_VALID_P(N) (reg_qty[N] != (N))
499
2f541799
MM
500#ifdef ADDRESS_COST
501/* The ADDRESS_COST macro does not deal with ADDRESSOF nodes. But,
502 during CSE, such nodes are present. Using an ADDRESSOF node which
503 refers to the address of a REG is a good thing because we can then
504 turn (MEM (ADDRESSSOF (REG))) into just plain REG. */
505#define CSE_ADDRESS_COST(RTX) \
506 ((GET_CODE (RTX) == ADDRESSOF && REG_P (XEXP ((RTX), 0))) \
507 ? -1 : ADDRESS_COST(RTX))
508#endif
509
7afe21cc
RK
510static struct table_elt *table[NBUCKETS];
511
512/* Chain of `struct table_elt's made so far for this function
513 but currently removed from the table. */
514
515static struct table_elt *free_element_chain;
516
517/* Number of `struct table_elt' structures made so far for this function. */
518
519static int n_elements_made;
520
521/* Maximum value `n_elements_made' has had so far in this compilation
522 for functions previously processed. */
523
524static int max_elements_made;
525
526/* Surviving equivalence class when two equivalence classes are merged
527 by recording the effects of a jump in the last insn. Zero if the
528 last insn was not a conditional jump. */
529
530static struct table_elt *last_jump_equiv_class;
531
532/* Set to the cost of a constant pool reference if one was found for a
533 symbolic constant. If this was found, it means we should try to
534 convert constants into constant pool entries if they don't fit in
535 the insn. */
536
537static int constant_pool_entries_cost;
538
6cd4575e
RK
539/* Define maximum length of a branch path. */
540
541#define PATHLENGTH 10
542
543/* This data describes a block that will be processed by cse_basic_block. */
544
545struct cse_basic_block_data {
546 /* Lowest CUID value of insns in block. */
547 int low_cuid;
548 /* Highest CUID value of insns in block. */
549 int high_cuid;
550 /* Total number of SETs in block. */
551 int nsets;
552 /* Last insn in the block. */
553 rtx last;
554 /* Size of current branch path, if any. */
555 int path_size;
556 /* Current branch path, indicating which branches will be taken. */
557 struct branch_path {
0f41302f 558 /* The branch insn. */
6cd4575e
RK
559 rtx branch;
560 /* Whether it should be taken or not. AROUND is the same as taken
561 except that it is used when the destination label is not preceded
562 by a BARRIER. */
563 enum taken {TAKEN, NOT_TAKEN, AROUND} status;
564 } path[PATHLENGTH];
565};
566
7afe21cc
RK
567/* Nonzero if X has the form (PLUS frame-pointer integer). We check for
568 virtual regs here because the simplify_*_operation routines are called
569 by integrate.c, which is called before virtual register instantiation. */
570
571#define FIXED_BASE_PLUS_P(X) \
8bc169f2
DE
572 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
573 || (X) == arg_pointer_rtx \
7afe21cc
RK
574 || (X) == virtual_stack_vars_rtx \
575 || (X) == virtual_incoming_args_rtx \
576 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
577 && (XEXP (X, 0) == frame_pointer_rtx \
8bc169f2 578 || XEXP (X, 0) == hard_frame_pointer_rtx \
7afe21cc
RK
579 || XEXP (X, 0) == arg_pointer_rtx \
580 || XEXP (X, 0) == virtual_stack_vars_rtx \
e9a25f70
JL
581 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
582 || GET_CODE (X) == ADDRESSOF)
7afe21cc 583
6f90e075
JW
584/* Similar, but also allows reference to the stack pointer.
585
586 This used to include FIXED_BASE_PLUS_P, however, we can't assume that
587 arg_pointer_rtx by itself is nonzero, because on at least one machine,
588 the i960, the arg pointer is zero when it is unused. */
7afe21cc
RK
589
590#define NONZERO_BASE_PLUS_P(X) \
8bc169f2 591 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
6f90e075
JW
592 || (X) == virtual_stack_vars_rtx \
593 || (X) == virtual_incoming_args_rtx \
594 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
595 && (XEXP (X, 0) == frame_pointer_rtx \
8bc169f2 596 || XEXP (X, 0) == hard_frame_pointer_rtx \
6f90e075
JW
597 || XEXP (X, 0) == arg_pointer_rtx \
598 || XEXP (X, 0) == virtual_stack_vars_rtx \
599 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
7afe21cc
RK
600 || (X) == stack_pointer_rtx \
601 || (X) == virtual_stack_dynamic_rtx \
602 || (X) == virtual_outgoing_args_rtx \
603 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
604 && (XEXP (X, 0) == stack_pointer_rtx \
605 || XEXP (X, 0) == virtual_stack_dynamic_rtx \
e9a25f70
JL
606 || XEXP (X, 0) == virtual_outgoing_args_rtx)) \
607 || GET_CODE (X) == ADDRESSOF)
7afe21cc 608
954a5693 609static int notreg_cost PROTO((rtx));
6cd4575e
RK
610static void new_basic_block PROTO((void));
611static void make_new_qty PROTO((int));
612static void make_regs_eqv PROTO((int, int));
613static void delete_reg_equiv PROTO((int));
614static int mention_regs PROTO((rtx));
615static int insert_regs PROTO((rtx, struct table_elt *, int));
616static void free_element PROTO((struct table_elt *));
2197a88a 617static void remove_from_table PROTO((struct table_elt *, unsigned));
6cd4575e 618static struct table_elt *get_element PROTO((void));
2197a88a
RK
619static struct table_elt *lookup PROTO((rtx, unsigned, enum machine_mode)),
620 *lookup_for_remove PROTO((rtx, unsigned, enum machine_mode));
6cd4575e 621static rtx lookup_as_function PROTO((rtx, enum rtx_code));
2197a88a 622static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
6cd4575e
RK
623 enum machine_mode));
624static void merge_equiv_classes PROTO((struct table_elt *,
625 struct table_elt *));
68c1e173 626static void invalidate PROTO((rtx, enum machine_mode));
9ae8ffe7 627static int cse_rtx_varies_p PROTO((rtx));
6cd4575e 628static void remove_invalid_refs PROTO((int));
34c73909 629static void remove_invalid_subreg_refs PROTO((int, int, enum machine_mode));
6cd4575e 630static void rehash_using_reg PROTO((rtx));
9ae8ffe7 631static void invalidate_memory PROTO((void));
6cd4575e
RK
632static void invalidate_for_call PROTO((void));
633static rtx use_related_value PROTO((rtx, struct table_elt *));
2197a88a
RK
634static unsigned canon_hash PROTO((rtx, enum machine_mode));
635static unsigned safe_hash PROTO((rtx, enum machine_mode));
6cd4575e 636static int exp_equiv_p PROTO((rtx, rtx, int, int));
f451db89 637static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
6500fb43
RK
638 HOST_WIDE_INT *,
639 HOST_WIDE_INT *));
6cd4575e 640static int refers_to_p PROTO((rtx, rtx));
6cd4575e
RK
641static rtx canon_reg PROTO((rtx, rtx));
642static void find_best_addr PROTO((rtx, rtx *));
643static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
644 enum machine_mode *,
645 enum machine_mode *));
96b0e481
RK
646static rtx cse_gen_binary PROTO((enum rtx_code, enum machine_mode,
647 rtx, rtx));
648static rtx simplify_plus_minus PROTO((enum rtx_code, enum machine_mode,
649 rtx, rtx));
6cd4575e
RK
650static rtx fold_rtx PROTO((rtx, rtx));
651static rtx equiv_constant PROTO((rtx));
652static void record_jump_equiv PROTO((rtx, int));
653static void record_jump_cond PROTO((enum rtx_code, enum machine_mode,
654 rtx, rtx, int));
7bd8b2a8 655static void cse_insn PROTO((rtx, rtx));
9ae8ffe7
JL
656static int note_mem_written PROTO((rtx));
657static void invalidate_from_clobbers PROTO((rtx));
6cd4575e
RK
658static rtx cse_process_notes PROTO((rtx, rtx));
659static void cse_around_loop PROTO((rtx));
660static void invalidate_skipped_set PROTO((rtx, rtx));
661static void invalidate_skipped_block PROTO((rtx));
662static void cse_check_loop_start PROTO((rtx, rtx));
663static void cse_set_around_loop PROTO((rtx, rtx, rtx));
664static rtx cse_basic_block PROTO((rtx, rtx, struct branch_path *, int));
79644f06 665static void count_reg_usage PROTO((rtx, int *, rtx, int));
c407b802
RK
666
667extern int rtx_equal_function_value_matters;
7afe21cc
RK
668\f
669/* Return an estimate of the cost of computing rtx X.
670 One use is in cse, to decide which expression to keep in the hash table.
671 Another is in rtl generation, to pick the cheapest way to multiply.
672 Other uses like the latter are expected in the future. */
673
954a5693
RK
674/* Internal function, to compute cost when X is not a register; called
675 from COST macro to keep it simple. */
676
677static int
678notreg_cost (x)
679 rtx x;
680{
681 return ((GET_CODE (x) == SUBREG
682 && GET_CODE (SUBREG_REG (x)) == REG
683 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
684 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
685 && (GET_MODE_SIZE (GET_MODE (x))
686 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
687 && subreg_lowpart_p (x)
688 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
689 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
690 ? (CHEAP_REG (SUBREG_REG (x)) ? 0
691 : (REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER ? 1
692 : 2))
693 : rtx_cost (x, SET) * 2);
694}
695
7afe21cc
RK
/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */

#define COSTS_N_INSNS(N) ((N) * 4 - 2)

/* Return an estimate of the cost of computing rtx X, used by CSE to pick
   the cheapest member of an equivalence class.  OUTER_CODE is the rtx code
   of the enclosing expression (SET for a top-level call); it is passed on
   to the target RTX_COSTS/CONST_COSTS macros, which may ignore it.  */

int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code ATTRIBUTE_UNUSED;
{
  register int i, j;
  register enum rtx_code code;
  register char *fmt;
  register int total;

  /* A null rtx costs nothing.  */
  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
	 we have no way of validating the resulting insn.  So assign
	 anything containing an ASM_OPERANDS a very high cost.  */
      total = 1000;
      break;
    default:
      total = 2;
    }

  /* Second switch: cases that return a final answer directly instead of
     summing sub-expressions.  */
  switch (code)
    {
    case REG:
      /* Cheap registers cost 0, all others 1.  */
      return ! CHEAP_REG (x);

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      return 2;
      /* NOTE: the macros below appear after an unconditional return; they
	 are reachable because the target-supplied RTX_COSTS/CONST_COSTS
	 expansions are expected to begin with their own `case' labels
	 (and typically end with a `return') — TODO confirm against the
	 target's definition when porting.  */
#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
#ifdef CONST_COSTS
      CONST_COSTS (x, code, outer_code);
#endif

    default:
#ifdef DEFAULT_RTX_COSTS
      DEFAULT_RTX_COSTS (x, code, outer_code);
#endif
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
789\f
790/* Clear the hash table and initialize each register with its own quantity,
791 for a new basic block. */
792
793static void
794new_basic_block ()
795{
796 register int i;
797
798 next_qty = max_reg;
799
4c9a05bc 800 bzero ((char *) reg_tick, max_reg * sizeof (int));
7afe21cc 801
4c9a05bc
RK
802 bcopy ((char *) all_minus_one, (char *) reg_in_table,
803 max_reg * sizeof (int));
804 bcopy ((char *) consec_ints, (char *) reg_qty, max_reg * sizeof (int));
7afe21cc
RK
805 CLEAR_HARD_REG_SET (hard_regs_in_table);
806
807 /* The per-quantity values used to be initialized here, but it is
808 much faster to initialize each as it is made in `make_new_qty'. */
809
810 for (i = 0; i < NBUCKETS; i++)
811 {
812 register struct table_elt *this, *next;
813 for (this = table[i]; this; this = next)
814 {
815 next = this->next_same_hash;
816 free_element (this);
817 }
818 }
819
4c9a05bc 820 bzero ((char *) table, sizeof table);
7afe21cc
RK
821
822 prev_insn = 0;
823
824#ifdef HAVE_cc0
825 prev_insn_cc0 = 0;
826#endif
827}
828
829/* Say that register REG contains a quantity not in any register before
830 and initialize that quantity. */
831
832static void
833make_new_qty (reg)
834 register int reg;
835{
836 register int q;
837
838 if (next_qty >= max_qty)
839 abort ();
840
841 q = reg_qty[reg] = next_qty++;
842 qty_first_reg[q] = reg;
843 qty_last_reg[q] = reg;
844 qty_const[q] = qty_const_insn[q] = 0;
845 qty_comparison_code[q] = UNKNOWN;
846
847 reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
848}
849
/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     register int new, old;
{
  register int lastr, firstr;
  register int q = reg_qty[old];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  /* NEW joins OLD's quantity; the rest of this function decides where in
     the quantity's doubly-linked register chain to splice NEW.  */
  reg_qty[new] = q;
  firstr = qty_first_reg[q];
  lastr = qty_last_reg[q];

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      /* NEW becomes the new head (canonical register) of the quantity.  */
      reg_prev_eqv[firstr] = new;
      reg_next_eqv[new] = firstr;
      reg_prev_eqv[new] = -1;
      qty_first_reg[q] = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_prev_eqv[lastr];
      /* Splice NEW into the chain immediately after LASTR, updating the
	 quantity's tail pointer if LASTR was the last register.  */
      reg_next_eqv[new] = reg_next_eqv[lastr];
      if (reg_next_eqv[lastr] >= 0)
	reg_prev_eqv[reg_next_eqv[lastr]] = new;
      else
	qty_last_reg[q] = new;
      reg_next_eqv[lastr] = new;
      reg_prev_eqv[new] = lastr;
    }
}
911
912/* Remove REG from its equivalence class. */
913
914static void
915delete_reg_equiv (reg)
916 register int reg;
917{
7afe21cc 918 register int q = reg_qty[reg];
a4e262bc 919 register int p, n;
7afe21cc 920
a4e262bc 921 /* If invalid, do nothing. */
7afe21cc
RK
922 if (q == reg)
923 return;
924
a4e262bc
RK
925 p = reg_prev_eqv[reg];
926 n = reg_next_eqv[reg];
927
7afe21cc
RK
928 if (n != -1)
929 reg_prev_eqv[n] = p;
930 else
931 qty_last_reg[q] = p;
932 if (p != -1)
933 reg_next_eqv[p] = n;
934 else
935 qty_first_reg[q] = n;
936
937 reg_qty[reg] = reg;
938}
939
/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      register int regno = REGNO (x);
      register int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      int i;

      for (i = regno; i < endregno; i++)
	{
	  /* A stale REG_IN_TABLE (older than REG_TICK) means entries
	     mentioning this register are invalid; purge them now.  */
	  if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
	    remove_invalid_refs (i);

	  reg_in_table[i] = reg_tick[i];
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      int i = REGNO (SUBREG_REG (x));

      if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
	{
	  /* If reg_tick has been incremented more than once since
	     reg_in_table was last set, that means that the entire
	     register has been set before, so discard anything memorized
	     for the entire register, including all SUBREG expressions.  */
	  if (reg_in_table[i] != reg_tick[i] - 1)
	    remove_invalid_refs (i);
	  else
	    /* Only one invalidation happened; it was for this SUBREG, so
	       just discard entries overlapping the same words.  */
	    remove_invalid_subreg_refs (i, SUBREG_WORD (x), GET_MODE (x));
	}

      reg_in_table[i] = reg_tick[i];
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  /* Recurse over all sub-rtx's, accumulating whether any of them may
     have changed X's hash code.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}
1047
/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      if (REGNO_QTY_VALID_P (regno)
	  && qty_mode[reg_qty[regno]] != GET_MODE (x))
	return 0;

      if (modified || ! REGNO_QTY_VALID_P (regno))
	{
	  /* Join the quantity of the first same-mode register in CLASSP's
	     class, if any; otherwise start a new quantity for REGNO.  */
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  make_new_qty (regno);
	  qty_mode[reg_qty[regno]] = GET_MODE (x);
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      int regno = REGNO (SUBREG_REG (x));

      insert_regs (SUBREG_REG (x), NULL_PTR, 0);
      /* Mention_regs checks if REG_TICK is exactly one larger than
	 REG_IN_TABLE to find out if there was only a single preceding
	 invalidation - for the SUBREG - or another one, which would be
	 for the full register.  Since we don't invalidate the SUBREG
	 here first, we might have to bump up REG_TICK so that mention_regs
	 will do the right thing.  */
      if (reg_in_table[regno] >= 0
	  && reg_tick[regno] == reg_in_table[regno] + 1)
	reg_tick[regno]++;
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}
1123\f
1124/* Look in or update the hash table. */
1125
1126/* Put the element ELT on the list of free elements. */
1127
1128static void
1129free_element (elt)
1130 struct table_elt *elt;
1131{
1132 elt->next_same_hash = free_element_chain;
1133 free_element_chain = elt;
1134}
1135
1136/* Return an element that is free for use. */
1137
1138static struct table_elt *
1139get_element ()
1140{
1141 struct table_elt *elt = free_element_chain;
1142 if (elt)
1143 {
1144 free_element_chain = elt->next_same_hash;
1145 return elt;
1146 }
1147 n_elements_made++;
1148 return (struct table_elt *) oballoc (sizeof (struct table_elt));
1149}
1150
/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next) next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	/* ELT was the class head; every remaining member must point to
	   the new head.  */
	register struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next) next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < NBUCKETS; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;
      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  free_element (elt);
}
1226
1227/* Look up X in the hash table and return its table element,
1228 or 0 if X is not in the table.
1229
1230 MODE is the machine-mode of X, or if X is an integer constant
1231 with VOIDmode then MODE is the mode with which X will be used.
1232
1233 Here we are satisfied to find an expression whose tree structure
1234 looks like X. */
1235
1236static struct table_elt *
1237lookup (x, hash, mode)
1238 rtx x;
2197a88a 1239 unsigned hash;
7afe21cc
RK
1240 enum machine_mode mode;
1241{
1242 register struct table_elt *p;
1243
1244 for (p = table[hash]; p; p = p->next_same_hash)
1245 if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1246 || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1247 return p;
1248
1249 return 0;
1250}
1251
1252/* Like `lookup' but don't care whether the table element uses invalid regs.
1253 Also ignore discrepancies in the machine mode of a register. */
1254
1255static struct table_elt *
1256lookup_for_remove (x, hash, mode)
1257 rtx x;
2197a88a 1258 unsigned hash;
7afe21cc
RK
1259 enum machine_mode mode;
1260{
1261 register struct table_elt *p;
1262
1263 if (GET_CODE (x) == REG)
1264 {
1265 int regno = REGNO (x);
1266 /* Don't check the machine mode when comparing registers;
1267 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1268 for (p = table[hash]; p; p = p->next_same_hash)
1269 if (GET_CODE (p->exp) == REG
1270 && REGNO (p->exp) == regno)
1271 return p;
1272 }
1273 else
1274 {
1275 for (p = table[hash]; p; p = p->next_same_hash)
1276 if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1277 return p;
1278 }
1279
1280 return 0;
1281}
1282
/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
					 GET_MODE (x));
  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS, word_mode);
    }

  if (p == 0)
    return 0;

  /* Scan X's whole equivalence class for an entry with the wanted code.  */
  for (p = p->first_same_value; p; p = p->next_same_value)
    {
      if (GET_CODE (p->exp) == code
	  /* Make sure this is a valid entry in the table.  */
	  && exp_equiv_p (p->exp, p->exp, 1, 0))
	return p->exp;
    }

  return 0;
}
1317
/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X,Y) ((X)->cost < (Y)->cost)

static struct table_elt *
insert (x, classp, hash, mode)
     register rtx x;
     register struct table_elt *classp;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table.  */
  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
      int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* If X is a label, show we recorded it.  */
  if (GET_CODE (x) == LABEL_REF
      || (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF))
    recorded_label_ref = 1;

  /* Put an element for X into the right hash bucket.  */

  elt = get_element ();
  elt->exp = x;
  elt->cost = COST (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x)
		   /* GNU C++ takes advantage of this for `this'
		      (and other const values).  */
		   || (RTX_UNCHANGING_P (x)
		       && GET_CODE (x) == REG
		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
		   || FIXED_BASE_PLUS_P (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class */
	{
	  register struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  /* All existing members must now point at the new head.  */
	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  register struct table_elt *p, *next;
	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next);
	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;
	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update `qty_const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && GET_CODE (classp->exp) == REG
      && GET_CODE (x) != REG)
    {
      qty_const[reg_qty[REGNO (classp->exp)]]
	= gen_lowpart_if_possible (qty_mode[reg_qty[REGNO (classp->exp)]], x);
      qty_const_insn[reg_qty[REGNO (classp->exp)]] = this_insn;
    }

  else if (GET_CODE (x) == REG && classp && ! qty_const[reg_qty[REGNO (x)]]
	   && ! elt->is_const)
    {
      register struct table_elt *p;

      /* Search the class for a non-REG constant member.  */
      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const && GET_CODE (p->exp) != REG)
	    {
	      qty_const[reg_qty[REGNO (x)]]
		= gen_lowpart_if_possible (GET_MODE (x), p->exp);
	      qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
	      break;
	    }
	}
    }

  else if (GET_CODE (x) == REG && qty_const[reg_qty[REGNO (x)]]
	   && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]])
    qty_const_insn[reg_qty[REGNO (x)]] = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = safe_hash (subexp, mode) % NBUCKETS;
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL_PTR, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}
1509\f
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (class1, class2)
     struct table_elt *class1, *class2;
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  /* Move each member of CLASS2 into CLASS1 by removing and re-inserting
     it; NEXT is captured first because the element is freed.  */
  for (elt = class2; elt; elt = next)
    {
      unsigned hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
	{
	  hash_arg_in_memory = 0;
	  hash_arg_in_struct = 0;
	  hash = HASH (exp, mode);

	  if (GET_CODE (exp) == REG)
	    delete_reg_equiv (REGNO (exp));

	  remove_from_table (elt, hash);

	  /* If quantity numbers changed, the hash code computed above is
	     stale; recompute it before inserting.  */
	  if (insert_regs (exp, class1, 0))
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	  new->in_struct = hash_arg_in_struct;
	}
    }
}
1566\f
/* Remove from the hash table, or mark as invalid,
   all expressions whose values could be altered by storing in X.
   X is a register, a subreg, or a memory reference with nonvarying address
   (because, when a memory reference with a varying address is stored in,
   all memory references are removed by invalidate_memory
   so specific invalidation is superfluous).
   FULL_MODE, if not VOIDmode, indicates that this much should be invalidated
   instead of just the amount indicated by the mode of X.  This is only used
   for bitfield stores into memory.

   A nonvarying address may be just a register or just
   a symbol reference, or it may be either of those plus
   a numeric offset.  */

static void
invalidate (x, full_mode)
     rtx x;
     enum machine_mode full_mode;
{
  register int i;
  register struct table_elt *p;

  /* If X is a register, dependencies on its contents
     are recorded through the qty number mechanism.
     Just change the qty number of the register,
     mark it as invalid for expressions that refer to it,
     and remove it itself.  */

  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);
      register unsigned hash = HASH (x, GET_MODE (x));

      /* Remove REGNO from any quantity list it might be on and indicate
	 that its value might have changed.  If it is a pseudo, remove its
	 entry from the hash table.

	 For a hard register, we do the first two actions above for any
	 additional hard registers corresponding to X.  Then, if any of these
	 registers are in the table, we must remove any REG entries that
	 overlap these registers.  */

      delete_reg_equiv (regno);
      reg_tick[regno]++;

      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  /* Because a register can be referenced in more than one mode,
	     we might have to remove more than one table entry.  */

	  struct table_elt *elt;

	  while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
	    remove_from_table (elt, hash);
	}
      else
	{
	  HOST_WIDE_INT in_table
	    = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
	  int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	  int tregno, tendregno;
	  register struct table_elt *p, *next;

	  CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

	  /* Invalidate every additional hard register covered by X,
	     noting whether any of them was in the table.  */
	  for (i = regno + 1; i < endregno; i++)
	    {
	      in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
	      CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
	      delete_reg_equiv (i);
	      reg_tick[i]++;
	    }

	  /* Sweep the whole table for hard-register entries whose register
	     range overlaps [regno, endregno).  */
	  if (in_table)
	    for (hash = 0; hash < NBUCKETS; hash++)
	      for (p = table[hash]; p; p = next)
		{
		  next = p->next_same_hash;

		  if (GET_CODE (p->exp) != REG
		      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
		    continue;

		  tregno = REGNO (p->exp);
		  tendregno
		    = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
		  if (tendregno > regno && tregno < endregno)
		    remove_from_table (p, hash);
		}
	}

      return;
    }

  if (GET_CODE (x) == SUBREG)
    {
      if (GET_CODE (SUBREG_REG (x)) != REG)
	abort ();
      invalidate (SUBREG_REG (x), VOIDmode);
      return;
    }

  /* If X is a parallel, invalidate all of its elements.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (i = XVECLEN (x, 0) - 1; i >= 0 ; --i)
	invalidate (XVECEXP (x, 0, i), VOIDmode);
      return;
    }

  /* If X is an expr_list, this is part of a disjoint return value;
     extract the location in question ignoring the offset.  */

  if (GET_CODE (x) == EXPR_LIST)
    {
      invalidate (XEXP (x, 0), VOIDmode);
      return;
    }

  /* X is not a register; it must be a memory reference with
     a nonvarying address.  Remove all hash table elements
     that refer to overlapping pieces of memory.  */

  if (GET_CODE (x) != MEM)
    abort ();

  if (full_mode == VOIDmode)
    full_mode = GET_MODE (x);

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *next;
      for (p = table[i]; p; p = next)
	{
	  next = p->next_same_hash;
	  /* Invalidate ASM_OPERANDS which reference memory (this is easier
	     than checking all the aliases).  */
	  if (p->in_memory
	      && (GET_CODE (p->exp) != MEM
		  || true_dependence (x, full_mode, p->exp, cse_rtx_varies_p)))
	    remove_from_table (p, i);
	}
    }
}
1712
1713/* Remove all expressions that refer to register REGNO,
1714 since they are already invalid, and we are about to
1715 mark that register valid again and don't want the old
1716 expressions to reappear as valid. */
1717
1718static void
1719remove_invalid_refs (regno)
1720 int regno;
1721{
1722 register int i;
1723 register struct table_elt *p, *next;
1724
1725 for (i = 0; i < NBUCKETS; i++)
1726 for (p = table[i]; p; p = next)
1727 {
1728 next = p->next_same_hash;
1729 if (GET_CODE (p->exp) != REG
906c4e36 1730 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
7afe21cc
RK
1731 remove_from_table (p, i);
1732 }
1733}
34c73909
R
1734
/* Likewise, but for pseudo REGNO only remove expressions that overlap the
   SUBREG starting at word WORD with mode MODE.  SUBREGs of REGNO whose
   words do not overlap [WORD, END] are kept.  */
static void
remove_invalid_subreg_refs (regno, word, mode)
     int regno;
     int word;
     enum machine_mode mode;
{
  register int i;
  register struct table_elt *p, *next;
  /* Last word covered by the invalidated SUBREG.  */
  int end = word + (GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD;

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = next)
      {
	rtx exp;
	next = p->next_same_hash;

	/* Remove non-REG entries that refer to REGNO, except SUBREGs of
	   REGNO whose word range lies entirely outside [WORD, END].  */
	exp = p->exp;
	if (GET_CODE (p->exp) != REG
	    && (GET_CODE (exp) != SUBREG
		|| GET_CODE (SUBREG_REG (exp)) != REG
		|| REGNO (SUBREG_REG (exp)) != regno
		|| (((SUBREG_WORD (exp)
		      + (GET_MODE_SIZE (GET_MODE (exp)) - 1) / UNITS_PER_WORD)
		     >= word)
		    && SUBREG_WORD (exp) <= end))
	    && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
	  remove_from_table (p, i);
      }
}
7afe21cc
RK
1765\f
/* Recompute the hash codes of any valid entries in the hash table that
   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.

   This is called when we make a jump equivalence.  */

static void
rehash_using_reg (x)
     rtx x;
{
  unsigned int i;
  struct table_elt *p, *next;
  unsigned hash;

  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* If X is not a register or if the register is known not to be in any
     valid entries in the table, we have no work to do.  */

  if (GET_CODE (x) != REG
      || reg_in_table[REGNO (x)] < 0
      || reg_in_table[REGNO (x)] != reg_tick[REGNO (x)])
    return;

  /* Scan all hash chains looking for valid entries that mention X.
     If we find one and it is in the wrong hash chain, move it.  We can skip
     objects that are registers, since they are handled specially.  */

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
	    && exp_equiv_p (p->exp, p->exp, 1, 0)
	    && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
	  {
	    /* Unlink P from bucket I...  */
	    if (p->next_same_hash)
	      p->next_same_hash->prev_same_hash = p->prev_same_hash;

	    if (p->prev_same_hash)
	      p->prev_same_hash->next_same_hash = p->next_same_hash;
	    else
	      table[i] = p->next_same_hash;

	    /* ...and push it onto the front of its correct bucket.  */
	    p->next_same_hash = table[hash];
	    p->prev_same_hash = 0;
	    if (table[hash])
	      table[hash]->prev_same_hash = p;
	    table[hash] = p;
	  }
      }
}
1818\f
7afe21cc
RK
1819/* Remove from the hash table any expression that is a call-clobbered
1820 register. Also update their TICK values. */
1821
1822static void
1823invalidate_for_call ()
1824{
1825 int regno, endregno;
1826 int i;
2197a88a 1827 unsigned hash;
7afe21cc
RK
1828 struct table_elt *p, *next;
1829 int in_table = 0;
1830
1831 /* Go through all the hard registers. For each that is clobbered in
1832 a CALL_INSN, remove the register from quantity chains and update
1833 reg_tick if defined. Also see if any of these registers is currently
1834 in the table. */
1835
1836 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1837 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1838 {
1839 delete_reg_equiv (regno);
1840 if (reg_tick[regno] >= 0)
1841 reg_tick[regno]++;
1842
0e227018 1843 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
7afe21cc
RK
1844 }
1845
1846 /* In the case where we have no call-clobbered hard registers in the
1847 table, we are done. Otherwise, scan the table and remove any
1848 entry that overlaps a call-clobbered register. */
1849
1850 if (in_table)
1851 for (hash = 0; hash < NBUCKETS; hash++)
1852 for (p = table[hash]; p; p = next)
1853 {
1854 next = p->next_same_hash;
1855
9ae8ffe7
JL
1856 if (p->in_memory)
1857 {
1858 remove_from_table (p, hash);
1859 continue;
1860 }
1861
7afe21cc
RK
1862 if (GET_CODE (p->exp) != REG
1863 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1864 continue;
1865
1866 regno = REGNO (p->exp);
1867 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1868
1869 for (i = regno; i < endregno; i++)
1870 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1871 {
1872 remove_from_table (p, hash);
1873 break;
1874 }
1875 }
1876}
1877\f
1878/* Given an expression X of type CONST,
1879 and ELT which is its table entry (or 0 if it
1880 is not in the hash table),
1881 return an alternate expression for X as a register plus integer.
1882 If none can be found, return 0. */
1883
1884static rtx
1885use_related_value (x, elt)
1886 rtx x;
1887 struct table_elt *elt;
1888{
1889 register struct table_elt *relt = 0;
1890 register struct table_elt *p, *q;
906c4e36 1891 HOST_WIDE_INT offset;
7afe21cc
RK
1892
1893 /* First, is there anything related known?
1894 If we have a table element, we can tell from that.
1895 Otherwise, must look it up. */
1896
1897 if (elt != 0 && elt->related_value != 0)
1898 relt = elt;
1899 else if (elt == 0 && GET_CODE (x) == CONST)
1900 {
1901 rtx subexp = get_related_value (x);
1902 if (subexp != 0)
1903 relt = lookup (subexp,
1904 safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
1905 GET_MODE (subexp));
1906 }
1907
1908 if (relt == 0)
1909 return 0;
1910
1911 /* Search all related table entries for one that has an
1912 equivalent register. */
1913
1914 p = relt;
1915 while (1)
1916 {
1917 /* This loop is strange in that it is executed in two different cases.
1918 The first is when X is already in the table. Then it is searching
1919 the RELATED_VALUE list of X's class (RELT). The second case is when
1920 X is not in the table. Then RELT points to a class for the related
1921 value.
1922
1923 Ensure that, whatever case we are in, that we ignore classes that have
1924 the same value as X. */
1925
1926 if (rtx_equal_p (x, p->exp))
1927 q = 0;
1928 else
1929 for (q = p->first_same_value; q; q = q->next_same_value)
1930 if (GET_CODE (q->exp) == REG)
1931 break;
1932
1933 if (q)
1934 break;
1935
1936 p = p->related_value;
1937
1938 /* We went all the way around, so there is nothing to be found.
1939 Alternatively, perhaps RELT was in the table for some other reason
1940 and it has no related values recorded. */
1941 if (p == relt || p == 0)
1942 break;
1943 }
1944
1945 if (q == 0)
1946 return 0;
1947
1948 offset = (get_integer_term (x) - get_integer_term (p->exp));
1949 /* Note: OFFSET may be 0 if P->xexp and X are related by commutativity. */
1950 return plus_constant (q->exp, offset);
1951}
1952\f
1953/* Hash an rtx. We are careful to make sure the value is never negative.
1954 Equivalent registers hash identically.
1955 MODE is used in hashing for CONST_INTs only;
1956 otherwise the mode of X is used.
1957
1958 Store 1 in do_not_record if any subexpression is volatile.
1959
1960 Store 1 in hash_arg_in_memory if X contains a MEM rtx
1961 which does not have the RTX_UNCHANGING_P bit set.
1962 In this case, also store 1 in hash_arg_in_struct
1963 if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.
1964
1965 Note that cse_insn knows that the hash code of a MEM expression
1966 is just (int) MEM plus the hash code of the address. */
1967
2197a88a 1968static unsigned
7afe21cc
RK
1969canon_hash (x, mode)
1970 rtx x;
1971 enum machine_mode mode;
1972{
1973 register int i, j;
2197a88a 1974 register unsigned hash = 0;
7afe21cc
RK
1975 register enum rtx_code code;
1976 register char *fmt;
1977
1978 /* repeat is used to turn tail-recursion into iteration. */
1979 repeat:
1980 if (x == 0)
1981 return hash;
1982
1983 code = GET_CODE (x);
1984 switch (code)
1985 {
1986 case REG:
1987 {
1988 register int regno = REGNO (x);
1989
1990 /* On some machines, we can't record any non-fixed hard register,
1991 because extending its life will cause reload problems. We
1992 consider ap, fp, and sp to be fixed for this purpose.
0f41302f 1993 On all machines, we can't record any global registers. */
7afe21cc
RK
1994
1995 if (regno < FIRST_PSEUDO_REGISTER
1996 && (global_regs[regno]
f95182a4
ILT
1997 || (SMALL_REGISTER_CLASSES
1998 && ! fixed_regs[regno]
7afe21cc 1999 && regno != FRAME_POINTER_REGNUM
8bc169f2 2000 && regno != HARD_FRAME_POINTER_REGNUM
7afe21cc 2001 && regno != ARG_POINTER_REGNUM
e9a25f70 2002 && regno != STACK_POINTER_REGNUM)))
7afe21cc
RK
2003 {
2004 do_not_record = 1;
2005 return 0;
2006 }
2197a88a
RK
2007 hash += ((unsigned) REG << 7) + (unsigned) reg_qty[regno];
2008 return hash;
7afe21cc
RK
2009 }
2010
34c73909
R
2011 /* We handle SUBREG of a REG specially because the underlying
2012 reg changes its hash value with every value change; we don't
2013 want to have to forget unrelated subregs when one subreg changes. */
2014 case SUBREG:
2015 {
2016 if (GET_CODE (SUBREG_REG (x)) == REG)
2017 {
2018 hash += (((unsigned) SUBREG << 7)
2019 + REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
2020 return hash;
2021 }
2022 break;
2023 }
2024
7afe21cc 2025 case CONST_INT:
2197a88a
RK
2026 {
2027 unsigned HOST_WIDE_INT tem = INTVAL (x);
2028 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2029 return hash;
2030 }
7afe21cc
RK
2031
2032 case CONST_DOUBLE:
2033 /* This is like the general case, except that it only counts
2034 the integers representing the constant. */
2197a88a 2035 hash += (unsigned) code + (unsigned) GET_MODE (x);
969c8517
RK
2036 if (GET_MODE (x) != VOIDmode)
2037 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
2038 {
2039 unsigned tem = XINT (x, i);
2040 hash += tem;
2041 }
2042 else
2043 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2044 + (unsigned) CONST_DOUBLE_HIGH (x));
7afe21cc
RK
2045 return hash;
2046
2047 /* Assume there is only one rtx object for any given label. */
2048 case LABEL_REF:
3c543775 2049 hash
7bcac048 2050 += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2197a88a 2051 return hash;
7afe21cc
RK
2052
2053 case SYMBOL_REF:
3c543775 2054 hash
7bcac048 2055 += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2197a88a 2056 return hash;
7afe21cc
RK
2057
2058 case MEM:
2059 if (MEM_VOLATILE_P (x))
2060 {
2061 do_not_record = 1;
2062 return 0;
2063 }
9ad91d71 2064 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
7afe21cc
RK
2065 {
2066 hash_arg_in_memory = 1;
2067 if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
2068 }
2069 /* Now that we have already found this special case,
2070 might as well speed it up as much as possible. */
2197a88a 2071 hash += (unsigned) MEM;
7afe21cc
RK
2072 x = XEXP (x, 0);
2073 goto repeat;
2074
2075 case PRE_DEC:
2076 case PRE_INC:
2077 case POST_DEC:
2078 case POST_INC:
2079 case PC:
2080 case CC0:
2081 case CALL:
2082 case UNSPEC_VOLATILE:
2083 do_not_record = 1;
2084 return 0;
2085
2086 case ASM_OPERANDS:
2087 if (MEM_VOLATILE_P (x))
2088 {
2089 do_not_record = 1;
2090 return 0;
2091 }
e9a25f70
JL
2092 break;
2093
2094 default:
2095 break;
7afe21cc
RK
2096 }
2097
2098 i = GET_RTX_LENGTH (code) - 1;
2197a88a 2099 hash += (unsigned) code + (unsigned) GET_MODE (x);
7afe21cc
RK
2100 fmt = GET_RTX_FORMAT (code);
2101 for (; i >= 0; i--)
2102 {
2103 if (fmt[i] == 'e')
2104 {
2105 rtx tem = XEXP (x, i);
7afe21cc
RK
2106
2107 /* If we are about to do the last recursive call
2108 needed at this level, change it into iteration.
2109 This function is called enough to be worth it. */
2110 if (i == 0)
2111 {
2112 x = tem;
2113 goto repeat;
2114 }
2115 hash += canon_hash (tem, 0);
2116 }
2117 else if (fmt[i] == 'E')
2118 for (j = 0; j < XVECLEN (x, i); j++)
2119 hash += canon_hash (XVECEXP (x, i, j), 0);
2120 else if (fmt[i] == 's')
2121 {
2197a88a 2122 register unsigned char *p = (unsigned char *) XSTR (x, i);
7afe21cc
RK
2123 if (p)
2124 while (*p)
2197a88a 2125 hash += *p++;
7afe21cc
RK
2126 }
2127 else if (fmt[i] == 'i')
2128 {
2197a88a
RK
2129 register unsigned tem = XINT (x, i);
2130 hash += tem;
7afe21cc 2131 }
e9a25f70
JL
2132 else if (fmt[i] == '0')
2133 /* unused */;
7afe21cc
RK
2134 else
2135 abort ();
2136 }
2137 return hash;
2138}
2139
2140/* Like canon_hash but with no side effects. */
2141
2197a88a 2142static unsigned
7afe21cc
RK
2143safe_hash (x, mode)
2144 rtx x;
2145 enum machine_mode mode;
2146{
2147 int save_do_not_record = do_not_record;
2148 int save_hash_arg_in_memory = hash_arg_in_memory;
2149 int save_hash_arg_in_struct = hash_arg_in_struct;
2197a88a 2150 unsigned hash = canon_hash (x, mode);
7afe21cc
RK
2151 hash_arg_in_memory = save_hash_arg_in_memory;
2152 hash_arg_in_struct = save_hash_arg_in_struct;
2153 do_not_record = save_do_not_record;
2154 return hash;
2155}
2156\f
2157/* Return 1 iff X and Y would canonicalize into the same thing,
2158 without actually constructing the canonicalization of either one.
2159 If VALIDATE is nonzero,
2160 we assume X is an expression being processed from the rtl
2161 and Y was found in the hash table. We check register refs
2162 in Y for being marked as valid.
2163
2164 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2165 that is known to be in the register. Ordinarily, we don't allow them
2166 to match, because letting them match would cause unpredictable results
2167 in all the places that search a hash table chain for an equivalent
2168 for a given value. A possible equivalent that has different structure
2169 has its hash code computed from different data. Whether the hash code
38e01259 2170 is the same as that of the given value is pure luck. */
7afe21cc
RK
2171
2172static int
2173exp_equiv_p (x, y, validate, equal_values)
2174 rtx x, y;
2175 int validate;
2176 int equal_values;
2177{
906c4e36 2178 register int i, j;
7afe21cc
RK
2179 register enum rtx_code code;
2180 register char *fmt;
2181
2182 /* Note: it is incorrect to assume an expression is equivalent to itself
2183 if VALIDATE is nonzero. */
2184 if (x == y && !validate)
2185 return 1;
2186 if (x == 0 || y == 0)
2187 return x == y;
2188
2189 code = GET_CODE (x);
2190 if (code != GET_CODE (y))
2191 {
2192 if (!equal_values)
2193 return 0;
2194
2195 /* If X is a constant and Y is a register or vice versa, they may be
2196 equivalent. We only have to validate if Y is a register. */
2197 if (CONSTANT_P (x) && GET_CODE (y) == REG
2198 && REGNO_QTY_VALID_P (REGNO (y))
2199 && GET_MODE (y) == qty_mode[reg_qty[REGNO (y)]]
2200 && rtx_equal_p (x, qty_const[reg_qty[REGNO (y)]])
2201 && (! validate || reg_in_table[REGNO (y)] == reg_tick[REGNO (y)]))
2202 return 1;
2203
2204 if (CONSTANT_P (y) && code == REG
2205 && REGNO_QTY_VALID_P (REGNO (x))
2206 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
2207 && rtx_equal_p (y, qty_const[reg_qty[REGNO (x)]]))
2208 return 1;
2209
2210 return 0;
2211 }
2212
2213 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2214 if (GET_MODE (x) != GET_MODE (y))
2215 return 0;
2216
2217 switch (code)
2218 {
2219 case PC:
2220 case CC0:
2221 return x == y;
2222
2223 case CONST_INT:
58c8c593 2224 return INTVAL (x) == INTVAL (y);
7afe21cc
RK
2225
2226 case LABEL_REF:
7afe21cc
RK
2227 return XEXP (x, 0) == XEXP (y, 0);
2228
f54d4924
RK
2229 case SYMBOL_REF:
2230 return XSTR (x, 0) == XSTR (y, 0);
2231
7afe21cc
RK
2232 case REG:
2233 {
2234 int regno = REGNO (y);
2235 int endregno
2236 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2237 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2238 int i;
2239
2240 /* If the quantities are not the same, the expressions are not
2241 equivalent. If there are and we are not to validate, they
2242 are equivalent. Otherwise, ensure all regs are up-to-date. */
2243
2244 if (reg_qty[REGNO (x)] != reg_qty[regno])
2245 return 0;
2246
2247 if (! validate)
2248 return 1;
2249
2250 for (i = regno; i < endregno; i++)
2251 if (reg_in_table[i] != reg_tick[i])
2252 return 0;
2253
2254 return 1;
2255 }
2256
2257 /* For commutative operations, check both orders. */
2258 case PLUS:
2259 case MULT:
2260 case AND:
2261 case IOR:
2262 case XOR:
2263 case NE:
2264 case EQ:
2265 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2266 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2267 validate, equal_values))
2268 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2269 validate, equal_values)
2270 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2271 validate, equal_values)));
e9a25f70
JL
2272
2273 default:
2274 break;
7afe21cc
RK
2275 }
2276
2277 /* Compare the elements. If any pair of corresponding elements
2278 fail to match, return 0 for the whole things. */
2279
2280 fmt = GET_RTX_FORMAT (code);
2281 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2282 {
906c4e36 2283 switch (fmt[i])
7afe21cc 2284 {
906c4e36 2285 case 'e':
7afe21cc
RK
2286 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2287 return 0;
906c4e36
RK
2288 break;
2289
2290 case 'E':
7afe21cc
RK
2291 if (XVECLEN (x, i) != XVECLEN (y, i))
2292 return 0;
2293 for (j = 0; j < XVECLEN (x, i); j++)
2294 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2295 validate, equal_values))
2296 return 0;
906c4e36
RK
2297 break;
2298
2299 case 's':
7afe21cc
RK
2300 if (strcmp (XSTR (x, i), XSTR (y, i)))
2301 return 0;
906c4e36
RK
2302 break;
2303
2304 case 'i':
7afe21cc
RK
2305 if (XINT (x, i) != XINT (y, i))
2306 return 0;
906c4e36
RK
2307 break;
2308
2309 case 'w':
2310 if (XWINT (x, i) != XWINT (y, i))
2311 return 0;
2312 break;
2313
2314 case '0':
2315 break;
2316
2317 default:
2318 abort ();
7afe21cc 2319 }
906c4e36
RK
2320 }
2321
7afe21cc
RK
2322 return 1;
2323}
2324\f
2325/* Return 1 iff any subexpression of X matches Y.
2326 Here we do not require that X or Y be valid (for registers referred to)
2327 for being in the hash table. */
2328
6cd4575e 2329static int
7afe21cc
RK
2330refers_to_p (x, y)
2331 rtx x, y;
2332{
2333 register int i;
2334 register enum rtx_code code;
2335 register char *fmt;
2336
2337 repeat:
2338 if (x == y)
2339 return 1;
2340 if (x == 0 || y == 0)
2341 return 0;
2342
2343 code = GET_CODE (x);
2344 /* If X as a whole has the same code as Y, they may match.
2345 If so, return 1. */
2346 if (code == GET_CODE (y))
2347 {
2348 if (exp_equiv_p (x, y, 0, 1))
2349 return 1;
2350 }
2351
2352 /* X does not match, so try its subexpressions. */
2353
2354 fmt = GET_RTX_FORMAT (code);
2355 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2356 if (fmt[i] == 'e')
2357 {
2358 if (i == 0)
2359 {
2360 x = XEXP (x, 0);
2361 goto repeat;
2362 }
2363 else
2364 if (refers_to_p (XEXP (x, i), y))
2365 return 1;
2366 }
2367 else if (fmt[i] == 'E')
2368 {
2369 int j;
2370 for (j = 0; j < XVECLEN (x, i); j++)
2371 if (refers_to_p (XVECEXP (x, i, j), y))
2372 return 1;
2373 }
2374
2375 return 0;
2376}
2377\f
f451db89
JL
2378/* Given ADDR and SIZE (a memory address, and the size of the memory reference),
2379 set PBASE, PSTART, and PEND which correspond to the base of the address,
2380 the starting offset, and ending offset respectively.
2381
bb4034b3 2382 ADDR is known to be a nonvarying address. */
f451db89 2383
bb4034b3
JW
2384/* ??? Despite what the comments say, this function is in fact frequently
2385 passed varying addresses. This does not appear to cause any problems. */
f451db89
JL
2386
2387static void
2388set_nonvarying_address_components (addr, size, pbase, pstart, pend)
2389 rtx addr;
2390 int size;
2391 rtx *pbase;
6500fb43 2392 HOST_WIDE_INT *pstart, *pend;
f451db89
JL
2393{
2394 rtx base;
c85663b1 2395 HOST_WIDE_INT start, end;
f451db89
JL
2396
2397 base = addr;
2398 start = 0;
2399 end = 0;
2400
e5e809f4
JL
2401 if (flag_pic && GET_CODE (base) == PLUS
2402 && XEXP (base, 0) == pic_offset_table_rtx)
2403 base = XEXP (base, 1);
2404
f451db89
JL
2405 /* Registers with nonvarying addresses usually have constant equivalents;
2406 but the frame pointer register is also possible. */
2407 if (GET_CODE (base) == REG
2408 && qty_const != 0
2409 && REGNO_QTY_VALID_P (REGNO (base))
2410 && qty_mode[reg_qty[REGNO (base)]] == GET_MODE (base)
2411 && qty_const[reg_qty[REGNO (base)]] != 0)
2412 base = qty_const[reg_qty[REGNO (base)]];
2413 else if (GET_CODE (base) == PLUS
2414 && GET_CODE (XEXP (base, 1)) == CONST_INT
2415 && GET_CODE (XEXP (base, 0)) == REG
2416 && qty_const != 0
2417 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2418 && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2419 == GET_MODE (XEXP (base, 0)))
2420 && qty_const[reg_qty[REGNO (XEXP (base, 0))]])
2421 {
2422 start = INTVAL (XEXP (base, 1));
2423 base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2424 }
9c6b0bae 2425 /* This can happen as the result of virtual register instantiation,
abc95ed3 2426 if the initial offset is too large to be a valid address. */
9c6b0bae
RK
2427 else if (GET_CODE (base) == PLUS
2428 && GET_CODE (XEXP (base, 0)) == REG
2429 && GET_CODE (XEXP (base, 1)) == REG
2430 && qty_const != 0
2431 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2432 && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2433 == GET_MODE (XEXP (base, 0)))
2434 && qty_const[reg_qty[REGNO (XEXP (base, 0))]]
2435 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 1)))
2436 && (qty_mode[reg_qty[REGNO (XEXP (base, 1))]]
2437 == GET_MODE (XEXP (base, 1)))
2438 && qty_const[reg_qty[REGNO (XEXP (base, 1))]])
2439 {
2440 rtx tem = qty_const[reg_qty[REGNO (XEXP (base, 1))]];
2441 base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2442
2443 /* One of the two values must be a constant. */
2444 if (GET_CODE (base) != CONST_INT)
2445 {
2446 if (GET_CODE (tem) != CONST_INT)
2447 abort ();
2448 start = INTVAL (tem);
2449 }
2450 else
2451 {
2452 start = INTVAL (base);
2453 base = tem;
2454 }
2455 }
f451db89 2456
c85663b1
RK
2457 /* Handle everything that we can find inside an address that has been
2458 viewed as constant. */
f451db89 2459
c85663b1 2460 while (1)
f451db89 2461 {
c85663b1
RK
2462 /* If no part of this switch does a "continue", the code outside
2463 will exit this loop. */
2464
2465 switch (GET_CODE (base))
2466 {
2467 case LO_SUM:
2468 /* By definition, operand1 of a LO_SUM is the associated constant
2469 address. Use the associated constant address as the base
2470 instead. */
2471 base = XEXP (base, 1);
2472 continue;
2473
2474 case CONST:
2475 /* Strip off CONST. */
2476 base = XEXP (base, 0);
2477 continue;
2478
2479 case PLUS:
2480 if (GET_CODE (XEXP (base, 1)) == CONST_INT)
2481 {
2482 start += INTVAL (XEXP (base, 1));
2483 base = XEXP (base, 0);
2484 continue;
2485 }
2486 break;
2487
2488 case AND:
2489 /* Handle the case of an AND which is the negative of a power of
2490 two. This is used to represent unaligned memory operations. */
2491 if (GET_CODE (XEXP (base, 1)) == CONST_INT
2492 && exact_log2 (- INTVAL (XEXP (base, 1))) > 0)
2493 {
2494 set_nonvarying_address_components (XEXP (base, 0), size,
2495 pbase, pstart, pend);
2496
2497 /* Assume the worst misalignment. START is affected, but not
2498 END, so compensate but adjusting SIZE. Don't lose any
2499 constant we already had. */
2500
2501 size = *pend - *pstart - INTVAL (XEXP (base, 1)) - 1;
89046535
RK
2502 start += *pstart + INTVAL (XEXP (base, 1)) + 1;
2503 end += *pend;
c85663b1
RK
2504 base = *pbase;
2505 }
2506 break;
e9a25f70
JL
2507
2508 default:
2509 break;
c85663b1
RK
2510 }
2511
2512 break;
f451db89
JL
2513 }
2514
336d6f0a
RK
2515 if (GET_CODE (base) == CONST_INT)
2516 {
2517 start += INTVAL (base);
2518 base = const0_rtx;
2519 }
2520
f451db89
JL
2521 end = start + size;
2522
2523 /* Set the return values. */
2524 *pbase = base;
2525 *pstart = start;
2526 *pend = end;
2527}
2528
9ae8ffe7
JL
2529/* Return 1 if X has a value that can vary even between two
2530 executions of the program. 0 means X can be compared reliably
2531 against certain constants or near-constants. */
7afe21cc
RK
2532
2533static int
9ae8ffe7
JL
2534cse_rtx_varies_p (x)
2535 register rtx x;
7afe21cc
RK
2536{
2537 /* We need not check for X and the equivalence class being of the same
2538 mode because if X is equivalent to a constant in some mode, it
2539 doesn't vary in any mode. */
2540
9ae8ffe7
JL
2541 if (GET_CODE (x) == REG
2542 && REGNO_QTY_VALID_P (REGNO (x))
2543 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
2544 && qty_const[reg_qty[REGNO (x)]] != 0)
7afe21cc
RK
2545 return 0;
2546
9ae8ffe7
JL
2547 if (GET_CODE (x) == PLUS
2548 && GET_CODE (XEXP (x, 1)) == CONST_INT
2549 && GET_CODE (XEXP (x, 0)) == REG
2550 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2551 && (GET_MODE (XEXP (x, 0))
2552 == qty_mode[reg_qty[REGNO (XEXP (x, 0))]])
2553 && qty_const[reg_qty[REGNO (XEXP (x, 0))]])
7afe21cc
RK
2554 return 0;
2555
9c6b0bae
RK
2556 /* This can happen as the result of virtual register instantiation, if
2557 the initial constant is too large to be a valid address. This gives
2558 us a three instruction sequence, load large offset into a register,
2559 load fp minus a constant into a register, then a MEM which is the
2560 sum of the two `constant' registers. */
9ae8ffe7
JL
2561 if (GET_CODE (x) == PLUS
2562 && GET_CODE (XEXP (x, 0)) == REG
2563 && GET_CODE (XEXP (x, 1)) == REG
2564 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2565 && (GET_MODE (XEXP (x, 0))
2566 == qty_mode[reg_qty[REGNO (XEXP (x, 0))]])
2567 && qty_const[reg_qty[REGNO (XEXP (x, 0))]]
2568 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1)))
2569 && (GET_MODE (XEXP (x, 1))
2570 == qty_mode[reg_qty[REGNO (XEXP (x, 1))]])
2571 && qty_const[reg_qty[REGNO (XEXP (x, 1))]])
9c6b0bae
RK
2572 return 0;
2573
9ae8ffe7 2574 return rtx_varies_p (x);
7afe21cc
RK
2575}
2576\f
2577/* Canonicalize an expression:
2578 replace each register reference inside it
2579 with the "oldest" equivalent register.
2580
2581 If INSN is non-zero and we are replacing a pseudo with a hard register
7722328e
RK
2582 or vice versa, validate_change is used to ensure that INSN remains valid
2583 after we make our substitution. The calls are made with IN_GROUP non-zero
2584 so apply_change_group must be called upon the outermost return from this
2585 function (unless INSN is zero). The result of apply_change_group can
2586 generally be discarded since the changes we are making are optional. */
7afe21cc
RK
2587
2588static rtx
2589canon_reg (x, insn)
2590 rtx x;
2591 rtx insn;
2592{
2593 register int i;
2594 register enum rtx_code code;
2595 register char *fmt;
2596
2597 if (x == 0)
2598 return x;
2599
2600 code = GET_CODE (x);
2601 switch (code)
2602 {
2603 case PC:
2604 case CC0:
2605 case CONST:
2606 case CONST_INT:
2607 case CONST_DOUBLE:
2608 case SYMBOL_REF:
2609 case LABEL_REF:
2610 case ADDR_VEC:
2611 case ADDR_DIFF_VEC:
2612 return x;
2613
2614 case REG:
2615 {
2616 register int first;
2617
2618 /* Never replace a hard reg, because hard regs can appear
2619 in more than one machine mode, and we must preserve the mode
2620 of each occurrence. Also, some hard regs appear in
2621 MEMs that are shared and mustn't be altered. Don't try to
2622 replace any reg that maps to a reg of class NO_REGS. */
2623 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2624 || ! REGNO_QTY_VALID_P (REGNO (x)))
2625 return x;
2626
2627 first = qty_first_reg[reg_qty[REGNO (x)]];
2628 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2629 : REGNO_REG_CLASS (first) == NO_REGS ? x
38a448ca 2630 : gen_rtx_REG (qty_mode[reg_qty[REGNO (x)]], first));
7afe21cc 2631 }
e9a25f70
JL
2632
2633 default:
2634 break;
7afe21cc
RK
2635 }
2636
2637 fmt = GET_RTX_FORMAT (code);
2638 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2639 {
2640 register int j;
2641
2642 if (fmt[i] == 'e')
2643 {
2644 rtx new = canon_reg (XEXP (x, i), insn);
58873255 2645 int insn_code;
7afe21cc
RK
2646
2647 /* If replacing pseudo with hard reg or vice versa, ensure the
178c39f6 2648 insn remains valid. Likewise if the insn has MATCH_DUPs. */
aee9dc31
RS
2649 if (insn != 0 && new != 0
2650 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
178c39f6
RK
2651 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2652 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
58873255
RK
2653 || (insn_code = recog_memoized (insn)) < 0
2654 || insn_n_dups[insn_code] > 0))
77fa0940 2655 validate_change (insn, &XEXP (x, i), new, 1);
7afe21cc
RK
2656 else
2657 XEXP (x, i) = new;
2658 }
2659 else if (fmt[i] == 'E')
2660 for (j = 0; j < XVECLEN (x, i); j++)
2661 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2662 }
2663
2664 return x;
2665}
2666\f
a2cabb29 2667/* LOC is a location within INSN that is an operand address (the contents of
7afe21cc
RK
2668 a MEM). Find the best equivalent address to use that is valid for this
2669 insn.
2670
2671 On most CISC machines, complicated address modes are costly, and rtx_cost
2672 is a good approximation for that cost. However, most RISC machines have
2673 only a few (usually only one) memory reference formats. If an address is
2674 valid at all, it is often just as cheap as any other address. Hence, for
2675 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2676 costs of various addresses. For two addresses of equal cost, choose the one
2677 with the highest `rtx_cost' value as that has the potential of eliminating
2678 the most insns. For equal costs, we choose the first in the equivalence
2679 class. Note that we ignore the fact that pseudo registers are cheaper
2680 than hard registers here because we would also prefer the pseudo registers.
2681 */
2682
6cd4575e 2683static void
7afe21cc
RK
2684find_best_addr (insn, loc)
2685 rtx insn;
2686 rtx *loc;
2687{
7a87758d 2688 struct table_elt *elt;
7afe21cc 2689 rtx addr = *loc;
7a87758d
AS
2690#ifdef ADDRESS_COST
2691 struct table_elt *p;
7afe21cc 2692 int found_better = 1;
7a87758d 2693#endif
7afe21cc
RK
2694 int save_do_not_record = do_not_record;
2695 int save_hash_arg_in_memory = hash_arg_in_memory;
2696 int save_hash_arg_in_struct = hash_arg_in_struct;
7afe21cc
RK
2697 int addr_volatile;
2698 int regno;
2197a88a 2699 unsigned hash;
7afe21cc
RK
2700
2701 /* Do not try to replace constant addresses or addresses of local and
2702 argument slots. These MEM expressions are made only once and inserted
2703 in many instructions, as well as being used to control symbol table
2704 output. It is not safe to clobber them.
2705
2706 There are some uncommon cases where the address is already in a register
2707 for some reason, but we cannot take advantage of that because we have
2708 no easy way to unshare the MEM. In addition, looking up all stack
2709 addresses is costly. */
2710 if ((GET_CODE (addr) == PLUS
2711 && GET_CODE (XEXP (addr, 0)) == REG
2712 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2713 && (regno = REGNO (XEXP (addr, 0)),
8bc169f2
DE
2714 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2715 || regno == ARG_POINTER_REGNUM))
7afe21cc 2716 || (GET_CODE (addr) == REG
8bc169f2
DE
2717 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2718 || regno == HARD_FRAME_POINTER_REGNUM
2719 || regno == ARG_POINTER_REGNUM))
e9a25f70 2720 || GET_CODE (addr) == ADDRESSOF
7afe21cc
RK
2721 || CONSTANT_ADDRESS_P (addr))
2722 return;
2723
2724 /* If this address is not simply a register, try to fold it. This will
2725 sometimes simplify the expression. Many simplifications
2726 will not be valid, but some, usually applying the associative rule, will
2727 be valid and produce better code. */
8c87f107
RK
2728 if (GET_CODE (addr) != REG)
2729 {
2730 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2731
2732 if (1
2733#ifdef ADDRESS_COST
2f541799
MM
2734 && (CSE_ADDRESS_COST (folded) < CSE_ADDRESS_COST (addr)
2735 || (CSE_ADDRESS_COST (folded) == CSE_ADDRESS_COST (addr)
9a252d29 2736 && rtx_cost (folded, MEM) > rtx_cost (addr, MEM)))
8c87f107 2737#else
9a252d29 2738 && rtx_cost (folded, MEM) < rtx_cost (addr, MEM)
8c87f107
RK
2739#endif
2740 && validate_change (insn, loc, folded, 0))
2741 addr = folded;
2742 }
7afe21cc 2743
42495ca0
RK
2744 /* If this address is not in the hash table, we can't look for equivalences
2745 of the whole address. Also, ignore if volatile. */
2746
7afe21cc 2747 do_not_record = 0;
2197a88a 2748 hash = HASH (addr, Pmode);
7afe21cc
RK
2749 addr_volatile = do_not_record;
2750 do_not_record = save_do_not_record;
2751 hash_arg_in_memory = save_hash_arg_in_memory;
2752 hash_arg_in_struct = save_hash_arg_in_struct;
2753
2754 if (addr_volatile)
2755 return;
2756
2197a88a 2757 elt = lookup (addr, hash, Pmode);
7afe21cc 2758
7afe21cc 2759#ifndef ADDRESS_COST
42495ca0
RK
2760 if (elt)
2761 {
2d8b0f3a 2762 int our_cost = elt->cost;
42495ca0
RK
2763
2764 /* Find the lowest cost below ours that works. */
2765 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2766 if (elt->cost < our_cost
2767 && (GET_CODE (elt->exp) == REG
2768 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2769 && validate_change (insn, loc,
906c4e36 2770 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
42495ca0
RK
2771 return;
2772 }
2773#else
7afe21cc 2774
42495ca0
RK
2775 if (elt)
2776 {
2777 /* We need to find the best (under the criteria documented above) entry
2778 in the class that is valid. We use the `flag' field to indicate
2779 choices that were invalid and iterate until we can't find a better
2780 one that hasn't already been tried. */
7afe21cc 2781
42495ca0
RK
2782 for (p = elt->first_same_value; p; p = p->next_same_value)
2783 p->flag = 0;
7afe21cc 2784
42495ca0
RK
2785 while (found_better)
2786 {
2f541799 2787 int best_addr_cost = CSE_ADDRESS_COST (*loc);
42495ca0
RK
2788 int best_rtx_cost = (elt->cost + 1) >> 1;
2789 struct table_elt *best_elt = elt;
2790
2791 found_better = 0;
2792 for (p = elt->first_same_value; p; p = p->next_same_value)
2f541799 2793 if (! p->flag)
42495ca0 2794 {
2f541799
MM
2795 if ((GET_CODE (p->exp) == REG
2796 || exp_equiv_p (p->exp, p->exp, 1, 0))
2797 && (CSE_ADDRESS_COST (p->exp) < best_addr_cost
2798 || (CSE_ADDRESS_COST (p->exp) == best_addr_cost
2799 && (p->cost + 1) >> 1 > best_rtx_cost)))
2800 {
2801 found_better = 1;
2802 best_addr_cost = CSE_ADDRESS_COST (p->exp);
2803 best_rtx_cost = (p->cost + 1) >> 1;
2804 best_elt = p;
2805 }
42495ca0 2806 }
7afe21cc 2807
42495ca0
RK
2808 if (found_better)
2809 {
2810 if (validate_change (insn, loc,
906c4e36
RK
2811 canon_reg (copy_rtx (best_elt->exp),
2812 NULL_RTX), 0))
42495ca0
RK
2813 return;
2814 else
2815 best_elt->flag = 1;
2816 }
2817 }
2818 }
7afe21cc 2819
42495ca0
RK
2820 /* If the address is a binary operation with the first operand a register
2821 and the second a constant, do the same as above, but looking for
2822 equivalences of the register. Then try to simplify before checking for
2823 the best address to use. This catches a few cases: First is when we
2824 have REG+const and the register is another REG+const. We can often merge
2825 the constants and eliminate one insn and one register. It may also be
2826 that a machine has a cheap REG+REG+const. Finally, this improves the
2827 code on the Alpha for unaligned byte stores. */
2828
2829 if (flag_expensive_optimizations
2830 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2831 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2832 && GET_CODE (XEXP (*loc, 0)) == REG
2833 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
7afe21cc 2834 {
42495ca0
RK
2835 rtx c = XEXP (*loc, 1);
2836
2837 do_not_record = 0;
2197a88a 2838 hash = HASH (XEXP (*loc, 0), Pmode);
42495ca0
RK
2839 do_not_record = save_do_not_record;
2840 hash_arg_in_memory = save_hash_arg_in_memory;
2841 hash_arg_in_struct = save_hash_arg_in_struct;
2842
2197a88a 2843 elt = lookup (XEXP (*loc, 0), hash, Pmode);
42495ca0
RK
2844 if (elt == 0)
2845 return;
2846
2847 /* We need to find the best (under the criteria documented above) entry
2848 in the class that is valid. We use the `flag' field to indicate
2849 choices that were invalid and iterate until we can't find a better
2850 one that hasn't already been tried. */
7afe21cc 2851
7afe21cc 2852 for (p = elt->first_same_value; p; p = p->next_same_value)
42495ca0 2853 p->flag = 0;
7afe21cc 2854
42495ca0 2855 while (found_better)
7afe21cc 2856 {
2f541799 2857 int best_addr_cost = CSE_ADDRESS_COST (*loc);
42495ca0
RK
2858 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2859 struct table_elt *best_elt = elt;
2860 rtx best_rtx = *loc;
f6516aee
JW
2861 int count;
2862
2863 /* This is at worst case an O(n^2) algorithm, so limit our search
2864 to the first 32 elements on the list. This avoids trouble
2865 compiling code with very long basic blocks that can easily
2866 call cse_gen_binary so many times that we run out of memory. */
42495ca0
RK
2867
2868 found_better = 0;
f6516aee
JW
2869 for (p = elt->first_same_value, count = 0;
2870 p && count < 32;
2871 p = p->next_same_value, count++)
42495ca0
RK
2872 if (! p->flag
2873 && (GET_CODE (p->exp) == REG
2874 || exp_equiv_p (p->exp, p->exp, 1, 0)))
2875 {
96b0e481 2876 rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);
42495ca0 2877
2f541799
MM
2878 if ((CSE_ADDRESS_COST (new) < best_addr_cost
2879 || (CSE_ADDRESS_COST (new) == best_addr_cost
42495ca0
RK
2880 && (COST (new) + 1) >> 1 > best_rtx_cost)))
2881 {
2882 found_better = 1;
2f541799 2883 best_addr_cost = CSE_ADDRESS_COST (new);
42495ca0
RK
2884 best_rtx_cost = (COST (new) + 1) >> 1;
2885 best_elt = p;
2886 best_rtx = new;
2887 }
2888 }
2889
2890 if (found_better)
2891 {
2892 if (validate_change (insn, loc,
906c4e36
RK
2893 canon_reg (copy_rtx (best_rtx),
2894 NULL_RTX), 0))
42495ca0
RK
2895 return;
2896 else
2897 best_elt->flag = 1;
2898 }
7afe21cc
RK
2899 }
2900 }
2901#endif
2902}
2903\f
2904/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2905 operation (EQ, NE, GT, etc.), follow it back through the hash table and
2906 what values are being compared.
2907
2908 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2909 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2910 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2911 compared to produce cc0.
2912
2913 The return value is the comparison operator and is either the code of
2914 A or the code corresponding to the inverse of the comparison. */
2915
2916static enum rtx_code
13c9910f 2917find_comparison_args (code, parg1, parg2, pmode1, pmode2)
7afe21cc
RK
2918 enum rtx_code code;
2919 rtx *parg1, *parg2;
13c9910f 2920 enum machine_mode *pmode1, *pmode2;
7afe21cc
RK
2921{
2922 rtx arg1, arg2;
2923
2924 arg1 = *parg1, arg2 = *parg2;
2925
2926 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2927
b2796a4b 2928 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
7afe21cc
RK
2929 {
2930 /* Set non-zero when we find something of interest. */
2931 rtx x = 0;
2932 int reverse_code = 0;
2933 struct table_elt *p = 0;
2934
2935 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2936 On machines with CC0, this is the only case that can occur, since
2937 fold_rtx will return the COMPARE or item being compared with zero
2938 when given CC0. */
2939
2940 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2941 x = arg1;
2942
2943 /* If ARG1 is a comparison operator and CODE is testing for
2944 STORE_FLAG_VALUE, get the inner arguments. */
2945
2946 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
2947 {
c610adec
RK
2948 if (code == NE
2949 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2950 && code == LT && STORE_FLAG_VALUE == -1)
2951#ifdef FLOAT_STORE_FLAG_VALUE
2952 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2953 && FLOAT_STORE_FLAG_VALUE < 0)
2954#endif
2955 )
7afe21cc 2956 x = arg1;
c610adec
RK
2957 else if (code == EQ
2958 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2959 && code == GE && STORE_FLAG_VALUE == -1)
2960#ifdef FLOAT_STORE_FLAG_VALUE
2961 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2962 && FLOAT_STORE_FLAG_VALUE < 0)
2963#endif
2964 )
7afe21cc
RK
2965 x = arg1, reverse_code = 1;
2966 }
2967
2968 /* ??? We could also check for
2969
2970 (ne (and (eq (...) (const_int 1))) (const_int 0))
2971
2972 and related forms, but let's wait until we see them occurring. */
2973
2974 if (x == 0)
2975 /* Look up ARG1 in the hash table and see if it has an equivalence
2976 that lets us see what is being compared. */
2977 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
2978 GET_MODE (arg1));
2979 if (p) p = p->first_same_value;
2980
2981 for (; p; p = p->next_same_value)
2982 {
2983 enum machine_mode inner_mode = GET_MODE (p->exp);
2984
2985 /* If the entry isn't valid, skip it. */
2986 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
2987 continue;
2988
2989 if (GET_CODE (p->exp) == COMPARE
2990 /* Another possibility is that this machine has a compare insn
2991 that includes the comparison code. In that case, ARG1 would
2992 be equivalent to a comparison operation that would set ARG1 to
2993 either STORE_FLAG_VALUE or zero. If this is an NE operation,
2994 ORIG_CODE is the actual comparison being done; if it is an EQ,
2995 we must reverse ORIG_CODE. On machine with a negative value
2996 for STORE_FLAG_VALUE, also look at LT and GE operations. */
2997 || ((code == NE
2998 || (code == LT
c610adec 2999 && GET_MODE_CLASS (inner_mode) == MODE_INT
906c4e36
RK
3000 && (GET_MODE_BITSIZE (inner_mode)
3001 <= HOST_BITS_PER_WIDE_INT)
7afe21cc 3002 && (STORE_FLAG_VALUE
906c4e36
RK
3003 & ((HOST_WIDE_INT) 1
3004 << (GET_MODE_BITSIZE (inner_mode) - 1))))
c610adec
RK
3005#ifdef FLOAT_STORE_FLAG_VALUE
3006 || (code == LT
3007 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3008 && FLOAT_STORE_FLAG_VALUE < 0)
3009#endif
3010 )
7afe21cc
RK
3011 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3012 {
3013 x = p->exp;
3014 break;
3015 }
3016 else if ((code == EQ
3017 || (code == GE
c610adec 3018 && GET_MODE_CLASS (inner_mode) == MODE_INT
906c4e36
RK
3019 && (GET_MODE_BITSIZE (inner_mode)
3020 <= HOST_BITS_PER_WIDE_INT)
7afe21cc 3021 && (STORE_FLAG_VALUE
906c4e36
RK
3022 & ((HOST_WIDE_INT) 1
3023 << (GET_MODE_BITSIZE (inner_mode) - 1))))
c610adec
RK
3024#ifdef FLOAT_STORE_FLAG_VALUE
3025 || (code == GE
3026 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3027 && FLOAT_STORE_FLAG_VALUE < 0)
3028#endif
3029 )
7afe21cc
RK
3030 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3031 {
3032 reverse_code = 1;
3033 x = p->exp;
3034 break;
3035 }
3036
3037 /* If this is fp + constant, the equivalent is a better operand since
3038 it may let us predict the value of the comparison. */
3039 else if (NONZERO_BASE_PLUS_P (p->exp))
3040 {
3041 arg1 = p->exp;
3042 continue;
3043 }
3044 }
3045
3046 /* If we didn't find a useful equivalence for ARG1, we are done.
3047 Otherwise, set up for the next iteration. */
3048 if (x == 0)
3049 break;
3050
3051 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3052 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3053 code = GET_CODE (x);
3054
3055 if (reverse_code)
3056 code = reverse_condition (code);
3057 }
3058
13c9910f
RS
3059 /* Return our results. Return the modes from before fold_rtx
3060 because fold_rtx might produce const_int, and then it's too late. */
3061 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
7afe21cc
RK
3062 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3063
3064 return code;
3065}
3066\f
3067/* Try to simplify a unary operation CODE whose output mode is to be
3068 MODE with input operand OP whose mode was originally OP_MODE.
3069 Return zero if no simplification can be made. */
3070
3071rtx
3072simplify_unary_operation (code, mode, op, op_mode)
3073 enum rtx_code code;
3074 enum machine_mode mode;
3075 rtx op;
3076 enum machine_mode op_mode;
3077{
3078 register int width = GET_MODE_BITSIZE (mode);
3079
3080 /* The order of these tests is critical so that, for example, we don't
3081 check the wrong mode (input vs. output) for a conversion operation,
3082 such as FIX. At some point, this should be simplified. */
3083
62c0ea12 3084#if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC)
7afe21cc 3085
62c0ea12
RK
3086 if (code == FLOAT && GET_MODE (op) == VOIDmode
3087 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
7afe21cc 3088 {
62c0ea12 3089 HOST_WIDE_INT hv, lv;
7afe21cc
RK
3090 REAL_VALUE_TYPE d;
3091
62c0ea12
RK
3092 if (GET_CODE (op) == CONST_INT)
3093 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
3094 else
7ac4a266 3095 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
7afe21cc
RK
3096
3097#ifdef REAL_ARITHMETIC
2ebcccf3 3098 REAL_VALUE_FROM_INT (d, lv, hv, mode);
7afe21cc 3099#else
62c0ea12 3100 if (hv < 0)
7afe21cc 3101 {
62c0ea12 3102 d = (double) (~ hv);
906c4e36
RK
3103 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3104 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
62c0ea12 3105 d += (double) (unsigned HOST_WIDE_INT) (~ lv);
7afe21cc
RK
3106 d = (- d - 1.0);
3107 }
3108 else
3109 {
62c0ea12 3110 d = (double) hv;
906c4e36
RK
3111 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3112 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
62c0ea12 3113 d += (double) (unsigned HOST_WIDE_INT) lv;
7afe21cc
RK
3114 }
3115#endif /* REAL_ARITHMETIC */
940fd0b5 3116 d = real_value_truncate (mode, d);
7afe21cc
RK
3117 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3118 }
62c0ea12
RK
3119 else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
3120 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
7afe21cc 3121 {
62c0ea12 3122 HOST_WIDE_INT hv, lv;
7afe21cc
RK
3123 REAL_VALUE_TYPE d;
3124
62c0ea12
RK
3125 if (GET_CODE (op) == CONST_INT)
3126 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
3127 else
7ac4a266 3128 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
62c0ea12 3129
a9c6464d
RK
3130 if (op_mode == VOIDmode)
3131 {
3132 /* We don't know how to interpret negative-looking numbers in
3133 this case, so don't try to fold those. */
3134 if (hv < 0)
3135 return 0;
3136 }
3137 else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
62c0ea12
RK
3138 ;
3139 else
3140 hv = 0, lv &= GET_MODE_MASK (op_mode);
3141
7afe21cc 3142#ifdef REAL_ARITHMETIC
2ebcccf3 3143 REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv, mode);
7afe21cc 3144#else
62c0ea12 3145
138cec59 3146 d = (double) (unsigned HOST_WIDE_INT) hv;
906c4e36
RK
3147 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3148 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
62c0ea12 3149 d += (double) (unsigned HOST_WIDE_INT) lv;
7afe21cc 3150#endif /* REAL_ARITHMETIC */
940fd0b5 3151 d = real_value_truncate (mode, d);
7afe21cc
RK
3152 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3153 }
3154#endif
3155
f89e32e9
RK
3156 if (GET_CODE (op) == CONST_INT
3157 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
7afe21cc 3158 {
906c4e36
RK
3159 register HOST_WIDE_INT arg0 = INTVAL (op);
3160 register HOST_WIDE_INT val;
7afe21cc
RK
3161
3162 switch (code)
3163 {
3164 case NOT:
3165 val = ~ arg0;
3166 break;
3167
3168 case NEG:
3169 val = - arg0;
3170 break;
3171
3172 case ABS:
3173 val = (arg0 >= 0 ? arg0 : - arg0);
3174 break;
3175
3176 case FFS:
3177 /* Don't use ffs here. Instead, get low order bit and then its
3178 number. If arg0 is zero, this will return 0, as desired. */
3179 arg0 &= GET_MODE_MASK (mode);
3180 val = exact_log2 (arg0 & (- arg0)) + 1;
3181 break;
3182
3183 case TRUNCATE:
3184 val = arg0;
3185 break;
3186
3187 case ZERO_EXTEND:
3188 if (op_mode == VOIDmode)
3189 op_mode = mode;
82a5e898 3190 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
d80e9fd7
RS
3191 {
3192 /* If we were really extending the mode,
3193 we would have to distinguish between zero-extension
3194 and sign-extension. */
3195 if (width != GET_MODE_BITSIZE (op_mode))
3196 abort ();
3197 val = arg0;
3198 }
82a5e898
CH
3199 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3200 val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
7afe21cc
RK
3201 else
3202 return 0;
3203 break;
3204
3205 case SIGN_EXTEND:
3206 if (op_mode == VOIDmode)
3207 op_mode = mode;
82a5e898 3208 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
d80e9fd7
RS
3209 {
3210 /* If we were really extending the mode,
3211 we would have to distinguish between zero-extension
3212 and sign-extension. */
3213 if (width != GET_MODE_BITSIZE (op_mode))
3214 abort ();
3215 val = arg0;
3216 }
f12564b4 3217 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
7afe21cc 3218 {
82a5e898
CH
3219 val
3220 = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3221 if (val
3222 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
3223 val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
7afe21cc
RK
3224 }
3225 else
3226 return 0;
3227 break;
3228
d45cf215
RS
3229 case SQRT:
3230 return 0;
3231
7afe21cc
RK
3232 default:
3233 abort ();
3234 }
3235
3236 /* Clear the bits that don't belong in our mode,
3237 unless they and our sign bit are all one.
3238 So we get either a reasonable negative value or a reasonable
3239 unsigned value for this mode. */
906c4e36
RK
3240 if (width < HOST_BITS_PER_WIDE_INT
3241 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3242 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4879acf6 3243 val &= ((HOST_WIDE_INT) 1 << width) - 1;
7afe21cc 3244
737e7965
JW
3245 /* If this would be an entire word for the target, but is not for
3246 the host, then sign-extend on the host so that the number will look
3247 the same way on the host that it would on the target.
3248
3249 For example, when building a 64 bit alpha hosted 32 bit sparc
3250 targeted compiler, then we want the 32 bit unsigned value -1 to be
3251 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
3252 The later confuses the sparc backend. */
3253
3254 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
3255 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
3256 val |= ((HOST_WIDE_INT) (-1) << width);
3257
906c4e36 3258 return GEN_INT (val);
7afe21cc
RK
3259 }
3260
3261 /* We can do some operations on integer CONST_DOUBLEs. Also allow
0f41302f 3262 for a DImode operation on a CONST_INT. */
8e0ac43b 3263 else if (GET_MODE (op) == VOIDmode && width <= HOST_BITS_PER_INT * 2
7afe21cc
RK
3264 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3265 {
906c4e36 3266 HOST_WIDE_INT l1, h1, lv, hv;
7afe21cc
RK
3267
3268 if (GET_CODE (op) == CONST_DOUBLE)
3269 l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
3270 else
3271 l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;
3272
3273 switch (code)
3274 {
3275 case NOT:
3276 lv = ~ l1;
3277 hv = ~ h1;
3278 break;
3279
3280 case NEG:
3281 neg_double (l1, h1, &lv, &hv);
3282 break;
3283
3284 case ABS:
3285 if (h1 < 0)
3286 neg_double (l1, h1, &lv, &hv);
3287 else
3288 lv = l1, hv = h1;
3289 break;
3290
3291 case FFS:
3292 hv = 0;
3293 if (l1 == 0)
906c4e36 3294 lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
7afe21cc
RK
3295 else
3296 lv = exact_log2 (l1 & (-l1)) + 1;
3297 break;
3298
3299 case TRUNCATE:
8e0ac43b 3300 /* This is just a change-of-mode, so do nothing. */
d50d63c0 3301 lv = l1, hv = h1;
7afe21cc
RK
3302 break;
3303
f72aed24
RS
3304 case ZERO_EXTEND:
3305 if (op_mode == VOIDmode
906c4e36 3306 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
f72aed24
RS
3307 return 0;
3308
3309 hv = 0;
3310 lv = l1 & GET_MODE_MASK (op_mode);
3311 break;
3312
3313 case SIGN_EXTEND:
3314 if (op_mode == VOIDmode
906c4e36 3315 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
f72aed24
RS
3316 return 0;
3317 else
3318 {
3319 lv = l1 & GET_MODE_MASK (op_mode);
906c4e36
RK
3320 if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
3321 && (lv & ((HOST_WIDE_INT) 1
3322 << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
3323 lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
f72aed24 3324
906c4e36 3325 hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
f72aed24
RS
3326 }
3327 break;
3328
d45cf215
RS
3329 case SQRT:
3330 return 0;
3331
7afe21cc
RK
3332 default:
3333 return 0;
3334 }
3335
3336 return immed_double_const (lv, hv, mode);
3337 }
3338
3339#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3340 else if (GET_CODE (op) == CONST_DOUBLE
3341 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3342 {
3343 REAL_VALUE_TYPE d;
3344 jmp_buf handler;
3345 rtx x;
3346
3347 if (setjmp (handler))
3348 /* There used to be a warning here, but that is inadvisable.
3349 People may want to cause traps, and the natural way
3350 to do it should not get a warning. */
3351 return 0;
3352
3353 set_float_handler (handler);
3354
3355 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3356
3357 switch (code)
3358 {
3359 case NEG:
3360 d = REAL_VALUE_NEGATE (d);
3361 break;
3362
3363 case ABS:
8b3686ed 3364 if (REAL_VALUE_NEGATIVE (d))
7afe21cc
RK
3365 d = REAL_VALUE_NEGATE (d);
3366 break;
3367
3368 case FLOAT_TRUNCATE:
d3159aee 3369 d = real_value_truncate (mode, d);
7afe21cc
RK
3370 break;
3371
3372 case FLOAT_EXTEND:
3373 /* All this does is change the mode. */
3374 break;
3375
3376 case FIX:
d3159aee 3377 d = REAL_VALUE_RNDZINT (d);
7afe21cc
RK
3378 break;
3379
3380 case UNSIGNED_FIX:
d3159aee 3381 d = REAL_VALUE_UNSIGNED_RNDZINT (d);
7afe21cc
RK
3382 break;
3383
d45cf215
RS
3384 case SQRT:
3385 return 0;
3386
7afe21cc
RK
3387 default:
3388 abort ();
3389 }
3390
560c94a2 3391 x = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
906c4e36 3392 set_float_handler (NULL_PTR);
7afe21cc
RK
3393 return x;
3394 }
8e0ac43b
RK
3395
3396 else if (GET_CODE (op) == CONST_DOUBLE
3397 && GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
3398 && GET_MODE_CLASS (mode) == MODE_INT
906c4e36 3399 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
7afe21cc
RK
3400 {
3401 REAL_VALUE_TYPE d;
3402 jmp_buf handler;
906c4e36 3403 HOST_WIDE_INT val;
7afe21cc
RK
3404
3405 if (setjmp (handler))
3406 return 0;
3407
3408 set_float_handler (handler);
3409
3410 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3411
3412 switch (code)
3413 {
3414 case FIX:
3415 val = REAL_VALUE_FIX (d);
3416 break;
3417
3418 case UNSIGNED_FIX:
3419 val = REAL_VALUE_UNSIGNED_FIX (d);
3420 break;
3421
3422 default:
3423 abort ();
3424 }
3425
906c4e36 3426 set_float_handler (NULL_PTR);
7afe21cc
RK
3427
3428 /* Clear the bits that don't belong in our mode,
3429 unless they and our sign bit are all one.
3430 So we get either a reasonable negative value or a reasonable
3431 unsigned value for this mode. */
906c4e36
RK
3432 if (width < HOST_BITS_PER_WIDE_INT
3433 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3434 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3435 val &= ((HOST_WIDE_INT) 1 << width) - 1;
7afe21cc 3436
ad89d6f6
TG
3437 /* If this would be an entire word for the target, but is not for
3438 the host, then sign-extend on the host so that the number will look
3439 the same way on the host that it would on the target.
3440
3441 For example, when building a 64 bit alpha hosted 32 bit sparc
3442 targeted compiler, then we want the 32 bit unsigned value -1 to be
3443 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
3444 The later confuses the sparc backend. */
3445
3446 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
3447 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
3448 val |= ((HOST_WIDE_INT) (-1) << width);
3449
906c4e36 3450 return GEN_INT (val);
7afe21cc
RK
3451 }
3452#endif
a6acbe15
RS
3453 /* This was formerly used only for non-IEEE float.
3454 eggert@twinsun.com says it is safe for IEEE also. */
3455 else
7afe21cc
RK
3456 {
3457 /* There are some simplifications we can do even if the operands
a6acbe15 3458 aren't constant. */
7afe21cc
RK
3459 switch (code)
3460 {
3461 case NEG:
3462 case NOT:
3463 /* (not (not X)) == X, similarly for NEG. */
3464 if (GET_CODE (op) == code)
3465 return XEXP (op, 0);
3466 break;
3467
3468 case SIGN_EXTEND:
3469 /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
3470 becomes just the MINUS if its mode is MODE. This allows
3471 folding switch statements on machines using casesi (such as
3472 the Vax). */
3473 if (GET_CODE (op) == TRUNCATE
3474 && GET_MODE (XEXP (op, 0)) == mode
3475 && GET_CODE (XEXP (op, 0)) == MINUS
3476 && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
3477 && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
3478 return XEXP (op, 0);
cceb347c
RK
3479
3480#ifdef POINTERS_EXTEND_UNSIGNED
3481 if (! POINTERS_EXTEND_UNSIGNED
3482 && mode == Pmode && GET_MODE (op) == ptr_mode
3483 && CONSTANT_P (op))
3484 return convert_memory_address (Pmode, op);
3485#endif
3486 break;
3487
3488#ifdef POINTERS_EXTEND_UNSIGNED
3489 case ZERO_EXTEND:
3490 if (POINTERS_EXTEND_UNSIGNED
3491 && mode == Pmode && GET_MODE (op) == ptr_mode
3492 && CONSTANT_P (op))
3493 return convert_memory_address (Pmode, op);
7afe21cc 3494 break;
cceb347c 3495#endif
e9a25f70
JL
3496
3497 default:
3498 break;
7afe21cc
RK
3499 }
3500
3501 return 0;
3502 }
7afe21cc
RK
3503}
3504\f
3505/* Simplify a binary operation CODE with result mode MODE, operating on OP0
3506 and OP1. Return 0 if no simplification is possible.
3507
3508 Don't use this for relational operations such as EQ or LT.
3509 Use simplify_relational_operation instead. */
3510
3511rtx
3512simplify_binary_operation (code, mode, op0, op1)
3513 enum rtx_code code;
3514 enum machine_mode mode;
3515 rtx op0, op1;
3516{
906c4e36
RK
3517 register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3518 HOST_WIDE_INT val;
7afe21cc 3519 int width = GET_MODE_BITSIZE (mode);
96b0e481 3520 rtx tem;
7afe21cc
RK
3521
3522 /* Relational operations don't work here. We must know the mode
3523 of the operands in order to do the comparison correctly.
3524 Assuming a full word can give incorrect results.
3525 Consider comparing 128 with -128 in QImode. */
3526
3527 if (GET_RTX_CLASS (code) == '<')
3528 abort ();
3529
3530#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3531 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3532 && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3533 && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3534 {
3535 REAL_VALUE_TYPE f0, f1, value;
3536 jmp_buf handler;
3537
3538 if (setjmp (handler))
3539 return 0;
3540
3541 set_float_handler (handler);
3542
3543 REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3544 REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
5352b11a
RS
3545 f0 = real_value_truncate (mode, f0);
3546 f1 = real_value_truncate (mode, f1);
7afe21cc
RK
3547
3548#ifdef REAL_ARITHMETIC
956d6950
JL
3549#ifndef REAL_INFINITY
3550 if (code == DIV && REAL_VALUES_EQUAL (f1, dconst0))
3551 return 0;
3552#endif
d3159aee 3553 REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
7afe21cc
RK
3554#else
3555 switch (code)
3556 {
3557 case PLUS:
3558 value = f0 + f1;
3559 break;
3560 case MINUS:
3561 value = f0 - f1;
3562 break;
3563 case MULT:
3564 value = f0 * f1;
3565 break;
3566 case DIV:
3567#ifndef REAL_INFINITY
3568 if (f1 == 0)
21d12b80 3569 return 0;
7afe21cc
RK
3570#endif
3571 value = f0 / f1;
3572 break;
3573 case SMIN:
3574 value = MIN (f0, f1);
3575 break;
3576 case SMAX:
3577 value = MAX (f0, f1);
3578 break;
3579 default:
3580 abort ();
3581 }
3582#endif
3583
5352b11a 3584 value = real_value_truncate (mode, value);
831522a4 3585 set_float_handler (NULL_PTR);
560c94a2 3586 return CONST_DOUBLE_FROM_REAL_VALUE (value, mode);
7afe21cc 3587 }
6076248a 3588#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
7afe21cc
RK
3589
3590 /* We can fold some multi-word operations. */
6076248a 3591 if (GET_MODE_CLASS (mode) == MODE_INT
33085906 3592 && width == HOST_BITS_PER_WIDE_INT * 2
fe873240 3593 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
6076248a 3594 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
7afe21cc 3595 {
906c4e36 3596 HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
7afe21cc 3597
fe873240
RK
3598 if (GET_CODE (op0) == CONST_DOUBLE)
3599 l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3600 else
3601 l1 = INTVAL (op0), h1 = l1 < 0 ? -1 : 0;
7afe21cc
RK
3602
3603 if (GET_CODE (op1) == CONST_DOUBLE)
3604 l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3605 else
3606 l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3607
3608 switch (code)
3609 {
3610 case MINUS:
3611 /* A - B == A + (-B). */
3612 neg_double (l2, h2, &lv, &hv);
3613 l2 = lv, h2 = hv;
3614
0f41302f 3615 /* .. fall through ... */
7afe21cc
RK
3616
3617 case PLUS:
3618 add_double (l1, h1, l2, h2, &lv, &hv);
3619 break;
3620
3621 case MULT:
3622 mul_double (l1, h1, l2, h2, &lv, &hv);
3623 break;
3624
3625 case DIV: case MOD: case UDIV: case UMOD:
3626 /* We'd need to include tree.h to do this and it doesn't seem worth
3627 it. */
3628 return 0;
3629
3630 case AND:
3631 lv = l1 & l2, hv = h1 & h2;
3632 break;
3633
3634 case IOR:
3635 lv = l1 | l2, hv = h1 | h2;
3636 break;
3637
3638 case XOR:
3639 lv = l1 ^ l2, hv = h1 ^ h2;
3640 break;
3641
3642 case SMIN:
906c4e36
RK
3643 if (h1 < h2
3644 || (h1 == h2
3645 && ((unsigned HOST_WIDE_INT) l1
3646 < (unsigned HOST_WIDE_INT) l2)))
7afe21cc
RK
3647 lv = l1, hv = h1;
3648 else
3649 lv = l2, hv = h2;
3650 break;
3651
3652 case SMAX:
906c4e36
RK
3653 if (h1 > h2
3654 || (h1 == h2
3655 && ((unsigned HOST_WIDE_INT) l1
3656 > (unsigned HOST_WIDE_INT) l2)))
7afe21cc
RK
3657 lv = l1, hv = h1;
3658 else
3659 lv = l2, hv = h2;
3660 break;
3661
3662 case UMIN:
906c4e36
RK
3663 if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3664 || (h1 == h2
3665 && ((unsigned HOST_WIDE_INT) l1
3666 < (unsigned HOST_WIDE_INT) l2)))
7afe21cc
RK
3667 lv = l1, hv = h1;
3668 else
3669 lv = l2, hv = h2;
3670 break;
3671
3672 case UMAX:
906c4e36
RK
3673 if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3674 || (h1 == h2
3675 && ((unsigned HOST_WIDE_INT) l1
3676 > (unsigned HOST_WIDE_INT) l2)))
7afe21cc
RK
3677 lv = l1, hv = h1;
3678 else
3679 lv = l2, hv = h2;
3680 break;
3681
3682 case LSHIFTRT: case ASHIFTRT:
45620ed4 3683 case ASHIFT:
7afe21cc
RK
3684 case ROTATE: case ROTATERT:
3685#ifdef SHIFT_COUNT_TRUNCATED
85c0a556
RK
3686 if (SHIFT_COUNT_TRUNCATED)
3687 l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
7afe21cc
RK
3688#endif
3689
3690 if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3691 return 0;
3692
3693 if (code == LSHIFTRT || code == ASHIFTRT)
3694 rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3695 code == ASHIFTRT);
45620ed4
RK
3696 else if (code == ASHIFT)
3697 lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
7afe21cc
RK
3698 else if (code == ROTATE)
3699 lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3700 else /* code == ROTATERT */
3701 rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3702 break;
3703
3704 default:
3705 return 0;
3706 }
3707
3708 return immed_double_const (lv, hv, mode);
3709 }
7afe21cc
RK
3710
3711 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
906c4e36 3712 || width > HOST_BITS_PER_WIDE_INT || width == 0)
7afe21cc
RK
3713 {
3714 /* Even if we can't compute a constant result,
3715 there are some cases worth simplifying. */
3716
3717 switch (code)
3718 {
3719 case PLUS:
3720 /* In IEEE floating point, x+0 is not the same as x. Similarly
3721 for the other optimizations below. */
3722 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
a83afb65 3723 && FLOAT_MODE_P (mode) && ! flag_fast_math)
7afe21cc
RK
3724 break;
3725
3726 if (op1 == CONST0_RTX (mode))
3727 return op0;
3728
7afe21cc
RK
3729 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3730 if (GET_CODE (op0) == NEG)
96b0e481 3731 return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
7afe21cc 3732 else if (GET_CODE (op1) == NEG)
96b0e481 3733 return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
7afe21cc 3734
96b0e481
RK
3735 /* Handle both-operands-constant cases. We can only add
3736 CONST_INTs to constants since the sum of relocatable symbols
fe873240
RK
3737 can't be handled by most assemblers. Don't add CONST_INT
3738 to CONST_INT since overflow won't be computed properly if wider
3739 than HOST_BITS_PER_WIDE_INT. */
7afe21cc 3740
fe873240
RK
3741 if (CONSTANT_P (op0) && GET_MODE (op0) != VOIDmode
3742 && GET_CODE (op1) == CONST_INT)
96b0e481 3743 return plus_constant (op0, INTVAL (op1));
fe873240
RK
3744 else if (CONSTANT_P (op1) && GET_MODE (op1) != VOIDmode
3745 && GET_CODE (op0) == CONST_INT)
96b0e481 3746 return plus_constant (op1, INTVAL (op0));
7afe21cc 3747
30d69925
RK
3748 /* See if this is something like X * C - X or vice versa or
3749 if the multiplication is written as a shift. If so, we can
3750 distribute and make a new multiply, shift, or maybe just
3751 have X (if C is 2 in the example above). But don't make
3752 real multiply if we didn't have one before. */
3753
3754 if (! FLOAT_MODE_P (mode))
3755 {
3756 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3757 rtx lhs = op0, rhs = op1;
3758 int had_mult = 0;
3759
3760 if (GET_CODE (lhs) == NEG)
3761 coeff0 = -1, lhs = XEXP (lhs, 0);
3762 else if (GET_CODE (lhs) == MULT
3763 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3764 {
3765 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3766 had_mult = 1;
3767 }
3768 else if (GET_CODE (lhs) == ASHIFT
3769 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3770 && INTVAL (XEXP (lhs, 1)) >= 0
3771 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3772 {
3773 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3774 lhs = XEXP (lhs, 0);
3775 }
3776
3777 if (GET_CODE (rhs) == NEG)
3778 coeff1 = -1, rhs = XEXP (rhs, 0);
3779 else if (GET_CODE (rhs) == MULT
3780 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3781 {
3782 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3783 had_mult = 1;
3784 }
3785 else if (GET_CODE (rhs) == ASHIFT
3786 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3787 && INTVAL (XEXP (rhs, 1)) >= 0
3788 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3789 {
3790 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3791 rhs = XEXP (rhs, 0);
3792 }
3793
3794 if (rtx_equal_p (lhs, rhs))
3795 {
3796 tem = cse_gen_binary (MULT, mode, lhs,
3797 GEN_INT (coeff0 + coeff1));
3798 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3799 }
3800 }
3801
96b0e481
RK
3802 /* If one of the operands is a PLUS or a MINUS, see if we can
3803 simplify this by the associative law.
3804 Don't use the associative law for floating point.
3805 The inaccuracy makes it nonassociative,
3806 and subtle programs can break if operations are associated. */
7afe21cc 3807
cbf6a543 3808 if (INTEGRAL_MODE_P (mode)
96b0e481
RK
3809 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3810 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3811 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3812 return tem;
7afe21cc
RK
3813 break;
3814
3815 case COMPARE:
3816#ifdef HAVE_cc0
3817 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3818 using cc0, in which case we want to leave it as a COMPARE
3819 so we can distinguish it from a register-register-copy.
3820
3821 In IEEE floating point, x-0 is not the same as x. */
3822
3823 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
a83afb65 3824 || ! FLOAT_MODE_P (mode) || flag_fast_math)
7afe21cc
RK
3825 && op1 == CONST0_RTX (mode))
3826 return op0;
3827#else
3828 /* Do nothing here. */
3829#endif
3830 break;
3831
3832 case MINUS:
21648b45
RK
3833 /* None of these optimizations can be done for IEEE
3834 floating point. */
3835 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
a83afb65 3836 && FLOAT_MODE_P (mode) && ! flag_fast_math)
21648b45
RK
3837 break;
3838
a83afb65
RK
3839 /* We can't assume x-x is 0 even with non-IEEE floating point,
3840 but since it is zero except in very strange circumstances, we
3841 will treat it as zero with -ffast-math. */
7afe21cc
RK
3842 if (rtx_equal_p (op0, op1)
3843 && ! side_effects_p (op0)
a83afb65
RK
3844 && (! FLOAT_MODE_P (mode) || flag_fast_math))
3845 return CONST0_RTX (mode);
7afe21cc
RK
3846
3847 /* Change subtraction from zero into negation. */
3848 if (op0 == CONST0_RTX (mode))
38a448ca 3849 return gen_rtx_NEG (mode, op1);
7afe21cc 3850
96b0e481
RK
3851 /* (-1 - a) is ~a. */
3852 if (op0 == constm1_rtx)
38a448ca 3853 return gen_rtx_NOT (mode, op1);
96b0e481 3854
7afe21cc
RK
3855 /* Subtracting 0 has no effect. */
3856 if (op1 == CONST0_RTX (mode))
3857 return op0;
3858
30d69925
RK
3859 /* See if this is something like X * C - X or vice versa or
3860 if the multiplication is written as a shift. If so, we can
3861 distribute and make a new multiply, shift, or maybe just
3862 have X (if C is 2 in the example above). But don't make
3863 real multiply if we didn't have one before. */
3864
3865 if (! FLOAT_MODE_P (mode))
3866 {
3867 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3868 rtx lhs = op0, rhs = op1;
3869 int had_mult = 0;
3870
3871 if (GET_CODE (lhs) == NEG)
3872 coeff0 = -1, lhs = XEXP (lhs, 0);
3873 else if (GET_CODE (lhs) == MULT
3874 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3875 {
3876 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3877 had_mult = 1;
3878 }
3879 else if (GET_CODE (lhs) == ASHIFT
3880 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3881 && INTVAL (XEXP (lhs, 1)) >= 0
3882 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3883 {
3884 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3885 lhs = XEXP (lhs, 0);
3886 }
3887
3888 if (GET_CODE (rhs) == NEG)
3889 coeff1 = - 1, rhs = XEXP (rhs, 0);
3890 else if (GET_CODE (rhs) == MULT
3891 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3892 {
3893 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3894 had_mult = 1;
3895 }
3896 else if (GET_CODE (rhs) == ASHIFT
3897 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3898 && INTVAL (XEXP (rhs, 1)) >= 0
3899 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3900 {
3901 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3902 rhs = XEXP (rhs, 0);
3903 }
3904
3905 if (rtx_equal_p (lhs, rhs))
3906 {
3907 tem = cse_gen_binary (MULT, mode, lhs,
3908 GEN_INT (coeff0 - coeff1));
3909 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3910 }
3911 }
3912
7afe21cc
RK
3913 /* (a - (-b)) -> (a + b). */
3914 if (GET_CODE (op1) == NEG)
96b0e481 3915 return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
7afe21cc 3916
96b0e481
RK
3917 /* If one of the operands is a PLUS or a MINUS, see if we can
3918 simplify this by the associative law.
3919 Don't use the associative law for floating point.
7afe21cc
RK
3920 The inaccuracy makes it nonassociative,
3921 and subtle programs can break if operations are associated. */
7afe21cc 3922
cbf6a543 3923 if (INTEGRAL_MODE_P (mode)
96b0e481
RK
3924 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3925 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3926 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3927 return tem;
7afe21cc
RK
3928
3929 /* Don't let a relocatable value get a negative coeff. */
b5a09c41 3930 if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
7afe21cc 3931 return plus_constant (op0, - INTVAL (op1));
29d72c4b
TG
3932
3933 /* (x - (x & y)) -> (x & ~y) */
3934 if (GET_CODE (op1) == AND)
3935 {
3936 if (rtx_equal_p (op0, XEXP (op1, 0)))
38a448ca 3937 return cse_gen_binary (AND, mode, op0, gen_rtx_NOT (mode, XEXP (op1, 1)));
29d72c4b 3938 if (rtx_equal_p (op0, XEXP (op1, 1)))
38a448ca 3939 return cse_gen_binary (AND, mode, op0, gen_rtx_NOT (mode, XEXP (op1, 0)));
29d72c4b 3940 }
7afe21cc
RK
3941 break;
3942
3943 case MULT:
3944 if (op1 == constm1_rtx)
3945 {
96b0e481 3946 tem = simplify_unary_operation (NEG, mode, op0, mode);
7afe21cc 3947
38a448ca 3948 return tem ? tem : gen_rtx_NEG (mode, op0);
7afe21cc
RK
3949 }
3950
3951 /* In IEEE floating point, x*0 is not always 0. */
3952 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
a83afb65 3953 || ! FLOAT_MODE_P (mode) || flag_fast_math)
7afe21cc
RK
3954 && op1 == CONST0_RTX (mode)
3955 && ! side_effects_p (op0))
3956 return op1;
3957
3958 /* In IEEE floating point, x*1 is not equivalent to x for nans.
3959 However, ANSI says we can drop signals,
3960 so we can do this anyway. */
3961 if (op1 == CONST1_RTX (mode))
3962 return op0;
3963
c407b802
RK
3964 /* Convert multiply by constant power of two into shift unless
3965 we are still generating RTL. This test is a kludge. */
7afe21cc 3966 if (GET_CODE (op1) == CONST_INT
c407b802 3967 && (val = exact_log2 (INTVAL (op1))) >= 0
2d917903
JW
3968 /* If the mode is larger than the host word size, and the
3969 uppermost bit is set, then this isn't a power of two due
3970 to implicit sign extension. */
3971 && (width <= HOST_BITS_PER_WIDE_INT
3972 || val != HOST_BITS_PER_WIDE_INT - 1)
c407b802 3973 && ! rtx_equal_function_value_matters)
38a448ca 3974 return gen_rtx_ASHIFT (mode, op0, GEN_INT (val));
7afe21cc
RK
3975
3976 if (GET_CODE (op1) == CONST_DOUBLE
3977 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
3978 {
3979 REAL_VALUE_TYPE d;
5a3d4bef
RK
3980 jmp_buf handler;
3981 int op1is2, op1ism1;
3982
3983 if (setjmp (handler))
3984 return 0;
3985
3986 set_float_handler (handler);
7afe21cc 3987 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
5a3d4bef
RK
3988 op1is2 = REAL_VALUES_EQUAL (d, dconst2);
3989 op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
3990 set_float_handler (NULL_PTR);
7afe21cc
RK
3991
3992 /* x*2 is x+x and x*(-1) is -x */
5a3d4bef 3993 if (op1is2 && GET_MODE (op0) == mode)
38a448ca 3994 return gen_rtx_PLUS (mode, op0, copy_rtx (op0));
7afe21cc 3995
5a3d4bef 3996 else if (op1ism1 && GET_MODE (op0) == mode)
38a448ca 3997 return gen_rtx_NEG (mode, op0);
7afe21cc
RK
3998 }
3999 break;
4000
4001 case IOR:
4002 if (op1 == const0_rtx)
4003 return op0;
4004 if (GET_CODE (op1) == CONST_INT
4005 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4006 return op1;
4007 if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4008 return op0;
4009 /* A | (~A) -> -1 */
4010 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
4011 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
31dcf83f 4012 && ! side_effects_p (op0)
8e7e5365 4013 && GET_MODE_CLASS (mode) != MODE_CC)
7afe21cc
RK
4014 return constm1_rtx;
4015 break;
4016
4017 case XOR:
4018 if (op1 == const0_rtx)
4019 return op0;
4020 if (GET_CODE (op1) == CONST_INT
4021 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
38a448ca 4022 return gen_rtx_NOT (mode, op0);
31dcf83f 4023 if (op0 == op1 && ! side_effects_p (op0)
8e7e5365 4024 && GET_MODE_CLASS (mode) != MODE_CC)
7afe21cc
RK
4025 return const0_rtx;
4026 break;
4027
4028 case AND:
4029 if (op1 == const0_rtx && ! side_effects_p (op0))
4030 return const0_rtx;
4031 if (GET_CODE (op1) == CONST_INT
4032 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4033 return op0;
31dcf83f 4034 if (op0 == op1 && ! side_effects_p (op0)
8e7e5365 4035 && GET_MODE_CLASS (mode) != MODE_CC)
7afe21cc
RK
4036 return op0;
4037 /* A & (~A) -> 0 */
4038 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
4039 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
709ab4fc 4040 && ! side_effects_p (op0)
8e7e5365 4041 && GET_MODE_CLASS (mode) != MODE_CC)
7afe21cc
RK
4042 return const0_rtx;
4043 break;
4044
4045 case UDIV:
4046 /* Convert divide by power of two into shift (divide by 1 handled
4047 below). */
4048 if (GET_CODE (op1) == CONST_INT
4049 && (arg1 = exact_log2 (INTVAL (op1))) > 0)
38a448ca 4050 return gen_rtx_LSHIFTRT (mode, op0, GEN_INT (arg1));
7afe21cc 4051
0f41302f 4052 /* ... fall through ... */
7afe21cc
RK
4053
4054 case DIV:
4055 if (op1 == CONST1_RTX (mode))
4056 return op0;
e7a522ba
RS
4057
4058 /* In IEEE floating point, 0/x is not always 0. */
4059 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
a83afb65 4060 || ! FLOAT_MODE_P (mode) || flag_fast_math)
e7a522ba
RS
4061 && op0 == CONST0_RTX (mode)
4062 && ! side_effects_p (op1))
7afe21cc 4063 return op0;
e7a522ba 4064
7afe21cc 4065#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
a83afb65
RK
4066 /* Change division by a constant into multiplication. Only do
4067 this with -ffast-math until an expert says it is safe in
4068 general. */
7afe21cc
RK
4069 else if (GET_CODE (op1) == CONST_DOUBLE
4070 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
a83afb65
RK
4071 && op1 != CONST0_RTX (mode)
4072 && flag_fast_math)
7afe21cc
RK
4073 {
4074 REAL_VALUE_TYPE d;
4075 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
a83afb65
RK
4076
4077 if (! REAL_VALUES_EQUAL (d, dconst0))
4078 {
7afe21cc 4079#if defined (REAL_ARITHMETIC)
a83afb65 4080 REAL_ARITHMETIC (d, rtx_to_tree_code (DIV), dconst1, d);
38a448ca
RH
4081 return gen_rtx_MULT (mode, op0,
4082 CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
7afe21cc 4083#else
38a448ca
RH
4084 return gen_rtx_MULT (mode, op0,
4085 CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
7afe21cc 4086#endif
a83afb65
RK
4087 }
4088 }
7afe21cc
RK
4089#endif
4090 break;
4091
4092 case UMOD:
4093 /* Handle modulus by power of two (mod with 1 handled below). */
4094 if (GET_CODE (op1) == CONST_INT
4095 && exact_log2 (INTVAL (op1)) > 0)
38a448ca 4096 return gen_rtx_AND (mode, op0, GEN_INT (INTVAL (op1) - 1));
7afe21cc 4097
0f41302f 4098 /* ... fall through ... */
7afe21cc
RK
4099
4100 case MOD:
4101 if ((op0 == const0_rtx || op1 == const1_rtx)
4102 && ! side_effects_p (op0) && ! side_effects_p (op1))
4103 return const0_rtx;
4104 break;
4105
4106 case ROTATERT:
4107 case ROTATE:
4108 /* Rotating ~0 always results in ~0. */
906c4e36 4109 if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
7afe21cc
RK
4110 && INTVAL (op0) == GET_MODE_MASK (mode)
4111 && ! side_effects_p (op1))
4112 return op0;
4113
0f41302f 4114 /* ... fall through ... */
7afe21cc 4115
7afe21cc
RK
4116 case ASHIFT:
4117 case ASHIFTRT:
4118 case LSHIFTRT:
4119 if (op1 == const0_rtx)
4120 return op0;
4121 if (op0 == const0_rtx && ! side_effects_p (op1))
4122 return op0;
4123 break;
4124
4125 case SMIN:
906c4e36
RK
4126 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
4127 && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width -1)
7afe21cc
RK
4128 && ! side_effects_p (op0))
4129 return op1;
4130 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4131 return op0;
4132 break;
4133
4134 case SMAX:
906c4e36 4135 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
dbbe6445
RK
4136 && (INTVAL (op1)
4137 == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
7afe21cc
RK
4138 && ! side_effects_p (op0))
4139 return op1;
4140 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4141 return op0;
4142 break;
4143
4144 case UMIN:
4145 if (op1 == const0_rtx && ! side_effects_p (op0))
4146 return op1;
4147 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4148 return op0;
4149 break;
4150
4151 case UMAX:
4152 if (op1 == constm1_rtx && ! side_effects_p (op0))
4153 return op1;
4154 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4155 return op0;
4156 break;
4157
4158 default:
4159 abort ();
4160 }
4161
4162 return 0;
4163 }
4164
4165 /* Get the integer argument values in two forms:
4166 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
4167
4168 arg0 = INTVAL (op0);
4169 arg1 = INTVAL (op1);
4170
906c4e36 4171 if (width < HOST_BITS_PER_WIDE_INT)
7afe21cc 4172 {
906c4e36
RK
4173 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
4174 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
7afe21cc
RK
4175
4176 arg0s = arg0;
906c4e36
RK
4177 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4178 arg0s |= ((HOST_WIDE_INT) (-1) << width);
7afe21cc
RK
4179
4180 arg1s = arg1;
906c4e36
RK
4181 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4182 arg1s |= ((HOST_WIDE_INT) (-1) << width);
7afe21cc
RK
4183 }
4184 else
4185 {
4186 arg0s = arg0;
4187 arg1s = arg1;
4188 }
4189
4190 /* Compute the value of the arithmetic. */
4191
4192 switch (code)
4193 {
4194 case PLUS:
538b78e7 4195 val = arg0s + arg1s;
7afe21cc
RK
4196 break;
4197
4198 case MINUS:
538b78e7 4199 val = arg0s - arg1s;
7afe21cc
RK
4200 break;
4201
4202 case MULT:
4203 val = arg0s * arg1s;
4204 break;
4205
4206 case DIV:
4207 if (arg1s == 0)
4208 return 0;
4209 val = arg0s / arg1s;
4210 break;
4211
4212 case MOD:
4213 if (arg1s == 0)
4214 return 0;
4215 val = arg0s % arg1s;
4216 break;
4217
4218 case UDIV:
4219 if (arg1 == 0)
4220 return 0;
906c4e36 4221 val = (unsigned HOST_WIDE_INT) arg0 / arg1;
7afe21cc
RK
4222 break;
4223
4224 case UMOD:
4225 if (arg1 == 0)
4226 return 0;
906c4e36 4227 val = (unsigned HOST_WIDE_INT) arg0 % arg1;
7afe21cc
RK
4228 break;
4229
4230 case AND:
4231 val = arg0 & arg1;
4232 break;
4233
4234 case IOR:
4235 val = arg0 | arg1;
4236 break;
4237
4238 case XOR:
4239 val = arg0 ^ arg1;
4240 break;
4241
4242 case LSHIFTRT:
4243 /* If shift count is undefined, don't fold it; let the machine do
4244 what it wants. But truncate it if the machine will do that. */
4245 if (arg1 < 0)
4246 return 0;
4247
4248#ifdef SHIFT_COUNT_TRUNCATED
85c0a556 4249 if (SHIFT_COUNT_TRUNCATED)
4d61f8c5 4250 arg1 %= width;
7afe21cc
RK
4251#endif
4252
906c4e36 4253 val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
7afe21cc
RK
4254 break;
4255
4256 case ASHIFT:
7afe21cc
RK
4257 if (arg1 < 0)
4258 return 0;
4259
4260#ifdef SHIFT_COUNT_TRUNCATED
85c0a556 4261 if (SHIFT_COUNT_TRUNCATED)
4d61f8c5 4262 arg1 %= width;
7afe21cc
RK
4263#endif
4264
906c4e36 4265 val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
7afe21cc
RK
4266 break;
4267
4268 case ASHIFTRT:
4269 if (arg1 < 0)
4270 return 0;
4271
4272#ifdef SHIFT_COUNT_TRUNCATED
85c0a556 4273 if (SHIFT_COUNT_TRUNCATED)
4d61f8c5 4274 arg1 %= width;
7afe21cc
RK
4275#endif
4276
7afe21cc 4277 val = arg0s >> arg1;
2166571b
RS
4278
4279 /* Bootstrap compiler may not have sign extended the right shift.
4280 Manually extend the sign to insure bootstrap cc matches gcc. */
4281 if (arg0s < 0 && arg1 > 0)
4282 val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
4283
7afe21cc
RK
4284 break;
4285
4286 case ROTATERT:
4287 if (arg1 < 0)
4288 return 0;
4289
4290 arg1 %= width;
906c4e36
RK
4291 val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
4292 | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
7afe21cc
RK
4293 break;
4294
4295 case ROTATE:
4296 if (arg1 < 0)
4297 return 0;
4298
4299 arg1 %= width;
906c4e36
RK
4300 val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
4301 | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
7afe21cc
RK
4302 break;
4303
4304 case COMPARE:
4305 /* Do nothing here. */
4306 return 0;
4307
830a38ee
RS
4308 case SMIN:
4309 val = arg0s <= arg1s ? arg0s : arg1s;
4310 break;
4311
4312 case UMIN:
906c4e36
RK
4313 val = ((unsigned HOST_WIDE_INT) arg0
4314 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
830a38ee
RS
4315 break;
4316
4317 case SMAX:
4318 val = arg0s > arg1s ? arg0s : arg1s;
4319 break;
4320
4321 case UMAX:
906c4e36
RK
4322 val = ((unsigned HOST_WIDE_INT) arg0
4323 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
830a38ee
RS
4324 break;
4325
7afe21cc
RK
4326 default:
4327 abort ();
4328 }
4329
4330 /* Clear the bits that don't belong in our mode, unless they and our sign
4331 bit are all one. So we get either a reasonable negative value or a
4332 reasonable unsigned value for this mode. */
906c4e36
RK
4333 if (width < HOST_BITS_PER_WIDE_INT
4334 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4335 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4336 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4337
ad89d6f6
TG
4338 /* If this would be an entire word for the target, but is not for
4339 the host, then sign-extend on the host so that the number will look
4340 the same way on the host that it would on the target.
4341
4342 For example, when building a 64 bit alpha hosted 32 bit sparc
4343 targeted compiler, then we want the 32 bit unsigned value -1 to be
4344 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
4345 The later confuses the sparc backend. */
4346
4347 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
4348 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
4349 val |= ((HOST_WIDE_INT) (-1) << width);
4350
906c4e36 4351 return GEN_INT (val);
7afe21cc
RK
4352}
4353\f
96b0e481
RK
4354/* Simplify a PLUS or MINUS, at least one of whose operands may be another
4355 PLUS or MINUS.
4356
4357 Rather than test for specific case, we do this by a brute-force method
4358 and do all possible simplifications until no more changes occur. Then
4359 we rebuild the operation. */
4360
4361static rtx
4362simplify_plus_minus (code, mode, op0, op1)
4363 enum rtx_code code;
4364 enum machine_mode mode;
4365 rtx op0, op1;
4366{
4367 rtx ops[8];
4368 int negs[8];
4369 rtx result, tem;
fb5c8ce6 4370 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
96b0e481 4371 int first = 1, negate = 0, changed;
fb5c8ce6 4372 int i, j;
96b0e481 4373
4c9a05bc 4374 bzero ((char *) ops, sizeof ops);
96b0e481
RK
4375
4376 /* Set up the two operands and then expand them until nothing has been
4377 changed. If we run out of room in our array, give up; this should
4378 almost never happen. */
4379
4380 ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);
4381
4382 changed = 1;
4383 while (changed)
4384 {
4385 changed = 0;
4386
4387 for (i = 0; i < n_ops; i++)
4388 switch (GET_CODE (ops[i]))
4389 {
4390 case PLUS:
4391 case MINUS:
4392 if (n_ops == 7)
4393 return 0;
4394
4395 ops[n_ops] = XEXP (ops[i], 1);
4396 negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
4397 ops[i] = XEXP (ops[i], 0);
b7d9299b 4398 input_ops++;
96b0e481
RK
4399 changed = 1;
4400 break;
4401
4402 case NEG:
4403 ops[i] = XEXP (ops[i], 0);
4404 negs[i] = ! negs[i];
4405 changed = 1;
4406 break;
4407
4408 case CONST:
4409 ops[i] = XEXP (ops[i], 0);
fb5c8ce6 4410 input_consts++;
96b0e481
RK
4411 changed = 1;
4412 break;
4413
4414 case NOT:
4415 /* ~a -> (-a - 1) */
4416 if (n_ops != 7)
4417 {
4418 ops[n_ops] = constm1_rtx;
5931019b 4419 negs[n_ops++] = negs[i];
96b0e481
RK
4420 ops[i] = XEXP (ops[i], 0);
4421 negs[i] = ! negs[i];
4422 changed = 1;
4423 }
4424 break;
4425
4426 case CONST_INT:
4427 if (negs[i])
4428 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
4429 break;
e9a25f70
JL
4430
4431 default:
4432 break;
96b0e481
RK
4433 }
4434 }
4435
4436 /* If we only have two operands, we can't do anything. */
4437 if (n_ops <= 2)
4438 return 0;
4439
4440 /* Now simplify each pair of operands until nothing changes. The first
4441 time through just simplify constants against each other. */
4442
4443 changed = 1;
4444 while (changed)
4445 {
4446 changed = first;
4447
4448 for (i = 0; i < n_ops - 1; i++)
4449 for (j = i + 1; j < n_ops; j++)
4450 if (ops[i] != 0 && ops[j] != 0
4451 && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
4452 {
4453 rtx lhs = ops[i], rhs = ops[j];
4454 enum rtx_code ncode = PLUS;
4455
4456 if (negs[i] && ! negs[j])
4457 lhs = ops[j], rhs = ops[i], ncode = MINUS;
4458 else if (! negs[i] && negs[j])
4459 ncode = MINUS;
4460
4461 tem = simplify_binary_operation (ncode, mode, lhs, rhs);
b7d9299b 4462 if (tem)
96b0e481
RK
4463 {
4464 ops[i] = tem, ops[j] = 0;
4465 negs[i] = negs[i] && negs[j];
4466 if (GET_CODE (tem) == NEG)
4467 ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];
4468
4469 if (GET_CODE (ops[i]) == CONST_INT && negs[i])
4470 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
4471 changed = 1;
4472 }
4473 }
4474
4475 first = 0;
4476 }
4477
4478 /* Pack all the operands to the lower-numbered entries and give up if
91a60f37 4479 we didn't reduce the number of operands we had. Make sure we
fb5c8ce6
RK
4480 count a CONST as two operands. If we have the same number of
4481 operands, but have made more CONSTs than we had, this is also
4482 an improvement, so accept it. */
91a60f37 4483
fb5c8ce6 4484 for (i = 0, j = 0; j < n_ops; j++)
96b0e481 4485 if (ops[j] != 0)
91a60f37
RK
4486 {
4487 ops[i] = ops[j], negs[i++] = negs[j];
4488 if (GET_CODE (ops[j]) == CONST)
fb5c8ce6 4489 n_consts++;
91a60f37 4490 }
96b0e481 4491
fb5c8ce6
RK
4492 if (i + n_consts > input_ops
4493 || (i + n_consts == input_ops && n_consts <= input_consts))
96b0e481
RK
4494 return 0;
4495
4496 n_ops = i;
4497
4498 /* If we have a CONST_INT, put it last. */
4499 for (i = 0; i < n_ops - 1; i++)
4500 if (GET_CODE (ops[i]) == CONST_INT)
4501 {
4502 tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i] , ops[i] = tem;
4503 j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
4504 }
4505
4506 /* Put a non-negated operand first. If there aren't any, make all
4507 operands positive and negate the whole thing later. */
4508 for (i = 0; i < n_ops && negs[i]; i++)
4509 ;
4510
4511 if (i == n_ops)
4512 {
4513 for (i = 0; i < n_ops; i++)
4514 negs[i] = 0;
4515 negate = 1;
4516 }
4517 else if (i != 0)
4518 {
4519 tem = ops[0], ops[0] = ops[i], ops[i] = tem;
4520 j = negs[0], negs[0] = negs[i], negs[i] = j;
4521 }
4522
4523 /* Now make the result by performing the requested operations. */
4524 result = ops[0];
4525 for (i = 1; i < n_ops; i++)
4526 result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);
4527
38a448ca 4528 return negate ? gen_rtx_NEG (mode, result) : result;
96b0e481
RK
4529}
4530\f
4531/* Make a binary operation by properly ordering the operands and
4532 seeing if the expression folds. */
4533
4534static rtx
4535cse_gen_binary (code, mode, op0, op1)
4536 enum rtx_code code;
4537 enum machine_mode mode;
4538 rtx op0, op1;
4539{
4540 rtx tem;
4541
4542 /* Put complex operands first and constants second if commutative. */
4543 if (GET_RTX_CLASS (code) == 'c'
4544 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4545 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4546 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4547 || (GET_CODE (op0) == SUBREG
4548 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4549 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4550 tem = op0, op0 = op1, op1 = tem;
4551
4552 /* If this simplifies, do it. */
4553 tem = simplify_binary_operation (code, mode, op0, op1);
4554
4555 if (tem)
4556 return tem;
4557
4558 /* Handle addition and subtraction of CONST_INT specially. Otherwise,
4559 just form the operation. */
4560
4561 if (code == PLUS && GET_CODE (op1) == CONST_INT
4562 && GET_MODE (op0) != VOIDmode)
4563 return plus_constant (op0, INTVAL (op1));
4564 else if (code == MINUS && GET_CODE (op1) == CONST_INT
4565 && GET_MODE (op0) != VOIDmode)
4566 return plus_constant (op0, - INTVAL (op1));
4567 else
38a448ca 4568 return gen_rtx_fmt_ee (code, mode, op0, op1);
96b0e481
RK
4569}
4570\f
7afe21cc 4571/* Like simplify_binary_operation except used for relational operators.
a432f20d
RK
4572 MODE is the mode of the operands, not that of the result. If MODE
4573 is VOIDmode, both operands must also be VOIDmode and we compare the
4574 operands in "infinite precision".
4575
4576 If no simplification is possible, this function returns zero. Otherwise,
4577 it returns either const_true_rtx or const0_rtx. */
7afe21cc
RK
4578
4579rtx
4580simplify_relational_operation (code, mode, op0, op1)
4581 enum rtx_code code;
4582 enum machine_mode mode;
4583 rtx op0, op1;
4584{
a432f20d
RK
4585 int equal, op0lt, op0ltu, op1lt, op1ltu;
4586 rtx tem;
7afe21cc
RK
4587
4588 /* If op0 is a compare, extract the comparison arguments from it. */
4589 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4590 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4591
28bad1cb
RK
4592 /* We can't simplify MODE_CC values since we don't know what the
4593 actual comparison is. */
4594 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC
4595#ifdef HAVE_cc0
4596 || op0 == cc0_rtx
4597#endif
4598 )
31dcf83f
RS
4599 return 0;
4600
a432f20d
RK
4601 /* For integer comparisons of A and B maybe we can simplify A - B and can
4602 then simplify a comparison of that with zero. If A and B are both either
4603 a register or a CONST_INT, this can't help; testing for these cases will
4604 prevent infinite recursion here and speed things up.
4605
c27b5c62
JW
4606 If CODE is an unsigned comparison, then we can never do this optimization,
4607 because it gives an incorrect result if the subtraction wraps around zero.
4608 ANSI C defines unsigned operations such that they never overflow, and
4609 thus such cases can not be ignored. */
a432f20d
RK
4610
4611 if (INTEGRAL_MODE_P (mode) && op1 != const0_rtx
4612 && ! ((GET_CODE (op0) == REG || GET_CODE (op0) == CONST_INT)
4613 && (GET_CODE (op1) == REG || GET_CODE (op1) == CONST_INT))
4614 && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
c27b5c62 4615 && code != GTU && code != GEU && code != LTU && code != LEU)
a432f20d
RK
4616 return simplify_relational_operation (signed_condition (code),
4617 mode, tem, const0_rtx);
4618
4619 /* For non-IEEE floating-point, if the two operands are equal, we know the
4620 result. */
4621 if (rtx_equal_p (op0, op1)
4622 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4623 || ! FLOAT_MODE_P (GET_MODE (op0)) || flag_fast_math))
4624 equal = 1, op0lt = 0, op0ltu = 0, op1lt = 0, op1ltu = 0;
4625
4626 /* If the operands are floating-point constants, see if we can fold
4627 the result. */
6076248a 4628#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
a432f20d
RK
4629 else if (GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
4630 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4631 {
4632 REAL_VALUE_TYPE d0, d1;
4633 jmp_buf handler;
4634
4635 if (setjmp (handler))
4636 return 0;
7afe21cc 4637
a432f20d
RK
4638 set_float_handler (handler);
4639 REAL_VALUE_FROM_CONST_DOUBLE (d0, op0);
4640 REAL_VALUE_FROM_CONST_DOUBLE (d1, op1);
4641 equal = REAL_VALUES_EQUAL (d0, d1);
4642 op0lt = op0ltu = REAL_VALUES_LESS (d0, d1);
4643 op1lt = op1ltu = REAL_VALUES_LESS (d1, d0);
4644 set_float_handler (NULL_PTR);
4645 }
4646#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
7afe21cc 4647
a432f20d
RK
4648 /* Otherwise, see if the operands are both integers. */
4649 else if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
4650 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
4651 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
4652 {
4653 int width = GET_MODE_BITSIZE (mode);
64812ded
RK
4654 HOST_WIDE_INT l0s, h0s, l1s, h1s;
4655 unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
7afe21cc 4656
a432f20d
RK
4657 /* Get the two words comprising each integer constant. */
4658 if (GET_CODE (op0) == CONST_DOUBLE)
4659 {
4660 l0u = l0s = CONST_DOUBLE_LOW (op0);
4661 h0u = h0s = CONST_DOUBLE_HIGH (op0);
7afe21cc 4662 }
a432f20d 4663 else
6076248a 4664 {
a432f20d 4665 l0u = l0s = INTVAL (op0);
cb3bb2a7 4666 h0u = h0s = l0s < 0 ? -1 : 0;
a432f20d 4667 }
6076248a 4668
a432f20d
RK
4669 if (GET_CODE (op1) == CONST_DOUBLE)
4670 {
4671 l1u = l1s = CONST_DOUBLE_LOW (op1);
4672 h1u = h1s = CONST_DOUBLE_HIGH (op1);
4673 }
4674 else
4675 {
4676 l1u = l1s = INTVAL (op1);
cb3bb2a7 4677 h1u = h1s = l1s < 0 ? -1 : 0;
a432f20d
RK
4678 }
4679
4680 /* If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT,
4681 we have to sign or zero-extend the values. */
4682 if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
4683 h0u = h1u = 0, h0s = l0s < 0 ? -1 : 0, h1s = l1s < 0 ? -1 : 0;
6076248a 4684
a432f20d
RK
4685 if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
4686 {
4687 l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
4688 l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
6076248a 4689
a432f20d
RK
4690 if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4691 l0s |= ((HOST_WIDE_INT) (-1) << width);
6076248a 4692
a432f20d
RK
4693 if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4694 l1s |= ((HOST_WIDE_INT) (-1) << width);
6076248a
RK
4695 }
4696
a432f20d
RK
4697 equal = (h0u == h1u && l0u == l1u);
4698 op0lt = (h0s < h1s || (h0s == h1s && l0s < l1s));
4699 op1lt = (h1s < h0s || (h1s == h0s && l1s < l0s));
4700 op0ltu = (h0u < h1u || (h0u == h1u && l0u < l1u));
4701 op1ltu = (h1u < h0u || (h1u == h0u && l1u < l0u));
4702 }
4703
4704 /* Otherwise, there are some code-specific tests we can make. */
4705 else
4706 {
7afe21cc
RK
4707 switch (code)
4708 {
4709 case EQ:
a432f20d
RK
4710 /* References to the frame plus a constant or labels cannot
4711 be zero, but a SYMBOL_REF can due to #pragma weak. */
4712 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4713 || GET_CODE (op0) == LABEL_REF)
1a7c818b 4714#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
a432f20d
RK
4715 /* On some machines, the ap reg can be 0 sometimes. */
4716 && op0 != arg_pointer_rtx
7afe21cc 4717#endif
a432f20d
RK
4718 )
4719 return const0_rtx;
4720 break;
7afe21cc
RK
4721
4722 case NE:
a432f20d
RK
4723 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4724 || GET_CODE (op0) == LABEL_REF)
1a7c818b 4725#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
a432f20d 4726 && op0 != arg_pointer_rtx
7afe21cc 4727#endif
a432f20d 4728 )
7afe21cc
RK
4729 return const_true_rtx;
4730 break;
4731
4732 case GEU:
a432f20d
RK
4733 /* Unsigned values are never negative. */
4734 if (op1 == const0_rtx)
7afe21cc
RK
4735 return const_true_rtx;
4736 break;
4737
4738 case LTU:
a432f20d 4739 if (op1 == const0_rtx)
7afe21cc
RK
4740 return const0_rtx;
4741 break;
4742
4743 case LEU:
4744 /* Unsigned values are never greater than the largest
4745 unsigned value. */
4746 if (GET_CODE (op1) == CONST_INT
4747 && INTVAL (op1) == GET_MODE_MASK (mode)
a432f20d
RK
4748 && INTEGRAL_MODE_P (mode))
4749 return const_true_rtx;
7afe21cc
RK
4750 break;
4751
4752 case GTU:
4753 if (GET_CODE (op1) == CONST_INT
4754 && INTVAL (op1) == GET_MODE_MASK (mode)
cbf6a543 4755 && INTEGRAL_MODE_P (mode))
7afe21cc
RK
4756 return const0_rtx;
4757 break;
e9a25f70
JL
4758
4759 default:
4760 break;
7afe21cc
RK
4761 }
4762
4763 return 0;
4764 }
4765
a432f20d
RK
4766 /* If we reach here, EQUAL, OP0LT, OP0LTU, OP1LT, and OP1LTU are set
4767 as appropriate. */
7afe21cc
RK
4768 switch (code)
4769 {
7afe21cc 4770 case EQ:
a432f20d
RK
4771 return equal ? const_true_rtx : const0_rtx;
4772 case NE:
4773 return ! equal ? const_true_rtx : const0_rtx;
7afe21cc 4774 case LT:
a432f20d 4775 return op0lt ? const_true_rtx : const0_rtx;
7afe21cc 4776 case GT:
a432f20d 4777 return op1lt ? const_true_rtx : const0_rtx;
7afe21cc 4778 case LTU:
a432f20d 4779 return op0ltu ? const_true_rtx : const0_rtx;
7afe21cc 4780 case GTU:
a432f20d
RK
4781 return op1ltu ? const_true_rtx : const0_rtx;
4782 case LE:
4783 return equal || op0lt ? const_true_rtx : const0_rtx;
4784 case GE:
4785 return equal || op1lt ? const_true_rtx : const0_rtx;
4786 case LEU:
4787 return equal || op0ltu ? const_true_rtx : const0_rtx;
4788 case GEU:
4789 return equal || op1ltu ? const_true_rtx : const0_rtx;
e9a25f70
JL
4790 default:
4791 abort ();
7afe21cc 4792 }
7afe21cc
RK
4793}
4794\f
4795/* Simplify CODE, an operation with result mode MODE and three operands,
4796 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4797 a constant. Return 0 if no simplifications is possible. */
4798
4799rtx
4800simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4801 enum rtx_code code;
4802 enum machine_mode mode, op0_mode;
4803 rtx op0, op1, op2;
4804{
4805 int width = GET_MODE_BITSIZE (mode);
4806
4807 /* VOIDmode means "infinite" precision. */
4808 if (width == 0)
906c4e36 4809 width = HOST_BITS_PER_WIDE_INT;
7afe21cc
RK
4810
4811 switch (code)
4812 {
4813 case SIGN_EXTRACT:
4814 case ZERO_EXTRACT:
4815 if (GET_CODE (op0) == CONST_INT
4816 && GET_CODE (op1) == CONST_INT
4817 && GET_CODE (op2) == CONST_INT
4818 && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
906c4e36 4819 && width <= HOST_BITS_PER_WIDE_INT)
7afe21cc
RK
4820 {
4821 /* Extracting a bit-field from a constant */
906c4e36 4822 HOST_WIDE_INT val = INTVAL (op0);
7afe21cc 4823
f76b9db2
ILT
4824 if (BITS_BIG_ENDIAN)
4825 val >>= (GET_MODE_BITSIZE (op0_mode)
4826 - INTVAL (op2) - INTVAL (op1));
4827 else
4828 val >>= INTVAL (op2);
4829
906c4e36 4830 if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
7afe21cc
RK
4831 {
4832 /* First zero-extend. */
906c4e36 4833 val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
7afe21cc 4834 /* If desired, propagate sign bit. */
906c4e36
RK
4835 if (code == SIGN_EXTRACT
4836 && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4837 val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
7afe21cc
RK
4838 }
4839
4840 /* Clear the bits that don't belong in our mode,
4841 unless they and our sign bit are all one.
4842 So we get either a reasonable negative value or a reasonable
4843 unsigned value for this mode. */
906c4e36
RK
4844 if (width < HOST_BITS_PER_WIDE_INT
4845 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4846 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4847 val &= ((HOST_WIDE_INT) 1 << width) - 1;
7afe21cc 4848
906c4e36 4849 return GEN_INT (val);
7afe21cc
RK
4850 }
4851 break;
4852
4853 case IF_THEN_ELSE:
4854 if (GET_CODE (op0) == CONST_INT)
4855 return op0 != const0_rtx ? op1 : op2;
3bf1b082
JW
4856
4857 /* Convert a == b ? b : a to "a". */
4858 if (GET_CODE (op0) == NE && ! side_effects_p (op0)
4859 && rtx_equal_p (XEXP (op0, 0), op1)
4860 && rtx_equal_p (XEXP (op0, 1), op2))
4861 return op1;
4862 else if (GET_CODE (op0) == EQ && ! side_effects_p (op0)
4863 && rtx_equal_p (XEXP (op0, 1), op1)
4864 && rtx_equal_p (XEXP (op0, 0), op2))
4865 return op2;
e82ad93d 4866 else if (GET_RTX_CLASS (GET_CODE (op0)) == '<' && ! side_effects_p (op0))
ed1ecb19
JL
4867 {
4868 rtx temp;
4869 temp = simplify_relational_operation (GET_CODE (op0), op0_mode,
4870 XEXP (op0, 0), XEXP (op0, 1));
4871 /* See if any simplifications were possible. */
4872 if (temp == const0_rtx)
4873 return op2;
4874 else if (temp == const1_rtx)
4875 return op1;
4876 }
7afe21cc
RK
4877 break;
4878
4879 default:
4880 abort ();
4881 }
4882
4883 return 0;
4884}
4885\f
4886/* If X is a nontrivial arithmetic operation on an argument
4887 for which a constant value can be determined, return
4888 the result of operating on that value, as a constant.
4889 Otherwise, return X, possibly with one or more operands
4890 modified by recursive calls to this function.
4891
e7bb59fa
RK
4892 If X is a register whose contents are known, we do NOT
4893 return those contents here. equiv_constant is called to
4894 perform that task.
7afe21cc
RK
4895
4896 INSN is the insn that we may be modifying. If it is 0, make a copy
4897 of X before modifying it. */
4898
4899static rtx
4900fold_rtx (x, insn)
4901 rtx x;
4902 rtx insn;
4903{
4904 register enum rtx_code code;
4905 register enum machine_mode mode;
4906 register char *fmt;
906c4e36 4907 register int i;
7afe21cc
RK
4908 rtx new = 0;
4909 int copied = 0;
4910 int must_swap = 0;
4911
4912 /* Folded equivalents of first two operands of X. */
4913 rtx folded_arg0;
4914 rtx folded_arg1;
4915
4916 /* Constant equivalents of first three operands of X;
4917 0 when no such equivalent is known. */
4918 rtx const_arg0;
4919 rtx const_arg1;
4920 rtx const_arg2;
4921
4922 /* The mode of the first operand of X. We need this for sign and zero
4923 extends. */
4924 enum machine_mode mode_arg0;
4925
4926 if (x == 0)
4927 return x;
4928
4929 mode = GET_MODE (x);
4930 code = GET_CODE (x);
4931 switch (code)
4932 {
4933 case CONST:
185ebd6c
RH
4934 /* If the operand is a CONSTANT_P_RTX, see if what's inside it
4935 is known to be constant and replace the whole thing with a
4936 CONST_INT of either zero or one. Note that this code assumes
4937 that an insn that recognizes a CONST will also recognize a
4938 CONST_INT, but that seems to be a safe assumption. */
4939 if (GET_CODE (XEXP (x, 0)) == CONSTANT_P_RTX)
4940 {
4941 x = equiv_constant (fold_rtx (XEXP (XEXP (x, 0), 0), 0));
4942 return (x != 0 && (GET_CODE (x) == CONST_INT
4943 || GET_CODE (x) == CONST_DOUBLE)
4944 ? const1_rtx : const0_rtx);
4945 }
4946
4947 /* ... fall through ... */
4948
7afe21cc
RK
4949 case CONST_INT:
4950 case CONST_DOUBLE:
4951 case SYMBOL_REF:
4952 case LABEL_REF:
4953 case REG:
4954 /* No use simplifying an EXPR_LIST
4955 since they are used only for lists of args
4956 in a function call's REG_EQUAL note. */
4957 case EXPR_LIST:
956d6950
JL
4958 /* Changing anything inside an ADDRESSOF is incorrect; we don't
4959 want to (e.g.,) make (addressof (const_int 0)) just because
4960 the location is known to be zero. */
4961 case ADDRESSOF:
7afe21cc
RK
4962 return x;
4963
4964#ifdef HAVE_cc0
4965 case CC0:
4966 return prev_insn_cc0;
4967#endif
4968
4969 case PC:
4970 /* If the next insn is a CODE_LABEL followed by a jump table,
4971 PC's value is a LABEL_REF pointing to that label. That
4972 lets us fold switch statements on the Vax. */
4973 if (insn && GET_CODE (insn) == JUMP_INSN)
4974 {
4975 rtx next = next_nonnote_insn (insn);
4976
4977 if (next && GET_CODE (next) == CODE_LABEL
4978 && NEXT_INSN (next) != 0
4979 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
4980 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
4981 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
38a448ca 4982 return gen_rtx_LABEL_REF (Pmode, next);
7afe21cc
RK
4983 }
4984 break;
4985
4986 case SUBREG:
c610adec
RK
4987 /* See if we previously assigned a constant value to this SUBREG. */
4988 if ((new = lookup_as_function (x, CONST_INT)) != 0
4989 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
7afe21cc
RK
4990 return new;
4991
4b980e20
RK
4992 /* If this is a paradoxical SUBREG, we have no idea what value the
4993 extra bits would have. However, if the operand is equivalent
4994 to a SUBREG whose operand is the same as our mode, and all the
4995 modes are within a word, we can just use the inner operand
31c85c78
RK
4996 because these SUBREGs just say how to treat the register.
4997
4998 Similarly if we find an integer constant. */
4b980e20 4999
e5f6a288 5000 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4b980e20
RK
5001 {
5002 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
5003 struct table_elt *elt;
5004
5005 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
5006 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
5007 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
5008 imode)) != 0)
31c85c78
RK
5009 for (elt = elt->first_same_value;
5010 elt; elt = elt->next_same_value)
5011 {
5012 if (CONSTANT_P (elt->exp)
5013 && GET_MODE (elt->exp) == VOIDmode)
5014 return elt->exp;
5015
4b980e20
RK
5016 if (GET_CODE (elt->exp) == SUBREG
5017 && GET_MODE (SUBREG_REG (elt->exp)) == mode
906c4e36 5018 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4b980e20
RK
5019 return copy_rtx (SUBREG_REG (elt->exp));
5020 }
5021
5022 return x;
5023 }
e5f6a288 5024
7afe21cc
RK
5025 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
5026 We might be able to if the SUBREG is extracting a single word in an
5027 integral mode or extracting the low part. */
5028
5029 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
5030 const_arg0 = equiv_constant (folded_arg0);
5031 if (const_arg0)
5032 folded_arg0 = const_arg0;
5033
5034 if (folded_arg0 != SUBREG_REG (x))
5035 {
5036 new = 0;
5037
5038 if (GET_MODE_CLASS (mode) == MODE_INT
5039 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5040 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
5041 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
5042 GET_MODE (SUBREG_REG (x)));
5043 if (new == 0 && subreg_lowpart_p (x))
5044 new = gen_lowpart_if_possible (mode, folded_arg0);
5045 if (new)
5046 return new;
5047 }
e5f6a288
RK
5048
5049 /* If this is a narrowing SUBREG and our operand is a REG, see if
858a47b1 5050 we can find an equivalence for REG that is an arithmetic operation
e5f6a288
RK
5051 in a wider mode where both operands are paradoxical SUBREGs
5052 from objects of our result mode. In that case, we couldn't report
5053 an equivalent value for that operation, since we don't know what the
5054 extra bits will be. But we can find an equivalence for this SUBREG
5055 by folding that operation is the narrow mode. This allows us to
5056 fold arithmetic in narrow modes when the machine only supports
4b980e20
RK
5057 word-sized arithmetic.
5058
5059 Also look for a case where we have a SUBREG whose operand is the
5060 same as our result. If both modes are smaller than a word, we
5061 are simply interpreting a register in different modes and we
5062 can use the inner value. */
e5f6a288
RK
5063
5064 if (GET_CODE (folded_arg0) == REG
e8d76a39
RS
5065 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
5066 && subreg_lowpart_p (x))
e5f6a288
RK
5067 {
5068 struct table_elt *elt;
5069
5070 /* We can use HASH here since we know that canon_hash won't be
5071 called. */
5072 elt = lookup (folded_arg0,
5073 HASH (folded_arg0, GET_MODE (folded_arg0)),
5074 GET_MODE (folded_arg0));
5075
5076 if (elt)
5077 elt = elt->first_same_value;
5078
5079 for (; elt; elt = elt->next_same_value)
5080 {
e8d76a39
RS
5081 enum rtx_code eltcode = GET_CODE (elt->exp);
5082
e5f6a288
RK
5083 /* Just check for unary and binary operations. */
5084 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
5085 && GET_CODE (elt->exp) != SIGN_EXTEND
5086 && GET_CODE (elt->exp) != ZERO_EXTEND
5087 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
5088 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
5089 {
5090 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
5091
5092 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
906c4e36 5093 op0 = fold_rtx (op0, NULL_RTX);
e5f6a288
RK
5094
5095 op0 = equiv_constant (op0);
5096 if (op0)
5097 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
5098 op0, mode);
5099 }
5100 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
5101 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
e8d76a39
RS
5102 && eltcode != DIV && eltcode != MOD
5103 && eltcode != UDIV && eltcode != UMOD
5104 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
5105 && eltcode != ROTATE && eltcode != ROTATERT
e5f6a288
RK
5106 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
5107 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
5108 == mode))
5109 || CONSTANT_P (XEXP (elt->exp, 0)))
5110 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
5111 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
5112 == mode))
5113 || CONSTANT_P (XEXP (elt->exp, 1))))
5114 {
5115 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
5116 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
5117
5118 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
906c4e36 5119 op0 = fold_rtx (op0, NULL_RTX);
e5f6a288
RK
5120
5121 if (op0)
5122 op0 = equiv_constant (op0);
5123
5124 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
906c4e36 5125 op1 = fold_rtx (op1, NULL_RTX);
e5f6a288
RK
5126
5127 if (op1)
5128 op1 = equiv_constant (op1);
5129
76fb0b60
RS
5130 /* If we are looking for the low SImode part of
5131 (ashift:DI c (const_int 32)), it doesn't work
5132 to compute that in SImode, because a 32-bit shift
5133 in SImode is unpredictable. We know the value is 0. */
5134 if (op0 && op1
45620ed4 5135 && GET_CODE (elt->exp) == ASHIFT
76fb0b60
RS
5136 && GET_CODE (op1) == CONST_INT
5137 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
5138 {
5139 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
5140
5141 /* If the count fits in the inner mode's width,
5142 but exceeds the outer mode's width,
5143 the value will get truncated to 0
5144 by the subreg. */
5145 new = const0_rtx;
5146 else
5147 /* If the count exceeds even the inner mode's width,
5148 don't fold this expression. */
5149 new = 0;
5150 }
5151 else if (op0 && op1)
e5f6a288
RK
5152 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
5153 op0, op1);
5154 }
5155
4b980e20
RK
5156 else if (GET_CODE (elt->exp) == SUBREG
5157 && GET_MODE (SUBREG_REG (elt->exp)) == mode
5158 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
5159 <= UNITS_PER_WORD)
906c4e36 5160 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4b980e20
RK
5161 new = copy_rtx (SUBREG_REG (elt->exp));
5162
e5f6a288
RK
5163 if (new)
5164 return new;
5165 }
5166 }
5167
7afe21cc
RK
5168 return x;
5169
5170 case NOT:
5171 case NEG:
5172 /* If we have (NOT Y), see if Y is known to be (NOT Z).
5173 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
5174 new = lookup_as_function (XEXP (x, 0), code);
5175 if (new)
5176 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
5177 break;
13c9910f 5178
7afe21cc
RK
5179 case MEM:
5180 /* If we are not actually processing an insn, don't try to find the
5181 best address. Not only don't we care, but we could modify the
5182 MEM in an invalid way since we have no insn to validate against. */
5183 if (insn != 0)
5184 find_best_addr (insn, &XEXP (x, 0));
5185
5186 {
5187 /* Even if we don't fold in the insn itself,
5188 we can safely do so here, in hopes of getting a constant. */
906c4e36 5189 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
7afe21cc 5190 rtx base = 0;
906c4e36 5191 HOST_WIDE_INT offset = 0;
7afe21cc
RK
5192
5193 if (GET_CODE (addr) == REG
5194 && REGNO_QTY_VALID_P (REGNO (addr))
5195 && GET_MODE (addr) == qty_mode[reg_qty[REGNO (addr)]]
5196 && qty_const[reg_qty[REGNO (addr)]] != 0)
5197 addr = qty_const[reg_qty[REGNO (addr)]];
5198
5199 /* If address is constant, split it into a base and integer offset. */
5200 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
5201 base = addr;
5202 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
5203 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
5204 {
5205 base = XEXP (XEXP (addr, 0), 0);
5206 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
5207 }
5208 else if (GET_CODE (addr) == LO_SUM
5209 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
5210 base = XEXP (addr, 1);
e9a25f70 5211 else if (GET_CODE (addr) == ADDRESSOF)
956d6950 5212 return change_address (x, VOIDmode, addr);
7afe21cc
RK
5213
5214 /* If this is a constant pool reference, we can fold it into its
5215 constant to allow better value tracking. */
5216 if (base && GET_CODE (base) == SYMBOL_REF
5217 && CONSTANT_POOL_ADDRESS_P (base))
5218 {
5219 rtx constant = get_pool_constant (base);
5220 enum machine_mode const_mode = get_pool_mode (base);
5221 rtx new;
5222
5223 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
5224 constant_pool_entries_cost = COST (constant);
5225
5226 /* If we are loading the full constant, we have an equivalence. */
5227 if (offset == 0 && mode == const_mode)
5228 return constant;
5229
9faa82d8 5230 /* If this actually isn't a constant (weird!), we can't do
7afe21cc
RK
5231 anything. Otherwise, handle the two most common cases:
5232 extracting a word from a multi-word constant, and extracting
5233 the low-order bits. Other cases don't seem common enough to
5234 worry about. */
5235 if (! CONSTANT_P (constant))
5236 return x;
5237
5238 if (GET_MODE_CLASS (mode) == MODE_INT
5239 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5240 && offset % UNITS_PER_WORD == 0
5241 && (new = operand_subword (constant,
5242 offset / UNITS_PER_WORD,
5243 0, const_mode)) != 0)
5244 return new;
5245
5246 if (((BYTES_BIG_ENDIAN
5247 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
5248 || (! BYTES_BIG_ENDIAN && offset == 0))
5249 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
5250 return new;
5251 }
5252
5253 /* If this is a reference to a label at a known position in a jump
5254 table, we also know its value. */
5255 if (base && GET_CODE (base) == LABEL_REF)
5256 {
5257 rtx label = XEXP (base, 0);
5258 rtx table_insn = NEXT_INSN (label);
5259
5260 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5261 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
5262 {
5263 rtx table = PATTERN (table_insn);
5264
5265 if (offset >= 0
5266 && (offset / GET_MODE_SIZE (GET_MODE (table))
5267 < XVECLEN (table, 0)))
5268 return XVECEXP (table, 0,
5269 offset / GET_MODE_SIZE (GET_MODE (table)));
5270 }
5271 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5272 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
5273 {
5274 rtx table = PATTERN (table_insn);
5275
5276 if (offset >= 0
5277 && (offset / GET_MODE_SIZE (GET_MODE (table))
5278 < XVECLEN (table, 1)))
5279 {
5280 offset /= GET_MODE_SIZE (GET_MODE (table));
38a448ca
RH
5281 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
5282 XEXP (table, 0));
7afe21cc
RK
5283
5284 if (GET_MODE (table) != Pmode)
38a448ca 5285 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
7afe21cc 5286
67a37737
RK
5287 /* Indicate this is a constant. This isn't a
5288 valid form of CONST, but it will only be used
5289 to fold the next insns and then discarded, so
5290 it should be safe. */
38a448ca 5291 return gen_rtx_CONST (GET_MODE (new), new);
7afe21cc
RK
5292 }
5293 }
5294 }
5295
5296 return x;
5297 }
9255709c
RK
5298
5299 case ASM_OPERANDS:
5300 for (i = XVECLEN (x, 3) - 1; i >= 0; i--)
5301 validate_change (insn, &XVECEXP (x, 3, i),
5302 fold_rtx (XVECEXP (x, 3, i), insn), 0);
5303 break;
e9a25f70
JL
5304
5305 default:
5306 break;
7afe21cc
RK
5307 }
5308
5309 const_arg0 = 0;
5310 const_arg1 = 0;
5311 const_arg2 = 0;
5312 mode_arg0 = VOIDmode;
5313
5314 /* Try folding our operands.
5315 Then see which ones have constant values known. */
5316
5317 fmt = GET_RTX_FORMAT (code);
5318 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5319 if (fmt[i] == 'e')
5320 {
5321 rtx arg = XEXP (x, i);
5322 rtx folded_arg = arg, const_arg = 0;
5323 enum machine_mode mode_arg = GET_MODE (arg);
5324 rtx cheap_arg, expensive_arg;
5325 rtx replacements[2];
5326 int j;
5327
5328 /* Most arguments are cheap, so handle them specially. */
5329 switch (GET_CODE (arg))
5330 {
5331 case REG:
5332 /* This is the same as calling equiv_constant; it is duplicated
5333 here for speed. */
5334 if (REGNO_QTY_VALID_P (REGNO (arg))
5335 && qty_const[reg_qty[REGNO (arg)]] != 0
5336 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != REG
5337 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != PLUS)
5338 const_arg
5339 = gen_lowpart_if_possible (GET_MODE (arg),
5340 qty_const[reg_qty[REGNO (arg)]]);
5341 break;
5342
5343 case CONST:
5344 case CONST_INT:
5345 case SYMBOL_REF:
5346 case LABEL_REF:
5347 case CONST_DOUBLE:
5348 const_arg = arg;
5349 break;
5350
5351#ifdef HAVE_cc0
5352 case CC0:
5353 folded_arg = prev_insn_cc0;
5354 mode_arg = prev_insn_cc0_mode;
5355 const_arg = equiv_constant (folded_arg);
5356 break;
5357#endif
5358
5359 default:
5360 folded_arg = fold_rtx (arg, insn);
5361 const_arg = equiv_constant (folded_arg);
5362 }
5363
5364 /* For the first three operands, see if the operand
5365 is constant or equivalent to a constant. */
5366 switch (i)
5367 {
5368 case 0:
5369 folded_arg0 = folded_arg;
5370 const_arg0 = const_arg;
5371 mode_arg0 = mode_arg;
5372 break;
5373 case 1:
5374 folded_arg1 = folded_arg;
5375 const_arg1 = const_arg;
5376 break;
5377 case 2:
5378 const_arg2 = const_arg;
5379 break;
5380 }
5381
5382 /* Pick the least expensive of the folded argument and an
5383 equivalent constant argument. */
5384 if (const_arg == 0 || const_arg == folded_arg
5385 || COST (const_arg) > COST (folded_arg))
5386 cheap_arg = folded_arg, expensive_arg = const_arg;
5387 else
5388 cheap_arg = const_arg, expensive_arg = folded_arg;
5389
5390 /* Try to replace the operand with the cheapest of the two
5391 possibilities. If it doesn't work and this is either of the first
5392 two operands of a commutative operation, try swapping them.
5393 If THAT fails, try the more expensive, provided it is cheaper
5394 than what is already there. */
5395
5396 if (cheap_arg == XEXP (x, i))
5397 continue;
5398
5399 if (insn == 0 && ! copied)
5400 {
5401 x = copy_rtx (x);
5402 copied = 1;
5403 }
5404
5405 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
5406 for (j = 0;
5407 j < 2 && replacements[j]
5408 && COST (replacements[j]) < COST (XEXP (x, i));
5409 j++)
5410 {
5411 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
5412 break;
5413
5414 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
5415 {
5416 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
5417 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
5418
5419 if (apply_change_group ())
5420 {
5421 /* Swap them back to be invalid so that this loop can
5422 continue and flag them to be swapped back later. */
5423 rtx tem;
5424
5425 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
5426 XEXP (x, 1) = tem;
5427 must_swap = 1;
5428 break;
5429 }
5430 }
5431 }
5432 }
5433
2d8b0f3a
JL
5434 else
5435 {
5436 if (fmt[i] == 'E')
5437 /* Don't try to fold inside of a vector of expressions.
5438 Doing nothing is harmless. */
5439 {;}
5440 }
7afe21cc
RK
5441
5442 /* If a commutative operation, place a constant integer as the second
5443 operand unless the first operand is also a constant integer. Otherwise,
5444 place any constant second unless the first operand is also a constant. */
5445
5446 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5447 {
5448 if (must_swap || (const_arg0
5449 && (const_arg1 == 0
5450 || (GET_CODE (const_arg0) == CONST_INT
5451 && GET_CODE (const_arg1) != CONST_INT))))
5452 {
5453 register rtx tem = XEXP (x, 0);
5454
5455 if (insn == 0 && ! copied)
5456 {
5457 x = copy_rtx (x);
5458 copied = 1;
5459 }
5460
5461 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
5462 validate_change (insn, &XEXP (x, 1), tem, 1);
5463 if (apply_change_group ())
5464 {
5465 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
5466 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
5467 }
5468 }
5469 }
5470
5471 /* If X is an arithmetic operation, see if we can simplify it. */
5472
5473 switch (GET_RTX_CLASS (code))
5474 {
5475 case '1':
67a37737
RK
5476 {
5477 int is_const = 0;
5478
5479 /* We can't simplify extension ops unless we know the
5480 original mode. */
5481 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
5482 && mode_arg0 == VOIDmode)
5483 break;
5484
5485 /* If we had a CONST, strip it off and put it back later if we
5486 fold. */
5487 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
5488 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
5489
5490 new = simplify_unary_operation (code, mode,
5491 const_arg0 ? const_arg0 : folded_arg0,
5492 mode_arg0);
5493 if (new != 0 && is_const)
38a448ca 5494 new = gen_rtx_CONST (mode, new);
67a37737 5495 }
7afe21cc
RK
5496 break;
5497
5498 case '<':
5499 /* See what items are actually being compared and set FOLDED_ARG[01]
5500 to those values and CODE to the actual comparison code. If any are
5501 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
5502 do anything if both operands are already known to be constant. */
5503
5504 if (const_arg0 == 0 || const_arg1 == 0)
5505 {
5506 struct table_elt *p0, *p1;
c610adec 5507 rtx true = const_true_rtx, false = const0_rtx;
13c9910f 5508 enum machine_mode mode_arg1;
c610adec
RK
5509
5510#ifdef FLOAT_STORE_FLAG_VALUE
c7c955ee 5511 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
c610adec 5512 {
560c94a2
RK
5513 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5514 mode);
c610adec
RK
5515 false = CONST0_RTX (mode);
5516 }
5517#endif
7afe21cc 5518
13c9910f
RS
5519 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5520 &mode_arg0, &mode_arg1);
7afe21cc
RK
5521 const_arg0 = equiv_constant (folded_arg0);
5522 const_arg1 = equiv_constant (folded_arg1);
5523
13c9910f
RS
5524 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5525 what kinds of things are being compared, so we can't do
5526 anything with this comparison. */
7afe21cc
RK
5527
5528 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5529 break;
5530
0f41302f
MS
5531 /* If we do not now have two constants being compared, see
5532 if we can nevertheless deduce some things about the
5533 comparison. */
7afe21cc
RK
5534 if (const_arg0 == 0 || const_arg1 == 0)
5535 {
0f41302f
MS
5536 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or
5537 non-explicit constant? These aren't zero, but we
5538 don't know their sign. */
7afe21cc
RK
5539 if (const_arg1 == const0_rtx
5540 && (NONZERO_BASE_PLUS_P (folded_arg0)
5541#if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5542 come out as 0. */
5543 || GET_CODE (folded_arg0) == SYMBOL_REF
5544#endif
5545 || GET_CODE (folded_arg0) == LABEL_REF
5546 || GET_CODE (folded_arg0) == CONST))
5547 {
5548 if (code == EQ)
c610adec 5549 return false;
7afe21cc 5550 else if (code == NE)
c610adec 5551 return true;
7afe21cc
RK
5552 }
5553
5554 /* See if the two operands are the same. We don't do this
5555 for IEEE floating-point since we can't assume x == x
5556 since x might be a NaN. */
5557
5558 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
a83afb65 5559 || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
7afe21cc
RK
5560 && (folded_arg0 == folded_arg1
5561 || (GET_CODE (folded_arg0) == REG
5562 && GET_CODE (folded_arg1) == REG
5563 && (reg_qty[REGNO (folded_arg0)]
5564 == reg_qty[REGNO (folded_arg1)]))
5565 || ((p0 = lookup (folded_arg0,
5566 (safe_hash (folded_arg0, mode_arg0)
5567 % NBUCKETS), mode_arg0))
5568 && (p1 = lookup (folded_arg1,
5569 (safe_hash (folded_arg1, mode_arg0)
5570 % NBUCKETS), mode_arg0))
5571 && p0->first_same_value == p1->first_same_value)))
5572 return ((code == EQ || code == LE || code == GE
5573 || code == LEU || code == GEU)
c610adec 5574 ? true : false);
7afe21cc
RK
5575
5576 /* If FOLDED_ARG0 is a register, see if the comparison we are
5577 doing now is either the same as we did before or the reverse
5578 (we only check the reverse if not floating-point). */
5579 else if (GET_CODE (folded_arg0) == REG)
5580 {
5581 int qty = reg_qty[REGNO (folded_arg0)];
5582
5583 if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5584 && (comparison_dominates_p (qty_comparison_code[qty], code)
5585 || (comparison_dominates_p (qty_comparison_code[qty],
5586 reverse_condition (code))
cbf6a543 5587 && ! FLOAT_MODE_P (mode_arg0)))
7afe21cc
RK
5588 && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5589 || (const_arg1
5590 && rtx_equal_p (qty_comparison_const[qty],
5591 const_arg1))
5592 || (GET_CODE (folded_arg1) == REG
5593 && (reg_qty[REGNO (folded_arg1)]
5594 == qty_comparison_qty[qty]))))
5595 return (comparison_dominates_p (qty_comparison_code[qty],
5596 code)
c610adec 5597 ? true : false);
7afe21cc
RK
5598 }
5599 }
5600 }
5601
5602 /* If we are comparing against zero, see if the first operand is
5603 equivalent to an IOR with a constant. If so, we may be able to
5604 determine the result of this comparison. */
5605
5606 if (const_arg1 == const0_rtx)
5607 {
5608 rtx y = lookup_as_function (folded_arg0, IOR);
5609 rtx inner_const;
5610
5611 if (y != 0
5612 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5613 && GET_CODE (inner_const) == CONST_INT
5614 && INTVAL (inner_const) != 0)
5615 {
5616 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
906c4e36
RK
5617 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
5618 && (INTVAL (inner_const)
5619 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
c610adec
RK
5620 rtx true = const_true_rtx, false = const0_rtx;
5621
5622#ifdef FLOAT_STORE_FLAG_VALUE
c7c955ee 5623 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
c610adec 5624 {
560c94a2
RK
5625 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5626 mode);
c610adec
RK
5627 false = CONST0_RTX (mode);
5628 }
5629#endif
7afe21cc
RK
5630
5631 switch (code)
5632 {
5633 case EQ:
c610adec 5634 return false;
7afe21cc 5635 case NE:
c610adec 5636 return true;
7afe21cc
RK
5637 case LT: case LE:
5638 if (has_sign)
c610adec 5639 return true;
7afe21cc
RK
5640 break;
5641 case GT: case GE:
5642 if (has_sign)
c610adec 5643 return false;
7afe21cc 5644 break;
e9a25f70
JL
5645 default:
5646 break;
7afe21cc
RK
5647 }
5648 }
5649 }
5650
5651 new = simplify_relational_operation (code, mode_arg0,
5652 const_arg0 ? const_arg0 : folded_arg0,
5653 const_arg1 ? const_arg1 : folded_arg1);
c610adec
RK
5654#ifdef FLOAT_STORE_FLAG_VALUE
5655 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5656 new = ((new == const0_rtx) ? CONST0_RTX (mode)
560c94a2 5657 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE, mode));
c610adec 5658#endif
7afe21cc
RK
5659 break;
5660
5661 case '2':
5662 case 'c':
5663 switch (code)
5664 {
5665 case PLUS:
5666 /* If the second operand is a LABEL_REF, see if the first is a MINUS
5667 with that LABEL_REF as its second operand. If so, the result is
5668 the first operand of that MINUS. This handles switches with an
5669 ADDR_DIFF_VEC table. */
5670 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5671 {
e650cbda
RK
5672 rtx y
5673 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
5674 : lookup_as_function (folded_arg0, MINUS);
7afe21cc
RK
5675
5676 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5677 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5678 return XEXP (y, 0);
67a37737
RK
5679
5680 /* Now try for a CONST of a MINUS like the above. */
e650cbda
RK
5681 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
5682 : lookup_as_function (folded_arg0, CONST))) != 0
67a37737
RK
5683 && GET_CODE (XEXP (y, 0)) == MINUS
5684 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5685 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg1, 0))
5686 return XEXP (XEXP (y, 0), 0);
7afe21cc 5687 }
c2cc0778 5688
e650cbda
RK
5689 /* Likewise if the operands are in the other order. */
5690 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
5691 {
5692 rtx y
5693 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
5694 : lookup_as_function (folded_arg1, MINUS);
5695
5696 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5697 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
5698 return XEXP (y, 0);
5699
5700 /* Now try for a CONST of a MINUS like the above. */
5701 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
5702 : lookup_as_function (folded_arg1, CONST))) != 0
5703 && GET_CODE (XEXP (y, 0)) == MINUS
5704 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5705 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg0, 0))
5706 return XEXP (XEXP (y, 0), 0);
5707 }
5708
c2cc0778
RK
5709 /* If second operand is a register equivalent to a negative
5710 CONST_INT, see if we can find a register equivalent to the
5711 positive constant. Make a MINUS if so. Don't do this for
5d595063 5712 a non-negative constant since we might then alternate between
c2cc0778 5713 chosing positive and negative constants. Having the positive
5d595063
RK
5714 constant previously-used is the more common case. Be sure
5715 the resulting constant is non-negative; if const_arg1 were
5716 the smallest negative number this would overflow: depending
5717 on the mode, this would either just be the same value (and
5718 hence not save anything) or be incorrect. */
5719 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
5720 && INTVAL (const_arg1) < 0
5721 && - INTVAL (const_arg1) >= 0
5722 && GET_CODE (folded_arg1) == REG)
c2cc0778
RK
5723 {
5724 rtx new_const = GEN_INT (- INTVAL (const_arg1));
5725 struct table_elt *p
5726 = lookup (new_const, safe_hash (new_const, mode) % NBUCKETS,
5727 mode);
5728
5729 if (p)
5730 for (p = p->first_same_value; p; p = p->next_same_value)
5731 if (GET_CODE (p->exp) == REG)
5732 return cse_gen_binary (MINUS, mode, folded_arg0,
5733 canon_reg (p->exp, NULL_RTX));
5734 }
13c9910f
RS
5735 goto from_plus;
5736
5737 case MINUS:
5738 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5739 If so, produce (PLUS Z C2-C). */
5740 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5741 {
5742 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5743 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
f3becefd
RK
5744 return fold_rtx (plus_constant (copy_rtx (y),
5745 -INTVAL (const_arg1)),
a3b5c94a 5746 NULL_RTX);
13c9910f 5747 }
7afe21cc 5748
0f41302f 5749 /* ... fall through ... */
7afe21cc 5750
13c9910f 5751 from_plus:
7afe21cc
RK
5752 case SMIN: case SMAX: case UMIN: case UMAX:
5753 case IOR: case AND: case XOR:
5754 case MULT: case DIV: case UDIV:
5755 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
5756 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5757 is known to be of similar form, we may be able to replace the
5758 operation with a combined operation. This may eliminate the
5759 intermediate operation if every use is simplified in this way.
5760 Note that the similar optimization done by combine.c only works
5761 if the intermediate operation's result has only one reference. */
5762
5763 if (GET_CODE (folded_arg0) == REG
5764 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5765 {
5766 int is_shift
5767 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5768 rtx y = lookup_as_function (folded_arg0, code);
5769 rtx inner_const;
5770 enum rtx_code associate_code;
5771 rtx new_const;
5772
5773 if (y == 0
5774 || 0 == (inner_const
5775 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5776 || GET_CODE (inner_const) != CONST_INT
5777 /* If we have compiled a statement like
5778 "if (x == (x & mask1))", and now are looking at
5779 "x & mask2", we will have a case where the first operand
5780 of Y is the same as our first operand. Unless we detect
5781 this case, an infinite loop will result. */
5782 || XEXP (y, 0) == folded_arg0)
5783 break;
5784
5785 /* Don't associate these operations if they are a PLUS with the
5786 same constant and it is a power of two. These might be doable
5787 with a pre- or post-increment. Similarly for two subtracts of
5788 identical powers of two with post decrement. */
5789
5790 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
940da324
JL
5791 && ((HAVE_PRE_INCREMENT
5792 && exact_log2 (INTVAL (const_arg1)) >= 0)
5793 || (HAVE_POST_INCREMENT
5794 && exact_log2 (INTVAL (const_arg1)) >= 0)
5795 || (HAVE_PRE_DECREMENT
5796 && exact_log2 (- INTVAL (const_arg1)) >= 0)
5797 || (HAVE_POST_DECREMENT
5798 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
7afe21cc
RK
5799 break;
5800
5801 /* Compute the code used to compose the constants. For example,
5802 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
5803
5804 associate_code
5805 = (code == MULT || code == DIV || code == UDIV ? MULT
5806 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
5807
5808 new_const = simplify_binary_operation (associate_code, mode,
5809 const_arg1, inner_const);
5810
5811 if (new_const == 0)
5812 break;
5813
5814 /* If we are associating shift operations, don't let this
4908e508
RS
5815 produce a shift of the size of the object or larger.
5816 This could occur when we follow a sign-extend by a right
5817 shift on a machine that does a sign-extend as a pair
5818 of shifts. */
7afe21cc
RK
5819
5820 if (is_shift && GET_CODE (new_const) == CONST_INT
4908e508
RS
5821 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
5822 {
5823 /* As an exception, we can turn an ASHIFTRT of this
5824 form into a shift of the number of bits - 1. */
5825 if (code == ASHIFTRT)
5826 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
5827 else
5828 break;
5829 }
7afe21cc
RK
5830
5831 y = copy_rtx (XEXP (y, 0));
5832
5833 /* If Y contains our first operand (the most common way this
5834 can happen is if Y is a MEM), we would do into an infinite
5835 loop if we tried to fold it. So don't in that case. */
5836
5837 if (! reg_mentioned_p (folded_arg0, y))
5838 y = fold_rtx (y, insn);
5839
96b0e481 5840 return cse_gen_binary (code, mode, y, new_const);
7afe21cc 5841 }
e9a25f70
JL
5842 break;
5843
5844 default:
5845 break;
7afe21cc
RK
5846 }
5847
5848 new = simplify_binary_operation (code, mode,
5849 const_arg0 ? const_arg0 : folded_arg0,
5850 const_arg1 ? const_arg1 : folded_arg1);
5851 break;
5852
5853 case 'o':
5854 /* (lo_sum (high X) X) is simply X. */
5855 if (code == LO_SUM && const_arg0 != 0
5856 && GET_CODE (const_arg0) == HIGH
5857 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
5858 return const_arg1;
5859 break;
5860
5861 case '3':
5862 case 'b':
5863 new = simplify_ternary_operation (code, mode, mode_arg0,
5864 const_arg0 ? const_arg0 : folded_arg0,
5865 const_arg1 ? const_arg1 : folded_arg1,
5866 const_arg2 ? const_arg2 : XEXP (x, 2));
5867 break;
5868 }
5869
5870 return new ? new : x;
5871}
5872\f
5873/* Return a constant value currently equivalent to X.
5874 Return 0 if we don't know one. */
5875
5876static rtx
5877equiv_constant (x)
5878 rtx x;
5879{
5880 if (GET_CODE (x) == REG
5881 && REGNO_QTY_VALID_P (REGNO (x))
5882 && qty_const[reg_qty[REGNO (x)]])
5883 x = gen_lowpart_if_possible (GET_MODE (x), qty_const[reg_qty[REGNO (x)]]);
5884
2ce5e1b4 5885 if (x == 0 || CONSTANT_P (x))
7afe21cc
RK
5886 return x;
5887
fc3ffe83
RK
5888 /* If X is a MEM, try to fold it outside the context of any insn to see if
5889 it might be equivalent to a constant. That handles the case where it
5890 is a constant-pool reference. Then try to look it up in the hash table
5891 in case it is something whose value we have seen before. */
5892
5893 if (GET_CODE (x) == MEM)
5894 {
5895 struct table_elt *elt;
5896
906c4e36 5897 x = fold_rtx (x, NULL_RTX);
fc3ffe83
RK
5898 if (CONSTANT_P (x))
5899 return x;
5900
5901 elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
5902 if (elt == 0)
5903 return 0;
5904
5905 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
5906 if (elt->is_const && CONSTANT_P (elt->exp))
5907 return elt->exp;
5908 }
5909
7afe21cc
RK
5910 return 0;
5911}
5912\f
5913/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
5914 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
5915 least-significant part of X.
5916 MODE specifies how big a part of X to return.
5917
5918 If the requested operation cannot be done, 0 is returned.
5919
5920 This is similar to gen_lowpart in emit-rtl.c. */
5921
5922rtx
5923gen_lowpart_if_possible (mode, x)
5924 enum machine_mode mode;
5925 register rtx x;
5926{
5927 rtx result = gen_lowpart_common (mode, x);
5928
5929 if (result)
5930 return result;
5931 else if (GET_CODE (x) == MEM)
5932 {
5933 /* This is the only other case we handle. */
5934 register int offset = 0;
5935 rtx new;
5936
f76b9db2
ILT
5937 if (WORDS_BIG_ENDIAN)
5938 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
5939 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
5940 if (BYTES_BIG_ENDIAN)
5941 /* Adjust the address so that the address-after-the-data is
5942 unchanged. */
5943 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
5944 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
38a448ca 5945 new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
7afe21cc
RK
5946 if (! memory_address_p (mode, XEXP (new, 0)))
5947 return 0;
7afe21cc 5948 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
c6df88cb 5949 MEM_COPY_ATTRIBUTES (new, x);
7afe21cc
RK
5950 return new;
5951 }
5952 else
5953 return 0;
5954}
5955\f
5956/* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
5957 branch. It will be zero if not.
5958
5959 In certain cases, this can cause us to add an equivalence. For example,
5960 if we are following the taken case of
5961 if (i == 2)
5962 we can add the fact that `i' and '2' are now equivalent.
5963
5964 In any case, we can record that this comparison was passed. If the same
5965 comparison is seen later, we will know its value. */
5966
5967static void
5968record_jump_equiv (insn, taken)
5969 rtx insn;
5970 int taken;
5971{
5972 int cond_known_true;
5973 rtx op0, op1;
13c9910f 5974 enum machine_mode mode, mode0, mode1;
7afe21cc
RK
5975 int reversed_nonequality = 0;
5976 enum rtx_code code;
5977
5978 /* Ensure this is the right kind of insn. */
5979 if (! condjump_p (insn) || simplejump_p (insn))
5980 return;
5981
5982 /* See if this jump condition is known true or false. */
5983 if (taken)
5984 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
5985 else
5986 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
5987
5988 /* Get the type of comparison being done and the operands being compared.
5989 If we had to reverse a non-equality condition, record that fact so we
5990 know that it isn't valid for floating-point. */
5991 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
5992 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
5993 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
5994
13c9910f 5995 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
7afe21cc
RK
5996 if (! cond_known_true)
5997 {
5998 reversed_nonequality = (code != EQ && code != NE);
5999 code = reverse_condition (code);
6000 }
6001
6002 /* The mode is the mode of the non-constant. */
13c9910f
RS
6003 mode = mode0;
6004 if (mode1 != VOIDmode)
6005 mode = mode1;
7afe21cc
RK
6006
6007 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
6008}
6009
/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
   REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
   Make any useful entries we can with that information.  Called from
   above function and called recursively.

   Uses and clobbers the file-scope hashing state (do_not_record,
   hash_arg_in_memory, hash_arg_in_struct) set as a side effect of HASH.  */

static void
record_jump_cond (code, mode, op0, op1, reversed_nonequality)
     enum rtx_code code;
     enum machine_mode mode;
     rtx op0, op1;
     int reversed_nonequality;
{
  unsigned op0_hash, op1_hash;
  int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
  struct table_elt *op0_elt, *op1_elt;

  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
     we know that they are also equal in the smaller mode (this is also
     true for all smaller modes whether or not there is a SUBREG, but
     is not worth testing for with no SUBREG.  */

  /* Note that GET_MODE (op0) may not equal MODE.  */
  if (code == EQ && GET_CODE (op0) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (op0))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = gen_lowpart_if_possible (inner_mode, op1);

      /* Recurse on the inner register paired with the lowpart of OP1.  */
      record_jump_cond (code, mode, SUBREG_REG (op0),
			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
			reversed_nonequality);
    }

  if (code == EQ && GET_CODE (op1) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (op1))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = gen_lowpart_if_possible (inner_mode, op0);

      record_jump_cond (code, mode, SUBREG_REG (op1),
			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
			reversed_nonequality);
    }

  /* Similarly, if this is an NE comparison, and either is a SUBREG
     making a smaller mode, we know the whole thing is also NE.  */

  /* Note that GET_MODE (op0) may not equal MODE;
     if we test MODE instead, we can get an infinite recursion
     alternating between two modes each wider than MODE.  */

  if (code == NE && GET_CODE (op0) == SUBREG
      && subreg_lowpart_p (op0)
      && (GET_MODE_SIZE (GET_MODE (op0))
	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = gen_lowpart_if_possible (inner_mode, op1);

      record_jump_cond (code, mode, SUBREG_REG (op0),
			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
			reversed_nonequality);
    }

  if (code == NE && GET_CODE (op1) == SUBREG
      && subreg_lowpart_p (op1)
      && (GET_MODE_SIZE (GET_MODE (op1))
	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = gen_lowpart_if_possible (inner_mode, op0);

      record_jump_cond (code, mode, SUBREG_REG (op1),
			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
			reversed_nonequality);
    }

  /* Hash both operands.  The globals must be cleared before each HASH,
     which sets them as side effects.  */

  do_not_record = 0;
  hash_arg_in_memory = 0;
  hash_arg_in_struct = 0;
  op0_hash = HASH (op0, mode);
  op0_in_memory = hash_arg_in_memory;
  op0_in_struct = hash_arg_in_struct;

  /* Give up on operands that must not be recorded (e.g. volatile).  */
  if (do_not_record)
    return;

  do_not_record = 0;
  hash_arg_in_memory = 0;
  hash_arg_in_struct = 0;
  op1_hash = HASH (op1, mode);
  op1_in_memory = hash_arg_in_memory;
  op1_in_struct = hash_arg_in_struct;

  if (do_not_record)
    return;

  /* Look up both operands.  */
  op0_elt = lookup (op0, op0_hash, mode);
  op1_elt = lookup (op1, op1_hash, mode);

  /* If both operands are already equivalent or if they are not in the
     table but are identical, do nothing.  */
  if ((op0_elt != 0 && op1_elt != 0
       && op0_elt->first_same_value == op1_elt->first_same_value)
      || op0 == op1 || rtx_equal_p (op0, op1))
    return;

  /* If we aren't setting two things equal all we can do is save this
     comparison.   Similarly if this is floating-point.  In the latter
     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
     If we record the equality, we might inadvertently delete code
     whose intent was to change -0 to +0.  */

  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
    {
      /* If we reversed a floating-point comparison, if OP0 is not a
	 register, or if OP1 is neither a register or constant, we can't
	 do anything.  */

      if (GET_CODE (op1) != REG)
	op1 = equiv_constant (op1);

      if ((reversed_nonequality && FLOAT_MODE_P (mode))
	  || GET_CODE (op0) != REG || op1 == 0)
	return;

      /* Put OP0 in the hash table if it isn't already.  This gives it a
	 new quantity number.  */
      if (op0_elt == 0)
	{
	  if (insert_regs (op0, NULL_PTR, 0))
	    {
	      rehash_using_reg (op0);
	      op0_hash = HASH (op0, mode);

	      /* If OP0 is contained in OP1, this changes its hash code
		 as well.  Faster to rehash than to check, except
		 for the simple case of a constant.  */
	      if (! CONSTANT_P (op1))
		op1_hash = HASH (op1,mode);
	    }

	  op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
	  op0_elt->in_memory = op0_in_memory;
	  op0_elt->in_struct = op0_in_struct;
	}

      /* Record the comparison against OP0's quantity for later reuse.  */
      qty_comparison_code[reg_qty[REGNO (op0)]] = code;
      if (GET_CODE (op1) == REG)
	{
	  /* Look it up again--in case op0 and op1 are the same.  */
	  op1_elt = lookup (op1, op1_hash, mode);

	  /* Put OP1 in the hash table so it gets a new quantity number.  */
	  if (op1_elt == 0)
	    {
	      if (insert_regs (op1, NULL_PTR, 0))
		{
		  rehash_using_reg (op1);
		  op1_hash = HASH (op1, mode);
		}

	      op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
	      op1_elt->in_memory = op1_in_memory;
	      op1_elt->in_struct = op1_in_struct;
	    }

	  qty_comparison_qty[reg_qty[REGNO (op0)]] = reg_qty[REGNO (op1)];
	  qty_comparison_const[reg_qty[REGNO (op0)]] = 0;
	}
      else
	{
	  /* OP1 is a constant; -1 marks "compared against a constant".  */
	  qty_comparison_qty[reg_qty[REGNO (op0)]] = -1;
	  qty_comparison_const[reg_qty[REGNO (op0)]] = op1;
	}

      return;
    }

  /* This is a non-float EQ: the operands are genuinely equivalent.
     If either side is still missing an equivalence, make it now,
     then merge the equivalences.  */

  if (op0_elt == 0)
    {
      if (insert_regs (op0, NULL_PTR, 0))
	{
	  rehash_using_reg (op0);
	  op0_hash = HASH (op0, mode);
	}

      op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
      op0_elt->in_memory = op0_in_memory;
      op0_elt->in_struct = op0_in_struct;
    }

  if (op1_elt == 0)
    {
      if (insert_regs (op1, NULL_PTR, 0))
	{
	  rehash_using_reg (op1);
	  op1_hash = HASH (op1, mode);
	}

      op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
      op1_elt->in_memory = op1_in_memory;
      op1_elt->in_struct = op1_in_struct;
    }

  merge_equiv_classes (op0_elt, op1_elt);
  last_jump_equiv_class = op0_elt;
}
6226\f
/* CSE processing for one instruction.
   First simplify sources and addresses of all assignments
   in the instruction, using previously-computed equivalent values.
   Then install the new sources and destinations in the table
   of available values.

   If LIBCALL_INSN is nonzero, don't record any equivalence made in
   the insn.  It means that INSN is inside a libcall block.  In this
   case LIBCALL_INSN is the corresponding insn with REG_LIBCALL.  */
7afe21cc
RK
6236
/* Data on one SET contained in the instruction.  One struct set is
   filled in per SET by cse_insn; fields are computed incrementally
   as the insn is analyzed.  */

struct set
{
  /* The SET rtx itself.  */
  rtx rtl;
  /* The SET_SRC of the rtx (the original value, if it is changing).  */
  rtx src;
  /* The hash-table element for the SET_SRC of the SET.  */
  struct table_elt *src_elt;
  /* Hash value for the SET_SRC.  */
  unsigned src_hash;
  /* Hash value for the SET_DEST.  */
  unsigned dest_hash;
  /* The SET_DEST, with SUBREG, etc., stripped.  */
  rtx inner_dest;
  /* Place where the pointer to the INNER_DEST was found.  */
  rtx *inner_dest_loc;
  /* Nonzero if the SET_SRC is in memory.  */
  char src_in_memory;
  /* Nonzero if the SET_SRC is in a structure.  */
  char src_in_struct;
  /* Nonzero if the SET_SRC contains something
     whose value cannot be predicted and understood.  */
  char src_volatile;
  /* Original machine mode, in case it becomes a CONST_INT.  */
  enum machine_mode mode;
  /* A constant equivalent for SET_SRC, if any.  */
  rtx src_const;
  /* Hash value of constant equivalent for SET_SRC.  */
  unsigned src_const_hash;
  /* Table entry for constant equivalent for SET_SRC, if any.  */
  struct table_elt *src_const_elt;
};
6271
6272static void
7bd8b2a8 6273cse_insn (insn, libcall_insn)
7afe21cc 6274 rtx insn;
7bd8b2a8 6275 rtx libcall_insn;
7afe21cc
RK
6276{
6277 register rtx x = PATTERN (insn);
7afe21cc 6278 register int i;
92f9aa51 6279 rtx tem;
7afe21cc
RK
6280 register int n_sets = 0;
6281
2d8b0f3a 6282#ifdef HAVE_cc0
7afe21cc
RK
6283 /* Records what this insn does to set CC0. */
6284 rtx this_insn_cc0 = 0;
135d84b8 6285 enum machine_mode this_insn_cc0_mode = VOIDmode;
2d8b0f3a 6286#endif
7afe21cc
RK
6287
6288 rtx src_eqv = 0;
6289 struct table_elt *src_eqv_elt = 0;
6290 int src_eqv_volatile;
6291 int src_eqv_in_memory;
6292 int src_eqv_in_struct;
2197a88a 6293 unsigned src_eqv_hash;
7afe21cc
RK
6294
6295 struct set *sets;
6296
6297 this_insn = insn;
7afe21cc
RK
6298
6299 /* Find all the SETs and CLOBBERs in this instruction.
6300 Record all the SETs in the array `set' and count them.
6301 Also determine whether there is a CLOBBER that invalidates
6302 all memory references, or all references at varying addresses. */
6303
f1e7c95f
RK
6304 if (GET_CODE (insn) == CALL_INSN)
6305 {
6306 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6307 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
bb4034b3 6308 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
f1e7c95f
RK
6309 }
6310
7afe21cc
RK
6311 if (GET_CODE (x) == SET)
6312 {
6313 sets = (struct set *) alloca (sizeof (struct set));
6314 sets[0].rtl = x;
6315
6316 /* Ignore SETs that are unconditional jumps.
6317 They never need cse processing, so this does not hurt.
6318 The reason is not efficiency but rather
6319 so that we can test at the end for instructions
6320 that have been simplified to unconditional jumps
6321 and not be misled by unchanged instructions
6322 that were unconditional jumps to begin with. */
6323 if (SET_DEST (x) == pc_rtx
6324 && GET_CODE (SET_SRC (x)) == LABEL_REF)
6325 ;
6326
6327 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
6328 The hard function value register is used only once, to copy to
6329 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
6330 Ensure we invalidate the destination register. On the 80386 no
7722328e 6331 other code would invalidate it since it is a fixed_reg.
0f41302f 6332 We need not check the return of apply_change_group; see canon_reg. */
7afe21cc
RK
6333
6334 else if (GET_CODE (SET_SRC (x)) == CALL)
6335 {
6336 canon_reg (SET_SRC (x), insn);
77fa0940 6337 apply_change_group ();
7afe21cc 6338 fold_rtx (SET_SRC (x), insn);
bb4034b3 6339 invalidate (SET_DEST (x), VOIDmode);
7afe21cc
RK
6340 }
6341 else
6342 n_sets = 1;
6343 }
6344 else if (GET_CODE (x) == PARALLEL)
6345 {
6346 register int lim = XVECLEN (x, 0);
6347
6348 sets = (struct set *) alloca (lim * sizeof (struct set));
6349
6350 /* Find all regs explicitly clobbered in this insn,
6351 and ensure they are not replaced with any other regs
6352 elsewhere in this insn.
6353 When a reg that is clobbered is also used for input,
6354 we should presume that that is for a reason,
6355 and we should not substitute some other register
6356 which is not supposed to be clobbered.
6357 Therefore, this loop cannot be merged into the one below
830a38ee 6358 because a CALL may precede a CLOBBER and refer to the
7afe21cc
RK
6359 value clobbered. We must not let a canonicalization do
6360 anything in that case. */
6361 for (i = 0; i < lim; i++)
6362 {
6363 register rtx y = XVECEXP (x, 0, i);
2708da92
RS
6364 if (GET_CODE (y) == CLOBBER)
6365 {
6366 rtx clobbered = XEXP (y, 0);
6367
6368 if (GET_CODE (clobbered) == REG
6369 || GET_CODE (clobbered) == SUBREG)
bb4034b3 6370 invalidate (clobbered, VOIDmode);
2708da92
RS
6371 else if (GET_CODE (clobbered) == STRICT_LOW_PART
6372 || GET_CODE (clobbered) == ZERO_EXTRACT)
bb4034b3 6373 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
2708da92 6374 }
7afe21cc
RK
6375 }
6376
6377 for (i = 0; i < lim; i++)
6378 {
6379 register rtx y = XVECEXP (x, 0, i);
6380 if (GET_CODE (y) == SET)
6381 {
7722328e
RK
6382 /* As above, we ignore unconditional jumps and call-insns and
6383 ignore the result of apply_change_group. */
7afe21cc
RK
6384 if (GET_CODE (SET_SRC (y)) == CALL)
6385 {
6386 canon_reg (SET_SRC (y), insn);
77fa0940 6387 apply_change_group ();
7afe21cc 6388 fold_rtx (SET_SRC (y), insn);
bb4034b3 6389 invalidate (SET_DEST (y), VOIDmode);
7afe21cc
RK
6390 }
6391 else if (SET_DEST (y) == pc_rtx
6392 && GET_CODE (SET_SRC (y)) == LABEL_REF)
6393 ;
6394 else
6395 sets[n_sets++].rtl = y;
6396 }
6397 else if (GET_CODE (y) == CLOBBER)
6398 {
9ae8ffe7 6399 /* If we clobber memory, canon the address.
7afe21cc
RK
6400 This does nothing when a register is clobbered
6401 because we have already invalidated the reg. */
6402 if (GET_CODE (XEXP (y, 0)) == MEM)
9ae8ffe7 6403 canon_reg (XEXP (y, 0), NULL_RTX);
7afe21cc
RK
6404 }
6405 else if (GET_CODE (y) == USE
6406 && ! (GET_CODE (XEXP (y, 0)) == REG
6407 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
906c4e36 6408 canon_reg (y, NULL_RTX);
7afe21cc
RK
6409 else if (GET_CODE (y) == CALL)
6410 {
7722328e
RK
6411 /* The result of apply_change_group can be ignored; see
6412 canon_reg. */
7afe21cc 6413 canon_reg (y, insn);
77fa0940 6414 apply_change_group ();
7afe21cc
RK
6415 fold_rtx (y, insn);
6416 }
6417 }
6418 }
6419 else if (GET_CODE (x) == CLOBBER)
6420 {
6421 if (GET_CODE (XEXP (x, 0)) == MEM)
9ae8ffe7 6422 canon_reg (XEXP (x, 0), NULL_RTX);
7afe21cc
RK
6423 }
6424
6425 /* Canonicalize a USE of a pseudo register or memory location. */
6426 else if (GET_CODE (x) == USE
6427 && ! (GET_CODE (XEXP (x, 0)) == REG
6428 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
906c4e36 6429 canon_reg (XEXP (x, 0), NULL_RTX);
7afe21cc
RK
6430 else if (GET_CODE (x) == CALL)
6431 {
7722328e 6432 /* The result of apply_change_group can be ignored; see canon_reg. */
7afe21cc 6433 canon_reg (x, insn);
77fa0940 6434 apply_change_group ();
7afe21cc
RK
6435 fold_rtx (x, insn);
6436 }
6437
7b3ab05e
JW
6438 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
6439 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
6440 is handled specially for this case, and if it isn't set, then there will
9faa82d8 6441 be no equivalence for the destination. */
92f9aa51
RK
6442 if (n_sets == 1 && REG_NOTES (insn) != 0
6443 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
7b3ab05e
JW
6444 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
6445 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
92f9aa51 6446 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
7afe21cc
RK
6447
6448 /* Canonicalize sources and addresses of destinations.
6449 We do this in a separate pass to avoid problems when a MATCH_DUP is
6450 present in the insn pattern. In that case, we want to ensure that
6451 we don't break the duplicate nature of the pattern. So we will replace
6452 both operands at the same time. Otherwise, we would fail to find an
6453 equivalent substitution in the loop calling validate_change below.
7afe21cc
RK
6454
6455 We used to suppress canonicalization of DEST if it appears in SRC,
77fa0940 6456 but we don't do this any more. */
7afe21cc
RK
6457
6458 for (i = 0; i < n_sets; i++)
6459 {
6460 rtx dest = SET_DEST (sets[i].rtl);
6461 rtx src = SET_SRC (sets[i].rtl);
6462 rtx new = canon_reg (src, insn);
58873255 6463 int insn_code;
7afe21cc 6464
77fa0940
RK
6465 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
6466 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
6467 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
58873255
RK
6468 || (insn_code = recog_memoized (insn)) < 0
6469 || insn_n_dups[insn_code] > 0)
77fa0940 6470 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
7afe21cc
RK
6471 else
6472 SET_SRC (sets[i].rtl) = new;
6473
6474 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
6475 {
6476 validate_change (insn, &XEXP (dest, 1),
77fa0940 6477 canon_reg (XEXP (dest, 1), insn), 1);
7afe21cc 6478 validate_change (insn, &XEXP (dest, 2),
77fa0940 6479 canon_reg (XEXP (dest, 2), insn), 1);
7afe21cc
RK
6480 }
6481
6482 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
6483 || GET_CODE (dest) == ZERO_EXTRACT
6484 || GET_CODE (dest) == SIGN_EXTRACT)
6485 dest = XEXP (dest, 0);
6486
6487 if (GET_CODE (dest) == MEM)
6488 canon_reg (dest, insn);
6489 }
6490
77fa0940
RK
6491 /* Now that we have done all the replacements, we can apply the change
6492 group and see if they all work. Note that this will cause some
6493 canonicalizations that would have worked individually not to be applied
6494 because some other canonicalization didn't work, but this should not
7722328e
RK
6495 occur often.
6496
6497 The result of apply_change_group can be ignored; see canon_reg. */
77fa0940
RK
6498
6499 apply_change_group ();
6500
7afe21cc
RK
6501 /* Set sets[i].src_elt to the class each source belongs to.
6502 Detect assignments from or to volatile things
6503 and set set[i] to zero so they will be ignored
6504 in the rest of this function.
6505
6506 Nothing in this loop changes the hash table or the register chains. */
6507
6508 for (i = 0; i < n_sets; i++)
6509 {
6510 register rtx src, dest;
6511 register rtx src_folded;
6512 register struct table_elt *elt = 0, *p;
6513 enum machine_mode mode;
6514 rtx src_eqv_here;
6515 rtx src_const = 0;
6516 rtx src_related = 0;
6517 struct table_elt *src_const_elt = 0;
6518 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
6519 int src_related_cost = 10000, src_elt_cost = 10000;
6520 /* Set non-zero if we need to call force_const_mem on with the
6521 contents of src_folded before using it. */
6522 int src_folded_force_flag = 0;
6523
6524 dest = SET_DEST (sets[i].rtl);
6525 src = SET_SRC (sets[i].rtl);
6526
6527 /* If SRC is a constant that has no machine mode,
6528 hash it with the destination's machine mode.
6529 This way we can keep different modes separate. */
6530
6531 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6532 sets[i].mode = mode;
6533
6534 if (src_eqv)
6535 {
6536 enum machine_mode eqvmode = mode;
6537 if (GET_CODE (dest) == STRICT_LOW_PART)
6538 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6539 do_not_record = 0;
6540 hash_arg_in_memory = 0;
6541 hash_arg_in_struct = 0;
6542 src_eqv = fold_rtx (src_eqv, insn);
2197a88a 6543 src_eqv_hash = HASH (src_eqv, eqvmode);
7afe21cc
RK
6544
6545 /* Find the equivalence class for the equivalent expression. */
6546
6547 if (!do_not_record)
2197a88a 6548 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
7afe21cc
RK
6549
6550 src_eqv_volatile = do_not_record;
6551 src_eqv_in_memory = hash_arg_in_memory;
6552 src_eqv_in_struct = hash_arg_in_struct;
6553 }
6554
6555 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
6556 value of the INNER register, not the destination. So it is not
3826a3da 6557 a valid substitution for the source. But save it for later. */
7afe21cc
RK
6558 if (GET_CODE (dest) == STRICT_LOW_PART)
6559 src_eqv_here = 0;
6560 else
6561 src_eqv_here = src_eqv;
6562
6563 /* Simplify and foldable subexpressions in SRC. Then get the fully-
6564 simplified result, which may not necessarily be valid. */
6565 src_folded = fold_rtx (src, insn);
6566
e6a125a0
RK
6567#if 0
6568 /* ??? This caused bad code to be generated for the m68k port with -O2.
6569 Suppose src is (CONST_INT -1), and that after truncation src_folded
6570 is (CONST_INT 3). Suppose src_folded is then used for src_const.
6571 At the end we will add src and src_const to the same equivalence
6572 class. We now have 3 and -1 on the same equivalence class. This
6573 causes later instructions to be mis-optimized. */
7afe21cc
RK
6574 /* If storing a constant in a bitfield, pre-truncate the constant
6575 so we will be able to record it later. */
6576 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6577 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6578 {
6579 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6580
6581 if (GET_CODE (src) == CONST_INT
6582 && GET_CODE (width) == CONST_INT
906c4e36
RK
6583 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6584 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6585 src_folded
6586 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
6587 << INTVAL (width)) - 1));
7afe21cc 6588 }
e6a125a0 6589#endif
7afe21cc
RK
6590
6591 /* Compute SRC's hash code, and also notice if it
6592 should not be recorded at all. In that case,
6593 prevent any further processing of this assignment. */
6594 do_not_record = 0;
6595 hash_arg_in_memory = 0;
6596 hash_arg_in_struct = 0;
6597
6598 sets[i].src = src;
2197a88a 6599 sets[i].src_hash = HASH (src, mode);
7afe21cc
RK
6600 sets[i].src_volatile = do_not_record;
6601 sets[i].src_in_memory = hash_arg_in_memory;
6602 sets[i].src_in_struct = hash_arg_in_struct;
6603
50196afa
RK
6604 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
6605 a pseudo that is set more than once, do not record SRC. Using
6606 SRC as a replacement for anything else will be incorrect in that
6607 situation. Note that this usually occurs only for stack slots,
956d6950 6608 in which case all the RTL would be referring to SRC, so we don't
50196afa
RK
6609 lose any optimization opportunities by not having SRC in the
6610 hash table. */
6611
6612 if (GET_CODE (src) == MEM
6613 && find_reg_note (insn, REG_EQUIV, src) != 0
6614 && GET_CODE (dest) == REG
6615 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
b1f21e0a 6616 && REG_N_SETS (REGNO (dest)) != 1)
50196afa
RK
6617 sets[i].src_volatile = 1;
6618
0dadecf6
RK
6619#if 0
6620 /* It is no longer clear why we used to do this, but it doesn't
6621 appear to still be needed. So let's try without it since this
6622 code hurts cse'ing widened ops. */
7afe21cc
RK
6623 /* If source is a perverse subreg (such as QI treated as an SI),
6624 treat it as volatile. It may do the work of an SI in one context
6625 where the extra bits are not being used, but cannot replace an SI
6626 in general. */
6627 if (GET_CODE (src) == SUBREG
6628 && (GET_MODE_SIZE (GET_MODE (src))
6629 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6630 sets[i].src_volatile = 1;
0dadecf6 6631#endif
7afe21cc
RK
6632
6633 /* Locate all possible equivalent forms for SRC. Try to replace
6634 SRC in the insn with each cheaper equivalent.
6635
6636 We have the following types of equivalents: SRC itself, a folded
6637 version, a value given in a REG_EQUAL note, or a value related
6638 to a constant.
6639
6640 Each of these equivalents may be part of an additional class
6641 of equivalents (if more than one is in the table, they must be in
6642 the same class; we check for this).
6643
6644 If the source is volatile, we don't do any table lookups.
6645
6646 We note any constant equivalent for possible later use in a
6647 REG_NOTE. */
6648
6649 if (!sets[i].src_volatile)
2197a88a 6650 elt = lookup (src, sets[i].src_hash, mode);
7afe21cc
RK
6651
6652 sets[i].src_elt = elt;
6653
6654 if (elt && src_eqv_here && src_eqv_elt)
6655 {
6656 if (elt->first_same_value != src_eqv_elt->first_same_value)
6657 {
6658 /* The REG_EQUAL is indicating that two formerly distinct
6659 classes are now equivalent. So merge them. */
6660 merge_equiv_classes (elt, src_eqv_elt);
2197a88a
RK
6661 src_eqv_hash = HASH (src_eqv, elt->mode);
6662 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
7afe21cc
RK
6663 }
6664
6665 src_eqv_here = 0;
6666 }
6667
6668 else if (src_eqv_elt)
6669 elt = src_eqv_elt;
6670
6671 /* Try to find a constant somewhere and record it in `src_const'.
6672 Record its table element, if any, in `src_const_elt'. Look in
6673 any known equivalences first. (If the constant is not in the
2197a88a 6674 table, also set `sets[i].src_const_hash'). */
7afe21cc
RK
6675 if (elt)
6676 for (p = elt->first_same_value; p; p = p->next_same_value)
6677 if (p->is_const)
6678 {
6679 src_const = p->exp;
6680 src_const_elt = elt;
6681 break;
6682 }
6683
6684 if (src_const == 0
6685 && (CONSTANT_P (src_folded)
6686 /* Consider (minus (label_ref L1) (label_ref L2)) as
6687 "constant" here so we will record it. This allows us
6688 to fold switch statements when an ADDR_DIFF_VEC is used. */
6689 || (GET_CODE (src_folded) == MINUS
6690 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6691 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6692 src_const = src_folded, src_const_elt = elt;
6693 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6694 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6695
6696 /* If we don't know if the constant is in the table, get its
6697 hash code and look it up. */
6698 if (src_const && src_const_elt == 0)
6699 {
2197a88a
RK
6700 sets[i].src_const_hash = HASH (src_const, mode);
6701 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
7afe21cc
RK
6702 }
6703
6704 sets[i].src_const = src_const;
6705 sets[i].src_const_elt = src_const_elt;
6706
6707 /* If the constant and our source are both in the table, mark them as
6708 equivalent. Otherwise, if a constant is in the table but the source
6709 isn't, set ELT to it. */
6710 if (src_const_elt && elt
6711 && src_const_elt->first_same_value != elt->first_same_value)
6712 merge_equiv_classes (elt, src_const_elt);
6713 else if (src_const_elt && elt == 0)
6714 elt = src_const_elt;
6715
6716 /* See if there is a register linearly related to a constant
6717 equivalent of SRC. */
6718 if (src_const
6719 && (GET_CODE (src_const) == CONST
6720 || (src_const_elt && src_const_elt->related_value != 0)))
6721 {
6722 src_related = use_related_value (src_const, src_const_elt);
6723 if (src_related)
6724 {
6725 struct table_elt *src_related_elt
6726 = lookup (src_related, HASH (src_related, mode), mode);
6727 if (src_related_elt && elt)
6728 {
6729 if (elt->first_same_value
6730 != src_related_elt->first_same_value)
6731 /* This can occur when we previously saw a CONST
6732 involving a SYMBOL_REF and then see the SYMBOL_REF
6733 twice. Merge the involved classes. */
6734 merge_equiv_classes (elt, src_related_elt);
6735
6736 src_related = 0;
6737 src_related_elt = 0;
6738 }
6739 else if (src_related_elt && elt == 0)
6740 elt = src_related_elt;
6741 }
6742 }
6743
e4600702
RK
6744 /* See if we have a CONST_INT that is already in a register in a
6745 wider mode. */
6746
6747 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6748 && GET_MODE_CLASS (mode) == MODE_INT
6749 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6750 {
6751 enum machine_mode wider_mode;
6752
6753 for (wider_mode = GET_MODE_WIDER_MODE (mode);
6754 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6755 && src_related == 0;
6756 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6757 {
6758 struct table_elt *const_elt
6759 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6760
6761 if (const_elt == 0)
6762 continue;
6763
6764 for (const_elt = const_elt->first_same_value;
6765 const_elt; const_elt = const_elt->next_same_value)
6766 if (GET_CODE (const_elt->exp) == REG)
6767 {
6768 src_related = gen_lowpart_if_possible (mode,
6769 const_elt->exp);
6770 break;
6771 }
6772 }
6773 }
6774
d45cf215
RS
6775 /* Another possibility is that we have an AND with a constant in
6776 a mode narrower than a word. If so, it might have been generated
6777 as part of an "if" which would narrow the AND. If we already
6778 have done the AND in a wider mode, we can use a SUBREG of that
6779 value. */
6780
6781 if (flag_expensive_optimizations && ! src_related
6782 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6783 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6784 {
6785 enum machine_mode tmode;
38a448ca 6786 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
d45cf215
RS
6787
6788 for (tmode = GET_MODE_WIDER_MODE (mode);
6789 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6790 tmode = GET_MODE_WIDER_MODE (tmode))
6791 {
6792 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6793 struct table_elt *larger_elt;
6794
6795 if (inner)
6796 {
6797 PUT_MODE (new_and, tmode);
6798 XEXP (new_and, 0) = inner;
6799 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6800 if (larger_elt == 0)
6801 continue;
6802
6803 for (larger_elt = larger_elt->first_same_value;
6804 larger_elt; larger_elt = larger_elt->next_same_value)
6805 if (GET_CODE (larger_elt->exp) == REG)
6806 {
6807 src_related
6808 = gen_lowpart_if_possible (mode, larger_elt->exp);
6809 break;
6810 }
6811
6812 if (src_related)
6813 break;
6814 }
6815 }
6816 }
7bac1be0
RK
6817
6818#ifdef LOAD_EXTEND_OP
6819 /* See if a MEM has already been loaded with a widening operation;
6820 if it has, we can use a subreg of that. Many CISC machines
6821 also have such operations, but this is only likely to be
6822 beneficial these machines. */
6823
6824 if (flag_expensive_optimizations && src_related == 0
6825 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6826 && GET_MODE_CLASS (mode) == MODE_INT
6827 && GET_CODE (src) == MEM && ! do_not_record
6828 && LOAD_EXTEND_OP (mode) != NIL)
6829 {
6830 enum machine_mode tmode;
6831
6832 /* Set what we are trying to extend and the operation it might
6833 have been extended with. */
6834 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
6835 XEXP (memory_extend_rtx, 0) = src;
6836
6837 for (tmode = GET_MODE_WIDER_MODE (mode);
6838 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6839 tmode = GET_MODE_WIDER_MODE (tmode))
6840 {
6841 struct table_elt *larger_elt;
6842
6843 PUT_MODE (memory_extend_rtx, tmode);
6844 larger_elt = lookup (memory_extend_rtx,
6845 HASH (memory_extend_rtx, tmode), tmode);
6846 if (larger_elt == 0)
6847 continue;
6848
6849 for (larger_elt = larger_elt->first_same_value;
6850 larger_elt; larger_elt = larger_elt->next_same_value)
6851 if (GET_CODE (larger_elt->exp) == REG)
6852 {
6853 src_related = gen_lowpart_if_possible (mode,
6854 larger_elt->exp);
6855 break;
6856 }
6857
6858 if (src_related)
6859 break;
6860 }
6861 }
6862#endif /* LOAD_EXTEND_OP */
6863
7afe21cc
RK
6864 if (src == src_folded)
6865 src_folded = 0;
6866
1ff136fd
RH
6867 /* Folds of constant_p_rtx are to be preferred, since we do
6868 not wish any to live past CSE. */
6869 if (src && GET_CODE (src) == CONST
6870 && GET_CODE (XEXP (src, 0)) == CONSTANT_P_RTX)
6871 src = 0;
6872
7afe21cc
RK
6873 /* At this point, ELT, if non-zero, points to a class of expressions
6874 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
6875 and SRC_RELATED, if non-zero, each contain additional equivalent
6876 expressions. Prune these latter expressions by deleting expressions
6877 already in the equivalence class.
6878
6879 Check for an equivalent identical to the destination. If found,
6880 this is the preferred equivalent since it will likely lead to
6881 elimination of the insn. Indicate this by placing it in
6882 `src_related'. */
6883
6884 if (elt) elt = elt->first_same_value;
6885 for (p = elt; p; p = p->next_same_value)
6886 {
6887 enum rtx_code code = GET_CODE (p->exp);
6888
6889 /* If the expression is not valid, ignore it. Then we do not
6890 have to check for validity below. In most cases, we can use
6891 `rtx_equal_p', since canonicalization has already been done. */
6892 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
6893 continue;
6894
5a03c8c4
RK
6895 /* Also skip paradoxical subregs, unless that's what we're
6896 looking for. */
6897 if (code == SUBREG
6898 && (GET_MODE_SIZE (GET_MODE (p->exp))
6899 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
6900 && ! (src != 0
6901 && GET_CODE (src) == SUBREG
6902 && GET_MODE (src) == GET_MODE (p->exp)
6903 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
6904 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
6905 continue;
6906
7afe21cc
RK
6907 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
6908 src = 0;
6909 else if (src_folded && GET_CODE (src_folded) == code
6910 && rtx_equal_p (src_folded, p->exp))
6911 src_folded = 0;
6912 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
6913 && rtx_equal_p (src_eqv_here, p->exp))
6914 src_eqv_here = 0;
6915 else if (src_related && GET_CODE (src_related) == code
6916 && rtx_equal_p (src_related, p->exp))
6917 src_related = 0;
6918
6919 /* This is the same as the destination of the insns, we want
6920 to prefer it. Copy it to src_related. The code below will
6921 then give it a negative cost. */
6922 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
6923 src_related = dest;
6924
6925 }
6926
6927 /* Find the cheapest valid equivalent, trying all the available
6928 possibilities. Prefer items not in the hash table to ones
6929 that are when they are equal cost. Note that we can never
6930 worsen an insn as the current contents will also succeed.
05c33dd8 6931 If we find an equivalent identical to the destination, use it as best,
0f41302f 6932 since this insn will probably be eliminated in that case. */
7afe21cc
RK
6933 if (src)
6934 {
6935 if (rtx_equal_p (src, dest))
6936 src_cost = -1;
6937 else
6938 src_cost = COST (src);
6939 }
6940
6941 if (src_eqv_here)
6942 {
6943 if (rtx_equal_p (src_eqv_here, dest))
6944 src_eqv_cost = -1;
6945 else
6946 src_eqv_cost = COST (src_eqv_here);
6947 }
6948
6949 if (src_folded)
6950 {
6951 if (rtx_equal_p (src_folded, dest))
6952 src_folded_cost = -1;
6953 else
6954 src_folded_cost = COST (src_folded);
6955 }
6956
6957 if (src_related)
6958 {
6959 if (rtx_equal_p (src_related, dest))
6960 src_related_cost = -1;
6961 else
6962 src_related_cost = COST (src_related);
6963 }
6964
6965 /* If this was an indirect jump insn, a known label will really be
6966 cheaper even though it looks more expensive. */
6967 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
6968 src_folded = src_const, src_folded_cost = -1;
6969
6970 /* Terminate loop when replacement made. This must terminate since
6971 the current contents will be tested and will always be valid. */
6972 while (1)
6973 {
7bd8b2a8 6974 rtx trial, old_src;
7afe21cc
RK
6975
6976 /* Skip invalid entries. */
6977 while (elt && GET_CODE (elt->exp) != REG
6978 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6979 elt = elt->next_same_value;
5a03c8c4
RK
6980
6981 /* A paradoxical subreg would be bad here: it'll be the right
6982 size, but later may be adjusted so that the upper bits aren't
6983 what we want. So reject it. */
6984 if (elt != 0
6985 && GET_CODE (elt->exp) == SUBREG
6986 && (GET_MODE_SIZE (GET_MODE (elt->exp))
6987 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
6988 /* It is okay, though, if the rtx we're trying to match
6989 will ignore any of the bits we can't predict. */
6990 && ! (src != 0
6991 && GET_CODE (src) == SUBREG
6992 && GET_MODE (src) == GET_MODE (elt->exp)
6993 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
6994 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
6995 {
6996 elt = elt->next_same_value;
6997 continue;
6998 }
7afe21cc
RK
6999
7000 if (elt) src_elt_cost = elt->cost;
7001
7002 /* Find cheapest and skip it for the next time. For items
7003 of equal cost, use this order:
7004 src_folded, src, src_eqv, src_related and hash table entry. */
7005 if (src_folded_cost <= src_cost
7006 && src_folded_cost <= src_eqv_cost
7007 && src_folded_cost <= src_related_cost
7008 && src_folded_cost <= src_elt_cost)
7009 {
7010 trial = src_folded, src_folded_cost = 10000;
7011 if (src_folded_force_flag)
7012 trial = force_const_mem (mode, trial);
7013 }
7014 else if (src_cost <= src_eqv_cost
7015 && src_cost <= src_related_cost
7016 && src_cost <= src_elt_cost)
7017 trial = src, src_cost = 10000;
7018 else if (src_eqv_cost <= src_related_cost
7019 && src_eqv_cost <= src_elt_cost)
0af62b41 7020 trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
7afe21cc 7021 else if (src_related_cost <= src_elt_cost)
0af62b41 7022 trial = copy_rtx (src_related), src_related_cost = 10000;
7afe21cc
RK
7023 else
7024 {
05c33dd8 7025 trial = copy_rtx (elt->exp);
7afe21cc
RK
7026 elt = elt->next_same_value;
7027 src_elt_cost = 10000;
7028 }
7029
7030 /* We don't normally have an insn matching (set (pc) (pc)), so
7031 check for this separately here. We will delete such an
7032 insn below.
7033
7034 Tablejump insns contain a USE of the table, so simply replacing
7035 the operand with the constant won't match. This is simply an
7036 unconditional branch, however, and is therefore valid. Just
7037 insert the substitution here and we will delete and re-emit
7038 the insn later. */
7039
7bd8b2a8
JL
7040 /* Keep track of the original SET_SRC so that we can fix notes
7041 on libcall instructions. */
7042 old_src = SET_SRC (sets[i].rtl);
7043
7afe21cc
RK
7044 if (n_sets == 1 && dest == pc_rtx
7045 && (trial == pc_rtx
7046 || (GET_CODE (trial) == LABEL_REF
7047 && ! condjump_p (insn))))
7048 {
7049 /* If TRIAL is a label in front of a jump table, we are
7050 really falling through the switch (this is how casesi
7051 insns work), so we must branch around the table. */
7052 if (GET_CODE (trial) == CODE_LABEL
7053 && NEXT_INSN (trial) != 0
7054 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
7055 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
7056 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
7057
38a448ca 7058 trial = gen_rtx_LABEL_REF (Pmode, get_label_after (trial));
7afe21cc
RK
7059
7060 SET_SRC (sets[i].rtl) = trial;
44333223 7061 cse_jumps_altered = 1;
7afe21cc
RK
7062 break;
7063 }
7064
7065 /* Look for a substitution that makes a valid insn. */
7066 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
05c33dd8 7067 {
7bd8b2a8
JL
7068 /* If we just made a substitution inside a libcall, then we
7069 need to make the same substitution in any notes attached
7070 to the RETVAL insn. */
1ed0205e
VM
7071 if (libcall_insn
7072 && (GET_CODE (old_src) == REG
7073 || GET_CODE (old_src) == SUBREG
7074 || GET_CODE (old_src) == MEM))
7bd8b2a8
JL
7075 replace_rtx (REG_NOTES (libcall_insn), old_src,
7076 canon_reg (SET_SRC (sets[i].rtl), insn));
7077
7722328e
RK
7078 /* The result of apply_change_group can be ignored; see
7079 canon_reg. */
7080
7081 validate_change (insn, &SET_SRC (sets[i].rtl),
7082 canon_reg (SET_SRC (sets[i].rtl), insn),
7083 1);
6702af89 7084 apply_change_group ();
05c33dd8
RK
7085 break;
7086 }
7afe21cc
RK
7087
7088 /* If we previously found constant pool entries for
7089 constants and this is a constant, try making a
7090 pool entry. Put it in src_folded unless we already have done
7091 this since that is where it likely came from. */
7092
7093 else if (constant_pool_entries_cost
7094 && CONSTANT_P (trial)
1bbd065b
RK
7095 && ! (GET_CODE (trial) == CONST
7096 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
7097 && (src_folded == 0
7098 || (GET_CODE (src_folded) != MEM
7099 && ! src_folded_force_flag))
9ae8ffe7
JL
7100 && GET_MODE_CLASS (mode) != MODE_CC
7101 && mode != VOIDmode)
7afe21cc
RK
7102 {
7103 src_folded_force_flag = 1;
7104 src_folded = trial;
7105 src_folded_cost = constant_pool_entries_cost;
7106 }
7107 }
7108
7109 src = SET_SRC (sets[i].rtl);
7110
7111 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
7112 However, there is an important exception: If both are registers
7113 that are not the head of their equivalence class, replace SET_SRC
7114 with the head of the class. If we do not do this, we will have
7115 both registers live over a portion of the basic block. This way,
7116 their lifetimes will likely abut instead of overlapping. */
7117 if (GET_CODE (dest) == REG
7118 && REGNO_QTY_VALID_P (REGNO (dest))
7119 && qty_mode[reg_qty[REGNO (dest)]] == GET_MODE (dest)
7120 && qty_first_reg[reg_qty[REGNO (dest)]] != REGNO (dest)
7121 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
7122 /* Don't do this if the original insn had a hard reg as
7123 SET_SRC. */
7124 && (GET_CODE (sets[i].src) != REG
7125 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
7126 /* We can't call canon_reg here because it won't do anything if
7127 SRC is a hard register. */
7128 {
7129 int first = qty_first_reg[reg_qty[REGNO (src)]];
759bd8b7
R
7130 rtx new_src
7131 = (first >= FIRST_PSEUDO_REGISTER
7132 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
7133
7134 /* We must use validate-change even for this, because this
7135 might be a special no-op instruction, suitable only to
7136 tag notes onto. */
7137 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
7138 {
7139 src = new_src;
7140 /* If we had a constant that is cheaper than what we are now
7141 setting SRC to, use that constant. We ignored it when we
7142 thought we could make this into a no-op. */
7143 if (src_const && COST (src_const) < COST (src)
7144 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const,
7145 0))
7146 src = src_const;
7147 }
7afe21cc
RK
7148 }
7149
7150 /* If we made a change, recompute SRC values. */
7151 if (src != sets[i].src)
7152 {
7153 do_not_record = 0;
7154 hash_arg_in_memory = 0;
7155 hash_arg_in_struct = 0;
7156 sets[i].src = src;
2197a88a 7157 sets[i].src_hash = HASH (src, mode);
7afe21cc
RK
7158 sets[i].src_volatile = do_not_record;
7159 sets[i].src_in_memory = hash_arg_in_memory;
7160 sets[i].src_in_struct = hash_arg_in_struct;
2197a88a 7161 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
7afe21cc
RK
7162 }
7163
7164 /* If this is a single SET, we are setting a register, and we have an
7165 equivalent constant, we want to add a REG_NOTE. We don't want
7166 to write a REG_EQUAL note for a constant pseudo since verifying that
d45cf215 7167 that pseudo hasn't been eliminated is a pain. Such a note also
7afe21cc
RK
7168 won't help anything. */
7169 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
7170 && GET_CODE (src_const) != REG)
7171 {
92f9aa51 7172 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7afe21cc
RK
7173
7174 /* Record the actual constant value in a REG_EQUAL note, making
7175 a new one if one does not already exist. */
7176 if (tem)
7177 XEXP (tem, 0) = src_const;
7178 else
38a448ca
RH
7179 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
7180 src_const, REG_NOTES (insn));
7afe21cc
RK
7181
7182 /* If storing a constant value in a register that
7183 previously held the constant value 0,
7184 record this fact with a REG_WAS_0 note on this insn.
7185
7186 Note that the *register* is required to have previously held 0,
7187 not just any register in the quantity and we must point to the
7188 insn that set that register to zero.
7189
7190 Rather than track each register individually, we just see if
7191 the last set for this quantity was for this register. */
7192
7193 if (REGNO_QTY_VALID_P (REGNO (dest))
7194 && qty_const[reg_qty[REGNO (dest)]] == const0_rtx)
7195 {
7196 /* See if we previously had a REG_WAS_0 note. */
906c4e36 7197 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7afe21cc
RK
7198 rtx const_insn = qty_const_insn[reg_qty[REGNO (dest)]];
7199
7200 if ((tem = single_set (const_insn)) != 0
7201 && rtx_equal_p (SET_DEST (tem), dest))
7202 {
7203 if (note)
7204 XEXP (note, 0) = const_insn;
7205 else
38a448ca
RH
7206 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_WAS_0,
7207 const_insn,
7208 REG_NOTES (insn));
7afe21cc
RK
7209 }
7210 }
7211 }
7212
7213 /* Now deal with the destination. */
7214 do_not_record = 0;
7215 sets[i].inner_dest_loc = &SET_DEST (sets[0].rtl);
7216
7217 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
7218 to the MEM or REG within it. */
7219 while (GET_CODE (dest) == SIGN_EXTRACT
7220 || GET_CODE (dest) == ZERO_EXTRACT
7221 || GET_CODE (dest) == SUBREG
7222 || GET_CODE (dest) == STRICT_LOW_PART)
7223 {
7224 sets[i].inner_dest_loc = &XEXP (dest, 0);
7225 dest = XEXP (dest, 0);
7226 }
7227
7228 sets[i].inner_dest = dest;
7229
7230 if (GET_CODE (dest) == MEM)
7231 {
9ae8ffe7
JL
7232#ifdef PUSH_ROUNDING
7233 /* Stack pushes invalidate the stack pointer. */
7234 rtx addr = XEXP (dest, 0);
7235 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7236 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7237 && XEXP (addr, 0) == stack_pointer_rtx)
7238 invalidate (stack_pointer_rtx, Pmode);
7239#endif
7afe21cc 7240 dest = fold_rtx (dest, insn);
7afe21cc
RK
7241 }
7242
7243 /* Compute the hash code of the destination now,
7244 before the effects of this instruction are recorded,
7245 since the register values used in the address computation
7246 are those before this instruction. */
2197a88a 7247 sets[i].dest_hash = HASH (dest, mode);
7afe21cc
RK
7248
7249 /* Don't enter a bit-field in the hash table
7250 because the value in it after the store
7251 may not equal what was stored, due to truncation. */
7252
7253 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
7254 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
7255 {
7256 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
7257
7258 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
7259 && GET_CODE (width) == CONST_INT
906c4e36
RK
7260 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
7261 && ! (INTVAL (src_const)
7262 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
7afe21cc
RK
7263 /* Exception: if the value is constant,
7264 and it won't be truncated, record it. */
7265 ;
7266 else
7267 {
7268 /* This is chosen so that the destination will be invalidated
7269 but no new value will be recorded.
7270 We must invalidate because sometimes constant
7271 values can be recorded for bitfields. */
7272 sets[i].src_elt = 0;
7273 sets[i].src_volatile = 1;
7274 src_eqv = 0;
7275 src_eqv_elt = 0;
7276 }
7277 }
7278
7279 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
7280 the insn. */
7281 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
7282 {
7283 PUT_CODE (insn, NOTE);
7284 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7285 NOTE_SOURCE_FILE (insn) = 0;
7286 cse_jumps_altered = 1;
7287 /* One less use of the label this insn used to jump to. */
85c3ba60
JL
7288 if (JUMP_LABEL (insn) != 0)
7289 --LABEL_NUSES (JUMP_LABEL (insn));
7afe21cc
RK
7290 /* No more processing for this set. */
7291 sets[i].rtl = 0;
7292 }
7293
7294 /* If this SET is now setting PC to a label, we know it used to
7295 be a conditional or computed branch. So we see if we can follow
7296 it. If it was a computed branch, delete it and re-emit. */
7297 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
7298 {
7299 rtx p;
7300
7301 /* If this is not in the format for a simple branch and
7302 we are the only SET in it, re-emit it. */
7303 if (! simplejump_p (insn) && n_sets == 1)
7304 {
7305 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
7306 JUMP_LABEL (new) = XEXP (src, 0);
7307 LABEL_NUSES (XEXP (src, 0))++;
7308 delete_insn (insn);
7309 insn = new;
7310 }
31dcf83f
RS
7311 else
7312 /* Otherwise, force rerecognition, since it probably had
7313 a different pattern before.
7314 This shouldn't really be necessary, since whatever
7315 changed the source value above should have done this.
7316 Until the right place is found, might as well do this here. */
7317 INSN_CODE (insn) = -1;
7afe21cc
RK
7318
7319 /* Now that we've converted this jump to an unconditional jump,
7320 there is dead code after it. Delete the dead code until we
7321 reach a BARRIER, the end of the function, or a label. Do
7322 not delete NOTEs except for NOTE_INSN_DELETED since later
7323 phases assume these notes are retained. */
7324
7325 p = insn;
7326
7327 while (NEXT_INSN (p) != 0
7328 && GET_CODE (NEXT_INSN (p)) != BARRIER
7329 && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
7330 {
7331 if (GET_CODE (NEXT_INSN (p)) != NOTE
7332 || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
7333 delete_insn (NEXT_INSN (p));
7334 else
7335 p = NEXT_INSN (p);
7336 }
7337
7338 /* If we don't have a BARRIER immediately after INSN, put one there.
7339 Much code assumes that there are no NOTEs between a JUMP_INSN and
7340 BARRIER. */
7341
7342 if (NEXT_INSN (insn) == 0
7343 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
783e5bca 7344 emit_barrier_before (NEXT_INSN (insn));
7afe21cc
RK
7345
7346 /* We might have two BARRIERs separated by notes. Delete the second
7347 one if so. */
7348
538b78e7
RS
7349 if (p != insn && NEXT_INSN (p) != 0
7350 && GET_CODE (NEXT_INSN (p)) == BARRIER)
7afe21cc
RK
7351 delete_insn (NEXT_INSN (p));
7352
7353 cse_jumps_altered = 1;
7354 sets[i].rtl = 0;
7355 }
7356
c2a47e48
RK
7357 /* If destination is volatile, invalidate it and then do no further
7358 processing for this assignment. */
7afe21cc
RK
7359
7360 else if (do_not_record)
c2a47e48
RK
7361 {
7362 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7363 || GET_CODE (dest) == MEM)
bb4034b3 7364 invalidate (dest, VOIDmode);
2708da92
RS
7365 else if (GET_CODE (dest) == STRICT_LOW_PART
7366 || GET_CODE (dest) == ZERO_EXTRACT)
bb4034b3 7367 invalidate (XEXP (dest, 0), GET_MODE (dest));
c2a47e48
RK
7368 sets[i].rtl = 0;
7369 }
7afe21cc
RK
7370
7371 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
2197a88a 7372 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
7afe21cc
RK
7373
7374#ifdef HAVE_cc0
7375 /* If setting CC0, record what it was set to, or a constant, if it
7376 is equivalent to a constant. If it is being set to a floating-point
7377 value, make a COMPARE with the appropriate constant of 0. If we
7378 don't do this, later code can interpret this as a test against
7379 const0_rtx, which can cause problems if we try to put it into an
7380 insn as a floating-point operand. */
7381 if (dest == cc0_rtx)
7382 {
7383 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
7384 this_insn_cc0_mode = mode;
cbf6a543 7385 if (FLOAT_MODE_P (mode))
38a448ca
RH
7386 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
7387 CONST0_RTX (mode));
7afe21cc
RK
7388 }
7389#endif
7390 }
7391
7392 /* Now enter all non-volatile source expressions in the hash table
7393 if they are not already present.
7394 Record their equivalence classes in src_elt.
7395 This way we can insert the corresponding destinations into
7396 the same classes even if the actual sources are no longer in them
7397 (having been invalidated). */
7398
7399 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
7400 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
7401 {
7402 register struct table_elt *elt;
7403 register struct table_elt *classp = sets[0].src_elt;
7404 rtx dest = SET_DEST (sets[0].rtl);
7405 enum machine_mode eqvmode = GET_MODE (dest);
7406
7407 if (GET_CODE (dest) == STRICT_LOW_PART)
7408 {
7409 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
7410 classp = 0;
7411 }
7412 if (insert_regs (src_eqv, classp, 0))
8ae2b8f6
JW
7413 {
7414 rehash_using_reg (src_eqv);
7415 src_eqv_hash = HASH (src_eqv, eqvmode);
7416 }
2197a88a 7417 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7afe21cc
RK
7418 elt->in_memory = src_eqv_in_memory;
7419 elt->in_struct = src_eqv_in_struct;
7420 src_eqv_elt = elt;
f7911249
JW
7421
7422 /* Check to see if src_eqv_elt is the same as a set source which
7423 does not yet have an elt, and if so set the elt of the set source
7424 to src_eqv_elt. */
7425 for (i = 0; i < n_sets; i++)
7426 if (sets[i].rtl && sets[i].src_elt == 0
7427 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
7428 sets[i].src_elt = src_eqv_elt;
7afe21cc
RK
7429 }
7430
7431 for (i = 0; i < n_sets; i++)
7432 if (sets[i].rtl && ! sets[i].src_volatile
7433 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
7434 {
7435 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
7436 {
7437 /* REG_EQUAL in setting a STRICT_LOW_PART
7438 gives an equivalent for the entire destination register,
7439 not just for the subreg being stored in now.
7440 This is a more interesting equivalence, so we arrange later
7441 to treat the entire reg as the destination. */
7442 sets[i].src_elt = src_eqv_elt;
2197a88a 7443 sets[i].src_hash = src_eqv_hash;
7afe21cc
RK
7444 }
7445 else
7446 {
7447 /* Insert source and constant equivalent into hash table, if not
7448 already present. */
7449 register struct table_elt *classp = src_eqv_elt;
7450 register rtx src = sets[i].src;
7451 register rtx dest = SET_DEST (sets[i].rtl);
7452 enum machine_mode mode
7453 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
7454
7455 if (sets[i].src_elt == 0)
7456 {
7457 register struct table_elt *elt;
7458
7459 /* Note that these insert_regs calls cannot remove
7460 any of the src_elt's, because they would have failed to
7461 match if not still valid. */
7462 if (insert_regs (src, classp, 0))
8ae2b8f6
JW
7463 {
7464 rehash_using_reg (src);
7465 sets[i].src_hash = HASH (src, mode);
7466 }
2197a88a 7467 elt = insert (src, classp, sets[i].src_hash, mode);
7afe21cc
RK
7468 elt->in_memory = sets[i].src_in_memory;
7469 elt->in_struct = sets[i].src_in_struct;
7470 sets[i].src_elt = classp = elt;
7471 }
7472
7473 if (sets[i].src_const && sets[i].src_const_elt == 0
7474 && src != sets[i].src_const
7475 && ! rtx_equal_p (sets[i].src_const, src))
7476 sets[i].src_elt = insert (sets[i].src_const, classp,
2197a88a 7477 sets[i].src_const_hash, mode);
7afe21cc
RK
7478 }
7479 }
7480 else if (sets[i].src_elt == 0)
7481 /* If we did not insert the source into the hash table (e.g., it was
7482 volatile), note the equivalence class for the REG_EQUAL value, if any,
7483 so that the destination goes into that class. */
7484 sets[i].src_elt = src_eqv_elt;
7485
9ae8ffe7 7486 invalidate_from_clobbers (x);
77fa0940
RK
7487
7488 /* Some registers are invalidated by subroutine calls. Memory is
7489 invalidated by non-constant calls. */
7490
7afe21cc
RK
7491 if (GET_CODE (insn) == CALL_INSN)
7492 {
77fa0940 7493 if (! CONST_CALL_P (insn))
9ae8ffe7 7494 invalidate_memory ();
7afe21cc
RK
7495 invalidate_for_call ();
7496 }
7497
7498 /* Now invalidate everything set by this instruction.
7499 If a SUBREG or other funny destination is being set,
7500 sets[i].rtl is still nonzero, so here we invalidate the reg
7501 a part of which is being set. */
7502
7503 for (i = 0; i < n_sets; i++)
7504 if (sets[i].rtl)
7505 {
bb4034b3
JW
7506 /* We can't use the inner dest, because the mode associated with
7507 a ZERO_EXTRACT is significant. */
7508 register rtx dest = SET_DEST (sets[i].rtl);
7afe21cc
RK
7509
7510 /* Needed for registers to remove the register from its
7511 previous quantity's chain.
7512 Needed for memory if this is a nonvarying address, unless
7513 we have just done an invalidate_memory that covers even those. */
7514 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
9ae8ffe7 7515 || GET_CODE (dest) == MEM)
bb4034b3 7516 invalidate (dest, VOIDmode);
2708da92
RS
7517 else if (GET_CODE (dest) == STRICT_LOW_PART
7518 || GET_CODE (dest) == ZERO_EXTRACT)
bb4034b3 7519 invalidate (XEXP (dest, 0), GET_MODE (dest));
7afe21cc
RK
7520 }
7521
7522 /* Make sure registers mentioned in destinations
7523 are safe for use in an expression to be inserted.
7524 This removes from the hash table
7525 any invalid entry that refers to one of these registers.
7526
7527 We don't care about the return value from mention_regs because
7528 we are going to hash the SET_DEST values unconditionally. */
7529
7530 for (i = 0; i < n_sets; i++)
34c73909
R
7531 {
7532 if (sets[i].rtl)
7533 {
7534 rtx x = SET_DEST (sets[i].rtl);
7535
7536 if (GET_CODE (x) != REG)
7537 mention_regs (x);
7538 else
7539 {
7540 /* We used to rely on all references to a register becoming
7541 inaccessible when a register changes to a new quantity,
7542 since that changes the hash code. However, that is not
7543 safe, since after NBUCKETS new quantities we get a
7544 hash 'collision' of a register with its own invalid
7545 entries. And since SUBREGs have been changed not to
7546 change their hash code with the hash code of the register,
7547 it wouldn't work any longer at all. So we have to check
7548 for any invalid references lying around now.
7549 This code is similar to the REG case in mention_regs,
7550 but it knows that reg_tick has been incremented, and
7551 it leaves reg_in_table as -1 . */
7552 register int regno = REGNO (x);
7553 register int endregno
7554 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
7555 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
7556 int i;
7557
7558 for (i = regno; i < endregno; i++)
7559 {
7560 if (reg_in_table[i] >= 0)
7561 {
7562 remove_invalid_refs (i);
7563 reg_in_table[i] = -1;
7564 }
7565 }
7566 }
7567 }
7568 }
7afe21cc
RK
7569
7570 /* We may have just removed some of the src_elt's from the hash table.
7571 So replace each one with the current head of the same class. */
7572
7573 for (i = 0; i < n_sets; i++)
7574 if (sets[i].rtl)
7575 {
7576 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
7577 /* If elt was removed, find current head of same class,
7578 or 0 if nothing remains of that class. */
7579 {
7580 register struct table_elt *elt = sets[i].src_elt;
7581
7582 while (elt && elt->prev_same_value)
7583 elt = elt->prev_same_value;
7584
7585 while (elt && elt->first_same_value == 0)
7586 elt = elt->next_same_value;
7587 sets[i].src_elt = elt ? elt->first_same_value : 0;
7588 }
7589 }
7590
7591 /* Now insert the destinations into their equivalence classes. */
7592
7593 for (i = 0; i < n_sets; i++)
7594 if (sets[i].rtl)
7595 {
7596 register rtx dest = SET_DEST (sets[i].rtl);
9de2c71a 7597 rtx inner_dest = sets[i].inner_dest;
7afe21cc
RK
7598 register struct table_elt *elt;
7599
7600 /* Don't record value if we are not supposed to risk allocating
7601 floating-point values in registers that might be wider than
7602 memory. */
7603 if ((flag_float_store
7604 && GET_CODE (dest) == MEM
cbf6a543 7605 && FLOAT_MODE_P (GET_MODE (dest)))
bc4ddc77
JW
7606 /* Don't record BLKmode values, because we don't know the
7607 size of it, and can't be sure that other BLKmode values
7608 have the same or smaller size. */
7609 || GET_MODE (dest) == BLKmode
7afe21cc
RK
7610 /* Don't record values of destinations set inside a libcall block
7611 since we might delete the libcall. Things should have been set
7612 up so we won't want to reuse such a value, but we play it safe
7613 here. */
7bd8b2a8 7614 || libcall_insn
7afe21cc
RK
7615 /* If we didn't put a REG_EQUAL value or a source into the hash
7616 table, there is no point is recording DEST. */
1a8e9a8e
RK
7617 || sets[i].src_elt == 0
7618 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
7619 or SIGN_EXTEND, don't record DEST since it can cause
7620 some tracking to be wrong.
7621
7622 ??? Think about this more later. */
7623 || (GET_CODE (dest) == SUBREG
7624 && (GET_MODE_SIZE (GET_MODE (dest))
7625 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7626 && (GET_CODE (sets[i].src) == SIGN_EXTEND
7627 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
7afe21cc
RK
7628 continue;
7629
7630 /* STRICT_LOW_PART isn't part of the value BEING set,
7631 and neither is the SUBREG inside it.
7632 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
7633 if (GET_CODE (dest) == STRICT_LOW_PART)
7634 dest = SUBREG_REG (XEXP (dest, 0));
7635
c610adec 7636 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
7afe21cc
RK
7637 /* Registers must also be inserted into chains for quantities. */
7638 if (insert_regs (dest, sets[i].src_elt, 1))
8ae2b8f6
JW
7639 {
7640 /* If `insert_regs' changes something, the hash code must be
7641 recalculated. */
7642 rehash_using_reg (dest);
7643 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
7644 }
7afe21cc 7645
9de2c71a
MM
7646 if (GET_CODE (inner_dest) == MEM
7647 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
7648 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
7649 that (MEM (ADDRESSOF (X))) is equivalent to Y.
7650 Consider the case in which the address of the MEM is
7651 passed to a function, which alters the MEM. Then, if we
7652 later use Y instead of the MEM we'll miss the update. */
7653 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
7654 else
7655 elt = insert (dest, sets[i].src_elt,
7656 sets[i].dest_hash, GET_MODE (dest));
7657
c256df0b 7658 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
9ad91d71
RK
7659 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
7660 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
7661 0))));
c256df0b 7662
7afe21cc
RK
7663 if (elt->in_memory)
7664 {
7665 /* This implicitly assumes a whole struct
7666 need not have MEM_IN_STRUCT_P.
7667 But a whole struct is *supposed* to have MEM_IN_STRUCT_P. */
7668 elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
7669 || sets[i].inner_dest != SET_DEST (sets[i].rtl));
7670 }
7671
fc3ffe83
RK
7672 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
7673 narrower than M2, and both M1 and M2 are the same number of words,
7674 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
7675 make that equivalence as well.
7afe21cc
RK
7676
7677 However, BAR may have equivalences for which gen_lowpart_if_possible
7678 will produce a simpler value than gen_lowpart_if_possible applied to
7679 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
7680 BAR's equivalences. If we don't get a simplified form, make
7681 the SUBREG. It will not be used in an equivalence, but will
7682 cause two similar assignments to be detected.
7683
7684 Note the loop below will find SUBREG_REG (DEST) since we have
7685 already entered SRC and DEST of the SET in the table. */
7686
7687 if (GET_CODE (dest) == SUBREG
6cdbaec4
RK
7688 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
7689 / UNITS_PER_WORD)
7690 == (GET_MODE_SIZE (GET_MODE (dest)) - 1)/ UNITS_PER_WORD)
7afe21cc
RK
7691 && (GET_MODE_SIZE (GET_MODE (dest))
7692 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7693 && sets[i].src_elt != 0)
7694 {
7695 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
7696 struct table_elt *elt, *classp = 0;
7697
7698 for (elt = sets[i].src_elt->first_same_value; elt;
7699 elt = elt->next_same_value)
7700 {
7701 rtx new_src = 0;
2197a88a 7702 unsigned src_hash;
7afe21cc
RK
7703 struct table_elt *src_elt;
7704
7705 /* Ignore invalid entries. */
7706 if (GET_CODE (elt->exp) != REG
7707 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7708 continue;
7709
7710 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
7711 if (new_src == 0)
38a448ca 7712 new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
7afe21cc
RK
7713
7714 src_hash = HASH (new_src, new_mode);
7715 src_elt = lookup (new_src, src_hash, new_mode);
7716
7717 /* Put the new source in the hash table is if isn't
7718 already. */
7719 if (src_elt == 0)
7720 {
7721 if (insert_regs (new_src, classp, 0))
8ae2b8f6
JW
7722 {
7723 rehash_using_reg (new_src);
7724 src_hash = HASH (new_src, new_mode);
7725 }
7afe21cc
RK
7726 src_elt = insert (new_src, classp, src_hash, new_mode);
7727 src_elt->in_memory = elt->in_memory;
7728 src_elt->in_struct = elt->in_struct;
7729 }
7730 else if (classp && classp != src_elt->first_same_value)
7731 /* Show that two things that we've seen before are
7732 actually the same. */
7733 merge_equiv_classes (src_elt, classp);
7734
7735 classp = src_elt->first_same_value;
da932f04
JL
7736 /* Ignore invalid entries. */
7737 while (classp
7738 && GET_CODE (classp->exp) != REG
7739 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
7740 classp = classp->next_same_value;
7afe21cc
RK
7741 }
7742 }
7743 }
7744
7745 /* Special handling for (set REG0 REG1)
7746 where REG0 is the "cheapest", cheaper than REG1.
7747 After cse, REG1 will probably not be used in the sequel,
7748 so (if easily done) change this insn to (set REG1 REG0) and
7749 replace REG1 with REG0 in the previous insn that computed their value.
7750 Then REG1 will become a dead store and won't cloud the situation
7751 for later optimizations.
7752
7753 Do not make this change if REG1 is a hard register, because it will
7754 then be used in the sequel and we may be changing a two-operand insn
7755 into a three-operand insn.
7756
7757 Also do not do this if we are operating on a copy of INSN. */
7758
7759 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
7760 && NEXT_INSN (PREV_INSN (insn)) == insn
7761 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
7762 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
7763 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
7764 && (qty_first_reg[reg_qty[REGNO (SET_SRC (sets[0].rtl))]]
7765 == REGNO (SET_DEST (sets[0].rtl))))
7766 {
7767 rtx prev = PREV_INSN (insn);
7768 while (prev && GET_CODE (prev) == NOTE)
7769 prev = PREV_INSN (prev);
7770
7771 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
7772 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
7773 {
7774 rtx dest = SET_DEST (sets[0].rtl);
906c4e36 7775 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
7afe21cc
RK
7776
7777 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
7778 validate_change (insn, & SET_DEST (sets[0].rtl),
7779 SET_SRC (sets[0].rtl), 1);
7780 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
7781 apply_change_group ();
7782
7783 /* If REG1 was equivalent to a constant, REG0 is not. */
7784 if (note)
7785 PUT_REG_NOTE_KIND (note, REG_EQUAL);
7786
7787 /* If there was a REG_WAS_0 note on PREV, remove it. Move
7788 any REG_WAS_0 note on INSN to PREV. */
906c4e36 7789 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
7afe21cc
RK
7790 if (note)
7791 remove_note (prev, note);
7792
906c4e36 7793 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7afe21cc
RK
7794 if (note)
7795 {
7796 remove_note (insn, note);
7797 XEXP (note, 1) = REG_NOTES (prev);
7798 REG_NOTES (prev) = note;
7799 }
98369a0f
RK
7800
7801 /* If INSN has a REG_EQUAL note, and this note mentions REG0,
7802 then we must delete it, because the value in REG0 has changed. */
7803 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7804 if (note && reg_mentioned_p (dest, XEXP (note, 0)))
7805 remove_note (insn, note);
7afe21cc
RK
7806 }
7807 }
7808
7809 /* If this is a conditional jump insn, record any known equivalences due to
7810 the condition being tested. */
7811
7812 last_jump_equiv_class = 0;
7813 if (GET_CODE (insn) == JUMP_INSN
7814 && n_sets == 1 && GET_CODE (x) == SET
7815 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
7816 record_jump_equiv (insn, 0);
7817
7818#ifdef HAVE_cc0
7819 /* If the previous insn set CC0 and this insn no longer references CC0,
7820 delete the previous insn. Here we use the fact that nothing expects CC0
7821 to be valid over an insn, which is true until the final pass. */
7822 if (prev_insn && GET_CODE (prev_insn) == INSN
7823 && (tem = single_set (prev_insn)) != 0
7824 && SET_DEST (tem) == cc0_rtx
7825 && ! reg_mentioned_p (cc0_rtx, x))
7826 {
7827 PUT_CODE (prev_insn, NOTE);
7828 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
7829 NOTE_SOURCE_FILE (prev_insn) = 0;
7830 }
7831
7832 prev_insn_cc0 = this_insn_cc0;
7833 prev_insn_cc0_mode = this_insn_cc0_mode;
7834#endif
7835
7836 prev_insn = insn;
7837}
7838\f
9ae8ffe7 7839/* Remove from the ahsh table all expressions that reference memory. */
7afe21cc 7840static void
9ae8ffe7 7841invalidate_memory ()
7afe21cc 7842{
9ae8ffe7
JL
7843 register int i;
7844 register struct table_elt *p, *next;
7afe21cc 7845
9ae8ffe7
JL
7846 for (i = 0; i < NBUCKETS; i++)
7847 for (p = table[i]; p; p = next)
7848 {
7849 next = p->next_same_hash;
7850 if (p->in_memory)
7851 remove_from_table (p, i);
7852 }
7853}
7854
7855/* XXX ??? The name of this function bears little resemblance to
7856 what this function actually does. FIXME. */
7857static int
7858note_mem_written (addr)
7859 register rtx addr;
7860{
7861 /* Pushing or popping the stack invalidates just the stack pointer. */
7862 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7863 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7864 && GET_CODE (XEXP (addr, 0)) == REG
7865 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7afe21cc 7866 {
9ae8ffe7
JL
7867 if (reg_tick[STACK_POINTER_REGNUM] >= 0)
7868 reg_tick[STACK_POINTER_REGNUM]++;
7869
7870 /* This should be *very* rare. */
7871 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
7872 invalidate (stack_pointer_rtx, VOIDmode);
7873 return 1;
7afe21cc 7874 }
9ae8ffe7 7875 return 0;
7afe21cc
RK
7876}
7877
7878/* Perform invalidation on the basis of everything about an insn
7879 except for invalidating the actual places that are SET in it.
7880 This includes the places CLOBBERed, and anything that might
7881 alias with something that is SET or CLOBBERed.
7882
7afe21cc
RK
7883 X is the pattern of the insn. */
7884
7885static void
9ae8ffe7 7886invalidate_from_clobbers (x)
7afe21cc
RK
7887 rtx x;
7888{
7afe21cc
RK
7889 if (GET_CODE (x) == CLOBBER)
7890 {
7891 rtx ref = XEXP (x, 0);
9ae8ffe7
JL
7892 if (ref)
7893 {
7894 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7895 || GET_CODE (ref) == MEM)
7896 invalidate (ref, VOIDmode);
7897 else if (GET_CODE (ref) == STRICT_LOW_PART
7898 || GET_CODE (ref) == ZERO_EXTRACT)
7899 invalidate (XEXP (ref, 0), GET_MODE (ref));
7900 }
7afe21cc
RK
7901 }
7902 else if (GET_CODE (x) == PARALLEL)
7903 {
7904 register int i;
7905 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
7906 {
7907 register rtx y = XVECEXP (x, 0, i);
7908 if (GET_CODE (y) == CLOBBER)
7909 {
7910 rtx ref = XEXP (y, 0);
9ae8ffe7
JL
7911 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7912 || GET_CODE (ref) == MEM)
7913 invalidate (ref, VOIDmode);
7914 else if (GET_CODE (ref) == STRICT_LOW_PART
7915 || GET_CODE (ref) == ZERO_EXTRACT)
7916 invalidate (XEXP (ref, 0), GET_MODE (ref));
7afe21cc
RK
7917 }
7918 }
7919 }
7920}
7921\f
7922/* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
7923 and replace any registers in them with either an equivalent constant
7924 or the canonical form of the register. If we are inside an address,
7925 only do this if the address remains valid.
7926
7927 OBJECT is 0 except when within a MEM in which case it is the MEM.
7928
7929 Return the replacement for X. */
7930
7931static rtx
7932cse_process_notes (x, object)
7933 rtx x;
7934 rtx object;
7935{
7936 enum rtx_code code = GET_CODE (x);
7937 char *fmt = GET_RTX_FORMAT (code);
7afe21cc
RK
7938 int i;
7939
7940 switch (code)
7941 {
7942 case CONST_INT:
7943 case CONST:
7944 case SYMBOL_REF:
7945 case LABEL_REF:
7946 case CONST_DOUBLE:
7947 case PC:
7948 case CC0:
7949 case LO_SUM:
7950 return x;
7951
7952 case MEM:
7953 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
7954 return x;
7955
7956 case EXPR_LIST:
7957 case INSN_LIST:
7958 if (REG_NOTE_KIND (x) == REG_EQUAL)
906c4e36 7959 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
7afe21cc 7960 if (XEXP (x, 1))
906c4e36 7961 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
7afe21cc
RK
7962 return x;
7963
e4890d45
RS
7964 case SIGN_EXTEND:
7965 case ZERO_EXTEND:
0b0ee36c 7966 case SUBREG:
e4890d45
RS
7967 {
7968 rtx new = cse_process_notes (XEXP (x, 0), object);
7969 /* We don't substitute VOIDmode constants into these rtx,
7970 since they would impede folding. */
7971 if (GET_MODE (new) != VOIDmode)
7972 validate_change (object, &XEXP (x, 0), new, 0);
7973 return x;
7974 }
7975
7afe21cc
RK
7976 case REG:
7977 i = reg_qty[REGNO (x)];
7978
7979 /* Return a constant or a constant register. */
7980 if (REGNO_QTY_VALID_P (REGNO (x))
7981 && qty_const[i] != 0
7982 && (CONSTANT_P (qty_const[i])
7983 || GET_CODE (qty_const[i]) == REG))
7984 {
7985 rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
7986 if (new)
7987 return new;
7988 }
7989
7990 /* Otherwise, canonicalize this register. */
906c4e36 7991 return canon_reg (x, NULL_RTX);
e9a25f70
JL
7992
7993 default:
7994 break;
7afe21cc
RK
7995 }
7996
7997 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7998 if (fmt[i] == 'e')
7999 validate_change (object, &XEXP (x, i),
7fe34fdf 8000 cse_process_notes (XEXP (x, i), object), 0);
7afe21cc
RK
8001
8002 return x;
8003}
8004\f
8005/* Find common subexpressions between the end test of a loop and the beginning
8006 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
8007
8008 Often we have a loop where an expression in the exit test is used
8009 in the body of the loop. For example "while (*p) *q++ = *p++;".
8010 Because of the way we duplicate the loop exit test in front of the loop,
8011 however, we don't detect that common subexpression. This will be caught
8012 when global cse is implemented, but this is a quite common case.
8013
8014 This function handles the most common cases of these common expressions.
8015 It is called after we have processed the basic block ending with the
8016 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
8017 jumps to a label used only once. */
8018
8019static void
8020cse_around_loop (loop_start)
8021 rtx loop_start;
8022{
8023 rtx insn;
8024 int i;
8025 struct table_elt *p;
8026
8027 /* If the jump at the end of the loop doesn't go to the start, we don't
8028 do anything. */
8029 for (insn = PREV_INSN (loop_start);
8030 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
8031 insn = PREV_INSN (insn))
8032 ;
8033
8034 if (insn == 0
8035 || GET_CODE (insn) != NOTE
8036 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
8037 return;
8038
8039 /* If the last insn of the loop (the end test) was an NE comparison,
8040 we will interpret it as an EQ comparison, since we fell through
f72aed24 8041 the loop. Any equivalences resulting from that comparison are
7afe21cc
RK
8042 therefore not valid and must be invalidated. */
8043 if (last_jump_equiv_class)
8044 for (p = last_jump_equiv_class->first_same_value; p;
8045 p = p->next_same_value)
51723711
KG
8046 {
8047 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
8048 || (GET_CODE (p->exp) == SUBREG
8049 && GET_CODE (SUBREG_REG (p->exp)) == REG))
8050 invalidate (p->exp, VOIDmode);
8051 else if (GET_CODE (p->exp) == STRICT_LOW_PART
8052 || GET_CODE (p->exp) == ZERO_EXTRACT)
8053 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
8054 }
7afe21cc
RK
8055
8056 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
8057 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
8058
8059 The only thing we do with SET_DEST is invalidate entries, so we
8060 can safely process each SET in order. It is slightly less efficient
556c714b
JW
8061 to do so, but we only want to handle the most common cases.
8062
8063 The gen_move_insn call in cse_set_around_loop may create new pseudos.
8064 These pseudos won't have valid entries in any of the tables indexed
8065 by register number, such as reg_qty. We avoid out-of-range array
8066 accesses by not processing any instructions created after cse started. */
7afe21cc
RK
8067
8068 for (insn = NEXT_INSN (loop_start);
8069 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
556c714b 8070 && INSN_UID (insn) < max_insn_uid
7afe21cc
RK
8071 && ! (GET_CODE (insn) == NOTE
8072 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
8073 insn = NEXT_INSN (insn))
8074 {
8075 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
8076 && (GET_CODE (PATTERN (insn)) == SET
8077 || GET_CODE (PATTERN (insn)) == CLOBBER))
8078 cse_set_around_loop (PATTERN (insn), insn, loop_start);
8079 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
8080 && GET_CODE (PATTERN (insn)) == PARALLEL)
8081 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
8082 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
8083 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
8084 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
8085 loop_start);
8086 }
8087}
8088\f
8b3686ed
RK
8089/* Process one SET of an insn that was skipped. We ignore CLOBBERs
8090 since they are done elsewhere. This function is called via note_stores. */
8091
8092static void
8093invalidate_skipped_set (dest, set)
8094 rtx set;
8095 rtx dest;
8096{
9ae8ffe7
JL
8097 enum rtx_code code = GET_CODE (dest);
8098
8099 if (code == MEM
8100 && ! note_mem_written (dest) /* If this is not a stack push ... */
8101 /* There are times when an address can appear varying and be a PLUS
8102 during this scan when it would be a fixed address were we to know
8103 the proper equivalences. So invalidate all memory if there is
8104 a BLKmode or nonscalar memory reference or a reference to a
8105 variable address. */
8106 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
8107 || cse_rtx_varies_p (XEXP (dest, 0))))
8108 {
8109 invalidate_memory ();
8110 return;
8111 }
ffcf6393 8112
f47c02fa
RK
8113 if (GET_CODE (set) == CLOBBER
8114#ifdef HAVE_cc0
8115 || dest == cc0_rtx
8116#endif
8117 || dest == pc_rtx)
8118 return;
8119
9ae8ffe7 8120 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
bb4034b3 8121 invalidate (XEXP (dest, 0), GET_MODE (dest));
9ae8ffe7
JL
8122 else if (code == REG || code == SUBREG || code == MEM)
8123 invalidate (dest, VOIDmode);
8b3686ed
RK
8124}
8125
8126/* Invalidate all insns from START up to the end of the function or the
8127 next label. This called when we wish to CSE around a block that is
8128 conditionally executed. */
8129
8130static void
8131invalidate_skipped_block (start)
8132 rtx start;
8133{
8134 rtx insn;
8b3686ed
RK
8135
8136 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
8137 insn = NEXT_INSN (insn))
8138 {
8139 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
8140 continue;
8141
8b3686ed
RK
8142 if (GET_CODE (insn) == CALL_INSN)
8143 {
9ae8ffe7
JL
8144 if (! CONST_CALL_P (insn))
8145 invalidate_memory ();
8b3686ed 8146 invalidate_for_call ();
8b3686ed
RK
8147 }
8148
97577254 8149 invalidate_from_clobbers (PATTERN (insn));
8b3686ed 8150 note_stores (PATTERN (insn), invalidate_skipped_set);
8b3686ed
RK
8151 }
8152}
8153\f
7afe21cc
RK
8154/* Used for communication between the following two routines; contains a
8155 value to be checked for modification. */
8156
8157static rtx cse_check_loop_start_value;
8158
8159/* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
8160 indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0. */
8161
8162static void
8163cse_check_loop_start (x, set)
8164 rtx x;
d6f4ec51 8165 rtx set ATTRIBUTE_UNUSED;
7afe21cc
RK
8166{
8167 if (cse_check_loop_start_value == 0
8168 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
8169 return;
8170
8171 if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
8172 || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
8173 cse_check_loop_start_value = 0;
8174}
8175
8176/* X is a SET or CLOBBER contained in INSN that was found near the start of
8177 a loop that starts with the label at LOOP_START.
8178
8179 If X is a SET, we see if its SET_SRC is currently in our hash table.
8180 If so, we see if it has a value equal to some register used only in the
8181 loop exit code (as marked by jump.c).
8182
8183 If those two conditions are true, we search backwards from the start of
8184 the loop to see if that same value was loaded into a register that still
8185 retains its value at the start of the loop.
8186
8187 If so, we insert an insn after the load to copy the destination of that
8188 load into the equivalent register and (try to) replace our SET_SRC with that
8189 register.
8190
8191 In any event, we invalidate whatever this SET or CLOBBER modifies. */
8192
8193static void
8194cse_set_around_loop (x, insn, loop_start)
8195 rtx x;
8196 rtx insn;
8197 rtx loop_start;
8198{
7afe21cc 8199 struct table_elt *src_elt;
7afe21cc
RK
8200
8201 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
8202 are setting PC or CC0 or whose SET_SRC is already a register. */
8203 if (GET_CODE (x) == SET
8204 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
8205 && GET_CODE (SET_SRC (x)) != REG)
8206 {
8207 src_elt = lookup (SET_SRC (x),
8208 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
8209 GET_MODE (SET_DEST (x)));
8210
8211 if (src_elt)
8212 for (src_elt = src_elt->first_same_value; src_elt;
8213 src_elt = src_elt->next_same_value)
8214 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
8215 && COST (src_elt->exp) < COST (SET_SRC (x)))
8216 {
8217 rtx p, set;
8218
8219 /* Look for an insn in front of LOOP_START that sets
8220 something in the desired mode to SET_SRC (x) before we hit
8221 a label or CALL_INSN. */
8222
8223 for (p = prev_nonnote_insn (loop_start);
8224 p && GET_CODE (p) != CALL_INSN
8225 && GET_CODE (p) != CODE_LABEL;
8226 p = prev_nonnote_insn (p))
8227 if ((set = single_set (p)) != 0
8228 && GET_CODE (SET_DEST (set)) == REG
8229 && GET_MODE (SET_DEST (set)) == src_elt->mode
8230 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
8231 {
8232 /* We now have to ensure that nothing between P
8233 and LOOP_START modified anything referenced in
8234 SET_SRC (x). We know that nothing within the loop
8235 can modify it, or we would have invalidated it in
8236 the hash table. */
8237 rtx q;
8238
8239 cse_check_loop_start_value = SET_SRC (x);
8240 for (q = p; q != loop_start; q = NEXT_INSN (q))
8241 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
8242 note_stores (PATTERN (q), cse_check_loop_start);
8243
8244 /* If nothing was changed and we can replace our
8245 SET_SRC, add an insn after P to copy its destination
8246 to what we will be replacing SET_SRC with. */
8247 if (cse_check_loop_start_value
8248 && validate_change (insn, &SET_SRC (x),
8249 src_elt->exp, 0))
e89d3e6f
R
8250 {
8251 /* If this creates new pseudos, this is unsafe,
8252 because the regno of new pseudo is unsuitable
8253 to index into reg_qty when cse_insn processes
8254 the new insn. Therefore, if a new pseudo was
8255 created, discard this optimization. */
8256 int nregs = max_reg_num ();
8257 rtx move
8258 = gen_move_insn (src_elt->exp, SET_DEST (set));
8259 if (nregs != max_reg_num ())
8260 {
8261 if (! validate_change (insn, &SET_SRC (x),
8262 SET_SRC (set), 0))
8263 abort ();
8264 }
8265 else
8266 emit_insn_after (move, p);
8267 }
7afe21cc
RK
8268 break;
8269 }
8270 }
8271 }
8272
8273 /* Now invalidate anything modified by X. */
9ae8ffe7 8274 note_mem_written (SET_DEST (x));
7afe21cc 8275
9ae8ffe7 8276 /* See comment on similar code in cse_insn for explanation of these tests. */
7afe21cc 8277 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
9ae8ffe7 8278 || GET_CODE (SET_DEST (x)) == MEM)
bb4034b3 8279 invalidate (SET_DEST (x), VOIDmode);
2708da92
RS
8280 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
8281 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
bb4034b3 8282 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
7afe21cc
RK
8283}
8284\f
8285/* Find the end of INSN's basic block and return its range,
8286 the total number of SETs in all the insns of the block, the last insn of the
8287 block, and the branch path.
8288
8289 The branch path indicates which branches should be followed. If a non-zero
8290 path size is specified, the block should be rescanned and a different set
8291 of branches will be taken. The branch path is only used if
8b3686ed 8292 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
7afe21cc
RK
8293
8294 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
8295 used to describe the block. It is filled in with the information about
8296 the current block. The incoming structure's branch path, if any, is used
8297 to construct the output branch path. */
8298
/* Scan from INSN for the extent of the basic block it begins, filling in
   DATA: the number of SETs seen, the low/high CUID range covered, the
   last insn of the block, and the branch path followed (if any).
   FOLLOW_JUMPS and SKIP_BLOCKS allow the block to be extended through
   conditional branches; AFTER_LOOP nonzero means loop.c has already run,
   so a NOTE_INSN_LOOP_END no longer terminates the block.  */

void
cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
     rtx insn;
     struct cse_basic_block_data *data;
     int follow_jumps;
     int after_loop;
     int skip_blocks;
{
  rtx p = insn, q;
  int nsets = 0;
  int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
  /* The first real insn of the block; used below to detect a followed
     branch that would walk back around an always-executed loop.  */
  rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
  int path_size = data->path_size;
  int path_entry = 0;
  int i;

  /* Update the previous branch path, if any.  If the last branch was
     previously TAKEN, mark it NOT_TAKEN.  If it was previously NOT_TAKEN,
     shorten the path by one and look at the previous branch.  We know that
     at least one branch must have been taken if PATH_SIZE is non-zero.  */
  while (path_size > 0)
    {
      if (data->path[path_size - 1].status != NOT_TAKEN)
	{
	  data->path[path_size - 1].status = NOT_TAKEN;
	  break;
	}
      else
	path_size--;
    }

  /* Scan to end of this basic block.  */
  while (p && GET_CODE (p) != CODE_LABEL)
    {
      /* Don't cse out the end of a loop.  This makes a difference
	 only for the unusual loops that always execute at least once;
	 all other loops have labels there so we will stop in any case.
	 Cse'ing out the end of the loop is dangerous because it
	 might cause an invariant expression inside the loop
	 to be reused after the end of the loop.  This would make it
	 hard to move the expression out of the loop in loop.c,
	 especially if it is one of several equivalent expressions
	 and loop.c would like to eliminate it.

	 If we are running after loop.c has finished, we can ignore
	 the NOTE_INSN_LOOP_END.  */

      if (! after_loop && GET_CODE (p) == NOTE
	  && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
	break;

      /* Don't cse over a call to setjmp; on some machines (eg vax)
	 the regs restored by the longjmp come from
	 a later time than the setjmp.  */
      if (GET_CODE (p) == NOTE
	  && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
	break;

      /* A PARALLEL can have lots of SETs in it,
	 especially if it is really an ASM_OPERANDS.  */
      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	  && GET_CODE (PATTERN (p)) == PARALLEL)
	nsets += XVECLEN (PATTERN (p), 0);
      else if (GET_CODE (p) != NOTE)
	nsets += 1;

      /* Ignore insns made by CSE; they cannot affect the boundaries of
	 the basic block.  */

      if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
	high_cuid = INSN_CUID (p);
      if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
	low_cuid = INSN_CUID (p);

      /* See if this insn is in our branch path.  If it is and we are to
	 take it, do so.  */
      if (path_entry < path_size && data->path[path_entry].branch == p)
	{
	  if (data->path[path_entry].status != NOT_TAKEN)
	    p = JUMP_LABEL (p);

	  /* Point to next entry in path, if any.  */
	  path_entry++;
	}

      /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
	 was specified, we haven't reached our maximum path length, there are
	 insns following the target of the jump, this is the only use of the
	 jump label, and the target label is preceded by a BARRIER.

	 Alternatively, we can follow the jump if it branches around a
	 block of code and there are no other branches into the block.
	 In this case invalidate_skipped_block will be called to invalidate any
	 registers set in the block when following the jump.  */

      else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
	       && GET_CODE (p) == JUMP_INSN
	       && GET_CODE (PATTERN (p)) == SET
	       && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
	       && JUMP_LABEL (p) != 0
	       && LABEL_NUSES (JUMP_LABEL (p)) == 1
	       && NEXT_INSN (JUMP_LABEL (p)) != 0)
	{
	  /* Walk back from the jump target looking for the BARRIER (or a
	     reason to give up: a loop/setjmp note or a used label).  */
	  for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
	    if ((GET_CODE (q) != NOTE
		 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
		 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
		&& (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
	      break;

	  /* If we ran into a BARRIER, this code is an extension of the
	     basic block when the branch is taken.  */
	  if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
	    {
	      /* Don't allow ourself to keep walking around an
		 always-executed loop.  */
	      if (next_real_insn (q) == next)
		{
		  p = NEXT_INSN (p);
		  continue;
		}

	      /* Similarly, don't put a branch in our path more than once.  */
	      for (i = 0; i < path_entry; i++)
		if (data->path[i].branch == p)
		  break;

	      if (i != path_entry)
		break;

	      data->path[path_entry].branch = p;
	      data->path[path_entry++].status = TAKEN;

	      /* This branch now ends our path.  It was possible that we
		 didn't see this branch the last time around (when the
		 insn in front of the target was a JUMP_INSN that was
		 turned into a no-op).  */
	      path_size = path_entry;

	      p = JUMP_LABEL (p);
	      /* Mark block so we won't scan it again later.  */
	      PUT_MODE (NEXT_INSN (p), QImode);
	    }
	  /* Detect a branch around a block of code.  */
	  else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
	    {
	      register rtx tmp;

	      if (next_real_insn (q) == next)
		{
		  p = NEXT_INSN (p);
		  continue;
		}

	      for (i = 0; i < path_entry; i++)
		if (data->path[i].branch == p)
		  break;

	      if (i != path_entry)
		break;

	      /* This is no_labels_between_p (p, q) with an added check for
		 reaching the end of a function (in case Q precedes P).  */
	      for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
		if (GET_CODE (tmp) == CODE_LABEL)
		  break;

	      if (tmp == q)
		{
		  data->path[path_entry].branch = p;
		  data->path[path_entry++].status = AROUND;

		  path_size = path_entry;

		  p = JUMP_LABEL (p);
		  /* Mark block so we won't scan it again later.  */
		  PUT_MODE (NEXT_INSN (p), QImode);
		}
	    }
	}
      p = NEXT_INSN (p);
    }

  data->low_cuid = low_cuid;
  data->high_cuid = high_cuid;
  data->nsets = nsets;
  data->last = p;

  /* If all jumps in the path are not taken, set our path length to zero
     so a rescan won't be done.  */
  for (i = path_size - 1; i >= 0; i--)
    if (data->path[i].status != NOT_TAKEN)
      break;

  if (i == -1)
    data->path_size = 0;
  else
    data->path_size = path_size;

  /* End the current branch path.  */
  data->path[path_size].branch = 0;
}
8501\f
7afe21cc
RK
/* Perform cse on the instructions of a function.
   F is the first instruction.
   NREGS is one plus the highest pseudo-reg number used in the instruction.

   AFTER_LOOP is 1 if this is the cse call done after loop optimization
   (only if -frerun-cse-after-loop).

   Returns 1 if jump_optimize should be redone due to simplifications
   in conditional jump instructions.  */

int
cse_main (f, nregs, after_loop, file)
     rtx f;
     int nregs;
     int after_loop;
     FILE *file;
{
  struct cse_basic_block_data val;
  register rtx insn = f;
  register int i;

  cse_jumps_altered = 0;
  recorded_label_ref = 0;
  constant_pool_entries_cost = 0;
  val.path_size = 0;

  init_recog ();
  init_alias_analysis ();

  max_reg = nregs;

  max_insn_uid = get_max_uid ();

  /* Per-register scratch tables, sized by the pseudo-reg count.
     NOTE(review): these alloca allocations are proportional to NREGS;
     a very large function could overflow the stack -- long-standing
     idiom in this file, not changed here.  */
  all_minus_one = (int *) alloca (nregs * sizeof (int));
  consec_ints = (int *) alloca (nregs * sizeof (int));

  for (i = 0; i < nregs; i++)
    {
      all_minus_one[i] = -1;
      consec_ints[i] = i;
    }

  reg_next_eqv = (int *) alloca (nregs * sizeof (int));
  reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
  reg_qty = (int *) alloca (nregs * sizeof (int));
  reg_in_table = (int *) alloca (nregs * sizeof (int));
  reg_tick = (int *) alloca (nregs * sizeof (int));

#ifdef LOAD_EXTEND_OP

  /* Allocate scratch rtl here.  cse_insn will fill in the memory reference
     and change the code and mode as appropriate.  */
  memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
#endif

  /* Discard all the free elements of the previous function
     since they are allocated in the temporary obstack.  */
  bzero ((char *) table, sizeof table);
  free_element_chain = 0;
  n_elements_made = 0;

  /* Find the largest uid.  */

  max_uid = get_max_uid ();
  uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
  bzero ((char *) uid_cuid, (max_uid + 1) * sizeof (int));

  /* Compute the mapping from uids to cuids.
     CUIDs are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.
     Don't assign cuids to line-number NOTEs, so that the distance in cuids
     between two insns is not affected by -g.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) != NOTE
	  || NOTE_LINE_NUMBER (insn) < 0)
	INSN_CUID (insn) = ++i;
      else
	/* Give a line number note the same cuid as preceding insn.  */
	INSN_CUID (insn) = i;
    }

  /* Initialize which registers are clobbered by calls.  */

  CLEAR_HARD_REG_SET (regs_invalidated_by_call);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if ((call_used_regs[i]
	 /* Used to check !fixed_regs[i] here, but that isn't safe;
	    fixed regs are still call-clobbered, and sched can get
	    confused if they can "live across calls".

	    The frame pointer is always preserved across calls.  The arg
	    pointer is if it is fixed.  The stack pointer usually is, unless
	    RETURN_POPS_ARGS, in which case an explicit CLOBBER
	    will be present.  If we are generating PIC code, the PIC offset
	    table register is preserved across calls.  */

	 && i != STACK_POINTER_REGNUM
	 && i != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	 && i != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
#endif
#if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
	 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
#endif
	 )
	|| global_regs[i])
      SET_HARD_REG_BIT (regs_invalidated_by_call, i);

  /* Loop over basic blocks.
     Compute the maximum number of qty's needed for each basic block
     (which is 2 for each SET).  */
  insn = f;
  while (insn)
    {
      cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
			      flag_cse_skip_blocks);

      /* If this basic block was already processed or has no sets, skip it.
	 (A block already scanned is marked by QImode on its first insn;
	 see cse_end_of_basic_block.)  */
      if (val.nsets == 0 || GET_MODE (insn) == QImode)
	{
	  PUT_MODE (insn, VOIDmode);
	  insn = (val.last ? NEXT_INSN (val.last) : 0);
	  val.path_size = 0;
	  continue;
	}

      cse_basic_block_start = val.low_cuid;
      cse_basic_block_end = val.high_cuid;
      max_qty = val.nsets * 2;

      if (file)
	fprintf (file, ";; Processing block from %d to %d, %d sets.\n",
		 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
		 val.nsets);

      /* Make MAX_QTY bigger to give us room to optimize
	 past the end of this basic block, if that should prove useful.  */
      if (max_qty < 500)
	max_qty = 500;

      max_qty += max_reg;

      /* If this basic block is being extended by following certain jumps,
	 (see `cse_end_of_basic_block'), we reprocess the code from the start.
	 Otherwise, we start after this basic block.  */
      if (val.path_size > 0)
	cse_basic_block (insn, val.last, val.path, 0);
      else
	{
	  int old_cse_jumps_altered = cse_jumps_altered;
	  rtx temp;

	  /* When cse changes a conditional jump to an unconditional
	     jump, we want to reprocess the block, since it will give
	     us a new branch path to investigate.  */
	  cse_jumps_altered = 0;
	  temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
	  if (cse_jumps_altered == 0
	      || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
	    insn = temp;

	  cse_jumps_altered |= old_cse_jumps_altered;
	}

#ifdef USE_C_ALLOCA
      alloca (0);
#endif
    }

  /* Tell refers_to_mem_p that qty_const info is not available.  */
  qty_const = 0;

  if (max_elements_made < n_elements_made)
    max_elements_made = n_elements_made;

  return cse_jumps_altered || recorded_label_ref;
}
8685
/* Process a single basic block.  FROM and TO are the limits of the basic
   block.  NEXT_BRANCH points to the branch path when following jumps or
   a null path when not following jumps.

   AROUND_LOOP is non-zero if we are to try to cse around to the start of a
   loop.  This is true when we are being called for the last time on a
   block and this CSE pass is before loop.c.

   Returns the insn following the processed region, or 0 if we ran off
   the end of the function.  */

static rtx
cse_basic_block (from, to, next_branch, around_loop)
     register rtx from, to;
     struct branch_path *next_branch;
     int around_loop;
{
  register rtx insn;
  int to_usage = 0;
  rtx libcall_insn = NULL_RTX;
  int num_insns = 0;

  /* Each of these arrays is undefined before max_reg, so only allocate
     the space actually needed and adjust the start below.  */

  qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
  qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
  qty_mode = (enum machine_mode *) alloca ((max_qty - max_reg) * sizeof (enum machine_mode));
  qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
  qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
  qty_comparison_code
    = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
  qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
  qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));

  /* Bias the base pointers so the arrays can be indexed by qty number
     (which is always >= max_reg).  */
  qty_first_reg -= max_reg;
  qty_last_reg -= max_reg;
  qty_mode -= max_reg;
  qty_const -= max_reg;
  qty_const_insn -= max_reg;
  qty_comparison_code -= max_reg;
  qty_comparison_qty -= max_reg;
  qty_comparison_const -= max_reg;

  new_basic_block ();

  /* TO might be a label.  If so, protect it from being deleted.  */
  if (to != 0 && GET_CODE (to) == CODE_LABEL)
    ++LABEL_NUSES (to);

  for (insn = from; insn != to; insn = NEXT_INSN (insn))
    {
      register enum rtx_code code = GET_CODE (insn);
      int i;
      struct table_elt *p;

      /* If we have processed 1,000 insns, flush the hash table to
	 avoid extreme quadratic behavior.  We must not include NOTEs
	 in the count since there may be more of them when generating
	 debugging information.  If we clear the table at different
	 times, code generated with -g -O might be different than code
	 generated with -O but not -g.

	 ??? This is a real kludge and needs to be done some other way.
	 Perhaps for 2.9.  */
      if (code != NOTE && num_insns++ > 1000)
	{
	  for (i = 0; i < NBUCKETS; i++)
	    for (p = table[i]; p; p = table[i])
	      {
		/* Note that invalidate can remove elements
		   after P in the current hash chain.  */
		if (GET_CODE (p->exp) == REG)
		  invalidate (p->exp, p->mode);
		else
		  remove_from_table (p, i);
	      }

	  num_insns = 0;
	}

      /* See if this is a branch that is part of the path.  If so, and it is
	 to be taken, do so.  */
      if (next_branch->branch == insn)
	{
	  enum taken status = next_branch++->status;
	  if (status != NOT_TAKEN)
	    {
	      if (status == TAKEN)
		record_jump_equiv (insn, 1);
	      else
		invalidate_skipped_block (NEXT_INSN (insn));

	      /* Set the last insn as the jump insn; it doesn't affect cc0.
		 Then follow this branch.  */
#ifdef HAVE_cc0
	      prev_insn_cc0 = 0;
#endif
	      prev_insn = insn;
	      insn = JUMP_LABEL (insn);
	      continue;
	    }
	}

      if (GET_MODE (insn) == QImode)
	PUT_MODE (insn, VOIDmode);

      if (GET_RTX_CLASS (code) == 'i')
	{
	  rtx p;

	  /* Process notes first so we have all notes in canonical forms when
	     looking for duplicate operations.  */

	  if (REG_NOTES (insn))
	    REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);

	  /* Track when we are inside a LIBCALL block.  Inside such a block,
	     we do not want to record destinations.  The last insn of a
	     LIBCALL block is not considered to be part of the block, since
	     its destination is the result of the block and hence should be
	     recorded.  */

	  if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
	    libcall_insn = XEXP (p, 0);
	  else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
	    libcall_insn = NULL_RTX;

	  cse_insn (insn, libcall_insn);
	}

      /* If INSN is now an unconditional jump, skip to the end of our
	 basic block by pretending that we just did the last insn in the
	 basic block.  If we are jumping to the end of our block, show
	 that we can have one usage of TO.  */

      if (simplejump_p (insn))
	{
	  if (to == 0)
	    return 0;

	  if (JUMP_LABEL (insn) == to)
	    to_usage = 1;

	  /* Maybe TO was deleted because the jump is unconditional.
	     If so, there is nothing left in this basic block.  */
	  /* ??? Perhaps it would be smarter to set TO
	     to whatever follows this insn,
	     and pretend the basic block had always ended here.  */
	  if (INSN_DELETED_P (to))
	    break;

	  insn = PREV_INSN (to);
	}

      /* See if it is ok to keep on going past the label
	 which used to end our basic block.  Remember that we incremented
	 the count of that label, so we decrement it here.  If we made
	 a jump unconditional, TO_USAGE will be one; in that case, we don't
	 want to count the use in that jump.  */

      if (to != 0 && NEXT_INSN (insn) == to
	  && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
	{
	  struct cse_basic_block_data val;
	  rtx prev;

	  insn = NEXT_INSN (to);

	  if (LABEL_NUSES (to) == 0)
	    insn = delete_insn (to);

	  /* If TO was the last insn in the function, we are done.  */
	  if (insn == 0)
	    return 0;

	  /* If TO was preceded by a BARRIER we are done with this block
	     because it has no continuation.  */
	  prev = prev_nonnote_insn (to);
	  if (prev && GET_CODE (prev) == BARRIER)
	    return insn;

	  /* Find the end of the following block.  Note that we won't be
	     following branches in this case.  */
	  to_usage = 0;
	  val.path_size = 0;
	  cse_end_of_basic_block (insn, &val, 0, 0, 0);

	  /* If the tables we allocated have enough space left
	     to handle all the SETs in the next basic block,
	     continue through it.  Otherwise, return,
	     and that block will be scanned individually.  */
	  if (val.nsets * 2 + next_qty > max_qty)
	    break;

	  cse_basic_block_start = val.low_cuid;
	  cse_basic_block_end = val.high_cuid;
	  to = val.last;

	  /* Prevent TO from being deleted if it is a label.  */
	  if (to != 0 && GET_CODE (to) == CODE_LABEL)
	    ++LABEL_NUSES (to);

	  /* Back up so we process the first insn in the extension.  */
	  insn = PREV_INSN (insn);
	}
    }

  if (next_qty > max_qty)
    abort ();

  /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
     the previous insn is the only insn that branches to the head of a loop,
     we can cse into the loop.  Don't do this if we changed the jump
     structure of a loop unless we aren't going to be following jumps.  */

  if ((cse_jumps_altered == 0
       || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
      && around_loop && to != 0
      && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
      && GET_CODE (PREV_INSN (to)) == JUMP_INSN
      && JUMP_LABEL (PREV_INSN (to)) != 0
      && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
    cse_around_loop (JUMP_LABEL (PREV_INSN (to)));

  return to ? NEXT_INSN (to) : 0;
}
8910\f
/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count, INCR is how much
   we count each register usage (negative INCR removes counts, as done
   when an insn is deleted).

   Don't count a usage of DEST, which is the SET_DEST of a SET which
   contains X in its SET_SRC.  This is because such a SET does not
   modify the liveness of DEST.  */

static void
count_reg_usage (x, counts, dest, incr)
     rtx x;
     int *counts;
     rtx dest;
     int incr;
{
  enum rtx_code code;
  char *fmt;
  int i, j;

  if (x == 0)
    return;

  switch (code = GET_CODE (x))
    {
    case REG:
      if (x != dest)
	counts[REGNO (x)] += incr;
      return;

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
      /* Constants and the like contain no register uses.  */
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.  */
      if (GET_CODE (SET_DEST (x)) != REG)
	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);

      /* If SRC has side-effects, then we can't delete this insn, so the
	 usage of SET_DEST inside SRC counts.

	 ??? Strictly-speaking, we might be preserving this insn
	 because some other SET has side-effects, but that's hard
	 to do and can't happen now.  */
      count_reg_usage (SET_SRC (x), counts,
		       side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
		       incr);
      return;

    case CALL_INSN:
      count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);

      /* ... falls through ...  */
    case INSN:
    case JUMP_INSN:
      count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);

      /* Things used in a REG_EQUAL note aren't dead since loop may try to
	 use them.  */

      count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
      return;

    case EXPR_LIST:
    case INSN_LIST:
      /* Walk the note chain; only REG_EQUAL notes and USE expressions
	 contribute register uses.  */
      if (REG_NOTE_KIND (x) == REG_EQUAL
	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE))
	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
      return;

    default:
      break;
    }

  /* Generic case: recurse into every rtx or rtx-vector operand.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	count_reg_usage (XEXP (x, i), counts, dest, incr);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
    }
}
9008\f
/* Scan all the insns and delete any that are dead; i.e., they store a register
   that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in loop since it won't try to
   move dead invariants out of loops or make givs for dead quantities.  The
   remaining passes of the compilation are also sped up.  */

void
delete_trivially_dead_insns (insns, nreg)
     rtx insns;
     int nreg;
{
  int *counts = (int *) alloca (nreg * sizeof (int));
  rtx insn, prev;
#ifdef HAVE_cc0
  rtx tem;
#endif
  int i;
  int in_libcall = 0, dead_libcall = 0;

  /* First count the number of times each register is used.  */
  bzero ((char *) counts, sizeof (int) * nreg);
  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
    count_reg_usage (insn, counts, NULL_RTX, 1);

  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.  */
  for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
    {
      int live_insn = 0;
      rtx note;

      prev = prev_real_insn (insn);

      /* Don't delete any insns that are part of a libcall block unless
	 we can delete the whole libcall block.

	 Flow or loop might get confused if we did that.  Remember
	 that we are scanning backwards.  */
      if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
	{
	  in_libcall = 1;
	  live_insn = 1;
	  dead_libcall = 0;

	  /* See if there's a REG_EQUAL note on this insn and try to
	     replace the source with the REG_EQUAL expression.

	     We assume that insns with REG_RETVALs can only be reg->reg
	     copies at this point.  */
	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
	  if (note)
	    {
	      rtx set = single_set (insn);
	      if (set
		  && validate_change (insn, &SET_SRC (set), XEXP (note, 0), 0))
		{
		  /* The whole libcall is now reduced to this one insn, so
		     the rest of the block can be deleted as we pass it.  */
		  remove_note (insn,
			       find_reg_note (insn, REG_RETVAL, NULL_RTX));
		  dead_libcall = 1;
		}
	    }
	}
      else if (in_libcall)
	live_insn = ! dead_libcall;
      else if (GET_CODE (PATTERN (insn)) == SET)
	{
	  /* A no-op reg->same-reg copy is dead.  */
	  if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
	      && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
	    ;

#ifdef HAVE_cc0
	  /* A cc0 set is dead if the following real insn doesn't use cc0.  */
	  else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
		   && ! side_effects_p (SET_SRC (PATTERN (insn)))
		   && ((tem = next_nonnote_insn (insn)) == 0
		       || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
		       || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
	    ;
#endif
	  /* Live if it sets a hard reg, a non-reg, a used pseudo, or has
	     side effects in the source.  */
	  else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
		   || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
		   || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
		   || side_effects_p (SET_SRC (PATTERN (insn))))
	    live_insn = 1;
	}
      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
	/* A PARALLEL is dead only if every element is dead.  */
	for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	  {
	    rtx elt = XVECEXP (PATTERN (insn), 0, i);

	    if (GET_CODE (elt) == SET)
	      {
		if (GET_CODE (SET_DEST (elt)) == REG
		    && SET_DEST (elt) == SET_SRC (elt))
		  ;

#ifdef HAVE_cc0
		else if (GET_CODE (SET_DEST (elt)) == CC0
			 && ! side_effects_p (SET_SRC (elt))
			 && ((tem = next_nonnote_insn (insn)) == 0
			     || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
			     || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
		  ;
#endif
		else if (GET_CODE (SET_DEST (elt)) != REG
			 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
			 || counts[REGNO (SET_DEST (elt))] != 0
			 || side_effects_p (SET_SRC (elt)))
		  live_insn = 1;
	      }
	    else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
	      live_insn = 1;
	  }
      else
	live_insn = 1;

      /* If this is a dead insn, delete it and show registers in it aren't
	 being used.  */

      if (! live_insn)
	{
	  count_reg_usage (insn, counts, NULL_RTX, -1);
	  delete_insn (insn);
	}

      /* NOTE(review): INSN may have just been deleted above; this relies
	 on find_reg_note being safe on an insn delete_insn has turned
	 into a note -- confirm against delete_insn's behavior.  */
      if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	{
	  in_libcall = 0;
	  dead_libcall = 0;
	}
    }
}