]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cse.c
machmode.h (mode_name): Constify a char*.
[thirdparty/gcc.git] / gcc / cse.c
CommitLineData
7afe21cc 1/* Common subexpression elimination for GNU compiler.
747215f1 2 Copyright (C) 1987, 88, 89, 92-7, 1998, 1999 Free Software Foundation, Inc.
7afe21cc
RK
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
940d9d63
RK
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
7afe21cc
RK
20
21
22#include "config.h"
670ee920
KG
23/* stdio.h must precede rtl.h for FFS. */
24#include "system.h"
50b2596f 25#include <setjmp.h>
9c3b4c8b 26
7afe21cc
RK
27#include "rtl.h"
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "flags.h"
31#include "real.h"
32#include "insn-config.h"
33#include "recog.h"
49ad7cfa 34#include "function.h"
956d6950 35#include "expr.h"
50b2596f
KG
36#include "toplev.h"
37#include "output.h"
30f72379 38#include "splay-tree.h"
7afe21cc
RK
39
40/* The basic idea of common subexpression elimination is to go
41 through the code, keeping a record of expressions that would
42 have the same value at the current scan point, and replacing
43 expressions encountered with the cheapest equivalent expression.
44
45 It is too complicated to keep track of the different possibilities
46 when control paths merge; so, at each label, we forget all that is
47 known and start fresh. This can be described as processing each
48 basic block separately. Note, however, that these are not quite
49 the same as the basic blocks found by a later pass and used for
50 data flow analysis and register packing. We do not need to start fresh
51 after a conditional jump instruction if there is no label there.
52
53 We use two data structures to record the equivalent expressions:
54 a hash table for most expressions, and several vectors together
55 with "quantity numbers" to record equivalent (pseudo) registers.
56
57 The use of the special data structure for registers is desirable
 58    because it is faster.  It is possible because register references
59 contain a fairly small number, the register number, taken from
60 a contiguously allocated series, and two register references are
61 identical if they have the same number. General expressions
62 do not have any such thing, so the only way to retrieve the
63 information recorded on an expression other than a register
64 is to keep it in a hash table.
65
66Registers and "quantity numbers":
67
68 At the start of each basic block, all of the (hardware and pseudo)
69 registers used in the function are given distinct quantity
70 numbers to indicate their contents. During scan, when the code
71 copies one register into another, we copy the quantity number.
72 When a register is loaded in any other way, we allocate a new
73 quantity number to describe the value generated by this operation.
74 `reg_qty' records what quantity a register is currently thought
75 of as containing.
76
77 All real quantity numbers are greater than or equal to `max_reg'.
78 If register N has not been assigned a quantity, reg_qty[N] will equal N.
79
80 Quantity numbers below `max_reg' do not exist and none of the `qty_...'
81 variables should be referenced with an index below `max_reg'.
82
83 We also maintain a bidirectional chain of registers for each
84 quantity number. `qty_first_reg', `qty_last_reg',
85 `reg_next_eqv' and `reg_prev_eqv' hold these chains.
86
87 The first register in a chain is the one whose lifespan is least local.
88 Among equals, it is the one that was seen first.
89 We replace any equivalent register with that one.
90
91 If two registers have the same quantity number, it must be true that
92 REG expressions with `qty_mode' must be in the hash table for both
93 registers and must be in the same class.
94
95 The converse is not true. Since hard registers may be referenced in
96 any mode, two REG expressions might be equivalent in the hash table
97 but not have the same quantity number if the quantity number of one
98 of the registers is not the same mode as those expressions.
99
100Constants and quantity numbers
101
102 When a quantity has a known constant value, that value is stored
103 in the appropriate element of qty_const. This is in addition to
104 putting the constant in the hash table as is usual for non-regs.
105
d45cf215 106 Whether a reg or a constant is preferred is determined by the configuration
7afe21cc
RK
107 macro CONST_COSTS and will often depend on the constant value. In any
108 event, expressions containing constants can be simplified, by fold_rtx.
109
110 When a quantity has a known nearly constant value (such as an address
111 of a stack slot), that value is stored in the appropriate element
112 of qty_const.
113
114 Integer constants don't have a machine mode. However, cse
115 determines the intended machine mode from the destination
116 of the instruction that moves the constant. The machine mode
117 is recorded in the hash table along with the actual RTL
118 constant expression so that different modes are kept separate.
119
120Other expressions:
121
122 To record known equivalences among expressions in general
123 we use a hash table called `table'. It has a fixed number of buckets
124 that contain chains of `struct table_elt' elements for expressions.
125 These chains connect the elements whose expressions have the same
126 hash codes.
127
128 Other chains through the same elements connect the elements which
129 currently have equivalent values.
130
131 Register references in an expression are canonicalized before hashing
132 the expression. This is done using `reg_qty' and `qty_first_reg'.
133 The hash code of a register reference is computed using the quantity
134 number, not the register number.
135
136 When the value of an expression changes, it is necessary to remove from the
137 hash table not just that expression but all expressions whose values
138 could be different as a result.
139
140 1. If the value changing is in memory, except in special cases
141 ANYTHING referring to memory could be changed. That is because
142 nobody knows where a pointer does not point.
143 The function `invalidate_memory' removes what is necessary.
144
145 The special cases are when the address is constant or is
146 a constant plus a fixed register such as the frame pointer
147 or a static chain pointer. When such addresses are stored in,
148 we can tell exactly which other such addresses must be invalidated
149 due to overlap. `invalidate' does this.
150 All expressions that refer to non-constant
151 memory addresses are also invalidated. `invalidate_memory' does this.
152
153 2. If the value changing is a register, all expressions
154 containing references to that register, and only those,
155 must be removed.
156
157 Because searching the entire hash table for expressions that contain
158 a register is very slow, we try to figure out when it isn't necessary.
159 Precisely, this is necessary only when expressions have been
160 entered in the hash table using this register, and then the value has
161 changed, and then another expression wants to be added to refer to
162 the register's new value. This sequence of circumstances is rare
163 within any one basic block.
164
165 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
166 reg_tick[i] is incremented whenever a value is stored in register i.
167 reg_in_table[i] holds -1 if no references to register i have been
168 entered in the table; otherwise, it contains the value reg_tick[i] had
169 when the references were entered. If we want to enter a reference
170 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
171 Until we want to enter a new entry, the mere fact that the two vectors
172 don't match makes the entries be ignored if anyone tries to match them.
173
174 Registers themselves are entered in the hash table as well as in
175 the equivalent-register chains. However, the vectors `reg_tick'
176 and `reg_in_table' do not apply to expressions which are simple
177 register references. These expressions are removed from the table
178 immediately when they become invalid, and this can be done even if
179 we do not immediately search for all the expressions that refer to
180 the register.
181
182 A CLOBBER rtx in an instruction invalidates its operand for further
183 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
184 invalidates everything that resides in memory.
185
186Related expressions:
187
188 Constant expressions that differ only by an additive integer
189 are called related. When a constant expression is put in
190 the table, the related expression with no constant term
191 is also entered. These are made to point at each other
192 so that it is possible to find out if there exists any
193 register equivalent to an expression related to a given expression. */
194
195/* One plus largest register number used in this function. */
196
197static int max_reg;
198
556c714b
JW
199/* One plus largest instruction UID used in this function at time of
200 cse_main call. */
201
202static int max_insn_uid;
203
7afe21cc
RK
204/* Length of vectors indexed by quantity number.
205 We know in advance we will not need a quantity number this big. */
206
207static int max_qty;
208
209/* Next quantity number to be allocated.
210 This is 1 + the largest number needed so far. */
211
212static int next_qty;
213
71d306d1 214/* Indexed by quantity number, gives the first (or last) register
7afe21cc
RK
215 in the chain of registers that currently contain this quantity. */
216
217static int *qty_first_reg;
218static int *qty_last_reg;
219
220/* Index by quantity number, gives the mode of the quantity. */
221
222static enum machine_mode *qty_mode;
223
224/* Indexed by quantity number, gives the rtx of the constant value of the
225 quantity, or zero if it does not have a known value.
226 A sum of the frame pointer (or arg pointer) plus a constant
227 can also be entered here. */
228
229static rtx *qty_const;
230
231/* Indexed by qty number, gives the insn that stored the constant value
232 recorded in `qty_const'. */
233
234static rtx *qty_const_insn;
235
236/* The next three variables are used to track when a comparison between a
237 quantity and some constant or register has been passed. In that case, we
238 know the results of the comparison in case we see it again. These variables
239 record a comparison that is known to be true. */
240
241/* Indexed by qty number, gives the rtx code of a comparison with a known
242 result involving this quantity. If none, it is UNKNOWN. */
243static enum rtx_code *qty_comparison_code;
244
245/* Indexed by qty number, gives the constant being compared against in a
246 comparison of known result. If no such comparison, it is undefined.
247 If the comparison is not with a constant, it is zero. */
248
249static rtx *qty_comparison_const;
250
251/* Indexed by qty number, gives the quantity being compared against in a
252   comparison of known result.  If no such comparison, it is undefined.
253 If the comparison is not with a register, it is -1. */
254
255static int *qty_comparison_qty;
256
257#ifdef HAVE_cc0
258/* For machines that have a CC0, we do not record its value in the hash
259 table since its use is guaranteed to be the insn immediately following
260 its definition and any other insn is presumed to invalidate it.
261
262 Instead, we store below the value last assigned to CC0. If it should
263 happen to be a constant, it is stored in preference to the actual
264 assigned value. In case it is a constant, we store the mode in which
265 the constant should be interpreted. */
266
267static rtx prev_insn_cc0;
268static enum machine_mode prev_insn_cc0_mode;
269#endif
270
271/* Previous actual insn. 0 if at first insn of basic block. */
272
273static rtx prev_insn;
274
275/* Insn being scanned. */
276
277static rtx this_insn;
278
71d306d1
DE
279/* Index by register number, gives the number of the next (or
280 previous) register in the chain of registers sharing the same
7afe21cc
RK
281 value.
282
283 Or -1 if this register is at the end of the chain.
284
285 If reg_qty[N] == N, reg_next_eqv[N] is undefined. */
286
287static int *reg_next_eqv;
288static int *reg_prev_eqv;
289
30f72379
MM
/* Per-register bookkeeping maintained while CSE processes one function.
   One of these exists for each register that CSE has looked at; they are
   kept in a splay tree (`cse_reg_info_tree') and recycled through a free
   list when a function is finished.  */
struct cse_reg_info {
  union {
    /* The number of times the register has been altered in the current
       basic block.  */
    int reg_tick;

    /* The next cse_reg_info structure in the free list.  Only valid
       while the entry is on the free list (the reg_tick field is dead
       then), which is why the two share a union.  */
    struct cse_reg_info* next;
  } variant;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The quantity number of the register's current contents.  */
  int reg_qty;
};
7afe21cc 309
30f72379
MM
310/* A free list of cse_reg_info entries. */
311static struct cse_reg_info *cse_reg_info_free_list;
7afe21cc 312
30f72379
MM
313/* A mapping from registers to cse_reg_info data structures. */
314static splay_tree cse_reg_info_tree;
7afe21cc 315
30f72379
MM
316/* The last lookup we did into the cse_reg_info_tree. This allows us
317 to cache repeated lookups. */
318static int cached_regno;
319static struct cse_reg_info *cached_cse_reg_info;
7afe21cc
RK
320
321/* A HARD_REG_SET containing all the hard registers for which there is
322 currently a REG expression in the hash table. Note the difference
323 from the above variables, which indicate if the REG is mentioned in some
324 expression in the table. */
325
326static HARD_REG_SET hard_regs_in_table;
327
328/* A HARD_REG_SET containing all the hard registers that are invalidated
329 by a CALL_INSN. */
330
331static HARD_REG_SET regs_invalidated_by_call;
332
7afe21cc
RK
333/* CUID of insn that starts the basic block currently being cse-processed. */
334
335static int cse_basic_block_start;
336
337/* CUID of insn that ends the basic block currently being cse-processed. */
338
339static int cse_basic_block_end;
340
341/* Vector mapping INSN_UIDs to cuids.
d45cf215 342 The cuids are like uids but increase monotonically always.
7afe21cc
RK
343 We use them to see whether a reg is used outside a given basic block. */
344
906c4e36 345static int *uid_cuid;
7afe21cc 346
164c8956
RK
347/* Highest UID in UID_CUID. */
348static int max_uid;
349
7afe21cc
RK
350/* Get the cuid of an insn. */
351
352#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
353
354/* Nonzero if cse has altered conditional jump insns
355 in such a way that jump optimization should be redone. */
356
357static int cse_jumps_altered;
358
a5dfb4ee
RK
359/* Nonzero if we put a LABEL_REF into the hash table. Since we may have put
360 it into an INSN without a REG_LABEL, we have to rerun jump after CSE
361 to put in the note. */
362static int recorded_label_ref;
363
7afe21cc
RK
364/* canon_hash stores 1 in do_not_record
365 if it notices a reference to CC0, PC, or some other volatile
366 subexpression. */
367
368static int do_not_record;
369
7bac1be0
RK
370#ifdef LOAD_EXTEND_OP
371
372/* Scratch rtl used when looking for load-extended copy of a MEM. */
373static rtx memory_extend_rtx;
374#endif
375
7afe21cc
RK
376/* canon_hash stores 1 in hash_arg_in_memory
377 if it notices a reference to memory within the expression being hashed. */
378
379static int hash_arg_in_memory;
380
381/* canon_hash stores 1 in hash_arg_in_struct
382 if it notices a reference to memory that's part of a structure. */
383
384static int hash_arg_in_struct;
385
386/* The hash table contains buckets which are chains of `struct table_elt's,
387 each recording one expression's information.
388 That expression is in the `exp' field.
389
390 Those elements with the same hash code are chained in both directions
391 through the `next_same_hash' and `prev_same_hash' fields.
392
393 Each set of expressions with equivalent values
394 are on a two-way chain through the `next_same_value'
395 and `prev_same_value' fields, and all point with
396 the `first_same_value' field at the first element in
397 that chain. The chain is in order of increasing cost.
398 Each element's cost value is in its `cost' field.
399
400 The `in_memory' field is nonzero for elements that
401 involve any reference to memory. These elements are removed
402 whenever a write is done to an unidentified location in memory.
403 To be safe, we assume that a memory address is unidentified unless
404 the address is either a symbol constant or a constant plus
405 the frame pointer or argument pointer.
406
407 The `in_struct' field is nonzero for elements that
408 involve any reference to memory inside a structure or array.
409
410 The `related_value' field is used to connect related expressions
411 (that differ by adding an integer).
412 The related expressions are chained in a circular fashion.
413 `related_value' is zero for expressions for which this
414 chain is not useful.
415
416 The `cost' field stores the cost of this element's expression.
417
418 The `is_const' flag is set if the element is a constant (including
419 a fixed address).
420
421 The `flag' field is used as a temporary during some search routines.
422
423 The `mode' field is usually the same as GET_MODE (`exp'), but
424 if `exp' is a CONST_INT and has no machine mode then the `mode'
425 field is the mode it was being used as. Each constant is
426 recorded separately for each mode it is used with. */
427
428
/* One entry in the expression hash table.  See the long comment above
   for how the chains are used.  */
struct table_elt
{
  /* The expression this element records.  */
  rtx exp;
  /* Two-way chain of elements whose expressions share this hash code.  */
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  /* Two-way chain of elements with equivalent values, in order of
     increasing cost.  */
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  /* First (cheapest) element on the same-value chain.  */
  struct table_elt *first_same_value;
  /* Circular chain of expressions that differ by an additive integer;
     zero when this chain is not useful.  */
  struct table_elt *related_value;
  /* Cost of this element's expression, from the COST macro.  */
  int cost;
  /* Usually GET_MODE (exp); for a CONST_INT (which has no mode), the
     mode the constant was being used in.  */
  enum machine_mode mode;
  /* Nonzero if the expression involves any reference to memory; such
     elements are removed on a write to an unidentified location.  */
  char in_memory;
  /* Nonzero if the memory reference is inside a structure or array.  */
  char in_struct;
  /* Nonzero if the element is a constant (including a fixed address).  */
  char is_const;
  /* Temporary mark used during some search routines.  */
  char flag;
};
445
7afe21cc
RK
446/* We don't want a lot of buckets, because we rarely have very many
447 things stored in the hash table, and a lot of buckets slows
448 down a lot of loops that happen frequently. */
449#define NBUCKETS 31
450
451/* Compute hash code of X in mode M. Special-case case where X is a pseudo
452 register (hard registers may require `do_not_record' to be set). */
453
454#define HASH(X, M) \
455 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
30f72379 456 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) % NBUCKETS \
7afe21cc
RK
457 : canon_hash (X, M) % NBUCKETS)
458
459/* Determine whether register number N is considered a fixed register for CSE.
460 It is desirable to replace other regs with fixed regs, to reduce need for
461 non-fixed hard regs.
462 A reg wins if it is either the frame pointer or designated as fixed,
463 but not if it is an overlapping register. */
464#ifdef OVERLAPPING_REGNO_P
465#define FIXED_REGNO_P(N) \
8bc169f2 466 (((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
6ab832bc 467 || fixed_regs[N] || global_regs[N]) \
7afe21cc
RK
468 && ! OVERLAPPING_REGNO_P ((N)))
469#else
470#define FIXED_REGNO_P(N) \
8bc169f2 471 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
6ab832bc 472 || fixed_regs[N] || global_regs[N])
7afe21cc
RK
473#endif
474
475/* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
ac07e066
RK
476 hard registers and pointers into the frame are the cheapest with a cost
477 of 0. Next come pseudos with a cost of one and other hard registers with
478 a cost of 2. Aside from these special cases, call `rtx_cost'. */
479
6ab832bc 480#define CHEAP_REGNO(N) \
8bc169f2
DE
481 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
482 || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
483 || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
484 || ((N) < FIRST_PSEUDO_REGISTER \
e7bb59fa 485 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
7afe21cc 486
6ab832bc
RK
487/* A register is cheap if it is a user variable assigned to the register
488 or if its register number always corresponds to a cheap register. */
489
490#define CHEAP_REG(N) \
491 ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER) \
492 || CHEAP_REGNO (REGNO (N)))
493
38734e55
ILT
494#define COST(X) \
495 (GET_CODE (X) == REG \
496 ? (CHEAP_REG (X) ? 0 \
497 : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1 \
498 : 2) \
954a5693 499 : notreg_cost(X))
7afe21cc 500
30f72379
MM
501/* Get the info associated with register N. */
502
503#define GET_CSE_REG_INFO(N) \
504 (((N) == cached_regno && cached_cse_reg_info) \
505 ? cached_cse_reg_info : get_cse_reg_info ((N)))
506
507/* Get the number of times this register has been updated in this
508 basic block. */
509
510#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->variant.reg_tick)
511
512/* Get the point at which REG was recorded in the table. */
513
514#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)
515
516/* Get the quantity number for REG. */
517
518#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)
519
7afe21cc
RK
520/* Determine if the quantity number for register X represents a valid index
521 into the `qty_...' variables. */
522
30f72379 523#define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (N))
7afe21cc 524
2f541799
MM
525#ifdef ADDRESS_COST
526/* The ADDRESS_COST macro does not deal with ADDRESSOF nodes. But,
527 during CSE, such nodes are present. Using an ADDRESSOF node which
528 refers to the address of a REG is a good thing because we can then
529 turn (MEM (ADDRESSSOF (REG))) into just plain REG. */
530#define CSE_ADDRESS_COST(RTX) \
531 ((GET_CODE (RTX) == ADDRESSOF && REG_P (XEXP ((RTX), 0))) \
532 ? -1 : ADDRESS_COST(RTX))
533#endif
534
7afe21cc
RK
535static struct table_elt *table[NBUCKETS];
536
537/* Chain of `struct table_elt's made so far for this function
538 but currently removed from the table. */
539
540static struct table_elt *free_element_chain;
541
542/* Number of `struct table_elt' structures made so far for this function. */
543
544static int n_elements_made;
545
546/* Maximum value `n_elements_made' has had so far in this compilation
547 for functions previously processed. */
548
549static int max_elements_made;
550
551/* Surviving equivalence class when two equivalence classes are merged
552 by recording the effects of a jump in the last insn. Zero if the
553 last insn was not a conditional jump. */
554
555static struct table_elt *last_jump_equiv_class;
556
557/* Set to the cost of a constant pool reference if one was found for a
558 symbolic constant. If this was found, it means we should try to
559 convert constants into constant pool entries if they don't fit in
560 the insn. */
561
562static int constant_pool_entries_cost;
563
6cd4575e
RK
564/* Define maximum length of a branch path. */
565
566#define PATHLENGTH 10
567
568/* This data describes a block that will be processed by cse_basic_block. */
569
/* This data describes a block that will be processed by cse_basic_block.  */
struct cse_basic_block_data {
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path {
    /* The branch insn.  */
    rtx branch;
    /* Whether it should be taken or not.  AROUND is the same as taken
       except that it is used when the destination label is not preceded
       by a BARRIER.  */
    enum taken {TAKEN, NOT_TAKEN, AROUND} status;
  } path[PATHLENGTH];
};
591
7afe21cc
RK
592/* Nonzero if X has the form (PLUS frame-pointer integer). We check for
593 virtual regs here because the simplify_*_operation routines are called
594 by integrate.c, which is called before virtual register instantiation. */
595
596#define FIXED_BASE_PLUS_P(X) \
8bc169f2
DE
597 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
598 || (X) == arg_pointer_rtx \
7afe21cc
RK
599 || (X) == virtual_stack_vars_rtx \
600 || (X) == virtual_incoming_args_rtx \
601 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
602 && (XEXP (X, 0) == frame_pointer_rtx \
8bc169f2 603 || XEXP (X, 0) == hard_frame_pointer_rtx \
7afe21cc
RK
604 || XEXP (X, 0) == arg_pointer_rtx \
605 || XEXP (X, 0) == virtual_stack_vars_rtx \
e9a25f70
JL
606 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
607 || GET_CODE (X) == ADDRESSOF)
7afe21cc 608
6f90e075
JW
609/* Similar, but also allows reference to the stack pointer.
610
611 This used to include FIXED_BASE_PLUS_P, however, we can't assume that
612 arg_pointer_rtx by itself is nonzero, because on at least one machine,
613 the i960, the arg pointer is zero when it is unused. */
7afe21cc
RK
614
615#define NONZERO_BASE_PLUS_P(X) \
8bc169f2 616 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
6f90e075
JW
617 || (X) == virtual_stack_vars_rtx \
618 || (X) == virtual_incoming_args_rtx \
619 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
620 && (XEXP (X, 0) == frame_pointer_rtx \
8bc169f2 621 || XEXP (X, 0) == hard_frame_pointer_rtx \
6f90e075
JW
622 || XEXP (X, 0) == arg_pointer_rtx \
623 || XEXP (X, 0) == virtual_stack_vars_rtx \
624 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
7afe21cc
RK
625 || (X) == stack_pointer_rtx \
626 || (X) == virtual_stack_dynamic_rtx \
627 || (X) == virtual_outgoing_args_rtx \
628 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
629 && (XEXP (X, 0) == stack_pointer_rtx \
630 || XEXP (X, 0) == virtual_stack_dynamic_rtx \
e9a25f70
JL
631 || XEXP (X, 0) == virtual_outgoing_args_rtx)) \
632 || GET_CODE (X) == ADDRESSOF)
7afe21cc 633
954a5693 634static int notreg_cost PROTO((rtx));
6cd4575e
RK
635static void new_basic_block PROTO((void));
636static void make_new_qty PROTO((int));
637static void make_regs_eqv PROTO((int, int));
638static void delete_reg_equiv PROTO((int));
639static int mention_regs PROTO((rtx));
640static int insert_regs PROTO((rtx, struct table_elt *, int));
641static void free_element PROTO((struct table_elt *));
2197a88a 642static void remove_from_table PROTO((struct table_elt *, unsigned));
6cd4575e 643static struct table_elt *get_element PROTO((void));
2197a88a
RK
644static struct table_elt *lookup PROTO((rtx, unsigned, enum machine_mode)),
645 *lookup_for_remove PROTO((rtx, unsigned, enum machine_mode));
6cd4575e 646static rtx lookup_as_function PROTO((rtx, enum rtx_code));
2197a88a 647static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
6cd4575e
RK
648 enum machine_mode));
649static void merge_equiv_classes PROTO((struct table_elt *,
650 struct table_elt *));
68c1e173 651static void invalidate PROTO((rtx, enum machine_mode));
9ae8ffe7 652static int cse_rtx_varies_p PROTO((rtx));
6cd4575e 653static void remove_invalid_refs PROTO((int));
34c73909 654static void remove_invalid_subreg_refs PROTO((int, int, enum machine_mode));
6cd4575e 655static void rehash_using_reg PROTO((rtx));
9ae8ffe7 656static void invalidate_memory PROTO((void));
6cd4575e
RK
657static void invalidate_for_call PROTO((void));
658static rtx use_related_value PROTO((rtx, struct table_elt *));
2197a88a
RK
659static unsigned canon_hash PROTO((rtx, enum machine_mode));
660static unsigned safe_hash PROTO((rtx, enum machine_mode));
6cd4575e 661static int exp_equiv_p PROTO((rtx, rtx, int, int));
f451db89 662static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
6500fb43
RK
663 HOST_WIDE_INT *,
664 HOST_WIDE_INT *));
6cd4575e 665static int refers_to_p PROTO((rtx, rtx));
6cd4575e
RK
666static rtx canon_reg PROTO((rtx, rtx));
667static void find_best_addr PROTO((rtx, rtx *));
668static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
669 enum machine_mode *,
670 enum machine_mode *));
96b0e481
RK
671static rtx cse_gen_binary PROTO((enum rtx_code, enum machine_mode,
672 rtx, rtx));
673static rtx simplify_plus_minus PROTO((enum rtx_code, enum machine_mode,
674 rtx, rtx));
6cd4575e
RK
675static rtx fold_rtx PROTO((rtx, rtx));
676static rtx equiv_constant PROTO((rtx));
677static void record_jump_equiv PROTO((rtx, int));
678static void record_jump_cond PROTO((enum rtx_code, enum machine_mode,
679 rtx, rtx, int));
7bd8b2a8 680static void cse_insn PROTO((rtx, rtx));
9ae8ffe7
JL
681static int note_mem_written PROTO((rtx));
682static void invalidate_from_clobbers PROTO((rtx));
6cd4575e
RK
683static rtx cse_process_notes PROTO((rtx, rtx));
684static void cse_around_loop PROTO((rtx));
685static void invalidate_skipped_set PROTO((rtx, rtx));
686static void invalidate_skipped_block PROTO((rtx));
687static void cse_check_loop_start PROTO((rtx, rtx));
688static void cse_set_around_loop PROTO((rtx, rtx, rtx));
689static rtx cse_basic_block PROTO((rtx, rtx, struct branch_path *, int));
79644f06 690static void count_reg_usage PROTO((rtx, int *, rtx, int));
a0153051 691extern void dump_class PROTO((struct table_elt*));
1a87eea2 692static void check_fold_consts PROTO((PTR));
30f72379
MM
693static struct cse_reg_info* get_cse_reg_info PROTO((int));
694static void free_cse_reg_info PROTO((splay_tree_value));
01e752d3 695static void flush_hash_table PROTO((void));
c407b802
RK
696
697extern int rtx_equal_function_value_matters;
7afe21cc 698\f
a4c6502a
MM
699/* Dump the expressions in the equivalence class indicated by CLASSP.
700 This function is used only for debugging. */
a0153051 701void
a4c6502a
MM
702dump_class (classp)
703 struct table_elt *classp;
704{
705 struct table_elt *elt;
706
707 fprintf (stderr, "Equivalence chain for ");
708 print_rtl (stderr, classp->exp);
709 fprintf (stderr, ": \n");
710
711 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
712 {
713 print_rtl (stderr, elt->exp);
714 fprintf (stderr, "\n");
715 }
716}
717
7afe21cc
RK
718/* Return an estimate of the cost of computing rtx X.
719 One use is in cse, to decide which expression to keep in the hash table.
720 Another is in rtl generation, to pick the cheapest way to multiply.
721 Other uses like the latter are expected in the future. */
722
954a5693
RK
723/* Internal function, to compute cost when X is not a register; called
724 from COST macro to keep it simple. */
725
726static int
727notreg_cost (x)
728 rtx x;
729{
730 return ((GET_CODE (x) == SUBREG
731 && GET_CODE (SUBREG_REG (x)) == REG
732 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
733 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
734 && (GET_MODE_SIZE (GET_MODE (x))
735 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
736 && subreg_lowpart_p (x)
737 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
738 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
739 ? (CHEAP_REG (SUBREG_REG (x)) ? 0
740 : (REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER ? 1
741 : 2))
742 : rtx_cost (x, SET) * 2);
743}
744
7afe21cc
RK
745/* Return the right cost to give to an operation
746 to make the cost of the corresponding register-to-register instruction
747 N times that of a fast register-to-register instruction. */
748
749#define COSTS_N_INSNS(N) ((N) * 4 - 2)
750
751int
e5f6a288 752rtx_cost (x, outer_code)
7afe21cc 753 rtx x;
79c9824e 754 enum rtx_code outer_code ATTRIBUTE_UNUSED;
7afe21cc
RK
755{
756 register int i, j;
757 register enum rtx_code code;
758 register char *fmt;
759 register int total;
760
761 if (x == 0)
762 return 0;
763
764 /* Compute the default costs of certain things.
765 Note that RTX_COSTS can override the defaults. */
766
767 code = GET_CODE (x);
768 switch (code)
769 {
770 case MULT:
771 /* Count multiplication by 2**n as a shift,
772 because if we are considering it, we would output it as a shift. */
773 if (GET_CODE (XEXP (x, 1)) == CONST_INT
774 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
775 total = 2;
776 else
777 total = COSTS_N_INSNS (5);
778 break;
779 case DIV:
780 case UDIV:
781 case MOD:
782 case UMOD:
783 total = COSTS_N_INSNS (7);
784 break;
785 case USE:
786 /* Used in loop.c and combine.c as a marker. */
787 total = 0;
788 break;
538b78e7
RS
789 case ASM_OPERANDS:
790 /* We don't want these to be used in substitutions because
791 we have no way of validating the resulting insn. So assign
792 anything containing an ASM_OPERANDS a very high cost. */
793 total = 1000;
794 break;
7afe21cc
RK
795 default:
796 total = 2;
797 }
798
799 switch (code)
800 {
801 case REG:
6ab832bc 802 return ! CHEAP_REG (x);
ac07e066 803
7afe21cc 804 case SUBREG:
fc3ffe83
RK
805 /* If we can't tie these modes, make this expensive. The larger
806 the mode, the more expensive it is. */
807 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
808 return COSTS_N_INSNS (2
809 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
7afe21cc
RK
810 return 2;
811#ifdef RTX_COSTS
e5f6a288 812 RTX_COSTS (x, code, outer_code);
7afe21cc 813#endif
47a0b68f 814#ifdef CONST_COSTS
e5f6a288 815 CONST_COSTS (x, code, outer_code);
47a0b68f 816#endif
8625fab5
KG
817
818 default:
819#ifdef DEFAULT_RTX_COSTS
820 DEFAULT_RTX_COSTS(x, code, outer_code);
821#endif
822 break;
7afe21cc
RK
823 }
824
825 /* Sum the costs of the sub-rtx's, plus cost of this operation,
826 which is already in total. */
827
828 fmt = GET_RTX_FORMAT (code);
829 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
830 if (fmt[i] == 'e')
e5f6a288 831 total += rtx_cost (XEXP (x, i), code);
7afe21cc
RK
832 else if (fmt[i] == 'E')
833 for (j = 0; j < XVECLEN (x, i); j++)
e5f6a288 834 total += rtx_cost (XVECEXP (x, i, j), code);
7afe21cc
RK
835
836 return total;
837}
838\f
30f72379
MM
839static struct cse_reg_info *
840get_cse_reg_info (regno)
841 int regno;
842{
843 struct cse_reg_info *cri;
844 splay_tree_node n;
845
846 /* See if we already have this entry. */
847 n = splay_tree_lookup (cse_reg_info_tree,
848 (splay_tree_key) regno);
849 if (n)
850 cri = (struct cse_reg_info *) (n->value);
851 else
852 {
853 /* Get a new cse_reg_info structure. */
854 if (cse_reg_info_free_list)
855 {
856 cri = cse_reg_info_free_list;
857 cse_reg_info_free_list = cri->variant.next;
858 }
859 else
860 cri = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));
861
862 /* Initialize it. */
863 cri->variant.reg_tick = 0;
864 cri->reg_in_table = -1;
865 cri->reg_qty = regno;
866
867 splay_tree_insert (cse_reg_info_tree,
868 (splay_tree_key) regno,
869 (splay_tree_value) cri);
870 }
871
872 /* Cache this lookup; we tend to be looking up information about the
873 same register several times in a row. */
874 cached_regno = regno;
875 cached_cse_reg_info = cri;
876
877 return cri;
878}
879
880static void
881free_cse_reg_info (v)
882 splay_tree_value v;
883{
884 struct cse_reg_info *cri = (struct cse_reg_info *) v;
885
886 cri->variant.next = cse_reg_info_free_list;
887 cse_reg_info_free_list = cri;
888}
889
7afe21cc
RK
890/* Clear the hash table and initialize each register with its own quantity,
891 for a new basic block. */
892
893static void
894new_basic_block ()
895{
896 register int i;
897
898 next_qty = max_reg;
899
30f72379
MM
900 if (cse_reg_info_tree)
901 {
902 splay_tree_delete (cse_reg_info_tree);
903 cached_cse_reg_info = 0;
904 }
905
906 cse_reg_info_tree = splay_tree_new (splay_tree_compare_ints, 0,
907 free_cse_reg_info);
7afe21cc 908
7afe21cc
RK
909 CLEAR_HARD_REG_SET (hard_regs_in_table);
910
911 /* The per-quantity values used to be initialized here, but it is
912 much faster to initialize each as it is made in `make_new_qty'. */
913
914 for (i = 0; i < NBUCKETS; i++)
915 {
916 register struct table_elt *this, *next;
917 for (this = table[i]; this; this = next)
918 {
919 next = this->next_same_hash;
920 free_element (this);
921 }
922 }
923
4c9a05bc 924 bzero ((char *) table, sizeof table);
7afe21cc
RK
925
926 prev_insn = 0;
927
928#ifdef HAVE_cc0
929 prev_insn_cc0 = 0;
930#endif
931}
932
933/* Say that register REG contains a quantity not in any register before
934 and initialize that quantity. */
935
936static void
937make_new_qty (reg)
938 register int reg;
939{
940 register int q;
941
942 if (next_qty >= max_qty)
943 abort ();
944
30f72379 945 q = REG_QTY (reg) = next_qty++;
7afe21cc
RK
946 qty_first_reg[q] = reg;
947 qty_last_reg[q] = reg;
948 qty_const[q] = qty_const_insn[q] = 0;
949 qty_comparison_code[q] = UNKNOWN;
950
951 reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
952}
953
954/* Make reg NEW equivalent to reg OLD.
955 OLD is not changing; NEW is. */
956
957static void
958make_regs_eqv (new, old)
959 register int new, old;
960{
961 register int lastr, firstr;
30f72379 962 register int q = REG_QTY (old);
7afe21cc
RK
963
964 /* Nothing should become eqv until it has a "non-invalid" qty number. */
965 if (! REGNO_QTY_VALID_P (old))
966 abort ();
967
30f72379 968 REG_QTY (new) = q;
7afe21cc
RK
969 firstr = qty_first_reg[q];
970 lastr = qty_last_reg[q];
971
972 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
973 hard regs. Among pseudos, if NEW will live longer than any other reg
974 of the same qty, and that is beyond the current basic block,
975 make it the new canonical replacement for this qty. */
976 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
977 /* Certain fixed registers might be of the class NO_REGS. This means
978 that not only can they not be allocated by the compiler, but
830a38ee 979 they cannot be used in substitutions or canonicalizations
7afe21cc
RK
980 either. */
981 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
982 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
983 || (new >= FIRST_PSEUDO_REGISTER
984 && (firstr < FIRST_PSEUDO_REGISTER
b1f21e0a
MM
985 || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
986 || (uid_cuid[REGNO_FIRST_UID (new)]
7afe21cc 987 < cse_basic_block_start))
b1f21e0a
MM
988 && (uid_cuid[REGNO_LAST_UID (new)]
989 > uid_cuid[REGNO_LAST_UID (firstr)]))))))
7afe21cc
RK
990 {
991 reg_prev_eqv[firstr] = new;
992 reg_next_eqv[new] = firstr;
993 reg_prev_eqv[new] = -1;
994 qty_first_reg[q] = new;
995 }
996 else
997 {
998 /* If NEW is a hard reg (known to be non-fixed), insert at end.
999 Otherwise, insert before any non-fixed hard regs that are at the
1000 end. Registers of class NO_REGS cannot be used as an
1001 equivalent for anything. */
1002 while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
1003 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
1004 && new >= FIRST_PSEUDO_REGISTER)
1005 lastr = reg_prev_eqv[lastr];
1006 reg_next_eqv[new] = reg_next_eqv[lastr];
1007 if (reg_next_eqv[lastr] >= 0)
1008 reg_prev_eqv[reg_next_eqv[lastr]] = new;
1009 else
1010 qty_last_reg[q] = new;
1011 reg_next_eqv[lastr] = new;
1012 reg_prev_eqv[new] = lastr;
1013 }
1014}
1015
1016/* Remove REG from its equivalence class. */
1017
1018static void
1019delete_reg_equiv (reg)
1020 register int reg;
1021{
30f72379 1022 register int q = REG_QTY (reg);
a4e262bc 1023 register int p, n;
7afe21cc 1024
a4e262bc 1025 /* If invalid, do nothing. */
7afe21cc
RK
1026 if (q == reg)
1027 return;
1028
a4e262bc
RK
1029 p = reg_prev_eqv[reg];
1030 n = reg_next_eqv[reg];
1031
7afe21cc
RK
1032 if (n != -1)
1033 reg_prev_eqv[n] = p;
1034 else
1035 qty_last_reg[q] = p;
1036 if (p != -1)
1037 reg_next_eqv[p] = n;
1038 else
1039 qty_first_reg[q] = n;
1040
30f72379 1041 REG_QTY (reg) = reg;
7afe21cc
RK
1042}
1043
1044/* Remove any invalid expressions from the hash table
1045 that refer to any of the registers contained in expression X.
1046
1047 Make sure that newly inserted references to those registers
1048 as subexpressions will be considered valid.
1049
1050 mention_regs is not called when a register itself
1051 is being stored in the table.
1052
1053 Return 1 if we have done something that may have changed the hash code
1054 of X. */
1055
1056static int
1057mention_regs (x)
1058 rtx x;
1059{
1060 register enum rtx_code code;
1061 register int i, j;
1062 register char *fmt;
1063 register int changed = 0;
1064
1065 if (x == 0)
e5f6a288 1066 return 0;
7afe21cc
RK
1067
1068 code = GET_CODE (x);
1069 if (code == REG)
1070 {
1071 register int regno = REGNO (x);
1072 register int endregno
1073 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1074 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
1075 int i;
1076
1077 for (i = regno; i < endregno; i++)
1078 {
30f72379 1079 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
7afe21cc
RK
1080 remove_invalid_refs (i);
1081
30f72379 1082 REG_IN_TABLE (i) = REG_TICK (i);
7afe21cc
RK
1083 }
1084
1085 return 0;
1086 }
1087
34c73909
R
1088 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1089 pseudo if they don't use overlapping words. We handle only pseudos
1090 here for simplicity. */
1091 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1092 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1093 {
1094 int i = REGNO (SUBREG_REG (x));
1095
30f72379 1096 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
34c73909
R
1097 {
1098 /* If reg_tick has been incremented more than once since
1099 reg_in_table was last set, that means that the entire
1100 register has been set before, so discard anything memorized
1101 for the entrire register, including all SUBREG expressions. */
30f72379 1102 if (REG_IN_TABLE (i) != REG_TICK (i) - 1)
34c73909
R
1103 remove_invalid_refs (i);
1104 else
1105 remove_invalid_subreg_refs (i, SUBREG_WORD (x), GET_MODE (x));
1106 }
1107
30f72379 1108 REG_IN_TABLE (i) = REG_TICK (i);
34c73909
R
1109 return 0;
1110 }
1111
7afe21cc
RK
1112 /* If X is a comparison or a COMPARE and either operand is a register
1113 that does not have a quantity, give it one. This is so that a later
1114 call to record_jump_equiv won't cause X to be assigned a different
1115 hash code and not found in the table after that call.
1116
1117 It is not necessary to do this here, since rehash_using_reg can
1118 fix up the table later, but doing this here eliminates the need to
1119 call that expensive function in the most common case where the only
1120 use of the register is in the comparison. */
1121
1122 if (code == COMPARE || GET_RTX_CLASS (code) == '<')
1123 {
1124 if (GET_CODE (XEXP (x, 0)) == REG
1125 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
906c4e36 1126 if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
7afe21cc
RK
1127 {
1128 rehash_using_reg (XEXP (x, 0));
1129 changed = 1;
1130 }
1131
1132 if (GET_CODE (XEXP (x, 1)) == REG
1133 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
906c4e36 1134 if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
7afe21cc
RK
1135 {
1136 rehash_using_reg (XEXP (x, 1));
1137 changed = 1;
1138 }
1139 }
1140
1141 fmt = GET_RTX_FORMAT (code);
1142 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1143 if (fmt[i] == 'e')
1144 changed |= mention_regs (XEXP (x, i));
1145 else if (fmt[i] == 'E')
1146 for (j = 0; j < XVECLEN (x, i); j++)
1147 changed |= mention_regs (XVECEXP (x, i, j));
1148
1149 return changed;
1150}
1151
1152/* Update the register quantities for inserting X into the hash table
1153 with a value equivalent to CLASSP.
1154 (If the class does not contain a REG, it is irrelevant.)
1155 If MODIFIED is nonzero, X is a destination; it is being modified.
1156 Note that delete_reg_equiv should be called on a register
1157 before insert_regs is done on that register with MODIFIED != 0.
1158
1159 Nonzero value means that elements of reg_qty have changed
1160 so X's hash code may be different. */
1161
1162static int
1163insert_regs (x, classp, modified)
1164 rtx x;
1165 struct table_elt *classp;
1166 int modified;
1167{
1168 if (GET_CODE (x) == REG)
1169 {
1170 register int regno = REGNO (x);
1171
1ff0c00d
RK
1172 /* If REGNO is in the equivalence table already but is of the
1173 wrong mode for that equivalence, don't do anything here. */
1174
1175 if (REGNO_QTY_VALID_P (regno)
30f72379 1176 && qty_mode[REG_QTY (regno)] != GET_MODE (x))
1ff0c00d
RK
1177 return 0;
1178
1179 if (modified || ! REGNO_QTY_VALID_P (regno))
7afe21cc
RK
1180 {
1181 if (classp)
1182 for (classp = classp->first_same_value;
1183 classp != 0;
1184 classp = classp->next_same_value)
1185 if (GET_CODE (classp->exp) == REG
1186 && GET_MODE (classp->exp) == GET_MODE (x))
1187 {
1188 make_regs_eqv (regno, REGNO (classp->exp));
1189 return 1;
1190 }
1191
1192 make_new_qty (regno);
30f72379 1193 qty_mode[REG_QTY (regno)] = GET_MODE (x);
7afe21cc
RK
1194 return 1;
1195 }
cdf4112f
TG
1196
1197 return 0;
7afe21cc 1198 }
c610adec
RK
1199
1200 /* If X is a SUBREG, we will likely be inserting the inner register in the
1201 table. If that register doesn't have an assigned quantity number at
1202 this point but does later, the insertion that we will be doing now will
1203 not be accessible because its hash code will have changed. So assign
1204 a quantity number now. */
1205
1206 else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1207 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1208 {
34c73909
R
1209 int regno = REGNO (SUBREG_REG (x));
1210
906c4e36 1211 insert_regs (SUBREG_REG (x), NULL_PTR, 0);
34c73909
R
1212 /* Mention_regs checks if REG_TICK is exactly one larger than
1213 REG_IN_TABLE to find out if there was only a single preceding
1214 invalidation - for the SUBREG - or another one, which would be
1215 for the full register. Since we don't invalidate the SUBREG
1216 here first, we might have to bump up REG_TICK so that mention_regs
1217 will do the right thing. */
30f72379
MM
1218 if (REG_IN_TABLE (regno) >= 0
1219 && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1220 REG_TICK (regno)++;
34c73909 1221 mention_regs (x);
c610adec
RK
1222 return 1;
1223 }
7afe21cc
RK
1224 else
1225 return mention_regs (x);
1226}
1227\f
1228/* Look in or update the hash table. */
1229
1230/* Put the element ELT on the list of free elements. */
1231
1232static void
1233free_element (elt)
1234 struct table_elt *elt;
1235{
1236 elt->next_same_hash = free_element_chain;
1237 free_element_chain = elt;
1238}
1239
1240/* Return an element that is free for use. */
1241
1242static struct table_elt *
1243get_element ()
1244{
1245 struct table_elt *elt = free_element_chain;
1246 if (elt)
1247 {
1248 free_element_chain = elt->next_same_hash;
1249 return elt;
1250 }
1251 n_elements_made++;
1252 return (struct table_elt *) oballoc (sizeof (struct table_elt));
1253}
1254
1255/* Remove table element ELT from use in the table.
1256 HASH is its hash code, made using the HASH macro.
1257 It's an argument because often that is known in advance
1258 and we save much time not recomputing it. */
1259
1260static void
1261remove_from_table (elt, hash)
1262 register struct table_elt *elt;
2197a88a 1263 unsigned hash;
7afe21cc
RK
1264{
1265 if (elt == 0)
1266 return;
1267
1268 /* Mark this element as removed. See cse_insn. */
1269 elt->first_same_value = 0;
1270
1271 /* Remove the table element from its equivalence class. */
1272
1273 {
1274 register struct table_elt *prev = elt->prev_same_value;
1275 register struct table_elt *next = elt->next_same_value;
1276
1277 if (next) next->prev_same_value = prev;
1278
1279 if (prev)
1280 prev->next_same_value = next;
1281 else
1282 {
1283 register struct table_elt *newfirst = next;
1284 while (next)
1285 {
1286 next->first_same_value = newfirst;
1287 next = next->next_same_value;
1288 }
1289 }
1290 }
1291
1292 /* Remove the table element from its hash bucket. */
1293
1294 {
1295 register struct table_elt *prev = elt->prev_same_hash;
1296 register struct table_elt *next = elt->next_same_hash;
1297
1298 if (next) next->prev_same_hash = prev;
1299
1300 if (prev)
1301 prev->next_same_hash = next;
1302 else if (table[hash] == elt)
1303 table[hash] = next;
1304 else
1305 {
1306 /* This entry is not in the proper hash bucket. This can happen
1307 when two classes were merged by `merge_equiv_classes'. Search
1308 for the hash bucket that it heads. This happens only very
1309 rarely, so the cost is acceptable. */
1310 for (hash = 0; hash < NBUCKETS; hash++)
1311 if (table[hash] == elt)
1312 table[hash] = next;
1313 }
1314 }
1315
1316 /* Remove the table element from its related-value circular chain. */
1317
1318 if (elt->related_value != 0 && elt->related_value != elt)
1319 {
1320 register struct table_elt *p = elt->related_value;
1321 while (p->related_value != elt)
1322 p = p->related_value;
1323 p->related_value = elt->related_value;
1324 if (p->related_value == p)
1325 p->related_value = 0;
1326 }
1327
1328 free_element (elt);
1329}
1330
1331/* Look up X in the hash table and return its table element,
1332 or 0 if X is not in the table.
1333
1334 MODE is the machine-mode of X, or if X is an integer constant
1335 with VOIDmode then MODE is the mode with which X will be used.
1336
1337 Here we are satisfied to find an expression whose tree structure
1338 looks like X. */
1339
1340static struct table_elt *
1341lookup (x, hash, mode)
1342 rtx x;
2197a88a 1343 unsigned hash;
7afe21cc
RK
1344 enum machine_mode mode;
1345{
1346 register struct table_elt *p;
1347
1348 for (p = table[hash]; p; p = p->next_same_hash)
1349 if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1350 || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1351 return p;
1352
1353 return 0;
1354}
1355
1356/* Like `lookup' but don't care whether the table element uses invalid regs.
1357 Also ignore discrepancies in the machine mode of a register. */
1358
1359static struct table_elt *
1360lookup_for_remove (x, hash, mode)
1361 rtx x;
2197a88a 1362 unsigned hash;
7afe21cc
RK
1363 enum machine_mode mode;
1364{
1365 register struct table_elt *p;
1366
1367 if (GET_CODE (x) == REG)
1368 {
1369 int regno = REGNO (x);
1370 /* Don't check the machine mode when comparing registers;
1371 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1372 for (p = table[hash]; p; p = p->next_same_hash)
1373 if (GET_CODE (p->exp) == REG
1374 && REGNO (p->exp) == regno)
1375 return p;
1376 }
1377 else
1378 {
1379 for (p = table[hash]; p; p = p->next_same_hash)
1380 if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1381 return p;
1382 }
1383
1384 return 0;
1385}
1386
1387/* Look for an expression equivalent to X and with code CODE.
1388 If one is found, return that expression. */
1389
1390static rtx
1391lookup_as_function (x, code)
1392 rtx x;
1393 enum rtx_code code;
1394{
1395 register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
1396 GET_MODE (x));
34c73909
R
1397 /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1398 long as we are narrowing. So if we looked in vain for a mode narrower
1399 than word_mode before, look for word_mode now. */
1400 if (p == 0 && code == CONST_INT
1401 && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1402 {
1403 x = copy_rtx (x);
1404 PUT_MODE (x, word_mode);
1405 p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS, word_mode);
1406 }
1407
7afe21cc
RK
1408 if (p == 0)
1409 return 0;
1410
1411 for (p = p->first_same_value; p; p = p->next_same_value)
1412 {
1413 if (GET_CODE (p->exp) == code
1414 /* Make sure this is a valid entry in the table. */
1415 && exp_equiv_p (p->exp, p->exp, 1, 0))
1416 return p->exp;
1417 }
1418
1419 return 0;
1420}
1421
1422/* Insert X in the hash table, assuming HASH is its hash code
1423 and CLASSP is an element of the class it should go in
1424 (or 0 if a new class should be made).
1425 It is inserted at the proper position to keep the class in
1426 the order cheapest first.
1427
1428 MODE is the machine-mode of X, or if X is an integer constant
1429 with VOIDmode then MODE is the mode with which X will be used.
1430
1431 For elements of equal cheapness, the most recent one
1432 goes in front, except that the first element in the list
1433 remains first unless a cheaper element is added. The order of
1434 pseudo-registers does not matter, as canon_reg will be called to
830a38ee 1435 find the cheapest when a register is retrieved from the table.
7afe21cc
RK
1436
1437 The in_memory field in the hash table element is set to 0.
1438 The caller must set it nonzero if appropriate.
1439
1440 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1441 and if insert_regs returns a nonzero value
1442 you must then recompute its hash code before calling here.
1443
1444 If necessary, update table showing constant values of quantities. */
1445
1446#define CHEAPER(X,Y) ((X)->cost < (Y)->cost)
1447
1448static struct table_elt *
1449insert (x, classp, hash, mode)
1450 register rtx x;
1451 register struct table_elt *classp;
2197a88a 1452 unsigned hash;
7afe21cc
RK
1453 enum machine_mode mode;
1454{
1455 register struct table_elt *elt;
1456
1457 /* If X is a register and we haven't made a quantity for it,
1458 something is wrong. */
1459 if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1460 abort ();
1461
1462 /* If X is a hard register, show it is being put in the table. */
1463 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1464 {
1465 int regno = REGNO (x);
1466 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1467 int i;
1468
1469 for (i = regno; i < endregno; i++)
1470 SET_HARD_REG_BIT (hard_regs_in_table, i);
1471 }
1472
a5dfb4ee 1473 /* If X is a label, show we recorded it. */
970c9ace
RK
1474 if (GET_CODE (x) == LABEL_REF
1475 || (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
1476 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF))
a5dfb4ee 1477 recorded_label_ref = 1;
7afe21cc
RK
1478
1479 /* Put an element for X into the right hash bucket. */
1480
1481 elt = get_element ();
1482 elt->exp = x;
1483 elt->cost = COST (x);
1484 elt->next_same_value = 0;
1485 elt->prev_same_value = 0;
1486 elt->next_same_hash = table[hash];
1487 elt->prev_same_hash = 0;
1488 elt->related_value = 0;
1489 elt->in_memory = 0;
1490 elt->mode = mode;
1491 elt->is_const = (CONSTANT_P (x)
1492 /* GNU C++ takes advantage of this for `this'
1493 (and other const values). */
1494 || (RTX_UNCHANGING_P (x)
1495 && GET_CODE (x) == REG
1496 && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1497 || FIXED_BASE_PLUS_P (x));
1498
1499 if (table[hash])
1500 table[hash]->prev_same_hash = elt;
1501 table[hash] = elt;
1502
1503 /* Put it into the proper value-class. */
1504 if (classp)
1505 {
1506 classp = classp->first_same_value;
1507 if (CHEAPER (elt, classp))
1508 /* Insert at the head of the class */
1509 {
1510 register struct table_elt *p;
1511 elt->next_same_value = classp;
1512 classp->prev_same_value = elt;
1513 elt->first_same_value = elt;
1514
1515 for (p = classp; p; p = p->next_same_value)
1516 p->first_same_value = elt;
1517 }
1518 else
1519 {
1520 /* Insert not at head of the class. */
1521 /* Put it after the last element cheaper than X. */
1522 register struct table_elt *p, *next;
1523 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1524 p = next);
1525 /* Put it after P and before NEXT. */
1526 elt->next_same_value = next;
1527 if (next)
1528 next->prev_same_value = elt;
1529 elt->prev_same_value = p;
1530 p->next_same_value = elt;
1531 elt->first_same_value = classp;
1532 }
1533 }
1534 else
1535 elt->first_same_value = elt;
1536
1537 /* If this is a constant being set equivalent to a register or a register
1538 being set equivalent to a constant, note the constant equivalence.
1539
1540 If this is a constant, it cannot be equivalent to a different constant,
1541 and a constant is the only thing that can be cheaper than a register. So
1542 we know the register is the head of the class (before the constant was
1543 inserted).
1544
1545 If this is a register that is not already known equivalent to a
1546 constant, we must check the entire class.
1547
1548 If this is a register that is already known equivalent to an insn,
1549 update `qty_const_insn' to show that `this_insn' is the latest
1550 insn making that quantity equivalent to the constant. */
1551
f353588a
RK
1552 if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1553 && GET_CODE (x) != REG)
7afe21cc 1554 {
30f72379
MM
1555 qty_const[REG_QTY (REGNO (classp->exp))]
1556 = gen_lowpart_if_possible (qty_mode[REG_QTY (REGNO (classp->exp))], x);
1557 qty_const_insn[REG_QTY (REGNO (classp->exp))] = this_insn;
7afe21cc
RK
1558 }
1559
30f72379 1560 else if (GET_CODE (x) == REG && classp && ! qty_const[REG_QTY (REGNO (x))]
f353588a 1561 && ! elt->is_const)
7afe21cc
RK
1562 {
1563 register struct table_elt *p;
1564
1565 for (p = classp; p != 0; p = p->next_same_value)
1566 {
f353588a 1567 if (p->is_const && GET_CODE (p->exp) != REG)
7afe21cc 1568 {
30f72379 1569 qty_const[REG_QTY (REGNO (x))]
7afe21cc 1570 = gen_lowpart_if_possible (GET_MODE (x), p->exp);
30f72379 1571 qty_const_insn[REG_QTY (REGNO (x))] = this_insn;
7afe21cc
RK
1572 break;
1573 }
1574 }
1575 }
1576
30f72379
MM
1577 else if (GET_CODE (x) == REG && qty_const[REG_QTY (REGNO (x))]
1578 && GET_MODE (x) == qty_mode[REG_QTY (REGNO (x))])
1579 qty_const_insn[REG_QTY (REGNO (x))] = this_insn;
7afe21cc
RK
1580
1581 /* If this is a constant with symbolic value,
1582 and it has a term with an explicit integer value,
1583 link it up with related expressions. */
1584 if (GET_CODE (x) == CONST)
1585 {
1586 rtx subexp = get_related_value (x);
2197a88a 1587 unsigned subhash;
7afe21cc
RK
1588 struct table_elt *subelt, *subelt_prev;
1589
1590 if (subexp != 0)
1591 {
1592 /* Get the integer-free subexpression in the hash table. */
1593 subhash = safe_hash (subexp, mode) % NBUCKETS;
1594 subelt = lookup (subexp, subhash, mode);
1595 if (subelt == 0)
906c4e36 1596 subelt = insert (subexp, NULL_PTR, subhash, mode);
7afe21cc
RK
1597 /* Initialize SUBELT's circular chain if it has none. */
1598 if (subelt->related_value == 0)
1599 subelt->related_value = subelt;
1600 /* Find the element in the circular chain that precedes SUBELT. */
1601 subelt_prev = subelt;
1602 while (subelt_prev->related_value != subelt)
1603 subelt_prev = subelt_prev->related_value;
1604 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1605 This way the element that follows SUBELT is the oldest one. */
1606 elt->related_value = subelt_prev->related_value;
1607 subelt_prev->related_value = elt;
1608 }
1609 }
1610
1611 return elt;
1612}
1613\f
1614/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1615 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1616 the two classes equivalent.
1617
1618 CLASS1 will be the surviving class; CLASS2 should not be used after this
1619 call.
1620
1621 Any invalid entries in CLASS2 will not be copied. */
1622
1623static void
1624merge_equiv_classes (class1, class2)
1625 struct table_elt *class1, *class2;
1626{
1627 struct table_elt *elt, *next, *new;
1628
1629 /* Ensure we start with the head of the classes. */
1630 class1 = class1->first_same_value;
1631 class2 = class2->first_same_value;
1632
1633 /* If they were already equal, forget it. */
1634 if (class1 == class2)
1635 return;
1636
1637 for (elt = class2; elt; elt = next)
1638 {
2197a88a 1639 unsigned hash;
7afe21cc
RK
1640 rtx exp = elt->exp;
1641 enum machine_mode mode = elt->mode;
1642
1643 next = elt->next_same_value;
1644
1645 /* Remove old entry, make a new one in CLASS1's class.
1646 Don't do this for invalid entries as we cannot find their
0f41302f 1647 hash code (it also isn't necessary). */
7afe21cc
RK
1648 if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1649 {
1650 hash_arg_in_memory = 0;
1651 hash_arg_in_struct = 0;
1652 hash = HASH (exp, mode);
1653
1654 if (GET_CODE (exp) == REG)
1655 delete_reg_equiv (REGNO (exp));
1656
1657 remove_from_table (elt, hash);
1658
1659 if (insert_regs (exp, class1, 0))
8ae2b8f6
JW
1660 {
1661 rehash_using_reg (exp);
1662 hash = HASH (exp, mode);
1663 }
7afe21cc
RK
1664 new = insert (exp, class1, hash, mode);
1665 new->in_memory = hash_arg_in_memory;
1666 new->in_struct = hash_arg_in_struct;
1667 }
1668 }
1669}
1670\f
01e752d3
JL
1671
1672/* Flush the entire hash table. */
1673
1674static void
1675flush_hash_table ()
1676{
1677 int i;
1678 struct table_elt *p;
1679
1680 for (i = 0; i < NBUCKETS; i++)
1681 for (p = table[i]; p; p = table[i])
1682 {
1683 /* Note that invalidate can remove elements
1684 after P in the current hash chain. */
1685 if (GET_CODE (p->exp) == REG)
1686 invalidate (p->exp, p->mode);
1687 else
1688 remove_from_table (p, i);
1689 }
1690}
1691
1692
7afe21cc
RK
1693/* Remove from the hash table, or mark as invalid,
1694 all expressions whose values could be altered by storing in X.
1695 X is a register, a subreg, or a memory reference with nonvarying address
1696 (because, when a memory reference with a varying address is stored in,
1697 all memory references are removed by invalidate_memory
1698 so specific invalidation is superfluous).
bb4034b3
JW
1699 FULL_MODE, if not VOIDmode, indicates that this much should be invalidated
1700 instead of just the amount indicated by the mode of X. This is only used
1701 for bitfield stores into memory.
7afe21cc
RK
1702
1703 A nonvarying address may be just a register or just
1704 a symbol reference, or it may be either of those plus
1705 a numeric offset. */
1706
1707static void
bb4034b3 1708invalidate (x, full_mode)
7afe21cc 1709 rtx x;
bb4034b3 1710 enum machine_mode full_mode;
7afe21cc
RK
1711{
1712 register int i;
1713 register struct table_elt *p;
7afe21cc
RK
1714
1715 /* If X is a register, dependencies on its contents
1716 are recorded through the qty number mechanism.
1717 Just change the qty number of the register,
1718 mark it as invalid for expressions that refer to it,
1719 and remove it itself. */
1720
1721 if (GET_CODE (x) == REG)
1722 {
1723 register int regno = REGNO (x);
2197a88a 1724 register unsigned hash = HASH (x, GET_MODE (x));
7afe21cc
RK
1725
1726 /* Remove REGNO from any quantity list it might be on and indicate
9ec36da5 1727 that its value might have changed. If it is a pseudo, remove its
7afe21cc
RK
1728 entry from the hash table.
1729
1730 For a hard register, we do the first two actions above for any
1731 additional hard registers corresponding to X. Then, if any of these
1732 registers are in the table, we must remove any REG entries that
1733 overlap these registers. */
1734
1735 delete_reg_equiv (regno);
30f72379 1736 REG_TICK (regno)++;
7afe21cc
RK
1737
1738 if (regno >= FIRST_PSEUDO_REGISTER)
85e4d983
RK
1739 {
1740 /* Because a register can be referenced in more than one mode,
1741 we might have to remove more than one table entry. */
1742
1743 struct table_elt *elt;
1744
2d8b0f3a 1745 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
85e4d983
RK
1746 remove_from_table (elt, hash);
1747 }
7afe21cc
RK
1748 else
1749 {
54b1de55
RK
1750 HOST_WIDE_INT in_table
1751 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
7afe21cc
RK
1752 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1753 int tregno, tendregno;
1754 register struct table_elt *p, *next;
1755
1756 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1757
1758 for (i = regno + 1; i < endregno; i++)
1759 {
1760 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
1761 CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
1762 delete_reg_equiv (i);
30f72379 1763 REG_TICK (i)++;
7afe21cc
RK
1764 }
1765
1766 if (in_table)
1767 for (hash = 0; hash < NBUCKETS; hash++)
1768 for (p = table[hash]; p; p = next)
1769 {
1770 next = p->next_same_hash;
1771
1772 if (GET_CODE (p->exp) != REG
1773 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1774 continue;
1775
1776 tregno = REGNO (p->exp);
1777 tendregno
1778 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1779 if (tendregno > regno && tregno < endregno)
925be47c 1780 remove_from_table (p, hash);
7afe21cc
RK
1781 }
1782 }
1783
1784 return;
1785 }
1786
1787 if (GET_CODE (x) == SUBREG)
1788 {
1789 if (GET_CODE (SUBREG_REG (x)) != REG)
1790 abort ();
bb4034b3 1791 invalidate (SUBREG_REG (x), VOIDmode);
7afe21cc
RK
1792 return;
1793 }
1794
aac5cc16
RH
1795 /* If X is a parallel, invalidate all of its elements. */
1796
1797 if (GET_CODE (x) == PARALLEL)
1798 {
1799 for (i = XVECLEN (x, 0) - 1; i >= 0 ; --i)
1800 invalidate (XVECEXP (x, 0, i), VOIDmode);
1801 return;
1802 }
1803
1804 /* If X is an expr_list, this is part of a disjoint return value;
1805 extract the location in question ignoring the offset. */
1806
1807 if (GET_CODE (x) == EXPR_LIST)
1808 {
1809 invalidate (XEXP (x, 0), VOIDmode);
1810 return;
1811 }
1812
7afe21cc
RK
1813 /* X is not a register; it must be a memory reference with
1814 a nonvarying address. Remove all hash table elements
1815 that refer to overlapping pieces of memory. */
1816
1817 if (GET_CODE (x) != MEM)
1818 abort ();
7afe21cc 1819
bb4034b3
JW
1820 if (full_mode == VOIDmode)
1821 full_mode = GET_MODE (x);
1822
7afe21cc
RK
1823 for (i = 0; i < NBUCKETS; i++)
1824 {
1825 register struct table_elt *next;
1826 for (p = table[i]; p; p = next)
1827 {
1828 next = p->next_same_hash;
9ae8ffe7
JL
1829 /* Invalidate ASM_OPERANDS which reference memory (this is easier
1830 than checking all the aliases). */
1831 if (p->in_memory
1832 && (GET_CODE (p->exp) != MEM
1833 || true_dependence (x, full_mode, p->exp, cse_rtx_varies_p)))
7afe21cc
RK
1834 remove_from_table (p, i);
1835 }
1836 }
1837}
1838
1839/* Remove all expressions that refer to register REGNO,
1840 since they are already invalid, and we are about to
1841 mark that register valid again and don't want the old
1842 expressions to reappear as valid. */
1843
1844static void
1845remove_invalid_refs (regno)
1846 int regno;
1847{
1848 register int i;
1849 register struct table_elt *p, *next;
1850
1851 for (i = 0; i < NBUCKETS; i++)
1852 for (p = table[i]; p; p = next)
1853 {
1854 next = p->next_same_hash;
1855 if (GET_CODE (p->exp) != REG
906c4e36 1856 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
7afe21cc
RK
1857 remove_from_table (p, i);
1858 }
1859}
34c73909
R
1860
1861/* Likewise for a subreg with subreg_reg WORD and mode MODE. */
1862static void
1863remove_invalid_subreg_refs (regno, word, mode)
1864 int regno;
1865 int word;
1866 enum machine_mode mode;
1867{
1868 register int i;
1869 register struct table_elt *p, *next;
1870 int end = word + (GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD;
1871
1872 for (i = 0; i < NBUCKETS; i++)
1873 for (p = table[i]; p; p = next)
1874 {
1875 rtx exp;
1876 next = p->next_same_hash;
1877
1878 exp = p->exp;
1879 if (GET_CODE (p->exp) != REG
1880 && (GET_CODE (exp) != SUBREG
1881 || GET_CODE (SUBREG_REG (exp)) != REG
1882 || REGNO (SUBREG_REG (exp)) != regno
1883 || (((SUBREG_WORD (exp)
1884 + (GET_MODE_SIZE (GET_MODE (exp)) - 1) / UNITS_PER_WORD)
1885 >= word)
1886 && SUBREG_WORD (exp) <= end))
1887 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1888 remove_from_table (p, i);
1889 }
1890}
7afe21cc
RK
1891\f
1892/* Recompute the hash codes of any valid entries in the hash table that
1893 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1894
1895 This is called when we make a jump equivalence. */
1896
1897static void
1898rehash_using_reg (x)
1899 rtx x;
1900{
973838fd 1901 unsigned int i;
7afe21cc 1902 struct table_elt *p, *next;
2197a88a 1903 unsigned hash;
7afe21cc
RK
1904
1905 if (GET_CODE (x) == SUBREG)
1906 x = SUBREG_REG (x);
1907
1908 /* If X is not a register or if the register is known not to be in any
1909 valid entries in the table, we have no work to do. */
1910
1911 if (GET_CODE (x) != REG
30f72379
MM
1912 || REG_IN_TABLE (REGNO (x)) < 0
1913 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
7afe21cc
RK
1914 return;
1915
1916 /* Scan all hash chains looking for valid entries that mention X.
1917 If we find one and it is in the wrong hash chain, move it. We can skip
1918 objects that are registers, since they are handled specially. */
1919
1920 for (i = 0; i < NBUCKETS; i++)
1921 for (p = table[i]; p; p = next)
1922 {
1923 next = p->next_same_hash;
1924 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
538b78e7 1925 && exp_equiv_p (p->exp, p->exp, 1, 0)
7afe21cc
RK
1926 && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
1927 {
1928 if (p->next_same_hash)
1929 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1930
1931 if (p->prev_same_hash)
1932 p->prev_same_hash->next_same_hash = p->next_same_hash;
1933 else
1934 table[i] = p->next_same_hash;
1935
1936 p->next_same_hash = table[hash];
1937 p->prev_same_hash = 0;
1938 if (table[hash])
1939 table[hash]->prev_same_hash = p;
1940 table[hash] = p;
1941 }
1942 }
1943}
1944\f
7afe21cc
RK
1945/* Remove from the hash table any expression that is a call-clobbered
1946 register. Also update their TICK values. */
1947
1948static void
1949invalidate_for_call ()
1950{
1951 int regno, endregno;
1952 int i;
2197a88a 1953 unsigned hash;
7afe21cc
RK
1954 struct table_elt *p, *next;
1955 int in_table = 0;
1956
1957 /* Go through all the hard registers. For each that is clobbered in
1958 a CALL_INSN, remove the register from quantity chains and update
1959 reg_tick if defined. Also see if any of these registers is currently
1960 in the table. */
1961
1962 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1963 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1964 {
1965 delete_reg_equiv (regno);
30f72379
MM
1966 if (REG_TICK (regno) >= 0)
1967 REG_TICK (regno)++;
7afe21cc 1968
0e227018 1969 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
7afe21cc
RK
1970 }
1971
1972 /* In the case where we have no call-clobbered hard registers in the
1973 table, we are done. Otherwise, scan the table and remove any
1974 entry that overlaps a call-clobbered register. */
1975
1976 if (in_table)
1977 for (hash = 0; hash < NBUCKETS; hash++)
1978 for (p = table[hash]; p; p = next)
1979 {
1980 next = p->next_same_hash;
1981
9ae8ffe7
JL
1982 if (p->in_memory)
1983 {
1984 remove_from_table (p, hash);
1985 continue;
1986 }
1987
7afe21cc
RK
1988 if (GET_CODE (p->exp) != REG
1989 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1990 continue;
1991
1992 regno = REGNO (p->exp);
1993 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1994
1995 for (i = regno; i < endregno; i++)
1996 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1997 {
1998 remove_from_table (p, hash);
1999 break;
2000 }
2001 }
2002}
2003\f
2004/* Given an expression X of type CONST,
2005 and ELT which is its table entry (or 0 if it
2006 is not in the hash table),
2007 return an alternate expression for X as a register plus integer.
2008 If none can be found, return 0. */
2009
2010static rtx
2011use_related_value (x, elt)
2012 rtx x;
2013 struct table_elt *elt;
2014{
2015 register struct table_elt *relt = 0;
2016 register struct table_elt *p, *q;
906c4e36 2017 HOST_WIDE_INT offset;
7afe21cc
RK
2018
2019 /* First, is there anything related known?
2020 If we have a table element, we can tell from that.
2021 Otherwise, must look it up. */
2022
2023 if (elt != 0 && elt->related_value != 0)
2024 relt = elt;
2025 else if (elt == 0 && GET_CODE (x) == CONST)
2026 {
2027 rtx subexp = get_related_value (x);
2028 if (subexp != 0)
2029 relt = lookup (subexp,
2030 safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
2031 GET_MODE (subexp));
2032 }
2033
2034 if (relt == 0)
2035 return 0;
2036
2037 /* Search all related table entries for one that has an
2038 equivalent register. */
2039
2040 p = relt;
2041 while (1)
2042 {
2043 /* This loop is strange in that it is executed in two different cases.
2044 The first is when X is already in the table. Then it is searching
2045 the RELATED_VALUE list of X's class (RELT). The second case is when
2046 X is not in the table. Then RELT points to a class for the related
2047 value.
2048
2049 Ensure that, whatever case we are in, that we ignore classes that have
2050 the same value as X. */
2051
2052 if (rtx_equal_p (x, p->exp))
2053 q = 0;
2054 else
2055 for (q = p->first_same_value; q; q = q->next_same_value)
2056 if (GET_CODE (q->exp) == REG)
2057 break;
2058
2059 if (q)
2060 break;
2061
2062 p = p->related_value;
2063
2064 /* We went all the way around, so there is nothing to be found.
2065 Alternatively, perhaps RELT was in the table for some other reason
2066 and it has no related values recorded. */
2067 if (p == relt || p == 0)
2068 break;
2069 }
2070
2071 if (q == 0)
2072 return 0;
2073
2074 offset = (get_integer_term (x) - get_integer_term (p->exp));
2075 /* Note: OFFSET may be 0 if P->xexp and X are related by commutativity. */
2076 return plus_constant (q->exp, offset);
2077}
2078\f
2079/* Hash an rtx. We are careful to make sure the value is never negative.
2080 Equivalent registers hash identically.
2081 MODE is used in hashing for CONST_INTs only;
2082 otherwise the mode of X is used.
2083
2084 Store 1 in do_not_record if any subexpression is volatile.
2085
2086 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2087 which does not have the RTX_UNCHANGING_P bit set.
2088 In this case, also store 1 in hash_arg_in_struct
2089 if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.
2090
2091 Note that cse_insn knows that the hash code of a MEM expression
2092 is just (int) MEM plus the hash code of the address. */
2093
2197a88a 2094static unsigned
7afe21cc
RK
2095canon_hash (x, mode)
2096 rtx x;
2097 enum machine_mode mode;
2098{
2099 register int i, j;
2197a88a 2100 register unsigned hash = 0;
7afe21cc
RK
2101 register enum rtx_code code;
2102 register char *fmt;
2103
2104 /* repeat is used to turn tail-recursion into iteration. */
2105 repeat:
2106 if (x == 0)
2107 return hash;
2108
2109 code = GET_CODE (x);
2110 switch (code)
2111 {
2112 case REG:
2113 {
2114 register int regno = REGNO (x);
2115
2116 /* On some machines, we can't record any non-fixed hard register,
2117 because extending its life will cause reload problems. We
9a794e50
RH
2118 consider ap, fp, and sp to be fixed for this purpose.
2119
2120 We also consider CCmode registers to be fixed for this purpose;
2121 failure to do so leads to failure to simplify 0<100 type of
2122 conditionals.
2123
0f41302f 2124 On all machines, we can't record any global registers. */
7afe21cc
RK
2125
2126 if (regno < FIRST_PSEUDO_REGISTER
2127 && (global_regs[regno]
f95182a4
ILT
2128 || (SMALL_REGISTER_CLASSES
2129 && ! fixed_regs[regno]
7afe21cc 2130 && regno != FRAME_POINTER_REGNUM
8bc169f2 2131 && regno != HARD_FRAME_POINTER_REGNUM
7afe21cc 2132 && regno != ARG_POINTER_REGNUM
9a794e50
RH
2133 && regno != STACK_POINTER_REGNUM
2134 && GET_MODE_CLASS (GET_MODE (x)) != MODE_CC)))
7afe21cc
RK
2135 {
2136 do_not_record = 1;
2137 return 0;
2138 }
30f72379 2139 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2197a88a 2140 return hash;
7afe21cc
RK
2141 }
2142
34c73909
R
2143 /* We handle SUBREG of a REG specially because the underlying
2144 reg changes its hash value with every value change; we don't
2145 want to have to forget unrelated subregs when one subreg changes. */
2146 case SUBREG:
2147 {
2148 if (GET_CODE (SUBREG_REG (x)) == REG)
2149 {
2150 hash += (((unsigned) SUBREG << 7)
2151 + REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
2152 return hash;
2153 }
2154 break;
2155 }
2156
7afe21cc 2157 case CONST_INT:
2197a88a
RK
2158 {
2159 unsigned HOST_WIDE_INT tem = INTVAL (x);
2160 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2161 return hash;
2162 }
7afe21cc
RK
2163
2164 case CONST_DOUBLE:
2165 /* This is like the general case, except that it only counts
2166 the integers representing the constant. */
2197a88a 2167 hash += (unsigned) code + (unsigned) GET_MODE (x);
969c8517
RK
2168 if (GET_MODE (x) != VOIDmode)
2169 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
2170 {
2171 unsigned tem = XINT (x, i);
2172 hash += tem;
2173 }
2174 else
2175 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2176 + (unsigned) CONST_DOUBLE_HIGH (x));
7afe21cc
RK
2177 return hash;
2178
2179 /* Assume there is only one rtx object for any given label. */
2180 case LABEL_REF:
3c543775 2181 hash
7bcac048 2182 += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2197a88a 2183 return hash;
7afe21cc
RK
2184
2185 case SYMBOL_REF:
3c543775 2186 hash
7bcac048 2187 += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2197a88a 2188 return hash;
7afe21cc
RK
2189
2190 case MEM:
2191 if (MEM_VOLATILE_P (x))
2192 {
2193 do_not_record = 1;
2194 return 0;
2195 }
9ad91d71 2196 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
7afe21cc
RK
2197 {
2198 hash_arg_in_memory = 1;
2199 if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
2200 }
2201 /* Now that we have already found this special case,
2202 might as well speed it up as much as possible. */
2197a88a 2203 hash += (unsigned) MEM;
7afe21cc
RK
2204 x = XEXP (x, 0);
2205 goto repeat;
2206
2207 case PRE_DEC:
2208 case PRE_INC:
2209 case POST_DEC:
2210 case POST_INC:
2211 case PC:
2212 case CC0:
2213 case CALL:
2214 case UNSPEC_VOLATILE:
2215 do_not_record = 1;
2216 return 0;
2217
2218 case ASM_OPERANDS:
2219 if (MEM_VOLATILE_P (x))
2220 {
2221 do_not_record = 1;
2222 return 0;
2223 }
e9a25f70
JL
2224 break;
2225
2226 default:
2227 break;
7afe21cc
RK
2228 }
2229
2230 i = GET_RTX_LENGTH (code) - 1;
2197a88a 2231 hash += (unsigned) code + (unsigned) GET_MODE (x);
7afe21cc
RK
2232 fmt = GET_RTX_FORMAT (code);
2233 for (; i >= 0; i--)
2234 {
2235 if (fmt[i] == 'e')
2236 {
2237 rtx tem = XEXP (x, i);
7afe21cc
RK
2238
2239 /* If we are about to do the last recursive call
2240 needed at this level, change it into iteration.
2241 This function is called enough to be worth it. */
2242 if (i == 0)
2243 {
2244 x = tem;
2245 goto repeat;
2246 }
2247 hash += canon_hash (tem, 0);
2248 }
2249 else if (fmt[i] == 'E')
2250 for (j = 0; j < XVECLEN (x, i); j++)
2251 hash += canon_hash (XVECEXP (x, i, j), 0);
2252 else if (fmt[i] == 's')
2253 {
2197a88a 2254 register unsigned char *p = (unsigned char *) XSTR (x, i);
7afe21cc
RK
2255 if (p)
2256 while (*p)
2197a88a 2257 hash += *p++;
7afe21cc
RK
2258 }
2259 else if (fmt[i] == 'i')
2260 {
2197a88a
RK
2261 register unsigned tem = XINT (x, i);
2262 hash += tem;
7afe21cc 2263 }
8f985ec4 2264 else if (fmt[i] == '0' || fmt[i] == 't')
e9a25f70 2265 /* unused */;
7afe21cc
RK
2266 else
2267 abort ();
2268 }
2269 return hash;
2270}
2271
2272/* Like canon_hash but with no side effects. */
2273
2197a88a 2274static unsigned
7afe21cc
RK
2275safe_hash (x, mode)
2276 rtx x;
2277 enum machine_mode mode;
2278{
2279 int save_do_not_record = do_not_record;
2280 int save_hash_arg_in_memory = hash_arg_in_memory;
2281 int save_hash_arg_in_struct = hash_arg_in_struct;
2197a88a 2282 unsigned hash = canon_hash (x, mode);
7afe21cc
RK
2283 hash_arg_in_memory = save_hash_arg_in_memory;
2284 hash_arg_in_struct = save_hash_arg_in_struct;
2285 do_not_record = save_do_not_record;
2286 return hash;
2287}
2288\f
2289/* Return 1 iff X and Y would canonicalize into the same thing,
2290 without actually constructing the canonicalization of either one.
2291 If VALIDATE is nonzero,
2292 we assume X is an expression being processed from the rtl
2293 and Y was found in the hash table. We check register refs
2294 in Y for being marked as valid.
2295
2296 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2297 that is known to be in the register. Ordinarily, we don't allow them
2298 to match, because letting them match would cause unpredictable results
2299 in all the places that search a hash table chain for an equivalent
2300 for a given value. A possible equivalent that has different structure
2301 has its hash code computed from different data. Whether the hash code
38e01259 2302 is the same as that of the given value is pure luck. */
7afe21cc
RK
2303
2304static int
2305exp_equiv_p (x, y, validate, equal_values)
2306 rtx x, y;
2307 int validate;
2308 int equal_values;
2309{
906c4e36 2310 register int i, j;
7afe21cc
RK
2311 register enum rtx_code code;
2312 register char *fmt;
2313
2314 /* Note: it is incorrect to assume an expression is equivalent to itself
2315 if VALIDATE is nonzero. */
2316 if (x == y && !validate)
2317 return 1;
2318 if (x == 0 || y == 0)
2319 return x == y;
2320
2321 code = GET_CODE (x);
2322 if (code != GET_CODE (y))
2323 {
2324 if (!equal_values)
2325 return 0;
2326
2327 /* If X is a constant and Y is a register or vice versa, they may be
2328 equivalent. We only have to validate if Y is a register. */
2329 if (CONSTANT_P (x) && GET_CODE (y) == REG
2330 && REGNO_QTY_VALID_P (REGNO (y))
30f72379
MM
2331 && GET_MODE (y) == qty_mode[REG_QTY (REGNO (y))]
2332 && rtx_equal_p (x, qty_const[REG_QTY (REGNO (y))])
2333 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
7afe21cc
RK
2334 return 1;
2335
2336 if (CONSTANT_P (y) && code == REG
2337 && REGNO_QTY_VALID_P (REGNO (x))
30f72379
MM
2338 && GET_MODE (x) == qty_mode[REG_QTY (REGNO (x))]
2339 && rtx_equal_p (y, qty_const[REG_QTY (REGNO (x))]))
7afe21cc
RK
2340 return 1;
2341
2342 return 0;
2343 }
2344
2345 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2346 if (GET_MODE (x) != GET_MODE (y))
2347 return 0;
2348
2349 switch (code)
2350 {
2351 case PC:
2352 case CC0:
2353 return x == y;
2354
2355 case CONST_INT:
58c8c593 2356 return INTVAL (x) == INTVAL (y);
7afe21cc
RK
2357
2358 case LABEL_REF:
7afe21cc
RK
2359 return XEXP (x, 0) == XEXP (y, 0);
2360
f54d4924
RK
2361 case SYMBOL_REF:
2362 return XSTR (x, 0) == XSTR (y, 0);
2363
7afe21cc
RK
2364 case REG:
2365 {
2366 int regno = REGNO (y);
2367 int endregno
2368 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2369 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2370 int i;
2371
2372 /* If the quantities are not the same, the expressions are not
2373 equivalent. If there are and we are not to validate, they
2374 are equivalent. Otherwise, ensure all regs are up-to-date. */
2375
30f72379 2376 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
7afe21cc
RK
2377 return 0;
2378
2379 if (! validate)
2380 return 1;
2381
2382 for (i = regno; i < endregno; i++)
30f72379 2383 if (REG_IN_TABLE (i) != REG_TICK (i))
7afe21cc
RK
2384 return 0;
2385
2386 return 1;
2387 }
2388
2389 /* For commutative operations, check both orders. */
2390 case PLUS:
2391 case MULT:
2392 case AND:
2393 case IOR:
2394 case XOR:
2395 case NE:
2396 case EQ:
2397 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2398 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2399 validate, equal_values))
2400 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2401 validate, equal_values)
2402 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2403 validate, equal_values)));
e9a25f70
JL
2404
2405 default:
2406 break;
7afe21cc
RK
2407 }
2408
2409 /* Compare the elements. If any pair of corresponding elements
2410 fail to match, return 0 for the whole things. */
2411
2412 fmt = GET_RTX_FORMAT (code);
2413 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2414 {
906c4e36 2415 switch (fmt[i])
7afe21cc 2416 {
906c4e36 2417 case 'e':
7afe21cc
RK
2418 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2419 return 0;
906c4e36
RK
2420 break;
2421
2422 case 'E':
7afe21cc
RK
2423 if (XVECLEN (x, i) != XVECLEN (y, i))
2424 return 0;
2425 for (j = 0; j < XVECLEN (x, i); j++)
2426 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2427 validate, equal_values))
2428 return 0;
906c4e36
RK
2429 break;
2430
2431 case 's':
7afe21cc
RK
2432 if (strcmp (XSTR (x, i), XSTR (y, i)))
2433 return 0;
906c4e36
RK
2434 break;
2435
2436 case 'i':
7afe21cc
RK
2437 if (XINT (x, i) != XINT (y, i))
2438 return 0;
906c4e36
RK
2439 break;
2440
2441 case 'w':
2442 if (XWINT (x, i) != XWINT (y, i))
2443 return 0;
2444 break;
2445
2446 case '0':
8f985ec4 2447 case 't':
906c4e36
RK
2448 break;
2449
2450 default:
2451 abort ();
7afe21cc 2452 }
906c4e36
RK
2453 }
2454
7afe21cc
RK
2455 return 1;
2456}
2457\f
2458/* Return 1 iff any subexpression of X matches Y.
2459 Here we do not require that X or Y be valid (for registers referred to)
2460 for being in the hash table. */
2461
6cd4575e 2462static int
7afe21cc
RK
2463refers_to_p (x, y)
2464 rtx x, y;
2465{
2466 register int i;
2467 register enum rtx_code code;
2468 register char *fmt;
2469
2470 repeat:
2471 if (x == y)
2472 return 1;
2473 if (x == 0 || y == 0)
2474 return 0;
2475
2476 code = GET_CODE (x);
2477 /* If X as a whole has the same code as Y, they may match.
2478 If so, return 1. */
2479 if (code == GET_CODE (y))
2480 {
2481 if (exp_equiv_p (x, y, 0, 1))
2482 return 1;
2483 }
2484
2485 /* X does not match, so try its subexpressions. */
2486
2487 fmt = GET_RTX_FORMAT (code);
2488 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2489 if (fmt[i] == 'e')
2490 {
2491 if (i == 0)
2492 {
2493 x = XEXP (x, 0);
2494 goto repeat;
2495 }
2496 else
2497 if (refers_to_p (XEXP (x, i), y))
2498 return 1;
2499 }
2500 else if (fmt[i] == 'E')
2501 {
2502 int j;
2503 for (j = 0; j < XVECLEN (x, i); j++)
2504 if (refers_to_p (XVECEXP (x, i, j), y))
2505 return 1;
2506 }
2507
2508 return 0;
2509}
2510\f
f451db89
JL
2511/* Given ADDR and SIZE (a memory address, and the size of the memory reference),
2512 set PBASE, PSTART, and PEND which correspond to the base of the address,
2513 the starting offset, and ending offset respectively.
2514
bb4034b3 2515 ADDR is known to be a nonvarying address. */
f451db89 2516
bb4034b3
JW
2517/* ??? Despite what the comments say, this function is in fact frequently
2518 passed varying addresses. This does not appear to cause any problems. */
f451db89
JL
2519
2520static void
2521set_nonvarying_address_components (addr, size, pbase, pstart, pend)
2522 rtx addr;
2523 int size;
2524 rtx *pbase;
6500fb43 2525 HOST_WIDE_INT *pstart, *pend;
f451db89
JL
2526{
2527 rtx base;
c85663b1 2528 HOST_WIDE_INT start, end;
f451db89
JL
2529
2530 base = addr;
2531 start = 0;
2532 end = 0;
2533
e5e809f4
JL
2534 if (flag_pic && GET_CODE (base) == PLUS
2535 && XEXP (base, 0) == pic_offset_table_rtx)
2536 base = XEXP (base, 1);
2537
f451db89
JL
2538 /* Registers with nonvarying addresses usually have constant equivalents;
2539 but the frame pointer register is also possible. */
2540 if (GET_CODE (base) == REG
2541 && qty_const != 0
2542 && REGNO_QTY_VALID_P (REGNO (base))
30f72379
MM
2543 && qty_mode[REG_QTY (REGNO (base))] == GET_MODE (base)
2544 && qty_const[REG_QTY (REGNO (base))] != 0)
2545 base = qty_const[REG_QTY (REGNO (base))];
f451db89
JL
2546 else if (GET_CODE (base) == PLUS
2547 && GET_CODE (XEXP (base, 1)) == CONST_INT
2548 && GET_CODE (XEXP (base, 0)) == REG
2549 && qty_const != 0
2550 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
30f72379 2551 && (qty_mode[REG_QTY (REGNO (XEXP (base, 0)))]
f451db89 2552 == GET_MODE (XEXP (base, 0)))
30f72379 2553 && qty_const[REG_QTY (REGNO (XEXP (base, 0)))])
f451db89
JL
2554 {
2555 start = INTVAL (XEXP (base, 1));
30f72379 2556 base = qty_const[REG_QTY (REGNO (XEXP (base, 0)))];
f451db89 2557 }
9c6b0bae 2558 /* This can happen as the result of virtual register instantiation,
abc95ed3 2559 if the initial offset is too large to be a valid address. */
9c6b0bae
RK
2560 else if (GET_CODE (base) == PLUS
2561 && GET_CODE (XEXP (base, 0)) == REG
2562 && GET_CODE (XEXP (base, 1)) == REG
2563 && qty_const != 0
2564 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
30f72379 2565 && (qty_mode[REG_QTY (REGNO (XEXP (base, 0)))]
9c6b0bae 2566 == GET_MODE (XEXP (base, 0)))
30f72379 2567 && qty_const[REG_QTY (REGNO (XEXP (base, 0)))]
9c6b0bae 2568 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 1)))
30f72379 2569 && (qty_mode[REG_QTY (REGNO (XEXP (base, 1)))]
9c6b0bae 2570 == GET_MODE (XEXP (base, 1)))
30f72379 2571 && qty_const[REG_QTY (REGNO (XEXP (base, 1)))])
9c6b0bae 2572 {
30f72379
MM
2573 rtx tem = qty_const[REG_QTY (REGNO (XEXP (base, 1)))];
2574 base = qty_const[REG_QTY (REGNO (XEXP (base, 0)))];
9c6b0bae
RK
2575
2576 /* One of the two values must be a constant. */
2577 if (GET_CODE (base) != CONST_INT)
2578 {
2579 if (GET_CODE (tem) != CONST_INT)
2580 abort ();
2581 start = INTVAL (tem);
2582 }
2583 else
2584 {
2585 start = INTVAL (base);
2586 base = tem;
2587 }
2588 }
f451db89 2589
c85663b1
RK
2590 /* Handle everything that we can find inside an address that has been
2591 viewed as constant. */
f451db89 2592
c85663b1 2593 while (1)
f451db89 2594 {
c85663b1
RK
2595 /* If no part of this switch does a "continue", the code outside
2596 will exit this loop. */
2597
2598 switch (GET_CODE (base))
2599 {
2600 case LO_SUM:
2601 /* By definition, operand1 of a LO_SUM is the associated constant
2602 address. Use the associated constant address as the base
2603 instead. */
2604 base = XEXP (base, 1);
2605 continue;
2606
2607 case CONST:
2608 /* Strip off CONST. */
2609 base = XEXP (base, 0);
2610 continue;
2611
2612 case PLUS:
2613 if (GET_CODE (XEXP (base, 1)) == CONST_INT)
2614 {
2615 start += INTVAL (XEXP (base, 1));
2616 base = XEXP (base, 0);
2617 continue;
2618 }
2619 break;
2620
2621 case AND:
2622 /* Handle the case of an AND which is the negative of a power of
2623 two. This is used to represent unaligned memory operations. */
2624 if (GET_CODE (XEXP (base, 1)) == CONST_INT
2625 && exact_log2 (- INTVAL (XEXP (base, 1))) > 0)
2626 {
2627 set_nonvarying_address_components (XEXP (base, 0), size,
2628 pbase, pstart, pend);
2629
2630 /* Assume the worst misalignment. START is affected, but not
2631 END, so compensate but adjusting SIZE. Don't lose any
2632 constant we already had. */
2633
2634 size = *pend - *pstart - INTVAL (XEXP (base, 1)) - 1;
89046535
RK
2635 start += *pstart + INTVAL (XEXP (base, 1)) + 1;
2636 end += *pend;
c85663b1
RK
2637 base = *pbase;
2638 }
2639 break;
e9a25f70
JL
2640
2641 default:
2642 break;
c85663b1
RK
2643 }
2644
2645 break;
f451db89
JL
2646 }
2647
336d6f0a
RK
2648 if (GET_CODE (base) == CONST_INT)
2649 {
2650 start += INTVAL (base);
2651 base = const0_rtx;
2652 }
2653
f451db89
JL
2654 end = start + size;
2655
2656 /* Set the return values. */
2657 *pbase = base;
2658 *pstart = start;
2659 *pend = end;
2660}
2661
9ae8ffe7
JL
2662/* Return 1 if X has a value that can vary even between two
2663 executions of the program. 0 means X can be compared reliably
2664 against certain constants or near-constants. */
7afe21cc
RK
2665
2666static int
9ae8ffe7
JL
2667cse_rtx_varies_p (x)
2668 register rtx x;
7afe21cc
RK
2669{
2670 /* We need not check for X and the equivalence class being of the same
2671 mode because if X is equivalent to a constant in some mode, it
2672 doesn't vary in any mode. */
2673
9ae8ffe7
JL
2674 if (GET_CODE (x) == REG
2675 && REGNO_QTY_VALID_P (REGNO (x))
30f72379
MM
2676 && GET_MODE (x) == qty_mode[REG_QTY (REGNO (x))]
2677 && qty_const[REG_QTY (REGNO (x))] != 0)
7afe21cc
RK
2678 return 0;
2679
9ae8ffe7
JL
2680 if (GET_CODE (x) == PLUS
2681 && GET_CODE (XEXP (x, 1)) == CONST_INT
2682 && GET_CODE (XEXP (x, 0)) == REG
2683 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2684 && (GET_MODE (XEXP (x, 0))
30f72379
MM
2685 == qty_mode[REG_QTY (REGNO (XEXP (x, 0)))])
2686 && qty_const[REG_QTY (REGNO (XEXP (x, 0)))])
7afe21cc
RK
2687 return 0;
2688
9c6b0bae
RK
2689 /* This can happen as the result of virtual register instantiation, if
2690 the initial constant is too large to be a valid address. This gives
2691 us a three instruction sequence, load large offset into a register,
2692 load fp minus a constant into a register, then a MEM which is the
2693 sum of the two `constant' registers. */
9ae8ffe7
JL
2694 if (GET_CODE (x) == PLUS
2695 && GET_CODE (XEXP (x, 0)) == REG
2696 && GET_CODE (XEXP (x, 1)) == REG
2697 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2698 && (GET_MODE (XEXP (x, 0))
30f72379
MM
2699 == qty_mode[REG_QTY (REGNO (XEXP (x, 0)))])
2700 && qty_const[REG_QTY (REGNO (XEXP (x, 0)))]
9ae8ffe7
JL
2701 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1)))
2702 && (GET_MODE (XEXP (x, 1))
30f72379
MM
2703 == qty_mode[REG_QTY (REGNO (XEXP (x, 1)))])
2704 && qty_const[REG_QTY (REGNO (XEXP (x, 1)))])
9c6b0bae
RK
2705 return 0;
2706
9ae8ffe7 2707 return rtx_varies_p (x);
7afe21cc
RK
2708}
2709\f
2710/* Canonicalize an expression:
2711 replace each register reference inside it
2712 with the "oldest" equivalent register.
2713
2714 If INSN is non-zero and we are replacing a pseudo with a hard register
7722328e
RK
2715 or vice versa, validate_change is used to ensure that INSN remains valid
2716 after we make our substitution. The calls are made with IN_GROUP non-zero
2717 so apply_change_group must be called upon the outermost return from this
2718 function (unless INSN is zero). The result of apply_change_group can
2719 generally be discarded since the changes we are making are optional. */
7afe21cc
RK
2720
2721static rtx
2722canon_reg (x, insn)
2723 rtx x;
2724 rtx insn;
2725{
2726 register int i;
2727 register enum rtx_code code;
2728 register char *fmt;
2729
2730 if (x == 0)
2731 return x;
2732
2733 code = GET_CODE (x);
2734 switch (code)
2735 {
2736 case PC:
2737 case CC0:
2738 case CONST:
2739 case CONST_INT:
2740 case CONST_DOUBLE:
2741 case SYMBOL_REF:
2742 case LABEL_REF:
2743 case ADDR_VEC:
2744 case ADDR_DIFF_VEC:
2745 return x;
2746
2747 case REG:
2748 {
2749 register int first;
2750
2751 /* Never replace a hard reg, because hard regs can appear
2752 in more than one machine mode, and we must preserve the mode
2753 of each occurrence. Also, some hard regs appear in
2754 MEMs that are shared and mustn't be altered. Don't try to
2755 replace any reg that maps to a reg of class NO_REGS. */
2756 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2757 || ! REGNO_QTY_VALID_P (REGNO (x)))
2758 return x;
2759
30f72379 2760 first = qty_first_reg[REG_QTY (REGNO (x))];
7afe21cc
RK
2761 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2762 : REGNO_REG_CLASS (first) == NO_REGS ? x
30f72379 2763 : gen_rtx_REG (qty_mode[REG_QTY (REGNO (x))], first));
7afe21cc 2764 }
e9a25f70
JL
2765
2766 default:
2767 break;
7afe21cc
RK
2768 }
2769
2770 fmt = GET_RTX_FORMAT (code);
2771 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2772 {
2773 register int j;
2774
2775 if (fmt[i] == 'e')
2776 {
2777 rtx new = canon_reg (XEXP (x, i), insn);
58873255 2778 int insn_code;
7afe21cc
RK
2779
2780 /* If replacing pseudo with hard reg or vice versa, ensure the
178c39f6 2781 insn remains valid. Likewise if the insn has MATCH_DUPs. */
aee9dc31
RS
2782 if (insn != 0 && new != 0
2783 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
178c39f6
RK
2784 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2785 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
58873255
RK
2786 || (insn_code = recog_memoized (insn)) < 0
2787 || insn_n_dups[insn_code] > 0))
77fa0940 2788 validate_change (insn, &XEXP (x, i), new, 1);
7afe21cc
RK
2789 else
2790 XEXP (x, i) = new;
2791 }
2792 else if (fmt[i] == 'E')
2793 for (j = 0; j < XVECLEN (x, i); j++)
2794 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2795 }
2796
2797 return x;
2798}
2799\f
a2cabb29 2800/* LOC is a location within INSN that is an operand address (the contents of
7afe21cc
RK
2801 a MEM). Find the best equivalent address to use that is valid for this
2802 insn.
2803
2804 On most CISC machines, complicated address modes are costly, and rtx_cost
2805 is a good approximation for that cost. However, most RISC machines have
2806 only a few (usually only one) memory reference formats. If an address is
2807 valid at all, it is often just as cheap as any other address. Hence, for
2808 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2809 costs of various addresses. For two addresses of equal cost, choose the one
2810 with the highest `rtx_cost' value as that has the potential of eliminating
2811 the most insns. For equal costs, we choose the first in the equivalence
2812 class. Note that we ignore the fact that pseudo registers are cheaper
2813 than hard registers here because we would also prefer the pseudo registers.
2814 */
2815
6cd4575e 2816static void
7afe21cc
RK
2817find_best_addr (insn, loc)
2818 rtx insn;
2819 rtx *loc;
2820{
7a87758d 2821 struct table_elt *elt;
7afe21cc 2822 rtx addr = *loc;
7a87758d
AS
2823#ifdef ADDRESS_COST
2824 struct table_elt *p;
7afe21cc 2825 int found_better = 1;
7a87758d 2826#endif
7afe21cc
RK
2827 int save_do_not_record = do_not_record;
2828 int save_hash_arg_in_memory = hash_arg_in_memory;
2829 int save_hash_arg_in_struct = hash_arg_in_struct;
7afe21cc
RK
2830 int addr_volatile;
2831 int regno;
2197a88a 2832 unsigned hash;
7afe21cc
RK
2833
2834 /* Do not try to replace constant addresses or addresses of local and
2835 argument slots. These MEM expressions are made only once and inserted
2836 in many instructions, as well as being used to control symbol table
2837 output. It is not safe to clobber them.
2838
2839 There are some uncommon cases where the address is already in a register
2840 for some reason, but we cannot take advantage of that because we have
2841 no easy way to unshare the MEM. In addition, looking up all stack
2842 addresses is costly. */
2843 if ((GET_CODE (addr) == PLUS
2844 && GET_CODE (XEXP (addr, 0)) == REG
2845 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2846 && (regno = REGNO (XEXP (addr, 0)),
8bc169f2
DE
2847 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2848 || regno == ARG_POINTER_REGNUM))
7afe21cc 2849 || (GET_CODE (addr) == REG
8bc169f2
DE
2850 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2851 || regno == HARD_FRAME_POINTER_REGNUM
2852 || regno == ARG_POINTER_REGNUM))
e9a25f70 2853 || GET_CODE (addr) == ADDRESSOF
7afe21cc
RK
2854 || CONSTANT_ADDRESS_P (addr))
2855 return;
2856
2857 /* If this address is not simply a register, try to fold it. This will
2858 sometimes simplify the expression. Many simplifications
2859 will not be valid, but some, usually applying the associative rule, will
2860 be valid and produce better code. */
8c87f107
RK
2861 if (GET_CODE (addr) != REG)
2862 {
2863 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2864
2865 if (1
2866#ifdef ADDRESS_COST
2f541799
MM
2867 && (CSE_ADDRESS_COST (folded) < CSE_ADDRESS_COST (addr)
2868 || (CSE_ADDRESS_COST (folded) == CSE_ADDRESS_COST (addr)
9a252d29 2869 && rtx_cost (folded, MEM) > rtx_cost (addr, MEM)))
8c87f107 2870#else
9a252d29 2871 && rtx_cost (folded, MEM) < rtx_cost (addr, MEM)
8c87f107
RK
2872#endif
2873 && validate_change (insn, loc, folded, 0))
2874 addr = folded;
2875 }
7afe21cc 2876
42495ca0
RK
2877 /* If this address is not in the hash table, we can't look for equivalences
2878 of the whole address. Also, ignore if volatile. */
2879
7afe21cc 2880 do_not_record = 0;
2197a88a 2881 hash = HASH (addr, Pmode);
7afe21cc
RK
2882 addr_volatile = do_not_record;
2883 do_not_record = save_do_not_record;
2884 hash_arg_in_memory = save_hash_arg_in_memory;
2885 hash_arg_in_struct = save_hash_arg_in_struct;
2886
2887 if (addr_volatile)
2888 return;
2889
2197a88a 2890 elt = lookup (addr, hash, Pmode);
7afe21cc 2891
7afe21cc 2892#ifndef ADDRESS_COST
42495ca0
RK
2893 if (elt)
2894 {
2d8b0f3a 2895 int our_cost = elt->cost;
42495ca0
RK
2896
2897 /* Find the lowest cost below ours that works. */
2898 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2899 if (elt->cost < our_cost
2900 && (GET_CODE (elt->exp) == REG
2901 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2902 && validate_change (insn, loc,
906c4e36 2903 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
42495ca0
RK
2904 return;
2905 }
2906#else
7afe21cc 2907
42495ca0
RK
2908 if (elt)
2909 {
2910 /* We need to find the best (under the criteria documented above) entry
2911 in the class that is valid. We use the `flag' field to indicate
2912 choices that were invalid and iterate until we can't find a better
2913 one that hasn't already been tried. */
7afe21cc 2914
42495ca0
RK
2915 for (p = elt->first_same_value; p; p = p->next_same_value)
2916 p->flag = 0;
7afe21cc 2917
42495ca0
RK
2918 while (found_better)
2919 {
2f541799 2920 int best_addr_cost = CSE_ADDRESS_COST (*loc);
42495ca0
RK
2921 int best_rtx_cost = (elt->cost + 1) >> 1;
2922 struct table_elt *best_elt = elt;
2923
2924 found_better = 0;
2925 for (p = elt->first_same_value; p; p = p->next_same_value)
2f541799 2926 if (! p->flag)
42495ca0 2927 {
2f541799
MM
2928 if ((GET_CODE (p->exp) == REG
2929 || exp_equiv_p (p->exp, p->exp, 1, 0))
2930 && (CSE_ADDRESS_COST (p->exp) < best_addr_cost
2931 || (CSE_ADDRESS_COST (p->exp) == best_addr_cost
2932 && (p->cost + 1) >> 1 > best_rtx_cost)))
2933 {
2934 found_better = 1;
2935 best_addr_cost = CSE_ADDRESS_COST (p->exp);
2936 best_rtx_cost = (p->cost + 1) >> 1;
2937 best_elt = p;
2938 }
42495ca0 2939 }
7afe21cc 2940
42495ca0
RK
2941 if (found_better)
2942 {
2943 if (validate_change (insn, loc,
906c4e36
RK
2944 canon_reg (copy_rtx (best_elt->exp),
2945 NULL_RTX), 0))
42495ca0
RK
2946 return;
2947 else
2948 best_elt->flag = 1;
2949 }
2950 }
2951 }
7afe21cc 2952
42495ca0
RK
2953 /* If the address is a binary operation with the first operand a register
2954 and the second a constant, do the same as above, but looking for
2955 equivalences of the register. Then try to simplify before checking for
2956 the best address to use. This catches a few cases: First is when we
2957 have REG+const and the register is another REG+const. We can often merge
2958 the constants and eliminate one insn and one register. It may also be
2959 that a machine has a cheap REG+REG+const. Finally, this improves the
2960 code on the Alpha for unaligned byte stores. */
2961
2962 if (flag_expensive_optimizations
2963 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2964 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2965 && GET_CODE (XEXP (*loc, 0)) == REG
2966 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
7afe21cc 2967 {
42495ca0
RK
2968 rtx c = XEXP (*loc, 1);
2969
2970 do_not_record = 0;
2197a88a 2971 hash = HASH (XEXP (*loc, 0), Pmode);
42495ca0
RK
2972 do_not_record = save_do_not_record;
2973 hash_arg_in_memory = save_hash_arg_in_memory;
2974 hash_arg_in_struct = save_hash_arg_in_struct;
2975
2197a88a 2976 elt = lookup (XEXP (*loc, 0), hash, Pmode);
42495ca0
RK
2977 if (elt == 0)
2978 return;
2979
2980 /* We need to find the best (under the criteria documented above) entry
2981 in the class that is valid. We use the `flag' field to indicate
2982 choices that were invalid and iterate until we can't find a better
2983 one that hasn't already been tried. */
7afe21cc 2984
7afe21cc 2985 for (p = elt->first_same_value; p; p = p->next_same_value)
42495ca0 2986 p->flag = 0;
7afe21cc 2987
42495ca0 2988 while (found_better)
7afe21cc 2989 {
2f541799 2990 int best_addr_cost = CSE_ADDRESS_COST (*loc);
42495ca0
RK
2991 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2992 struct table_elt *best_elt = elt;
2993 rtx best_rtx = *loc;
f6516aee
JW
2994 int count;
2995
2996 /* This is at worst case an O(n^2) algorithm, so limit our search
2997 to the first 32 elements on the list. This avoids trouble
2998 compiling code with very long basic blocks that can easily
2999 call cse_gen_binary so many times that we run out of memory. */
42495ca0
RK
3000
3001 found_better = 0;
f6516aee
JW
3002 for (p = elt->first_same_value, count = 0;
3003 p && count < 32;
3004 p = p->next_same_value, count++)
42495ca0
RK
3005 if (! p->flag
3006 && (GET_CODE (p->exp) == REG
3007 || exp_equiv_p (p->exp, p->exp, 1, 0)))
3008 {
96b0e481 3009 rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);
42495ca0 3010
2f541799
MM
3011 if ((CSE_ADDRESS_COST (new) < best_addr_cost
3012 || (CSE_ADDRESS_COST (new) == best_addr_cost
42495ca0
RK
3013 && (COST (new) + 1) >> 1 > best_rtx_cost)))
3014 {
3015 found_better = 1;
2f541799 3016 best_addr_cost = CSE_ADDRESS_COST (new);
42495ca0
RK
3017 best_rtx_cost = (COST (new) + 1) >> 1;
3018 best_elt = p;
3019 best_rtx = new;
3020 }
3021 }
3022
3023 if (found_better)
3024 {
3025 if (validate_change (insn, loc,
906c4e36
RK
3026 canon_reg (copy_rtx (best_rtx),
3027 NULL_RTX), 0))
42495ca0
RK
3028 return;
3029 else
3030 best_elt->flag = 1;
3031 }
7afe21cc
RK
3032 }
3033 }
3034#endif
3035}
3036\f
3037/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3038 operation (EQ, NE, GT, etc.), follow it back through the hash table and
3039 what values are being compared.
3040
3041 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3042 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3043 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3044 compared to produce cc0.
3045
3046 The return value is the comparison operator and is either the code of
3047 A or the code corresponding to the inverse of the comparison. */
3048
3049static enum rtx_code
13c9910f 3050find_comparison_args (code, parg1, parg2, pmode1, pmode2)
7afe21cc
RK
3051 enum rtx_code code;
3052 rtx *parg1, *parg2;
13c9910f 3053 enum machine_mode *pmode1, *pmode2;
7afe21cc
RK
3054{
3055 rtx arg1, arg2;
3056
3057 arg1 = *parg1, arg2 = *parg2;
3058
3059 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3060
b2796a4b 3061 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
7afe21cc
RK
3062 {
3063 /* Set non-zero when we find something of interest. */
3064 rtx x = 0;
3065 int reverse_code = 0;
3066 struct table_elt *p = 0;
3067
3068 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3069 On machines with CC0, this is the only case that can occur, since
3070 fold_rtx will return the COMPARE or item being compared with zero
3071 when given CC0. */
3072
3073 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3074 x = arg1;
3075
3076 /* If ARG1 is a comparison operator and CODE is testing for
3077 STORE_FLAG_VALUE, get the inner arguments. */
3078
3079 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
3080 {
c610adec
RK
3081 if (code == NE
3082 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3083 && code == LT && STORE_FLAG_VALUE == -1)
3084#ifdef FLOAT_STORE_FLAG_VALUE
3085 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3086 && FLOAT_STORE_FLAG_VALUE < 0)
3087#endif
3088 )
7afe21cc 3089 x = arg1;
c610adec
RK
3090 else if (code == EQ
3091 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3092 && code == GE && STORE_FLAG_VALUE == -1)
3093#ifdef FLOAT_STORE_FLAG_VALUE
3094 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3095 && FLOAT_STORE_FLAG_VALUE < 0)
3096#endif
3097 )
7afe21cc
RK
3098 x = arg1, reverse_code = 1;
3099 }
3100
3101 /* ??? We could also check for
3102
3103 (ne (and (eq (...) (const_int 1))) (const_int 0))
3104
3105 and related forms, but let's wait until we see them occurring. */
3106
3107 if (x == 0)
3108 /* Look up ARG1 in the hash table and see if it has an equivalence
3109 that lets us see what is being compared. */
3110 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
3111 GET_MODE (arg1));
3112 if (p) p = p->first_same_value;
3113
3114 for (; p; p = p->next_same_value)
3115 {
3116 enum machine_mode inner_mode = GET_MODE (p->exp);
3117
3118 /* If the entry isn't valid, skip it. */
3119 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3120 continue;
3121
3122 if (GET_CODE (p->exp) == COMPARE
3123 /* Another possibility is that this machine has a compare insn
3124 that includes the comparison code. In that case, ARG1 would
3125 be equivalent to a comparison operation that would set ARG1 to
3126 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3127 ORIG_CODE is the actual comparison being done; if it is an EQ,
3128 we must reverse ORIG_CODE. On machine with a negative value
3129 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3130 || ((code == NE
3131 || (code == LT
c610adec 3132 && GET_MODE_CLASS (inner_mode) == MODE_INT
906c4e36
RK
3133 && (GET_MODE_BITSIZE (inner_mode)
3134 <= HOST_BITS_PER_WIDE_INT)
7afe21cc 3135 && (STORE_FLAG_VALUE
906c4e36
RK
3136 & ((HOST_WIDE_INT) 1
3137 << (GET_MODE_BITSIZE (inner_mode) - 1))))
c610adec
RK
3138#ifdef FLOAT_STORE_FLAG_VALUE
3139 || (code == LT
3140 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3141 && FLOAT_STORE_FLAG_VALUE < 0)
3142#endif
3143 )
7afe21cc
RK
3144 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3145 {
3146 x = p->exp;
3147 break;
3148 }
3149 else if ((code == EQ
3150 || (code == GE
c610adec 3151 && GET_MODE_CLASS (inner_mode) == MODE_INT
906c4e36
RK
3152 && (GET_MODE_BITSIZE (inner_mode)
3153 <= HOST_BITS_PER_WIDE_INT)
7afe21cc 3154 && (STORE_FLAG_VALUE
906c4e36
RK
3155 & ((HOST_WIDE_INT) 1
3156 << (GET_MODE_BITSIZE (inner_mode) - 1))))
c610adec
RK
3157#ifdef FLOAT_STORE_FLAG_VALUE
3158 || (code == GE
3159 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3160 && FLOAT_STORE_FLAG_VALUE < 0)
3161#endif
3162 )
7afe21cc
RK
3163 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3164 {
3165 reverse_code = 1;
3166 x = p->exp;
3167 break;
3168 }
3169
3170 /* If this is fp + constant, the equivalent is a better operand since
3171 it may let us predict the value of the comparison. */
3172 else if (NONZERO_BASE_PLUS_P (p->exp))
3173 {
3174 arg1 = p->exp;
3175 continue;
3176 }
3177 }
3178
3179 /* If we didn't find a useful equivalence for ARG1, we are done.
3180 Otherwise, set up for the next iteration. */
3181 if (x == 0)
3182 break;
3183
3184 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3185 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3186 code = GET_CODE (x);
3187
3188 if (reverse_code)
3189 code = reverse_condition (code);
3190 }
3191
13c9910f
RS
3192 /* Return our results. Return the modes from before fold_rtx
3193 because fold_rtx might produce const_int, and then it's too late. */
3194 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
7afe21cc
RK
3195 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3196
3197 return code;
3198}
3199\f
3200/* Try to simplify a unary operation CODE whose output mode is to be
3201 MODE with input operand OP whose mode was originally OP_MODE.
3202 Return zero if no simplification can be made. */
3203
3204rtx
3205simplify_unary_operation (code, mode, op, op_mode)
3206 enum rtx_code code;
3207 enum machine_mode mode;
3208 rtx op;
3209 enum machine_mode op_mode;
3210{
3211 register int width = GET_MODE_BITSIZE (mode);
3212
3213 /* The order of these tests is critical so that, for example, we don't
3214 check the wrong mode (input vs. output) for a conversion operation,
3215 such as FIX. At some point, this should be simplified. */
3216
62c0ea12 3217#if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC)
7afe21cc 3218
62c0ea12
RK
3219 if (code == FLOAT && GET_MODE (op) == VOIDmode
3220 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
7afe21cc 3221 {
62c0ea12 3222 HOST_WIDE_INT hv, lv;
7afe21cc
RK
3223 REAL_VALUE_TYPE d;
3224
62c0ea12
RK
3225 if (GET_CODE (op) == CONST_INT)
3226 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
3227 else
7ac4a266 3228 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
7afe21cc
RK
3229
3230#ifdef REAL_ARITHMETIC
2ebcccf3 3231 REAL_VALUE_FROM_INT (d, lv, hv, mode);
7afe21cc 3232#else
62c0ea12 3233 if (hv < 0)
7afe21cc 3234 {
62c0ea12 3235 d = (double) (~ hv);
906c4e36
RK
3236 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3237 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
62c0ea12 3238 d += (double) (unsigned HOST_WIDE_INT) (~ lv);
7afe21cc
RK
3239 d = (- d - 1.0);
3240 }
3241 else
3242 {
62c0ea12 3243 d = (double) hv;
906c4e36
RK
3244 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3245 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
62c0ea12 3246 d += (double) (unsigned HOST_WIDE_INT) lv;
7afe21cc
RK
3247 }
3248#endif /* REAL_ARITHMETIC */
940fd0b5 3249 d = real_value_truncate (mode, d);
7afe21cc
RK
3250 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3251 }
62c0ea12
RK
3252 else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
3253 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
7afe21cc 3254 {
62c0ea12 3255 HOST_WIDE_INT hv, lv;
7afe21cc
RK
3256 REAL_VALUE_TYPE d;
3257
62c0ea12
RK
3258 if (GET_CODE (op) == CONST_INT)
3259 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
3260 else
7ac4a266 3261 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
62c0ea12 3262
a9c6464d
RK
3263 if (op_mode == VOIDmode)
3264 {
3265 /* We don't know how to interpret negative-looking numbers in
3266 this case, so don't try to fold those. */
3267 if (hv < 0)
3268 return 0;
3269 }
3270 else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
62c0ea12
RK
3271 ;
3272 else
3273 hv = 0, lv &= GET_MODE_MASK (op_mode);
3274
7afe21cc 3275#ifdef REAL_ARITHMETIC
2ebcccf3 3276 REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv, mode);
7afe21cc 3277#else
62c0ea12 3278
138cec59 3279 d = (double) (unsigned HOST_WIDE_INT) hv;
906c4e36
RK
3280 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3281 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
62c0ea12 3282 d += (double) (unsigned HOST_WIDE_INT) lv;
7afe21cc 3283#endif /* REAL_ARITHMETIC */
940fd0b5 3284 d = real_value_truncate (mode, d);
7afe21cc
RK
3285 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3286 }
3287#endif
3288
f89e32e9
RK
3289 if (GET_CODE (op) == CONST_INT
3290 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
7afe21cc 3291 {
906c4e36
RK
3292 register HOST_WIDE_INT arg0 = INTVAL (op);
3293 register HOST_WIDE_INT val;
7afe21cc
RK
3294
3295 switch (code)
3296 {
3297 case NOT:
3298 val = ~ arg0;
3299 break;
3300
3301 case NEG:
3302 val = - arg0;
3303 break;
3304
3305 case ABS:
3306 val = (arg0 >= 0 ? arg0 : - arg0);
3307 break;
3308
3309 case FFS:
3310 /* Don't use ffs here. Instead, get low order bit and then its
3311 number. If arg0 is zero, this will return 0, as desired. */
3312 arg0 &= GET_MODE_MASK (mode);
3313 val = exact_log2 (arg0 & (- arg0)) + 1;
3314 break;
3315
3316 case TRUNCATE:
3317 val = arg0;
3318 break;
3319
3320 case ZERO_EXTEND:
3321 if (op_mode == VOIDmode)
3322 op_mode = mode;
82a5e898 3323 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
d80e9fd7
RS
3324 {
3325 /* If we were really extending the mode,
3326 we would have to distinguish between zero-extension
3327 and sign-extension. */
3328 if (width != GET_MODE_BITSIZE (op_mode))
3329 abort ();
3330 val = arg0;
3331 }
82a5e898
CH
3332 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3333 val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
7afe21cc
RK
3334 else
3335 return 0;
3336 break;
3337
3338 case SIGN_EXTEND:
3339 if (op_mode == VOIDmode)
3340 op_mode = mode;
82a5e898 3341 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
d80e9fd7
RS
3342 {
3343 /* If we were really extending the mode,
3344 we would have to distinguish between zero-extension
3345 and sign-extension. */
3346 if (width != GET_MODE_BITSIZE (op_mode))
3347 abort ();
3348 val = arg0;
3349 }
f12564b4 3350 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
7afe21cc 3351 {
82a5e898
CH
3352 val
3353 = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3354 if (val
3355 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
3356 val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
7afe21cc
RK
3357 }
3358 else
3359 return 0;
3360 break;
3361
d45cf215
RS
3362 case SQRT:
3363 return 0;
3364
7afe21cc
RK
3365 default:
3366 abort ();
3367 }
3368
7e4ce834 3369 val = trunc_int_for_mode (val, mode);
737e7965 3370
906c4e36 3371 return GEN_INT (val);
7afe21cc
RK
3372 }
3373
3374 /* We can do some operations on integer CONST_DOUBLEs. Also allow
0f41302f 3375 for a DImode operation on a CONST_INT. */
8e0ac43b 3376 else if (GET_MODE (op) == VOIDmode && width <= HOST_BITS_PER_INT * 2
7afe21cc
RK
3377 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3378 {
906c4e36 3379 HOST_WIDE_INT l1, h1, lv, hv;
7afe21cc
RK
3380
3381 if (GET_CODE (op) == CONST_DOUBLE)
3382 l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
3383 else
3384 l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;
3385
3386 switch (code)
3387 {
3388 case NOT:
3389 lv = ~ l1;
3390 hv = ~ h1;
3391 break;
3392
3393 case NEG:
3394 neg_double (l1, h1, &lv, &hv);
3395 break;
3396
3397 case ABS:
3398 if (h1 < 0)
3399 neg_double (l1, h1, &lv, &hv);
3400 else
3401 lv = l1, hv = h1;
3402 break;
3403
3404 case FFS:
3405 hv = 0;
3406 if (l1 == 0)
906c4e36 3407 lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
7afe21cc
RK
3408 else
3409 lv = exact_log2 (l1 & (-l1)) + 1;
3410 break;
3411
3412 case TRUNCATE:
8e0ac43b 3413 /* This is just a change-of-mode, so do nothing. */
d50d63c0 3414 lv = l1, hv = h1;
7afe21cc
RK
3415 break;
3416
f72aed24
RS
3417 case ZERO_EXTEND:
3418 if (op_mode == VOIDmode
906c4e36 3419 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
f72aed24
RS
3420 return 0;
3421
3422 hv = 0;
3423 lv = l1 & GET_MODE_MASK (op_mode);
3424 break;
3425
3426 case SIGN_EXTEND:
3427 if (op_mode == VOIDmode
906c4e36 3428 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
f72aed24
RS
3429 return 0;
3430 else
3431 {
3432 lv = l1 & GET_MODE_MASK (op_mode);
906c4e36
RK
3433 if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
3434 && (lv & ((HOST_WIDE_INT) 1
3435 << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
3436 lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
f72aed24 3437
906c4e36 3438 hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
f72aed24
RS
3439 }
3440 break;
3441
d45cf215
RS
3442 case SQRT:
3443 return 0;
3444
7afe21cc
RK
3445 default:
3446 return 0;
3447 }
3448
3449 return immed_double_const (lv, hv, mode);
3450 }
3451
3452#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3453 else if (GET_CODE (op) == CONST_DOUBLE
3454 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3455 {
3456 REAL_VALUE_TYPE d;
3457 jmp_buf handler;
3458 rtx x;
3459
3460 if (setjmp (handler))
3461 /* There used to be a warning here, but that is inadvisable.
3462 People may want to cause traps, and the natural way
3463 to do it should not get a warning. */
3464 return 0;
3465
3466 set_float_handler (handler);
3467
3468 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3469
3470 switch (code)
3471 {
3472 case NEG:
3473 d = REAL_VALUE_NEGATE (d);
3474 break;
3475
3476 case ABS:
8b3686ed 3477 if (REAL_VALUE_NEGATIVE (d))
7afe21cc
RK
3478 d = REAL_VALUE_NEGATE (d);
3479 break;
3480
3481 case FLOAT_TRUNCATE:
d3159aee 3482 d = real_value_truncate (mode, d);
7afe21cc
RK
3483 break;
3484
3485 case FLOAT_EXTEND:
3486 /* All this does is change the mode. */
3487 break;
3488
3489 case FIX:
d3159aee 3490 d = REAL_VALUE_RNDZINT (d);
7afe21cc
RK
3491 break;
3492
3493 case UNSIGNED_FIX:
d3159aee 3494 d = REAL_VALUE_UNSIGNED_RNDZINT (d);
7afe21cc
RK
3495 break;
3496
d45cf215
RS
3497 case SQRT:
3498 return 0;
3499
7afe21cc
RK
3500 default:
3501 abort ();
3502 }
3503
560c94a2 3504 x = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
906c4e36 3505 set_float_handler (NULL_PTR);
7afe21cc
RK
3506 return x;
3507 }
8e0ac43b
RK
3508
3509 else if (GET_CODE (op) == CONST_DOUBLE
3510 && GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
3511 && GET_MODE_CLASS (mode) == MODE_INT
906c4e36 3512 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
7afe21cc
RK
3513 {
3514 REAL_VALUE_TYPE d;
3515 jmp_buf handler;
906c4e36 3516 HOST_WIDE_INT val;
7afe21cc
RK
3517
3518 if (setjmp (handler))
3519 return 0;
3520
3521 set_float_handler (handler);
3522
3523 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3524
3525 switch (code)
3526 {
3527 case FIX:
3528 val = REAL_VALUE_FIX (d);
3529 break;
3530
3531 case UNSIGNED_FIX:
3532 val = REAL_VALUE_UNSIGNED_FIX (d);
3533 break;
3534
3535 default:
3536 abort ();
3537 }
3538
906c4e36 3539 set_float_handler (NULL_PTR);
7afe21cc 3540
7e4ce834 3541 val = trunc_int_for_mode (val, mode);
ad89d6f6 3542
906c4e36 3543 return GEN_INT (val);
7afe21cc
RK
3544 }
3545#endif
a6acbe15
RS
3546 /* This was formerly used only for non-IEEE float.
3547 eggert@twinsun.com says it is safe for IEEE also. */
3548 else
7afe21cc
RK
3549 {
3550 /* There are some simplifications we can do even if the operands
a6acbe15 3551 aren't constant. */
7afe21cc
RK
3552 switch (code)
3553 {
3554 case NEG:
3555 case NOT:
3556 /* (not (not X)) == X, similarly for NEG. */
3557 if (GET_CODE (op) == code)
3558 return XEXP (op, 0);
3559 break;
3560
3561 case SIGN_EXTEND:
3562 /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
3563 becomes just the MINUS if its mode is MODE. This allows
3564 folding switch statements on machines using casesi (such as
3565 the Vax). */
3566 if (GET_CODE (op) == TRUNCATE
3567 && GET_MODE (XEXP (op, 0)) == mode
3568 && GET_CODE (XEXP (op, 0)) == MINUS
3569 && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
3570 && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
3571 return XEXP (op, 0);
cceb347c
RK
3572
3573#ifdef POINTERS_EXTEND_UNSIGNED
3574 if (! POINTERS_EXTEND_UNSIGNED
3575 && mode == Pmode && GET_MODE (op) == ptr_mode
3576 && CONSTANT_P (op))
3577 return convert_memory_address (Pmode, op);
3578#endif
3579 break;
3580
3581#ifdef POINTERS_EXTEND_UNSIGNED
3582 case ZERO_EXTEND:
3583 if (POINTERS_EXTEND_UNSIGNED
3584 && mode == Pmode && GET_MODE (op) == ptr_mode
3585 && CONSTANT_P (op))
3586 return convert_memory_address (Pmode, op);
7afe21cc 3587 break;
cceb347c 3588#endif
e9a25f70
JL
3589
3590 default:
3591 break;
7afe21cc
RK
3592 }
3593
3594 return 0;
3595 }
7afe21cc
RK
3596}
3597\f
3598/* Simplify a binary operation CODE with result mode MODE, operating on OP0
3599 and OP1. Return 0 if no simplification is possible.
3600
3601 Don't use this for relational operations such as EQ or LT.
3602 Use simplify_relational_operation instead. */
3603
3604rtx
3605simplify_binary_operation (code, mode, op0, op1)
3606 enum rtx_code code;
3607 enum machine_mode mode;
3608 rtx op0, op1;
3609{
906c4e36
RK
3610 register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3611 HOST_WIDE_INT val;
7afe21cc 3612 int width = GET_MODE_BITSIZE (mode);
96b0e481 3613 rtx tem;
7afe21cc
RK
3614
3615 /* Relational operations don't work here. We must know the mode
3616 of the operands in order to do the comparison correctly.
3617 Assuming a full word can give incorrect results.
3618 Consider comparing 128 with -128 in QImode. */
3619
3620 if (GET_RTX_CLASS (code) == '<')
3621 abort ();
3622
3623#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3624 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3625 && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3626 && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3627 {
3628 REAL_VALUE_TYPE f0, f1, value;
3629 jmp_buf handler;
3630
3631 if (setjmp (handler))
3632 return 0;
3633
3634 set_float_handler (handler);
3635
3636 REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3637 REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
5352b11a
RS
3638 f0 = real_value_truncate (mode, f0);
3639 f1 = real_value_truncate (mode, f1);
7afe21cc
RK
3640
3641#ifdef REAL_ARITHMETIC
956d6950
JL
3642#ifndef REAL_INFINITY
3643 if (code == DIV && REAL_VALUES_EQUAL (f1, dconst0))
3644 return 0;
3645#endif
d3159aee 3646 REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
7afe21cc
RK
3647#else
3648 switch (code)
3649 {
3650 case PLUS:
3651 value = f0 + f1;
3652 break;
3653 case MINUS:
3654 value = f0 - f1;
3655 break;
3656 case MULT:
3657 value = f0 * f1;
3658 break;
3659 case DIV:
3660#ifndef REAL_INFINITY
3661 if (f1 == 0)
21d12b80 3662 return 0;
7afe21cc
RK
3663#endif
3664 value = f0 / f1;
3665 break;
3666 case SMIN:
3667 value = MIN (f0, f1);
3668 break;
3669 case SMAX:
3670 value = MAX (f0, f1);
3671 break;
3672 default:
3673 abort ();
3674 }
3675#endif
3676
5352b11a 3677 value = real_value_truncate (mode, value);
831522a4 3678 set_float_handler (NULL_PTR);
560c94a2 3679 return CONST_DOUBLE_FROM_REAL_VALUE (value, mode);
7afe21cc 3680 }
6076248a 3681#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
7afe21cc
RK
3682
3683 /* We can fold some multi-word operations. */
6076248a 3684 if (GET_MODE_CLASS (mode) == MODE_INT
33085906 3685 && width == HOST_BITS_PER_WIDE_INT * 2
fe873240 3686 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
6076248a 3687 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
7afe21cc 3688 {
906c4e36 3689 HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
7afe21cc 3690
fe873240
RK
3691 if (GET_CODE (op0) == CONST_DOUBLE)
3692 l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3693 else
3694 l1 = INTVAL (op0), h1 = l1 < 0 ? -1 : 0;
7afe21cc
RK
3695
3696 if (GET_CODE (op1) == CONST_DOUBLE)
3697 l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3698 else
3699 l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3700
3701 switch (code)
3702 {
3703 case MINUS:
3704 /* A - B == A + (-B). */
3705 neg_double (l2, h2, &lv, &hv);
3706 l2 = lv, h2 = hv;
3707
0f41302f 3708 /* .. fall through ... */
7afe21cc
RK
3709
3710 case PLUS:
3711 add_double (l1, h1, l2, h2, &lv, &hv);
3712 break;
3713
3714 case MULT:
3715 mul_double (l1, h1, l2, h2, &lv, &hv);
3716 break;
3717
3718 case DIV: case MOD: case UDIV: case UMOD:
3719 /* We'd need to include tree.h to do this and it doesn't seem worth
3720 it. */
3721 return 0;
3722
3723 case AND:
3724 lv = l1 & l2, hv = h1 & h2;
3725 break;
3726
3727 case IOR:
3728 lv = l1 | l2, hv = h1 | h2;
3729 break;
3730
3731 case XOR:
3732 lv = l1 ^ l2, hv = h1 ^ h2;
3733 break;
3734
3735 case SMIN:
906c4e36
RK
3736 if (h1 < h2
3737 || (h1 == h2
3738 && ((unsigned HOST_WIDE_INT) l1
3739 < (unsigned HOST_WIDE_INT) l2)))
7afe21cc
RK
3740 lv = l1, hv = h1;
3741 else
3742 lv = l2, hv = h2;
3743 break;
3744
3745 case SMAX:
906c4e36
RK
3746 if (h1 > h2
3747 || (h1 == h2
3748 && ((unsigned HOST_WIDE_INT) l1
3749 > (unsigned HOST_WIDE_INT) l2)))
7afe21cc
RK
3750 lv = l1, hv = h1;
3751 else
3752 lv = l2, hv = h2;
3753 break;
3754
3755 case UMIN:
906c4e36
RK
3756 if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3757 || (h1 == h2
3758 && ((unsigned HOST_WIDE_INT) l1
3759 < (unsigned HOST_WIDE_INT) l2)))
7afe21cc
RK
3760 lv = l1, hv = h1;
3761 else
3762 lv = l2, hv = h2;
3763 break;
3764
3765 case UMAX:
906c4e36
RK
3766 if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3767 || (h1 == h2
3768 && ((unsigned HOST_WIDE_INT) l1
3769 > (unsigned HOST_WIDE_INT) l2)))
7afe21cc
RK
3770 lv = l1, hv = h1;
3771 else
3772 lv = l2, hv = h2;
3773 break;
3774
3775 case LSHIFTRT: case ASHIFTRT:
45620ed4 3776 case ASHIFT:
7afe21cc
RK
3777 case ROTATE: case ROTATERT:
3778#ifdef SHIFT_COUNT_TRUNCATED
85c0a556
RK
3779 if (SHIFT_COUNT_TRUNCATED)
3780 l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
7afe21cc
RK
3781#endif
3782
3783 if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3784 return 0;
3785
3786 if (code == LSHIFTRT || code == ASHIFTRT)
3787 rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3788 code == ASHIFTRT);
45620ed4
RK
3789 else if (code == ASHIFT)
3790 lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
7afe21cc
RK
3791 else if (code == ROTATE)
3792 lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3793 else /* code == ROTATERT */
3794 rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3795 break;
3796
3797 default:
3798 return 0;
3799 }
3800
3801 return immed_double_const (lv, hv, mode);
3802 }
7afe21cc
RK
3803
3804 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
906c4e36 3805 || width > HOST_BITS_PER_WIDE_INT || width == 0)
7afe21cc
RK
3806 {
3807 /* Even if we can't compute a constant result,
3808 there are some cases worth simplifying. */
3809
3810 switch (code)
3811 {
3812 case PLUS:
3813 /* In IEEE floating point, x+0 is not the same as x. Similarly
3814 for the other optimizations below. */
3815 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
a83afb65 3816 && FLOAT_MODE_P (mode) && ! flag_fast_math)
7afe21cc
RK
3817 break;
3818
3819 if (op1 == CONST0_RTX (mode))
3820 return op0;
3821
7afe21cc
RK
3822 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3823 if (GET_CODE (op0) == NEG)
96b0e481 3824 return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
7afe21cc 3825 else if (GET_CODE (op1) == NEG)
96b0e481 3826 return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
7afe21cc 3827
96b0e481
RK
3828 /* Handle both-operands-constant cases. We can only add
3829 CONST_INTs to constants since the sum of relocatable symbols
fe873240
RK
3830 can't be handled by most assemblers. Don't add CONST_INT
3831 to CONST_INT since overflow won't be computed properly if wider
3832 than HOST_BITS_PER_WIDE_INT. */
7afe21cc 3833
fe873240
RK
3834 if (CONSTANT_P (op0) && GET_MODE (op0) != VOIDmode
3835 && GET_CODE (op1) == CONST_INT)
96b0e481 3836 return plus_constant (op0, INTVAL (op1));
fe873240
RK
3837 else if (CONSTANT_P (op1) && GET_MODE (op1) != VOIDmode
3838 && GET_CODE (op0) == CONST_INT)
96b0e481 3839 return plus_constant (op1, INTVAL (op0));
7afe21cc 3840
30d69925
RK
3841 /* See if this is something like X * C - X or vice versa or
3842 if the multiplication is written as a shift. If so, we can
3843 distribute and make a new multiply, shift, or maybe just
3844 have X (if C is 2 in the example above). But don't make
3845 real multiply if we didn't have one before. */
3846
3847 if (! FLOAT_MODE_P (mode))
3848 {
3849 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3850 rtx lhs = op0, rhs = op1;
3851 int had_mult = 0;
3852
3853 if (GET_CODE (lhs) == NEG)
3854 coeff0 = -1, lhs = XEXP (lhs, 0);
3855 else if (GET_CODE (lhs) == MULT
3856 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3857 {
3858 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3859 had_mult = 1;
3860 }
3861 else if (GET_CODE (lhs) == ASHIFT
3862 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3863 && INTVAL (XEXP (lhs, 1)) >= 0
3864 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3865 {
3866 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3867 lhs = XEXP (lhs, 0);
3868 }
3869
3870 if (GET_CODE (rhs) == NEG)
3871 coeff1 = -1, rhs = XEXP (rhs, 0);
3872 else if (GET_CODE (rhs) == MULT
3873 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3874 {
3875 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3876 had_mult = 1;
3877 }
3878 else if (GET_CODE (rhs) == ASHIFT
3879 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3880 && INTVAL (XEXP (rhs, 1)) >= 0
3881 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3882 {
3883 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3884 rhs = XEXP (rhs, 0);
3885 }
3886
3887 if (rtx_equal_p (lhs, rhs))
3888 {
3889 tem = cse_gen_binary (MULT, mode, lhs,
3890 GEN_INT (coeff0 + coeff1));
3891 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3892 }
3893 }
3894
96b0e481
RK
3895 /* If one of the operands is a PLUS or a MINUS, see if we can
3896 simplify this by the associative law.
3897 Don't use the associative law for floating point.
3898 The inaccuracy makes it nonassociative,
3899 and subtle programs can break if operations are associated. */
7afe21cc 3900
cbf6a543 3901 if (INTEGRAL_MODE_P (mode)
96b0e481
RK
3902 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3903 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3904 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3905 return tem;
7afe21cc
RK
3906 break;
3907
3908 case COMPARE:
3909#ifdef HAVE_cc0
3910 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3911 using cc0, in which case we want to leave it as a COMPARE
3912 so we can distinguish it from a register-register-copy.
3913
3914 In IEEE floating point, x-0 is not the same as x. */
3915
3916 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
a83afb65 3917 || ! FLOAT_MODE_P (mode) || flag_fast_math)
7afe21cc
RK
3918 && op1 == CONST0_RTX (mode))
3919 return op0;
3920#else
3921 /* Do nothing here. */
3922#endif
3923 break;
3924
3925 case MINUS:
21648b45
RK
3926 /* None of these optimizations can be done for IEEE
3927 floating point. */
3928 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
a83afb65 3929 && FLOAT_MODE_P (mode) && ! flag_fast_math)
21648b45
RK
3930 break;
3931
a83afb65
RK
3932 /* We can't assume x-x is 0 even with non-IEEE floating point,
3933 but since it is zero except in very strange circumstances, we
3934 will treat it as zero with -ffast-math. */
7afe21cc
RK
3935 if (rtx_equal_p (op0, op1)
3936 && ! side_effects_p (op0)
a83afb65
RK
3937 && (! FLOAT_MODE_P (mode) || flag_fast_math))
3938 return CONST0_RTX (mode);
7afe21cc
RK
3939
3940 /* Change subtraction from zero into negation. */
3941 if (op0 == CONST0_RTX (mode))
38a448ca 3942 return gen_rtx_NEG (mode, op1);
7afe21cc 3943
96b0e481
RK
3944 /* (-1 - a) is ~a. */
3945 if (op0 == constm1_rtx)
38a448ca 3946 return gen_rtx_NOT (mode, op1);
96b0e481 3947
7afe21cc
RK
3948 /* Subtracting 0 has no effect. */
3949 if (op1 == CONST0_RTX (mode))
3950 return op0;
3951
30d69925
RK
3952 /* See if this is something like X * C - X or vice versa or
3953 if the multiplication is written as a shift. If so, we can
3954 distribute and make a new multiply, shift, or maybe just
3955 have X (if C is 2 in the example above). But don't make
3956 real multiply if we didn't have one before. */
3957
3958 if (! FLOAT_MODE_P (mode))
3959 {
3960 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3961 rtx lhs = op0, rhs = op1;
3962 int had_mult = 0;
3963
3964 if (GET_CODE (lhs) == NEG)
3965 coeff0 = -1, lhs = XEXP (lhs, 0);
3966 else if (GET_CODE (lhs) == MULT
3967 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3968 {
3969 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3970 had_mult = 1;
3971 }
3972 else if (GET_CODE (lhs) == ASHIFT
3973 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3974 && INTVAL (XEXP (lhs, 1)) >= 0
3975 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3976 {
3977 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3978 lhs = XEXP (lhs, 0);
3979 }
3980
3981 if (GET_CODE (rhs) == NEG)
3982 coeff1 = - 1, rhs = XEXP (rhs, 0);
3983 else if (GET_CODE (rhs) == MULT
3984 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3985 {
3986 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3987 had_mult = 1;
3988 }
3989 else if (GET_CODE (rhs) == ASHIFT
3990 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3991 && INTVAL (XEXP (rhs, 1)) >= 0
3992 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3993 {
3994 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3995 rhs = XEXP (rhs, 0);
3996 }
3997
3998 if (rtx_equal_p (lhs, rhs))
3999 {
4000 tem = cse_gen_binary (MULT, mode, lhs,
4001 GEN_INT (coeff0 - coeff1));
4002 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
4003 }
4004 }
4005
7afe21cc
RK
4006 /* (a - (-b)) -> (a + b). */
4007 if (GET_CODE (op1) == NEG)
96b0e481 4008 return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
7afe21cc 4009
96b0e481
RK
4010 /* If one of the operands is a PLUS or a MINUS, see if we can
4011 simplify this by the associative law.
4012 Don't use the associative law for floating point.
7afe21cc
RK
4013 The inaccuracy makes it nonassociative,
4014 and subtle programs can break if operations are associated. */
7afe21cc 4015
cbf6a543 4016 if (INTEGRAL_MODE_P (mode)
96b0e481
RK
4017 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
4018 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
4019 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
4020 return tem;
7afe21cc
RK
4021
4022 /* Don't let a relocatable value get a negative coeff. */
b5a09c41 4023 if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
7afe21cc 4024 return plus_constant (op0, - INTVAL (op1));
29d72c4b
TG
4025
4026 /* (x - (x & y)) -> (x & ~y) */
4027 if (GET_CODE (op1) == AND)
4028 {
4029 if (rtx_equal_p (op0, XEXP (op1, 0)))
38a448ca 4030 return cse_gen_binary (AND, mode, op0, gen_rtx_NOT (mode, XEXP (op1, 1)));
29d72c4b 4031 if (rtx_equal_p (op0, XEXP (op1, 1)))
38a448ca 4032 return cse_gen_binary (AND, mode, op0, gen_rtx_NOT (mode, XEXP (op1, 0)));
29d72c4b 4033 }
7afe21cc
RK
4034 break;
4035
4036 case MULT:
4037 if (op1 == constm1_rtx)
4038 {
96b0e481 4039 tem = simplify_unary_operation (NEG, mode, op0, mode);
7afe21cc 4040
38a448ca 4041 return tem ? tem : gen_rtx_NEG (mode, op0);
7afe21cc
RK
4042 }
4043
4044 /* In IEEE floating point, x*0 is not always 0. */
4045 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
a83afb65 4046 || ! FLOAT_MODE_P (mode) || flag_fast_math)
7afe21cc
RK
4047 && op1 == CONST0_RTX (mode)
4048 && ! side_effects_p (op0))
4049 return op1;
4050
4051 /* In IEEE floating point, x*1 is not equivalent to x for nans.
4052 However, ANSI says we can drop signals,
4053 so we can do this anyway. */
4054 if (op1 == CONST1_RTX (mode))
4055 return op0;
4056
c407b802
RK
4057 /* Convert multiply by constant power of two into shift unless
4058 we are still generating RTL. This test is a kludge. */
7afe21cc 4059 if (GET_CODE (op1) == CONST_INT
c407b802 4060 && (val = exact_log2 (INTVAL (op1))) >= 0
2d917903
JW
4061 /* If the mode is larger than the host word size, and the
4062 uppermost bit is set, then this isn't a power of two due
4063 to implicit sign extension. */
4064 && (width <= HOST_BITS_PER_WIDE_INT
4065 || val != HOST_BITS_PER_WIDE_INT - 1)
c407b802 4066 && ! rtx_equal_function_value_matters)
38a448ca 4067 return gen_rtx_ASHIFT (mode, op0, GEN_INT (val));
7afe21cc
RK
4068
4069 if (GET_CODE (op1) == CONST_DOUBLE
4070 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
4071 {
4072 REAL_VALUE_TYPE d;
5a3d4bef
RK
4073 jmp_buf handler;
4074 int op1is2, op1ism1;
4075
4076 if (setjmp (handler))
4077 return 0;
4078
4079 set_float_handler (handler);
7afe21cc 4080 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
5a3d4bef
RK
4081 op1is2 = REAL_VALUES_EQUAL (d, dconst2);
4082 op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
4083 set_float_handler (NULL_PTR);
7afe21cc
RK
4084
4085 /* x*2 is x+x and x*(-1) is -x */
5a3d4bef 4086 if (op1is2 && GET_MODE (op0) == mode)
38a448ca 4087 return gen_rtx_PLUS (mode, op0, copy_rtx (op0));
7afe21cc 4088
5a3d4bef 4089 else if (op1ism1 && GET_MODE (op0) == mode)
38a448ca 4090 return gen_rtx_NEG (mode, op0);
7afe21cc
RK
4091 }
4092 break;
4093
4094 case IOR:
4095 if (op1 == const0_rtx)
4096 return op0;
4097 if (GET_CODE (op1) == CONST_INT
4098 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4099 return op1;
4100 if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4101 return op0;
4102 /* A | (~A) -> -1 */
4103 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
4104 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
31dcf83f 4105 && ! side_effects_p (op0)
8e7e5365 4106 && GET_MODE_CLASS (mode) != MODE_CC)
7afe21cc
RK
4107 return constm1_rtx;
4108 break;
4109
4110 case XOR:
4111 if (op1 == const0_rtx)
4112 return op0;
4113 if (GET_CODE (op1) == CONST_INT
4114 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
38a448ca 4115 return gen_rtx_NOT (mode, op0);
31dcf83f 4116 if (op0 == op1 && ! side_effects_p (op0)
8e7e5365 4117 && GET_MODE_CLASS (mode) != MODE_CC)
7afe21cc
RK
4118 return const0_rtx;
4119 break;
4120
4121 case AND:
4122 if (op1 == const0_rtx && ! side_effects_p (op0))
4123 return const0_rtx;
4124 if (GET_CODE (op1) == CONST_INT
4125 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4126 return op0;
31dcf83f 4127 if (op0 == op1 && ! side_effects_p (op0)
8e7e5365 4128 && GET_MODE_CLASS (mode) != MODE_CC)
7afe21cc
RK
4129 return op0;
4130 /* A & (~A) -> 0 */
4131 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
4132 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
709ab4fc 4133 && ! side_effects_p (op0)
8e7e5365 4134 && GET_MODE_CLASS (mode) != MODE_CC)
7afe21cc
RK
4135 return const0_rtx;
4136 break;
4137
4138 case UDIV:
4139 /* Convert divide by power of two into shift (divide by 1 handled
4140 below). */
4141 if (GET_CODE (op1) == CONST_INT
4142 && (arg1 = exact_log2 (INTVAL (op1))) > 0)
38a448ca 4143 return gen_rtx_LSHIFTRT (mode, op0, GEN_INT (arg1));
7afe21cc 4144
0f41302f 4145 /* ... fall through ... */
7afe21cc
RK
4146
4147 case DIV:
4148 if (op1 == CONST1_RTX (mode))
4149 return op0;
e7a522ba
RS
4150
4151 /* In IEEE floating point, 0/x is not always 0. */
4152 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
a83afb65 4153 || ! FLOAT_MODE_P (mode) || flag_fast_math)
e7a522ba
RS
4154 && op0 == CONST0_RTX (mode)
4155 && ! side_effects_p (op1))
7afe21cc 4156 return op0;
e7a522ba 4157
7afe21cc 4158#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
a83afb65
RK
4159 /* Change division by a constant into multiplication. Only do
4160 this with -ffast-math until an expert says it is safe in
4161 general. */
7afe21cc
RK
4162 else if (GET_CODE (op1) == CONST_DOUBLE
4163 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
a83afb65
RK
4164 && op1 != CONST0_RTX (mode)
4165 && flag_fast_math)
7afe21cc
RK
4166 {
4167 REAL_VALUE_TYPE d;
4168 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
a83afb65
RK
4169
4170 if (! REAL_VALUES_EQUAL (d, dconst0))
4171 {
7afe21cc 4172#if defined (REAL_ARITHMETIC)
a83afb65 4173 REAL_ARITHMETIC (d, rtx_to_tree_code (DIV), dconst1, d);
38a448ca
RH
4174 return gen_rtx_MULT (mode, op0,
4175 CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
7afe21cc 4176#else
38a448ca
RH
4177 return gen_rtx_MULT (mode, op0,
4178 CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
7afe21cc 4179#endif
a83afb65
RK
4180 }
4181 }
7afe21cc
RK
4182#endif
4183 break;
4184
4185 case UMOD:
4186 /* Handle modulus by power of two (mod with 1 handled below). */
4187 if (GET_CODE (op1) == CONST_INT
4188 && exact_log2 (INTVAL (op1)) > 0)
38a448ca 4189 return gen_rtx_AND (mode, op0, GEN_INT (INTVAL (op1) - 1));
7afe21cc 4190
0f41302f 4191 /* ... fall through ... */
7afe21cc
RK
4192
4193 case MOD:
4194 if ((op0 == const0_rtx || op1 == const1_rtx)
4195 && ! side_effects_p (op0) && ! side_effects_p (op1))
4196 return const0_rtx;
4197 break;
4198
4199 case ROTATERT:
4200 case ROTATE:
4201 /* Rotating ~0 always results in ~0. */
906c4e36 4202 if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
7afe21cc
RK
4203 && INTVAL (op0) == GET_MODE_MASK (mode)
4204 && ! side_effects_p (op1))
4205 return op0;
4206
0f41302f 4207 /* ... fall through ... */
7afe21cc 4208
7afe21cc
RK
4209 case ASHIFT:
4210 case ASHIFTRT:
4211 case LSHIFTRT:
4212 if (op1 == const0_rtx)
4213 return op0;
4214 if (op0 == const0_rtx && ! side_effects_p (op1))
4215 return op0;
4216 break;
4217
4218 case SMIN:
906c4e36
RK
4219 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
4220 && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width -1)
7afe21cc
RK
4221 && ! side_effects_p (op0))
4222 return op1;
4223 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4224 return op0;
4225 break;
4226
4227 case SMAX:
906c4e36 4228 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
dbbe6445
RK
4229 && (INTVAL (op1)
4230 == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
7afe21cc
RK
4231 && ! side_effects_p (op0))
4232 return op1;
4233 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4234 return op0;
4235 break;
4236
4237 case UMIN:
4238 if (op1 == const0_rtx && ! side_effects_p (op0))
4239 return op1;
4240 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4241 return op0;
4242 break;
4243
4244 case UMAX:
4245 if (op1 == constm1_rtx && ! side_effects_p (op0))
4246 return op1;
4247 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4248 return op0;
4249 break;
4250
4251 default:
4252 abort ();
4253 }
4254
4255 return 0;
4256 }
4257
4258 /* Get the integer argument values in two forms:
4259 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
4260
4261 arg0 = INTVAL (op0);
4262 arg1 = INTVAL (op1);
4263
906c4e36 4264 if (width < HOST_BITS_PER_WIDE_INT)
7afe21cc 4265 {
906c4e36
RK
4266 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
4267 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
7afe21cc
RK
4268
4269 arg0s = arg0;
906c4e36
RK
4270 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4271 arg0s |= ((HOST_WIDE_INT) (-1) << width);
7afe21cc
RK
4272
4273 arg1s = arg1;
906c4e36
RK
4274 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4275 arg1s |= ((HOST_WIDE_INT) (-1) << width);
7afe21cc
RK
4276 }
4277 else
4278 {
4279 arg0s = arg0;
4280 arg1s = arg1;
4281 }
4282
4283 /* Compute the value of the arithmetic. */
4284
4285 switch (code)
4286 {
4287 case PLUS:
538b78e7 4288 val = arg0s + arg1s;
7afe21cc
RK
4289 break;
4290
4291 case MINUS:
538b78e7 4292 val = arg0s - arg1s;
7afe21cc
RK
4293 break;
4294
4295 case MULT:
4296 val = arg0s * arg1s;
4297 break;
4298
4299 case DIV:
4300 if (arg1s == 0)
4301 return 0;
4302 val = arg0s / arg1s;
4303 break;
4304
4305 case MOD:
4306 if (arg1s == 0)
4307 return 0;
4308 val = arg0s % arg1s;
4309 break;
4310
4311 case UDIV:
4312 if (arg1 == 0)
4313 return 0;
906c4e36 4314 val = (unsigned HOST_WIDE_INT) arg0 / arg1;
7afe21cc
RK
4315 break;
4316
4317 case UMOD:
4318 if (arg1 == 0)
4319 return 0;
906c4e36 4320 val = (unsigned HOST_WIDE_INT) arg0 % arg1;
7afe21cc
RK
4321 break;
4322
4323 case AND:
4324 val = arg0 & arg1;
4325 break;
4326
4327 case IOR:
4328 val = arg0 | arg1;
4329 break;
4330
4331 case XOR:
4332 val = arg0 ^ arg1;
4333 break;
4334
4335 case LSHIFTRT:
4336 /* If shift count is undefined, don't fold it; let the machine do
4337 what it wants. But truncate it if the machine will do that. */
4338 if (arg1 < 0)
4339 return 0;
4340
4341#ifdef SHIFT_COUNT_TRUNCATED
85c0a556 4342 if (SHIFT_COUNT_TRUNCATED)
4d61f8c5 4343 arg1 %= width;
7afe21cc
RK
4344#endif
4345
906c4e36 4346 val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
7afe21cc
RK
4347 break;
4348
4349 case ASHIFT:
7afe21cc
RK
4350 if (arg1 < 0)
4351 return 0;
4352
4353#ifdef SHIFT_COUNT_TRUNCATED
85c0a556 4354 if (SHIFT_COUNT_TRUNCATED)
4d61f8c5 4355 arg1 %= width;
7afe21cc
RK
4356#endif
4357
906c4e36 4358 val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
7afe21cc
RK
4359 break;
4360
4361 case ASHIFTRT:
4362 if (arg1 < 0)
4363 return 0;
4364
4365#ifdef SHIFT_COUNT_TRUNCATED
85c0a556 4366 if (SHIFT_COUNT_TRUNCATED)
4d61f8c5 4367 arg1 %= width;
7afe21cc
RK
4368#endif
4369
7afe21cc 4370 val = arg0s >> arg1;
2166571b
RS
4371
4372 /* Bootstrap compiler may not have sign extended the right shift.
4373 Manually extend the sign to insure bootstrap cc matches gcc. */
4374 if (arg0s < 0 && arg1 > 0)
4375 val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
4376
7afe21cc
RK
4377 break;
4378
4379 case ROTATERT:
4380 if (arg1 < 0)
4381 return 0;
4382
4383 arg1 %= width;
906c4e36
RK
4384 val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
4385 | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
7afe21cc
RK
4386 break;
4387
4388 case ROTATE:
4389 if (arg1 < 0)
4390 return 0;
4391
4392 arg1 %= width;
906c4e36
RK
4393 val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
4394 | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
7afe21cc
RK
4395 break;
4396
4397 case COMPARE:
4398 /* Do nothing here. */
4399 return 0;
4400
830a38ee
RS
4401 case SMIN:
4402 val = arg0s <= arg1s ? arg0s : arg1s;
4403 break;
4404
4405 case UMIN:
906c4e36
RK
4406 val = ((unsigned HOST_WIDE_INT) arg0
4407 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
830a38ee
RS
4408 break;
4409
4410 case SMAX:
4411 val = arg0s > arg1s ? arg0s : arg1s;
4412 break;
4413
4414 case UMAX:
906c4e36
RK
4415 val = ((unsigned HOST_WIDE_INT) arg0
4416 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
830a38ee
RS
4417 break;
4418
7afe21cc
RK
4419 default:
4420 abort ();
4421 }
4422
7e4ce834 4423 val = trunc_int_for_mode (val, mode);
ad89d6f6 4424
906c4e36 4425 return GEN_INT (val);
7afe21cc
RK
4426}
4427\f
96b0e481
RK
4428/* Simplify a PLUS or MINUS, at least one of whose operands may be another
4429 PLUS or MINUS.
4430
4431 Rather than test for specific case, we do this by a brute-force method
4432 and do all possible simplifications until no more changes occur. Then
4433 we rebuild the operation. */
4434
/* Simplify a PLUS or MINUS, at least one of whose operands may be another
   PLUS or MINUS, by flattening the whole expression into an array of
   operands and brute-force simplifying every pair.

   OPS[i] holds each flattened operand; NEGS[i] is nonzero when that
   operand is logically subtracted.  Returns the rebuilt rtx, or 0 if no
   net simplification was achieved.  */

static rtx
simplify_plus_minus (code, mode, op0, op1)
     enum rtx_code code;
     enum machine_mode mode;
     rtx op0, op1;
{
  rtx ops[8];
  int negs[8];
  rtx result, tem;
  int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
  int first = 1, negate = 0, changed;
  int i, j;

  bzero ((char *) ops, sizeof ops);

  /* Set up the two operands and then expand them until nothing has been
     changed.  If we run out of room in our array, give up; this should
     almost never happen.  */

  ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);

  changed = 1;
  while (changed)
    {
      changed = 0;

      for (i = 0; i < n_ops; i++)
	switch (GET_CODE (ops[i]))
	  {
	  case PLUS:
	  case MINUS:
	    /* Splitting needs a free slot; 7 is the cap since one new
	       entry is appended below (array holds 8).  */
	    if (n_ops == 7)
	      return 0;

	    ops[n_ops] = XEXP (ops[i], 1);
	    /* The second arm of a MINUS flips its sign.  */
	    negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
	    ops[i] = XEXP (ops[i], 0);
	    input_ops++;
	    changed = 1;
	    break;

	  case NEG:
	    ops[i] = XEXP (ops[i], 0);
	    negs[i] = ! negs[i];
	    changed = 1;
	    break;

	  case CONST:
	    /* Strip the CONST wrapper; remember how many inputs had one
	       so the profitability test below can compare.  */
	    ops[i] = XEXP (ops[i], 0);
	    input_consts++;
	    changed = 1;
	    break;

	  case NOT:
	    /* ~a -> (-a - 1) */
	    if (n_ops != 7)
	      {
		ops[n_ops] = constm1_rtx;
		negs[n_ops++] = negs[i];
		ops[i] = XEXP (ops[i], 0);
		negs[i] = ! negs[i];
		changed = 1;
	      }
	    break;

	  case CONST_INT:
	    /* Fold the negation into the constant itself.  */
	    if (negs[i])
	      ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
	    break;

	  default:
	    break;
	  }
    }

  /* If we only have two operands, we can't do anything.  */
  if (n_ops <= 2)
    return 0;

  /* Now simplify each pair of operands until nothing changes.  The first
     time through just simplify constants against each other.  */

  changed = 1;
  while (changed)
    {
      changed = first;

      for (i = 0; i < n_ops - 1; i++)
	for (j = i + 1; j < n_ops; j++)
	  if (ops[i] != 0 && ops[j] != 0
	      && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
	    {
	      rtx lhs = ops[i], rhs = ops[j];
	      enum rtx_code ncode = PLUS;

	      /* Choose PLUS or MINUS so the pair's signs are honored;
	         if only OPS[i] is negated, swap so MINUS subtracts it.  */
	      if (negs[i] && ! negs[j])
		lhs = ops[j], rhs = ops[i], ncode = MINUS;
	      else if (! negs[i] && negs[j])
		ncode = MINUS;

	      tem = simplify_binary_operation (ncode, mode, lhs, rhs);
	      if (tem)
		{
		  /* The pair collapsed: keep the result in slot I, mark
		     slot J dead, and renormalize the sign bookkeeping.  */
		  ops[i] = tem, ops[j] = 0;
		  negs[i] = negs[i] && negs[j];
		  if (GET_CODE (tem) == NEG)
		    ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];

		  if (GET_CODE (ops[i]) == CONST_INT && negs[i])
		    ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
		  changed = 1;
		}
	    }

      first = 0;
    }

  /* Pack all the operands to the lower-numbered entries and give up if
     we didn't reduce the number of operands we had.  Make sure we
     count a CONST as two operands.  If we have the same number of
     operands, but have made more CONSTs than we had, this is also
     an improvement, so accept it.  */

  for (i = 0, j = 0; j < n_ops; j++)
    if (ops[j] != 0)
      {
	ops[i] = ops[j], negs[i++] = negs[j];
	if (GET_CODE (ops[j]) == CONST)
	  n_consts++;
      }

  if (i + n_consts > input_ops
      || (i + n_consts == input_ops && n_consts <= input_consts))
    return 0;

  n_ops = i;

  /* If we have a CONST_INT, put it last.  */
  for (i = 0; i < n_ops - 1; i++)
    if (GET_CODE (ops[i]) == CONST_INT)
      {
	tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i] , ops[i] = tem;
	j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
      }

  /* Put a non-negated operand first.  If there aren't any, make all
     operands positive and negate the whole thing later.  */
  for (i = 0; i < n_ops && negs[i]; i++)
    ;

  if (i == n_ops)
    {
      for (i = 0; i < n_ops; i++)
	negs[i] = 0;
      negate = 1;
    }
  else if (i != 0)
    {
      tem = ops[0], ops[0] = ops[i], ops[i] = tem;
      j = negs[0], negs[0] = negs[i], negs[i] = j;
    }

  /* Now make the result by performing the requested operations.  */
  result = ops[0];
  for (i = 1; i < n_ops; i++)
    result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);

  return negate ? gen_rtx_NEG (mode, result) : result;
}
4604\f
4605/* Make a binary operation by properly ordering the operands and
4606 seeing if the expression folds. */
4607
4608static rtx
4609cse_gen_binary (code, mode, op0, op1)
4610 enum rtx_code code;
4611 enum machine_mode mode;
4612 rtx op0, op1;
4613{
4614 rtx tem;
4615
4616 /* Put complex operands first and constants second if commutative. */
4617 if (GET_RTX_CLASS (code) == 'c'
4618 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4619 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4620 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4621 || (GET_CODE (op0) == SUBREG
4622 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4623 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4624 tem = op0, op0 = op1, op1 = tem;
4625
4626 /* If this simplifies, do it. */
4627 tem = simplify_binary_operation (code, mode, op0, op1);
4628
4629 if (tem)
4630 return tem;
4631
4632 /* Handle addition and subtraction of CONST_INT specially. Otherwise,
4633 just form the operation. */
4634
4635 if (code == PLUS && GET_CODE (op1) == CONST_INT
4636 && GET_MODE (op0) != VOIDmode)
4637 return plus_constant (op0, INTVAL (op1));
4638 else if (code == MINUS && GET_CODE (op1) == CONST_INT
4639 && GET_MODE (op0) != VOIDmode)
4640 return plus_constant (op0, - INTVAL (op1));
4641 else
38a448ca 4642 return gen_rtx_fmt_ee (code, mode, op0, op1);
96b0e481
RK
4643}
4644\f
1a87eea2
KG
/* Argument/result bundle passed through do_float_handler to
   check_fold_consts, so a floating-point trap during constant
   comparison can be caught without losing the results.  */
struct cfc_args
{
  /* Input: the two CONST_DOUBLE operands to compare.  */
  rtx op0, op1;
  /* Output: nonzero when op0 == op1, op0 < op1, op1 < op0 respectively.  */
  int equal, op0lt, op1lt;
};
4652
4653static void
4654check_fold_consts (data)
4655 PTR data;
4656{
4657 struct cfc_args * args = (struct cfc_args *) data;
4658 REAL_VALUE_TYPE d0, d1;
4659
4660 REAL_VALUE_FROM_CONST_DOUBLE (d0, args->op0);
4661 REAL_VALUE_FROM_CONST_DOUBLE (d1, args->op1);
4662 args->equal = REAL_VALUES_EQUAL (d0, d1);
4663 args->op0lt = REAL_VALUES_LESS (d0, d1);
4664 args->op1lt = REAL_VALUES_LESS (d1, d0);
4665}
4666
7afe21cc 4667/* Like simplify_binary_operation except used for relational operators.
a432f20d
RK
4668 MODE is the mode of the operands, not that of the result. If MODE
4669 is VOIDmode, both operands must also be VOIDmode and we compare the
4670 operands in "infinite precision".
4671
4672 If no simplification is possible, this function returns zero. Otherwise,
4673 it returns either const_true_rtx or const0_rtx. */
7afe21cc
RK
4674
4675rtx
4676simplify_relational_operation (code, mode, op0, op1)
4677 enum rtx_code code;
4678 enum machine_mode mode;
4679 rtx op0, op1;
4680{
a432f20d
RK
4681 int equal, op0lt, op0ltu, op1lt, op1ltu;
4682 rtx tem;
7afe21cc
RK
4683
4684 /* If op0 is a compare, extract the comparison arguments from it. */
4685 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4686 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4687
28bad1cb
RK
4688 /* We can't simplify MODE_CC values since we don't know what the
4689 actual comparison is. */
4690 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC
4691#ifdef HAVE_cc0
4692 || op0 == cc0_rtx
4693#endif
4694 )
31dcf83f
RS
4695 return 0;
4696
a432f20d
RK
4697 /* For integer comparisons of A and B maybe we can simplify A - B and can
4698 then simplify a comparison of that with zero. If A and B are both either
4699 a register or a CONST_INT, this can't help; testing for these cases will
4700 prevent infinite recursion here and speed things up.
4701
c27b5c62
JW
4702 If CODE is an unsigned comparison, then we can never do this optimization,
4703 because it gives an incorrect result if the subtraction wraps around zero.
4704 ANSI C defines unsigned operations such that they never overflow, and
4705 thus such cases can not be ignored. */
a432f20d
RK
4706
4707 if (INTEGRAL_MODE_P (mode) && op1 != const0_rtx
4708 && ! ((GET_CODE (op0) == REG || GET_CODE (op0) == CONST_INT)
4709 && (GET_CODE (op1) == REG || GET_CODE (op1) == CONST_INT))
4710 && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
c27b5c62 4711 && code != GTU && code != GEU && code != LTU && code != LEU)
a432f20d
RK
4712 return simplify_relational_operation (signed_condition (code),
4713 mode, tem, const0_rtx);
4714
4715 /* For non-IEEE floating-point, if the two operands are equal, we know the
4716 result. */
4717 if (rtx_equal_p (op0, op1)
4718 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4719 || ! FLOAT_MODE_P (GET_MODE (op0)) || flag_fast_math))
4720 equal = 1, op0lt = 0, op0ltu = 0, op1lt = 0, op1ltu = 0;
4721
4722 /* If the operands are floating-point constants, see if we can fold
4723 the result. */
6076248a 4724#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
a432f20d
RK
4725 else if (GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
4726 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4727 {
1a87eea2
KG
4728 struct cfc_args args;
4729
4730 /* Setup input for check_fold_consts() */
4731 args.op0 = op0;
4732 args.op1 = op1;
a432f20d 4733
1a87eea2
KG
4734 if (do_float_handler(check_fold_consts, (PTR) &args) == 0)
4735 /* We got an exception from check_fold_consts() */
a432f20d 4736 return 0;
7afe21cc 4737
1a87eea2
KG
4738 /* Receive output from check_fold_consts() */
4739 equal = args.equal;
4740 op0lt = op0ltu = args.op0lt;
4741 op1lt = op1ltu = args.op1lt;
a432f20d
RK
4742 }
4743#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
7afe21cc 4744
a432f20d
RK
4745 /* Otherwise, see if the operands are both integers. */
4746 else if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
4747 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
4748 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
4749 {
4750 int width = GET_MODE_BITSIZE (mode);
64812ded
RK
4751 HOST_WIDE_INT l0s, h0s, l1s, h1s;
4752 unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
7afe21cc 4753
a432f20d
RK
4754 /* Get the two words comprising each integer constant. */
4755 if (GET_CODE (op0) == CONST_DOUBLE)
4756 {
4757 l0u = l0s = CONST_DOUBLE_LOW (op0);
4758 h0u = h0s = CONST_DOUBLE_HIGH (op0);
7afe21cc 4759 }
a432f20d 4760 else
6076248a 4761 {
a432f20d 4762 l0u = l0s = INTVAL (op0);
cb3bb2a7 4763 h0u = h0s = l0s < 0 ? -1 : 0;
a432f20d 4764 }
6076248a 4765
a432f20d
RK
4766 if (GET_CODE (op1) == CONST_DOUBLE)
4767 {
4768 l1u = l1s = CONST_DOUBLE_LOW (op1);
4769 h1u = h1s = CONST_DOUBLE_HIGH (op1);
4770 }
4771 else
4772 {
4773 l1u = l1s = INTVAL (op1);
cb3bb2a7 4774 h1u = h1s = l1s < 0 ? -1 : 0;
a432f20d
RK
4775 }
4776
4777 /* If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT,
4778 we have to sign or zero-extend the values. */
4779 if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
4780 h0u = h1u = 0, h0s = l0s < 0 ? -1 : 0, h1s = l1s < 0 ? -1 : 0;
6076248a 4781
a432f20d
RK
4782 if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
4783 {
4784 l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
4785 l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
6076248a 4786
a432f20d
RK
4787 if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4788 l0s |= ((HOST_WIDE_INT) (-1) << width);
6076248a 4789
a432f20d
RK
4790 if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4791 l1s |= ((HOST_WIDE_INT) (-1) << width);
6076248a
RK
4792 }
4793
a432f20d
RK
4794 equal = (h0u == h1u && l0u == l1u);
4795 op0lt = (h0s < h1s || (h0s == h1s && l0s < l1s));
4796 op1lt = (h1s < h0s || (h1s == h0s && l1s < l0s));
4797 op0ltu = (h0u < h1u || (h0u == h1u && l0u < l1u));
4798 op1ltu = (h1u < h0u || (h1u == h0u && l1u < l0u));
4799 }
4800
4801 /* Otherwise, there are some code-specific tests we can make. */
4802 else
4803 {
7afe21cc
RK
4804 switch (code)
4805 {
4806 case EQ:
a432f20d
RK
4807 /* References to the frame plus a constant or labels cannot
4808 be zero, but a SYMBOL_REF can due to #pragma weak. */
4809 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4810 || GET_CODE (op0) == LABEL_REF)
1a7c818b 4811#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
a432f20d
RK
4812 /* On some machines, the ap reg can be 0 sometimes. */
4813 && op0 != arg_pointer_rtx
7afe21cc 4814#endif
a432f20d
RK
4815 )
4816 return const0_rtx;
4817 break;
7afe21cc
RK
4818
4819 case NE:
a432f20d
RK
4820 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4821 || GET_CODE (op0) == LABEL_REF)
1a7c818b 4822#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
a432f20d 4823 && op0 != arg_pointer_rtx
7afe21cc 4824#endif
a432f20d 4825 )
7afe21cc
RK
4826 return const_true_rtx;
4827 break;
4828
4829 case GEU:
a432f20d
RK
4830 /* Unsigned values are never negative. */
4831 if (op1 == const0_rtx)
7afe21cc
RK
4832 return const_true_rtx;
4833 break;
4834
4835 case LTU:
a432f20d 4836 if (op1 == const0_rtx)
7afe21cc
RK
4837 return const0_rtx;
4838 break;
4839
4840 case LEU:
4841 /* Unsigned values are never greater than the largest
4842 unsigned value. */
4843 if (GET_CODE (op1) == CONST_INT
4844 && INTVAL (op1) == GET_MODE_MASK (mode)
a432f20d
RK
4845 && INTEGRAL_MODE_P (mode))
4846 return const_true_rtx;
7afe21cc
RK
4847 break;
4848
4849 case GTU:
4850 if (GET_CODE (op1) == CONST_INT
4851 && INTVAL (op1) == GET_MODE_MASK (mode)
cbf6a543 4852 && INTEGRAL_MODE_P (mode))
7afe21cc
RK
4853 return const0_rtx;
4854 break;
e9a25f70
JL
4855
4856 default:
4857 break;
7afe21cc
RK
4858 }
4859
4860 return 0;
4861 }
4862
a432f20d
RK
4863 /* If we reach here, EQUAL, OP0LT, OP0LTU, OP1LT, and OP1LTU are set
4864 as appropriate. */
7afe21cc
RK
4865 switch (code)
4866 {
7afe21cc 4867 case EQ:
a432f20d
RK
4868 return equal ? const_true_rtx : const0_rtx;
4869 case NE:
4870 return ! equal ? const_true_rtx : const0_rtx;
7afe21cc 4871 case LT:
a432f20d 4872 return op0lt ? const_true_rtx : const0_rtx;
7afe21cc 4873 case GT:
a432f20d 4874 return op1lt ? const_true_rtx : const0_rtx;
7afe21cc 4875 case LTU:
a432f20d 4876 return op0ltu ? const_true_rtx : const0_rtx;
7afe21cc 4877 case GTU:
a432f20d
RK
4878 return op1ltu ? const_true_rtx : const0_rtx;
4879 case LE:
4880 return equal || op0lt ? const_true_rtx : const0_rtx;
4881 case GE:
4882 return equal || op1lt ? const_true_rtx : const0_rtx;
4883 case LEU:
4884 return equal || op0ltu ? const_true_rtx : const0_rtx;
4885 case GEU:
4886 return equal || op1ltu ? const_true_rtx : const0_rtx;
e9a25f70
JL
4887 default:
4888 abort ();
7afe21cc 4889 }
7afe21cc
RK
4890}
4891\f
/* Simplify CODE, an operation with result mode MODE and three operands,
   OP0, OP1, and OP2.  OP0_MODE was the mode of OP0 before it became
   a constant (needed for the *_EXTRACT cases, where the bit positions
   are relative to OP0's original width).  Return 0 if no
   simplifications is possible.  */

rtx
simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
     enum rtx_code code;
     enum machine_mode mode, op0_mode;
     rtx op0, op1, op2;
{
  int width = GET_MODE_BITSIZE (mode);

  /* VOIDmode means "infinite" precision; treat it as a full
     host-wide-int's worth of bits.  */
  if (width == 0)
    width = HOST_BITS_PER_WIDE_INT;

  switch (code)
    {
    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* (sign_extract CONST LEN POS) / (zero_extract CONST LEN POS):
	 fold only when all three operands are constant, the field lies
	 entirely within OP0's original mode, and the result fits in a
	 host word.  OP1 is the field length, OP2 the bit position.  */
      if (GET_CODE (op0) == CONST_INT
	  && GET_CODE (op1) == CONST_INT
	  && GET_CODE (op2) == CONST_INT
	  && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
	  && width <= HOST_BITS_PER_WIDE_INT)
	{
	  /* Extracting a bit-field from a constant */
	  HOST_WIDE_INT val = INTVAL (op0);

	  /* Shift the field down to bit 0.  With big-endian bit
	     numbering, POS counts from the most significant bit of
	     OP0_MODE, so convert it to a count from the low end.  */
	  if (BITS_BIG_ENDIAN)
	    val >>= (GET_MODE_BITSIZE (op0_mode)
		     - INTVAL (op2) - INTVAL (op1));
	  else
	    val >>= INTVAL (op2);

	  /* A full-word field needs no masking (and the shift below
	     would be undefined for LEN == HOST_BITS_PER_WIDE_INT).  */
	  if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
	    {
	      /* First zero-extend.  */
	      val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
	      /* If desired, propagate sign bit.  */
	      if (code == SIGN_EXTRACT
		  && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
		val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
	    }

	  /* Clear the bits that don't belong in our mode,
	     unless they and our sign bit are all one.
	     So we get either a reasonable negative value or a reasonable
	     unsigned value for this mode.  */
	  if (width < HOST_BITS_PER_WIDE_INT
	      && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
		  != ((HOST_WIDE_INT) (-1) << (width - 1))))
	    val &= ((HOST_WIDE_INT) 1 << width) - 1;

	  return GEN_INT (val);
	}
      break;

    case IF_THEN_ELSE:
      /* A constant condition selects one arm outright.  */
      if (GET_CODE (op0) == CONST_INT)
	return op0 != const0_rtx ? op1 : op2;

      /* Convert a == b ? b : a to "a".  */
      if (GET_CODE (op0) == NE && ! side_effects_p (op0)
	  && rtx_equal_p (XEXP (op0, 0), op1)
	  && rtx_equal_p (XEXP (op0, 1), op2))
	return op1;
      else if (GET_CODE (op0) == EQ && ! side_effects_p (op0)
	  && rtx_equal_p (XEXP (op0, 1), op1)
	  && rtx_equal_p (XEXP (op0, 0), op2))
	return op2;
      /* For any other comparison condition (rtx class '<'), try to
	 decide the comparison itself; if it folds to a known truth
	 value, pick the corresponding arm.  Side effects in the
	 condition must be preserved, so bail out if any.  */
      else if (GET_RTX_CLASS (GET_CODE (op0)) == '<' && ! side_effects_p (op0))
	{
	  rtx temp;
	  temp = simplify_relational_operation (GET_CODE (op0), op0_mode,
						XEXP (op0, 0), XEXP (op0, 1));
	  /* See if any simplifications were possible.  */
	  if (temp == const0_rtx)
	    return op2;
	  else if (temp == const1_rtx)
	    return op1;
	}
      break;

    default:
      /* Only the ternary codes above are expected here.  */
      abort ();
    }

  return 0;
}
4982\f
4983/* If X is a nontrivial arithmetic operation on an argument
4984 for which a constant value can be determined, return
4985 the result of operating on that value, as a constant.
4986 Otherwise, return X, possibly with one or more operands
4987 modified by recursive calls to this function.
4988
e7bb59fa
RK
4989 If X is a register whose contents are known, we do NOT
4990 return those contents here. equiv_constant is called to
4991 perform that task.
7afe21cc
RK
4992
4993 INSN is the insn that we may be modifying. If it is 0, make a copy
4994 of X before modifying it. */
4995
4996static rtx
4997fold_rtx (x, insn)
4998 rtx x;
4999 rtx insn;
5000{
5001 register enum rtx_code code;
5002 register enum machine_mode mode;
5003 register char *fmt;
906c4e36 5004 register int i;
7afe21cc
RK
5005 rtx new = 0;
5006 int copied = 0;
5007 int must_swap = 0;
5008
5009 /* Folded equivalents of first two operands of X. */
5010 rtx folded_arg0;
5011 rtx folded_arg1;
5012
5013 /* Constant equivalents of first three operands of X;
5014 0 when no such equivalent is known. */
5015 rtx const_arg0;
5016 rtx const_arg1;
5017 rtx const_arg2;
5018
5019 /* The mode of the first operand of X. We need this for sign and zero
5020 extends. */
5021 enum machine_mode mode_arg0;
5022
5023 if (x == 0)
5024 return x;
5025
5026 mode = GET_MODE (x);
5027 code = GET_CODE (x);
5028 switch (code)
5029 {
5030 case CONST:
5031 case CONST_INT:
5032 case CONST_DOUBLE:
5033 case SYMBOL_REF:
5034 case LABEL_REF:
5035 case REG:
5036 /* No use simplifying an EXPR_LIST
5037 since they are used only for lists of args
5038 in a function call's REG_EQUAL note. */
5039 case EXPR_LIST:
956d6950
JL
5040 /* Changing anything inside an ADDRESSOF is incorrect; we don't
5041 want to (e.g.,) make (addressof (const_int 0)) just because
5042 the location is known to be zero. */
5043 case ADDRESSOF:
7afe21cc
RK
5044 return x;
5045
5046#ifdef HAVE_cc0
5047 case CC0:
5048 return prev_insn_cc0;
5049#endif
5050
5051 case PC:
5052 /* If the next insn is a CODE_LABEL followed by a jump table,
5053 PC's value is a LABEL_REF pointing to that label. That
5054 lets us fold switch statements on the Vax. */
5055 if (insn && GET_CODE (insn) == JUMP_INSN)
5056 {
5057 rtx next = next_nonnote_insn (insn);
5058
5059 if (next && GET_CODE (next) == CODE_LABEL
5060 && NEXT_INSN (next) != 0
5061 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
5062 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
5063 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
38a448ca 5064 return gen_rtx_LABEL_REF (Pmode, next);
7afe21cc
RK
5065 }
5066 break;
5067
5068 case SUBREG:
c610adec
RK
5069 /* See if we previously assigned a constant value to this SUBREG. */
5070 if ((new = lookup_as_function (x, CONST_INT)) != 0
5071 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
7afe21cc
RK
5072 return new;
5073
4b980e20
RK
5074 /* If this is a paradoxical SUBREG, we have no idea what value the
5075 extra bits would have. However, if the operand is equivalent
5076 to a SUBREG whose operand is the same as our mode, and all the
5077 modes are within a word, we can just use the inner operand
31c85c78
RK
5078 because these SUBREGs just say how to treat the register.
5079
5080 Similarly if we find an integer constant. */
4b980e20 5081
e5f6a288 5082 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4b980e20
RK
5083 {
5084 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
5085 struct table_elt *elt;
5086
5087 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
5088 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
5089 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
5090 imode)) != 0)
31c85c78
RK
5091 for (elt = elt->first_same_value;
5092 elt; elt = elt->next_same_value)
5093 {
5094 if (CONSTANT_P (elt->exp)
5095 && GET_MODE (elt->exp) == VOIDmode)
5096 return elt->exp;
5097
4b980e20
RK
5098 if (GET_CODE (elt->exp) == SUBREG
5099 && GET_MODE (SUBREG_REG (elt->exp)) == mode
906c4e36 5100 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4b980e20
RK
5101 return copy_rtx (SUBREG_REG (elt->exp));
5102 }
5103
5104 return x;
5105 }
e5f6a288 5106
7afe21cc
RK
5107 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
5108 We might be able to if the SUBREG is extracting a single word in an
5109 integral mode or extracting the low part. */
5110
5111 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
5112 const_arg0 = equiv_constant (folded_arg0);
5113 if (const_arg0)
5114 folded_arg0 = const_arg0;
5115
5116 if (folded_arg0 != SUBREG_REG (x))
5117 {
5118 new = 0;
5119
5120 if (GET_MODE_CLASS (mode) == MODE_INT
5121 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5122 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
5123 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
5124 GET_MODE (SUBREG_REG (x)));
5125 if (new == 0 && subreg_lowpart_p (x))
5126 new = gen_lowpart_if_possible (mode, folded_arg0);
5127 if (new)
5128 return new;
5129 }
e5f6a288
RK
5130
5131 /* If this is a narrowing SUBREG and our operand is a REG, see if
858a47b1 5132 we can find an equivalence for REG that is an arithmetic operation
e5f6a288
RK
5133 in a wider mode where both operands are paradoxical SUBREGs
5134 from objects of our result mode. In that case, we couldn't report
5135 an equivalent value for that operation, since we don't know what the
5136 extra bits will be. But we can find an equivalence for this SUBREG
5137 by folding that operation is the narrow mode. This allows us to
5138 fold arithmetic in narrow modes when the machine only supports
4b980e20
RK
5139 word-sized arithmetic.
5140
5141 Also look for a case where we have a SUBREG whose operand is the
5142 same as our result. If both modes are smaller than a word, we
5143 are simply interpreting a register in different modes and we
5144 can use the inner value. */
e5f6a288
RK
5145
5146 if (GET_CODE (folded_arg0) == REG
e8d76a39
RS
5147 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
5148 && subreg_lowpart_p (x))
e5f6a288
RK
5149 {
5150 struct table_elt *elt;
5151
5152 /* We can use HASH here since we know that canon_hash won't be
5153 called. */
5154 elt = lookup (folded_arg0,
5155 HASH (folded_arg0, GET_MODE (folded_arg0)),
5156 GET_MODE (folded_arg0));
5157
5158 if (elt)
5159 elt = elt->first_same_value;
5160
5161 for (; elt; elt = elt->next_same_value)
5162 {
e8d76a39
RS
5163 enum rtx_code eltcode = GET_CODE (elt->exp);
5164
e5f6a288
RK
5165 /* Just check for unary and binary operations. */
5166 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
5167 && GET_CODE (elt->exp) != SIGN_EXTEND
5168 && GET_CODE (elt->exp) != ZERO_EXTEND
5169 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
5170 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
5171 {
5172 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
5173
5174 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
906c4e36 5175 op0 = fold_rtx (op0, NULL_RTX);
e5f6a288
RK
5176
5177 op0 = equiv_constant (op0);
5178 if (op0)
5179 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
5180 op0, mode);
5181 }
5182 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
5183 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
e8d76a39
RS
5184 && eltcode != DIV && eltcode != MOD
5185 && eltcode != UDIV && eltcode != UMOD
5186 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
5187 && eltcode != ROTATE && eltcode != ROTATERT
e5f6a288
RK
5188 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
5189 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
5190 == mode))
5191 || CONSTANT_P (XEXP (elt->exp, 0)))
5192 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
5193 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
5194 == mode))
5195 || CONSTANT_P (XEXP (elt->exp, 1))))
5196 {
5197 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
5198 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
5199
5200 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
906c4e36 5201 op0 = fold_rtx (op0, NULL_RTX);
e5f6a288
RK
5202
5203 if (op0)
5204 op0 = equiv_constant (op0);
5205
5206 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
906c4e36 5207 op1 = fold_rtx (op1, NULL_RTX);
e5f6a288
RK
5208
5209 if (op1)
5210 op1 = equiv_constant (op1);
5211
76fb0b60
RS
5212 /* If we are looking for the low SImode part of
5213 (ashift:DI c (const_int 32)), it doesn't work
5214 to compute that in SImode, because a 32-bit shift
5215 in SImode is unpredictable. We know the value is 0. */
5216 if (op0 && op1
45620ed4 5217 && GET_CODE (elt->exp) == ASHIFT
76fb0b60
RS
5218 && GET_CODE (op1) == CONST_INT
5219 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
5220 {
5221 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
5222
5223 /* If the count fits in the inner mode's width,
5224 but exceeds the outer mode's width,
5225 the value will get truncated to 0
5226 by the subreg. */
5227 new = const0_rtx;
5228 else
5229 /* If the count exceeds even the inner mode's width,
5230 don't fold this expression. */
5231 new = 0;
5232 }
5233 else if (op0 && op1)
e5f6a288
RK
5234 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
5235 op0, op1);
5236 }
5237
4b980e20
RK
5238 else if (GET_CODE (elt->exp) == SUBREG
5239 && GET_MODE (SUBREG_REG (elt->exp)) == mode
5240 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
5241 <= UNITS_PER_WORD)
906c4e36 5242 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4b980e20
RK
5243 new = copy_rtx (SUBREG_REG (elt->exp));
5244
e5f6a288
RK
5245 if (new)
5246 return new;
5247 }
5248 }
5249
7afe21cc
RK
5250 return x;
5251
5252 case NOT:
5253 case NEG:
5254 /* If we have (NOT Y), see if Y is known to be (NOT Z).
5255 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
5256 new = lookup_as_function (XEXP (x, 0), code);
5257 if (new)
5258 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
5259 break;
13c9910f 5260
7afe21cc
RK
5261 case MEM:
5262 /* If we are not actually processing an insn, don't try to find the
5263 best address. Not only don't we care, but we could modify the
5264 MEM in an invalid way since we have no insn to validate against. */
5265 if (insn != 0)
5266 find_best_addr (insn, &XEXP (x, 0));
5267
5268 {
5269 /* Even if we don't fold in the insn itself,
5270 we can safely do so here, in hopes of getting a constant. */
906c4e36 5271 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
7afe21cc 5272 rtx base = 0;
906c4e36 5273 HOST_WIDE_INT offset = 0;
7afe21cc
RK
5274
5275 if (GET_CODE (addr) == REG
5276 && REGNO_QTY_VALID_P (REGNO (addr))
30f72379
MM
5277 && GET_MODE (addr) == qty_mode[REG_QTY (REGNO (addr))]
5278 && qty_const[REG_QTY (REGNO (addr))] != 0)
5279 addr = qty_const[REG_QTY (REGNO (addr))];
7afe21cc
RK
5280
5281 /* If address is constant, split it into a base and integer offset. */
5282 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
5283 base = addr;
5284 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
5285 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
5286 {
5287 base = XEXP (XEXP (addr, 0), 0);
5288 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
5289 }
5290 else if (GET_CODE (addr) == LO_SUM
5291 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
5292 base = XEXP (addr, 1);
e9a25f70 5293 else if (GET_CODE (addr) == ADDRESSOF)
956d6950 5294 return change_address (x, VOIDmode, addr);
7afe21cc
RK
5295
5296 /* If this is a constant pool reference, we can fold it into its
5297 constant to allow better value tracking. */
5298 if (base && GET_CODE (base) == SYMBOL_REF
5299 && CONSTANT_POOL_ADDRESS_P (base))
5300 {
5301 rtx constant = get_pool_constant (base);
5302 enum machine_mode const_mode = get_pool_mode (base);
5303 rtx new;
5304
5305 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
5306 constant_pool_entries_cost = COST (constant);
5307
5308 /* If we are loading the full constant, we have an equivalence. */
5309 if (offset == 0 && mode == const_mode)
5310 return constant;
5311
9faa82d8 5312 /* If this actually isn't a constant (weird!), we can't do
7afe21cc
RK
5313 anything. Otherwise, handle the two most common cases:
5314 extracting a word from a multi-word constant, and extracting
5315 the low-order bits. Other cases don't seem common enough to
5316 worry about. */
5317 if (! CONSTANT_P (constant))
5318 return x;
5319
5320 if (GET_MODE_CLASS (mode) == MODE_INT
5321 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5322 && offset % UNITS_PER_WORD == 0
5323 && (new = operand_subword (constant,
5324 offset / UNITS_PER_WORD,
5325 0, const_mode)) != 0)
5326 return new;
5327
5328 if (((BYTES_BIG_ENDIAN
5329 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
5330 || (! BYTES_BIG_ENDIAN && offset == 0))
5331 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
5332 return new;
5333 }
5334
5335 /* If this is a reference to a label at a known position in a jump
5336 table, we also know its value. */
5337 if (base && GET_CODE (base) == LABEL_REF)
5338 {
5339 rtx label = XEXP (base, 0);
5340 rtx table_insn = NEXT_INSN (label);
5341
5342 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5343 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
5344 {
5345 rtx table = PATTERN (table_insn);
5346
5347 if (offset >= 0
5348 && (offset / GET_MODE_SIZE (GET_MODE (table))
5349 < XVECLEN (table, 0)))
5350 return XVECEXP (table, 0,
5351 offset / GET_MODE_SIZE (GET_MODE (table)));
5352 }
5353 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5354 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
5355 {
5356 rtx table = PATTERN (table_insn);
5357
5358 if (offset >= 0
5359 && (offset / GET_MODE_SIZE (GET_MODE (table))
5360 < XVECLEN (table, 1)))
5361 {
5362 offset /= GET_MODE_SIZE (GET_MODE (table));
38a448ca
RH
5363 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
5364 XEXP (table, 0));
7afe21cc
RK
5365
5366 if (GET_MODE (table) != Pmode)
38a448ca 5367 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
7afe21cc 5368
67a37737
RK
5369 /* Indicate this is a constant. This isn't a
5370 valid form of CONST, but it will only be used
5371 to fold the next insns and then discarded, so
ac7ef8d5
FS
5372 it should be safe.
5373
5374 Note this expression must be explicitly discarded,
5375 by cse_insn, else it may end up in a REG_EQUAL note
5376 and "escape" to cause problems elsewhere. */
38a448ca 5377 return gen_rtx_CONST (GET_MODE (new), new);
7afe21cc
RK
5378 }
5379 }
5380 }
5381
5382 return x;
5383 }
9255709c
RK
5384
5385 case ASM_OPERANDS:
5386 for (i = XVECLEN (x, 3) - 1; i >= 0; i--)
5387 validate_change (insn, &XVECEXP (x, 3, i),
5388 fold_rtx (XVECEXP (x, 3, i), insn), 0);
5389 break;
e9a25f70
JL
5390
5391 default:
5392 break;
7afe21cc
RK
5393 }
5394
5395 const_arg0 = 0;
5396 const_arg1 = 0;
5397 const_arg2 = 0;
5398 mode_arg0 = VOIDmode;
5399
5400 /* Try folding our operands.
5401 Then see which ones have constant values known. */
5402
5403 fmt = GET_RTX_FORMAT (code);
5404 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5405 if (fmt[i] == 'e')
5406 {
5407 rtx arg = XEXP (x, i);
5408 rtx folded_arg = arg, const_arg = 0;
5409 enum machine_mode mode_arg = GET_MODE (arg);
5410 rtx cheap_arg, expensive_arg;
5411 rtx replacements[2];
5412 int j;
5413
5414 /* Most arguments are cheap, so handle them specially. */
5415 switch (GET_CODE (arg))
5416 {
5417 case REG:
5418 /* This is the same as calling equiv_constant; it is duplicated
5419 here for speed. */
5420 if (REGNO_QTY_VALID_P (REGNO (arg))
30f72379
MM
5421 && qty_const[REG_QTY (REGNO (arg))] != 0
5422 && GET_CODE (qty_const[REG_QTY (REGNO (arg))]) != REG
5423 && GET_CODE (qty_const[REG_QTY (REGNO (arg))]) != PLUS)
7afe21cc
RK
5424 const_arg
5425 = gen_lowpart_if_possible (GET_MODE (arg),
30f72379 5426 qty_const[REG_QTY (REGNO (arg))]);
7afe21cc
RK
5427 break;
5428
5429 case CONST:
5430 case CONST_INT:
5431 case SYMBOL_REF:
5432 case LABEL_REF:
5433 case CONST_DOUBLE:
5434 const_arg = arg;
5435 break;
5436
5437#ifdef HAVE_cc0
5438 case CC0:
5439 folded_arg = prev_insn_cc0;
5440 mode_arg = prev_insn_cc0_mode;
5441 const_arg = equiv_constant (folded_arg);
5442 break;
5443#endif
5444
5445 default:
5446 folded_arg = fold_rtx (arg, insn);
5447 const_arg = equiv_constant (folded_arg);
5448 }
5449
5450 /* For the first three operands, see if the operand
5451 is constant or equivalent to a constant. */
5452 switch (i)
5453 {
5454 case 0:
5455 folded_arg0 = folded_arg;
5456 const_arg0 = const_arg;
5457 mode_arg0 = mode_arg;
5458 break;
5459 case 1:
5460 folded_arg1 = folded_arg;
5461 const_arg1 = const_arg;
5462 break;
5463 case 2:
5464 const_arg2 = const_arg;
5465 break;
5466 }
5467
5468 /* Pick the least expensive of the folded argument and an
5469 equivalent constant argument. */
5470 if (const_arg == 0 || const_arg == folded_arg
5471 || COST (const_arg) > COST (folded_arg))
5472 cheap_arg = folded_arg, expensive_arg = const_arg;
5473 else
5474 cheap_arg = const_arg, expensive_arg = folded_arg;
5475
5476 /* Try to replace the operand with the cheapest of the two
5477 possibilities. If it doesn't work and this is either of the first
5478 two operands of a commutative operation, try swapping them.
5479 If THAT fails, try the more expensive, provided it is cheaper
5480 than what is already there. */
5481
5482 if (cheap_arg == XEXP (x, i))
5483 continue;
5484
5485 if (insn == 0 && ! copied)
5486 {
5487 x = copy_rtx (x);
5488 copied = 1;
5489 }
5490
5491 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
5492 for (j = 0;
5493 j < 2 && replacements[j]
5494 && COST (replacements[j]) < COST (XEXP (x, i));
5495 j++)
5496 {
5497 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
5498 break;
5499
5500 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
5501 {
5502 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
5503 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
5504
5505 if (apply_change_group ())
5506 {
5507 /* Swap them back to be invalid so that this loop can
5508 continue and flag them to be swapped back later. */
5509 rtx tem;
5510
5511 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
5512 XEXP (x, 1) = tem;
5513 must_swap = 1;
5514 break;
5515 }
5516 }
5517 }
5518 }
5519
2d8b0f3a
JL
5520 else
5521 {
5522 if (fmt[i] == 'E')
5523 /* Don't try to fold inside of a vector of expressions.
5524 Doing nothing is harmless. */
5525 {;}
5526 }
7afe21cc
RK
5527
5528 /* If a commutative operation, place a constant integer as the second
5529 operand unless the first operand is also a constant integer. Otherwise,
5530 place any constant second unless the first operand is also a constant. */
5531
5532 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5533 {
5534 if (must_swap || (const_arg0
5535 && (const_arg1 == 0
5536 || (GET_CODE (const_arg0) == CONST_INT
5537 && GET_CODE (const_arg1) != CONST_INT))))
5538 {
5539 register rtx tem = XEXP (x, 0);
5540
5541 if (insn == 0 && ! copied)
5542 {
5543 x = copy_rtx (x);
5544 copied = 1;
5545 }
5546
5547 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
5548 validate_change (insn, &XEXP (x, 1), tem, 1);
5549 if (apply_change_group ())
5550 {
5551 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
5552 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
5553 }
5554 }
5555 }
5556
5557 /* If X is an arithmetic operation, see if we can simplify it. */
5558
5559 switch (GET_RTX_CLASS (code))
5560 {
5561 case '1':
67a37737
RK
5562 {
5563 int is_const = 0;
5564
5565 /* We can't simplify extension ops unless we know the
5566 original mode. */
5567 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
5568 && mode_arg0 == VOIDmode)
5569 break;
5570
5571 /* If we had a CONST, strip it off and put it back later if we
5572 fold. */
5573 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
5574 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
5575
5576 new = simplify_unary_operation (code, mode,
5577 const_arg0 ? const_arg0 : folded_arg0,
5578 mode_arg0);
5579 if (new != 0 && is_const)
38a448ca 5580 new = gen_rtx_CONST (mode, new);
67a37737 5581 }
7afe21cc
RK
5582 break;
5583
5584 case '<':
5585 /* See what items are actually being compared and set FOLDED_ARG[01]
5586 to those values and CODE to the actual comparison code. If any are
5587 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
5588 do anything if both operands are already known to be constant. */
5589
5590 if (const_arg0 == 0 || const_arg1 == 0)
5591 {
5592 struct table_elt *p0, *p1;
c610adec 5593 rtx true = const_true_rtx, false = const0_rtx;
13c9910f 5594 enum machine_mode mode_arg1;
c610adec
RK
5595
5596#ifdef FLOAT_STORE_FLAG_VALUE
c7c955ee 5597 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
c610adec 5598 {
560c94a2
RK
5599 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5600 mode);
c610adec
RK
5601 false = CONST0_RTX (mode);
5602 }
5603#endif
7afe21cc 5604
13c9910f
RS
5605 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5606 &mode_arg0, &mode_arg1);
7afe21cc
RK
5607 const_arg0 = equiv_constant (folded_arg0);
5608 const_arg1 = equiv_constant (folded_arg1);
5609
13c9910f
RS
5610 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5611 what kinds of things are being compared, so we can't do
5612 anything with this comparison. */
7afe21cc
RK
5613
5614 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5615 break;
5616
0f41302f
MS
5617 /* If we do not now have two constants being compared, see
5618 if we can nevertheless deduce some things about the
5619 comparison. */
7afe21cc
RK
5620 if (const_arg0 == 0 || const_arg1 == 0)
5621 {
0f41302f
MS
5622 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or
5623 non-explicit constant? These aren't zero, but we
5624 don't know their sign. */
7afe21cc
RK
5625 if (const_arg1 == const0_rtx
5626 && (NONZERO_BASE_PLUS_P (folded_arg0)
5627#if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5628 come out as 0. */
5629 || GET_CODE (folded_arg0) == SYMBOL_REF
5630#endif
5631 || GET_CODE (folded_arg0) == LABEL_REF
5632 || GET_CODE (folded_arg0) == CONST))
5633 {
5634 if (code == EQ)
c610adec 5635 return false;
7afe21cc 5636 else if (code == NE)
c610adec 5637 return true;
7afe21cc
RK
5638 }
5639
5640 /* See if the two operands are the same. We don't do this
5641 for IEEE floating-point since we can't assume x == x
5642 since x might be a NaN. */
5643
5644 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
a83afb65 5645 || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
7afe21cc
RK
5646 && (folded_arg0 == folded_arg1
5647 || (GET_CODE (folded_arg0) == REG
5648 && GET_CODE (folded_arg1) == REG
30f72379
MM
5649 && (REG_QTY (REGNO (folded_arg0))
5650 == REG_QTY (REGNO (folded_arg1))))
7afe21cc
RK
5651 || ((p0 = lookup (folded_arg0,
5652 (safe_hash (folded_arg0, mode_arg0)
5653 % NBUCKETS), mode_arg0))
5654 && (p1 = lookup (folded_arg1,
5655 (safe_hash (folded_arg1, mode_arg0)
5656 % NBUCKETS), mode_arg0))
5657 && p0->first_same_value == p1->first_same_value)))
5658 return ((code == EQ || code == LE || code == GE
5659 || code == LEU || code == GEU)
c610adec 5660 ? true : false);
7afe21cc
RK
5661
5662 /* If FOLDED_ARG0 is a register, see if the comparison we are
5663 doing now is either the same as we did before or the reverse
5664 (we only check the reverse if not floating-point). */
5665 else if (GET_CODE (folded_arg0) == REG)
5666 {
30f72379 5667 int qty = REG_QTY (REGNO (folded_arg0));
7afe21cc
RK
5668
5669 if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5670 && (comparison_dominates_p (qty_comparison_code[qty], code)
5671 || (comparison_dominates_p (qty_comparison_code[qty],
5672 reverse_condition (code))
cbf6a543 5673 && ! FLOAT_MODE_P (mode_arg0)))
7afe21cc
RK
5674 && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5675 || (const_arg1
5676 && rtx_equal_p (qty_comparison_const[qty],
5677 const_arg1))
5678 || (GET_CODE (folded_arg1) == REG
30f72379 5679 && (REG_QTY (REGNO (folded_arg1))
7afe21cc
RK
5680 == qty_comparison_qty[qty]))))
5681 return (comparison_dominates_p (qty_comparison_code[qty],
5682 code)
c610adec 5683 ? true : false);
7afe21cc
RK
5684 }
5685 }
5686 }
5687
5688 /* If we are comparing against zero, see if the first operand is
5689 equivalent to an IOR with a constant. If so, we may be able to
5690 determine the result of this comparison. */
5691
5692 if (const_arg1 == const0_rtx)
5693 {
5694 rtx y = lookup_as_function (folded_arg0, IOR);
5695 rtx inner_const;
5696
5697 if (y != 0
5698 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5699 && GET_CODE (inner_const) == CONST_INT
5700 && INTVAL (inner_const) != 0)
5701 {
5702 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
906c4e36
RK
5703 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
5704 && (INTVAL (inner_const)
5705 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
c610adec
RK
5706 rtx true = const_true_rtx, false = const0_rtx;
5707
5708#ifdef FLOAT_STORE_FLAG_VALUE
c7c955ee 5709 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
c610adec 5710 {
560c94a2
RK
5711 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5712 mode);
c610adec
RK
5713 false = CONST0_RTX (mode);
5714 }
5715#endif
7afe21cc
RK
5716
5717 switch (code)
5718 {
5719 case EQ:
c610adec 5720 return false;
7afe21cc 5721 case NE:
c610adec 5722 return true;
7afe21cc
RK
5723 case LT: case LE:
5724 if (has_sign)
c610adec 5725 return true;
7afe21cc
RK
5726 break;
5727 case GT: case GE:
5728 if (has_sign)
c610adec 5729 return false;
7afe21cc 5730 break;
e9a25f70
JL
5731 default:
5732 break;
7afe21cc
RK
5733 }
5734 }
5735 }
5736
5737 new = simplify_relational_operation (code, mode_arg0,
5738 const_arg0 ? const_arg0 : folded_arg0,
5739 const_arg1 ? const_arg1 : folded_arg1);
c610adec
RK
5740#ifdef FLOAT_STORE_FLAG_VALUE
5741 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5742 new = ((new == const0_rtx) ? CONST0_RTX (mode)
560c94a2 5743 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE, mode));
c610adec 5744#endif
7afe21cc
RK
5745 break;
5746
5747 case '2':
5748 case 'c':
5749 switch (code)
5750 {
5751 case PLUS:
5752 /* If the second operand is a LABEL_REF, see if the first is a MINUS
5753 with that LABEL_REF as its second operand. If so, the result is
5754 the first operand of that MINUS. This handles switches with an
5755 ADDR_DIFF_VEC table. */
5756 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5757 {
e650cbda
RK
5758 rtx y
5759 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
5760 : lookup_as_function (folded_arg0, MINUS);
7afe21cc
RK
5761
5762 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5763 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5764 return XEXP (y, 0);
67a37737
RK
5765
5766 /* Now try for a CONST of a MINUS like the above. */
e650cbda
RK
5767 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
5768 : lookup_as_function (folded_arg0, CONST))) != 0
67a37737
RK
5769 && GET_CODE (XEXP (y, 0)) == MINUS
5770 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5771 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg1, 0))
5772 return XEXP (XEXP (y, 0), 0);
7afe21cc 5773 }
c2cc0778 5774
e650cbda
RK
5775 /* Likewise if the operands are in the other order. */
5776 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
5777 {
5778 rtx y
5779 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
5780 : lookup_as_function (folded_arg1, MINUS);
5781
5782 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5783 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
5784 return XEXP (y, 0);
5785
5786 /* Now try for a CONST of a MINUS like the above. */
5787 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
5788 : lookup_as_function (folded_arg1, CONST))) != 0
5789 && GET_CODE (XEXP (y, 0)) == MINUS
5790 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5791 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg0, 0))
5792 return XEXP (XEXP (y, 0), 0);
5793 }
5794
c2cc0778
RK
5795 /* If second operand is a register equivalent to a negative
5796 CONST_INT, see if we can find a register equivalent to the
5797 positive constant. Make a MINUS if so. Don't do this for
5d595063 5798 a non-negative constant since we might then alternate between
c2cc0778 5799 chosing positive and negative constants. Having the positive
5d595063
RK
5800 constant previously-used is the more common case. Be sure
5801 the resulting constant is non-negative; if const_arg1 were
5802 the smallest negative number this would overflow: depending
5803 on the mode, this would either just be the same value (and
5804 hence not save anything) or be incorrect. */
5805 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
5806 && INTVAL (const_arg1) < 0
5807 && - INTVAL (const_arg1) >= 0
5808 && GET_CODE (folded_arg1) == REG)
c2cc0778
RK
5809 {
5810 rtx new_const = GEN_INT (- INTVAL (const_arg1));
5811 struct table_elt *p
5812 = lookup (new_const, safe_hash (new_const, mode) % NBUCKETS,
5813 mode);
5814
5815 if (p)
5816 for (p = p->first_same_value; p; p = p->next_same_value)
5817 if (GET_CODE (p->exp) == REG)
5818 return cse_gen_binary (MINUS, mode, folded_arg0,
5819 canon_reg (p->exp, NULL_RTX));
5820 }
13c9910f
RS
5821 goto from_plus;
5822
5823 case MINUS:
5824 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5825 If so, produce (PLUS Z C2-C). */
5826 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5827 {
5828 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5829 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
f3becefd
RK
5830 return fold_rtx (plus_constant (copy_rtx (y),
5831 -INTVAL (const_arg1)),
a3b5c94a 5832 NULL_RTX);
13c9910f 5833 }
7afe21cc 5834
0f41302f 5835 /* ... fall through ... */
7afe21cc 5836
13c9910f 5837 from_plus:
7afe21cc
RK
5838 case SMIN: case SMAX: case UMIN: case UMAX:
5839 case IOR: case AND: case XOR:
5840 case MULT: case DIV: case UDIV:
5841 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
5842 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5843 is known to be of similar form, we may be able to replace the
5844 operation with a combined operation. This may eliminate the
5845 intermediate operation if every use is simplified in this way.
5846 Note that the similar optimization done by combine.c only works
5847 if the intermediate operation's result has only one reference. */
5848
5849 if (GET_CODE (folded_arg0) == REG
5850 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5851 {
5852 int is_shift
5853 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5854 rtx y = lookup_as_function (folded_arg0, code);
5855 rtx inner_const;
5856 enum rtx_code associate_code;
5857 rtx new_const;
5858
5859 if (y == 0
5860 || 0 == (inner_const
5861 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5862 || GET_CODE (inner_const) != CONST_INT
5863 /* If we have compiled a statement like
5864 "if (x == (x & mask1))", and now are looking at
5865 "x & mask2", we will have a case where the first operand
5866 of Y is the same as our first operand. Unless we detect
5867 this case, an infinite loop will result. */
5868 || XEXP (y, 0) == folded_arg0)
5869 break;
5870
5871 /* Don't associate these operations if they are a PLUS with the
5872 same constant and it is a power of two. These might be doable
5873 with a pre- or post-increment. Similarly for two subtracts of
5874 identical powers of two with post decrement. */
5875
5876 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
940da324
JL
5877 && ((HAVE_PRE_INCREMENT
5878 && exact_log2 (INTVAL (const_arg1)) >= 0)
5879 || (HAVE_POST_INCREMENT
5880 && exact_log2 (INTVAL (const_arg1)) >= 0)
5881 || (HAVE_PRE_DECREMENT
5882 && exact_log2 (- INTVAL (const_arg1)) >= 0)
5883 || (HAVE_POST_DECREMENT
5884 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
7afe21cc
RK
5885 break;
5886
5887 /* Compute the code used to compose the constants. For example,
5888 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
5889
5890 associate_code
5891 = (code == MULT || code == DIV || code == UDIV ? MULT
5892 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
5893
5894 new_const = simplify_binary_operation (associate_code, mode,
5895 const_arg1, inner_const);
5896
5897 if (new_const == 0)
5898 break;
5899
5900 /* If we are associating shift operations, don't let this
4908e508
RS
5901 produce a shift of the size of the object or larger.
5902 This could occur when we follow a sign-extend by a right
5903 shift on a machine that does a sign-extend as a pair
5904 of shifts. */
7afe21cc
RK
5905
5906 if (is_shift && GET_CODE (new_const) == CONST_INT
4908e508
RS
5907 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
5908 {
5909 /* As an exception, we can turn an ASHIFTRT of this
5910 form into a shift of the number of bits - 1. */
5911 if (code == ASHIFTRT)
5912 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
5913 else
5914 break;
5915 }
7afe21cc
RK
5916
5917 y = copy_rtx (XEXP (y, 0));
5918
5919 /* If Y contains our first operand (the most common way this
5920 can happen is if Y is a MEM), we would do into an infinite
5921 loop if we tried to fold it. So don't in that case. */
5922
5923 if (! reg_mentioned_p (folded_arg0, y))
5924 y = fold_rtx (y, insn);
5925
96b0e481 5926 return cse_gen_binary (code, mode, y, new_const);
7afe21cc 5927 }
e9a25f70
JL
5928 break;
5929
5930 default:
5931 break;
7afe21cc
RK
5932 }
5933
5934 new = simplify_binary_operation (code, mode,
5935 const_arg0 ? const_arg0 : folded_arg0,
5936 const_arg1 ? const_arg1 : folded_arg1);
5937 break;
5938
5939 case 'o':
5940 /* (lo_sum (high X) X) is simply X. */
5941 if (code == LO_SUM && const_arg0 != 0
5942 && GET_CODE (const_arg0) == HIGH
5943 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
5944 return const_arg1;
5945 break;
5946
5947 case '3':
5948 case 'b':
5949 new = simplify_ternary_operation (code, mode, mode_arg0,
5950 const_arg0 ? const_arg0 : folded_arg0,
5951 const_arg1 ? const_arg1 : folded_arg1,
5952 const_arg2 ? const_arg2 : XEXP (x, 2));
5953 break;
ee5332b8
RH
5954
5955 case 'x':
5956 /* Always eliminate CONSTANT_P_RTX at this stage. */
5957 if (code == CONSTANT_P_RTX)
5958 return (const_arg0 ? const1_rtx : const0_rtx);
5959 break;
7afe21cc
RK
5960 }
5961
5962 return new ? new : x;
5963}
5964\f
5965/* Return a constant value currently equivalent to X.
5966 Return 0 if we don't know one. */
5967
5968static rtx
5969equiv_constant (x)
5970 rtx x;
5971{
5972 if (GET_CODE (x) == REG
5973 && REGNO_QTY_VALID_P (REGNO (x))
30f72379
MM
5974 && qty_const[REG_QTY (REGNO (x))])
5975 x = gen_lowpart_if_possible (GET_MODE (x), qty_const[REG_QTY (REGNO (x))]);
7afe21cc 5976
2ce5e1b4 5977 if (x == 0 || CONSTANT_P (x))
7afe21cc
RK
5978 return x;
5979
fc3ffe83
RK
5980 /* If X is a MEM, try to fold it outside the context of any insn to see if
5981 it might be equivalent to a constant. That handles the case where it
5982 is a constant-pool reference. Then try to look it up in the hash table
5983 in case it is something whose value we have seen before. */
5984
5985 if (GET_CODE (x) == MEM)
5986 {
5987 struct table_elt *elt;
5988
906c4e36 5989 x = fold_rtx (x, NULL_RTX);
fc3ffe83
RK
5990 if (CONSTANT_P (x))
5991 return x;
5992
5993 elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
5994 if (elt == 0)
5995 return 0;
5996
5997 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
5998 if (elt->is_const && CONSTANT_P (elt->exp))
5999 return elt->exp;
6000 }
6001
7afe21cc
RK
6002 return 0;
6003}
6004\f
6005/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
6006 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
6007 least-significant part of X.
6008 MODE specifies how big a part of X to return.
6009
6010 If the requested operation cannot be done, 0 is returned.
6011
6012 This is similar to gen_lowpart in emit-rtl.c. */
6013
6014rtx
6015gen_lowpart_if_possible (mode, x)
6016 enum machine_mode mode;
6017 register rtx x;
6018{
6019 rtx result = gen_lowpart_common (mode, x);
6020
6021 if (result)
6022 return result;
6023 else if (GET_CODE (x) == MEM)
6024 {
6025 /* This is the only other case we handle. */
6026 register int offset = 0;
6027 rtx new;
6028
f76b9db2
ILT
6029 if (WORDS_BIG_ENDIAN)
6030 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
6031 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
6032 if (BYTES_BIG_ENDIAN)
6033 /* Adjust the address so that the address-after-the-data is
6034 unchanged. */
6035 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
6036 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
38a448ca 6037 new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
7afe21cc
RK
6038 if (! memory_address_p (mode, XEXP (new, 0)))
6039 return 0;
7afe21cc 6040 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
c6df88cb 6041 MEM_COPY_ATTRIBUTES (new, x);
7afe21cc
RK
6042 return new;
6043 }
6044 else
6045 return 0;
6046}
6047\f
6048/* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
6049 branch. It will be zero if not.
6050
6051 In certain cases, this can cause us to add an equivalence. For example,
6052 if we are following the taken case of
6053 if (i == 2)
6054 we can add the fact that `i' and '2' are now equivalent.
6055
6056 In any case, we can record that this comparison was passed. If the same
6057 comparison is seen later, we will know its value. */
6058
6059static void
6060record_jump_equiv (insn, taken)
6061 rtx insn;
6062 int taken;
6063{
6064 int cond_known_true;
6065 rtx op0, op1;
13c9910f 6066 enum machine_mode mode, mode0, mode1;
7afe21cc
RK
6067 int reversed_nonequality = 0;
6068 enum rtx_code code;
6069
6070 /* Ensure this is the right kind of insn. */
6071 if (! condjump_p (insn) || simplejump_p (insn))
6072 return;
6073
6074 /* See if this jump condition is known true or false. */
6075 if (taken)
6076 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
6077 else
6078 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
6079
6080 /* Get the type of comparison being done and the operands being compared.
6081 If we had to reverse a non-equality condition, record that fact so we
6082 know that it isn't valid for floating-point. */
6083 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
6084 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
6085 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
6086
13c9910f 6087 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
7afe21cc
RK
6088 if (! cond_known_true)
6089 {
6090 reversed_nonequality = (code != EQ && code != NE);
6091 code = reverse_condition (code);
6092 }
6093
6094 /* The mode is the mode of the non-constant. */
13c9910f
RS
6095 mode = mode0;
6096 if (mode1 != VOIDmode)
6097 mode = mode1;
7afe21cc
RK
6098
6099 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
6100}
6101
6102/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
6103 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
6104 Make any useful entries we can with that information. Called from
6105 above function and called recursively. */
6106
6107static void
6108record_jump_cond (code, mode, op0, op1, reversed_nonequality)
6109 enum rtx_code code;
6110 enum machine_mode mode;
6111 rtx op0, op1;
6112 int reversed_nonequality;
6113{
2197a88a 6114 unsigned op0_hash, op1_hash;
7afe21cc
RK
6115 int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
6116 struct table_elt *op0_elt, *op1_elt;
6117
6118 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
6119 we know that they are also equal in the smaller mode (this is also
6120 true for all smaller modes whether or not there is a SUBREG, but
ac7ef8d5 6121 is not worth testing for with no SUBREG). */
7afe21cc 6122
2e794ee8 6123 /* Note that GET_MODE (op0) may not equal MODE. */
7afe21cc 6124 if (code == EQ && GET_CODE (op0) == SUBREG
2e794ee8
RS
6125 && (GET_MODE_SIZE (GET_MODE (op0))
6126 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
7afe21cc
RK
6127 {
6128 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
6129 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
6130
6131 record_jump_cond (code, mode, SUBREG_REG (op0),
38a448ca 6132 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
7afe21cc
RK
6133 reversed_nonequality);
6134 }
6135
6136 if (code == EQ && GET_CODE (op1) == SUBREG
2e794ee8
RS
6137 && (GET_MODE_SIZE (GET_MODE (op1))
6138 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
7afe21cc
RK
6139 {
6140 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
6141 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
6142
6143 record_jump_cond (code, mode, SUBREG_REG (op1),
38a448ca 6144 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
7afe21cc
RK
6145 reversed_nonequality);
6146 }
6147
6148 /* Similarly, if this is an NE comparison, and either is a SUBREG
6149 making a smaller mode, we know the whole thing is also NE. */
6150
2e794ee8
RS
6151 /* Note that GET_MODE (op0) may not equal MODE;
6152 if we test MODE instead, we can get an infinite recursion
6153 alternating between two modes each wider than MODE. */
6154
7afe21cc
RK
6155 if (code == NE && GET_CODE (op0) == SUBREG
6156 && subreg_lowpart_p (op0)
2e794ee8
RS
6157 && (GET_MODE_SIZE (GET_MODE (op0))
6158 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
7afe21cc
RK
6159 {
6160 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
6161 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
6162
6163 record_jump_cond (code, mode, SUBREG_REG (op0),
38a448ca 6164 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
7afe21cc
RK
6165 reversed_nonequality);
6166 }
6167
6168 if (code == NE && GET_CODE (op1) == SUBREG
6169 && subreg_lowpart_p (op1)
2e794ee8
RS
6170 && (GET_MODE_SIZE (GET_MODE (op1))
6171 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
7afe21cc
RK
6172 {
6173 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
6174 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
6175
6176 record_jump_cond (code, mode, SUBREG_REG (op1),
38a448ca 6177 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
7afe21cc
RK
6178 reversed_nonequality);
6179 }
6180
6181 /* Hash both operands. */
6182
6183 do_not_record = 0;
6184 hash_arg_in_memory = 0;
6185 hash_arg_in_struct = 0;
2197a88a 6186 op0_hash = HASH (op0, mode);
7afe21cc
RK
6187 op0_in_memory = hash_arg_in_memory;
6188 op0_in_struct = hash_arg_in_struct;
6189
6190 if (do_not_record)
6191 return;
6192
6193 do_not_record = 0;
6194 hash_arg_in_memory = 0;
6195 hash_arg_in_struct = 0;
2197a88a 6196 op1_hash = HASH (op1, mode);
7afe21cc
RK
6197 op1_in_memory = hash_arg_in_memory;
6198 op1_in_struct = hash_arg_in_struct;
6199
6200 if (do_not_record)
6201 return;
6202
6203 /* Look up both operands. */
2197a88a
RK
6204 op0_elt = lookup (op0, op0_hash, mode);
6205 op1_elt = lookup (op1, op1_hash, mode);
7afe21cc 6206
af3869c1
RK
6207 /* If both operands are already equivalent or if they are not in the
6208 table but are identical, do nothing. */
6209 if ((op0_elt != 0 && op1_elt != 0
6210 && op0_elt->first_same_value == op1_elt->first_same_value)
6211 || op0 == op1 || rtx_equal_p (op0, op1))
6212 return;
6213
7afe21cc 6214 /* If we aren't setting two things equal all we can do is save this
b2796a4b
RK
6215 comparison. Similarly if this is floating-point. In the latter
6216 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
6217 If we record the equality, we might inadvertently delete code
6218 whose intent was to change -0 to +0. */
6219
cbf6a543 6220 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
7afe21cc
RK
6221 {
6222 /* If we reversed a floating-point comparison, if OP0 is not a
6223 register, or if OP1 is neither a register or constant, we can't
6224 do anything. */
6225
6226 if (GET_CODE (op1) != REG)
6227 op1 = equiv_constant (op1);
6228
cbf6a543 6229 if ((reversed_nonequality && FLOAT_MODE_P (mode))
7afe21cc
RK
6230 || GET_CODE (op0) != REG || op1 == 0)
6231 return;
6232
6233 /* Put OP0 in the hash table if it isn't already. This gives it a
6234 new quantity number. */
6235 if (op0_elt == 0)
6236 {
906c4e36 6237 if (insert_regs (op0, NULL_PTR, 0))
7afe21cc
RK
6238 {
6239 rehash_using_reg (op0);
2197a88a 6240 op0_hash = HASH (op0, mode);
2bb81c86
RK
6241
6242 /* If OP0 is contained in OP1, this changes its hash code
6243 as well. Faster to rehash than to check, except
6244 for the simple case of a constant. */
6245 if (! CONSTANT_P (op1))
2197a88a 6246 op1_hash = HASH (op1,mode);
7afe21cc
RK
6247 }
6248
2197a88a 6249 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
7afe21cc
RK
6250 op0_elt->in_memory = op0_in_memory;
6251 op0_elt->in_struct = op0_in_struct;
6252 }
6253
30f72379 6254 qty_comparison_code[REG_QTY (REGNO (op0))] = code;
7afe21cc
RK
6255 if (GET_CODE (op1) == REG)
6256 {
5d5ea909 6257 /* Look it up again--in case op0 and op1 are the same. */
2197a88a 6258 op1_elt = lookup (op1, op1_hash, mode);
5d5ea909 6259
7afe21cc
RK
6260 /* Put OP1 in the hash table so it gets a new quantity number. */
6261 if (op1_elt == 0)
6262 {
906c4e36 6263 if (insert_regs (op1, NULL_PTR, 0))
7afe21cc
RK
6264 {
6265 rehash_using_reg (op1);
2197a88a 6266 op1_hash = HASH (op1, mode);
7afe21cc
RK
6267 }
6268
2197a88a 6269 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
7afe21cc
RK
6270 op1_elt->in_memory = op1_in_memory;
6271 op1_elt->in_struct = op1_in_struct;
6272 }
6273
30f72379
MM
6274 qty_comparison_qty[REG_QTY (REGNO (op0))] = REG_QTY (REGNO (op1));
6275 qty_comparison_const[REG_QTY (REGNO (op0))] = 0;
7afe21cc
RK
6276 }
6277 else
6278 {
30f72379
MM
6279 qty_comparison_qty[REG_QTY (REGNO (op0))] = -1;
6280 qty_comparison_const[REG_QTY (REGNO (op0))] = op1;
7afe21cc
RK
6281 }
6282
6283 return;
6284 }
6285
eb5ad42a
RS
6286 /* If either side is still missing an equivalence, make it now,
6287 then merge the equivalences. */
7afe21cc 6288
7afe21cc
RK
6289 if (op0_elt == 0)
6290 {
eb5ad42a 6291 if (insert_regs (op0, NULL_PTR, 0))
7afe21cc
RK
6292 {
6293 rehash_using_reg (op0);
2197a88a 6294 op0_hash = HASH (op0, mode);
7afe21cc
RK
6295 }
6296
2197a88a 6297 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
7afe21cc
RK
6298 op0_elt->in_memory = op0_in_memory;
6299 op0_elt->in_struct = op0_in_struct;
7afe21cc
RK
6300 }
6301
6302 if (op1_elt == 0)
6303 {
eb5ad42a 6304 if (insert_regs (op1, NULL_PTR, 0))
7afe21cc
RK
6305 {
6306 rehash_using_reg (op1);
2197a88a 6307 op1_hash = HASH (op1, mode);
7afe21cc
RK
6308 }
6309
2197a88a 6310 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
7afe21cc
RK
6311 op1_elt->in_memory = op1_in_memory;
6312 op1_elt->in_struct = op1_in_struct;
7afe21cc 6313 }
eb5ad42a
RS
6314
6315 merge_equiv_classes (op0_elt, op1_elt);
6316 last_jump_equiv_class = op0_elt;
7afe21cc
RK
6317}
6318\f
/* CSE processing for one instruction.
   First simplify sources and addresses of all assignments
   in the instruction, using previously-computed equivalent values.
   Then install the new sources and destinations in the table
   of available values.

   If LIBCALL_INSN is nonzero, don't record any equivalence made in
   the insn.  It means that INSN is inside a libcall block.  In this
   case LIBCALL_INSN is the corresponding insn with REG_LIBCALL.  */
6328
6329/* Data on one SET contained in the instruction. */
6330
6331struct set
6332{
6333 /* The SET rtx itself. */
6334 rtx rtl;
6335 /* The SET_SRC of the rtx (the original value, if it is changing). */
6336 rtx src;
6337 /* The hash-table element for the SET_SRC of the SET. */
6338 struct table_elt *src_elt;
2197a88a
RK
6339 /* Hash value for the SET_SRC. */
6340 unsigned src_hash;
6341 /* Hash value for the SET_DEST. */
6342 unsigned dest_hash;
7afe21cc
RK
6343 /* The SET_DEST, with SUBREG, etc., stripped. */
6344 rtx inner_dest;
6345 /* Place where the pointer to the INNER_DEST was found. */
6346 rtx *inner_dest_loc;
6347 /* Nonzero if the SET_SRC is in memory. */
6348 char src_in_memory;
6349 /* Nonzero if the SET_SRC is in a structure. */
6350 char src_in_struct;
6351 /* Nonzero if the SET_SRC contains something
6352 whose value cannot be predicted and understood. */
6353 char src_volatile;
6354 /* Original machine mode, in case it becomes a CONST_INT. */
6355 enum machine_mode mode;
6356 /* A constant equivalent for SET_SRC, if any. */
6357 rtx src_const;
2197a88a
RK
6358 /* Hash value of constant equivalent for SET_SRC. */
6359 unsigned src_const_hash;
7afe21cc
RK
6360 /* Table entry for constant equivalent for SET_SRC, if any. */
6361 struct table_elt *src_const_elt;
6362};
6363
6364static void
7bd8b2a8 6365cse_insn (insn, libcall_insn)
7afe21cc 6366 rtx insn;
7bd8b2a8 6367 rtx libcall_insn;
7afe21cc
RK
6368{
6369 register rtx x = PATTERN (insn);
7afe21cc 6370 register int i;
92f9aa51 6371 rtx tem;
7afe21cc
RK
6372 register int n_sets = 0;
6373
2d8b0f3a 6374#ifdef HAVE_cc0
7afe21cc
RK
6375 /* Records what this insn does to set CC0. */
6376 rtx this_insn_cc0 = 0;
135d84b8 6377 enum machine_mode this_insn_cc0_mode = VOIDmode;
2d8b0f3a 6378#endif
7afe21cc
RK
6379
6380 rtx src_eqv = 0;
6381 struct table_elt *src_eqv_elt = 0;
6382 int src_eqv_volatile;
6383 int src_eqv_in_memory;
6384 int src_eqv_in_struct;
2197a88a 6385 unsigned src_eqv_hash;
7afe21cc
RK
6386
6387 struct set *sets;
6388
6389 this_insn = insn;
7afe21cc
RK
6390
6391 /* Find all the SETs and CLOBBERs in this instruction.
6392 Record all the SETs in the array `set' and count them.
6393 Also determine whether there is a CLOBBER that invalidates
6394 all memory references, or all references at varying addresses. */
6395
f1e7c95f
RK
6396 if (GET_CODE (insn) == CALL_INSN)
6397 {
6398 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6399 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
bb4034b3 6400 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
f1e7c95f
RK
6401 }
6402
7afe21cc
RK
6403 if (GET_CODE (x) == SET)
6404 {
6405 sets = (struct set *) alloca (sizeof (struct set));
6406 sets[0].rtl = x;
6407
6408 /* Ignore SETs that are unconditional jumps.
6409 They never need cse processing, so this does not hurt.
6410 The reason is not efficiency but rather
6411 so that we can test at the end for instructions
6412 that have been simplified to unconditional jumps
6413 and not be misled by unchanged instructions
6414 that were unconditional jumps to begin with. */
6415 if (SET_DEST (x) == pc_rtx
6416 && GET_CODE (SET_SRC (x)) == LABEL_REF)
6417 ;
6418
6419 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
6420 The hard function value register is used only once, to copy to
6421 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
6422 Ensure we invalidate the destination register. On the 80386 no
7722328e 6423 other code would invalidate it since it is a fixed_reg.
0f41302f 6424 We need not check the return of apply_change_group; see canon_reg. */
7afe21cc
RK
6425
6426 else if (GET_CODE (SET_SRC (x)) == CALL)
6427 {
6428 canon_reg (SET_SRC (x), insn);
77fa0940 6429 apply_change_group ();
7afe21cc 6430 fold_rtx (SET_SRC (x), insn);
bb4034b3 6431 invalidate (SET_DEST (x), VOIDmode);
7afe21cc
RK
6432 }
6433 else
6434 n_sets = 1;
6435 }
6436 else if (GET_CODE (x) == PARALLEL)
6437 {
6438 register int lim = XVECLEN (x, 0);
6439
6440 sets = (struct set *) alloca (lim * sizeof (struct set));
6441
6442 /* Find all regs explicitly clobbered in this insn,
6443 and ensure they are not replaced with any other regs
6444 elsewhere in this insn.
6445 When a reg that is clobbered is also used for input,
6446 we should presume that that is for a reason,
6447 and we should not substitute some other register
6448 which is not supposed to be clobbered.
6449 Therefore, this loop cannot be merged into the one below
830a38ee 6450 because a CALL may precede a CLOBBER and refer to the
7afe21cc
RK
6451 value clobbered. We must not let a canonicalization do
6452 anything in that case. */
6453 for (i = 0; i < lim; i++)
6454 {
6455 register rtx y = XVECEXP (x, 0, i);
2708da92
RS
6456 if (GET_CODE (y) == CLOBBER)
6457 {
6458 rtx clobbered = XEXP (y, 0);
6459
6460 if (GET_CODE (clobbered) == REG
6461 || GET_CODE (clobbered) == SUBREG)
bb4034b3 6462 invalidate (clobbered, VOIDmode);
2708da92
RS
6463 else if (GET_CODE (clobbered) == STRICT_LOW_PART
6464 || GET_CODE (clobbered) == ZERO_EXTRACT)
bb4034b3 6465 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
2708da92 6466 }
7afe21cc
RK
6467 }
6468
6469 for (i = 0; i < lim; i++)
6470 {
6471 register rtx y = XVECEXP (x, 0, i);
6472 if (GET_CODE (y) == SET)
6473 {
7722328e
RK
6474 /* As above, we ignore unconditional jumps and call-insns and
6475 ignore the result of apply_change_group. */
7afe21cc
RK
6476 if (GET_CODE (SET_SRC (y)) == CALL)
6477 {
6478 canon_reg (SET_SRC (y), insn);
77fa0940 6479 apply_change_group ();
7afe21cc 6480 fold_rtx (SET_SRC (y), insn);
bb4034b3 6481 invalidate (SET_DEST (y), VOIDmode);
7afe21cc
RK
6482 }
6483 else if (SET_DEST (y) == pc_rtx
6484 && GET_CODE (SET_SRC (y)) == LABEL_REF)
6485 ;
6486 else
6487 sets[n_sets++].rtl = y;
6488 }
6489 else if (GET_CODE (y) == CLOBBER)
6490 {
9ae8ffe7 6491 /* If we clobber memory, canon the address.
7afe21cc
RK
6492 This does nothing when a register is clobbered
6493 because we have already invalidated the reg. */
6494 if (GET_CODE (XEXP (y, 0)) == MEM)
9ae8ffe7 6495 canon_reg (XEXP (y, 0), NULL_RTX);
7afe21cc
RK
6496 }
6497 else if (GET_CODE (y) == USE
6498 && ! (GET_CODE (XEXP (y, 0)) == REG
6499 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
906c4e36 6500 canon_reg (y, NULL_RTX);
7afe21cc
RK
6501 else if (GET_CODE (y) == CALL)
6502 {
7722328e
RK
6503 /* The result of apply_change_group can be ignored; see
6504 canon_reg. */
7afe21cc 6505 canon_reg (y, insn);
77fa0940 6506 apply_change_group ();
7afe21cc
RK
6507 fold_rtx (y, insn);
6508 }
6509 }
6510 }
6511 else if (GET_CODE (x) == CLOBBER)
6512 {
6513 if (GET_CODE (XEXP (x, 0)) == MEM)
9ae8ffe7 6514 canon_reg (XEXP (x, 0), NULL_RTX);
7afe21cc
RK
6515 }
6516
6517 /* Canonicalize a USE of a pseudo register or memory location. */
6518 else if (GET_CODE (x) == USE
6519 && ! (GET_CODE (XEXP (x, 0)) == REG
6520 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
906c4e36 6521 canon_reg (XEXP (x, 0), NULL_RTX);
7afe21cc
RK
6522 else if (GET_CODE (x) == CALL)
6523 {
7722328e 6524 /* The result of apply_change_group can be ignored; see canon_reg. */
7afe21cc 6525 canon_reg (x, insn);
77fa0940 6526 apply_change_group ();
7afe21cc
RK
6527 fold_rtx (x, insn);
6528 }
6529
7b3ab05e
JW
6530 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
6531 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
6532 is handled specially for this case, and if it isn't set, then there will
9faa82d8 6533 be no equivalence for the destination. */
92f9aa51
RK
6534 if (n_sets == 1 && REG_NOTES (insn) != 0
6535 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
7b3ab05e
JW
6536 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
6537 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
92f9aa51 6538 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
7afe21cc
RK
6539
6540 /* Canonicalize sources and addresses of destinations.
6541 We do this in a separate pass to avoid problems when a MATCH_DUP is
6542 present in the insn pattern. In that case, we want to ensure that
6543 we don't break the duplicate nature of the pattern. So we will replace
6544 both operands at the same time. Otherwise, we would fail to find an
6545 equivalent substitution in the loop calling validate_change below.
7afe21cc
RK
6546
6547 We used to suppress canonicalization of DEST if it appears in SRC,
77fa0940 6548 but we don't do this any more. */
7afe21cc
RK
6549
6550 for (i = 0; i < n_sets; i++)
6551 {
6552 rtx dest = SET_DEST (sets[i].rtl);
6553 rtx src = SET_SRC (sets[i].rtl);
6554 rtx new = canon_reg (src, insn);
58873255 6555 int insn_code;
7afe21cc 6556
77fa0940
RK
6557 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
6558 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
6559 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
58873255
RK
6560 || (insn_code = recog_memoized (insn)) < 0
6561 || insn_n_dups[insn_code] > 0)
77fa0940 6562 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
7afe21cc
RK
6563 else
6564 SET_SRC (sets[i].rtl) = new;
6565
6566 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
6567 {
6568 validate_change (insn, &XEXP (dest, 1),
77fa0940 6569 canon_reg (XEXP (dest, 1), insn), 1);
7afe21cc 6570 validate_change (insn, &XEXP (dest, 2),
77fa0940 6571 canon_reg (XEXP (dest, 2), insn), 1);
7afe21cc
RK
6572 }
6573
6574 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
6575 || GET_CODE (dest) == ZERO_EXTRACT
6576 || GET_CODE (dest) == SIGN_EXTRACT)
6577 dest = XEXP (dest, 0);
6578
6579 if (GET_CODE (dest) == MEM)
6580 canon_reg (dest, insn);
6581 }
6582
77fa0940
RK
6583 /* Now that we have done all the replacements, we can apply the change
6584 group and see if they all work. Note that this will cause some
6585 canonicalizations that would have worked individually not to be applied
6586 because some other canonicalization didn't work, but this should not
7722328e
RK
6587 occur often.
6588
6589 The result of apply_change_group can be ignored; see canon_reg. */
77fa0940
RK
6590
6591 apply_change_group ();
6592
7afe21cc
RK
6593 /* Set sets[i].src_elt to the class each source belongs to.
6594 Detect assignments from or to volatile things
6595 and set set[i] to zero so they will be ignored
6596 in the rest of this function.
6597
6598 Nothing in this loop changes the hash table or the register chains. */
6599
6600 for (i = 0; i < n_sets; i++)
6601 {
6602 register rtx src, dest;
6603 register rtx src_folded;
6604 register struct table_elt *elt = 0, *p;
6605 enum machine_mode mode;
6606 rtx src_eqv_here;
6607 rtx src_const = 0;
6608 rtx src_related = 0;
6609 struct table_elt *src_const_elt = 0;
6610 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
6611 int src_related_cost = 10000, src_elt_cost = 10000;
6612 /* Set non-zero if we need to call force_const_mem on with the
6613 contents of src_folded before using it. */
6614 int src_folded_force_flag = 0;
6615
6616 dest = SET_DEST (sets[i].rtl);
6617 src = SET_SRC (sets[i].rtl);
6618
6619 /* If SRC is a constant that has no machine mode,
6620 hash it with the destination's machine mode.
6621 This way we can keep different modes separate. */
6622
6623 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6624 sets[i].mode = mode;
6625
6626 if (src_eqv)
6627 {
6628 enum machine_mode eqvmode = mode;
6629 if (GET_CODE (dest) == STRICT_LOW_PART)
6630 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6631 do_not_record = 0;
6632 hash_arg_in_memory = 0;
6633 hash_arg_in_struct = 0;
6634 src_eqv = fold_rtx (src_eqv, insn);
2197a88a 6635 src_eqv_hash = HASH (src_eqv, eqvmode);
7afe21cc
RK
6636
6637 /* Find the equivalence class for the equivalent expression. */
6638
6639 if (!do_not_record)
2197a88a 6640 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
7afe21cc
RK
6641
6642 src_eqv_volatile = do_not_record;
6643 src_eqv_in_memory = hash_arg_in_memory;
6644 src_eqv_in_struct = hash_arg_in_struct;
6645 }
6646
6647 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
6648 value of the INNER register, not the destination. So it is not
3826a3da 6649 a valid substitution for the source. But save it for later. */
7afe21cc
RK
6650 if (GET_CODE (dest) == STRICT_LOW_PART)
6651 src_eqv_here = 0;
6652 else
6653 src_eqv_here = src_eqv;
6654
6655 /* Simplify and foldable subexpressions in SRC. Then get the fully-
6656 simplified result, which may not necessarily be valid. */
6657 src_folded = fold_rtx (src, insn);
6658
e6a125a0
RK
6659#if 0
6660 /* ??? This caused bad code to be generated for the m68k port with -O2.
6661 Suppose src is (CONST_INT -1), and that after truncation src_folded
6662 is (CONST_INT 3). Suppose src_folded is then used for src_const.
6663 At the end we will add src and src_const to the same equivalence
6664 class. We now have 3 and -1 on the same equivalence class. This
6665 causes later instructions to be mis-optimized. */
7afe21cc
RK
6666 /* If storing a constant in a bitfield, pre-truncate the constant
6667 so we will be able to record it later. */
6668 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6669 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6670 {
6671 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6672
6673 if (GET_CODE (src) == CONST_INT
6674 && GET_CODE (width) == CONST_INT
906c4e36
RK
6675 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6676 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6677 src_folded
6678 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
6679 << INTVAL (width)) - 1));
7afe21cc 6680 }
e6a125a0 6681#endif
7afe21cc
RK
6682
6683 /* Compute SRC's hash code, and also notice if it
6684 should not be recorded at all. In that case,
6685 prevent any further processing of this assignment. */
6686 do_not_record = 0;
6687 hash_arg_in_memory = 0;
6688 hash_arg_in_struct = 0;
6689
6690 sets[i].src = src;
2197a88a 6691 sets[i].src_hash = HASH (src, mode);
7afe21cc
RK
6692 sets[i].src_volatile = do_not_record;
6693 sets[i].src_in_memory = hash_arg_in_memory;
6694 sets[i].src_in_struct = hash_arg_in_struct;
6695
50196afa
RK
6696 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
6697 a pseudo that is set more than once, do not record SRC. Using
6698 SRC as a replacement for anything else will be incorrect in that
6699 situation. Note that this usually occurs only for stack slots,
956d6950 6700 in which case all the RTL would be referring to SRC, so we don't
50196afa
RK
6701 lose any optimization opportunities by not having SRC in the
6702 hash table. */
6703
6704 if (GET_CODE (src) == MEM
6705 && find_reg_note (insn, REG_EQUIV, src) != 0
6706 && GET_CODE (dest) == REG
6707 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
b1f21e0a 6708 && REG_N_SETS (REGNO (dest)) != 1)
50196afa
RK
6709 sets[i].src_volatile = 1;
6710
0dadecf6
RK
6711#if 0
6712 /* It is no longer clear why we used to do this, but it doesn't
6713 appear to still be needed. So let's try without it since this
6714 code hurts cse'ing widened ops. */
7afe21cc
RK
6715 /* If source is a perverse subreg (such as QI treated as an SI),
6716 treat it as volatile. It may do the work of an SI in one context
6717 where the extra bits are not being used, but cannot replace an SI
6718 in general. */
6719 if (GET_CODE (src) == SUBREG
6720 && (GET_MODE_SIZE (GET_MODE (src))
6721 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6722 sets[i].src_volatile = 1;
0dadecf6 6723#endif
7afe21cc
RK
6724
6725 /* Locate all possible equivalent forms for SRC. Try to replace
6726 SRC in the insn with each cheaper equivalent.
6727
6728 We have the following types of equivalents: SRC itself, a folded
6729 version, a value given in a REG_EQUAL note, or a value related
6730 to a constant.
6731
6732 Each of these equivalents may be part of an additional class
6733 of equivalents (if more than one is in the table, they must be in
6734 the same class; we check for this).
6735
6736 If the source is volatile, we don't do any table lookups.
6737
6738 We note any constant equivalent for possible later use in a
6739 REG_NOTE. */
6740
6741 if (!sets[i].src_volatile)
2197a88a 6742 elt = lookup (src, sets[i].src_hash, mode);
7afe21cc
RK
6743
6744 sets[i].src_elt = elt;
6745
6746 if (elt && src_eqv_here && src_eqv_elt)
6747 {
6748 if (elt->first_same_value != src_eqv_elt->first_same_value)
6749 {
6750 /* The REG_EQUAL is indicating that two formerly distinct
6751 classes are now equivalent. So merge them. */
6752 merge_equiv_classes (elt, src_eqv_elt);
2197a88a
RK
6753 src_eqv_hash = HASH (src_eqv, elt->mode);
6754 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
7afe21cc
RK
6755 }
6756
6757 src_eqv_here = 0;
6758 }
6759
6760 else if (src_eqv_elt)
6761 elt = src_eqv_elt;
6762
6763 /* Try to find a constant somewhere and record it in `src_const'.
6764 Record its table element, if any, in `src_const_elt'. Look in
6765 any known equivalences first. (If the constant is not in the
2197a88a 6766 table, also set `sets[i].src_const_hash'). */
7afe21cc
RK
6767 if (elt)
6768 for (p = elt->first_same_value; p; p = p->next_same_value)
6769 if (p->is_const)
6770 {
6771 src_const = p->exp;
6772 src_const_elt = elt;
6773 break;
6774 }
6775
6776 if (src_const == 0
6777 && (CONSTANT_P (src_folded)
6778 /* Consider (minus (label_ref L1) (label_ref L2)) as
6779 "constant" here so we will record it. This allows us
6780 to fold switch statements when an ADDR_DIFF_VEC is used. */
6781 || (GET_CODE (src_folded) == MINUS
6782 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6783 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6784 src_const = src_folded, src_const_elt = elt;
6785 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6786 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6787
6788 /* If we don't know if the constant is in the table, get its
6789 hash code and look it up. */
6790 if (src_const && src_const_elt == 0)
6791 {
2197a88a
RK
6792 sets[i].src_const_hash = HASH (src_const, mode);
6793 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
7afe21cc
RK
6794 }
6795
6796 sets[i].src_const = src_const;
6797 sets[i].src_const_elt = src_const_elt;
6798
6799 /* If the constant and our source are both in the table, mark them as
6800 equivalent. Otherwise, if a constant is in the table but the source
6801 isn't, set ELT to it. */
6802 if (src_const_elt && elt
6803 && src_const_elt->first_same_value != elt->first_same_value)
6804 merge_equiv_classes (elt, src_const_elt);
6805 else if (src_const_elt && elt == 0)
6806 elt = src_const_elt;
6807
6808 /* See if there is a register linearly related to a constant
6809 equivalent of SRC. */
6810 if (src_const
6811 && (GET_CODE (src_const) == CONST
6812 || (src_const_elt && src_const_elt->related_value != 0)))
6813 {
6814 src_related = use_related_value (src_const, src_const_elt);
6815 if (src_related)
6816 {
6817 struct table_elt *src_related_elt
6818 = lookup (src_related, HASH (src_related, mode), mode);
6819 if (src_related_elt && elt)
6820 {
6821 if (elt->first_same_value
6822 != src_related_elt->first_same_value)
6823 /* This can occur when we previously saw a CONST
6824 involving a SYMBOL_REF and then see the SYMBOL_REF
6825 twice. Merge the involved classes. */
6826 merge_equiv_classes (elt, src_related_elt);
6827
6828 src_related = 0;
6829 src_related_elt = 0;
6830 }
6831 else if (src_related_elt && elt == 0)
6832 elt = src_related_elt;
6833 }
6834 }
6835
e4600702
RK
6836 /* See if we have a CONST_INT that is already in a register in a
6837 wider mode. */
6838
6839 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6840 && GET_MODE_CLASS (mode) == MODE_INT
6841 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6842 {
6843 enum machine_mode wider_mode;
6844
6845 for (wider_mode = GET_MODE_WIDER_MODE (mode);
6846 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6847 && src_related == 0;
6848 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6849 {
6850 struct table_elt *const_elt
6851 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6852
6853 if (const_elt == 0)
6854 continue;
6855
6856 for (const_elt = const_elt->first_same_value;
6857 const_elt; const_elt = const_elt->next_same_value)
6858 if (GET_CODE (const_elt->exp) == REG)
6859 {
6860 src_related = gen_lowpart_if_possible (mode,
6861 const_elt->exp);
6862 break;
6863 }
6864 }
6865 }
6866
d45cf215
RS
6867 /* Another possibility is that we have an AND with a constant in
6868 a mode narrower than a word. If so, it might have been generated
6869 as part of an "if" which would narrow the AND. If we already
6870 have done the AND in a wider mode, we can use a SUBREG of that
6871 value. */
6872
6873 if (flag_expensive_optimizations && ! src_related
6874 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6875 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6876 {
6877 enum machine_mode tmode;
38a448ca 6878 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
d45cf215
RS
6879
6880 for (tmode = GET_MODE_WIDER_MODE (mode);
6881 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6882 tmode = GET_MODE_WIDER_MODE (tmode))
6883 {
6884 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6885 struct table_elt *larger_elt;
6886
6887 if (inner)
6888 {
6889 PUT_MODE (new_and, tmode);
6890 XEXP (new_and, 0) = inner;
6891 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6892 if (larger_elt == 0)
6893 continue;
6894
6895 for (larger_elt = larger_elt->first_same_value;
6896 larger_elt; larger_elt = larger_elt->next_same_value)
6897 if (GET_CODE (larger_elt->exp) == REG)
6898 {
6899 src_related
6900 = gen_lowpart_if_possible (mode, larger_elt->exp);
6901 break;
6902 }
6903
6904 if (src_related)
6905 break;
6906 }
6907 }
6908 }
7bac1be0
RK
6909
6910#ifdef LOAD_EXTEND_OP
6911 /* See if a MEM has already been loaded with a widening operation;
6912 if it has, we can use a subreg of that. Many CISC machines
6913 also have such operations, but this is only likely to be
6914 beneficial these machines. */
6915
6916 if (flag_expensive_optimizations && src_related == 0
6917 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6918 && GET_MODE_CLASS (mode) == MODE_INT
6919 && GET_CODE (src) == MEM && ! do_not_record
6920 && LOAD_EXTEND_OP (mode) != NIL)
6921 {
6922 enum machine_mode tmode;
6923
6924 /* Set what we are trying to extend and the operation it might
6925 have been extended with. */
6926 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
6927 XEXP (memory_extend_rtx, 0) = src;
6928
6929 for (tmode = GET_MODE_WIDER_MODE (mode);
6930 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6931 tmode = GET_MODE_WIDER_MODE (tmode))
6932 {
6933 struct table_elt *larger_elt;
6934
6935 PUT_MODE (memory_extend_rtx, tmode);
6936 larger_elt = lookup (memory_extend_rtx,
6937 HASH (memory_extend_rtx, tmode), tmode);
6938 if (larger_elt == 0)
6939 continue;
6940
6941 for (larger_elt = larger_elt->first_same_value;
6942 larger_elt; larger_elt = larger_elt->next_same_value)
6943 if (GET_CODE (larger_elt->exp) == REG)
6944 {
6945 src_related = gen_lowpart_if_possible (mode,
6946 larger_elt->exp);
6947 break;
6948 }
6949
6950 if (src_related)
6951 break;
6952 }
6953 }
6954#endif /* LOAD_EXTEND_OP */
6955
7afe21cc
RK
6956 if (src == src_folded)
6957 src_folded = 0;
6958
6959 /* At this point, ELT, if non-zero, points to a class of expressions
6960 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
6961 and SRC_RELATED, if non-zero, each contain additional equivalent
6962 expressions. Prune these latter expressions by deleting expressions
6963 already in the equivalence class.
6964
6965 Check for an equivalent identical to the destination. If found,
6966 this is the preferred equivalent since it will likely lead to
6967 elimination of the insn. Indicate this by placing it in
6968 `src_related'. */
6969
6970 if (elt) elt = elt->first_same_value;
6971 for (p = elt; p; p = p->next_same_value)
6972 {
6973 enum rtx_code code = GET_CODE (p->exp);
6974
6975 /* If the expression is not valid, ignore it. Then we do not
6976 have to check for validity below. In most cases, we can use
6977 `rtx_equal_p', since canonicalization has already been done. */
6978 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
6979 continue;
6980
5a03c8c4
RK
6981 /* Also skip paradoxical subregs, unless that's what we're
6982 looking for. */
6983 if (code == SUBREG
6984 && (GET_MODE_SIZE (GET_MODE (p->exp))
6985 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
6986 && ! (src != 0
6987 && GET_CODE (src) == SUBREG
6988 && GET_MODE (src) == GET_MODE (p->exp)
6989 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
6990 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
6991 continue;
6992
7afe21cc
RK
6993 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
6994 src = 0;
6995 else if (src_folded && GET_CODE (src_folded) == code
6996 && rtx_equal_p (src_folded, p->exp))
6997 src_folded = 0;
6998 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
6999 && rtx_equal_p (src_eqv_here, p->exp))
7000 src_eqv_here = 0;
7001 else if (src_related && GET_CODE (src_related) == code
7002 && rtx_equal_p (src_related, p->exp))
7003 src_related = 0;
7004
7005 /* This is the same as the destination of the insns, we want
7006 to prefer it. Copy it to src_related. The code below will
7007 then give it a negative cost. */
7008 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
7009 src_related = dest;
7010
7011 }
7012
7013 /* Find the cheapest valid equivalent, trying all the available
7014 possibilities. Prefer items not in the hash table to ones
7015 that are when they are equal cost. Note that we can never
7016 worsen an insn as the current contents will also succeed.
05c33dd8 7017 If we find an equivalent identical to the destination, use it as best,
0f41302f 7018 since this insn will probably be eliminated in that case. */
7afe21cc
RK
7019 if (src)
7020 {
7021 if (rtx_equal_p (src, dest))
7022 src_cost = -1;
7023 else
7024 src_cost = COST (src);
7025 }
7026
7027 if (src_eqv_here)
7028 {
7029 if (rtx_equal_p (src_eqv_here, dest))
7030 src_eqv_cost = -1;
7031 else
7032 src_eqv_cost = COST (src_eqv_here);
7033 }
7034
7035 if (src_folded)
7036 {
7037 if (rtx_equal_p (src_folded, dest))
7038 src_folded_cost = -1;
7039 else
7040 src_folded_cost = COST (src_folded);
7041 }
7042
7043 if (src_related)
7044 {
7045 if (rtx_equal_p (src_related, dest))
7046 src_related_cost = -1;
7047 else
7048 src_related_cost = COST (src_related);
7049 }
7050
7051 /* If this was an indirect jump insn, a known label will really be
7052 cheaper even though it looks more expensive. */
7053 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
7054 src_folded = src_const, src_folded_cost = -1;
7055
7056 /* Terminate loop when replacement made. This must terminate since
7057 the current contents will be tested and will always be valid. */
7058 while (1)
7059 {
7bd8b2a8 7060 rtx trial, old_src;
7afe21cc
RK
7061
7062 /* Skip invalid entries. */
7063 while (elt && GET_CODE (elt->exp) != REG
7064 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7065 elt = elt->next_same_value;
5a03c8c4
RK
7066
7067 /* A paradoxical subreg would be bad here: it'll be the right
7068 size, but later may be adjusted so that the upper bits aren't
7069 what we want. So reject it. */
7070 if (elt != 0
7071 && GET_CODE (elt->exp) == SUBREG
7072 && (GET_MODE_SIZE (GET_MODE (elt->exp))
7073 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
7074 /* It is okay, though, if the rtx we're trying to match
7075 will ignore any of the bits we can't predict. */
7076 && ! (src != 0
7077 && GET_CODE (src) == SUBREG
7078 && GET_MODE (src) == GET_MODE (elt->exp)
7079 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
7080 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
7081 {
7082 elt = elt->next_same_value;
7083 continue;
7084 }
7afe21cc
RK
7085
7086 if (elt) src_elt_cost = elt->cost;
7087
7088 /* Find cheapest and skip it for the next time. For items
7089 of equal cost, use this order:
7090 src_folded, src, src_eqv, src_related and hash table entry. */
7091 if (src_folded_cost <= src_cost
7092 && src_folded_cost <= src_eqv_cost
7093 && src_folded_cost <= src_related_cost
7094 && src_folded_cost <= src_elt_cost)
7095 {
7096 trial = src_folded, src_folded_cost = 10000;
7097 if (src_folded_force_flag)
7098 trial = force_const_mem (mode, trial);
7099 }
7100 else if (src_cost <= src_eqv_cost
7101 && src_cost <= src_related_cost
7102 && src_cost <= src_elt_cost)
7103 trial = src, src_cost = 10000;
7104 else if (src_eqv_cost <= src_related_cost
7105 && src_eqv_cost <= src_elt_cost)
0af62b41 7106 trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
7afe21cc 7107 else if (src_related_cost <= src_elt_cost)
0af62b41 7108 trial = copy_rtx (src_related), src_related_cost = 10000;
7afe21cc
RK
7109 else
7110 {
05c33dd8 7111 trial = copy_rtx (elt->exp);
7afe21cc
RK
7112 elt = elt->next_same_value;
7113 src_elt_cost = 10000;
7114 }
7115
7116 /* We don't normally have an insn matching (set (pc) (pc)), so
7117 check for this separately here. We will delete such an
7118 insn below.
7119
7120 Tablejump insns contain a USE of the table, so simply replacing
7121 the operand with the constant won't match. This is simply an
7122 unconditional branch, however, and is therefore valid. Just
7123 insert the substitution here and we will delete and re-emit
7124 the insn later. */
7125
7bd8b2a8
JL
7126 /* Keep track of the original SET_SRC so that we can fix notes
7127 on libcall instructions. */
7128 old_src = SET_SRC (sets[i].rtl);
7129
7afe21cc
RK
7130 if (n_sets == 1 && dest == pc_rtx
7131 && (trial == pc_rtx
7132 || (GET_CODE (trial) == LABEL_REF
7133 && ! condjump_p (insn))))
7134 {
7135 /* If TRIAL is a label in front of a jump table, we are
7136 really falling through the switch (this is how casesi
7137 insns work), so we must branch around the table. */
7138 if (GET_CODE (trial) == CODE_LABEL
7139 && NEXT_INSN (trial) != 0
7140 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
7141 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
7142 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
7143
38a448ca 7144 trial = gen_rtx_LABEL_REF (Pmode, get_label_after (trial));
7afe21cc
RK
7145
7146 SET_SRC (sets[i].rtl) = trial;
44333223 7147 cse_jumps_altered = 1;
7afe21cc
RK
7148 break;
7149 }
7150
7151 /* Look for a substitution that makes a valid insn. */
7152 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
05c33dd8 7153 {
7bd8b2a8
JL
7154 /* If we just made a substitution inside a libcall, then we
7155 need to make the same substitution in any notes attached
7156 to the RETVAL insn. */
1ed0205e
VM
7157 if (libcall_insn
7158 && (GET_CODE (old_src) == REG
7159 || GET_CODE (old_src) == SUBREG
7160 || GET_CODE (old_src) == MEM))
7bd8b2a8
JL
7161 replace_rtx (REG_NOTES (libcall_insn), old_src,
7162 canon_reg (SET_SRC (sets[i].rtl), insn));
7163
7722328e
RK
7164 /* The result of apply_change_group can be ignored; see
7165 canon_reg. */
7166
7167 validate_change (insn, &SET_SRC (sets[i].rtl),
7168 canon_reg (SET_SRC (sets[i].rtl), insn),
7169 1);
6702af89 7170 apply_change_group ();
05c33dd8
RK
7171 break;
7172 }
7afe21cc
RK
7173
7174 /* If we previously found constant pool entries for
7175 constants and this is a constant, try making a
7176 pool entry. Put it in src_folded unless we already have done
7177 this since that is where it likely came from. */
7178
7179 else if (constant_pool_entries_cost
7180 && CONSTANT_P (trial)
1bbd065b
RK
7181 && ! (GET_CODE (trial) == CONST
7182 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
7183 && (src_folded == 0
7184 || (GET_CODE (src_folded) != MEM
7185 && ! src_folded_force_flag))
9ae8ffe7
JL
7186 && GET_MODE_CLASS (mode) != MODE_CC
7187 && mode != VOIDmode)
7afe21cc
RK
7188 {
7189 src_folded_force_flag = 1;
7190 src_folded = trial;
7191 src_folded_cost = constant_pool_entries_cost;
7192 }
7193 }
7194
7195 src = SET_SRC (sets[i].rtl);
7196
7197 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
7198 However, there is an important exception: If both are registers
7199 that are not the head of their equivalence class, replace SET_SRC
7200 with the head of the class. If we do not do this, we will have
7201 both registers live over a portion of the basic block. This way,
7202 their lifetimes will likely abut instead of overlapping. */
7203 if (GET_CODE (dest) == REG
7204 && REGNO_QTY_VALID_P (REGNO (dest))
30f72379
MM
7205 && qty_mode[REG_QTY (REGNO (dest))] == GET_MODE (dest)
7206 && qty_first_reg[REG_QTY (REGNO (dest))] != REGNO (dest)
7afe21cc
RK
7207 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
7208 /* Don't do this if the original insn had a hard reg as
7209 SET_SRC. */
7210 && (GET_CODE (sets[i].src) != REG
7211 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
7212 /* We can't call canon_reg here because it won't do anything if
7213 SRC is a hard register. */
7214 {
30f72379 7215 int first = qty_first_reg[REG_QTY (REGNO (src))];
759bd8b7
R
7216 rtx new_src
7217 = (first >= FIRST_PSEUDO_REGISTER
7218 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
7219
7220 /* We must use validate-change even for this, because this
7221 might be a special no-op instruction, suitable only to
7222 tag notes onto. */
7223 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
7224 {
7225 src = new_src;
7226 /* If we had a constant that is cheaper than what we are now
7227 setting SRC to, use that constant. We ignored it when we
7228 thought we could make this into a no-op. */
7229 if (src_const && COST (src_const) < COST (src)
7230 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const,
7231 0))
7232 src = src_const;
7233 }
7afe21cc
RK
7234 }
7235
7236 /* If we made a change, recompute SRC values. */
7237 if (src != sets[i].src)
7238 {
7239 do_not_record = 0;
7240 hash_arg_in_memory = 0;
7241 hash_arg_in_struct = 0;
7242 sets[i].src = src;
2197a88a 7243 sets[i].src_hash = HASH (src, mode);
7afe21cc
RK
7244 sets[i].src_volatile = do_not_record;
7245 sets[i].src_in_memory = hash_arg_in_memory;
7246 sets[i].src_in_struct = hash_arg_in_struct;
2197a88a 7247 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
7afe21cc
RK
7248 }
7249
7250 /* If this is a single SET, we are setting a register, and we have an
7251 equivalent constant, we want to add a REG_NOTE. We don't want
7252 to write a REG_EQUAL note for a constant pseudo since verifying that
d45cf215 7253 that pseudo hasn't been eliminated is a pain. Such a note also
ac7ef8d5
FS
7254 won't help anything.
7255
7256 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
7257 which can be created for a reference to a compile time computable
7258 entry in a jump table. */
7259
7afe21cc 7260 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
ac7ef8d5
FS
7261 && GET_CODE (src_const) != REG
7262 && ! (GET_CODE (src_const) == CONST
7263 && GET_CODE (XEXP (src_const, 0)) == MINUS
7264 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
7265 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
7afe21cc 7266 {
92f9aa51 7267 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7afe21cc 7268
51e2a951
AS
7269 /* Make sure that the rtx is not shared with any other insn. */
7270 src_const = copy_rtx (src_const);
7271
7afe21cc
RK
7272 /* Record the actual constant value in a REG_EQUAL note, making
7273 a new one if one does not already exist. */
7274 if (tem)
7275 XEXP (tem, 0) = src_const;
7276 else
38a448ca
RH
7277 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
7278 src_const, REG_NOTES (insn));
7afe21cc
RK
7279
7280 /* If storing a constant value in a register that
7281 previously held the constant value 0,
7282 record this fact with a REG_WAS_0 note on this insn.
7283
7284 Note that the *register* is required to have previously held 0,
7285 not just any register in the quantity and we must point to the
7286 insn that set that register to zero.
7287
7288 Rather than track each register individually, we just see if
7289 the last set for this quantity was for this register. */
7290
7291 if (REGNO_QTY_VALID_P (REGNO (dest))
30f72379 7292 && qty_const[REG_QTY (REGNO (dest))] == const0_rtx)
7afe21cc
RK
7293 {
7294 /* See if we previously had a REG_WAS_0 note. */
906c4e36 7295 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
30f72379 7296 rtx const_insn = qty_const_insn[REG_QTY (REGNO (dest))];
7afe21cc
RK
7297
7298 if ((tem = single_set (const_insn)) != 0
7299 && rtx_equal_p (SET_DEST (tem), dest))
7300 {
7301 if (note)
7302 XEXP (note, 0) = const_insn;
7303 else
38a448ca
RH
7304 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_WAS_0,
7305 const_insn,
7306 REG_NOTES (insn));
7afe21cc
RK
7307 }
7308 }
7309 }
7310
7311 /* Now deal with the destination. */
7312 do_not_record = 0;
7313 sets[i].inner_dest_loc = &SET_DEST (sets[0].rtl);
7314
7315 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
7316 to the MEM or REG within it. */
7317 while (GET_CODE (dest) == SIGN_EXTRACT
7318 || GET_CODE (dest) == ZERO_EXTRACT
7319 || GET_CODE (dest) == SUBREG
7320 || GET_CODE (dest) == STRICT_LOW_PART)
7321 {
7322 sets[i].inner_dest_loc = &XEXP (dest, 0);
7323 dest = XEXP (dest, 0);
7324 }
7325
7326 sets[i].inner_dest = dest;
7327
7328 if (GET_CODE (dest) == MEM)
7329 {
9ae8ffe7
JL
7330#ifdef PUSH_ROUNDING
7331 /* Stack pushes invalidate the stack pointer. */
7332 rtx addr = XEXP (dest, 0);
7333 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7334 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7335 && XEXP (addr, 0) == stack_pointer_rtx)
7336 invalidate (stack_pointer_rtx, Pmode);
7337#endif
7afe21cc 7338 dest = fold_rtx (dest, insn);
7afe21cc
RK
7339 }
7340
7341 /* Compute the hash code of the destination now,
7342 before the effects of this instruction are recorded,
7343 since the register values used in the address computation
7344 are those before this instruction. */
2197a88a 7345 sets[i].dest_hash = HASH (dest, mode);
7afe21cc
RK
7346
7347 /* Don't enter a bit-field in the hash table
7348 because the value in it after the store
7349 may not equal what was stored, due to truncation. */
7350
7351 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
7352 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
7353 {
7354 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
7355
7356 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
7357 && GET_CODE (width) == CONST_INT
906c4e36
RK
7358 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
7359 && ! (INTVAL (src_const)
7360 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
7afe21cc
RK
7361 /* Exception: if the value is constant,
7362 and it won't be truncated, record it. */
7363 ;
7364 else
7365 {
7366 /* This is chosen so that the destination will be invalidated
7367 but no new value will be recorded.
7368 We must invalidate because sometimes constant
7369 values can be recorded for bitfields. */
7370 sets[i].src_elt = 0;
7371 sets[i].src_volatile = 1;
7372 src_eqv = 0;
7373 src_eqv_elt = 0;
7374 }
7375 }
7376
7377 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
7378 the insn. */
7379 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
7380 {
7381 PUT_CODE (insn, NOTE);
7382 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7383 NOTE_SOURCE_FILE (insn) = 0;
7384 cse_jumps_altered = 1;
7385 /* One less use of the label this insn used to jump to. */
85c3ba60
JL
7386 if (JUMP_LABEL (insn) != 0)
7387 --LABEL_NUSES (JUMP_LABEL (insn));
7afe21cc
RK
7388 /* No more processing for this set. */
7389 sets[i].rtl = 0;
7390 }
7391
7392 /* If this SET is now setting PC to a label, we know it used to
7393 be a conditional or computed branch. So we see if we can follow
7394 it. If it was a computed branch, delete it and re-emit. */
7395 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
7396 {
7397 rtx p;
7398
7399 /* If this is not in the format for a simple branch and
7400 we are the only SET in it, re-emit it. */
7401 if (! simplejump_p (insn) && n_sets == 1)
7402 {
7403 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
7404 JUMP_LABEL (new) = XEXP (src, 0);
7405 LABEL_NUSES (XEXP (src, 0))++;
7406 delete_insn (insn);
7407 insn = new;
7408 }
31dcf83f
RS
7409 else
7410 /* Otherwise, force rerecognition, since it probably had
7411 a different pattern before.
7412 This shouldn't really be necessary, since whatever
7413 changed the source value above should have done this.
7414 Until the right place is found, might as well do this here. */
7415 INSN_CODE (insn) = -1;
7afe21cc
RK
7416
7417 /* Now that we've converted this jump to an unconditional jump,
7418 there is dead code after it. Delete the dead code until we
7419 reach a BARRIER, the end of the function, or a label. Do
7420 not delete NOTEs except for NOTE_INSN_DELETED since later
7421 phases assume these notes are retained. */
7422
312f6255
GK
7423 never_reached_warning (insn);
7424
7afe21cc
RK
7425 p = insn;
7426
7427 while (NEXT_INSN (p) != 0
7428 && GET_CODE (NEXT_INSN (p)) != BARRIER
7429 && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
7430 {
eec9ef57
JL
7431 /* Note, we must update P with the return value from
7432 delete_insn, otherwise we could get an infinite loop
7433 if NEXT_INSN (p) had INSN_DELETED_P set. */
7afe21cc
RK
7434 if (GET_CODE (NEXT_INSN (p)) != NOTE
7435 || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
778e0677 7436 p = PREV_INSN (delete_insn (NEXT_INSN (p)));
7afe21cc
RK
7437 else
7438 p = NEXT_INSN (p);
7439 }
7440
7441 /* If we don't have a BARRIER immediately after INSN, put one there.
7442 Much code assumes that there are no NOTEs between a JUMP_INSN and
7443 BARRIER. */
7444
7445 if (NEXT_INSN (insn) == 0
7446 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
783e5bca 7447 emit_barrier_before (NEXT_INSN (insn));
7afe21cc
RK
7448
7449 /* We might have two BARRIERs separated by notes. Delete the second
7450 one if so. */
7451
538b78e7
RS
7452 if (p != insn && NEXT_INSN (p) != 0
7453 && GET_CODE (NEXT_INSN (p)) == BARRIER)
7afe21cc
RK
7454 delete_insn (NEXT_INSN (p));
7455
7456 cse_jumps_altered = 1;
7457 sets[i].rtl = 0;
7458 }
7459
c2a47e48
RK
7460 /* If destination is volatile, invalidate it and then do no further
7461 processing for this assignment. */
7afe21cc
RK
7462
7463 else if (do_not_record)
c2a47e48
RK
7464 {
7465 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7466 || GET_CODE (dest) == MEM)
bb4034b3 7467 invalidate (dest, VOIDmode);
2708da92
RS
7468 else if (GET_CODE (dest) == STRICT_LOW_PART
7469 || GET_CODE (dest) == ZERO_EXTRACT)
bb4034b3 7470 invalidate (XEXP (dest, 0), GET_MODE (dest));
c2a47e48
RK
7471 sets[i].rtl = 0;
7472 }
7afe21cc
RK
7473
7474 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
2197a88a 7475 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
7afe21cc
RK
7476
7477#ifdef HAVE_cc0
7478 /* If setting CC0, record what it was set to, or a constant, if it
7479 is equivalent to a constant. If it is being set to a floating-point
7480 value, make a COMPARE with the appropriate constant of 0. If we
7481 don't do this, later code can interpret this as a test against
7482 const0_rtx, which can cause problems if we try to put it into an
7483 insn as a floating-point operand. */
7484 if (dest == cc0_rtx)
7485 {
7486 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
7487 this_insn_cc0_mode = mode;
cbf6a543 7488 if (FLOAT_MODE_P (mode))
38a448ca
RH
7489 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
7490 CONST0_RTX (mode));
7afe21cc
RK
7491 }
7492#endif
7493 }
7494
7495 /* Now enter all non-volatile source expressions in the hash table
7496 if they are not already present.
7497 Record their equivalence classes in src_elt.
7498 This way we can insert the corresponding destinations into
7499 the same classes even if the actual sources are no longer in them
7500 (having been invalidated). */
7501
7502 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
7503 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
7504 {
7505 register struct table_elt *elt;
7506 register struct table_elt *classp = sets[0].src_elt;
7507 rtx dest = SET_DEST (sets[0].rtl);
7508 enum machine_mode eqvmode = GET_MODE (dest);
7509
7510 if (GET_CODE (dest) == STRICT_LOW_PART)
7511 {
7512 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
7513 classp = 0;
7514 }
7515 if (insert_regs (src_eqv, classp, 0))
8ae2b8f6
JW
7516 {
7517 rehash_using_reg (src_eqv);
7518 src_eqv_hash = HASH (src_eqv, eqvmode);
7519 }
2197a88a 7520 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7afe21cc
RK
7521 elt->in_memory = src_eqv_in_memory;
7522 elt->in_struct = src_eqv_in_struct;
7523 src_eqv_elt = elt;
f7911249
JW
7524
7525 /* Check to see if src_eqv_elt is the same as a set source which
7526 does not yet have an elt, and if so set the elt of the set source
7527 to src_eqv_elt. */
7528 for (i = 0; i < n_sets; i++)
7529 if (sets[i].rtl && sets[i].src_elt == 0
7530 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
7531 sets[i].src_elt = src_eqv_elt;
7afe21cc
RK
7532 }
7533
7534 for (i = 0; i < n_sets; i++)
7535 if (sets[i].rtl && ! sets[i].src_volatile
7536 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
7537 {
7538 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
7539 {
7540 /* REG_EQUAL in setting a STRICT_LOW_PART
7541 gives an equivalent for the entire destination register,
7542 not just for the subreg being stored in now.
7543 This is a more interesting equivalence, so we arrange later
7544 to treat the entire reg as the destination. */
7545 sets[i].src_elt = src_eqv_elt;
2197a88a 7546 sets[i].src_hash = src_eqv_hash;
7afe21cc
RK
7547 }
7548 else
7549 {
7550 /* Insert source and constant equivalent into hash table, if not
7551 already present. */
7552 register struct table_elt *classp = src_eqv_elt;
7553 register rtx src = sets[i].src;
7554 register rtx dest = SET_DEST (sets[i].rtl);
7555 enum machine_mode mode
7556 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
7557
15c68354
R
7558 /* Don't put a hard register source into the table if this is
7559 the last insn of a libcall. */
7560 if (sets[i].src_elt == 0
7561 && (GET_CODE (src) != REG
7562 || REGNO (src) >= FIRST_PSEUDO_REGISTER
7563 || ! find_reg_note (insn, REG_RETVAL, NULL_RTX)))
7afe21cc
RK
7564 {
7565 register struct table_elt *elt;
7566
7567 /* Note that these insert_regs calls cannot remove
7568 any of the src_elt's, because they would have failed to
7569 match if not still valid. */
7570 if (insert_regs (src, classp, 0))
8ae2b8f6
JW
7571 {
7572 rehash_using_reg (src);
7573 sets[i].src_hash = HASH (src, mode);
7574 }
2197a88a 7575 elt = insert (src, classp, sets[i].src_hash, mode);
7afe21cc
RK
7576 elt->in_memory = sets[i].src_in_memory;
7577 elt->in_struct = sets[i].src_in_struct;
7578 sets[i].src_elt = classp = elt;
7579 }
7580
7581 if (sets[i].src_const && sets[i].src_const_elt == 0
7582 && src != sets[i].src_const
7583 && ! rtx_equal_p (sets[i].src_const, src))
7584 sets[i].src_elt = insert (sets[i].src_const, classp,
2197a88a 7585 sets[i].src_const_hash, mode);
7afe21cc
RK
7586 }
7587 }
7588 else if (sets[i].src_elt == 0)
7589 /* If we did not insert the source into the hash table (e.g., it was
7590 volatile), note the equivalence class for the REG_EQUAL value, if any,
7591 so that the destination goes into that class. */
7592 sets[i].src_elt = src_eqv_elt;
7593
9ae8ffe7 7594 invalidate_from_clobbers (x);
77fa0940
RK
7595
7596 /* Some registers are invalidated by subroutine calls. Memory is
7597 invalidated by non-constant calls. */
7598
7afe21cc
RK
7599 if (GET_CODE (insn) == CALL_INSN)
7600 {
77fa0940 7601 if (! CONST_CALL_P (insn))
9ae8ffe7 7602 invalidate_memory ();
7afe21cc
RK
7603 invalidate_for_call ();
7604 }
7605
7606 /* Now invalidate everything set by this instruction.
7607 If a SUBREG or other funny destination is being set,
7608 sets[i].rtl is still nonzero, so here we invalidate the reg
7609 a part of which is being set. */
7610
7611 for (i = 0; i < n_sets; i++)
7612 if (sets[i].rtl)
7613 {
bb4034b3
JW
7614 /* We can't use the inner dest, because the mode associated with
7615 a ZERO_EXTRACT is significant. */
7616 register rtx dest = SET_DEST (sets[i].rtl);
7afe21cc
RK
7617
7618 /* Needed for registers to remove the register from its
7619 previous quantity's chain.
7620 Needed for memory if this is a nonvarying address, unless
7621 we have just done an invalidate_memory that covers even those. */
7622 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
9ae8ffe7 7623 || GET_CODE (dest) == MEM)
bb4034b3 7624 invalidate (dest, VOIDmode);
2708da92
RS
7625 else if (GET_CODE (dest) == STRICT_LOW_PART
7626 || GET_CODE (dest) == ZERO_EXTRACT)
bb4034b3 7627 invalidate (XEXP (dest, 0), GET_MODE (dest));
7afe21cc
RK
7628 }
7629
01e752d3
JL
7630 /* A volatile ASM invalidates everything. */
7631 if (GET_CODE (insn) == INSN
7632 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
7633 && MEM_VOLATILE_P (PATTERN (insn)))
7634 flush_hash_table ();
7635
7afe21cc
RK
7636 /* Make sure registers mentioned in destinations
7637 are safe for use in an expression to be inserted.
7638 This removes from the hash table
7639 any invalid entry that refers to one of these registers.
7640
7641 We don't care about the return value from mention_regs because
7642 we are going to hash the SET_DEST values unconditionally. */
7643
7644 for (i = 0; i < n_sets; i++)
34c73909
R
7645 {
7646 if (sets[i].rtl)
7647 {
7648 rtx x = SET_DEST (sets[i].rtl);
7649
7650 if (GET_CODE (x) != REG)
7651 mention_regs (x);
7652 else
7653 {
7654 /* We used to rely on all references to a register becoming
7655 inaccessible when a register changes to a new quantity,
7656 since that changes the hash code. However, that is not
7657 safe, since after NBUCKETS new quantities we get a
7658 hash 'collision' of a register with its own invalid
7659 entries. And since SUBREGs have been changed not to
7660 change their hash code with the hash code of the register,
7661 it wouldn't work any longer at all. So we have to check
7662 for any invalid references lying around now.
7663 This code is similar to the REG case in mention_regs,
7664 but it knows that reg_tick has been incremented, and
7665 it leaves reg_in_table as -1 . */
7666 register int regno = REGNO (x);
7667 register int endregno
7668 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
7669 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
7670 int i;
7671
7672 for (i = regno; i < endregno; i++)
7673 {
30f72379 7674 if (REG_IN_TABLE (i) >= 0)
34c73909
R
7675 {
7676 remove_invalid_refs (i);
30f72379 7677 REG_IN_TABLE (i) = -1;
34c73909
R
7678 }
7679 }
7680 }
7681 }
7682 }
7afe21cc
RK
7683
7684 /* We may have just removed some of the src_elt's from the hash table.
7685 So replace each one with the current head of the same class. */
7686
7687 for (i = 0; i < n_sets; i++)
7688 if (sets[i].rtl)
7689 {
7690 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
7691 /* If elt was removed, find current head of same class,
7692 or 0 if nothing remains of that class. */
7693 {
7694 register struct table_elt *elt = sets[i].src_elt;
7695
7696 while (elt && elt->prev_same_value)
7697 elt = elt->prev_same_value;
7698
7699 while (elt && elt->first_same_value == 0)
7700 elt = elt->next_same_value;
7701 sets[i].src_elt = elt ? elt->first_same_value : 0;
7702 }
7703 }
7704
7705 /* Now insert the destinations into their equivalence classes. */
7706
7707 for (i = 0; i < n_sets; i++)
7708 if (sets[i].rtl)
7709 {
7710 register rtx dest = SET_DEST (sets[i].rtl);
9de2c71a 7711 rtx inner_dest = sets[i].inner_dest;
7afe21cc
RK
7712 register struct table_elt *elt;
7713
7714 /* Don't record value if we are not supposed to risk allocating
7715 floating-point values in registers that might be wider than
7716 memory. */
7717 if ((flag_float_store
7718 && GET_CODE (dest) == MEM
cbf6a543 7719 && FLOAT_MODE_P (GET_MODE (dest)))
bc4ddc77
JW
7720 /* Don't record BLKmode values, because we don't know the
7721 size of it, and can't be sure that other BLKmode values
7722 have the same or smaller size. */
7723 || GET_MODE (dest) == BLKmode
7afe21cc
RK
7724 /* Don't record values of destinations set inside a libcall block
7725 since we might delete the libcall. Things should have been set
7726 up so we won't want to reuse such a value, but we play it safe
7727 here. */
7bd8b2a8 7728 || libcall_insn
7afe21cc
RK
7729 /* If we didn't put a REG_EQUAL value or a source into the hash
7730 table, there is no point is recording DEST. */
1a8e9a8e
RK
7731 || sets[i].src_elt == 0
7732 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
7733 or SIGN_EXTEND, don't record DEST since it can cause
7734 some tracking to be wrong.
7735
7736 ??? Think about this more later. */
7737 || (GET_CODE (dest) == SUBREG
7738 && (GET_MODE_SIZE (GET_MODE (dest))
7739 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7740 && (GET_CODE (sets[i].src) == SIGN_EXTEND
7741 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
7afe21cc
RK
7742 continue;
7743
7744 /* STRICT_LOW_PART isn't part of the value BEING set,
7745 and neither is the SUBREG inside it.
7746 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
7747 if (GET_CODE (dest) == STRICT_LOW_PART)
7748 dest = SUBREG_REG (XEXP (dest, 0));
7749
c610adec 7750 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
7afe21cc
RK
7751 /* Registers must also be inserted into chains for quantities. */
7752 if (insert_regs (dest, sets[i].src_elt, 1))
8ae2b8f6
JW
7753 {
7754 /* If `insert_regs' changes something, the hash code must be
7755 recalculated. */
7756 rehash_using_reg (dest);
7757 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
7758 }
7afe21cc 7759
9de2c71a
MM
7760 if (GET_CODE (inner_dest) == MEM
7761 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
7762 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
7763 that (MEM (ADDRESSOF (X))) is equivalent to Y.
7764 Consider the case in which the address of the MEM is
7765 passed to a function, which alters the MEM. Then, if we
7766 later use Y instead of the MEM we'll miss the update. */
7767 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
7768 else
7769 elt = insert (dest, sets[i].src_elt,
7770 sets[i].dest_hash, GET_MODE (dest));
7771
c256df0b 7772 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
9ad91d71
RK
7773 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
7774 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
7775 0))));
c256df0b 7776
7afe21cc
RK
7777 if (elt->in_memory)
7778 {
7779 /* This implicitly assumes a whole struct
7780 need not have MEM_IN_STRUCT_P.
7781 But a whole struct is *supposed* to have MEM_IN_STRUCT_P. */
7782 elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
7783 || sets[i].inner_dest != SET_DEST (sets[i].rtl));
7784 }
7785
fc3ffe83
RK
7786 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
7787 narrower than M2, and both M1 and M2 are the same number of words,
7788 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
7789 make that equivalence as well.
7afe21cc
RK
7790
7791 However, BAR may have equivalences for which gen_lowpart_if_possible
7792 will produce a simpler value than gen_lowpart_if_possible applied to
7793 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
7794 BAR's equivalences. If we don't get a simplified form, make
7795 the SUBREG. It will not be used in an equivalence, but will
7796 cause two similar assignments to be detected.
7797
7798 Note the loop below will find SUBREG_REG (DEST) since we have
7799 already entered SRC and DEST of the SET in the table. */
7800
7801 if (GET_CODE (dest) == SUBREG
6cdbaec4
RK
7802 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
7803 / UNITS_PER_WORD)
7804 == (GET_MODE_SIZE (GET_MODE (dest)) - 1)/ UNITS_PER_WORD)
7afe21cc
RK
7805 && (GET_MODE_SIZE (GET_MODE (dest))
7806 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7807 && sets[i].src_elt != 0)
7808 {
7809 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
7810 struct table_elt *elt, *classp = 0;
7811
7812 for (elt = sets[i].src_elt->first_same_value; elt;
7813 elt = elt->next_same_value)
7814 {
7815 rtx new_src = 0;
2197a88a 7816 unsigned src_hash;
7afe21cc
RK
7817 struct table_elt *src_elt;
7818
7819 /* Ignore invalid entries. */
7820 if (GET_CODE (elt->exp) != REG
7821 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7822 continue;
7823
7824 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
7825 if (new_src == 0)
38a448ca 7826 new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
7afe21cc
RK
7827
7828 src_hash = HASH (new_src, new_mode);
7829 src_elt = lookup (new_src, src_hash, new_mode);
7830
7831 /* Put the new source in the hash table is if isn't
7832 already. */
7833 if (src_elt == 0)
7834 {
7835 if (insert_regs (new_src, classp, 0))
8ae2b8f6
JW
7836 {
7837 rehash_using_reg (new_src);
7838 src_hash = HASH (new_src, new_mode);
7839 }
7afe21cc
RK
7840 src_elt = insert (new_src, classp, src_hash, new_mode);
7841 src_elt->in_memory = elt->in_memory;
7842 src_elt->in_struct = elt->in_struct;
7843 }
7844 else if (classp && classp != src_elt->first_same_value)
7845 /* Show that two things that we've seen before are
7846 actually the same. */
7847 merge_equiv_classes (src_elt, classp);
7848
7849 classp = src_elt->first_same_value;
da932f04
JL
7850 /* Ignore invalid entries. */
7851 while (classp
7852 && GET_CODE (classp->exp) != REG
7853 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
7854 classp = classp->next_same_value;
7afe21cc
RK
7855 }
7856 }
7857 }
7858
7859 /* Special handling for (set REG0 REG1)
7860 where REG0 is the "cheapest", cheaper than REG1.
7861 After cse, REG1 will probably not be used in the sequel,
7862 so (if easily done) change this insn to (set REG1 REG0) and
7863 replace REG1 with REG0 in the previous insn that computed their value.
7864 Then REG1 will become a dead store and won't cloud the situation
7865 for later optimizations.
7866
7867 Do not make this change if REG1 is a hard register, because it will
7868 then be used in the sequel and we may be changing a two-operand insn
7869 into a three-operand insn.
7870
50270076
R
7871 Also do not do this if we are operating on a copy of INSN.
7872
7873 Also don't do this if INSN ends a libcall; this would cause an unrelated
7874 register to be set in the middle of a libcall, and we then get bad code
7875 if the libcall is deleted. */
7afe21cc
RK
7876
7877 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
7878 && NEXT_INSN (PREV_INSN (insn)) == insn
7879 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
7880 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
7881 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
30f72379 7882 && (qty_first_reg[REG_QTY (REGNO (SET_SRC (sets[0].rtl)))]
50270076
R
7883 == REGNO (SET_DEST (sets[0].rtl)))
7884 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
7afe21cc
RK
7885 {
7886 rtx prev = PREV_INSN (insn);
7887 while (prev && GET_CODE (prev) == NOTE)
7888 prev = PREV_INSN (prev);
7889
7890 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
7891 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
7892 {
7893 rtx dest = SET_DEST (sets[0].rtl);
906c4e36 7894 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
7afe21cc
RK
7895
7896 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
7897 validate_change (insn, & SET_DEST (sets[0].rtl),
7898 SET_SRC (sets[0].rtl), 1);
7899 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
7900 apply_change_group ();
7901
7902 /* If REG1 was equivalent to a constant, REG0 is not. */
7903 if (note)
7904 PUT_REG_NOTE_KIND (note, REG_EQUAL);
7905
7906 /* If there was a REG_WAS_0 note on PREV, remove it. Move
7907 any REG_WAS_0 note on INSN to PREV. */
906c4e36 7908 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
7afe21cc
RK
7909 if (note)
7910 remove_note (prev, note);
7911
906c4e36 7912 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7afe21cc
RK
7913 if (note)
7914 {
7915 remove_note (insn, note);
7916 XEXP (note, 1) = REG_NOTES (prev);
7917 REG_NOTES (prev) = note;
7918 }
98369a0f
RK
7919
7920 /* If INSN has a REG_EQUAL note, and this note mentions REG0,
7921 then we must delete it, because the value in REG0 has changed. */
7922 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7923 if (note && reg_mentioned_p (dest, XEXP (note, 0)))
7924 remove_note (insn, note);
7afe21cc
RK
7925 }
7926 }
7927
7928 /* If this is a conditional jump insn, record any known equivalences due to
7929 the condition being tested. */
7930
7931 last_jump_equiv_class = 0;
7932 if (GET_CODE (insn) == JUMP_INSN
7933 && n_sets == 1 && GET_CODE (x) == SET
7934 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
7935 record_jump_equiv (insn, 0);
7936
7937#ifdef HAVE_cc0
7938 /* If the previous insn set CC0 and this insn no longer references CC0,
7939 delete the previous insn. Here we use the fact that nothing expects CC0
7940 to be valid over an insn, which is true until the final pass. */
7941 if (prev_insn && GET_CODE (prev_insn) == INSN
7942 && (tem = single_set (prev_insn)) != 0
7943 && SET_DEST (tem) == cc0_rtx
7944 && ! reg_mentioned_p (cc0_rtx, x))
7945 {
7946 PUT_CODE (prev_insn, NOTE);
7947 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
7948 NOTE_SOURCE_FILE (prev_insn) = 0;
7949 }
7950
7951 prev_insn_cc0 = this_insn_cc0;
7952 prev_insn_cc0_mode = this_insn_cc0_mode;
7953#endif
7954
7955 prev_insn = insn;
7956}
7957\f
a4c6502a 7958/* Remove from the hash table all expressions that reference memory. */
7afe21cc 7959static void
9ae8ffe7 7960invalidate_memory ()
7afe21cc 7961{
9ae8ffe7
JL
7962 register int i;
7963 register struct table_elt *p, *next;
7afe21cc 7964
9ae8ffe7
JL
7965 for (i = 0; i < NBUCKETS; i++)
7966 for (p = table[i]; p; p = next)
7967 {
7968 next = p->next_same_hash;
7969 if (p->in_memory)
7970 remove_from_table (p, i);
7971 }
7972}
7973
7974/* XXX ??? The name of this function bears little resemblance to
7975 what this function actually does. FIXME. */
7976static int
7977note_mem_written (addr)
7978 register rtx addr;
7979{
7980 /* Pushing or popping the stack invalidates just the stack pointer. */
7981 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7982 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7983 && GET_CODE (XEXP (addr, 0)) == REG
7984 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7afe21cc 7985 {
30f72379
MM
7986 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
7987 REG_TICK (STACK_POINTER_REGNUM)++;
9ae8ffe7
JL
7988
7989 /* This should be *very* rare. */
7990 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
7991 invalidate (stack_pointer_rtx, VOIDmode);
7992 return 1;
7afe21cc 7993 }
9ae8ffe7 7994 return 0;
7afe21cc
RK
7995}
7996
7997/* Perform invalidation on the basis of everything about an insn
7998 except for invalidating the actual places that are SET in it.
7999 This includes the places CLOBBERed, and anything that might
8000 alias with something that is SET or CLOBBERed.
8001
7afe21cc
RK
8002 X is the pattern of the insn. */
8003
8004static void
9ae8ffe7 8005invalidate_from_clobbers (x)
7afe21cc
RK
8006 rtx x;
8007{
7afe21cc
RK
8008 if (GET_CODE (x) == CLOBBER)
8009 {
8010 rtx ref = XEXP (x, 0);
9ae8ffe7
JL
8011 if (ref)
8012 {
8013 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
8014 || GET_CODE (ref) == MEM)
8015 invalidate (ref, VOIDmode);
8016 else if (GET_CODE (ref) == STRICT_LOW_PART
8017 || GET_CODE (ref) == ZERO_EXTRACT)
8018 invalidate (XEXP (ref, 0), GET_MODE (ref));
8019 }
7afe21cc
RK
8020 }
8021 else if (GET_CODE (x) == PARALLEL)
8022 {
8023 register int i;
8024 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
8025 {
8026 register rtx y = XVECEXP (x, 0, i);
8027 if (GET_CODE (y) == CLOBBER)
8028 {
8029 rtx ref = XEXP (y, 0);
9ae8ffe7
JL
8030 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
8031 || GET_CODE (ref) == MEM)
8032 invalidate (ref, VOIDmode);
8033 else if (GET_CODE (ref) == STRICT_LOW_PART
8034 || GET_CODE (ref) == ZERO_EXTRACT)
8035 invalidate (XEXP (ref, 0), GET_MODE (ref));
7afe21cc
RK
8036 }
8037 }
8038 }
8039}
8040\f
8041/* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
8042 and replace any registers in them with either an equivalent constant
8043 or the canonical form of the register. If we are inside an address,
8044 only do this if the address remains valid.
8045
8046 OBJECT is 0 except when within a MEM in which case it is the MEM.
8047
8048 Return the replacement for X. */
8049
8050static rtx
8051cse_process_notes (x, object)
8052 rtx x;
8053 rtx object;
8054{
8055 enum rtx_code code = GET_CODE (x);
8056 char *fmt = GET_RTX_FORMAT (code);
7afe21cc
RK
8057 int i;
8058
8059 switch (code)
8060 {
8061 case CONST_INT:
8062 case CONST:
8063 case SYMBOL_REF:
8064 case LABEL_REF:
8065 case CONST_DOUBLE:
8066 case PC:
8067 case CC0:
8068 case LO_SUM:
8069 return x;
8070
8071 case MEM:
8072 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
8073 return x;
8074
8075 case EXPR_LIST:
8076 case INSN_LIST:
8077 if (REG_NOTE_KIND (x) == REG_EQUAL)
906c4e36 8078 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
7afe21cc 8079 if (XEXP (x, 1))
906c4e36 8080 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
7afe21cc
RK
8081 return x;
8082
e4890d45
RS
8083 case SIGN_EXTEND:
8084 case ZERO_EXTEND:
0b0ee36c 8085 case SUBREG:
e4890d45
RS
8086 {
8087 rtx new = cse_process_notes (XEXP (x, 0), object);
8088 /* We don't substitute VOIDmode constants into these rtx,
8089 since they would impede folding. */
8090 if (GET_MODE (new) != VOIDmode)
8091 validate_change (object, &XEXP (x, 0), new, 0);
8092 return x;
8093 }
8094
7afe21cc 8095 case REG:
30f72379 8096 i = REG_QTY (REGNO (x));
7afe21cc
RK
8097
8098 /* Return a constant or a constant register. */
8099 if (REGNO_QTY_VALID_P (REGNO (x))
8100 && qty_const[i] != 0
8101 && (CONSTANT_P (qty_const[i])
8102 || GET_CODE (qty_const[i]) == REG))
8103 {
8104 rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
8105 if (new)
8106 return new;
8107 }
8108
8109 /* Otherwise, canonicalize this register. */
906c4e36 8110 return canon_reg (x, NULL_RTX);
e9a25f70
JL
8111
8112 default:
8113 break;
7afe21cc
RK
8114 }
8115
8116 for (i = 0; i < GET_RTX_LENGTH (code); i++)
8117 if (fmt[i] == 'e')
8118 validate_change (object, &XEXP (x, i),
7fe34fdf 8119 cse_process_notes (XEXP (x, i), object), 0);
7afe21cc
RK
8120
8121 return x;
8122}
8123\f
8124/* Find common subexpressions between the end test of a loop and the beginning
8125 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
8126
8127 Often we have a loop where an expression in the exit test is used
8128 in the body of the loop. For example "while (*p) *q++ = *p++;".
8129 Because of the way we duplicate the loop exit test in front of the loop,
8130 however, we don't detect that common subexpression. This will be caught
8131 when global cse is implemented, but this is a quite common case.
8132
8133 This function handles the most common cases of these common expressions.
8134 It is called after we have processed the basic block ending with the
8135 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
8136 jumps to a label used only once. */
8137
8138static void
8139cse_around_loop (loop_start)
8140 rtx loop_start;
8141{
8142 rtx insn;
8143 int i;
8144 struct table_elt *p;
8145
8146 /* If the jump at the end of the loop doesn't go to the start, we don't
8147 do anything. */
8148 for (insn = PREV_INSN (loop_start);
8149 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
8150 insn = PREV_INSN (insn))
8151 ;
8152
8153 if (insn == 0
8154 || GET_CODE (insn) != NOTE
8155 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
8156 return;
8157
8158 /* If the last insn of the loop (the end test) was an NE comparison,
8159 we will interpret it as an EQ comparison, since we fell through
f72aed24 8160 the loop. Any equivalences resulting from that comparison are
7afe21cc
RK
8161 therefore not valid and must be invalidated. */
8162 if (last_jump_equiv_class)
8163 for (p = last_jump_equiv_class->first_same_value; p;
8164 p = p->next_same_value)
51723711
KG
8165 {
8166 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
8167 || (GET_CODE (p->exp) == SUBREG
8168 && GET_CODE (SUBREG_REG (p->exp)) == REG))
8169 invalidate (p->exp, VOIDmode);
8170 else if (GET_CODE (p->exp) == STRICT_LOW_PART
8171 || GET_CODE (p->exp) == ZERO_EXTRACT)
8172 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
8173 }
7afe21cc
RK
8174
8175 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
8176 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
8177
8178 The only thing we do with SET_DEST is invalidate entries, so we
8179 can safely process each SET in order. It is slightly less efficient
556c714b
JW
8180 to do so, but we only want to handle the most common cases.
8181
8182 The gen_move_insn call in cse_set_around_loop may create new pseudos.
8183 These pseudos won't have valid entries in any of the tables indexed
8184 by register number, such as reg_qty. We avoid out-of-range array
8185 accesses by not processing any instructions created after cse started. */
7afe21cc
RK
8186
8187 for (insn = NEXT_INSN (loop_start);
8188 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
556c714b 8189 && INSN_UID (insn) < max_insn_uid
7afe21cc
RK
8190 && ! (GET_CODE (insn) == NOTE
8191 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
8192 insn = NEXT_INSN (insn))
8193 {
8194 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
8195 && (GET_CODE (PATTERN (insn)) == SET
8196 || GET_CODE (PATTERN (insn)) == CLOBBER))
8197 cse_set_around_loop (PATTERN (insn), insn, loop_start);
8198 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
8199 && GET_CODE (PATTERN (insn)) == PARALLEL)
8200 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
8201 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
8202 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
8203 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
8204 loop_start);
8205 }
8206}
8207\f
8b3686ed
RK
8208/* Process one SET of an insn that was skipped. We ignore CLOBBERs
8209 since they are done elsewhere. This function is called via note_stores. */
8210
8211static void
8212invalidate_skipped_set (dest, set)
8213 rtx set;
8214 rtx dest;
8215{
9ae8ffe7
JL
8216 enum rtx_code code = GET_CODE (dest);
8217
8218 if (code == MEM
8219 && ! note_mem_written (dest) /* If this is not a stack push ... */
8220 /* There are times when an address can appear varying and be a PLUS
8221 during this scan when it would be a fixed address were we to know
8222 the proper equivalences. So invalidate all memory if there is
8223 a BLKmode or nonscalar memory reference or a reference to a
8224 variable address. */
8225 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
8226 || cse_rtx_varies_p (XEXP (dest, 0))))
8227 {
8228 invalidate_memory ();
8229 return;
8230 }
ffcf6393 8231
f47c02fa
RK
8232 if (GET_CODE (set) == CLOBBER
8233#ifdef HAVE_cc0
8234 || dest == cc0_rtx
8235#endif
8236 || dest == pc_rtx)
8237 return;
8238
9ae8ffe7 8239 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
bb4034b3 8240 invalidate (XEXP (dest, 0), GET_MODE (dest));
9ae8ffe7
JL
8241 else if (code == REG || code == SUBREG || code == MEM)
8242 invalidate (dest, VOIDmode);
8b3686ed
RK
8243}
8244
8245/* Invalidate all insns from START up to the end of the function or the
8246 next label. This called when we wish to CSE around a block that is
8247 conditionally executed. */
8248
8249static void
8250invalidate_skipped_block (start)
8251 rtx start;
8252{
8253 rtx insn;
8b3686ed
RK
8254
8255 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
8256 insn = NEXT_INSN (insn))
8257 {
8258 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
8259 continue;
8260
8b3686ed
RK
8261 if (GET_CODE (insn) == CALL_INSN)
8262 {
9ae8ffe7
JL
8263 if (! CONST_CALL_P (insn))
8264 invalidate_memory ();
8b3686ed 8265 invalidate_for_call ();
8b3686ed
RK
8266 }
8267
97577254 8268 invalidate_from_clobbers (PATTERN (insn));
8b3686ed 8269 note_stores (PATTERN (insn), invalidate_skipped_set);
8b3686ed
RK
8270 }
8271}
8272\f
7afe21cc
RK
8273/* Used for communication between the following two routines; contains a
8274 value to be checked for modification. */
8275
8276static rtx cse_check_loop_start_value;
8277
8278/* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
8279 indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0. */
8280
8281static void
8282cse_check_loop_start (x, set)
8283 rtx x;
d6f4ec51 8284 rtx set ATTRIBUTE_UNUSED;
7afe21cc
RK
8285{
8286 if (cse_check_loop_start_value == 0
8287 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
8288 return;
8289
8290 if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
8291 || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
8292 cse_check_loop_start_value = 0;
8293}
8294
8295/* X is a SET or CLOBBER contained in INSN that was found near the start of
8296 a loop that starts with the label at LOOP_START.
8297
8298 If X is a SET, we see if its SET_SRC is currently in our hash table.
8299 If so, we see if it has a value equal to some register used only in the
8300 loop exit code (as marked by jump.c).
8301
8302 If those two conditions are true, we search backwards from the start of
8303 the loop to see if that same value was loaded into a register that still
8304 retains its value at the start of the loop.
8305
8306 If so, we insert an insn after the load to copy the destination of that
8307 load into the equivalent register and (try to) replace our SET_SRC with that
8308 register.
8309
8310 In any event, we invalidate whatever this SET or CLOBBER modifies. */
8311
8312static void
8313cse_set_around_loop (x, insn, loop_start)
8314 rtx x;
8315 rtx insn;
8316 rtx loop_start;
8317{
7afe21cc 8318 struct table_elt *src_elt;
7afe21cc
RK
8319
8320 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
8321 are setting PC or CC0 or whose SET_SRC is already a register. */
8322 if (GET_CODE (x) == SET
8323 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
8324 && GET_CODE (SET_SRC (x)) != REG)
8325 {
8326 src_elt = lookup (SET_SRC (x),
8327 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
8328 GET_MODE (SET_DEST (x)));
8329
8330 if (src_elt)
8331 for (src_elt = src_elt->first_same_value; src_elt;
8332 src_elt = src_elt->next_same_value)
8333 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
8334 && COST (src_elt->exp) < COST (SET_SRC (x)))
8335 {
8336 rtx p, set;
8337
8338 /* Look for an insn in front of LOOP_START that sets
8339 something in the desired mode to SET_SRC (x) before we hit
8340 a label or CALL_INSN. */
8341
8342 for (p = prev_nonnote_insn (loop_start);
8343 p && GET_CODE (p) != CALL_INSN
8344 && GET_CODE (p) != CODE_LABEL;
8345 p = prev_nonnote_insn (p))
8346 if ((set = single_set (p)) != 0
8347 && GET_CODE (SET_DEST (set)) == REG
8348 && GET_MODE (SET_DEST (set)) == src_elt->mode
8349 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
8350 {
8351 /* We now have to ensure that nothing between P
8352 and LOOP_START modified anything referenced in
8353 SET_SRC (x). We know that nothing within the loop
8354 can modify it, or we would have invalidated it in
8355 the hash table. */
8356 rtx q;
8357
8358 cse_check_loop_start_value = SET_SRC (x);
8359 for (q = p; q != loop_start; q = NEXT_INSN (q))
8360 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
8361 note_stores (PATTERN (q), cse_check_loop_start);
8362
8363 /* If nothing was changed and we can replace our
8364 SET_SRC, add an insn after P to copy its destination
8365 to what we will be replacing SET_SRC with. */
8366 if (cse_check_loop_start_value
8367 && validate_change (insn, &SET_SRC (x),
8368 src_elt->exp, 0))
e89d3e6f
R
8369 {
8370 /* If this creates new pseudos, this is unsafe,
8371 because the regno of new pseudo is unsuitable
8372 to index into reg_qty when cse_insn processes
8373 the new insn. Therefore, if a new pseudo was
8374 created, discard this optimization. */
8375 int nregs = max_reg_num ();
8376 rtx move
8377 = gen_move_insn (src_elt->exp, SET_DEST (set));
8378 if (nregs != max_reg_num ())
8379 {
8380 if (! validate_change (insn, &SET_SRC (x),
8381 SET_SRC (set), 0))
8382 abort ();
8383 }
8384 else
8385 emit_insn_after (move, p);
8386 }
7afe21cc
RK
8387 break;
8388 }
8389 }
8390 }
8391
8392 /* Now invalidate anything modified by X. */
9ae8ffe7 8393 note_mem_written (SET_DEST (x));
7afe21cc 8394
9ae8ffe7 8395 /* See comment on similar code in cse_insn for explanation of these tests. */
7afe21cc 8396 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
9ae8ffe7 8397 || GET_CODE (SET_DEST (x)) == MEM)
bb4034b3 8398 invalidate (SET_DEST (x), VOIDmode);
2708da92
RS
8399 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
8400 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
bb4034b3 8401 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
7afe21cc
RK
8402}
8403\f
8404/* Find the end of INSN's basic block and return its range,
8405 the total number of SETs in all the insns of the block, the last insn of the
8406 block, and the branch path.
8407
8408 The branch path indicates which branches should be followed. If a non-zero
8409 path size is specified, the block should be rescanned and a different set
8410 of branches will be taken. The branch path is only used if
8b3686ed 8411 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
7afe21cc
RK
8412
8413 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
8414 used to describe the block. It is filled in with the information about
8415 the current block. The incoming structure's branch path, if any, is used
8416 to construct the output branch path. */
8417
7afe21cc 8418void
8b3686ed 8419cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
7afe21cc
RK
8420 rtx insn;
8421 struct cse_basic_block_data *data;
8422 int follow_jumps;
8423 int after_loop;
8b3686ed 8424 int skip_blocks;
7afe21cc
RK
8425{
8426 rtx p = insn, q;
8427 int nsets = 0;
8428 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
fc3ffe83 8429 rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
7afe21cc
RK
8430 int path_size = data->path_size;
8431 int path_entry = 0;
8432 int i;
8433
8434 /* Update the previous branch path, if any. If the last branch was
8435 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
8436 shorten the path by one and look at the previous branch. We know that
8437 at least one branch must have been taken if PATH_SIZE is non-zero. */
8438 while (path_size > 0)
8439 {
8b3686ed 8440 if (data->path[path_size - 1].status != NOT_TAKEN)
7afe21cc
RK
8441 {
8442 data->path[path_size - 1].status = NOT_TAKEN;
8443 break;
8444 }
8445 else
8446 path_size--;
8447 }
8448
8449 /* Scan to end of this basic block. */
8450 while (p && GET_CODE (p) != CODE_LABEL)
8451 {
8452 /* Don't cse out the end of a loop. This makes a difference
8453 only for the unusual loops that always execute at least once;
8454 all other loops have labels there so we will stop in any case.
8455 Cse'ing out the end of the loop is dangerous because it
8456 might cause an invariant expression inside the loop
8457 to be reused after the end of the loop. This would make it
8458 hard to move the expression out of the loop in loop.c,
8459 especially if it is one of several equivalent expressions
8460 and loop.c would like to eliminate it.
8461
8462 If we are running after loop.c has finished, we can ignore
8463 the NOTE_INSN_LOOP_END. */
8464
8465 if (! after_loop && GET_CODE (p) == NOTE
8466 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
8467 break;
8468
8469 /* Don't cse over a call to setjmp; on some machines (eg vax)
8470 the regs restored by the longjmp come from
8471 a later time than the setjmp. */
8472 if (GET_CODE (p) == NOTE
8473 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
8474 break;
8475
8476 /* A PARALLEL can have lots of SETs in it,
8477 especially if it is really an ASM_OPERANDS. */
8478 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
8479 && GET_CODE (PATTERN (p)) == PARALLEL)
8480 nsets += XVECLEN (PATTERN (p), 0);
8481 else if (GET_CODE (p) != NOTE)
8482 nsets += 1;
8483
164c8956
RK
8484 /* Ignore insns made by CSE; they cannot affect the boundaries of
8485 the basic block. */
8486
8487 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
8b3686ed 8488 high_cuid = INSN_CUID (p);
164c8956
RK
8489 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
8490 low_cuid = INSN_CUID (p);
7afe21cc
RK
8491
8492 /* See if this insn is in our branch path. If it is and we are to
8493 take it, do so. */
8494 if (path_entry < path_size && data->path[path_entry].branch == p)
8495 {
8b3686ed 8496 if (data->path[path_entry].status != NOT_TAKEN)
7afe21cc
RK
8497 p = JUMP_LABEL (p);
8498
8499 /* Point to next entry in path, if any. */
8500 path_entry++;
8501 }
8502
8503 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
8504 was specified, we haven't reached our maximum path length, there are
8505 insns following the target of the jump, this is the only use of the
8b3686ed
RK
8506 jump label, and the target label is preceded by a BARRIER.
8507
8508 Alternatively, we can follow the jump if it branches around a
8509 block of code and there are no other branches into the block.
8510 In this case invalidate_skipped_block will be called to invalidate any
8511 registers set in the block when following the jump. */
8512
8513 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
7afe21cc
RK
8514 && GET_CODE (p) == JUMP_INSN
8515 && GET_CODE (PATTERN (p)) == SET
8516 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
85c3ba60 8517 && JUMP_LABEL (p) != 0
7afe21cc
RK
8518 && LABEL_NUSES (JUMP_LABEL (p)) == 1
8519 && NEXT_INSN (JUMP_LABEL (p)) != 0)
8520 {
8521 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
8522 if ((GET_CODE (q) != NOTE
8523 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
8524 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
8525 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
8526 break;
8527
8528 /* If we ran into a BARRIER, this code is an extension of the
8529 basic block when the branch is taken. */
8b3686ed 8530 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
7afe21cc
RK
8531 {
8532 /* Don't allow ourself to keep walking around an
8533 always-executed loop. */
fc3ffe83
RK
8534 if (next_real_insn (q) == next)
8535 {
8536 p = NEXT_INSN (p);
8537 continue;
8538 }
7afe21cc
RK
8539
8540 /* Similarly, don't put a branch in our path more than once. */
8541 for (i = 0; i < path_entry; i++)
8542 if (data->path[i].branch == p)
8543 break;
8544
8545 if (i != path_entry)
8546 break;
8547
8548 data->path[path_entry].branch = p;
8549 data->path[path_entry++].status = TAKEN;
8550
8551 /* This branch now ends our path. It was possible that we
8552 didn't see this branch the last time around (when the
8553 insn in front of the target was a JUMP_INSN that was
8554 turned into a no-op). */
8555 path_size = path_entry;
8556
8557 p = JUMP_LABEL (p);
8558 /* Mark block so we won't scan it again later. */
8559 PUT_MODE (NEXT_INSN (p), QImode);
8560 }
8b3686ed
RK
8561 /* Detect a branch around a block of code. */
8562 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
8563 {
8564 register rtx tmp;
8565
fc3ffe83
RK
8566 if (next_real_insn (q) == next)
8567 {
8568 p = NEXT_INSN (p);
8569 continue;
8570 }
8b3686ed
RK
8571
8572 for (i = 0; i < path_entry; i++)
8573 if (data->path[i].branch == p)
8574 break;
8575
8576 if (i != path_entry)
8577 break;
8578
8579 /* This is no_labels_between_p (p, q) with an added check for
8580 reaching the end of a function (in case Q precedes P). */
8581 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
8582 if (GET_CODE (tmp) == CODE_LABEL)
8583 break;
8584
8585 if (tmp == q)
8586 {
8587 data->path[path_entry].branch = p;
8588 data->path[path_entry++].status = AROUND;
8589
8590 path_size = path_entry;
8591
8592 p = JUMP_LABEL (p);
8593 /* Mark block so we won't scan it again later. */
8594 PUT_MODE (NEXT_INSN (p), QImode);
8595 }
8596 }
7afe21cc 8597 }
7afe21cc
RK
8598 p = NEXT_INSN (p);
8599 }
8600
8601 data->low_cuid = low_cuid;
8602 data->high_cuid = high_cuid;
8603 data->nsets = nsets;
8604 data->last = p;
8605
8606 /* If all jumps in the path are not taken, set our path length to zero
8607 so a rescan won't be done. */
8608 for (i = path_size - 1; i >= 0; i--)
8b3686ed 8609 if (data->path[i].status != NOT_TAKEN)
7afe21cc
RK
8610 break;
8611
8612 if (i == -1)
8613 data->path_size = 0;
8614 else
8615 data->path_size = path_size;
8616
8617 /* End the current branch path. */
8618 data->path[path_size].branch = 0;
8619}
8620\f
7afe21cc
RK
8621/* Perform cse on the instructions of a function.
8622 F is the first instruction.
8623 NREGS is one plus the highest pseudo-reg number used in the instruction.
8624
8625 AFTER_LOOP is 1 if this is the cse call done after loop optimization
8626 (only if -frerun-cse-after-loop).
8627
8628 Returns 1 if jump_optimize should be redone due to simplifications
8629 in conditional jump instructions. */
8630
8631int
8632cse_main (f, nregs, after_loop, file)
8633 rtx f;
8634 int nregs;
8635 int after_loop;
8636 FILE *file;
8637{
8638 struct cse_basic_block_data val;
8639 register rtx insn = f;
8640 register int i;
8641
8642 cse_jumps_altered = 0;
a5dfb4ee 8643 recorded_label_ref = 0;
7afe21cc
RK
8644 constant_pool_entries_cost = 0;
8645 val.path_size = 0;
8646
8647 init_recog ();
9ae8ffe7 8648 init_alias_analysis ();
7afe21cc
RK
8649
8650 max_reg = nregs;
8651
556c714b
JW
8652 max_insn_uid = get_max_uid ();
8653
7afe21cc
RK
8654 reg_next_eqv = (int *) alloca (nregs * sizeof (int));
8655 reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
7afe21cc 8656
7bac1be0
RK
8657#ifdef LOAD_EXTEND_OP
8658
8659 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
8660 and change the code and mode as appropriate. */
38a448ca 8661 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
7bac1be0
RK
8662#endif
8663
7afe21cc
RK
8664 /* Discard all the free elements of the previous function
8665 since they are allocated in the temporarily obstack. */
4c9a05bc 8666 bzero ((char *) table, sizeof table);
7afe21cc
RK
8667 free_element_chain = 0;
8668 n_elements_made = 0;
8669
8670 /* Find the largest uid. */
8671
164c8956
RK
8672 max_uid = get_max_uid ();
8673 uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
4c9a05bc 8674 bzero ((char *) uid_cuid, (max_uid + 1) * sizeof (int));
7afe21cc
RK
8675
8676 /* Compute the mapping from uids to cuids.
8677 CUIDs are numbers assigned to insns, like uids,
8678 except that cuids increase monotonically through the code.
8679 Don't assign cuids to line-number NOTEs, so that the distance in cuids
8680 between two insns is not affected by -g. */
8681
8682 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
8683 {
8684 if (GET_CODE (insn) != NOTE
8685 || NOTE_LINE_NUMBER (insn) < 0)
8686 INSN_CUID (insn) = ++i;
8687 else
8688 /* Give a line number note the same cuid as preceding insn. */
8689 INSN_CUID (insn) = i;
8690 }
8691
8692 /* Initialize which registers are clobbered by calls. */
8693
8694 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
8695
8696 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8697 if ((call_used_regs[i]
8698 /* Used to check !fixed_regs[i] here, but that isn't safe;
8699 fixed regs are still call-clobbered, and sched can get
8700 confused if they can "live across calls".
8701
8702 The frame pointer is always preserved across calls. The arg
8703 pointer is if it is fixed. The stack pointer usually is, unless
8704 RETURN_POPS_ARGS, in which case an explicit CLOBBER
8705 will be present. If we are generating PIC code, the PIC offset
8706 table register is preserved across calls. */
8707
8708 && i != STACK_POINTER_REGNUM
8709 && i != FRAME_POINTER_REGNUM
8bc169f2
DE
8710#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
8711 && i != HARD_FRAME_POINTER_REGNUM
8712#endif
7afe21cc
RK
8713#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
8714 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
8715#endif
be8fe470 8716#if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
7afe21cc
RK
8717 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
8718#endif
8719 )
8720 || global_regs[i])
8721 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
8722
8723 /* Loop over basic blocks.
8724 Compute the maximum number of qty's needed for each basic block
8725 (which is 2 for each SET). */
8726 insn = f;
8727 while (insn)
8728 {
8b3686ed
RK
8729 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
8730 flag_cse_skip_blocks);
7afe21cc
RK
8731
8732 /* If this basic block was already processed or has no sets, skip it. */
8733 if (val.nsets == 0 || GET_MODE (insn) == QImode)
8734 {
8735 PUT_MODE (insn, VOIDmode);
8736 insn = (val.last ? NEXT_INSN (val.last) : 0);
8737 val.path_size = 0;
8738 continue;
8739 }
8740
8741 cse_basic_block_start = val.low_cuid;
8742 cse_basic_block_end = val.high_cuid;
8743 max_qty = val.nsets * 2;
8744
8745 if (file)
ab87f8c8 8746 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
7afe21cc
RK
8747 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
8748 val.nsets);
8749
8750 /* Make MAX_QTY bigger to give us room to optimize
8751 past the end of this basic block, if that should prove useful. */
8752 if (max_qty < 500)
8753 max_qty = 500;
8754
8755 max_qty += max_reg;
8756
8757 /* If this basic block is being extended by following certain jumps,
8758 (see `cse_end_of_basic_block'), we reprocess the code from the start.
8759 Otherwise, we start after this basic block. */
8760 if (val.path_size > 0)
8761 cse_basic_block (insn, val.last, val.path, 0);
8762 else
8763 {
8764 int old_cse_jumps_altered = cse_jumps_altered;
8765 rtx temp;
8766
8767 /* When cse changes a conditional jump to an unconditional
8768 jump, we want to reprocess the block, since it will give
8769 us a new branch path to investigate. */
8770 cse_jumps_altered = 0;
8771 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
8b3686ed
RK
8772 if (cse_jumps_altered == 0
8773 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7afe21cc
RK
8774 insn = temp;
8775
8776 cse_jumps_altered |= old_cse_jumps_altered;
8777 }
8778
8779#ifdef USE_C_ALLOCA
8780 alloca (0);
8781#endif
8782 }
8783
8784 /* Tell refers_to_mem_p that qty_const info is not available. */
8785 qty_const = 0;
8786
8787 if (max_elements_made < n_elements_made)
8788 max_elements_made = n_elements_made;
8789
a5dfb4ee 8790 return cse_jumps_altered || recorded_label_ref;
7afe21cc
RK
8791}
8792
8793/* Process a single basic block. FROM and TO and the limits of the basic
8794 block. NEXT_BRANCH points to the branch path when following jumps or
8795 a null path when not following jumps.
8796
8797 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
8798 loop. This is true when we are being called for the last time on a
8799 block and this CSE pass is before loop.c. */
8800
8801static rtx
8802cse_basic_block (from, to, next_branch, around_loop)
8803 register rtx from, to;
8804 struct branch_path *next_branch;
8805 int around_loop;
8806{
8807 register rtx insn;
8808 int to_usage = 0;
7bd8b2a8 8809 rtx libcall_insn = NULL_RTX;
e9a25f70 8810 int num_insns = 0;
7afe21cc
RK
8811
8812 /* Each of these arrays is undefined before max_reg, so only allocate
8813 the space actually needed and adjust the start below. */
8814
8815 qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8816 qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8817 qty_mode= (enum machine_mode *) alloca ((max_qty - max_reg) * sizeof (enum machine_mode));
8818 qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8819 qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8820 qty_comparison_code
8821 = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
8822 qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8823 qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8824
8825 qty_first_reg -= max_reg;
8826 qty_last_reg -= max_reg;
8827 qty_mode -= max_reg;
8828 qty_const -= max_reg;
8829 qty_const_insn -= max_reg;
8830 qty_comparison_code -= max_reg;
8831 qty_comparison_qty -= max_reg;
8832 qty_comparison_const -= max_reg;
8833
8834 new_basic_block ();
8835
8836 /* TO might be a label. If so, protect it from being deleted. */
8837 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8838 ++LABEL_NUSES (to);
8839
8840 for (insn = from; insn != to; insn = NEXT_INSN (insn))
8841 {
1d22a2c1 8842 register enum rtx_code code = GET_CODE (insn);
e9a25f70 8843
1d22a2c1
MM
8844 /* If we have processed 1,000 insns, flush the hash table to
8845 avoid extreme quadratic behavior. We must not include NOTEs
8846 in the count since there may be more or them when generating
8847 debugging information. If we clear the table at different
8848 times, code generated with -g -O might be different than code
8849 generated with -O but not -g.
e9a25f70
JL
8850
8851 ??? This is a real kludge and needs to be done some other way.
8852 Perhaps for 2.9. */
1d22a2c1 8853 if (code != NOTE && num_insns++ > 1000)
e9a25f70 8854 {
01e752d3 8855 flush_hash_table ();
e9a25f70
JL
8856 num_insns = 0;
8857 }
7afe21cc
RK
8858
8859 /* See if this is a branch that is part of the path. If so, and it is
8860 to be taken, do so. */
8861 if (next_branch->branch == insn)
8862 {
8b3686ed
RK
8863 enum taken status = next_branch++->status;
8864 if (status != NOT_TAKEN)
7afe21cc 8865 {
8b3686ed
RK
8866 if (status == TAKEN)
8867 record_jump_equiv (insn, 1);
8868 else
8869 invalidate_skipped_block (NEXT_INSN (insn));
8870
7afe21cc
RK
8871 /* Set the last insn as the jump insn; it doesn't affect cc0.
8872 Then follow this branch. */
8873#ifdef HAVE_cc0
8874 prev_insn_cc0 = 0;
8875#endif
8876 prev_insn = insn;
8877 insn = JUMP_LABEL (insn);
8878 continue;
8879 }
8880 }
8881
7afe21cc
RK
8882 if (GET_MODE (insn) == QImode)
8883 PUT_MODE (insn, VOIDmode);
8884
8885 if (GET_RTX_CLASS (code) == 'i')
8886 {
7bd8b2a8
JL
8887 rtx p;
8888
7afe21cc
RK
8889 /* Process notes first so we have all notes in canonical forms when
8890 looking for duplicate operations. */
8891
8892 if (REG_NOTES (insn))
906c4e36 8893 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7afe21cc
RK
8894
8895 /* Track when we are inside in LIBCALL block. Inside such a block,
8896 we do not want to record destinations. The last insn of a
8897 LIBCALL block is not considered to be part of the block, since
830a38ee 8898 its destination is the result of the block and hence should be
7afe21cc
RK
8899 recorded. */
8900
63be02db 8901 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7bd8b2a8 8902 libcall_insn = XEXP (p, 0);
906c4e36 8903 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7bd8b2a8 8904 libcall_insn = NULL_RTX;
7afe21cc 8905
7bd8b2a8 8906 cse_insn (insn, libcall_insn);
7afe21cc
RK
8907 }
8908
8909 /* If INSN is now an unconditional jump, skip to the end of our
8910 basic block by pretending that we just did the last insn in the
8911 basic block. If we are jumping to the end of our block, show
8912 that we can have one usage of TO. */
8913
8914 if (simplejump_p (insn))
8915 {
8916 if (to == 0)
8917 return 0;
8918
8919 if (JUMP_LABEL (insn) == to)
8920 to_usage = 1;
8921
6a5293dc
RS
8922 /* Maybe TO was deleted because the jump is unconditional.
8923 If so, there is nothing left in this basic block. */
8924 /* ??? Perhaps it would be smarter to set TO
8925 to whatever follows this insn,
8926 and pretend the basic block had always ended here. */
8927 if (INSN_DELETED_P (to))
8928 break;
8929
7afe21cc
RK
8930 insn = PREV_INSN (to);
8931 }
8932
8933 /* See if it is ok to keep on going past the label
8934 which used to end our basic block. Remember that we incremented
d45cf215 8935 the count of that label, so we decrement it here. If we made
7afe21cc
RK
8936 a jump unconditional, TO_USAGE will be one; in that case, we don't
8937 want to count the use in that jump. */
8938
8939 if (to != 0 && NEXT_INSN (insn) == to
8940 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
8941 {
8942 struct cse_basic_block_data val;
146135d6 8943 rtx prev;
7afe21cc
RK
8944
8945 insn = NEXT_INSN (to);
8946
8947 if (LABEL_NUSES (to) == 0)
146135d6 8948 insn = delete_insn (to);
7afe21cc 8949
146135d6
RK
8950 /* If TO was the last insn in the function, we are done. */
8951 if (insn == 0)
7afe21cc
RK
8952 return 0;
8953
146135d6
RK
8954 /* If TO was preceded by a BARRIER we are done with this block
8955 because it has no continuation. */
8956 prev = prev_nonnote_insn (to);
8957 if (prev && GET_CODE (prev) == BARRIER)
8958 return insn;
8959
8960 /* Find the end of the following block. Note that we won't be
8961 following branches in this case. */
7afe21cc
RK
8962 to_usage = 0;
8963 val.path_size = 0;
8b3686ed 8964 cse_end_of_basic_block (insn, &val, 0, 0, 0);
7afe21cc
RK
8965
8966 /* If the tables we allocated have enough space left
8967 to handle all the SETs in the next basic block,
8968 continue through it. Otherwise, return,
8969 and that block will be scanned individually. */
8970 if (val.nsets * 2 + next_qty > max_qty)
8971 break;
8972
8973 cse_basic_block_start = val.low_cuid;
8974 cse_basic_block_end = val.high_cuid;
8975 to = val.last;
8976
8977 /* Prevent TO from being deleted if it is a label. */
8978 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8979 ++LABEL_NUSES (to);
8980
8981 /* Back up so we process the first insn in the extension. */
8982 insn = PREV_INSN (insn);
8983 }
8984 }
8985
8986 if (next_qty > max_qty)
8987 abort ();
8988
8989 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
8990 the previous insn is the only insn that branches to the head of a loop,
8991 we can cse into the loop. Don't do this if we changed the jump
8992 structure of a loop unless we aren't going to be following jumps. */
8993
8b3686ed
RK
8994 if ((cse_jumps_altered == 0
8995 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7afe21cc
RK
8996 && around_loop && to != 0
8997 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
8998 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
8999 && JUMP_LABEL (PREV_INSN (to)) != 0
9000 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
9001 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
9002
9003 return to ? NEXT_INSN (to) : 0;
9004}
9005\f
9006/* Count the number of times registers are used (not set) in X.
9007 COUNTS is an array in which we accumulate the count, INCR is how much
79644f06
RK
9008 we count each register usage.
9009
9010 Don't count a usage of DEST, which is the SET_DEST of a SET which
9011 contains X in its SET_SRC. This is because such a SET does not
9012 modify the liveness of DEST. */
7afe21cc
RK
9013
9014static void
79644f06 9015count_reg_usage (x, counts, dest, incr)
7afe21cc
RK
9016 rtx x;
9017 int *counts;
79644f06 9018 rtx dest;
7afe21cc
RK
9019 int incr;
9020{
f1e7c95f 9021 enum rtx_code code;
7afe21cc
RK
9022 char *fmt;
9023 int i, j;
9024
f1e7c95f
RK
9025 if (x == 0)
9026 return;
9027
9028 switch (code = GET_CODE (x))
7afe21cc
RK
9029 {
9030 case REG:
79644f06
RK
9031 if (x != dest)
9032 counts[REGNO (x)] += incr;
7afe21cc
RK
9033 return;
9034
9035 case PC:
9036 case CC0:
9037 case CONST:
9038 case CONST_INT:
9039 case CONST_DOUBLE:
9040 case SYMBOL_REF:
9041 case LABEL_REF:
02e39abc
JL
9042 return;
9043
9044 case CLOBBER:
9045 /* If we are clobbering a MEM, mark any registers inside the address
9046 as being used. */
9047 if (GET_CODE (XEXP (x, 0)) == MEM)
9048 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7afe21cc
RK
9049 return;
9050
9051 case SET:
9052 /* Unless we are setting a REG, count everything in SET_DEST. */
9053 if (GET_CODE (SET_DEST (x)) != REG)
79644f06 9054 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
9ff08f70
RK
9055
9056 /* If SRC has side-effects, then we can't delete this insn, so the
9057 usage of SET_DEST inside SRC counts.
9058
9059 ??? Strictly-speaking, we might be preserving this insn
9060 because some other SET has side-effects, but that's hard
9061 to do and can't happen now. */
9062 count_reg_usage (SET_SRC (x), counts,
9063 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
9064 incr);
7afe21cc
RK
9065 return;
9066
f1e7c95f
RK
9067 case CALL_INSN:
9068 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
9069
9070 /* ... falls through ... */
7afe21cc
RK
9071 case INSN:
9072 case JUMP_INSN:
79644f06 9073 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
7afe21cc
RK
9074
9075 /* Things used in a REG_EQUAL note aren't dead since loop may try to
9076 use them. */
9077
f1e7c95f 9078 count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
7afe21cc
RK
9079 return;
9080
9081 case EXPR_LIST:
9082 case INSN_LIST:
f1e7c95f 9083 if (REG_NOTE_KIND (x) == REG_EQUAL
c6a26dc4 9084 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE))
79644f06 9085 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
f1e7c95f 9086 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
7afe21cc 9087 return;
e9a25f70
JL
9088
9089 default:
9090 break;
7afe21cc
RK
9091 }
9092
9093 fmt = GET_RTX_FORMAT (code);
9094 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9095 {
9096 if (fmt[i] == 'e')
79644f06 9097 count_reg_usage (XEXP (x, i), counts, dest, incr);
7afe21cc
RK
9098 else if (fmt[i] == 'E')
9099 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
79644f06 9100 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
7afe21cc
RK
9101 }
9102}
9103\f
9104/* Scan all the insns and delete any that are dead; i.e., they store a register
9105 that is never used or they copy a register to itself.
9106
c6a26dc4
JL
9107 This is used to remove insns made obviously dead by cse, loop or other
9108 optimizations. It improves the heuristics in loop since it won't try to
9109 move dead invariants out of loops or make givs for dead quantities. The
9110 remaining passes of the compilation are also sped up. */
7afe21cc
RK
9111
9112void
c6a26dc4 9113delete_trivially_dead_insns (insns, nreg)
7afe21cc
RK
9114 rtx insns;
9115 int nreg;
9116{
9117 int *counts = (int *) alloca (nreg * sizeof (int));
77fa0940 9118 rtx insn, prev;
51723711 9119#ifdef HAVE_cc0
d45cf215 9120 rtx tem;
51723711 9121#endif
7afe21cc 9122 int i;
614bb5d4 9123 int in_libcall = 0, dead_libcall = 0;
7afe21cc
RK
9124
9125 /* First count the number of times each register is used. */
4c9a05bc 9126 bzero ((char *) counts, sizeof (int) * nreg);
7afe21cc 9127 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
79644f06 9128 count_reg_usage (insn, counts, NULL_RTX, 1);
7afe21cc
RK
9129
9130 /* Go from the last insn to the first and delete insns that only set unused
9131 registers or copy a register to itself. As we delete an insn, remove
9132 usage counts for registers it uses. */
77fa0940 9133 for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
7afe21cc
RK
9134 {
9135 int live_insn = 0;
614bb5d4 9136 rtx note;
7afe21cc 9137
77fa0940
RK
9138 prev = prev_real_insn (insn);
9139
614bb5d4
JL
9140 /* Don't delete any insns that are part of a libcall block unless
9141 we can delete the whole libcall block.
9142
77fa0940
RK
9143 Flow or loop might get confused if we did that. Remember
9144 that we are scanning backwards. */
906c4e36 9145 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
614bb5d4
JL
9146 {
9147 in_libcall = 1;
9148 live_insn = 1;
9149 dead_libcall = 0;
e4890d45 9150
614bb5d4
JL
9151 /* See if there's a REG_EQUAL note on this insn and try to
9152 replace the source with the REG_EQUAL expression.
9153
9154 We assume that insns with REG_RETVALs can only be reg->reg
9155 copies at this point. */
9156 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
9157 if (note)
9158 {
9159 rtx set = single_set (insn);
9160 if (set
9161 && validate_change (insn, &SET_SRC (set), XEXP (note, 0), 0))
9162 {
9163 remove_note (insn,
9164 find_reg_note (insn, REG_RETVAL, NULL_RTX));
9165 dead_libcall = 1;
9166 }
9167 }
9168 }
9169 else if (in_libcall)
9170 live_insn = ! dead_libcall;
e4890d45 9171 else if (GET_CODE (PATTERN (insn)) == SET)
7afe21cc
RK
9172 {
9173 if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
9174 && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
9175 ;
9176
d45cf215
RS
9177#ifdef HAVE_cc0
9178 else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
9179 && ! side_effects_p (SET_SRC (PATTERN (insn)))
9180 && ((tem = next_nonnote_insn (insn)) == 0
9181 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
9182 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
9183 ;
9184#endif
7afe21cc
RK
9185 else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
9186 || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
9187 || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
9188 || side_effects_p (SET_SRC (PATTERN (insn))))
9189 live_insn = 1;
9190 }
9191 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
9192 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
9193 {
9194 rtx elt = XVECEXP (PATTERN (insn), 0, i);
9195
9196 if (GET_CODE (elt) == SET)
9197 {
9198 if (GET_CODE (SET_DEST (elt)) == REG
9199 && SET_DEST (elt) == SET_SRC (elt))
9200 ;
9201
d45cf215
RS
9202#ifdef HAVE_cc0
9203 else if (GET_CODE (SET_DEST (elt)) == CC0
9204 && ! side_effects_p (SET_SRC (elt))
9205 && ((tem = next_nonnote_insn (insn)) == 0
9206 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
9207 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
9208 ;
9209#endif
7afe21cc
RK
9210 else if (GET_CODE (SET_DEST (elt)) != REG
9211 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
9212 || counts[REGNO (SET_DEST (elt))] != 0
9213 || side_effects_p (SET_SRC (elt)))
9214 live_insn = 1;
9215 }
9216 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
9217 live_insn = 1;
9218 }
9219 else
9220 live_insn = 1;
9221
9222 /* If this is a dead insn, delete it and show registers in it aren't
e4890d45 9223 being used. */
7afe21cc 9224
e4890d45 9225 if (! live_insn)
7afe21cc 9226 {
79644f06 9227 count_reg_usage (insn, counts, NULL_RTX, -1);
77fa0940 9228 delete_insn (insn);
7afe21cc 9229 }
e4890d45 9230
906c4e36 9231 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
614bb5d4
JL
9232 {
9233 in_libcall = 0;
9234 dead_libcall = 0;
9235 }
7afe21cc
RK
9236 }
9237}