]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cse.c
Merge in gcc2-ss-010999
[thirdparty/gcc.git] / gcc / cse.c
CommitLineData
7afe21cc 1/* Common subexpression elimination for GNU compiler.
747215f1 2 Copyright (C) 1987, 88, 89, 92-7, 1998, 1999 Free Software Foundation, Inc.
7afe21cc
RK
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
940d9d63
RK
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
7afe21cc
RK
20
21
22#include "config.h"
670ee920
KG
23/* stdio.h must precede rtl.h for FFS. */
24#include "system.h"
50b2596f 25#include <setjmp.h>
9c3b4c8b 26
7afe21cc
RK
27#include "rtl.h"
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "flags.h"
31#include "real.h"
32#include "insn-config.h"
33#include "recog.h"
49ad7cfa 34#include "function.h"
956d6950 35#include "expr.h"
50b2596f
KG
36#include "toplev.h"
37#include "output.h"
30f72379 38#include "splay-tree.h"
7afe21cc
RK
39
40/* The basic idea of common subexpression elimination is to go
41 through the code, keeping a record of expressions that would
42 have the same value at the current scan point, and replacing
43 expressions encountered with the cheapest equivalent expression.
44
45 It is too complicated to keep track of the different possibilities
46 when control paths merge; so, at each label, we forget all that is
47 known and start fresh. This can be described as processing each
48 basic block separately. Note, however, that these are not quite
49 the same as the basic blocks found by a later pass and used for
50 data flow analysis and register packing. We do not need to start fresh
51 after a conditional jump instruction if there is no label there.
52
53 We use two data structures to record the equivalent expressions:
54 a hash table for most expressions, and several vectors together
55 with "quantity numbers" to record equivalent (pseudo) registers.
56
57 The use of the special data structure for registers is desirable
58 because it is faster. It is possible because register references
59 contain a fairly small number, the register number, taken from
60 a contiguously allocated series, and two register references are
61 identical if they have the same number. General expressions
62 do not have any such thing, so the only way to retrieve the
63 information recorded on an expression other than a register
64 is to keep it in a hash table.
65
66Registers and "quantity numbers":
67
68 At the start of each basic block, all of the (hardware and pseudo)
69 registers used in the function are given distinct quantity
70 numbers to indicate their contents. During scan, when the code
71 copies one register into another, we copy the quantity number.
72 When a register is loaded in any other way, we allocate a new
73 quantity number to describe the value generated by this operation.
74 `reg_qty' records what quantity a register is currently thought
75 of as containing.
76
77 All real quantity numbers are greater than or equal to `max_reg'.
78 If register N has not been assigned a quantity, reg_qty[N] will equal N.
79
80 Quantity numbers below `max_reg' do not exist and none of the `qty_...'
81 variables should be referenced with an index below `max_reg'.
82
83 We also maintain a bidirectional chain of registers for each
84 quantity number. `qty_first_reg', `qty_last_reg',
85 `reg_next_eqv' and `reg_prev_eqv' hold these chains.
86
87 The first register in a chain is the one whose lifespan is least local.
88 Among equals, it is the one that was seen first.
89 We replace any equivalent register with that one.
90
91 If two registers have the same quantity number, it must be true that
92 REG expressions with `qty_mode' must be in the hash table for both
93 registers and must be in the same class.
94
95 The converse is not true. Since hard registers may be referenced in
96 any mode, two REG expressions might be equivalent in the hash table
97 but not have the same quantity number if the quantity number of one
98 of the registers is not the same mode as those expressions.
99
100Constants and quantity numbers
101
102 When a quantity has a known constant value, that value is stored
103 in the appropriate element of qty_const. This is in addition to
104 putting the constant in the hash table as is usual for non-regs.
105
d45cf215 106 Whether a reg or a constant is preferred is determined by the configuration
7afe21cc
RK
107 macro CONST_COSTS and will often depend on the constant value. In any
108 event, expressions containing constants can be simplified, by fold_rtx.
109
110 When a quantity has a known nearly constant value (such as an address
111 of a stack slot), that value is stored in the appropriate element
112 of qty_const.
113
114 Integer constants don't have a machine mode. However, cse
115 determines the intended machine mode from the destination
116 of the instruction that moves the constant. The machine mode
117 is recorded in the hash table along with the actual RTL
118 constant expression so that different modes are kept separate.
119
120Other expressions:
121
122 To record known equivalences among expressions in general
123 we use a hash table called `table'. It has a fixed number of buckets
124 that contain chains of `struct table_elt' elements for expressions.
125 These chains connect the elements whose expressions have the same
126 hash codes.
127
128 Other chains through the same elements connect the elements which
129 currently have equivalent values.
130
131 Register references in an expression are canonicalized before hashing
132 the expression. This is done using `reg_qty' and `qty_first_reg'.
133 The hash code of a register reference is computed using the quantity
134 number, not the register number.
135
136 When the value of an expression changes, it is necessary to remove from the
137 hash table not just that expression but all expressions whose values
138 could be different as a result.
139
140 1. If the value changing is in memory, except in special cases
141 ANYTHING referring to memory could be changed. That is because
142 nobody knows where a pointer does not point.
143 The function `invalidate_memory' removes what is necessary.
144
145 The special cases are when the address is constant or is
146 a constant plus a fixed register such as the frame pointer
147 or a static chain pointer. When such addresses are stored in,
148 we can tell exactly which other such addresses must be invalidated
149 due to overlap. `invalidate' does this.
150 All expressions that refer to non-constant
151 memory addresses are also invalidated. `invalidate_memory' does this.
152
153 2. If the value changing is a register, all expressions
154 containing references to that register, and only those,
155 must be removed.
156
157 Because searching the entire hash table for expressions that contain
158 a register is very slow, we try to figure out when it isn't necessary.
159 Precisely, this is necessary only when expressions have been
160 entered in the hash table using this register, and then the value has
161 changed, and then another expression wants to be added to refer to
162 the register's new value. This sequence of circumstances is rare
163 within any one basic block.
164
165 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
166 reg_tick[i] is incremented whenever a value is stored in register i.
167 reg_in_table[i] holds -1 if no references to register i have been
168 entered in the table; otherwise, it contains the value reg_tick[i] had
169 when the references were entered. If we want to enter a reference
170 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
171 Until we want to enter a new entry, the mere fact that the two vectors
172 don't match makes the entries be ignored if anyone tries to match them.
173
174 Registers themselves are entered in the hash table as well as in
175 the equivalent-register chains. However, the vectors `reg_tick'
176 and `reg_in_table' do not apply to expressions which are simple
177 register references. These expressions are removed from the table
178 immediately when they become invalid, and this can be done even if
179 we do not immediately search for all the expressions that refer to
180 the register.
181
182 A CLOBBER rtx in an instruction invalidates its operand for further
183 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
184 invalidates everything that resides in memory.
185
186Related expressions:
187
188 Constant expressions that differ only by an additive integer
189 are called related. When a constant expression is put in
190 the table, the related expression with no constant term
191 is also entered. These are made to point at each other
192 so that it is possible to find out if there exists any
193 register equivalent to an expression related to a given expression. */
194
195/* One plus largest register number used in this function. */
196
197static int max_reg;
198
556c714b
JW
199/* One plus largest instruction UID used in this function at time of
200 cse_main call. */
201
202static int max_insn_uid;
203
7afe21cc
RK
204/* Length of vectors indexed by quantity number.
205 We know in advance we will not need a quantity number this big. */
206
207static int max_qty;
208
209/* Next quantity number to be allocated.
210 This is 1 + the largest number needed so far. */
211
212static int next_qty;
213
71d306d1 214/* Indexed by quantity number, gives the first (or last) register
7afe21cc
RK
215 in the chain of registers that currently contain this quantity. */
216
217static int *qty_first_reg;
218static int *qty_last_reg;
219
220/* Index by quantity number, gives the mode of the quantity. */
221
222static enum machine_mode *qty_mode;
223
224/* Indexed by quantity number, gives the rtx of the constant value of the
225 quantity, or zero if it does not have a known value.
226 A sum of the frame pointer (or arg pointer) plus a constant
227 can also be entered here. */
228
229static rtx *qty_const;
230
231/* Indexed by qty number, gives the insn that stored the constant value
232 recorded in `qty_const'. */
233
234static rtx *qty_const_insn;
235
236/* The next three variables are used to track when a comparison between a
237 quantity and some constant or register has been passed. In that case, we
238 know the results of the comparison in case we see it again. These variables
239 record a comparison that is known to be true. */
240
241/* Indexed by qty number, gives the rtx code of a comparison with a known
242 result involving this quantity. If none, it is UNKNOWN. */
243static enum rtx_code *qty_comparison_code;
244
245/* Indexed by qty number, gives the constant being compared against in a
246 comparison of known result. If no such comparison, it is undefined.
247 If the comparison is not with a constant, it is zero. */
248
249static rtx *qty_comparison_const;
250
251/* Indexed by qty number, gives the quantity being compared against in a
252 comparison of known result. If no such comparison, it is undefined.
253 If the comparison is not with a register, it is -1. */
254
255static int *qty_comparison_qty;
256
257#ifdef HAVE_cc0
258/* For machines that have a CC0, we do not record its value in the hash
259 table since its use is guaranteed to be the insn immediately following
260 its definition and any other insn is presumed to invalidate it.
261
262 Instead, we store below the value last assigned to CC0. If it should
263 happen to be a constant, it is stored in preference to the actual
264 assigned value. In case it is a constant, we store the mode in which
265 the constant should be interpreted. */
266
267static rtx prev_insn_cc0;
268static enum machine_mode prev_insn_cc0_mode;
269#endif
270
271/* Previous actual insn. 0 if at first insn of basic block. */
272
273static rtx prev_insn;
274
275/* Insn being scanned. */
276
277static rtx this_insn;
278
71d306d1
DE
279/* Index by register number, gives the number of the next (or
280 previous) register in the chain of registers sharing the same
7afe21cc
RK
281 value.
282
283 Or -1 if this register is at the end of the chain.
284
285 If reg_qty[N] == N, reg_next_eqv[N] is undefined. */
286
287static int *reg_next_eqv;
288static int *reg_prev_eqv;
289
30f72379
MM
/* Per-register bookkeeping for CSE; accessed through the REG_TICK,
   REG_IN_TABLE and REG_QTY macros.  */
struct cse_reg_info {
  union {
    /* The number of times the register has been altered in the current
       basic block.  */
    int reg_tick;

    /* The next cse_reg_info structure in the free list.  Only valid
       while the entry is on cse_reg_info_free_list, so it can share
       storage with reg_tick.  */
    struct cse_reg_info* next;
  } variant;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The quantity number of the register's current contents.  */
  int reg_qty;
};
7afe21cc 309
30f72379
MM
310/* A free list of cse_reg_info entries. */
311static struct cse_reg_info *cse_reg_info_free_list;
7afe21cc 312
30f72379
MM
313/* A mapping from registers to cse_reg_info data structures. */
314static splay_tree cse_reg_info_tree;
7afe21cc 315
30f72379
MM
316/* The last lookup we did into the cse_reg_info_tree. This allows us
317 to cache repeated lookups. */
318static int cached_regno;
319static struct cse_reg_info *cached_cse_reg_info;
7afe21cc
RK
320
321/* A HARD_REG_SET containing all the hard registers for which there is
322 currently a REG expression in the hash table. Note the difference
323 from the above variables, which indicate if the REG is mentioned in some
324 expression in the table. */
325
326static HARD_REG_SET hard_regs_in_table;
327
328/* A HARD_REG_SET containing all the hard registers that are invalidated
329 by a CALL_INSN. */
330
331static HARD_REG_SET regs_invalidated_by_call;
332
7afe21cc
RK
333/* CUID of insn that starts the basic block currently being cse-processed. */
334
335static int cse_basic_block_start;
336
337/* CUID of insn that ends the basic block currently being cse-processed. */
338
339static int cse_basic_block_end;
340
341/* Vector mapping INSN_UIDs to cuids.
d45cf215 342 The cuids are like uids but increase monotonically always.
7afe21cc
RK
343 We use them to see whether a reg is used outside a given basic block. */
344
906c4e36 345static int *uid_cuid;
7afe21cc 346
164c8956
RK
347/* Highest UID in UID_CUID. */
348static int max_uid;
349
7afe21cc
RK
350/* Get the cuid of an insn. */
351
352#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
353
354/* Nonzero if cse has altered conditional jump insns
355 in such a way that jump optimization should be redone. */
356
357static int cse_jumps_altered;
358
a5dfb4ee
RK
359/* Nonzero if we put a LABEL_REF into the hash table. Since we may have put
360 it into an INSN without a REG_LABEL, we have to rerun jump after CSE
361 to put in the note. */
362static int recorded_label_ref;
363
7afe21cc
RK
364/* canon_hash stores 1 in do_not_record
365 if it notices a reference to CC0, PC, or some other volatile
366 subexpression. */
367
368static int do_not_record;
369
7bac1be0
RK
370#ifdef LOAD_EXTEND_OP
371
372/* Scratch rtl used when looking for load-extended copy of a MEM. */
373static rtx memory_extend_rtx;
374#endif
375
7afe21cc
RK
376/* canon_hash stores 1 in hash_arg_in_memory
377 if it notices a reference to memory within the expression being hashed. */
378
379static int hash_arg_in_memory;
380
381/* canon_hash stores 1 in hash_arg_in_struct
382 if it notices a reference to memory that's part of a structure. */
383
384static int hash_arg_in_struct;
385
386/* The hash table contains buckets which are chains of `struct table_elt's,
387 each recording one expression's information.
388 That expression is in the `exp' field.
389
390 Those elements with the same hash code are chained in both directions
391 through the `next_same_hash' and `prev_same_hash' fields.
392
393 Each set of expressions with equivalent values
394 are on a two-way chain through the `next_same_value'
395 and `prev_same_value' fields, and all point with
396 the `first_same_value' field at the first element in
397 that chain. The chain is in order of increasing cost.
398 Each element's cost value is in its `cost' field.
399
400 The `in_memory' field is nonzero for elements that
401 involve any reference to memory. These elements are removed
402 whenever a write is done to an unidentified location in memory.
403 To be safe, we assume that a memory address is unidentified unless
404 the address is either a symbol constant or a constant plus
405 the frame pointer or argument pointer.
406
407 The `in_struct' field is nonzero for elements that
408 involve any reference to memory inside a structure or array.
409
410 The `related_value' field is used to connect related expressions
411 (that differ by adding an integer).
412 The related expressions are chained in a circular fashion.
413 `related_value' is zero for expressions for which this
414 chain is not useful.
415
416 The `cost' field stores the cost of this element's expression.
417
418 The `is_const' flag is set if the element is a constant (including
419 a fixed address).
420
421 The `flag' field is used as a temporary during some search routines.
422
423 The `mode' field is usually the same as GET_MODE (`exp'), but
424 if `exp' is a CONST_INT and has no machine mode then the `mode'
425 field is the mode it was being used as. Each constant is
426 recorded separately for each mode it is used with. */
427
428
struct table_elt
{
  rtx exp;				/* The expression being recorded.  */
  struct table_elt *next_same_hash;	/* Two-way chain through all elements
					   with the same hash code.  */
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;	/* Two-way chain through all elements
					   with equivalent values, in order of
					   increasing cost.  */
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;	/* First element of the same-value
					   chain this element is on.  */
  struct table_elt *related_value;	/* Circular chain of expressions that
					   differ only by an additive integer;
					   zero when that chain is not useful.  */
  int cost;				/* Cost of this element's expression.  */
  enum machine_mode mode;		/* Usually GET_MODE (exp); if exp is a
					   CONST_INT with no machine mode, the
					   mode it was being used as.  */
  char in_memory;			/* Nonzero if exp involves any reference
					   to memory.  */
  char in_struct;			/* Nonzero if that memory reference is
					   inside a structure or array.  */
  char is_const;			/* Nonzero if exp is a constant
					   (including a fixed address).  */
  char flag;				/* Temporary used by search routines.  */
};
445
7afe21cc
RK
446/* We don't want a lot of buckets, because we rarely have very many
447 things stored in the hash table, and a lot of buckets slows
448 down a lot of loops that happen frequently. */
449#define NBUCKETS 31
450
451/* Compute hash code of X in mode M. Special-case case where X is a pseudo
452 register (hard registers may require `do_not_record' to be set). */
453
454#define HASH(X, M) \
455 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
30f72379 456 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) % NBUCKETS \
7afe21cc
RK
457 : canon_hash (X, M) % NBUCKETS)
458
459/* Determine whether register number N is considered a fixed register for CSE.
460 It is desirable to replace other regs with fixed regs, to reduce need for
461 non-fixed hard regs.
462 A reg wins if it is either the frame pointer or designated as fixed,
463 but not if it is an overlapping register. */
464#ifdef OVERLAPPING_REGNO_P
465#define FIXED_REGNO_P(N) \
8bc169f2 466 (((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
6ab832bc 467 || fixed_regs[N] || global_regs[N]) \
7afe21cc
RK
468 && ! OVERLAPPING_REGNO_P ((N)))
469#else
470#define FIXED_REGNO_P(N) \
8bc169f2 471 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
6ab832bc 472 || fixed_regs[N] || global_regs[N])
7afe21cc
RK
473#endif
474
475/* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
ac07e066
RK
476 hard registers and pointers into the frame are the cheapest with a cost
477 of 0. Next come pseudos with a cost of one and other hard registers with
478 a cost of 2. Aside from these special cases, call `rtx_cost'. */
479
6ab832bc 480#define CHEAP_REGNO(N) \
8bc169f2
DE
481 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
482 || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
483 || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
484 || ((N) < FIRST_PSEUDO_REGISTER \
e7bb59fa 485 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
7afe21cc 486
6ab832bc
RK
487/* A register is cheap if it is a user variable assigned to the register
488 or if its register number always corresponds to a cheap register. */
489
490#define CHEAP_REG(N) \
491 ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER) \
492 || CHEAP_REGNO (REGNO (N)))
493
38734e55
ILT
494#define COST(X) \
495 (GET_CODE (X) == REG \
496 ? (CHEAP_REG (X) ? 0 \
497 : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1 \
498 : 2) \
954a5693 499 : notreg_cost(X))
7afe21cc 500
30f72379
MM
/* Get the info associated with register N.  Consults the one-entry
   cache (cached_regno / cached_cse_reg_info) first to avoid repeated
   lookups for the same register; otherwise falls back to
   get_cse_reg_info.  */

#define GET_CSE_REG_INFO(N) \
  (((N) == cached_regno && cached_cse_reg_info) \
   ? cached_cse_reg_info : get_cse_reg_info ((N)))

/* Get the number of times register N has been updated in this
   basic block.  */

#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->variant.reg_tick)

/* Get the REG_TICK value at which register N was last recorded in the
   hash table.  */

#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)

/* Get the quantity number for register N.  */

#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)
519
7afe21cc
RK
520/* Determine if the quantity number for register X represents a valid index
521 into the `qty_...' variables. */
522
30f72379 523#define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (N))
7afe21cc 524
2f541799
MM
525#ifdef ADDRESS_COST
526/* The ADDRESS_COST macro does not deal with ADDRESSOF nodes. But,
527 during CSE, such nodes are present. Using an ADDRESSOF node which
528 refers to the address of a REG is a good thing because we can then
529 turn (MEM (ADDRESSSOF (REG))) into just plain REG. */
530#define CSE_ADDRESS_COST(RTX) \
531 ((GET_CODE (RTX) == ADDRESSOF && REG_P (XEXP ((RTX), 0))) \
532 ? -1 : ADDRESS_COST(RTX))
533#endif
534
7afe21cc
RK
535static struct table_elt *table[NBUCKETS];
536
537/* Chain of `struct table_elt's made so far for this function
538 but currently removed from the table. */
539
540static struct table_elt *free_element_chain;
541
542/* Number of `struct table_elt' structures made so far for this function. */
543
544static int n_elements_made;
545
546/* Maximum value `n_elements_made' has had so far in this compilation
547 for functions previously processed. */
548
549static int max_elements_made;
550
551/* Surviving equivalence class when two equivalence classes are merged
552 by recording the effects of a jump in the last insn. Zero if the
553 last insn was not a conditional jump. */
554
555static struct table_elt *last_jump_equiv_class;
556
557/* Set to the cost of a constant pool reference if one was found for a
558 symbolic constant. If this was found, it means we should try to
559 convert constants into constant pool entries if they don't fit in
560 the insn. */
561
562static int constant_pool_entries_cost;
563
6cd4575e
RK
564/* Define maximum length of a branch path. */
565
566#define PATHLENGTH 10
567
/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data {
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path {
    /* The branch insn.  */
    rtx branch;
    /* Whether it should be taken or not.  AROUND is the same as taken
       except that it is used when the destination label is not preceded
       by a BARRIER.  */
    enum taken {TAKEN, NOT_TAKEN, AROUND} status;
  } path[PATHLENGTH];
};
591
7afe21cc
RK
592/* Nonzero if X has the form (PLUS frame-pointer integer). We check for
593 virtual regs here because the simplify_*_operation routines are called
594 by integrate.c, which is called before virtual register instantiation. */
595
596#define FIXED_BASE_PLUS_P(X) \
8bc169f2
DE
597 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
598 || (X) == arg_pointer_rtx \
7afe21cc
RK
599 || (X) == virtual_stack_vars_rtx \
600 || (X) == virtual_incoming_args_rtx \
601 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
602 && (XEXP (X, 0) == frame_pointer_rtx \
8bc169f2 603 || XEXP (X, 0) == hard_frame_pointer_rtx \
7afe21cc
RK
604 || XEXP (X, 0) == arg_pointer_rtx \
605 || XEXP (X, 0) == virtual_stack_vars_rtx \
e9a25f70
JL
606 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
607 || GET_CODE (X) == ADDRESSOF)
7afe21cc 608
6f90e075
JW
609/* Similar, but also allows reference to the stack pointer.
610
611 This used to include FIXED_BASE_PLUS_P, however, we can't assume that
612 arg_pointer_rtx by itself is nonzero, because on at least one machine,
613 the i960, the arg pointer is zero when it is unused. */
7afe21cc
RK
614
615#define NONZERO_BASE_PLUS_P(X) \
8bc169f2 616 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
6f90e075
JW
617 || (X) == virtual_stack_vars_rtx \
618 || (X) == virtual_incoming_args_rtx \
619 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
620 && (XEXP (X, 0) == frame_pointer_rtx \
8bc169f2 621 || XEXP (X, 0) == hard_frame_pointer_rtx \
6f90e075
JW
622 || XEXP (X, 0) == arg_pointer_rtx \
623 || XEXP (X, 0) == virtual_stack_vars_rtx \
624 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
7afe21cc
RK
625 || (X) == stack_pointer_rtx \
626 || (X) == virtual_stack_dynamic_rtx \
627 || (X) == virtual_outgoing_args_rtx \
628 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
629 && (XEXP (X, 0) == stack_pointer_rtx \
630 || XEXP (X, 0) == virtual_stack_dynamic_rtx \
e9a25f70
JL
631 || XEXP (X, 0) == virtual_outgoing_args_rtx)) \
632 || GET_CODE (X) == ADDRESSOF)
7afe21cc 633
954a5693 634static int notreg_cost PROTO((rtx));
6cd4575e
RK
635static void new_basic_block PROTO((void));
636static void make_new_qty PROTO((int));
637static void make_regs_eqv PROTO((int, int));
638static void delete_reg_equiv PROTO((int));
639static int mention_regs PROTO((rtx));
640static int insert_regs PROTO((rtx, struct table_elt *, int));
641static void free_element PROTO((struct table_elt *));
2197a88a 642static void remove_from_table PROTO((struct table_elt *, unsigned));
6cd4575e 643static struct table_elt *get_element PROTO((void));
2197a88a
RK
644static struct table_elt *lookup PROTO((rtx, unsigned, enum machine_mode)),
645 *lookup_for_remove PROTO((rtx, unsigned, enum machine_mode));
6cd4575e 646static rtx lookup_as_function PROTO((rtx, enum rtx_code));
2197a88a 647static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
6cd4575e
RK
648 enum machine_mode));
649static void merge_equiv_classes PROTO((struct table_elt *,
650 struct table_elt *));
68c1e173 651static void invalidate PROTO((rtx, enum machine_mode));
9ae8ffe7 652static int cse_rtx_varies_p PROTO((rtx));
6cd4575e 653static void remove_invalid_refs PROTO((int));
34c73909 654static void remove_invalid_subreg_refs PROTO((int, int, enum machine_mode));
6cd4575e 655static void rehash_using_reg PROTO((rtx));
9ae8ffe7 656static void invalidate_memory PROTO((void));
6cd4575e
RK
657static void invalidate_for_call PROTO((void));
658static rtx use_related_value PROTO((rtx, struct table_elt *));
2197a88a
RK
659static unsigned canon_hash PROTO((rtx, enum machine_mode));
660static unsigned safe_hash PROTO((rtx, enum machine_mode));
6cd4575e 661static int exp_equiv_p PROTO((rtx, rtx, int, int));
f451db89 662static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
6500fb43
RK
663 HOST_WIDE_INT *,
664 HOST_WIDE_INT *));
6cd4575e 665static int refers_to_p PROTO((rtx, rtx));
6cd4575e
RK
666static rtx canon_reg PROTO((rtx, rtx));
667static void find_best_addr PROTO((rtx, rtx *));
668static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
669 enum machine_mode *,
670 enum machine_mode *));
96b0e481
RK
671static rtx cse_gen_binary PROTO((enum rtx_code, enum machine_mode,
672 rtx, rtx));
673static rtx simplify_plus_minus PROTO((enum rtx_code, enum machine_mode,
674 rtx, rtx));
6cd4575e
RK
675static rtx fold_rtx PROTO((rtx, rtx));
676static rtx equiv_constant PROTO((rtx));
677static void record_jump_equiv PROTO((rtx, int));
678static void record_jump_cond PROTO((enum rtx_code, enum machine_mode,
679 rtx, rtx, int));
7bd8b2a8 680static void cse_insn PROTO((rtx, rtx));
9ae8ffe7
JL
681static int note_mem_written PROTO((rtx));
682static void invalidate_from_clobbers PROTO((rtx));
6cd4575e
RK
683static rtx cse_process_notes PROTO((rtx, rtx));
684static void cse_around_loop PROTO((rtx));
685static void invalidate_skipped_set PROTO((rtx, rtx));
686static void invalidate_skipped_block PROTO((rtx));
687static void cse_check_loop_start PROTO((rtx, rtx));
688static void cse_set_around_loop PROTO((rtx, rtx, rtx));
689static rtx cse_basic_block PROTO((rtx, rtx, struct branch_path *, int));
79644f06 690static void count_reg_usage PROTO((rtx, int *, rtx, int));
a0153051 691extern void dump_class PROTO((struct table_elt*));
1a87eea2 692static void check_fold_consts PROTO((PTR));
30f72379
MM
693static struct cse_reg_info* get_cse_reg_info PROTO((int));
694static void free_cse_reg_info PROTO((splay_tree_value));
01e752d3 695static void flush_hash_table PROTO((void));
7afe21cc 696\f
a4c6502a
MM
697/* Dump the expressions in the equivalence class indicated by CLASSP.
698 This function is used only for debugging. */
a0153051 699void
a4c6502a
MM
700dump_class (classp)
701 struct table_elt *classp;
702{
703 struct table_elt *elt;
704
705 fprintf (stderr, "Equivalence chain for ");
706 print_rtl (stderr, classp->exp);
707 fprintf (stderr, ": \n");
708
709 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
710 {
711 print_rtl (stderr, elt->exp);
712 fprintf (stderr, "\n");
713 }
714}
715
7afe21cc
RK
716/* Return an estimate of the cost of computing rtx X.
717 One use is in cse, to decide which expression to keep in the hash table.
718 Another is in rtl generation, to pick the cheapest way to multiply.
719 Other uses like the latter are expected in the future. */
720
954a5693
RK
721/* Internal function, to compute cost when X is not a register; called
722 from COST macro to keep it simple. */
723
724static int
725notreg_cost (x)
726 rtx x;
727{
728 return ((GET_CODE (x) == SUBREG
729 && GET_CODE (SUBREG_REG (x)) == REG
730 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
731 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
732 && (GET_MODE_SIZE (GET_MODE (x))
733 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
734 && subreg_lowpart_p (x)
735 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
736 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
737 ? (CHEAP_REG (SUBREG_REG (x)) ? 0
738 : (REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER ? 1
739 : 2))
740 : rtx_cost (x, SET) * 2);
741}
742
7afe21cc
RK
743/* Return the right cost to give to an operation
744 to make the cost of the corresponding register-to-register instruction
745 N times that of a fast register-to-register instruction. */
746
747#define COSTS_N_INSNS(N) ((N) * 4 - 2)
748
749int
e5f6a288 750rtx_cost (x, outer_code)
7afe21cc 751 rtx x;
79c9824e 752 enum rtx_code outer_code ATTRIBUTE_UNUSED;
7afe21cc
RK
753{
754 register int i, j;
755 register enum rtx_code code;
6f7d635c 756 register const char *fmt;
7afe21cc
RK
757 register int total;
758
759 if (x == 0)
760 return 0;
761
762 /* Compute the default costs of certain things.
763 Note that RTX_COSTS can override the defaults. */
764
765 code = GET_CODE (x);
766 switch (code)
767 {
768 case MULT:
769 /* Count multiplication by 2**n as a shift,
770 because if we are considering it, we would output it as a shift. */
771 if (GET_CODE (XEXP (x, 1)) == CONST_INT
772 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
773 total = 2;
774 else
775 total = COSTS_N_INSNS (5);
776 break;
777 case DIV:
778 case UDIV:
779 case MOD:
780 case UMOD:
781 total = COSTS_N_INSNS (7);
782 break;
783 case USE:
784 /* Used in loop.c and combine.c as a marker. */
785 total = 0;
786 break;
538b78e7
RS
787 case ASM_OPERANDS:
788 /* We don't want these to be used in substitutions because
789 we have no way of validating the resulting insn. So assign
790 anything containing an ASM_OPERANDS a very high cost. */
791 total = 1000;
792 break;
7afe21cc
RK
793 default:
794 total = 2;
795 }
796
797 switch (code)
798 {
799 case REG:
6ab832bc 800 return ! CHEAP_REG (x);
ac07e066 801
7afe21cc 802 case SUBREG:
fc3ffe83
RK
803 /* If we can't tie these modes, make this expensive. The larger
804 the mode, the more expensive it is. */
805 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
806 return COSTS_N_INSNS (2
807 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
7afe21cc
RK
808 return 2;
809#ifdef RTX_COSTS
e5f6a288 810 RTX_COSTS (x, code, outer_code);
7afe21cc 811#endif
47a0b68f 812#ifdef CONST_COSTS
e5f6a288 813 CONST_COSTS (x, code, outer_code);
47a0b68f 814#endif
8625fab5
KG
815
816 default:
817#ifdef DEFAULT_RTX_COSTS
818 DEFAULT_RTX_COSTS(x, code, outer_code);
819#endif
820 break;
7afe21cc
RK
821 }
822
823 /* Sum the costs of the sub-rtx's, plus cost of this operation,
824 which is already in total. */
825
826 fmt = GET_RTX_FORMAT (code);
827 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
828 if (fmt[i] == 'e')
e5f6a288 829 total += rtx_cost (XEXP (x, i), code);
7afe21cc
RK
830 else if (fmt[i] == 'E')
831 for (j = 0; j < XVECLEN (x, i); j++)
e5f6a288 832 total += rtx_cost (XVECEXP (x, i, j), code);
7afe21cc
RK
833
834 return total;
835}
836\f
30f72379
MM
837static struct cse_reg_info *
838get_cse_reg_info (regno)
839 int regno;
840{
841 struct cse_reg_info *cri;
842 splay_tree_node n;
843
844 /* See if we already have this entry. */
845 n = splay_tree_lookup (cse_reg_info_tree,
846 (splay_tree_key) regno);
847 if (n)
848 cri = (struct cse_reg_info *) (n->value);
849 else
850 {
851 /* Get a new cse_reg_info structure. */
852 if (cse_reg_info_free_list)
853 {
854 cri = cse_reg_info_free_list;
855 cse_reg_info_free_list = cri->variant.next;
856 }
857 else
858 cri = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));
859
860 /* Initialize it. */
861 cri->variant.reg_tick = 0;
862 cri->reg_in_table = -1;
863 cri->reg_qty = regno;
864
865 splay_tree_insert (cse_reg_info_tree,
866 (splay_tree_key) regno,
867 (splay_tree_value) cri);
868 }
869
870 /* Cache this lookup; we tend to be looking up information about the
871 same register several times in a row. */
872 cached_regno = regno;
873 cached_cse_reg_info = cri;
874
875 return cri;
876}
877
878static void
879free_cse_reg_info (v)
880 splay_tree_value v;
881{
882 struct cse_reg_info *cri = (struct cse_reg_info *) v;
883
884 cri->variant.next = cse_reg_info_free_list;
885 cse_reg_info_free_list = cri;
886}
887
7afe21cc
RK
888/* Clear the hash table and initialize each register with its own quantity,
889 for a new basic block. */
890
891static void
892new_basic_block ()
893{
894 register int i;
895
896 next_qty = max_reg;
897
30f72379
MM
898 if (cse_reg_info_tree)
899 {
900 splay_tree_delete (cse_reg_info_tree);
901 cached_cse_reg_info = 0;
902 }
903
904 cse_reg_info_tree = splay_tree_new (splay_tree_compare_ints, 0,
905 free_cse_reg_info);
7afe21cc 906
7afe21cc
RK
907 CLEAR_HARD_REG_SET (hard_regs_in_table);
908
909 /* The per-quantity values used to be initialized here, but it is
910 much faster to initialize each as it is made in `make_new_qty'. */
911
912 for (i = 0; i < NBUCKETS; i++)
913 {
914 register struct table_elt *this, *next;
915 for (this = table[i]; this; this = next)
916 {
917 next = this->next_same_hash;
918 free_element (this);
919 }
920 }
921
4c9a05bc 922 bzero ((char *) table, sizeof table);
7afe21cc
RK
923
924 prev_insn = 0;
925
926#ifdef HAVE_cc0
927 prev_insn_cc0 = 0;
928#endif
929}
930
931/* Say that register REG contains a quantity not in any register before
932 and initialize that quantity. */
933
934static void
935make_new_qty (reg)
936 register int reg;
937{
938 register int q;
939
940 if (next_qty >= max_qty)
941 abort ();
942
30f72379 943 q = REG_QTY (reg) = next_qty++;
7afe21cc
RK
944 qty_first_reg[q] = reg;
945 qty_last_reg[q] = reg;
946 qty_const[q] = qty_const_insn[q] = 0;
947 qty_comparison_code[q] = UNKNOWN;
948
949 reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
950}
951
952/* Make reg NEW equivalent to reg OLD.
953 OLD is not changing; NEW is. */
954
955static void
956make_regs_eqv (new, old)
957 register int new, old;
958{
959 register int lastr, firstr;
30f72379 960 register int q = REG_QTY (old);
7afe21cc
RK
961
962 /* Nothing should become eqv until it has a "non-invalid" qty number. */
963 if (! REGNO_QTY_VALID_P (old))
964 abort ();
965
30f72379 966 REG_QTY (new) = q;
7afe21cc
RK
967 firstr = qty_first_reg[q];
968 lastr = qty_last_reg[q];
969
970 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
971 hard regs. Among pseudos, if NEW will live longer than any other reg
972 of the same qty, and that is beyond the current basic block,
973 make it the new canonical replacement for this qty. */
974 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
975 /* Certain fixed registers might be of the class NO_REGS. This means
976 that not only can they not be allocated by the compiler, but
830a38ee 977 they cannot be used in substitutions or canonicalizations
7afe21cc
RK
978 either. */
979 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
980 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
981 || (new >= FIRST_PSEUDO_REGISTER
982 && (firstr < FIRST_PSEUDO_REGISTER
b1f21e0a
MM
983 || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
984 || (uid_cuid[REGNO_FIRST_UID (new)]
7afe21cc 985 < cse_basic_block_start))
b1f21e0a
MM
986 && (uid_cuid[REGNO_LAST_UID (new)]
987 > uid_cuid[REGNO_LAST_UID (firstr)]))))))
7afe21cc
RK
988 {
989 reg_prev_eqv[firstr] = new;
990 reg_next_eqv[new] = firstr;
991 reg_prev_eqv[new] = -1;
992 qty_first_reg[q] = new;
993 }
994 else
995 {
996 /* If NEW is a hard reg (known to be non-fixed), insert at end.
997 Otherwise, insert before any non-fixed hard regs that are at the
998 end. Registers of class NO_REGS cannot be used as an
999 equivalent for anything. */
1000 while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
1001 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
1002 && new >= FIRST_PSEUDO_REGISTER)
1003 lastr = reg_prev_eqv[lastr];
1004 reg_next_eqv[new] = reg_next_eqv[lastr];
1005 if (reg_next_eqv[lastr] >= 0)
1006 reg_prev_eqv[reg_next_eqv[lastr]] = new;
1007 else
1008 qty_last_reg[q] = new;
1009 reg_next_eqv[lastr] = new;
1010 reg_prev_eqv[new] = lastr;
1011 }
1012}
1013
1014/* Remove REG from its equivalence class. */
1015
1016static void
1017delete_reg_equiv (reg)
1018 register int reg;
1019{
30f72379 1020 register int q = REG_QTY (reg);
a4e262bc 1021 register int p, n;
7afe21cc 1022
a4e262bc 1023 /* If invalid, do nothing. */
7afe21cc
RK
1024 if (q == reg)
1025 return;
1026
a4e262bc
RK
1027 p = reg_prev_eqv[reg];
1028 n = reg_next_eqv[reg];
1029
7afe21cc
RK
1030 if (n != -1)
1031 reg_prev_eqv[n] = p;
1032 else
1033 qty_last_reg[q] = p;
1034 if (p != -1)
1035 reg_next_eqv[p] = n;
1036 else
1037 qty_first_reg[q] = n;
1038
30f72379 1039 REG_QTY (reg) = reg;
7afe21cc
RK
1040}
1041
1042/* Remove any invalid expressions from the hash table
1043 that refer to any of the registers contained in expression X.
1044
1045 Make sure that newly inserted references to those registers
1046 as subexpressions will be considered valid.
1047
1048 mention_regs is not called when a register itself
1049 is being stored in the table.
1050
1051 Return 1 if we have done something that may have changed the hash code
1052 of X. */
1053
1054static int
1055mention_regs (x)
1056 rtx x;
1057{
1058 register enum rtx_code code;
1059 register int i, j;
6f7d635c 1060 register const char *fmt;
7afe21cc
RK
1061 register int changed = 0;
1062
1063 if (x == 0)
e5f6a288 1064 return 0;
7afe21cc
RK
1065
1066 code = GET_CODE (x);
1067 if (code == REG)
1068 {
1069 register int regno = REGNO (x);
1070 register int endregno
1071 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1072 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
1073 int i;
1074
1075 for (i = regno; i < endregno; i++)
1076 {
30f72379 1077 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
7afe21cc
RK
1078 remove_invalid_refs (i);
1079
30f72379 1080 REG_IN_TABLE (i) = REG_TICK (i);
7afe21cc
RK
1081 }
1082
1083 return 0;
1084 }
1085
34c73909
R
1086 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1087 pseudo if they don't use overlapping words. We handle only pseudos
1088 here for simplicity. */
1089 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1090 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1091 {
1092 int i = REGNO (SUBREG_REG (x));
1093
30f72379 1094 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
34c73909
R
1095 {
1096 /* If reg_tick has been incremented more than once since
1097 reg_in_table was last set, that means that the entire
1098 register has been set before, so discard anything memorized
1099 for the entrire register, including all SUBREG expressions. */
30f72379 1100 if (REG_IN_TABLE (i) != REG_TICK (i) - 1)
34c73909
R
1101 remove_invalid_refs (i);
1102 else
1103 remove_invalid_subreg_refs (i, SUBREG_WORD (x), GET_MODE (x));
1104 }
1105
30f72379 1106 REG_IN_TABLE (i) = REG_TICK (i);
34c73909
R
1107 return 0;
1108 }
1109
7afe21cc
RK
1110 /* If X is a comparison or a COMPARE and either operand is a register
1111 that does not have a quantity, give it one. This is so that a later
1112 call to record_jump_equiv won't cause X to be assigned a different
1113 hash code and not found in the table after that call.
1114
1115 It is not necessary to do this here, since rehash_using_reg can
1116 fix up the table later, but doing this here eliminates the need to
1117 call that expensive function in the most common case where the only
1118 use of the register is in the comparison. */
1119
1120 if (code == COMPARE || GET_RTX_CLASS (code) == '<')
1121 {
1122 if (GET_CODE (XEXP (x, 0)) == REG
1123 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
906c4e36 1124 if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
7afe21cc
RK
1125 {
1126 rehash_using_reg (XEXP (x, 0));
1127 changed = 1;
1128 }
1129
1130 if (GET_CODE (XEXP (x, 1)) == REG
1131 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
906c4e36 1132 if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
7afe21cc
RK
1133 {
1134 rehash_using_reg (XEXP (x, 1));
1135 changed = 1;
1136 }
1137 }
1138
1139 fmt = GET_RTX_FORMAT (code);
1140 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1141 if (fmt[i] == 'e')
1142 changed |= mention_regs (XEXP (x, i));
1143 else if (fmt[i] == 'E')
1144 for (j = 0; j < XVECLEN (x, i); j++)
1145 changed |= mention_regs (XVECEXP (x, i, j));
1146
1147 return changed;
1148}
1149
1150/* Update the register quantities for inserting X into the hash table
1151 with a value equivalent to CLASSP.
1152 (If the class does not contain a REG, it is irrelevant.)
1153 If MODIFIED is nonzero, X is a destination; it is being modified.
1154 Note that delete_reg_equiv should be called on a register
1155 before insert_regs is done on that register with MODIFIED != 0.
1156
1157 Nonzero value means that elements of reg_qty have changed
1158 so X's hash code may be different. */
1159
1160static int
1161insert_regs (x, classp, modified)
1162 rtx x;
1163 struct table_elt *classp;
1164 int modified;
1165{
1166 if (GET_CODE (x) == REG)
1167 {
1168 register int regno = REGNO (x);
1169
1ff0c00d
RK
1170 /* If REGNO is in the equivalence table already but is of the
1171 wrong mode for that equivalence, don't do anything here. */
1172
1173 if (REGNO_QTY_VALID_P (regno)
30f72379 1174 && qty_mode[REG_QTY (regno)] != GET_MODE (x))
1ff0c00d
RK
1175 return 0;
1176
1177 if (modified || ! REGNO_QTY_VALID_P (regno))
7afe21cc
RK
1178 {
1179 if (classp)
1180 for (classp = classp->first_same_value;
1181 classp != 0;
1182 classp = classp->next_same_value)
1183 if (GET_CODE (classp->exp) == REG
1184 && GET_MODE (classp->exp) == GET_MODE (x))
1185 {
1186 make_regs_eqv (regno, REGNO (classp->exp));
1187 return 1;
1188 }
1189
1190 make_new_qty (regno);
30f72379 1191 qty_mode[REG_QTY (regno)] = GET_MODE (x);
7afe21cc
RK
1192 return 1;
1193 }
cdf4112f
TG
1194
1195 return 0;
7afe21cc 1196 }
c610adec
RK
1197
1198 /* If X is a SUBREG, we will likely be inserting the inner register in the
1199 table. If that register doesn't have an assigned quantity number at
1200 this point but does later, the insertion that we will be doing now will
1201 not be accessible because its hash code will have changed. So assign
1202 a quantity number now. */
1203
1204 else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1205 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1206 {
34c73909
R
1207 int regno = REGNO (SUBREG_REG (x));
1208
906c4e36 1209 insert_regs (SUBREG_REG (x), NULL_PTR, 0);
34c73909
R
1210 /* Mention_regs checks if REG_TICK is exactly one larger than
1211 REG_IN_TABLE to find out if there was only a single preceding
1212 invalidation - for the SUBREG - or another one, which would be
1213 for the full register. Since we don't invalidate the SUBREG
1214 here first, we might have to bump up REG_TICK so that mention_regs
1215 will do the right thing. */
30f72379
MM
1216 if (REG_IN_TABLE (regno) >= 0
1217 && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1218 REG_TICK (regno)++;
34c73909 1219 mention_regs (x);
c610adec
RK
1220 return 1;
1221 }
7afe21cc
RK
1222 else
1223 return mention_regs (x);
1224}
1225\f
1226/* Look in or update the hash table. */
1227
1228/* Put the element ELT on the list of free elements. */
1229
1230static void
1231free_element (elt)
1232 struct table_elt *elt;
1233{
1234 elt->next_same_hash = free_element_chain;
1235 free_element_chain = elt;
1236}
1237
1238/* Return an element that is free for use. */
1239
1240static struct table_elt *
1241get_element ()
1242{
1243 struct table_elt *elt = free_element_chain;
1244 if (elt)
1245 {
1246 free_element_chain = elt->next_same_hash;
1247 return elt;
1248 }
1249 n_elements_made++;
1250 return (struct table_elt *) oballoc (sizeof (struct table_elt));
1251}
1252
1253/* Remove table element ELT from use in the table.
1254 HASH is its hash code, made using the HASH macro.
1255 It's an argument because often that is known in advance
1256 and we save much time not recomputing it. */
1257
1258static void
1259remove_from_table (elt, hash)
1260 register struct table_elt *elt;
2197a88a 1261 unsigned hash;
7afe21cc
RK
1262{
1263 if (elt == 0)
1264 return;
1265
1266 /* Mark this element as removed. See cse_insn. */
1267 elt->first_same_value = 0;
1268
1269 /* Remove the table element from its equivalence class. */
1270
1271 {
1272 register struct table_elt *prev = elt->prev_same_value;
1273 register struct table_elt *next = elt->next_same_value;
1274
1275 if (next) next->prev_same_value = prev;
1276
1277 if (prev)
1278 prev->next_same_value = next;
1279 else
1280 {
1281 register struct table_elt *newfirst = next;
1282 while (next)
1283 {
1284 next->first_same_value = newfirst;
1285 next = next->next_same_value;
1286 }
1287 }
1288 }
1289
1290 /* Remove the table element from its hash bucket. */
1291
1292 {
1293 register struct table_elt *prev = elt->prev_same_hash;
1294 register struct table_elt *next = elt->next_same_hash;
1295
1296 if (next) next->prev_same_hash = prev;
1297
1298 if (prev)
1299 prev->next_same_hash = next;
1300 else if (table[hash] == elt)
1301 table[hash] = next;
1302 else
1303 {
1304 /* This entry is not in the proper hash bucket. This can happen
1305 when two classes were merged by `merge_equiv_classes'. Search
1306 for the hash bucket that it heads. This happens only very
1307 rarely, so the cost is acceptable. */
1308 for (hash = 0; hash < NBUCKETS; hash++)
1309 if (table[hash] == elt)
1310 table[hash] = next;
1311 }
1312 }
1313
1314 /* Remove the table element from its related-value circular chain. */
1315
1316 if (elt->related_value != 0 && elt->related_value != elt)
1317 {
1318 register struct table_elt *p = elt->related_value;
1319 while (p->related_value != elt)
1320 p = p->related_value;
1321 p->related_value = elt->related_value;
1322 if (p->related_value == p)
1323 p->related_value = 0;
1324 }
1325
1326 free_element (elt);
1327}
1328
1329/* Look up X in the hash table and return its table element,
1330 or 0 if X is not in the table.
1331
1332 MODE is the machine-mode of X, or if X is an integer constant
1333 with VOIDmode then MODE is the mode with which X will be used.
1334
1335 Here we are satisfied to find an expression whose tree structure
1336 looks like X. */
1337
1338static struct table_elt *
1339lookup (x, hash, mode)
1340 rtx x;
2197a88a 1341 unsigned hash;
7afe21cc
RK
1342 enum machine_mode mode;
1343{
1344 register struct table_elt *p;
1345
1346 for (p = table[hash]; p; p = p->next_same_hash)
1347 if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1348 || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1349 return p;
1350
1351 return 0;
1352}
1353
1354/* Like `lookup' but don't care whether the table element uses invalid regs.
1355 Also ignore discrepancies in the machine mode of a register. */
1356
1357static struct table_elt *
1358lookup_for_remove (x, hash, mode)
1359 rtx x;
2197a88a 1360 unsigned hash;
7afe21cc
RK
1361 enum machine_mode mode;
1362{
1363 register struct table_elt *p;
1364
1365 if (GET_CODE (x) == REG)
1366 {
1367 int regno = REGNO (x);
1368 /* Don't check the machine mode when comparing registers;
1369 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1370 for (p = table[hash]; p; p = p->next_same_hash)
1371 if (GET_CODE (p->exp) == REG
1372 && REGNO (p->exp) == regno)
1373 return p;
1374 }
1375 else
1376 {
1377 for (p = table[hash]; p; p = p->next_same_hash)
1378 if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1379 return p;
1380 }
1381
1382 return 0;
1383}
1384
1385/* Look for an expression equivalent to X and with code CODE.
1386 If one is found, return that expression. */
1387
1388static rtx
1389lookup_as_function (x, code)
1390 rtx x;
1391 enum rtx_code code;
1392{
1393 register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
1394 GET_MODE (x));
34c73909
R
1395 /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1396 long as we are narrowing. So if we looked in vain for a mode narrower
1397 than word_mode before, look for word_mode now. */
1398 if (p == 0 && code == CONST_INT
1399 && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1400 {
1401 x = copy_rtx (x);
1402 PUT_MODE (x, word_mode);
1403 p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS, word_mode);
1404 }
1405
7afe21cc
RK
1406 if (p == 0)
1407 return 0;
1408
1409 for (p = p->first_same_value; p; p = p->next_same_value)
1410 {
1411 if (GET_CODE (p->exp) == code
1412 /* Make sure this is a valid entry in the table. */
1413 && exp_equiv_p (p->exp, p->exp, 1, 0))
1414 return p->exp;
1415 }
1416
1417 return 0;
1418}
1419
1420/* Insert X in the hash table, assuming HASH is its hash code
1421 and CLASSP is an element of the class it should go in
1422 (or 0 if a new class should be made).
1423 It is inserted at the proper position to keep the class in
1424 the order cheapest first.
1425
1426 MODE is the machine-mode of X, or if X is an integer constant
1427 with VOIDmode then MODE is the mode with which X will be used.
1428
1429 For elements of equal cheapness, the most recent one
1430 goes in front, except that the first element in the list
1431 remains first unless a cheaper element is added. The order of
1432 pseudo-registers does not matter, as canon_reg will be called to
830a38ee 1433 find the cheapest when a register is retrieved from the table.
7afe21cc
RK
1434
1435 The in_memory field in the hash table element is set to 0.
1436 The caller must set it nonzero if appropriate.
1437
1438 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1439 and if insert_regs returns a nonzero value
1440 you must then recompute its hash code before calling here.
1441
1442 If necessary, update table showing constant values of quantities. */
1443
1444#define CHEAPER(X,Y) ((X)->cost < (Y)->cost)
1445
1446static struct table_elt *
1447insert (x, classp, hash, mode)
1448 register rtx x;
1449 register struct table_elt *classp;
2197a88a 1450 unsigned hash;
7afe21cc
RK
1451 enum machine_mode mode;
1452{
1453 register struct table_elt *elt;
1454
1455 /* If X is a register and we haven't made a quantity for it,
1456 something is wrong. */
1457 if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1458 abort ();
1459
1460 /* If X is a hard register, show it is being put in the table. */
1461 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1462 {
1463 int regno = REGNO (x);
1464 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1465 int i;
1466
1467 for (i = regno; i < endregno; i++)
1468 SET_HARD_REG_BIT (hard_regs_in_table, i);
1469 }
1470
a5dfb4ee 1471 /* If X is a label, show we recorded it. */
970c9ace
RK
1472 if (GET_CODE (x) == LABEL_REF
1473 || (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
1474 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF))
a5dfb4ee 1475 recorded_label_ref = 1;
7afe21cc
RK
1476
1477 /* Put an element for X into the right hash bucket. */
1478
1479 elt = get_element ();
1480 elt->exp = x;
1481 elt->cost = COST (x);
1482 elt->next_same_value = 0;
1483 elt->prev_same_value = 0;
1484 elt->next_same_hash = table[hash];
1485 elt->prev_same_hash = 0;
1486 elt->related_value = 0;
1487 elt->in_memory = 0;
1488 elt->mode = mode;
1489 elt->is_const = (CONSTANT_P (x)
1490 /* GNU C++ takes advantage of this for `this'
1491 (and other const values). */
1492 || (RTX_UNCHANGING_P (x)
1493 && GET_CODE (x) == REG
1494 && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1495 || FIXED_BASE_PLUS_P (x));
1496
1497 if (table[hash])
1498 table[hash]->prev_same_hash = elt;
1499 table[hash] = elt;
1500
1501 /* Put it into the proper value-class. */
1502 if (classp)
1503 {
1504 classp = classp->first_same_value;
1505 if (CHEAPER (elt, classp))
1506 /* Insert at the head of the class */
1507 {
1508 register struct table_elt *p;
1509 elt->next_same_value = classp;
1510 classp->prev_same_value = elt;
1511 elt->first_same_value = elt;
1512
1513 for (p = classp; p; p = p->next_same_value)
1514 p->first_same_value = elt;
1515 }
1516 else
1517 {
1518 /* Insert not at head of the class. */
1519 /* Put it after the last element cheaper than X. */
1520 register struct table_elt *p, *next;
1521 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1522 p = next);
1523 /* Put it after P and before NEXT. */
1524 elt->next_same_value = next;
1525 if (next)
1526 next->prev_same_value = elt;
1527 elt->prev_same_value = p;
1528 p->next_same_value = elt;
1529 elt->first_same_value = classp;
1530 }
1531 }
1532 else
1533 elt->first_same_value = elt;
1534
1535 /* If this is a constant being set equivalent to a register or a register
1536 being set equivalent to a constant, note the constant equivalence.
1537
1538 If this is a constant, it cannot be equivalent to a different constant,
1539 and a constant is the only thing that can be cheaper than a register. So
1540 we know the register is the head of the class (before the constant was
1541 inserted).
1542
1543 If this is a register that is not already known equivalent to a
1544 constant, we must check the entire class.
1545
1546 If this is a register that is already known equivalent to an insn,
1547 update `qty_const_insn' to show that `this_insn' is the latest
1548 insn making that quantity equivalent to the constant. */
1549
f353588a
RK
1550 if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1551 && GET_CODE (x) != REG)
7afe21cc 1552 {
30f72379
MM
1553 qty_const[REG_QTY (REGNO (classp->exp))]
1554 = gen_lowpart_if_possible (qty_mode[REG_QTY (REGNO (classp->exp))], x);
1555 qty_const_insn[REG_QTY (REGNO (classp->exp))] = this_insn;
7afe21cc
RK
1556 }
1557
30f72379 1558 else if (GET_CODE (x) == REG && classp && ! qty_const[REG_QTY (REGNO (x))]
f353588a 1559 && ! elt->is_const)
7afe21cc
RK
1560 {
1561 register struct table_elt *p;
1562
1563 for (p = classp; p != 0; p = p->next_same_value)
1564 {
f353588a 1565 if (p->is_const && GET_CODE (p->exp) != REG)
7afe21cc 1566 {
30f72379 1567 qty_const[REG_QTY (REGNO (x))]
7afe21cc 1568 = gen_lowpart_if_possible (GET_MODE (x), p->exp);
30f72379 1569 qty_const_insn[REG_QTY (REGNO (x))] = this_insn;
7afe21cc
RK
1570 break;
1571 }
1572 }
1573 }
1574
30f72379
MM
1575 else if (GET_CODE (x) == REG && qty_const[REG_QTY (REGNO (x))]
1576 && GET_MODE (x) == qty_mode[REG_QTY (REGNO (x))])
1577 qty_const_insn[REG_QTY (REGNO (x))] = this_insn;
7afe21cc
RK
1578
1579 /* If this is a constant with symbolic value,
1580 and it has a term with an explicit integer value,
1581 link it up with related expressions. */
1582 if (GET_CODE (x) == CONST)
1583 {
1584 rtx subexp = get_related_value (x);
2197a88a 1585 unsigned subhash;
7afe21cc
RK
1586 struct table_elt *subelt, *subelt_prev;
1587
1588 if (subexp != 0)
1589 {
1590 /* Get the integer-free subexpression in the hash table. */
1591 subhash = safe_hash (subexp, mode) % NBUCKETS;
1592 subelt = lookup (subexp, subhash, mode);
1593 if (subelt == 0)
906c4e36 1594 subelt = insert (subexp, NULL_PTR, subhash, mode);
7afe21cc
RK
1595 /* Initialize SUBELT's circular chain if it has none. */
1596 if (subelt->related_value == 0)
1597 subelt->related_value = subelt;
1598 /* Find the element in the circular chain that precedes SUBELT. */
1599 subelt_prev = subelt;
1600 while (subelt_prev->related_value != subelt)
1601 subelt_prev = subelt_prev->related_value;
1602 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1603 This way the element that follows SUBELT is the oldest one. */
1604 elt->related_value = subelt_prev->related_value;
1605 subelt_prev->related_value = elt;
1606 }
1607 }
1608
1609 return elt;
1610}
1611\f
1612/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1613 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1614 the two classes equivalent.
1615
1616 CLASS1 will be the surviving class; CLASS2 should not be used after this
1617 call.
1618
1619 Any invalid entries in CLASS2 will not be copied. */
1620
1621static void
1622merge_equiv_classes (class1, class2)
1623 struct table_elt *class1, *class2;
1624{
1625 struct table_elt *elt, *next, *new;
1626
1627 /* Ensure we start with the head of the classes. */
1628 class1 = class1->first_same_value;
1629 class2 = class2->first_same_value;
1630
1631 /* If they were already equal, forget it. */
1632 if (class1 == class2)
1633 return;
1634
1635 for (elt = class2; elt; elt = next)
1636 {
2197a88a 1637 unsigned hash;
7afe21cc
RK
1638 rtx exp = elt->exp;
1639 enum machine_mode mode = elt->mode;
1640
1641 next = elt->next_same_value;
1642
1643 /* Remove old entry, make a new one in CLASS1's class.
1644 Don't do this for invalid entries as we cannot find their
0f41302f 1645 hash code (it also isn't necessary). */
7afe21cc
RK
1646 if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1647 {
1648 hash_arg_in_memory = 0;
1649 hash_arg_in_struct = 0;
1650 hash = HASH (exp, mode);
1651
1652 if (GET_CODE (exp) == REG)
1653 delete_reg_equiv (REGNO (exp));
1654
1655 remove_from_table (elt, hash);
1656
1657 if (insert_regs (exp, class1, 0))
8ae2b8f6
JW
1658 {
1659 rehash_using_reg (exp);
1660 hash = HASH (exp, mode);
1661 }
7afe21cc
RK
1662 new = insert (exp, class1, hash, mode);
1663 new->in_memory = hash_arg_in_memory;
1664 new->in_struct = hash_arg_in_struct;
1665 }
1666 }
1667}
1668\f
01e752d3
JL
1669
1670/* Flush the entire hash table. */
1671
1672static void
1673flush_hash_table ()
1674{
1675 int i;
1676 struct table_elt *p;
1677
1678 for (i = 0; i < NBUCKETS; i++)
1679 for (p = table[i]; p; p = table[i])
1680 {
1681 /* Note that invalidate can remove elements
1682 after P in the current hash chain. */
1683 if (GET_CODE (p->exp) == REG)
1684 invalidate (p->exp, p->mode);
1685 else
1686 remove_from_table (p, i);
1687 }
1688}
1689
1690
7afe21cc
RK
1691/* Remove from the hash table, or mark as invalid,
1692 all expressions whose values could be altered by storing in X.
1693 X is a register, a subreg, or a memory reference with nonvarying address
1694 (because, when a memory reference with a varying address is stored in,
1695 all memory references are removed by invalidate_memory
1696 so specific invalidation is superfluous).
bb4034b3
JW
1697 FULL_MODE, if not VOIDmode, indicates that this much should be invalidated
1698 instead of just the amount indicated by the mode of X. This is only used
1699 for bitfield stores into memory.
7afe21cc
RK
1700
1701 A nonvarying address may be just a register or just
1702 a symbol reference, or it may be either of those plus
1703 a numeric offset. */
1704
1705static void
bb4034b3 1706invalidate (x, full_mode)
7afe21cc 1707 rtx x;
bb4034b3 1708 enum machine_mode full_mode;
7afe21cc
RK
1709{
1710 register int i;
1711 register struct table_elt *p;
7afe21cc
RK
1712
1713 /* If X is a register, dependencies on its contents
1714 are recorded through the qty number mechanism.
1715 Just change the qty number of the register,
1716 mark it as invalid for expressions that refer to it,
1717 and remove it itself. */
1718
1719 if (GET_CODE (x) == REG)
1720 {
1721 register int regno = REGNO (x);
2197a88a 1722 register unsigned hash = HASH (x, GET_MODE (x));
7afe21cc
RK
1723
1724 /* Remove REGNO from any quantity list it might be on and indicate
9ec36da5 1725 that its value might have changed. If it is a pseudo, remove its
7afe21cc
RK
1726 entry from the hash table.
1727
1728 For a hard register, we do the first two actions above for any
1729 additional hard registers corresponding to X. Then, if any of these
1730 registers are in the table, we must remove any REG entries that
1731 overlap these registers. */
1732
1733 delete_reg_equiv (regno);
30f72379 1734 REG_TICK (regno)++;
7afe21cc
RK
1735
1736 if (regno >= FIRST_PSEUDO_REGISTER)
85e4d983
RK
1737 {
1738 /* Because a register can be referenced in more than one mode,
1739 we might have to remove more than one table entry. */
1740
1741 struct table_elt *elt;
1742
2d8b0f3a 1743 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
85e4d983
RK
1744 remove_from_table (elt, hash);
1745 }
7afe21cc
RK
1746 else
1747 {
54b1de55
RK
1748 HOST_WIDE_INT in_table
1749 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
7afe21cc
RK
1750 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1751 int tregno, tendregno;
1752 register struct table_elt *p, *next;
1753
1754 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1755
1756 for (i = regno + 1; i < endregno; i++)
1757 {
1758 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
1759 CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
1760 delete_reg_equiv (i);
30f72379 1761 REG_TICK (i)++;
7afe21cc
RK
1762 }
1763
1764 if (in_table)
1765 for (hash = 0; hash < NBUCKETS; hash++)
1766 for (p = table[hash]; p; p = next)
1767 {
1768 next = p->next_same_hash;
1769
1770 if (GET_CODE (p->exp) != REG
1771 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1772 continue;
1773
1774 tregno = REGNO (p->exp);
1775 tendregno
1776 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1777 if (tendregno > regno && tregno < endregno)
925be47c 1778 remove_from_table (p, hash);
7afe21cc
RK
1779 }
1780 }
1781
1782 return;
1783 }
1784
1785 if (GET_CODE (x) == SUBREG)
1786 {
1787 if (GET_CODE (SUBREG_REG (x)) != REG)
1788 abort ();
bb4034b3 1789 invalidate (SUBREG_REG (x), VOIDmode);
7afe21cc
RK
1790 return;
1791 }
1792
aac5cc16
RH
1793 /* If X is a parallel, invalidate all of its elements. */
1794
1795 if (GET_CODE (x) == PARALLEL)
1796 {
1797 for (i = XVECLEN (x, 0) - 1; i >= 0 ; --i)
1798 invalidate (XVECEXP (x, 0, i), VOIDmode);
1799 return;
1800 }
1801
1802 /* If X is an expr_list, this is part of a disjoint return value;
1803 extract the location in question ignoring the offset. */
1804
1805 if (GET_CODE (x) == EXPR_LIST)
1806 {
1807 invalidate (XEXP (x, 0), VOIDmode);
1808 return;
1809 }
1810
7afe21cc
RK
1811 /* X is not a register; it must be a memory reference with
1812 a nonvarying address. Remove all hash table elements
1813 that refer to overlapping pieces of memory. */
1814
1815 if (GET_CODE (x) != MEM)
1816 abort ();
7afe21cc 1817
bb4034b3
JW
1818 if (full_mode == VOIDmode)
1819 full_mode = GET_MODE (x);
1820
7afe21cc
RK
1821 for (i = 0; i < NBUCKETS; i++)
1822 {
1823 register struct table_elt *next;
1824 for (p = table[i]; p; p = next)
1825 {
1826 next = p->next_same_hash;
9ae8ffe7
JL
1827 /* Invalidate ASM_OPERANDS which reference memory (this is easier
1828 than checking all the aliases). */
1829 if (p->in_memory
1830 && (GET_CODE (p->exp) != MEM
1831 || true_dependence (x, full_mode, p->exp, cse_rtx_varies_p)))
7afe21cc
RK
1832 remove_from_table (p, i);
1833 }
1834 }
1835}
1836
1837/* Remove all expressions that refer to register REGNO,
1838 since they are already invalid, and we are about to
1839 mark that register valid again and don't want the old
1840 expressions to reappear as valid. */
1841
1842static void
1843remove_invalid_refs (regno)
1844 int regno;
1845{
1846 register int i;
1847 register struct table_elt *p, *next;
1848
1849 for (i = 0; i < NBUCKETS; i++)
1850 for (p = table[i]; p; p = next)
1851 {
1852 next = p->next_same_hash;
1853 if (GET_CODE (p->exp) != REG
906c4e36 1854 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
7afe21cc
RK
1855 remove_from_table (p, i);
1856 }
1857}
34c73909
R
1858
1859/* Likewise for a subreg with subreg_reg WORD and mode MODE. */
1860static void
1861remove_invalid_subreg_refs (regno, word, mode)
1862 int regno;
1863 int word;
1864 enum machine_mode mode;
1865{
1866 register int i;
1867 register struct table_elt *p, *next;
1868 int end = word + (GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD;
1869
1870 for (i = 0; i < NBUCKETS; i++)
1871 for (p = table[i]; p; p = next)
1872 {
1873 rtx exp;
1874 next = p->next_same_hash;
1875
1876 exp = p->exp;
1877 if (GET_CODE (p->exp) != REG
1878 && (GET_CODE (exp) != SUBREG
1879 || GET_CODE (SUBREG_REG (exp)) != REG
1880 || REGNO (SUBREG_REG (exp)) != regno
1881 || (((SUBREG_WORD (exp)
1882 + (GET_MODE_SIZE (GET_MODE (exp)) - 1) / UNITS_PER_WORD)
1883 >= word)
1884 && SUBREG_WORD (exp) <= end))
1885 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1886 remove_from_table (p, i);
1887 }
1888}
7afe21cc
RK
1889\f
1890/* Recompute the hash codes of any valid entries in the hash table that
1891 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1892
1893 This is called when we make a jump equivalence. */
1894
1895static void
1896rehash_using_reg (x)
1897 rtx x;
1898{
973838fd 1899 unsigned int i;
7afe21cc 1900 struct table_elt *p, *next;
2197a88a 1901 unsigned hash;
7afe21cc
RK
1902
1903 if (GET_CODE (x) == SUBREG)
1904 x = SUBREG_REG (x);
1905
1906 /* If X is not a register or if the register is known not to be in any
1907 valid entries in the table, we have no work to do. */
1908
1909 if (GET_CODE (x) != REG
30f72379
MM
1910 || REG_IN_TABLE (REGNO (x)) < 0
1911 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
7afe21cc
RK
1912 return;
1913
1914 /* Scan all hash chains looking for valid entries that mention X.
1915 If we find one and it is in the wrong hash chain, move it. We can skip
1916 objects that are registers, since they are handled specially. */
1917
1918 for (i = 0; i < NBUCKETS; i++)
1919 for (p = table[i]; p; p = next)
1920 {
1921 next = p->next_same_hash;
1922 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
538b78e7 1923 && exp_equiv_p (p->exp, p->exp, 1, 0)
7afe21cc
RK
1924 && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
1925 {
1926 if (p->next_same_hash)
1927 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1928
1929 if (p->prev_same_hash)
1930 p->prev_same_hash->next_same_hash = p->next_same_hash;
1931 else
1932 table[i] = p->next_same_hash;
1933
1934 p->next_same_hash = table[hash];
1935 p->prev_same_hash = 0;
1936 if (table[hash])
1937 table[hash]->prev_same_hash = p;
1938 table[hash] = p;
1939 }
1940 }
1941}
1942\f
7afe21cc
RK
1943/* Remove from the hash table any expression that is a call-clobbered
1944 register. Also update their TICK values. */
1945
1946static void
1947invalidate_for_call ()
1948{
1949 int regno, endregno;
1950 int i;
2197a88a 1951 unsigned hash;
7afe21cc
RK
1952 struct table_elt *p, *next;
1953 int in_table = 0;
1954
1955 /* Go through all the hard registers. For each that is clobbered in
1956 a CALL_INSN, remove the register from quantity chains and update
1957 reg_tick if defined. Also see if any of these registers is currently
1958 in the table. */
1959
1960 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1961 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1962 {
1963 delete_reg_equiv (regno);
30f72379
MM
1964 if (REG_TICK (regno) >= 0)
1965 REG_TICK (regno)++;
7afe21cc 1966
0e227018 1967 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
7afe21cc
RK
1968 }
1969
1970 /* In the case where we have no call-clobbered hard registers in the
1971 table, we are done. Otherwise, scan the table and remove any
1972 entry that overlaps a call-clobbered register. */
1973
1974 if (in_table)
1975 for (hash = 0; hash < NBUCKETS; hash++)
1976 for (p = table[hash]; p; p = next)
1977 {
1978 next = p->next_same_hash;
1979
9ae8ffe7
JL
1980 if (p->in_memory)
1981 {
1982 remove_from_table (p, hash);
1983 continue;
1984 }
1985
7afe21cc
RK
1986 if (GET_CODE (p->exp) != REG
1987 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1988 continue;
1989
1990 regno = REGNO (p->exp);
1991 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1992
1993 for (i = regno; i < endregno; i++)
1994 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1995 {
1996 remove_from_table (p, hash);
1997 break;
1998 }
1999 }
2000}
2001\f
2002/* Given an expression X of type CONST,
2003 and ELT which is its table entry (or 0 if it
2004 is not in the hash table),
2005 return an alternate expression for X as a register plus integer.
2006 If none can be found, return 0. */
2007
2008static rtx
2009use_related_value (x, elt)
2010 rtx x;
2011 struct table_elt *elt;
2012{
2013 register struct table_elt *relt = 0;
2014 register struct table_elt *p, *q;
906c4e36 2015 HOST_WIDE_INT offset;
7afe21cc
RK
2016
2017 /* First, is there anything related known?
2018 If we have a table element, we can tell from that.
2019 Otherwise, must look it up. */
2020
2021 if (elt != 0 && elt->related_value != 0)
2022 relt = elt;
2023 else if (elt == 0 && GET_CODE (x) == CONST)
2024 {
2025 rtx subexp = get_related_value (x);
2026 if (subexp != 0)
2027 relt = lookup (subexp,
2028 safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
2029 GET_MODE (subexp));
2030 }
2031
2032 if (relt == 0)
2033 return 0;
2034
2035 /* Search all related table entries for one that has an
2036 equivalent register. */
2037
2038 p = relt;
2039 while (1)
2040 {
2041 /* This loop is strange in that it is executed in two different cases.
2042 The first is when X is already in the table. Then it is searching
2043 the RELATED_VALUE list of X's class (RELT). The second case is when
2044 X is not in the table. Then RELT points to a class for the related
2045 value.
2046
2047 Ensure that, whatever case we are in, that we ignore classes that have
2048 the same value as X. */
2049
2050 if (rtx_equal_p (x, p->exp))
2051 q = 0;
2052 else
2053 for (q = p->first_same_value; q; q = q->next_same_value)
2054 if (GET_CODE (q->exp) == REG)
2055 break;
2056
2057 if (q)
2058 break;
2059
2060 p = p->related_value;
2061
2062 /* We went all the way around, so there is nothing to be found.
2063 Alternatively, perhaps RELT was in the table for some other reason
2064 and it has no related values recorded. */
2065 if (p == relt || p == 0)
2066 break;
2067 }
2068
2069 if (q == 0)
2070 return 0;
2071
2072 offset = (get_integer_term (x) - get_integer_term (p->exp));
2073 /* Note: OFFSET may be 0 if P->xexp and X are related by commutativity. */
2074 return plus_constant (q->exp, offset);
2075}
2076\f
2077/* Hash an rtx. We are careful to make sure the value is never negative.
2078 Equivalent registers hash identically.
2079 MODE is used in hashing for CONST_INTs only;
2080 otherwise the mode of X is used.
2081
2082 Store 1 in do_not_record if any subexpression is volatile.
2083
2084 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2085 which does not have the RTX_UNCHANGING_P bit set.
2086 In this case, also store 1 in hash_arg_in_struct
2087 if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.
2088
2089 Note that cse_insn knows that the hash code of a MEM expression
2090 is just (int) MEM plus the hash code of the address. */
2091
2197a88a 2092static unsigned
7afe21cc
RK
2093canon_hash (x, mode)
2094 rtx x;
2095 enum machine_mode mode;
2096{
2097 register int i, j;
2197a88a 2098 register unsigned hash = 0;
7afe21cc 2099 register enum rtx_code code;
6f7d635c 2100 register const char *fmt;
7afe21cc
RK
2101
2102 /* repeat is used to turn tail-recursion into iteration. */
2103 repeat:
2104 if (x == 0)
2105 return hash;
2106
2107 code = GET_CODE (x);
2108 switch (code)
2109 {
2110 case REG:
2111 {
2112 register int regno = REGNO (x);
2113
2114 /* On some machines, we can't record any non-fixed hard register,
2115 because extending its life will cause reload problems. We
9a794e50
RH
2116 consider ap, fp, and sp to be fixed for this purpose.
2117
2118 We also consider CCmode registers to be fixed for this purpose;
2119 failure to do so leads to failure to simplify 0<100 type of
2120 conditionals.
2121
0f41302f 2122 On all machines, we can't record any global registers. */
7afe21cc
RK
2123
2124 if (regno < FIRST_PSEUDO_REGISTER
2125 && (global_regs[regno]
f95182a4
ILT
2126 || (SMALL_REGISTER_CLASSES
2127 && ! fixed_regs[regno]
7afe21cc 2128 && regno != FRAME_POINTER_REGNUM
8bc169f2 2129 && regno != HARD_FRAME_POINTER_REGNUM
7afe21cc 2130 && regno != ARG_POINTER_REGNUM
9a794e50
RH
2131 && regno != STACK_POINTER_REGNUM
2132 && GET_MODE_CLASS (GET_MODE (x)) != MODE_CC)))
7afe21cc
RK
2133 {
2134 do_not_record = 1;
2135 return 0;
2136 }
30f72379 2137 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2197a88a 2138 return hash;
7afe21cc
RK
2139 }
2140
34c73909
R
2141 /* We handle SUBREG of a REG specially because the underlying
2142 reg changes its hash value with every value change; we don't
2143 want to have to forget unrelated subregs when one subreg changes. */
2144 case SUBREG:
2145 {
2146 if (GET_CODE (SUBREG_REG (x)) == REG)
2147 {
2148 hash += (((unsigned) SUBREG << 7)
2149 + REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
2150 return hash;
2151 }
2152 break;
2153 }
2154
7afe21cc 2155 case CONST_INT:
2197a88a
RK
2156 {
2157 unsigned HOST_WIDE_INT tem = INTVAL (x);
2158 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2159 return hash;
2160 }
7afe21cc
RK
2161
2162 case CONST_DOUBLE:
2163 /* This is like the general case, except that it only counts
2164 the integers representing the constant. */
2197a88a 2165 hash += (unsigned) code + (unsigned) GET_MODE (x);
969c8517
RK
2166 if (GET_MODE (x) != VOIDmode)
2167 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
2168 {
ef178af3 2169 unsigned HOST_WIDE_INT tem = XWINT (x, i);
969c8517
RK
2170 hash += tem;
2171 }
2172 else
2173 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2174 + (unsigned) CONST_DOUBLE_HIGH (x));
7afe21cc
RK
2175 return hash;
2176
2177 /* Assume there is only one rtx object for any given label. */
2178 case LABEL_REF:
3c543775 2179 hash
7bcac048 2180 += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2197a88a 2181 return hash;
7afe21cc
RK
2182
2183 case SYMBOL_REF:
3c543775 2184 hash
7bcac048 2185 += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2197a88a 2186 return hash;
7afe21cc
RK
2187
2188 case MEM:
2189 if (MEM_VOLATILE_P (x))
2190 {
2191 do_not_record = 1;
2192 return 0;
2193 }
9ad91d71 2194 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
7afe21cc
RK
2195 {
2196 hash_arg_in_memory = 1;
2197 if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
2198 }
2199 /* Now that we have already found this special case,
2200 might as well speed it up as much as possible. */
2197a88a 2201 hash += (unsigned) MEM;
7afe21cc
RK
2202 x = XEXP (x, 0);
2203 goto repeat;
2204
2205 case PRE_DEC:
2206 case PRE_INC:
2207 case POST_DEC:
2208 case POST_INC:
2209 case PC:
2210 case CC0:
2211 case CALL:
2212 case UNSPEC_VOLATILE:
2213 do_not_record = 1;
2214 return 0;
2215
2216 case ASM_OPERANDS:
2217 if (MEM_VOLATILE_P (x))
2218 {
2219 do_not_record = 1;
2220 return 0;
2221 }
e9a25f70
JL
2222 break;
2223
2224 default:
2225 break;
7afe21cc
RK
2226 }
2227
2228 i = GET_RTX_LENGTH (code) - 1;
2197a88a 2229 hash += (unsigned) code + (unsigned) GET_MODE (x);
7afe21cc
RK
2230 fmt = GET_RTX_FORMAT (code);
2231 for (; i >= 0; i--)
2232 {
2233 if (fmt[i] == 'e')
2234 {
2235 rtx tem = XEXP (x, i);
7afe21cc
RK
2236
2237 /* If we are about to do the last recursive call
2238 needed at this level, change it into iteration.
2239 This function is called enough to be worth it. */
2240 if (i == 0)
2241 {
2242 x = tem;
2243 goto repeat;
2244 }
2245 hash += canon_hash (tem, 0);
2246 }
2247 else if (fmt[i] == 'E')
2248 for (j = 0; j < XVECLEN (x, i); j++)
2249 hash += canon_hash (XVECEXP (x, i, j), 0);
2250 else if (fmt[i] == 's')
2251 {
2197a88a 2252 register unsigned char *p = (unsigned char *) XSTR (x, i);
7afe21cc
RK
2253 if (p)
2254 while (*p)
2197a88a 2255 hash += *p++;
7afe21cc
RK
2256 }
2257 else if (fmt[i] == 'i')
2258 {
2197a88a
RK
2259 register unsigned tem = XINT (x, i);
2260 hash += tem;
7afe21cc 2261 }
8f985ec4 2262 else if (fmt[i] == '0' || fmt[i] == 't')
e9a25f70 2263 /* unused */;
7afe21cc
RK
2264 else
2265 abort ();
2266 }
2267 return hash;
2268}
2269
2270/* Like canon_hash but with no side effects. */
2271
2197a88a 2272static unsigned
7afe21cc
RK
2273safe_hash (x, mode)
2274 rtx x;
2275 enum machine_mode mode;
2276{
2277 int save_do_not_record = do_not_record;
2278 int save_hash_arg_in_memory = hash_arg_in_memory;
2279 int save_hash_arg_in_struct = hash_arg_in_struct;
2197a88a 2280 unsigned hash = canon_hash (x, mode);
7afe21cc
RK
2281 hash_arg_in_memory = save_hash_arg_in_memory;
2282 hash_arg_in_struct = save_hash_arg_in_struct;
2283 do_not_record = save_do_not_record;
2284 return hash;
2285}
2286\f
2287/* Return 1 iff X and Y would canonicalize into the same thing,
2288 without actually constructing the canonicalization of either one.
2289 If VALIDATE is nonzero,
2290 we assume X is an expression being processed from the rtl
2291 and Y was found in the hash table. We check register refs
2292 in Y for being marked as valid.
2293
2294 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2295 that is known to be in the register. Ordinarily, we don't allow them
2296 to match, because letting them match would cause unpredictable results
2297 in all the places that search a hash table chain for an equivalent
2298 for a given value. A possible equivalent that has different structure
2299 has its hash code computed from different data. Whether the hash code
38e01259 2300 is the same as that of the given value is pure luck. */
7afe21cc
RK
2301
2302static int
2303exp_equiv_p (x, y, validate, equal_values)
2304 rtx x, y;
2305 int validate;
2306 int equal_values;
2307{
906c4e36 2308 register int i, j;
7afe21cc 2309 register enum rtx_code code;
6f7d635c 2310 register const char *fmt;
7afe21cc
RK
2311
2312 /* Note: it is incorrect to assume an expression is equivalent to itself
2313 if VALIDATE is nonzero. */
2314 if (x == y && !validate)
2315 return 1;
2316 if (x == 0 || y == 0)
2317 return x == y;
2318
2319 code = GET_CODE (x);
2320 if (code != GET_CODE (y))
2321 {
2322 if (!equal_values)
2323 return 0;
2324
2325 /* If X is a constant and Y is a register or vice versa, they may be
2326 equivalent. We only have to validate if Y is a register. */
2327 if (CONSTANT_P (x) && GET_CODE (y) == REG
2328 && REGNO_QTY_VALID_P (REGNO (y))
30f72379
MM
2329 && GET_MODE (y) == qty_mode[REG_QTY (REGNO (y))]
2330 && rtx_equal_p (x, qty_const[REG_QTY (REGNO (y))])
2331 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
7afe21cc
RK
2332 return 1;
2333
2334 if (CONSTANT_P (y) && code == REG
2335 && REGNO_QTY_VALID_P (REGNO (x))
30f72379
MM
2336 && GET_MODE (x) == qty_mode[REG_QTY (REGNO (x))]
2337 && rtx_equal_p (y, qty_const[REG_QTY (REGNO (x))]))
7afe21cc
RK
2338 return 1;
2339
2340 return 0;
2341 }
2342
2343 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2344 if (GET_MODE (x) != GET_MODE (y))
2345 return 0;
2346
2347 switch (code)
2348 {
2349 case PC:
2350 case CC0:
2351 return x == y;
2352
2353 case CONST_INT:
58c8c593 2354 return INTVAL (x) == INTVAL (y);
7afe21cc
RK
2355
2356 case LABEL_REF:
7afe21cc
RK
2357 return XEXP (x, 0) == XEXP (y, 0);
2358
f54d4924
RK
2359 case SYMBOL_REF:
2360 return XSTR (x, 0) == XSTR (y, 0);
2361
7afe21cc
RK
2362 case REG:
2363 {
2364 int regno = REGNO (y);
2365 int endregno
2366 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2367 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2368 int i;
2369
2370 /* If the quantities are not the same, the expressions are not
2371 equivalent. If there are and we are not to validate, they
2372 are equivalent. Otherwise, ensure all regs are up-to-date. */
2373
30f72379 2374 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
7afe21cc
RK
2375 return 0;
2376
2377 if (! validate)
2378 return 1;
2379
2380 for (i = regno; i < endregno; i++)
30f72379 2381 if (REG_IN_TABLE (i) != REG_TICK (i))
7afe21cc
RK
2382 return 0;
2383
2384 return 1;
2385 }
2386
2387 /* For commutative operations, check both orders. */
2388 case PLUS:
2389 case MULT:
2390 case AND:
2391 case IOR:
2392 case XOR:
2393 case NE:
2394 case EQ:
2395 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2396 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2397 validate, equal_values))
2398 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2399 validate, equal_values)
2400 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2401 validate, equal_values)));
e9a25f70
JL
2402
2403 default:
2404 break;
7afe21cc
RK
2405 }
2406
2407 /* Compare the elements. If any pair of corresponding elements
2408 fail to match, return 0 for the whole things. */
2409
2410 fmt = GET_RTX_FORMAT (code);
2411 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2412 {
906c4e36 2413 switch (fmt[i])
7afe21cc 2414 {
906c4e36 2415 case 'e':
7afe21cc
RK
2416 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2417 return 0;
906c4e36
RK
2418 break;
2419
2420 case 'E':
7afe21cc
RK
2421 if (XVECLEN (x, i) != XVECLEN (y, i))
2422 return 0;
2423 for (j = 0; j < XVECLEN (x, i); j++)
2424 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2425 validate, equal_values))
2426 return 0;
906c4e36
RK
2427 break;
2428
2429 case 's':
7afe21cc
RK
2430 if (strcmp (XSTR (x, i), XSTR (y, i)))
2431 return 0;
906c4e36
RK
2432 break;
2433
2434 case 'i':
7afe21cc
RK
2435 if (XINT (x, i) != XINT (y, i))
2436 return 0;
906c4e36
RK
2437 break;
2438
2439 case 'w':
2440 if (XWINT (x, i) != XWINT (y, i))
2441 return 0;
2442 break;
2443
2444 case '0':
8f985ec4 2445 case 't':
906c4e36
RK
2446 break;
2447
2448 default:
2449 abort ();
7afe21cc 2450 }
906c4e36
RK
2451 }
2452
7afe21cc
RK
2453 return 1;
2454}
2455\f
2456/* Return 1 iff any subexpression of X matches Y.
2457 Here we do not require that X or Y be valid (for registers referred to)
2458 for being in the hash table. */
2459
6cd4575e 2460static int
7afe21cc
RK
2461refers_to_p (x, y)
2462 rtx x, y;
2463{
2464 register int i;
2465 register enum rtx_code code;
6f7d635c 2466 register const char *fmt;
7afe21cc
RK
2467
2468 repeat:
2469 if (x == y)
2470 return 1;
2471 if (x == 0 || y == 0)
2472 return 0;
2473
2474 code = GET_CODE (x);
2475 /* If X as a whole has the same code as Y, they may match.
2476 If so, return 1. */
2477 if (code == GET_CODE (y))
2478 {
2479 if (exp_equiv_p (x, y, 0, 1))
2480 return 1;
2481 }
2482
2483 /* X does not match, so try its subexpressions. */
2484
2485 fmt = GET_RTX_FORMAT (code);
2486 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2487 if (fmt[i] == 'e')
2488 {
2489 if (i == 0)
2490 {
2491 x = XEXP (x, 0);
2492 goto repeat;
2493 }
2494 else
2495 if (refers_to_p (XEXP (x, i), y))
2496 return 1;
2497 }
2498 else if (fmt[i] == 'E')
2499 {
2500 int j;
2501 for (j = 0; j < XVECLEN (x, i); j++)
2502 if (refers_to_p (XVECEXP (x, i, j), y))
2503 return 1;
2504 }
2505
2506 return 0;
2507}
2508\f
f451db89
JL
2509/* Given ADDR and SIZE (a memory address, and the size of the memory reference),
2510 set PBASE, PSTART, and PEND which correspond to the base of the address,
2511 the starting offset, and ending offset respectively.
2512
bb4034b3 2513 ADDR is known to be a nonvarying address. */
f451db89 2514
bb4034b3
JW
2515/* ??? Despite what the comments say, this function is in fact frequently
2516 passed varying addresses. This does not appear to cause any problems. */
f451db89
JL
2517
2518static void
2519set_nonvarying_address_components (addr, size, pbase, pstart, pend)
2520 rtx addr;
2521 int size;
2522 rtx *pbase;
6500fb43 2523 HOST_WIDE_INT *pstart, *pend;
f451db89
JL
2524{
2525 rtx base;
c85663b1 2526 HOST_WIDE_INT start, end;
f451db89
JL
2527
2528 base = addr;
2529 start = 0;
2530 end = 0;
2531
e5e809f4
JL
2532 if (flag_pic && GET_CODE (base) == PLUS
2533 && XEXP (base, 0) == pic_offset_table_rtx)
2534 base = XEXP (base, 1);
2535
f451db89
JL
2536 /* Registers with nonvarying addresses usually have constant equivalents;
2537 but the frame pointer register is also possible. */
2538 if (GET_CODE (base) == REG
2539 && qty_const != 0
2540 && REGNO_QTY_VALID_P (REGNO (base))
30f72379
MM
2541 && qty_mode[REG_QTY (REGNO (base))] == GET_MODE (base)
2542 && qty_const[REG_QTY (REGNO (base))] != 0)
2543 base = qty_const[REG_QTY (REGNO (base))];
f451db89
JL
2544 else if (GET_CODE (base) == PLUS
2545 && GET_CODE (XEXP (base, 1)) == CONST_INT
2546 && GET_CODE (XEXP (base, 0)) == REG
2547 && qty_const != 0
2548 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
30f72379 2549 && (qty_mode[REG_QTY (REGNO (XEXP (base, 0)))]
f451db89 2550 == GET_MODE (XEXP (base, 0)))
30f72379 2551 && qty_const[REG_QTY (REGNO (XEXP (base, 0)))])
f451db89
JL
2552 {
2553 start = INTVAL (XEXP (base, 1));
30f72379 2554 base = qty_const[REG_QTY (REGNO (XEXP (base, 0)))];
f451db89 2555 }
9c6b0bae 2556 /* This can happen as the result of virtual register instantiation,
abc95ed3 2557 if the initial offset is too large to be a valid address. */
9c6b0bae
RK
2558 else if (GET_CODE (base) == PLUS
2559 && GET_CODE (XEXP (base, 0)) == REG
2560 && GET_CODE (XEXP (base, 1)) == REG
2561 && qty_const != 0
2562 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
30f72379 2563 && (qty_mode[REG_QTY (REGNO (XEXP (base, 0)))]
9c6b0bae 2564 == GET_MODE (XEXP (base, 0)))
30f72379 2565 && qty_const[REG_QTY (REGNO (XEXP (base, 0)))]
9c6b0bae 2566 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 1)))
30f72379 2567 && (qty_mode[REG_QTY (REGNO (XEXP (base, 1)))]
9c6b0bae 2568 == GET_MODE (XEXP (base, 1)))
30f72379 2569 && qty_const[REG_QTY (REGNO (XEXP (base, 1)))])
9c6b0bae 2570 {
30f72379
MM
2571 rtx tem = qty_const[REG_QTY (REGNO (XEXP (base, 1)))];
2572 base = qty_const[REG_QTY (REGNO (XEXP (base, 0)))];
9c6b0bae
RK
2573
2574 /* One of the two values must be a constant. */
2575 if (GET_CODE (base) != CONST_INT)
2576 {
2577 if (GET_CODE (tem) != CONST_INT)
2578 abort ();
2579 start = INTVAL (tem);
2580 }
2581 else
2582 {
2583 start = INTVAL (base);
2584 base = tem;
2585 }
2586 }
f451db89 2587
c85663b1
RK
2588 /* Handle everything that we can find inside an address that has been
2589 viewed as constant. */
f451db89 2590
c85663b1 2591 while (1)
f451db89 2592 {
c85663b1
RK
2593 /* If no part of this switch does a "continue", the code outside
2594 will exit this loop. */
2595
2596 switch (GET_CODE (base))
2597 {
2598 case LO_SUM:
2599 /* By definition, operand1 of a LO_SUM is the associated constant
2600 address. Use the associated constant address as the base
2601 instead. */
2602 base = XEXP (base, 1);
2603 continue;
2604
2605 case CONST:
2606 /* Strip off CONST. */
2607 base = XEXP (base, 0);
2608 continue;
2609
2610 case PLUS:
2611 if (GET_CODE (XEXP (base, 1)) == CONST_INT)
2612 {
2613 start += INTVAL (XEXP (base, 1));
2614 base = XEXP (base, 0);
2615 continue;
2616 }
2617 break;
2618
2619 case AND:
2620 /* Handle the case of an AND which is the negative of a power of
2621 two. This is used to represent unaligned memory operations. */
2622 if (GET_CODE (XEXP (base, 1)) == CONST_INT
2623 && exact_log2 (- INTVAL (XEXP (base, 1))) > 0)
2624 {
2625 set_nonvarying_address_components (XEXP (base, 0), size,
2626 pbase, pstart, pend);
2627
2628 /* Assume the worst misalignment. START is affected, but not
2629 END, so compensate but adjusting SIZE. Don't lose any
2630 constant we already had. */
2631
2632 size = *pend - *pstart - INTVAL (XEXP (base, 1)) - 1;
89046535
RK
2633 start += *pstart + INTVAL (XEXP (base, 1)) + 1;
2634 end += *pend;
c85663b1
RK
2635 base = *pbase;
2636 }
2637 break;
e9a25f70
JL
2638
2639 default:
2640 break;
c85663b1
RK
2641 }
2642
2643 break;
f451db89
JL
2644 }
2645
336d6f0a
RK
2646 if (GET_CODE (base) == CONST_INT)
2647 {
2648 start += INTVAL (base);
2649 base = const0_rtx;
2650 }
2651
f451db89
JL
2652 end = start + size;
2653
2654 /* Set the return values. */
2655 *pbase = base;
2656 *pstart = start;
2657 *pend = end;
2658}
2659
9ae8ffe7
JL
2660/* Return 1 if X has a value that can vary even between two
2661 executions of the program. 0 means X can be compared reliably
2662 against certain constants or near-constants. */
7afe21cc
RK
2663
2664static int
9ae8ffe7
JL
2665cse_rtx_varies_p (x)
2666 register rtx x;
7afe21cc
RK
2667{
2668 /* We need not check for X and the equivalence class being of the same
2669 mode because if X is equivalent to a constant in some mode, it
2670 doesn't vary in any mode. */
2671
9ae8ffe7
JL
2672 if (GET_CODE (x) == REG
2673 && REGNO_QTY_VALID_P (REGNO (x))
30f72379
MM
2674 && GET_MODE (x) == qty_mode[REG_QTY (REGNO (x))]
2675 && qty_const[REG_QTY (REGNO (x))] != 0)
7afe21cc
RK
2676 return 0;
2677
9ae8ffe7
JL
2678 if (GET_CODE (x) == PLUS
2679 && GET_CODE (XEXP (x, 1)) == CONST_INT
2680 && GET_CODE (XEXP (x, 0)) == REG
2681 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2682 && (GET_MODE (XEXP (x, 0))
30f72379
MM
2683 == qty_mode[REG_QTY (REGNO (XEXP (x, 0)))])
2684 && qty_const[REG_QTY (REGNO (XEXP (x, 0)))])
7afe21cc
RK
2685 return 0;
2686
9c6b0bae
RK
2687 /* This can happen as the result of virtual register instantiation, if
2688 the initial constant is too large to be a valid address. This gives
2689 us a three instruction sequence, load large offset into a register,
2690 load fp minus a constant into a register, then a MEM which is the
2691 sum of the two `constant' registers. */
9ae8ffe7
JL
2692 if (GET_CODE (x) == PLUS
2693 && GET_CODE (XEXP (x, 0)) == REG
2694 && GET_CODE (XEXP (x, 1)) == REG
2695 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2696 && (GET_MODE (XEXP (x, 0))
30f72379
MM
2697 == qty_mode[REG_QTY (REGNO (XEXP (x, 0)))])
2698 && qty_const[REG_QTY (REGNO (XEXP (x, 0)))]
9ae8ffe7
JL
2699 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1)))
2700 && (GET_MODE (XEXP (x, 1))
30f72379
MM
2701 == qty_mode[REG_QTY (REGNO (XEXP (x, 1)))])
2702 && qty_const[REG_QTY (REGNO (XEXP (x, 1)))])
9c6b0bae
RK
2703 return 0;
2704
9ae8ffe7 2705 return rtx_varies_p (x);
7afe21cc
RK
2706}
2707\f
2708/* Canonicalize an expression:
2709 replace each register reference inside it
2710 with the "oldest" equivalent register.
2711
2712 If INSN is non-zero and we are replacing a pseudo with a hard register
7722328e
RK
2713 or vice versa, validate_change is used to ensure that INSN remains valid
2714 after we make our substitution. The calls are made with IN_GROUP non-zero
2715 so apply_change_group must be called upon the outermost return from this
2716 function (unless INSN is zero). The result of apply_change_group can
2717 generally be discarded since the changes we are making are optional. */
7afe21cc
RK
2718
2719static rtx
2720canon_reg (x, insn)
2721 rtx x;
2722 rtx insn;
2723{
2724 register int i;
2725 register enum rtx_code code;
6f7d635c 2726 register const char *fmt;
7afe21cc
RK
2727
2728 if (x == 0)
2729 return x;
2730
2731 code = GET_CODE (x);
2732 switch (code)
2733 {
2734 case PC:
2735 case CC0:
2736 case CONST:
2737 case CONST_INT:
2738 case CONST_DOUBLE:
2739 case SYMBOL_REF:
2740 case LABEL_REF:
2741 case ADDR_VEC:
2742 case ADDR_DIFF_VEC:
2743 return x;
2744
2745 case REG:
2746 {
2747 register int first;
2748
2749 /* Never replace a hard reg, because hard regs can appear
2750 in more than one machine mode, and we must preserve the mode
2751 of each occurrence. Also, some hard regs appear in
2752 MEMs that are shared and mustn't be altered. Don't try to
2753 replace any reg that maps to a reg of class NO_REGS. */
2754 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2755 || ! REGNO_QTY_VALID_P (REGNO (x)))
2756 return x;
2757
30f72379 2758 first = qty_first_reg[REG_QTY (REGNO (x))];
7afe21cc
RK
2759 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2760 : REGNO_REG_CLASS (first) == NO_REGS ? x
30f72379 2761 : gen_rtx_REG (qty_mode[REG_QTY (REGNO (x))], first));
7afe21cc 2762 }
e9a25f70
JL
2763
2764 default:
2765 break;
7afe21cc
RK
2766 }
2767
2768 fmt = GET_RTX_FORMAT (code);
2769 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2770 {
2771 register int j;
2772
2773 if (fmt[i] == 'e')
2774 {
2775 rtx new = canon_reg (XEXP (x, i), insn);
58873255 2776 int insn_code;
7afe21cc
RK
2777
2778 /* If replacing pseudo with hard reg or vice versa, ensure the
178c39f6 2779 insn remains valid. Likewise if the insn has MATCH_DUPs. */
aee9dc31
RS
2780 if (insn != 0 && new != 0
2781 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
178c39f6
RK
2782 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2783 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
58873255
RK
2784 || (insn_code = recog_memoized (insn)) < 0
2785 || insn_n_dups[insn_code] > 0))
77fa0940 2786 validate_change (insn, &XEXP (x, i), new, 1);
7afe21cc
RK
2787 else
2788 XEXP (x, i) = new;
2789 }
2790 else if (fmt[i] == 'E')
2791 for (j = 0; j < XVECLEN (x, i); j++)
2792 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2793 }
2794
2795 return x;
2796}
2797\f
a2cabb29 2798/* LOC is a location within INSN that is an operand address (the contents of
7afe21cc
RK
2799 a MEM). Find the best equivalent address to use that is valid for this
2800 insn.
2801
2802 On most CISC machines, complicated address modes are costly, and rtx_cost
2803 is a good approximation for that cost. However, most RISC machines have
2804 only a few (usually only one) memory reference formats. If an address is
2805 valid at all, it is often just as cheap as any other address. Hence, for
2806 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2807 costs of various addresses. For two addresses of equal cost, choose the one
2808 with the highest `rtx_cost' value as that has the potential of eliminating
2809 the most insns. For equal costs, we choose the first in the equivalence
2810 class. Note that we ignore the fact that pseudo registers are cheaper
2811 than hard registers here because we would also prefer the pseudo registers.
2812 */
2813
6cd4575e 2814static void
7afe21cc
RK
2815find_best_addr (insn, loc)
2816 rtx insn;
2817 rtx *loc;
2818{
7a87758d 2819 struct table_elt *elt;
7afe21cc 2820 rtx addr = *loc;
7a87758d
AS
2821#ifdef ADDRESS_COST
2822 struct table_elt *p;
7afe21cc 2823 int found_better = 1;
7a87758d 2824#endif
7afe21cc
RK
2825 int save_do_not_record = do_not_record;
2826 int save_hash_arg_in_memory = hash_arg_in_memory;
2827 int save_hash_arg_in_struct = hash_arg_in_struct;
7afe21cc
RK
2828 int addr_volatile;
2829 int regno;
2197a88a 2830 unsigned hash;
7afe21cc
RK
2831
2832 /* Do not try to replace constant addresses or addresses of local and
2833 argument slots. These MEM expressions are made only once and inserted
2834 in many instructions, as well as being used to control symbol table
2835 output. It is not safe to clobber them.
2836
2837 There are some uncommon cases where the address is already in a register
2838 for some reason, but we cannot take advantage of that because we have
2839 no easy way to unshare the MEM. In addition, looking up all stack
2840 addresses is costly. */
2841 if ((GET_CODE (addr) == PLUS
2842 && GET_CODE (XEXP (addr, 0)) == REG
2843 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2844 && (regno = REGNO (XEXP (addr, 0)),
8bc169f2
DE
2845 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2846 || regno == ARG_POINTER_REGNUM))
7afe21cc 2847 || (GET_CODE (addr) == REG
8bc169f2
DE
2848 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2849 || regno == HARD_FRAME_POINTER_REGNUM
2850 || regno == ARG_POINTER_REGNUM))
e9a25f70 2851 || GET_CODE (addr) == ADDRESSOF
7afe21cc
RK
2852 || CONSTANT_ADDRESS_P (addr))
2853 return;
2854
2855 /* If this address is not simply a register, try to fold it. This will
2856 sometimes simplify the expression. Many simplifications
2857 will not be valid, but some, usually applying the associative rule, will
2858 be valid and produce better code. */
8c87f107
RK
2859 if (GET_CODE (addr) != REG)
2860 {
2861 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2862
2863 if (1
2864#ifdef ADDRESS_COST
2f541799
MM
2865 && (CSE_ADDRESS_COST (folded) < CSE_ADDRESS_COST (addr)
2866 || (CSE_ADDRESS_COST (folded) == CSE_ADDRESS_COST (addr)
9a252d29 2867 && rtx_cost (folded, MEM) > rtx_cost (addr, MEM)))
8c87f107 2868#else
9a252d29 2869 && rtx_cost (folded, MEM) < rtx_cost (addr, MEM)
8c87f107
RK
2870#endif
2871 && validate_change (insn, loc, folded, 0))
2872 addr = folded;
2873 }
7afe21cc 2874
42495ca0
RK
2875 /* If this address is not in the hash table, we can't look for equivalences
2876 of the whole address. Also, ignore if volatile. */
2877
7afe21cc 2878 do_not_record = 0;
2197a88a 2879 hash = HASH (addr, Pmode);
7afe21cc
RK
2880 addr_volatile = do_not_record;
2881 do_not_record = save_do_not_record;
2882 hash_arg_in_memory = save_hash_arg_in_memory;
2883 hash_arg_in_struct = save_hash_arg_in_struct;
2884
2885 if (addr_volatile)
2886 return;
2887
2197a88a 2888 elt = lookup (addr, hash, Pmode);
7afe21cc 2889
7afe21cc 2890#ifndef ADDRESS_COST
42495ca0
RK
2891 if (elt)
2892 {
2d8b0f3a 2893 int our_cost = elt->cost;
42495ca0
RK
2894
2895 /* Find the lowest cost below ours that works. */
2896 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2897 if (elt->cost < our_cost
2898 && (GET_CODE (elt->exp) == REG
2899 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2900 && validate_change (insn, loc,
906c4e36 2901 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
42495ca0
RK
2902 return;
2903 }
2904#else
7afe21cc 2905
42495ca0
RK
2906 if (elt)
2907 {
2908 /* We need to find the best (under the criteria documented above) entry
2909 in the class that is valid. We use the `flag' field to indicate
2910 choices that were invalid and iterate until we can't find a better
2911 one that hasn't already been tried. */
7afe21cc 2912
42495ca0
RK
2913 for (p = elt->first_same_value; p; p = p->next_same_value)
2914 p->flag = 0;
7afe21cc 2915
42495ca0
RK
2916 while (found_better)
2917 {
2f541799 2918 int best_addr_cost = CSE_ADDRESS_COST (*loc);
42495ca0
RK
2919 int best_rtx_cost = (elt->cost + 1) >> 1;
2920 struct table_elt *best_elt = elt;
2921
2922 found_better = 0;
2923 for (p = elt->first_same_value; p; p = p->next_same_value)
2f541799 2924 if (! p->flag)
42495ca0 2925 {
2f541799
MM
2926 if ((GET_CODE (p->exp) == REG
2927 || exp_equiv_p (p->exp, p->exp, 1, 0))
2928 && (CSE_ADDRESS_COST (p->exp) < best_addr_cost
2929 || (CSE_ADDRESS_COST (p->exp) == best_addr_cost
2930 && (p->cost + 1) >> 1 > best_rtx_cost)))
2931 {
2932 found_better = 1;
2933 best_addr_cost = CSE_ADDRESS_COST (p->exp);
2934 best_rtx_cost = (p->cost + 1) >> 1;
2935 best_elt = p;
2936 }
42495ca0 2937 }
7afe21cc 2938
42495ca0
RK
2939 if (found_better)
2940 {
2941 if (validate_change (insn, loc,
906c4e36
RK
2942 canon_reg (copy_rtx (best_elt->exp),
2943 NULL_RTX), 0))
42495ca0
RK
2944 return;
2945 else
2946 best_elt->flag = 1;
2947 }
2948 }
2949 }
7afe21cc 2950
42495ca0
RK
2951 /* If the address is a binary operation with the first operand a register
2952 and the second a constant, do the same as above, but looking for
2953 equivalences of the register. Then try to simplify before checking for
2954 the best address to use. This catches a few cases: First is when we
2955 have REG+const and the register is another REG+const. We can often merge
2956 the constants and eliminate one insn and one register. It may also be
2957 that a machine has a cheap REG+REG+const. Finally, this improves the
2958 code on the Alpha for unaligned byte stores. */
2959
2960 if (flag_expensive_optimizations
2961 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2962 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2963 && GET_CODE (XEXP (*loc, 0)) == REG
2964 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
7afe21cc 2965 {
42495ca0
RK
2966 rtx c = XEXP (*loc, 1);
2967
2968 do_not_record = 0;
2197a88a 2969 hash = HASH (XEXP (*loc, 0), Pmode);
42495ca0
RK
2970 do_not_record = save_do_not_record;
2971 hash_arg_in_memory = save_hash_arg_in_memory;
2972 hash_arg_in_struct = save_hash_arg_in_struct;
2973
2197a88a 2974 elt = lookup (XEXP (*loc, 0), hash, Pmode);
42495ca0
RK
2975 if (elt == 0)
2976 return;
2977
2978 /* We need to find the best (under the criteria documented above) entry
2979 in the class that is valid. We use the `flag' field to indicate
2980 choices that were invalid and iterate until we can't find a better
2981 one that hasn't already been tried. */
7afe21cc 2982
7afe21cc 2983 for (p = elt->first_same_value; p; p = p->next_same_value)
42495ca0 2984 p->flag = 0;
7afe21cc 2985
42495ca0 2986 while (found_better)
7afe21cc 2987 {
2f541799 2988 int best_addr_cost = CSE_ADDRESS_COST (*loc);
42495ca0
RK
2989 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2990 struct table_elt *best_elt = elt;
2991 rtx best_rtx = *loc;
f6516aee
JW
2992 int count;
2993
2994 /* This is at worst case an O(n^2) algorithm, so limit our search
2995 to the first 32 elements on the list. This avoids trouble
2996 compiling code with very long basic blocks that can easily
2997 call cse_gen_binary so many times that we run out of memory. */
42495ca0
RK
2998
2999 found_better = 0;
f6516aee
JW
3000 for (p = elt->first_same_value, count = 0;
3001 p && count < 32;
3002 p = p->next_same_value, count++)
42495ca0
RK
3003 if (! p->flag
3004 && (GET_CODE (p->exp) == REG
3005 || exp_equiv_p (p->exp, p->exp, 1, 0)))
3006 {
96b0e481 3007 rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);
42495ca0 3008
2f541799
MM
3009 if ((CSE_ADDRESS_COST (new) < best_addr_cost
3010 || (CSE_ADDRESS_COST (new) == best_addr_cost
42495ca0
RK
3011 && (COST (new) + 1) >> 1 > best_rtx_cost)))
3012 {
3013 found_better = 1;
2f541799 3014 best_addr_cost = CSE_ADDRESS_COST (new);
42495ca0
RK
3015 best_rtx_cost = (COST (new) + 1) >> 1;
3016 best_elt = p;
3017 best_rtx = new;
3018 }
3019 }
3020
3021 if (found_better)
3022 {
3023 if (validate_change (insn, loc,
906c4e36
RK
3024 canon_reg (copy_rtx (best_rtx),
3025 NULL_RTX), 0))
42495ca0
RK
3026 return;
3027 else
3028 best_elt->flag = 1;
3029 }
7afe21cc
RK
3030 }
3031 }
3032#endif
3033}
3034\f
3035/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3036 operation (EQ, NE, GT, etc.), follow it back through the hash table and
3037 what values are being compared.
3038
3039 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3040 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3041 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3042 compared to produce cc0.
3043
3044 The return value is the comparison operator and is either the code of
3045 A or the code corresponding to the inverse of the comparison. */
3046
3047static enum rtx_code
13c9910f 3048find_comparison_args (code, parg1, parg2, pmode1, pmode2)
7afe21cc
RK
3049 enum rtx_code code;
3050 rtx *parg1, *parg2;
13c9910f 3051 enum machine_mode *pmode1, *pmode2;
7afe21cc
RK
3052{
3053 rtx arg1, arg2;
3054
3055 arg1 = *parg1, arg2 = *parg2;
3056
3057 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3058
b2796a4b 3059 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
7afe21cc
RK
3060 {
3061 /* Set non-zero when we find something of interest. */
3062 rtx x = 0;
3063 int reverse_code = 0;
3064 struct table_elt *p = 0;
3065
3066 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3067 On machines with CC0, this is the only case that can occur, since
3068 fold_rtx will return the COMPARE or item being compared with zero
3069 when given CC0. */
3070
3071 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3072 x = arg1;
3073
3074 /* If ARG1 is a comparison operator and CODE is testing for
3075 STORE_FLAG_VALUE, get the inner arguments. */
3076
3077 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
3078 {
c610adec
RK
3079 if (code == NE
3080 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3081 && code == LT && STORE_FLAG_VALUE == -1)
3082#ifdef FLOAT_STORE_FLAG_VALUE
3083 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3084 && FLOAT_STORE_FLAG_VALUE < 0)
3085#endif
3086 )
7afe21cc 3087 x = arg1;
c610adec
RK
3088 else if (code == EQ
3089 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3090 && code == GE && STORE_FLAG_VALUE == -1)
3091#ifdef FLOAT_STORE_FLAG_VALUE
3092 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3093 && FLOAT_STORE_FLAG_VALUE < 0)
3094#endif
3095 )
7afe21cc
RK
3096 x = arg1, reverse_code = 1;
3097 }
3098
3099 /* ??? We could also check for
3100
3101 (ne (and (eq (...) (const_int 1))) (const_int 0))
3102
3103 and related forms, but let's wait until we see them occurring. */
3104
3105 if (x == 0)
3106 /* Look up ARG1 in the hash table and see if it has an equivalence
3107 that lets us see what is being compared. */
3108 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
3109 GET_MODE (arg1));
3110 if (p) p = p->first_same_value;
3111
3112 for (; p; p = p->next_same_value)
3113 {
3114 enum machine_mode inner_mode = GET_MODE (p->exp);
3115
3116 /* If the entry isn't valid, skip it. */
3117 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3118 continue;
3119
3120 if (GET_CODE (p->exp) == COMPARE
3121 /* Another possibility is that this machine has a compare insn
3122 that includes the comparison code. In that case, ARG1 would
3123 be equivalent to a comparison operation that would set ARG1 to
3124 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3125 ORIG_CODE is the actual comparison being done; if it is an EQ,
3126 we must reverse ORIG_CODE. On machine with a negative value
3127 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3128 || ((code == NE
3129 || (code == LT
c610adec 3130 && GET_MODE_CLASS (inner_mode) == MODE_INT
906c4e36
RK
3131 && (GET_MODE_BITSIZE (inner_mode)
3132 <= HOST_BITS_PER_WIDE_INT)
7afe21cc 3133 && (STORE_FLAG_VALUE
906c4e36
RK
3134 & ((HOST_WIDE_INT) 1
3135 << (GET_MODE_BITSIZE (inner_mode) - 1))))
c610adec
RK
3136#ifdef FLOAT_STORE_FLAG_VALUE
3137 || (code == LT
3138 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3139 && FLOAT_STORE_FLAG_VALUE < 0)
3140#endif
3141 )
7afe21cc
RK
3142 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3143 {
3144 x = p->exp;
3145 break;
3146 }
3147 else if ((code == EQ
3148 || (code == GE
c610adec 3149 && GET_MODE_CLASS (inner_mode) == MODE_INT
906c4e36
RK
3150 && (GET_MODE_BITSIZE (inner_mode)
3151 <= HOST_BITS_PER_WIDE_INT)
7afe21cc 3152 && (STORE_FLAG_VALUE
906c4e36
RK
3153 & ((HOST_WIDE_INT) 1
3154 << (GET_MODE_BITSIZE (inner_mode) - 1))))
c610adec
RK
3155#ifdef FLOAT_STORE_FLAG_VALUE
3156 || (code == GE
3157 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3158 && FLOAT_STORE_FLAG_VALUE < 0)
3159#endif
3160 )
7afe21cc
RK
3161 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3162 {
3163 reverse_code = 1;
3164 x = p->exp;
3165 break;
3166 }
3167
3168 /* If this is fp + constant, the equivalent is a better operand since
3169 it may let us predict the value of the comparison. */
3170 else if (NONZERO_BASE_PLUS_P (p->exp))
3171 {
3172 arg1 = p->exp;
3173 continue;
3174 }
3175 }
3176
3177 /* If we didn't find a useful equivalence for ARG1, we are done.
3178 Otherwise, set up for the next iteration. */
3179 if (x == 0)
3180 break;
3181
3182 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3183 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3184 code = GET_CODE (x);
3185
3186 if (reverse_code)
3187 code = reverse_condition (code);
3188 }
3189
13c9910f
RS
3190 /* Return our results. Return the modes from before fold_rtx
3191 because fold_rtx might produce const_int, and then it's too late. */
3192 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
7afe21cc
RK
3193 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3194
3195 return code;
3196}
3197\f
3198/* Try to simplify a unary operation CODE whose output mode is to be
3199 MODE with input operand OP whose mode was originally OP_MODE.
3200 Return zero if no simplification can be made. */
3201
3202rtx
3203simplify_unary_operation (code, mode, op, op_mode)
3204 enum rtx_code code;
3205 enum machine_mode mode;
3206 rtx op;
3207 enum machine_mode op_mode;
3208{
3209 register int width = GET_MODE_BITSIZE (mode);
3210
3211 /* The order of these tests is critical so that, for example, we don't
3212 check the wrong mode (input vs. output) for a conversion operation,
3213 such as FIX. At some point, this should be simplified. */
3214
62c0ea12 3215#if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC)
7afe21cc 3216
62c0ea12
RK
3217 if (code == FLOAT && GET_MODE (op) == VOIDmode
3218 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
7afe21cc 3219 {
62c0ea12 3220 HOST_WIDE_INT hv, lv;
7afe21cc
RK
3221 REAL_VALUE_TYPE d;
3222
62c0ea12
RK
3223 if (GET_CODE (op) == CONST_INT)
3224 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
3225 else
7ac4a266 3226 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
7afe21cc
RK
3227
3228#ifdef REAL_ARITHMETIC
2ebcccf3 3229 REAL_VALUE_FROM_INT (d, lv, hv, mode);
7afe21cc 3230#else
62c0ea12 3231 if (hv < 0)
7afe21cc 3232 {
62c0ea12 3233 d = (double) (~ hv);
906c4e36
RK
3234 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3235 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
62c0ea12 3236 d += (double) (unsigned HOST_WIDE_INT) (~ lv);
7afe21cc
RK
3237 d = (- d - 1.0);
3238 }
3239 else
3240 {
62c0ea12 3241 d = (double) hv;
906c4e36
RK
3242 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3243 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
62c0ea12 3244 d += (double) (unsigned HOST_WIDE_INT) lv;
7afe21cc
RK
3245 }
3246#endif /* REAL_ARITHMETIC */
940fd0b5 3247 d = real_value_truncate (mode, d);
7afe21cc
RK
3248 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3249 }
62c0ea12
RK
3250 else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
3251 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
7afe21cc 3252 {
62c0ea12 3253 HOST_WIDE_INT hv, lv;
7afe21cc
RK
3254 REAL_VALUE_TYPE d;
3255
62c0ea12
RK
3256 if (GET_CODE (op) == CONST_INT)
3257 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
3258 else
7ac4a266 3259 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
62c0ea12 3260
a9c6464d
RK
3261 if (op_mode == VOIDmode)
3262 {
3263 /* We don't know how to interpret negative-looking numbers in
3264 this case, so don't try to fold those. */
3265 if (hv < 0)
3266 return 0;
3267 }
3268 else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
62c0ea12
RK
3269 ;
3270 else
3271 hv = 0, lv &= GET_MODE_MASK (op_mode);
3272
7afe21cc 3273#ifdef REAL_ARITHMETIC
2ebcccf3 3274 REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv, mode);
7afe21cc 3275#else
62c0ea12 3276
138cec59 3277 d = (double) (unsigned HOST_WIDE_INT) hv;
906c4e36
RK
3278 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3279 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
62c0ea12 3280 d += (double) (unsigned HOST_WIDE_INT) lv;
7afe21cc 3281#endif /* REAL_ARITHMETIC */
940fd0b5 3282 d = real_value_truncate (mode, d);
7afe21cc
RK
3283 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3284 }
3285#endif
3286
f89e32e9
RK
3287 if (GET_CODE (op) == CONST_INT
3288 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
7afe21cc 3289 {
906c4e36
RK
3290 register HOST_WIDE_INT arg0 = INTVAL (op);
3291 register HOST_WIDE_INT val;
7afe21cc
RK
3292
3293 switch (code)
3294 {
3295 case NOT:
3296 val = ~ arg0;
3297 break;
3298
3299 case NEG:
3300 val = - arg0;
3301 break;
3302
3303 case ABS:
3304 val = (arg0 >= 0 ? arg0 : - arg0);
3305 break;
3306
3307 case FFS:
3308 /* Don't use ffs here. Instead, get low order bit and then its
3309 number. If arg0 is zero, this will return 0, as desired. */
3310 arg0 &= GET_MODE_MASK (mode);
3311 val = exact_log2 (arg0 & (- arg0)) + 1;
3312 break;
3313
3314 case TRUNCATE:
3315 val = arg0;
3316 break;
3317
3318 case ZERO_EXTEND:
3319 if (op_mode == VOIDmode)
3320 op_mode = mode;
82a5e898 3321 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
d80e9fd7
RS
3322 {
3323 /* If we were really extending the mode,
3324 we would have to distinguish between zero-extension
3325 and sign-extension. */
3326 if (width != GET_MODE_BITSIZE (op_mode))
3327 abort ();
3328 val = arg0;
3329 }
82a5e898
CH
3330 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3331 val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
7afe21cc
RK
3332 else
3333 return 0;
3334 break;
3335
3336 case SIGN_EXTEND:
3337 if (op_mode == VOIDmode)
3338 op_mode = mode;
82a5e898 3339 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
d80e9fd7
RS
3340 {
3341 /* If we were really extending the mode,
3342 we would have to distinguish between zero-extension
3343 and sign-extension. */
3344 if (width != GET_MODE_BITSIZE (op_mode))
3345 abort ();
3346 val = arg0;
3347 }
f12564b4 3348 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
7afe21cc 3349 {
82a5e898
CH
3350 val
3351 = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3352 if (val
3353 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
3354 val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
7afe21cc
RK
3355 }
3356 else
3357 return 0;
3358 break;
3359
d45cf215
RS
3360 case SQRT:
3361 return 0;
3362
7afe21cc
RK
3363 default:
3364 abort ();
3365 }
3366
7e4ce834 3367 val = trunc_int_for_mode (val, mode);
737e7965 3368
906c4e36 3369 return GEN_INT (val);
7afe21cc
RK
3370 }
3371
3372 /* We can do some operations on integer CONST_DOUBLEs. Also allow
0f41302f 3373 for a DImode operation on a CONST_INT. */
8e0ac43b 3374 else if (GET_MODE (op) == VOIDmode && width <= HOST_BITS_PER_INT * 2
7afe21cc
RK
3375 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3376 {
906c4e36 3377 HOST_WIDE_INT l1, h1, lv, hv;
7afe21cc
RK
3378
3379 if (GET_CODE (op) == CONST_DOUBLE)
3380 l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
3381 else
3382 l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;
3383
3384 switch (code)
3385 {
3386 case NOT:
3387 lv = ~ l1;
3388 hv = ~ h1;
3389 break;
3390
3391 case NEG:
3392 neg_double (l1, h1, &lv, &hv);
3393 break;
3394
3395 case ABS:
3396 if (h1 < 0)
3397 neg_double (l1, h1, &lv, &hv);
3398 else
3399 lv = l1, hv = h1;
3400 break;
3401
3402 case FFS:
3403 hv = 0;
3404 if (l1 == 0)
906c4e36 3405 lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
7afe21cc
RK
3406 else
3407 lv = exact_log2 (l1 & (-l1)) + 1;
3408 break;
3409
3410 case TRUNCATE:
8e0ac43b 3411 /* This is just a change-of-mode, so do nothing. */
d50d63c0 3412 lv = l1, hv = h1;
7afe21cc
RK
3413 break;
3414
f72aed24
RS
3415 case ZERO_EXTEND:
3416 if (op_mode == VOIDmode
906c4e36 3417 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
f72aed24
RS
3418 return 0;
3419
3420 hv = 0;
3421 lv = l1 & GET_MODE_MASK (op_mode);
3422 break;
3423
3424 case SIGN_EXTEND:
3425 if (op_mode == VOIDmode
906c4e36 3426 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
f72aed24
RS
3427 return 0;
3428 else
3429 {
3430 lv = l1 & GET_MODE_MASK (op_mode);
906c4e36
RK
3431 if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
3432 && (lv & ((HOST_WIDE_INT) 1
3433 << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
3434 lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
f72aed24 3435
906c4e36 3436 hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
f72aed24
RS
3437 }
3438 break;
3439
d45cf215
RS
3440 case SQRT:
3441 return 0;
3442
7afe21cc
RK
3443 default:
3444 return 0;
3445 }
3446
3447 return immed_double_const (lv, hv, mode);
3448 }
3449
3450#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3451 else if (GET_CODE (op) == CONST_DOUBLE
3452 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3453 {
3454 REAL_VALUE_TYPE d;
3455 jmp_buf handler;
3456 rtx x;
3457
3458 if (setjmp (handler))
3459 /* There used to be a warning here, but that is inadvisable.
3460 People may want to cause traps, and the natural way
3461 to do it should not get a warning. */
3462 return 0;
3463
3464 set_float_handler (handler);
3465
3466 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3467
3468 switch (code)
3469 {
3470 case NEG:
3471 d = REAL_VALUE_NEGATE (d);
3472 break;
3473
3474 case ABS:
8b3686ed 3475 if (REAL_VALUE_NEGATIVE (d))
7afe21cc
RK
3476 d = REAL_VALUE_NEGATE (d);
3477 break;
3478
3479 case FLOAT_TRUNCATE:
d3159aee 3480 d = real_value_truncate (mode, d);
7afe21cc
RK
3481 break;
3482
3483 case FLOAT_EXTEND:
3484 /* All this does is change the mode. */
3485 break;
3486
3487 case FIX:
d3159aee 3488 d = REAL_VALUE_RNDZINT (d);
7afe21cc
RK
3489 break;
3490
3491 case UNSIGNED_FIX:
d3159aee 3492 d = REAL_VALUE_UNSIGNED_RNDZINT (d);
7afe21cc
RK
3493 break;
3494
d45cf215
RS
3495 case SQRT:
3496 return 0;
3497
7afe21cc
RK
3498 default:
3499 abort ();
3500 }
3501
560c94a2 3502 x = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
906c4e36 3503 set_float_handler (NULL_PTR);
7afe21cc
RK
3504 return x;
3505 }
8e0ac43b
RK
3506
3507 else if (GET_CODE (op) == CONST_DOUBLE
3508 && GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
3509 && GET_MODE_CLASS (mode) == MODE_INT
906c4e36 3510 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
7afe21cc
RK
3511 {
3512 REAL_VALUE_TYPE d;
3513 jmp_buf handler;
906c4e36 3514 HOST_WIDE_INT val;
7afe21cc
RK
3515
3516 if (setjmp (handler))
3517 return 0;
3518
3519 set_float_handler (handler);
3520
3521 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3522
3523 switch (code)
3524 {
3525 case FIX:
3526 val = REAL_VALUE_FIX (d);
3527 break;
3528
3529 case UNSIGNED_FIX:
3530 val = REAL_VALUE_UNSIGNED_FIX (d);
3531 break;
3532
3533 default:
3534 abort ();
3535 }
3536
906c4e36 3537 set_float_handler (NULL_PTR);
7afe21cc 3538
7e4ce834 3539 val = trunc_int_for_mode (val, mode);
ad89d6f6 3540
906c4e36 3541 return GEN_INT (val);
7afe21cc
RK
3542 }
3543#endif
a6acbe15
RS
3544 /* This was formerly used only for non-IEEE float.
3545 eggert@twinsun.com says it is safe for IEEE also. */
3546 else
7afe21cc
RK
3547 {
3548 /* There are some simplifications we can do even if the operands
a6acbe15 3549 aren't constant. */
7afe21cc
RK
3550 switch (code)
3551 {
3552 case NEG:
3553 case NOT:
3554 /* (not (not X)) == X, similarly for NEG. */
3555 if (GET_CODE (op) == code)
3556 return XEXP (op, 0);
3557 break;
3558
3559 case SIGN_EXTEND:
3560 /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
3561 becomes just the MINUS if its mode is MODE. This allows
3562 folding switch statements on machines using casesi (such as
3563 the Vax). */
3564 if (GET_CODE (op) == TRUNCATE
3565 && GET_MODE (XEXP (op, 0)) == mode
3566 && GET_CODE (XEXP (op, 0)) == MINUS
3567 && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
3568 && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
3569 return XEXP (op, 0);
cceb347c
RK
3570
3571#ifdef POINTERS_EXTEND_UNSIGNED
3572 if (! POINTERS_EXTEND_UNSIGNED
3573 && mode == Pmode && GET_MODE (op) == ptr_mode
3574 && CONSTANT_P (op))
3575 return convert_memory_address (Pmode, op);
3576#endif
3577 break;
3578
3579#ifdef POINTERS_EXTEND_UNSIGNED
3580 case ZERO_EXTEND:
3581 if (POINTERS_EXTEND_UNSIGNED
3582 && mode == Pmode && GET_MODE (op) == ptr_mode
3583 && CONSTANT_P (op))
3584 return convert_memory_address (Pmode, op);
7afe21cc 3585 break;
cceb347c 3586#endif
e9a25f70
JL
3587
3588 default:
3589 break;
7afe21cc
RK
3590 }
3591
3592 return 0;
3593 }
7afe21cc
RK
3594}
3595\f
3596/* Simplify a binary operation CODE with result mode MODE, operating on OP0
3597 and OP1. Return 0 if no simplification is possible.
3598
3599 Don't use this for relational operations such as EQ or LT.
3600 Use simplify_relational_operation instead. */
3601
3602rtx
3603simplify_binary_operation (code, mode, op0, op1)
3604 enum rtx_code code;
3605 enum machine_mode mode;
3606 rtx op0, op1;
3607{
906c4e36
RK
3608 register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3609 HOST_WIDE_INT val;
7afe21cc 3610 int width = GET_MODE_BITSIZE (mode);
96b0e481 3611 rtx tem;
7afe21cc
RK
3612
3613 /* Relational operations don't work here. We must know the mode
3614 of the operands in order to do the comparison correctly.
3615 Assuming a full word can give incorrect results.
3616 Consider comparing 128 with -128 in QImode. */
3617
3618 if (GET_RTX_CLASS (code) == '<')
3619 abort ();
3620
3621#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3622 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3623 && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3624 && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3625 {
3626 REAL_VALUE_TYPE f0, f1, value;
3627 jmp_buf handler;
3628
3629 if (setjmp (handler))
3630 return 0;
3631
3632 set_float_handler (handler);
3633
3634 REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3635 REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
5352b11a
RS
3636 f0 = real_value_truncate (mode, f0);
3637 f1 = real_value_truncate (mode, f1);
7afe21cc
RK
3638
3639#ifdef REAL_ARITHMETIC
956d6950
JL
3640#ifndef REAL_INFINITY
3641 if (code == DIV && REAL_VALUES_EQUAL (f1, dconst0))
3642 return 0;
3643#endif
d3159aee 3644 REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
7afe21cc
RK
3645#else
3646 switch (code)
3647 {
3648 case PLUS:
3649 value = f0 + f1;
3650 break;
3651 case MINUS:
3652 value = f0 - f1;
3653 break;
3654 case MULT:
3655 value = f0 * f1;
3656 break;
3657 case DIV:
3658#ifndef REAL_INFINITY
3659 if (f1 == 0)
21d12b80 3660 return 0;
7afe21cc
RK
3661#endif
3662 value = f0 / f1;
3663 break;
3664 case SMIN:
3665 value = MIN (f0, f1);
3666 break;
3667 case SMAX:
3668 value = MAX (f0, f1);
3669 break;
3670 default:
3671 abort ();
3672 }
3673#endif
3674
5352b11a 3675 value = real_value_truncate (mode, value);
831522a4 3676 set_float_handler (NULL_PTR);
560c94a2 3677 return CONST_DOUBLE_FROM_REAL_VALUE (value, mode);
7afe21cc 3678 }
6076248a 3679#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
7afe21cc
RK
3680
3681 /* We can fold some multi-word operations. */
6076248a 3682 if (GET_MODE_CLASS (mode) == MODE_INT
33085906 3683 && width == HOST_BITS_PER_WIDE_INT * 2
fe873240 3684 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
6076248a 3685 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
7afe21cc 3686 {
906c4e36 3687 HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
7afe21cc 3688
fe873240
RK
3689 if (GET_CODE (op0) == CONST_DOUBLE)
3690 l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3691 else
3692 l1 = INTVAL (op0), h1 = l1 < 0 ? -1 : 0;
7afe21cc
RK
3693
3694 if (GET_CODE (op1) == CONST_DOUBLE)
3695 l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3696 else
3697 l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3698
3699 switch (code)
3700 {
3701 case MINUS:
3702 /* A - B == A + (-B). */
3703 neg_double (l2, h2, &lv, &hv);
3704 l2 = lv, h2 = hv;
3705
0f41302f 3706 /* .. fall through ... */
7afe21cc
RK
3707
3708 case PLUS:
3709 add_double (l1, h1, l2, h2, &lv, &hv);
3710 break;
3711
3712 case MULT:
3713 mul_double (l1, h1, l2, h2, &lv, &hv);
3714 break;
3715
3716 case DIV: case MOD: case UDIV: case UMOD:
3717 /* We'd need to include tree.h to do this and it doesn't seem worth
3718 it. */
3719 return 0;
3720
3721 case AND:
3722 lv = l1 & l2, hv = h1 & h2;
3723 break;
3724
3725 case IOR:
3726 lv = l1 | l2, hv = h1 | h2;
3727 break;
3728
3729 case XOR:
3730 lv = l1 ^ l2, hv = h1 ^ h2;
3731 break;
3732
3733 case SMIN:
906c4e36
RK
3734 if (h1 < h2
3735 || (h1 == h2
3736 && ((unsigned HOST_WIDE_INT) l1
3737 < (unsigned HOST_WIDE_INT) l2)))
7afe21cc
RK
3738 lv = l1, hv = h1;
3739 else
3740 lv = l2, hv = h2;
3741 break;
3742
3743 case SMAX:
906c4e36
RK
3744 if (h1 > h2
3745 || (h1 == h2
3746 && ((unsigned HOST_WIDE_INT) l1
3747 > (unsigned HOST_WIDE_INT) l2)))
7afe21cc
RK
3748 lv = l1, hv = h1;
3749 else
3750 lv = l2, hv = h2;
3751 break;
3752
3753 case UMIN:
906c4e36
RK
3754 if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3755 || (h1 == h2
3756 && ((unsigned HOST_WIDE_INT) l1
3757 < (unsigned HOST_WIDE_INT) l2)))
7afe21cc
RK
3758 lv = l1, hv = h1;
3759 else
3760 lv = l2, hv = h2;
3761 break;
3762
3763 case UMAX:
906c4e36
RK
3764 if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3765 || (h1 == h2
3766 && ((unsigned HOST_WIDE_INT) l1
3767 > (unsigned HOST_WIDE_INT) l2)))
7afe21cc
RK
3768 lv = l1, hv = h1;
3769 else
3770 lv = l2, hv = h2;
3771 break;
3772
3773 case LSHIFTRT: case ASHIFTRT:
45620ed4 3774 case ASHIFT:
7afe21cc
RK
3775 case ROTATE: case ROTATERT:
3776#ifdef SHIFT_COUNT_TRUNCATED
85c0a556
RK
3777 if (SHIFT_COUNT_TRUNCATED)
3778 l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
7afe21cc
RK
3779#endif
3780
3781 if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3782 return 0;
3783
3784 if (code == LSHIFTRT || code == ASHIFTRT)
3785 rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3786 code == ASHIFTRT);
45620ed4
RK
3787 else if (code == ASHIFT)
3788 lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
7afe21cc
RK
3789 else if (code == ROTATE)
3790 lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3791 else /* code == ROTATERT */
3792 rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3793 break;
3794
3795 default:
3796 return 0;
3797 }
3798
3799 return immed_double_const (lv, hv, mode);
3800 }
7afe21cc
RK
3801
3802 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
906c4e36 3803 || width > HOST_BITS_PER_WIDE_INT || width == 0)
7afe21cc
RK
3804 {
3805 /* Even if we can't compute a constant result,
3806 there are some cases worth simplifying. */
3807
3808 switch (code)
3809 {
3810 case PLUS:
3811 /* In IEEE floating point, x+0 is not the same as x. Similarly
3812 for the other optimizations below. */
3813 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
a83afb65 3814 && FLOAT_MODE_P (mode) && ! flag_fast_math)
7afe21cc
RK
3815 break;
3816
3817 if (op1 == CONST0_RTX (mode))
3818 return op0;
3819
7afe21cc
RK
3820 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3821 if (GET_CODE (op0) == NEG)
96b0e481 3822 return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
7afe21cc 3823 else if (GET_CODE (op1) == NEG)
96b0e481 3824 return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
7afe21cc 3825
96b0e481
RK
3826 /* Handle both-operands-constant cases. We can only add
3827 CONST_INTs to constants since the sum of relocatable symbols
fe873240
RK
3828 can't be handled by most assemblers. Don't add CONST_INT
3829 to CONST_INT since overflow won't be computed properly if wider
3830 than HOST_BITS_PER_WIDE_INT. */
7afe21cc 3831
fe873240
RK
3832 if (CONSTANT_P (op0) && GET_MODE (op0) != VOIDmode
3833 && GET_CODE (op1) == CONST_INT)
96b0e481 3834 return plus_constant (op0, INTVAL (op1));
fe873240
RK
3835 else if (CONSTANT_P (op1) && GET_MODE (op1) != VOIDmode
3836 && GET_CODE (op0) == CONST_INT)
96b0e481 3837 return plus_constant (op1, INTVAL (op0));
7afe21cc 3838
30d69925
RK
3839 /* See if this is something like X * C - X or vice versa or
3840 if the multiplication is written as a shift. If so, we can
3841 distribute and make a new multiply, shift, or maybe just
3842 have X (if C is 2 in the example above). But don't make
3843 real multiply if we didn't have one before. */
3844
3845 if (! FLOAT_MODE_P (mode))
3846 {
3847 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3848 rtx lhs = op0, rhs = op1;
3849 int had_mult = 0;
3850
3851 if (GET_CODE (lhs) == NEG)
3852 coeff0 = -1, lhs = XEXP (lhs, 0);
3853 else if (GET_CODE (lhs) == MULT
3854 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3855 {
3856 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3857 had_mult = 1;
3858 }
3859 else if (GET_CODE (lhs) == ASHIFT
3860 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3861 && INTVAL (XEXP (lhs, 1)) >= 0
3862 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3863 {
3864 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3865 lhs = XEXP (lhs, 0);
3866 }
3867
3868 if (GET_CODE (rhs) == NEG)
3869 coeff1 = -1, rhs = XEXP (rhs, 0);
3870 else if (GET_CODE (rhs) == MULT
3871 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3872 {
3873 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3874 had_mult = 1;
3875 }
3876 else if (GET_CODE (rhs) == ASHIFT
3877 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3878 && INTVAL (XEXP (rhs, 1)) >= 0
3879 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3880 {
3881 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3882 rhs = XEXP (rhs, 0);
3883 }
3884
3885 if (rtx_equal_p (lhs, rhs))
3886 {
3887 tem = cse_gen_binary (MULT, mode, lhs,
3888 GEN_INT (coeff0 + coeff1));
3889 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3890 }
3891 }
3892
96b0e481
RK
3893 /* If one of the operands is a PLUS or a MINUS, see if we can
3894 simplify this by the associative law.
3895 Don't use the associative law for floating point.
3896 The inaccuracy makes it nonassociative,
3897 and subtle programs can break if operations are associated. */
7afe21cc 3898
cbf6a543 3899 if (INTEGRAL_MODE_P (mode)
96b0e481
RK
3900 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3901 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3902 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3903 return tem;
7afe21cc
RK
3904 break;
3905
3906 case COMPARE:
3907#ifdef HAVE_cc0
3908 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3909 using cc0, in which case we want to leave it as a COMPARE
3910 so we can distinguish it from a register-register-copy.
3911
3912 In IEEE floating point, x-0 is not the same as x. */
3913
3914 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
a83afb65 3915 || ! FLOAT_MODE_P (mode) || flag_fast_math)
7afe21cc
RK
3916 && op1 == CONST0_RTX (mode))
3917 return op0;
3918#else
3919 /* Do nothing here. */
3920#endif
3921 break;
3922
3923 case MINUS:
21648b45
RK
3924 /* None of these optimizations can be done for IEEE
3925 floating point. */
3926 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
a83afb65 3927 && FLOAT_MODE_P (mode) && ! flag_fast_math)
21648b45
RK
3928 break;
3929
a83afb65
RK
3930 /* We can't assume x-x is 0 even with non-IEEE floating point,
3931 but since it is zero except in very strange circumstances, we
3932 will treat it as zero with -ffast-math. */
7afe21cc
RK
3933 if (rtx_equal_p (op0, op1)
3934 && ! side_effects_p (op0)
a83afb65
RK
3935 && (! FLOAT_MODE_P (mode) || flag_fast_math))
3936 return CONST0_RTX (mode);
7afe21cc
RK
3937
3938 /* Change subtraction from zero into negation. */
3939 if (op0 == CONST0_RTX (mode))
38a448ca 3940 return gen_rtx_NEG (mode, op1);
7afe21cc 3941
96b0e481
RK
3942 /* (-1 - a) is ~a. */
3943 if (op0 == constm1_rtx)
38a448ca 3944 return gen_rtx_NOT (mode, op1);
96b0e481 3945
7afe21cc
RK
3946 /* Subtracting 0 has no effect. */
3947 if (op1 == CONST0_RTX (mode))
3948 return op0;
3949
30d69925
RK
3950 /* See if this is something like X * C - X or vice versa or
3951 if the multiplication is written as a shift. If so, we can
3952 distribute and make a new multiply, shift, or maybe just
3953 have X (if C is 2 in the example above). But don't make
3954 real multiply if we didn't have one before. */
3955
3956 if (! FLOAT_MODE_P (mode))
3957 {
3958 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3959 rtx lhs = op0, rhs = op1;
3960 int had_mult = 0;
3961
3962 if (GET_CODE (lhs) == NEG)
3963 coeff0 = -1, lhs = XEXP (lhs, 0);
3964 else if (GET_CODE (lhs) == MULT
3965 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3966 {
3967 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3968 had_mult = 1;
3969 }
3970 else if (GET_CODE (lhs) == ASHIFT
3971 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3972 && INTVAL (XEXP (lhs, 1)) >= 0
3973 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3974 {
3975 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3976 lhs = XEXP (lhs, 0);
3977 }
3978
3979 if (GET_CODE (rhs) == NEG)
3980 coeff1 = - 1, rhs = XEXP (rhs, 0);
3981 else if (GET_CODE (rhs) == MULT
3982 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3983 {
3984 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3985 had_mult = 1;
3986 }
3987 else if (GET_CODE (rhs) == ASHIFT
3988 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3989 && INTVAL (XEXP (rhs, 1)) >= 0
3990 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3991 {
3992 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3993 rhs = XEXP (rhs, 0);
3994 }
3995
3996 if (rtx_equal_p (lhs, rhs))
3997 {
3998 tem = cse_gen_binary (MULT, mode, lhs,
3999 GEN_INT (coeff0 - coeff1));
4000 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
4001 }
4002 }
4003
7afe21cc
RK
4004 /* (a - (-b)) -> (a + b). */
4005 if (GET_CODE (op1) == NEG)
96b0e481 4006 return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
7afe21cc 4007
96b0e481
RK
4008 /* If one of the operands is a PLUS or a MINUS, see if we can
4009 simplify this by the associative law.
4010 Don't use the associative law for floating point.
7afe21cc
RK
4011 The inaccuracy makes it nonassociative,
4012 and subtle programs can break if operations are associated. */
7afe21cc 4013
cbf6a543 4014 if (INTEGRAL_MODE_P (mode)
96b0e481
RK
4015 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
4016 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
4017 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
4018 return tem;
7afe21cc
RK
4019
4020 /* Don't let a relocatable value get a negative coeff. */
b5a09c41 4021 if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
7afe21cc 4022 return plus_constant (op0, - INTVAL (op1));
29d72c4b
TG
4023
4024 /* (x - (x & y)) -> (x & ~y) */
4025 if (GET_CODE (op1) == AND)
4026 {
4027 if (rtx_equal_p (op0, XEXP (op1, 0)))
c5c76735
JL
4028 return cse_gen_binary (AND, mode, op0,
4029 gen_rtx_NOT (mode, XEXP (op1, 1)));
29d72c4b 4030 if (rtx_equal_p (op0, XEXP (op1, 1)))
c5c76735
JL
4031 return cse_gen_binary (AND, mode, op0,
4032 gen_rtx_NOT (mode, XEXP (op1, 0)));
29d72c4b 4033 }
7afe21cc
RK
4034 break;
4035
4036 case MULT:
4037 if (op1 == constm1_rtx)
4038 {
96b0e481 4039 tem = simplify_unary_operation (NEG, mode, op0, mode);
7afe21cc 4040
38a448ca 4041 return tem ? tem : gen_rtx_NEG (mode, op0);
7afe21cc
RK
4042 }
4043
4044 /* In IEEE floating point, x*0 is not always 0. */
4045 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
a83afb65 4046 || ! FLOAT_MODE_P (mode) || flag_fast_math)
7afe21cc
RK
4047 && op1 == CONST0_RTX (mode)
4048 && ! side_effects_p (op0))
4049 return op1;
4050
4051 /* In IEEE floating point, x*1 is not equivalent to x for nans.
4052 However, ANSI says we can drop signals,
4053 so we can do this anyway. */
4054 if (op1 == CONST1_RTX (mode))
4055 return op0;
4056
c407b802
RK
4057 /* Convert multiply by constant power of two into shift unless
4058 we are still generating RTL. This test is a kludge. */
7afe21cc 4059 if (GET_CODE (op1) == CONST_INT
c407b802 4060 && (val = exact_log2 (INTVAL (op1))) >= 0
2d917903
JW
4061 /* If the mode is larger than the host word size, and the
4062 uppermost bit is set, then this isn't a power of two due
4063 to implicit sign extension. */
4064 && (width <= HOST_BITS_PER_WIDE_INT
4065 || val != HOST_BITS_PER_WIDE_INT - 1)
c407b802 4066 && ! rtx_equal_function_value_matters)
38a448ca 4067 return gen_rtx_ASHIFT (mode, op0, GEN_INT (val));
7afe21cc
RK
4068
4069 if (GET_CODE (op1) == CONST_DOUBLE
4070 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
4071 {
4072 REAL_VALUE_TYPE d;
5a3d4bef
RK
4073 jmp_buf handler;
4074 int op1is2, op1ism1;
4075
4076 if (setjmp (handler))
4077 return 0;
4078
4079 set_float_handler (handler);
7afe21cc 4080 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
5a3d4bef
RK
4081 op1is2 = REAL_VALUES_EQUAL (d, dconst2);
4082 op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
4083 set_float_handler (NULL_PTR);
7afe21cc
RK
4084
4085 /* x*2 is x+x and x*(-1) is -x */
5a3d4bef 4086 if (op1is2 && GET_MODE (op0) == mode)
38a448ca 4087 return gen_rtx_PLUS (mode, op0, copy_rtx (op0));
7afe21cc 4088
5a3d4bef 4089 else if (op1ism1 && GET_MODE (op0) == mode)
38a448ca 4090 return gen_rtx_NEG (mode, op0);
7afe21cc
RK
4091 }
4092 break;
4093
4094 case IOR:
4095 if (op1 == const0_rtx)
4096 return op0;
4097 if (GET_CODE (op1) == CONST_INT
4098 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4099 return op1;
4100 if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4101 return op0;
4102 /* A | (~A) -> -1 */
4103 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
4104 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
31dcf83f 4105 && ! side_effects_p (op0)
8e7e5365 4106 && GET_MODE_CLASS (mode) != MODE_CC)
7afe21cc
RK
4107 return constm1_rtx;
4108 break;
4109
4110 case XOR:
4111 if (op1 == const0_rtx)
4112 return op0;
4113 if (GET_CODE (op1) == CONST_INT
4114 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
38a448ca 4115 return gen_rtx_NOT (mode, op0);
31dcf83f 4116 if (op0 == op1 && ! side_effects_p (op0)
8e7e5365 4117 && GET_MODE_CLASS (mode) != MODE_CC)
7afe21cc
RK
4118 return const0_rtx;
4119 break;
4120
4121 case AND:
4122 if (op1 == const0_rtx && ! side_effects_p (op0))
4123 return const0_rtx;
4124 if (GET_CODE (op1) == CONST_INT
4125 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4126 return op0;
31dcf83f 4127 if (op0 == op1 && ! side_effects_p (op0)
8e7e5365 4128 && GET_MODE_CLASS (mode) != MODE_CC)
7afe21cc
RK
4129 return op0;
4130 /* A & (~A) -> 0 */
4131 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
4132 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
709ab4fc 4133 && ! side_effects_p (op0)
8e7e5365 4134 && GET_MODE_CLASS (mode) != MODE_CC)
7afe21cc
RK
4135 return const0_rtx;
4136 break;
4137
4138 case UDIV:
4139 /* Convert divide by power of two into shift (divide by 1 handled
4140 below). */
4141 if (GET_CODE (op1) == CONST_INT
4142 && (arg1 = exact_log2 (INTVAL (op1))) > 0)
38a448ca 4143 return gen_rtx_LSHIFTRT (mode, op0, GEN_INT (arg1));
7afe21cc 4144
0f41302f 4145 /* ... fall through ... */
7afe21cc
RK
4146
4147 case DIV:
4148 if (op1 == CONST1_RTX (mode))
4149 return op0;
e7a522ba
RS
4150
4151 /* In IEEE floating point, 0/x is not always 0. */
4152 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
a83afb65 4153 || ! FLOAT_MODE_P (mode) || flag_fast_math)
e7a522ba
RS
4154 && op0 == CONST0_RTX (mode)
4155 && ! side_effects_p (op1))
7afe21cc 4156 return op0;
e7a522ba 4157
7afe21cc 4158#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
a83afb65
RK
4159 /* Change division by a constant into multiplication. Only do
4160 this with -ffast-math until an expert says it is safe in
4161 general. */
7afe21cc
RK
4162 else if (GET_CODE (op1) == CONST_DOUBLE
4163 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
a83afb65
RK
4164 && op1 != CONST0_RTX (mode)
4165 && flag_fast_math)
7afe21cc
RK
4166 {
4167 REAL_VALUE_TYPE d;
4168 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
a83afb65
RK
4169
4170 if (! REAL_VALUES_EQUAL (d, dconst0))
4171 {
7afe21cc 4172#if defined (REAL_ARITHMETIC)
a83afb65 4173 REAL_ARITHMETIC (d, rtx_to_tree_code (DIV), dconst1, d);
38a448ca
RH
4174 return gen_rtx_MULT (mode, op0,
4175 CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
7afe21cc 4176#else
c5c76735
JL
4177 return
4178 gen_rtx_MULT (mode, op0,
4179 CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
7afe21cc 4180#endif
a83afb65
RK
4181 }
4182 }
7afe21cc
RK
4183#endif
4184 break;
4185
4186 case UMOD:
4187 /* Handle modulus by power of two (mod with 1 handled below). */
4188 if (GET_CODE (op1) == CONST_INT
4189 && exact_log2 (INTVAL (op1)) > 0)
38a448ca 4190 return gen_rtx_AND (mode, op0, GEN_INT (INTVAL (op1) - 1));
7afe21cc 4191
0f41302f 4192 /* ... fall through ... */
7afe21cc
RK
4193
4194 case MOD:
4195 if ((op0 == const0_rtx || op1 == const1_rtx)
4196 && ! side_effects_p (op0) && ! side_effects_p (op1))
4197 return const0_rtx;
4198 break;
4199
4200 case ROTATERT:
4201 case ROTATE:
4202 /* Rotating ~0 always results in ~0. */
906c4e36 4203 if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
6a651371 4204 && (unsigned HOST_WIDE_INT) INTVAL (op0) == GET_MODE_MASK (mode)
7afe21cc
RK
4205 && ! side_effects_p (op1))
4206 return op0;
4207
0f41302f 4208 /* ... fall through ... */
7afe21cc 4209
7afe21cc
RK
4210 case ASHIFT:
4211 case ASHIFTRT:
4212 case LSHIFTRT:
4213 if (op1 == const0_rtx)
4214 return op0;
4215 if (op0 == const0_rtx && ! side_effects_p (op1))
4216 return op0;
4217 break;
4218
4219 case SMIN:
906c4e36
RK
4220 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
4221 && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width -1)
7afe21cc
RK
4222 && ! side_effects_p (op0))
4223 return op1;
4224 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4225 return op0;
4226 break;
4227
4228 case SMAX:
906c4e36 4229 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
6a651371 4230 && ((unsigned HOST_WIDE_INT) INTVAL (op1)
dbbe6445 4231 == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
7afe21cc
RK
4232 && ! side_effects_p (op0))
4233 return op1;
4234 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4235 return op0;
4236 break;
4237
4238 case UMIN:
4239 if (op1 == const0_rtx && ! side_effects_p (op0))
4240 return op1;
4241 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4242 return op0;
4243 break;
4244
4245 case UMAX:
4246 if (op1 == constm1_rtx && ! side_effects_p (op0))
4247 return op1;
4248 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4249 return op0;
4250 break;
4251
4252 default:
4253 abort ();
4254 }
4255
4256 return 0;
4257 }
4258
4259 /* Get the integer argument values in two forms:
4260 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
4261
4262 arg0 = INTVAL (op0);
4263 arg1 = INTVAL (op1);
4264
906c4e36 4265 if (width < HOST_BITS_PER_WIDE_INT)
7afe21cc 4266 {
906c4e36
RK
4267 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
4268 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
7afe21cc
RK
4269
4270 arg0s = arg0;
906c4e36
RK
4271 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4272 arg0s |= ((HOST_WIDE_INT) (-1) << width);
7afe21cc
RK
4273
4274 arg1s = arg1;
906c4e36
RK
4275 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4276 arg1s |= ((HOST_WIDE_INT) (-1) << width);
7afe21cc
RK
4277 }
4278 else
4279 {
4280 arg0s = arg0;
4281 arg1s = arg1;
4282 }
4283
4284 /* Compute the value of the arithmetic. */
4285
4286 switch (code)
4287 {
4288 case PLUS:
538b78e7 4289 val = arg0s + arg1s;
7afe21cc
RK
4290 break;
4291
4292 case MINUS:
538b78e7 4293 val = arg0s - arg1s;
7afe21cc
RK
4294 break;
4295
4296 case MULT:
4297 val = arg0s * arg1s;
4298 break;
4299
4300 case DIV:
4301 if (arg1s == 0)
4302 return 0;
4303 val = arg0s / arg1s;
4304 break;
4305
4306 case MOD:
4307 if (arg1s == 0)
4308 return 0;
4309 val = arg0s % arg1s;
4310 break;
4311
4312 case UDIV:
4313 if (arg1 == 0)
4314 return 0;
906c4e36 4315 val = (unsigned HOST_WIDE_INT) arg0 / arg1;
7afe21cc
RK
4316 break;
4317
4318 case UMOD:
4319 if (arg1 == 0)
4320 return 0;
906c4e36 4321 val = (unsigned HOST_WIDE_INT) arg0 % arg1;
7afe21cc
RK
4322 break;
4323
4324 case AND:
4325 val = arg0 & arg1;
4326 break;
4327
4328 case IOR:
4329 val = arg0 | arg1;
4330 break;
4331
4332 case XOR:
4333 val = arg0 ^ arg1;
4334 break;
4335
4336 case LSHIFTRT:
4337 /* If shift count is undefined, don't fold it; let the machine do
4338 what it wants. But truncate it if the machine will do that. */
4339 if (arg1 < 0)
4340 return 0;
4341
4342#ifdef SHIFT_COUNT_TRUNCATED
85c0a556 4343 if (SHIFT_COUNT_TRUNCATED)
4d61f8c5 4344 arg1 %= width;
7afe21cc
RK
4345#endif
4346
906c4e36 4347 val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
7afe21cc
RK
4348 break;
4349
4350 case ASHIFT:
7afe21cc
RK
4351 if (arg1 < 0)
4352 return 0;
4353
4354#ifdef SHIFT_COUNT_TRUNCATED
85c0a556 4355 if (SHIFT_COUNT_TRUNCATED)
4d61f8c5 4356 arg1 %= width;
7afe21cc
RK
4357#endif
4358
906c4e36 4359 val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
7afe21cc
RK
4360 break;
4361
4362 case ASHIFTRT:
4363 if (arg1 < 0)
4364 return 0;
4365
4366#ifdef SHIFT_COUNT_TRUNCATED
85c0a556 4367 if (SHIFT_COUNT_TRUNCATED)
4d61f8c5 4368 arg1 %= width;
7afe21cc
RK
4369#endif
4370
7afe21cc 4371 val = arg0s >> arg1;
2166571b
RS
4372
4373 /* Bootstrap compiler may not have sign extended the right shift.
4374 Manually extend the sign to insure bootstrap cc matches gcc. */
4375 if (arg0s < 0 && arg1 > 0)
4376 val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
4377
7afe21cc
RK
4378 break;
4379
4380 case ROTATERT:
4381 if (arg1 < 0)
4382 return 0;
4383
4384 arg1 %= width;
906c4e36
RK
4385 val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
4386 | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
7afe21cc
RK
4387 break;
4388
4389 case ROTATE:
4390 if (arg1 < 0)
4391 return 0;
4392
4393 arg1 %= width;
906c4e36
RK
4394 val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
4395 | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
7afe21cc
RK
4396 break;
4397
4398 case COMPARE:
4399 /* Do nothing here. */
4400 return 0;
4401
830a38ee
RS
4402 case SMIN:
4403 val = arg0s <= arg1s ? arg0s : arg1s;
4404 break;
4405
4406 case UMIN:
906c4e36
RK
4407 val = ((unsigned HOST_WIDE_INT) arg0
4408 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
830a38ee
RS
4409 break;
4410
4411 case SMAX:
4412 val = arg0s > arg1s ? arg0s : arg1s;
4413 break;
4414
4415 case UMAX:
906c4e36
RK
4416 val = ((unsigned HOST_WIDE_INT) arg0
4417 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
830a38ee
RS
4418 break;
4419
7afe21cc
RK
4420 default:
4421 abort ();
4422 }
4423
7e4ce834 4424 val = trunc_int_for_mode (val, mode);
ad89d6f6 4425
906c4e36 4426 return GEN_INT (val);
7afe21cc
RK
4427}
4428\f
96b0e481
RK
4429/* Simplify a PLUS or MINUS, at least one of whose operands may be another
4430 PLUS or MINUS.
4431
4432 Rather than test for specific case, we do this by a brute-force method
4433 and do all possible simplifications until no more changes occur. Then
4434 we rebuild the operation. */
4435
4436static rtx
4437simplify_plus_minus (code, mode, op0, op1)
4438 enum rtx_code code;
4439 enum machine_mode mode;
4440 rtx op0, op1;
4441{
4442 rtx ops[8];
4443 int negs[8];
4444 rtx result, tem;
fb5c8ce6 4445 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
96b0e481 4446 int first = 1, negate = 0, changed;
fb5c8ce6 4447 int i, j;
96b0e481 4448
4c9a05bc 4449 bzero ((char *) ops, sizeof ops);
96b0e481
RK
4450
4451 /* Set up the two operands and then expand them until nothing has been
4452 changed. If we run out of room in our array, give up; this should
4453 almost never happen. */
4454
4455 ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);
4456
4457 changed = 1;
4458 while (changed)
4459 {
4460 changed = 0;
4461
4462 for (i = 0; i < n_ops; i++)
4463 switch (GET_CODE (ops[i]))
4464 {
4465 case PLUS:
4466 case MINUS:
4467 if (n_ops == 7)
4468 return 0;
4469
4470 ops[n_ops] = XEXP (ops[i], 1);
4471 negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
4472 ops[i] = XEXP (ops[i], 0);
b7d9299b 4473 input_ops++;
96b0e481
RK
4474 changed = 1;
4475 break;
4476
4477 case NEG:
4478 ops[i] = XEXP (ops[i], 0);
4479 negs[i] = ! negs[i];
4480 changed = 1;
4481 break;
4482
4483 case CONST:
4484 ops[i] = XEXP (ops[i], 0);
fb5c8ce6 4485 input_consts++;
96b0e481
RK
4486 changed = 1;
4487 break;
4488
4489 case NOT:
4490 /* ~a -> (-a - 1) */
4491 if (n_ops != 7)
4492 {
4493 ops[n_ops] = constm1_rtx;
5931019b 4494 negs[n_ops++] = negs[i];
96b0e481
RK
4495 ops[i] = XEXP (ops[i], 0);
4496 negs[i] = ! negs[i];
4497 changed = 1;
4498 }
4499 break;
4500
4501 case CONST_INT:
4502 if (negs[i])
4503 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
4504 break;
e9a25f70
JL
4505
4506 default:
4507 break;
96b0e481
RK
4508 }
4509 }
4510
4511 /* If we only have two operands, we can't do anything. */
4512 if (n_ops <= 2)
4513 return 0;
4514
4515 /* Now simplify each pair of operands until nothing changes. The first
4516 time through just simplify constants against each other. */
4517
4518 changed = 1;
4519 while (changed)
4520 {
4521 changed = first;
4522
4523 for (i = 0; i < n_ops - 1; i++)
4524 for (j = i + 1; j < n_ops; j++)
4525 if (ops[i] != 0 && ops[j] != 0
4526 && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
4527 {
4528 rtx lhs = ops[i], rhs = ops[j];
4529 enum rtx_code ncode = PLUS;
4530
4531 if (negs[i] && ! negs[j])
4532 lhs = ops[j], rhs = ops[i], ncode = MINUS;
4533 else if (! negs[i] && negs[j])
4534 ncode = MINUS;
4535
4536 tem = simplify_binary_operation (ncode, mode, lhs, rhs);
b7d9299b 4537 if (tem)
96b0e481
RK
4538 {
4539 ops[i] = tem, ops[j] = 0;
4540 negs[i] = negs[i] && negs[j];
4541 if (GET_CODE (tem) == NEG)
4542 ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];
4543
4544 if (GET_CODE (ops[i]) == CONST_INT && negs[i])
4545 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
4546 changed = 1;
4547 }
4548 }
4549
4550 first = 0;
4551 }
4552
4553 /* Pack all the operands to the lower-numbered entries and give up if
91a60f37 4554 we didn't reduce the number of operands we had. Make sure we
fb5c8ce6
RK
4555 count a CONST as two operands. If we have the same number of
4556 operands, but have made more CONSTs than we had, this is also
4557 an improvement, so accept it. */
91a60f37 4558
fb5c8ce6 4559 for (i = 0, j = 0; j < n_ops; j++)
96b0e481 4560 if (ops[j] != 0)
91a60f37
RK
4561 {
4562 ops[i] = ops[j], negs[i++] = negs[j];
4563 if (GET_CODE (ops[j]) == CONST)
fb5c8ce6 4564 n_consts++;
91a60f37 4565 }
96b0e481 4566
fb5c8ce6
RK
4567 if (i + n_consts > input_ops
4568 || (i + n_consts == input_ops && n_consts <= input_consts))
96b0e481
RK
4569 return 0;
4570
4571 n_ops = i;
4572
4573 /* If we have a CONST_INT, put it last. */
4574 for (i = 0; i < n_ops - 1; i++)
4575 if (GET_CODE (ops[i]) == CONST_INT)
4576 {
4577 tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i] , ops[i] = tem;
4578 j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
4579 }
4580
4581 /* Put a non-negated operand first. If there aren't any, make all
4582 operands positive and negate the whole thing later. */
4583 for (i = 0; i < n_ops && negs[i]; i++)
4584 ;
4585
4586 if (i == n_ops)
4587 {
4588 for (i = 0; i < n_ops; i++)
4589 negs[i] = 0;
4590 negate = 1;
4591 }
4592 else if (i != 0)
4593 {
4594 tem = ops[0], ops[0] = ops[i], ops[i] = tem;
4595 j = negs[0], negs[0] = negs[i], negs[i] = j;
4596 }
4597
4598 /* Now make the result by performing the requested operations. */
4599 result = ops[0];
4600 for (i = 1; i < n_ops; i++)
4601 result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);
4602
38a448ca 4603 return negate ? gen_rtx_NEG (mode, result) : result;
96b0e481
RK
4604}
4605\f
4606/* Make a binary operation by properly ordering the operands and
4607 seeing if the expression folds. */
4608
4609static rtx
4610cse_gen_binary (code, mode, op0, op1)
4611 enum rtx_code code;
4612 enum machine_mode mode;
4613 rtx op0, op1;
4614{
4615 rtx tem;
4616
4617 /* Put complex operands first and constants second if commutative. */
4618 if (GET_RTX_CLASS (code) == 'c'
4619 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4620 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4621 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4622 || (GET_CODE (op0) == SUBREG
4623 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4624 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4625 tem = op0, op0 = op1, op1 = tem;
4626
4627 /* If this simplifies, do it. */
4628 tem = simplify_binary_operation (code, mode, op0, op1);
4629
4630 if (tem)
4631 return tem;
4632
4633 /* Handle addition and subtraction of CONST_INT specially. Otherwise,
4634 just form the operation. */
4635
4636 if (code == PLUS && GET_CODE (op1) == CONST_INT
4637 && GET_MODE (op0) != VOIDmode)
4638 return plus_constant (op0, INTVAL (op1));
4639 else if (code == MINUS && GET_CODE (op1) == CONST_INT
4640 && GET_MODE (op0) != VOIDmode)
4641 return plus_constant (op0, - INTVAL (op1));
4642 else
38a448ca 4643 return gen_rtx_fmt_ee (code, mode, op0, op1);
96b0e481
RK
4644}
4645\f
1a87eea2
KG
4646struct cfc_args
4647{
4648 /* Input */
4649 rtx op0, op1;
4650 /* Output */
4651 int equal, op0lt, op1lt;
4652};
4653
4654static void
4655check_fold_consts (data)
4656 PTR data;
4657{
4658 struct cfc_args * args = (struct cfc_args *) data;
4659 REAL_VALUE_TYPE d0, d1;
4660
4661 REAL_VALUE_FROM_CONST_DOUBLE (d0, args->op0);
4662 REAL_VALUE_FROM_CONST_DOUBLE (d1, args->op1);
4663 args->equal = REAL_VALUES_EQUAL (d0, d1);
4664 args->op0lt = REAL_VALUES_LESS (d0, d1);
4665 args->op1lt = REAL_VALUES_LESS (d1, d0);
4666}
4667
7afe21cc 4668/* Like simplify_binary_operation except used for relational operators.
a432f20d
RK
4669 MODE is the mode of the operands, not that of the result. If MODE
4670 is VOIDmode, both operands must also be VOIDmode and we compare the
4671 operands in "infinite precision".
4672
4673 If no simplification is possible, this function returns zero. Otherwise,
4674 it returns either const_true_rtx or const0_rtx. */
7afe21cc
RK
4675
4676rtx
4677simplify_relational_operation (code, mode, op0, op1)
4678 enum rtx_code code;
4679 enum machine_mode mode;
4680 rtx op0, op1;
4681{
a432f20d
RK
4682 int equal, op0lt, op0ltu, op1lt, op1ltu;
4683 rtx tem;
7afe21cc
RK
4684
4685 /* If op0 is a compare, extract the comparison arguments from it. */
4686 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4687 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4688
28bad1cb
RK
4689 /* We can't simplify MODE_CC values since we don't know what the
4690 actual comparison is. */
4691 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC
4692#ifdef HAVE_cc0
4693 || op0 == cc0_rtx
4694#endif
4695 )
31dcf83f
RS
4696 return 0;
4697
a432f20d
RK
4698 /* For integer comparisons of A and B maybe we can simplify A - B and can
4699 then simplify a comparison of that with zero. If A and B are both either
4700 a register or a CONST_INT, this can't help; testing for these cases will
4701 prevent infinite recursion here and speed things up.
4702
c27b5c62
JW
4703 If CODE is an unsigned comparison, then we can never do this optimization,
4704 because it gives an incorrect result if the subtraction wraps around zero.
4705 ANSI C defines unsigned operations such that they never overflow, and
4706 thus such cases can not be ignored. */
a432f20d
RK
4707
4708 if (INTEGRAL_MODE_P (mode) && op1 != const0_rtx
4709 && ! ((GET_CODE (op0) == REG || GET_CODE (op0) == CONST_INT)
4710 && (GET_CODE (op1) == REG || GET_CODE (op1) == CONST_INT))
4711 && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
c27b5c62 4712 && code != GTU && code != GEU && code != LTU && code != LEU)
a432f20d
RK
4713 return simplify_relational_operation (signed_condition (code),
4714 mode, tem, const0_rtx);
4715
4716 /* For non-IEEE floating-point, if the two operands are equal, we know the
4717 result. */
4718 if (rtx_equal_p (op0, op1)
4719 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4720 || ! FLOAT_MODE_P (GET_MODE (op0)) || flag_fast_math))
4721 equal = 1, op0lt = 0, op0ltu = 0, op1lt = 0, op1ltu = 0;
4722
4723 /* If the operands are floating-point constants, see if we can fold
4724 the result. */
6076248a 4725#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
a432f20d
RK
4726 else if (GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
4727 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4728 {
1a87eea2
KG
4729 struct cfc_args args;
4730
4731 /* Setup input for check_fold_consts() */
4732 args.op0 = op0;
4733 args.op1 = op1;
a432f20d 4734
1a87eea2
KG
4735 if (do_float_handler(check_fold_consts, (PTR) &args) == 0)
4736 /* We got an exception from check_fold_consts() */
a432f20d 4737 return 0;
7afe21cc 4738
1a87eea2
KG
4739 /* Receive output from check_fold_consts() */
4740 equal = args.equal;
4741 op0lt = op0ltu = args.op0lt;
4742 op1lt = op1ltu = args.op1lt;
a432f20d
RK
4743 }
4744#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
7afe21cc 4745
a432f20d
RK
4746 /* Otherwise, see if the operands are both integers. */
4747 else if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
4748 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
4749 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
4750 {
4751 int width = GET_MODE_BITSIZE (mode);
64812ded
RK
4752 HOST_WIDE_INT l0s, h0s, l1s, h1s;
4753 unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
7afe21cc 4754
a432f20d
RK
4755 /* Get the two words comprising each integer constant. */
4756 if (GET_CODE (op0) == CONST_DOUBLE)
4757 {
4758 l0u = l0s = CONST_DOUBLE_LOW (op0);
4759 h0u = h0s = CONST_DOUBLE_HIGH (op0);
7afe21cc 4760 }
a432f20d 4761 else
6076248a 4762 {
a432f20d 4763 l0u = l0s = INTVAL (op0);
cb3bb2a7 4764 h0u = h0s = l0s < 0 ? -1 : 0;
a432f20d 4765 }
6076248a 4766
a432f20d
RK
4767 if (GET_CODE (op1) == CONST_DOUBLE)
4768 {
4769 l1u = l1s = CONST_DOUBLE_LOW (op1);
4770 h1u = h1s = CONST_DOUBLE_HIGH (op1);
4771 }
4772 else
4773 {
4774 l1u = l1s = INTVAL (op1);
cb3bb2a7 4775 h1u = h1s = l1s < 0 ? -1 : 0;
a432f20d
RK
4776 }
4777
4778 /* If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT,
4779 we have to sign or zero-extend the values. */
4780 if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
4781 h0u = h1u = 0, h0s = l0s < 0 ? -1 : 0, h1s = l1s < 0 ? -1 : 0;
6076248a 4782
a432f20d
RK
4783 if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
4784 {
4785 l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
4786 l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
6076248a 4787
a432f20d
RK
4788 if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4789 l0s |= ((HOST_WIDE_INT) (-1) << width);
6076248a 4790
a432f20d
RK
4791 if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4792 l1s |= ((HOST_WIDE_INT) (-1) << width);
6076248a
RK
4793 }
4794
a432f20d
RK
4795 equal = (h0u == h1u && l0u == l1u);
4796 op0lt = (h0s < h1s || (h0s == h1s && l0s < l1s));
4797 op1lt = (h1s < h0s || (h1s == h0s && l1s < l0s));
4798 op0ltu = (h0u < h1u || (h0u == h1u && l0u < l1u));
4799 op1ltu = (h1u < h0u || (h1u == h0u && l1u < l0u));
4800 }
4801
4802 /* Otherwise, there are some code-specific tests we can make. */
4803 else
4804 {
7afe21cc
RK
4805 switch (code)
4806 {
4807 case EQ:
a432f20d
RK
4808 /* References to the frame plus a constant or labels cannot
4809 be zero, but a SYMBOL_REF can due to #pragma weak. */
4810 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4811 || GET_CODE (op0) == LABEL_REF)
1a7c818b 4812#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
a432f20d
RK
4813 /* On some machines, the ap reg can be 0 sometimes. */
4814 && op0 != arg_pointer_rtx
7afe21cc 4815#endif
a432f20d
RK
4816 )
4817 return const0_rtx;
4818 break;
7afe21cc
RK
4819
4820 case NE:
a432f20d
RK
4821 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4822 || GET_CODE (op0) == LABEL_REF)
1a7c818b 4823#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
a432f20d 4824 && op0 != arg_pointer_rtx
7afe21cc 4825#endif
a432f20d 4826 )
7afe21cc
RK
4827 return const_true_rtx;
4828 break;
4829
4830 case GEU:
a432f20d
RK
4831 /* Unsigned values are never negative. */
4832 if (op1 == const0_rtx)
7afe21cc
RK
4833 return const_true_rtx;
4834 break;
4835
4836 case LTU:
a432f20d 4837 if (op1 == const0_rtx)
7afe21cc
RK
4838 return const0_rtx;
4839 break;
4840
4841 case LEU:
4842 /* Unsigned values are never greater than the largest
4843 unsigned value. */
4844 if (GET_CODE (op1) == CONST_INT
6a651371 4845 && (unsigned HOST_WIDE_INT) INTVAL (op1) == GET_MODE_MASK (mode)
a432f20d
RK
4846 && INTEGRAL_MODE_P (mode))
4847 return const_true_rtx;
7afe21cc
RK
4848 break;
4849
4850 case GTU:
4851 if (GET_CODE (op1) == CONST_INT
6a651371 4852 && (unsigned HOST_WIDE_INT) INTVAL (op1) == GET_MODE_MASK (mode)
cbf6a543 4853 && INTEGRAL_MODE_P (mode))
7afe21cc
RK
4854 return const0_rtx;
4855 break;
e9a25f70
JL
4856
4857 default:
4858 break;
7afe21cc
RK
4859 }
4860
4861 return 0;
4862 }
4863
a432f20d
RK
4864 /* If we reach here, EQUAL, OP0LT, OP0LTU, OP1LT, and OP1LTU are set
4865 as appropriate. */
7afe21cc
RK
4866 switch (code)
4867 {
7afe21cc 4868 case EQ:
a432f20d
RK
4869 return equal ? const_true_rtx : const0_rtx;
4870 case NE:
4871 return ! equal ? const_true_rtx : const0_rtx;
7afe21cc 4872 case LT:
a432f20d 4873 return op0lt ? const_true_rtx : const0_rtx;
7afe21cc 4874 case GT:
a432f20d 4875 return op1lt ? const_true_rtx : const0_rtx;
7afe21cc 4876 case LTU:
a432f20d 4877 return op0ltu ? const_true_rtx : const0_rtx;
7afe21cc 4878 case GTU:
a432f20d
RK
4879 return op1ltu ? const_true_rtx : const0_rtx;
4880 case LE:
4881 return equal || op0lt ? const_true_rtx : const0_rtx;
4882 case GE:
4883 return equal || op1lt ? const_true_rtx : const0_rtx;
4884 case LEU:
4885 return equal || op0ltu ? const_true_rtx : const0_rtx;
4886 case GEU:
4887 return equal || op1ltu ? const_true_rtx : const0_rtx;
e9a25f70
JL
4888 default:
4889 abort ();
7afe21cc 4890 }
7afe21cc
RK
4891}
4892\f
4893/* Simplify CODE, an operation with result mode MODE and three operands,
4894 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4895 a constant. Return 0 if no simplifications is possible. */
4896
4897rtx
4898simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4899 enum rtx_code code;
4900 enum machine_mode mode, op0_mode;
4901 rtx op0, op1, op2;
4902{
4903 int width = GET_MODE_BITSIZE (mode);
4904
4905 /* VOIDmode means "infinite" precision. */
4906 if (width == 0)
906c4e36 4907 width = HOST_BITS_PER_WIDE_INT;
7afe21cc
RK
4908
4909 switch (code)
4910 {
4911 case SIGN_EXTRACT:
4912 case ZERO_EXTRACT:
4913 if (GET_CODE (op0) == CONST_INT
4914 && GET_CODE (op1) == CONST_INT
4915 && GET_CODE (op2) == CONST_INT
4916 && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
906c4e36 4917 && width <= HOST_BITS_PER_WIDE_INT)
7afe21cc
RK
4918 {
4919 /* Extracting a bit-field from a constant */
906c4e36 4920 HOST_WIDE_INT val = INTVAL (op0);
7afe21cc 4921
f76b9db2
ILT
4922 if (BITS_BIG_ENDIAN)
4923 val >>= (GET_MODE_BITSIZE (op0_mode)
4924 - INTVAL (op2) - INTVAL (op1));
4925 else
4926 val >>= INTVAL (op2);
4927
906c4e36 4928 if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
7afe21cc
RK
4929 {
4930 /* First zero-extend. */
906c4e36 4931 val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
7afe21cc 4932 /* If desired, propagate sign bit. */
906c4e36
RK
4933 if (code == SIGN_EXTRACT
4934 && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4935 val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
7afe21cc
RK
4936 }
4937
4938 /* Clear the bits that don't belong in our mode,
4939 unless they and our sign bit are all one.
4940 So we get either a reasonable negative value or a reasonable
4941 unsigned value for this mode. */
906c4e36
RK
4942 if (width < HOST_BITS_PER_WIDE_INT
4943 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4944 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4945 val &= ((HOST_WIDE_INT) 1 << width) - 1;
7afe21cc 4946
906c4e36 4947 return GEN_INT (val);
7afe21cc
RK
4948 }
4949 break;
4950
4951 case IF_THEN_ELSE:
4952 if (GET_CODE (op0) == CONST_INT)
4953 return op0 != const0_rtx ? op1 : op2;
3bf1b082
JW
4954
4955 /* Convert a == b ? b : a to "a". */
4956 if (GET_CODE (op0) == NE && ! side_effects_p (op0)
4957 && rtx_equal_p (XEXP (op0, 0), op1)
4958 && rtx_equal_p (XEXP (op0, 1), op2))
4959 return op1;
4960 else if (GET_CODE (op0) == EQ && ! side_effects_p (op0)
4961 && rtx_equal_p (XEXP (op0, 1), op1)
4962 && rtx_equal_p (XEXP (op0, 0), op2))
4963 return op2;
e82ad93d 4964 else if (GET_RTX_CLASS (GET_CODE (op0)) == '<' && ! side_effects_p (op0))
ed1ecb19
JL
4965 {
4966 rtx temp;
4967 temp = simplify_relational_operation (GET_CODE (op0), op0_mode,
4968 XEXP (op0, 0), XEXP (op0, 1));
4969 /* See if any simplifications were possible. */
4970 if (temp == const0_rtx)
4971 return op2;
4972 else if (temp == const1_rtx)
4973 return op1;
4974 }
7afe21cc
RK
4975 break;
4976
4977 default:
4978 abort ();
4979 }
4980
4981 return 0;
4982}
4983\f
4984/* If X is a nontrivial arithmetic operation on an argument
4985 for which a constant value can be determined, return
4986 the result of operating on that value, as a constant.
4987 Otherwise, return X, possibly with one or more operands
4988 modified by recursive calls to this function.
4989
e7bb59fa
RK
4990 If X is a register whose contents are known, we do NOT
4991 return those contents here. equiv_constant is called to
4992 perform that task.
7afe21cc
RK
4993
4994 INSN is the insn that we may be modifying. If it is 0, make a copy
4995 of X before modifying it. */
4996
4997static rtx
4998fold_rtx (x, insn)
4999 rtx x;
5000 rtx insn;
5001{
5002 register enum rtx_code code;
5003 register enum machine_mode mode;
6f7d635c 5004 register const char *fmt;
906c4e36 5005 register int i;
7afe21cc
RK
5006 rtx new = 0;
5007 int copied = 0;
5008 int must_swap = 0;
5009
5010 /* Folded equivalents of first two operands of X. */
5011 rtx folded_arg0;
5012 rtx folded_arg1;
5013
5014 /* Constant equivalents of first three operands of X;
5015 0 when no such equivalent is known. */
5016 rtx const_arg0;
5017 rtx const_arg1;
5018 rtx const_arg2;
5019
5020 /* The mode of the first operand of X. We need this for sign and zero
5021 extends. */
5022 enum machine_mode mode_arg0;
5023
5024 if (x == 0)
5025 return x;
5026
5027 mode = GET_MODE (x);
5028 code = GET_CODE (x);
5029 switch (code)
5030 {
5031 case CONST:
5032 case CONST_INT:
5033 case CONST_DOUBLE:
5034 case SYMBOL_REF:
5035 case LABEL_REF:
5036 case REG:
5037 /* No use simplifying an EXPR_LIST
5038 since they are used only for lists of args
5039 in a function call's REG_EQUAL note. */
5040 case EXPR_LIST:
956d6950
JL
5041 /* Changing anything inside an ADDRESSOF is incorrect; we don't
5042 want to (e.g.,) make (addressof (const_int 0)) just because
5043 the location is known to be zero. */
5044 case ADDRESSOF:
7afe21cc
RK
5045 return x;
5046
5047#ifdef HAVE_cc0
5048 case CC0:
5049 return prev_insn_cc0;
5050#endif
5051
5052 case PC:
5053 /* If the next insn is a CODE_LABEL followed by a jump table,
5054 PC's value is a LABEL_REF pointing to that label. That
5055 lets us fold switch statements on the Vax. */
5056 if (insn && GET_CODE (insn) == JUMP_INSN)
5057 {
5058 rtx next = next_nonnote_insn (insn);
5059
5060 if (next && GET_CODE (next) == CODE_LABEL
5061 && NEXT_INSN (next) != 0
5062 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
5063 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
5064 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
38a448ca 5065 return gen_rtx_LABEL_REF (Pmode, next);
7afe21cc
RK
5066 }
5067 break;
5068
5069 case SUBREG:
c610adec
RK
5070 /* See if we previously assigned a constant value to this SUBREG. */
5071 if ((new = lookup_as_function (x, CONST_INT)) != 0
5072 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
7afe21cc
RK
5073 return new;
5074
4b980e20
RK
5075 /* If this is a paradoxical SUBREG, we have no idea what value the
5076 extra bits would have. However, if the operand is equivalent
5077 to a SUBREG whose operand is the same as our mode, and all the
5078 modes are within a word, we can just use the inner operand
31c85c78
RK
5079 because these SUBREGs just say how to treat the register.
5080
5081 Similarly if we find an integer constant. */
4b980e20 5082
e5f6a288 5083 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4b980e20
RK
5084 {
5085 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
5086 struct table_elt *elt;
5087
5088 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
5089 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
5090 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
5091 imode)) != 0)
31c85c78
RK
5092 for (elt = elt->first_same_value;
5093 elt; elt = elt->next_same_value)
5094 {
5095 if (CONSTANT_P (elt->exp)
5096 && GET_MODE (elt->exp) == VOIDmode)
5097 return elt->exp;
5098
4b980e20
RK
5099 if (GET_CODE (elt->exp) == SUBREG
5100 && GET_MODE (SUBREG_REG (elt->exp)) == mode
906c4e36 5101 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4b980e20
RK
5102 return copy_rtx (SUBREG_REG (elt->exp));
5103 }
5104
5105 return x;
5106 }
e5f6a288 5107
7afe21cc
RK
5108 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
5109 We might be able to if the SUBREG is extracting a single word in an
5110 integral mode or extracting the low part. */
5111
5112 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
5113 const_arg0 = equiv_constant (folded_arg0);
5114 if (const_arg0)
5115 folded_arg0 = const_arg0;
5116
5117 if (folded_arg0 != SUBREG_REG (x))
5118 {
5119 new = 0;
5120
5121 if (GET_MODE_CLASS (mode) == MODE_INT
5122 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5123 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
5124 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
5125 GET_MODE (SUBREG_REG (x)));
5126 if (new == 0 && subreg_lowpart_p (x))
5127 new = gen_lowpart_if_possible (mode, folded_arg0);
5128 if (new)
5129 return new;
5130 }
e5f6a288
RK
5131
5132 /* If this is a narrowing SUBREG and our operand is a REG, see if
858a47b1 5133 we can find an equivalence for REG that is an arithmetic operation
e5f6a288
RK
5134 in a wider mode where both operands are paradoxical SUBREGs
5135 from objects of our result mode. In that case, we couldn't report
5136 an equivalent value for that operation, since we don't know what the
5137 extra bits will be. But we can find an equivalence for this SUBREG
5138 by folding that operation is the narrow mode. This allows us to
5139 fold arithmetic in narrow modes when the machine only supports
4b980e20
RK
5140 word-sized arithmetic.
5141
5142 Also look for a case where we have a SUBREG whose operand is the
5143 same as our result. If both modes are smaller than a word, we
5144 are simply interpreting a register in different modes and we
5145 can use the inner value. */
e5f6a288
RK
5146
5147 if (GET_CODE (folded_arg0) == REG
e8d76a39
RS
5148 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
5149 && subreg_lowpart_p (x))
e5f6a288
RK
5150 {
5151 struct table_elt *elt;
5152
5153 /* We can use HASH here since we know that canon_hash won't be
5154 called. */
5155 elt = lookup (folded_arg0,
5156 HASH (folded_arg0, GET_MODE (folded_arg0)),
5157 GET_MODE (folded_arg0));
5158
5159 if (elt)
5160 elt = elt->first_same_value;
5161
5162 for (; elt; elt = elt->next_same_value)
5163 {
e8d76a39
RS
5164 enum rtx_code eltcode = GET_CODE (elt->exp);
5165
e5f6a288
RK
5166 /* Just check for unary and binary operations. */
5167 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
5168 && GET_CODE (elt->exp) != SIGN_EXTEND
5169 && GET_CODE (elt->exp) != ZERO_EXTEND
5170 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
5171 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
5172 {
5173 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
5174
5175 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
906c4e36 5176 op0 = fold_rtx (op0, NULL_RTX);
e5f6a288
RK
5177
5178 op0 = equiv_constant (op0);
5179 if (op0)
5180 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
5181 op0, mode);
5182 }
5183 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
5184 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
e8d76a39
RS
5185 && eltcode != DIV && eltcode != MOD
5186 && eltcode != UDIV && eltcode != UMOD
5187 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
5188 && eltcode != ROTATE && eltcode != ROTATERT
e5f6a288
RK
5189 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
5190 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
5191 == mode))
5192 || CONSTANT_P (XEXP (elt->exp, 0)))
5193 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
5194 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
5195 == mode))
5196 || CONSTANT_P (XEXP (elt->exp, 1))))
5197 {
5198 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
5199 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
5200
5201 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
906c4e36 5202 op0 = fold_rtx (op0, NULL_RTX);
e5f6a288
RK
5203
5204 if (op0)
5205 op0 = equiv_constant (op0);
5206
5207 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
906c4e36 5208 op1 = fold_rtx (op1, NULL_RTX);
e5f6a288
RK
5209
5210 if (op1)
5211 op1 = equiv_constant (op1);
5212
76fb0b60
RS
5213 /* If we are looking for the low SImode part of
5214 (ashift:DI c (const_int 32)), it doesn't work
5215 to compute that in SImode, because a 32-bit shift
5216 in SImode is unpredictable. We know the value is 0. */
5217 if (op0 && op1
45620ed4 5218 && GET_CODE (elt->exp) == ASHIFT
76fb0b60
RS
5219 && GET_CODE (op1) == CONST_INT
5220 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
5221 {
5222 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
5223
5224 /* If the count fits in the inner mode's width,
5225 but exceeds the outer mode's width,
5226 the value will get truncated to 0
5227 by the subreg. */
5228 new = const0_rtx;
5229 else
5230 /* If the count exceeds even the inner mode's width,
5231 don't fold this expression. */
5232 new = 0;
5233 }
5234 else if (op0 && op1)
e5f6a288
RK
5235 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
5236 op0, op1);
5237 }
5238
4b980e20
RK
5239 else if (GET_CODE (elt->exp) == SUBREG
5240 && GET_MODE (SUBREG_REG (elt->exp)) == mode
5241 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
5242 <= UNITS_PER_WORD)
906c4e36 5243 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4b980e20
RK
5244 new = copy_rtx (SUBREG_REG (elt->exp));
5245
e5f6a288
RK
5246 if (new)
5247 return new;
5248 }
5249 }
5250
7afe21cc
RK
5251 return x;
5252
5253 case NOT:
5254 case NEG:
5255 /* If we have (NOT Y), see if Y is known to be (NOT Z).
5256 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
5257 new = lookup_as_function (XEXP (x, 0), code);
5258 if (new)
5259 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
5260 break;
13c9910f 5261
7afe21cc
RK
5262 case MEM:
5263 /* If we are not actually processing an insn, don't try to find the
5264 best address. Not only don't we care, but we could modify the
5265 MEM in an invalid way since we have no insn to validate against. */
5266 if (insn != 0)
5267 find_best_addr (insn, &XEXP (x, 0));
5268
5269 {
5270 /* Even if we don't fold in the insn itself,
5271 we can safely do so here, in hopes of getting a constant. */
906c4e36 5272 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
7afe21cc 5273 rtx base = 0;
906c4e36 5274 HOST_WIDE_INT offset = 0;
7afe21cc
RK
5275
5276 if (GET_CODE (addr) == REG
5277 && REGNO_QTY_VALID_P (REGNO (addr))
30f72379
MM
5278 && GET_MODE (addr) == qty_mode[REG_QTY (REGNO (addr))]
5279 && qty_const[REG_QTY (REGNO (addr))] != 0)
5280 addr = qty_const[REG_QTY (REGNO (addr))];
7afe21cc
RK
5281
5282 /* If address is constant, split it into a base and integer offset. */
5283 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
5284 base = addr;
5285 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
5286 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
5287 {
5288 base = XEXP (XEXP (addr, 0), 0);
5289 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
5290 }
5291 else if (GET_CODE (addr) == LO_SUM
5292 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
5293 base = XEXP (addr, 1);
e9a25f70 5294 else if (GET_CODE (addr) == ADDRESSOF)
956d6950 5295 return change_address (x, VOIDmode, addr);
7afe21cc
RK
5296
5297 /* If this is a constant pool reference, we can fold it into its
5298 constant to allow better value tracking. */
5299 if (base && GET_CODE (base) == SYMBOL_REF
5300 && CONSTANT_POOL_ADDRESS_P (base))
5301 {
5302 rtx constant = get_pool_constant (base);
5303 enum machine_mode const_mode = get_pool_mode (base);
5304 rtx new;
5305
5306 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
5307 constant_pool_entries_cost = COST (constant);
5308
5309 /* If we are loading the full constant, we have an equivalence. */
5310 if (offset == 0 && mode == const_mode)
5311 return constant;
5312
9faa82d8 5313 /* If this actually isn't a constant (weird!), we can't do
7afe21cc
RK
5314 anything. Otherwise, handle the two most common cases:
5315 extracting a word from a multi-word constant, and extracting
5316 the low-order bits. Other cases don't seem common enough to
5317 worry about. */
5318 if (! CONSTANT_P (constant))
5319 return x;
5320
5321 if (GET_MODE_CLASS (mode) == MODE_INT
5322 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5323 && offset % UNITS_PER_WORD == 0
5324 && (new = operand_subword (constant,
5325 offset / UNITS_PER_WORD,
5326 0, const_mode)) != 0)
5327 return new;
5328
5329 if (((BYTES_BIG_ENDIAN
5330 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
5331 || (! BYTES_BIG_ENDIAN && offset == 0))
5332 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
5333 return new;
5334 }
5335
5336 /* If this is a reference to a label at a known position in a jump
5337 table, we also know its value. */
5338 if (base && GET_CODE (base) == LABEL_REF)
5339 {
5340 rtx label = XEXP (base, 0);
5341 rtx table_insn = NEXT_INSN (label);
5342
5343 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5344 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
5345 {
5346 rtx table = PATTERN (table_insn);
5347
5348 if (offset >= 0
5349 && (offset / GET_MODE_SIZE (GET_MODE (table))
5350 < XVECLEN (table, 0)))
5351 return XVECEXP (table, 0,
5352 offset / GET_MODE_SIZE (GET_MODE (table)));
5353 }
5354 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5355 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
5356 {
5357 rtx table = PATTERN (table_insn);
5358
5359 if (offset >= 0
5360 && (offset / GET_MODE_SIZE (GET_MODE (table))
5361 < XVECLEN (table, 1)))
5362 {
5363 offset /= GET_MODE_SIZE (GET_MODE (table));
38a448ca
RH
5364 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
5365 XEXP (table, 0));
7afe21cc
RK
5366
5367 if (GET_MODE (table) != Pmode)
38a448ca 5368 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
7afe21cc 5369
67a37737
RK
5370 /* Indicate this is a constant. This isn't a
5371 valid form of CONST, but it will only be used
5372 to fold the next insns and then discarded, so
ac7ef8d5
FS
5373 it should be safe.
5374
5375 Note this expression must be explicitly discarded,
5376 by cse_insn, else it may end up in a REG_EQUAL note
5377 and "escape" to cause problems elsewhere. */
38a448ca 5378 return gen_rtx_CONST (GET_MODE (new), new);
7afe21cc
RK
5379 }
5380 }
5381 }
5382
5383 return x;
5384 }
9255709c
RK
5385
5386 case ASM_OPERANDS:
5387 for (i = XVECLEN (x, 3) - 1; i >= 0; i--)
5388 validate_change (insn, &XVECEXP (x, 3, i),
5389 fold_rtx (XVECEXP (x, 3, i), insn), 0);
5390 break;
e9a25f70
JL
5391
5392 default:
5393 break;
7afe21cc
RK
5394 }
5395
5396 const_arg0 = 0;
5397 const_arg1 = 0;
5398 const_arg2 = 0;
5399 mode_arg0 = VOIDmode;
5400
5401 /* Try folding our operands.
5402 Then see which ones have constant values known. */
5403
5404 fmt = GET_RTX_FORMAT (code);
5405 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5406 if (fmt[i] == 'e')
5407 {
5408 rtx arg = XEXP (x, i);
5409 rtx folded_arg = arg, const_arg = 0;
5410 enum machine_mode mode_arg = GET_MODE (arg);
5411 rtx cheap_arg, expensive_arg;
5412 rtx replacements[2];
5413 int j;
5414
5415 /* Most arguments are cheap, so handle them specially. */
5416 switch (GET_CODE (arg))
5417 {
5418 case REG:
5419 /* This is the same as calling equiv_constant; it is duplicated
5420 here for speed. */
5421 if (REGNO_QTY_VALID_P (REGNO (arg))
30f72379
MM
5422 && qty_const[REG_QTY (REGNO (arg))] != 0
5423 && GET_CODE (qty_const[REG_QTY (REGNO (arg))]) != REG
5424 && GET_CODE (qty_const[REG_QTY (REGNO (arg))]) != PLUS)
7afe21cc
RK
5425 const_arg
5426 = gen_lowpart_if_possible (GET_MODE (arg),
30f72379 5427 qty_const[REG_QTY (REGNO (arg))]);
7afe21cc
RK
5428 break;
5429
5430 case CONST:
5431 case CONST_INT:
5432 case SYMBOL_REF:
5433 case LABEL_REF:
5434 case CONST_DOUBLE:
5435 const_arg = arg;
5436 break;
5437
5438#ifdef HAVE_cc0
5439 case CC0:
5440 folded_arg = prev_insn_cc0;
5441 mode_arg = prev_insn_cc0_mode;
5442 const_arg = equiv_constant (folded_arg);
5443 break;
5444#endif
5445
5446 default:
5447 folded_arg = fold_rtx (arg, insn);
5448 const_arg = equiv_constant (folded_arg);
5449 }
5450
5451 /* For the first three operands, see if the operand
5452 is constant or equivalent to a constant. */
5453 switch (i)
5454 {
5455 case 0:
5456 folded_arg0 = folded_arg;
5457 const_arg0 = const_arg;
5458 mode_arg0 = mode_arg;
5459 break;
5460 case 1:
5461 folded_arg1 = folded_arg;
5462 const_arg1 = const_arg;
5463 break;
5464 case 2:
5465 const_arg2 = const_arg;
5466 break;
5467 }
5468
5469 /* Pick the least expensive of the folded argument and an
5470 equivalent constant argument. */
5471 if (const_arg == 0 || const_arg == folded_arg
5472 || COST (const_arg) > COST (folded_arg))
5473 cheap_arg = folded_arg, expensive_arg = const_arg;
5474 else
5475 cheap_arg = const_arg, expensive_arg = folded_arg;
5476
5477 /* Try to replace the operand with the cheapest of the two
5478 possibilities. If it doesn't work and this is either of the first
5479 two operands of a commutative operation, try swapping them.
5480 If THAT fails, try the more expensive, provided it is cheaper
5481 than what is already there. */
5482
5483 if (cheap_arg == XEXP (x, i))
5484 continue;
5485
5486 if (insn == 0 && ! copied)
5487 {
5488 x = copy_rtx (x);
5489 copied = 1;
5490 }
5491
5492 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
5493 for (j = 0;
5494 j < 2 && replacements[j]
5495 && COST (replacements[j]) < COST (XEXP (x, i));
5496 j++)
5497 {
5498 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
5499 break;
5500
5501 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
5502 {
5503 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
5504 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
5505
5506 if (apply_change_group ())
5507 {
5508 /* Swap them back to be invalid so that this loop can
5509 continue and flag them to be swapped back later. */
5510 rtx tem;
5511
5512 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
5513 XEXP (x, 1) = tem;
5514 must_swap = 1;
5515 break;
5516 }
5517 }
5518 }
5519 }
5520
2d8b0f3a
JL
5521 else
5522 {
5523 if (fmt[i] == 'E')
5524 /* Don't try to fold inside of a vector of expressions.
5525 Doing nothing is harmless. */
5526 {;}
5527 }
7afe21cc
RK
5528
5529 /* If a commutative operation, place a constant integer as the second
5530 operand unless the first operand is also a constant integer. Otherwise,
5531 place any constant second unless the first operand is also a constant. */
5532
5533 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5534 {
5535 if (must_swap || (const_arg0
5536 && (const_arg1 == 0
5537 || (GET_CODE (const_arg0) == CONST_INT
5538 && GET_CODE (const_arg1) != CONST_INT))))
5539 {
5540 register rtx tem = XEXP (x, 0);
5541
5542 if (insn == 0 && ! copied)
5543 {
5544 x = copy_rtx (x);
5545 copied = 1;
5546 }
5547
5548 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
5549 validate_change (insn, &XEXP (x, 1), tem, 1);
5550 if (apply_change_group ())
5551 {
5552 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
5553 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
5554 }
5555 }
5556 }
5557
5558 /* If X is an arithmetic operation, see if we can simplify it. */
5559
5560 switch (GET_RTX_CLASS (code))
5561 {
5562 case '1':
67a37737
RK
5563 {
5564 int is_const = 0;
5565
5566 /* We can't simplify extension ops unless we know the
5567 original mode. */
5568 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
5569 && mode_arg0 == VOIDmode)
5570 break;
5571
5572 /* If we had a CONST, strip it off and put it back later if we
5573 fold. */
5574 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
5575 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
5576
5577 new = simplify_unary_operation (code, mode,
5578 const_arg0 ? const_arg0 : folded_arg0,
5579 mode_arg0);
5580 if (new != 0 && is_const)
38a448ca 5581 new = gen_rtx_CONST (mode, new);
67a37737 5582 }
7afe21cc
RK
5583 break;
5584
5585 case '<':
5586 /* See what items are actually being compared and set FOLDED_ARG[01]
5587 to those values and CODE to the actual comparison code. If any are
5588 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
5589 do anything if both operands are already known to be constant. */
5590
5591 if (const_arg0 == 0 || const_arg1 == 0)
5592 {
5593 struct table_elt *p0, *p1;
c610adec 5594 rtx true = const_true_rtx, false = const0_rtx;
13c9910f 5595 enum machine_mode mode_arg1;
c610adec
RK
5596
5597#ifdef FLOAT_STORE_FLAG_VALUE
c7c955ee 5598 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
c610adec 5599 {
560c94a2
RK
5600 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5601 mode);
c610adec
RK
5602 false = CONST0_RTX (mode);
5603 }
5604#endif
7afe21cc 5605
13c9910f
RS
5606 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5607 &mode_arg0, &mode_arg1);
7afe21cc
RK
5608 const_arg0 = equiv_constant (folded_arg0);
5609 const_arg1 = equiv_constant (folded_arg1);
5610
13c9910f
RS
5611 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5612 what kinds of things are being compared, so we can't do
5613 anything with this comparison. */
7afe21cc
RK
5614
5615 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5616 break;
5617
0f41302f
MS
5618 /* If we do not now have two constants being compared, see
5619 if we can nevertheless deduce some things about the
5620 comparison. */
7afe21cc
RK
5621 if (const_arg0 == 0 || const_arg1 == 0)
5622 {
0f41302f
MS
5623 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or
5624 non-explicit constant? These aren't zero, but we
5625 don't know their sign. */
7afe21cc
RK
5626 if (const_arg1 == const0_rtx
5627 && (NONZERO_BASE_PLUS_P (folded_arg0)
5628#if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5629 come out as 0. */
5630 || GET_CODE (folded_arg0) == SYMBOL_REF
5631#endif
5632 || GET_CODE (folded_arg0) == LABEL_REF
5633 || GET_CODE (folded_arg0) == CONST))
5634 {
5635 if (code == EQ)
c610adec 5636 return false;
7afe21cc 5637 else if (code == NE)
c610adec 5638 return true;
7afe21cc
RK
5639 }
5640
5641 /* See if the two operands are the same. We don't do this
5642 for IEEE floating-point since we can't assume x == x
5643 since x might be a NaN. */
5644
5645 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
a83afb65 5646 || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
7afe21cc
RK
5647 && (folded_arg0 == folded_arg1
5648 || (GET_CODE (folded_arg0) == REG
5649 && GET_CODE (folded_arg1) == REG
30f72379
MM
5650 && (REG_QTY (REGNO (folded_arg0))
5651 == REG_QTY (REGNO (folded_arg1))))
7afe21cc
RK
5652 || ((p0 = lookup (folded_arg0,
5653 (safe_hash (folded_arg0, mode_arg0)
5654 % NBUCKETS), mode_arg0))
5655 && (p1 = lookup (folded_arg1,
5656 (safe_hash (folded_arg1, mode_arg0)
5657 % NBUCKETS), mode_arg0))
5658 && p0->first_same_value == p1->first_same_value)))
5659 return ((code == EQ || code == LE || code == GE
5660 || code == LEU || code == GEU)
c610adec 5661 ? true : false);
7afe21cc
RK
5662
5663 /* If FOLDED_ARG0 is a register, see if the comparison we are
5664 doing now is either the same as we did before or the reverse
5665 (we only check the reverse if not floating-point). */
5666 else if (GET_CODE (folded_arg0) == REG)
5667 {
30f72379 5668 int qty = REG_QTY (REGNO (folded_arg0));
7afe21cc
RK
5669
5670 if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5671 && (comparison_dominates_p (qty_comparison_code[qty], code)
5672 || (comparison_dominates_p (qty_comparison_code[qty],
5673 reverse_condition (code))
cbf6a543 5674 && ! FLOAT_MODE_P (mode_arg0)))
7afe21cc
RK
5675 && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5676 || (const_arg1
5677 && rtx_equal_p (qty_comparison_const[qty],
5678 const_arg1))
5679 || (GET_CODE (folded_arg1) == REG
30f72379 5680 && (REG_QTY (REGNO (folded_arg1))
7afe21cc
RK
5681 == qty_comparison_qty[qty]))))
5682 return (comparison_dominates_p (qty_comparison_code[qty],
5683 code)
c610adec 5684 ? true : false);
7afe21cc
RK
5685 }
5686 }
5687 }
5688
5689 /* If we are comparing against zero, see if the first operand is
5690 equivalent to an IOR with a constant. If so, we may be able to
5691 determine the result of this comparison. */
5692
5693 if (const_arg1 == const0_rtx)
5694 {
5695 rtx y = lookup_as_function (folded_arg0, IOR);
5696 rtx inner_const;
5697
5698 if (y != 0
5699 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5700 && GET_CODE (inner_const) == CONST_INT
5701 && INTVAL (inner_const) != 0)
5702 {
5703 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
906c4e36
RK
5704 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
5705 && (INTVAL (inner_const)
5706 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
c610adec
RK
5707 rtx true = const_true_rtx, false = const0_rtx;
5708
5709#ifdef FLOAT_STORE_FLAG_VALUE
c7c955ee 5710 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
c610adec 5711 {
560c94a2
RK
5712 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5713 mode);
c610adec
RK
5714 false = CONST0_RTX (mode);
5715 }
5716#endif
7afe21cc
RK
5717
5718 switch (code)
5719 {
5720 case EQ:
c610adec 5721 return false;
7afe21cc 5722 case NE:
c610adec 5723 return true;
7afe21cc
RK
5724 case LT: case LE:
5725 if (has_sign)
c610adec 5726 return true;
7afe21cc
RK
5727 break;
5728 case GT: case GE:
5729 if (has_sign)
c610adec 5730 return false;
7afe21cc 5731 break;
e9a25f70
JL
5732 default:
5733 break;
7afe21cc
RK
5734 }
5735 }
5736 }
5737
5738 new = simplify_relational_operation (code, mode_arg0,
5739 const_arg0 ? const_arg0 : folded_arg0,
5740 const_arg1 ? const_arg1 : folded_arg1);
c610adec
RK
5741#ifdef FLOAT_STORE_FLAG_VALUE
5742 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5743 new = ((new == const0_rtx) ? CONST0_RTX (mode)
560c94a2 5744 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE, mode));
c610adec 5745#endif
7afe21cc
RK
5746 break;
5747
5748 case '2':
5749 case 'c':
5750 switch (code)
5751 {
5752 case PLUS:
5753 /* If the second operand is a LABEL_REF, see if the first is a MINUS
5754 with that LABEL_REF as its second operand. If so, the result is
5755 the first operand of that MINUS. This handles switches with an
5756 ADDR_DIFF_VEC table. */
5757 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5758 {
e650cbda
RK
5759 rtx y
5760 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
5761 : lookup_as_function (folded_arg0, MINUS);
7afe21cc
RK
5762
5763 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5764 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5765 return XEXP (y, 0);
67a37737
RK
5766
5767 /* Now try for a CONST of a MINUS like the above. */
e650cbda
RK
5768 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
5769 : lookup_as_function (folded_arg0, CONST))) != 0
67a37737
RK
5770 && GET_CODE (XEXP (y, 0)) == MINUS
5771 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5772 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg1, 0))
5773 return XEXP (XEXP (y, 0), 0);
7afe21cc 5774 }
c2cc0778 5775
e650cbda
RK
5776 /* Likewise if the operands are in the other order. */
5777 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
5778 {
5779 rtx y
5780 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
5781 : lookup_as_function (folded_arg1, MINUS);
5782
5783 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5784 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
5785 return XEXP (y, 0);
5786
5787 /* Now try for a CONST of a MINUS like the above. */
5788 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
5789 : lookup_as_function (folded_arg1, CONST))) != 0
5790 && GET_CODE (XEXP (y, 0)) == MINUS
5791 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5792 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg0, 0))
5793 return XEXP (XEXP (y, 0), 0);
5794 }
5795
c2cc0778
RK
5796 /* If second operand is a register equivalent to a negative
5797 CONST_INT, see if we can find a register equivalent to the
5798 positive constant. Make a MINUS if so. Don't do this for
5d595063 5799 a non-negative constant since we might then alternate between
c2cc0778 5800 chosing positive and negative constants. Having the positive
5d595063
RK
5801 constant previously-used is the more common case. Be sure
5802 the resulting constant is non-negative; if const_arg1 were
5803 the smallest negative number this would overflow: depending
5804 on the mode, this would either just be the same value (and
5805 hence not save anything) or be incorrect. */
5806 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
5807 && INTVAL (const_arg1) < 0
4741f6ad
JL
5808 /* This used to test
5809
5810 - INTVAL (const_arg1) >= 0
5811
5812 But The Sun V5.0 compilers mis-compiled that test. So
5813 instead we test for the problematic value in a more direct
5814 manner and hope the Sun compilers get it correct. */
5c45a8ac
KG
5815 && INTVAL (const_arg1) !=
5816 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
5d595063 5817 && GET_CODE (folded_arg1) == REG)
c2cc0778
RK
5818 {
5819 rtx new_const = GEN_INT (- INTVAL (const_arg1));
5820 struct table_elt *p
5821 = lookup (new_const, safe_hash (new_const, mode) % NBUCKETS,
5822 mode);
5823
5824 if (p)
5825 for (p = p->first_same_value; p; p = p->next_same_value)
5826 if (GET_CODE (p->exp) == REG)
5827 return cse_gen_binary (MINUS, mode, folded_arg0,
5828 canon_reg (p->exp, NULL_RTX));
5829 }
13c9910f
RS
5830 goto from_plus;
5831
5832 case MINUS:
5833 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5834 If so, produce (PLUS Z C2-C). */
5835 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5836 {
5837 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5838 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
f3becefd
RK
5839 return fold_rtx (plus_constant (copy_rtx (y),
5840 -INTVAL (const_arg1)),
a3b5c94a 5841 NULL_RTX);
13c9910f 5842 }
7afe21cc 5843
0f41302f 5844 /* ... fall through ... */
7afe21cc 5845
13c9910f 5846 from_plus:
7afe21cc
RK
5847 case SMIN: case SMAX: case UMIN: case UMAX:
5848 case IOR: case AND: case XOR:
5849 case MULT: case DIV: case UDIV:
5850 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
5851 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5852 is known to be of similar form, we may be able to replace the
5853 operation with a combined operation. This may eliminate the
5854 intermediate operation if every use is simplified in this way.
5855 Note that the similar optimization done by combine.c only works
5856 if the intermediate operation's result has only one reference. */
5857
5858 if (GET_CODE (folded_arg0) == REG
5859 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5860 {
5861 int is_shift
5862 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5863 rtx y = lookup_as_function (folded_arg0, code);
5864 rtx inner_const;
5865 enum rtx_code associate_code;
5866 rtx new_const;
5867
5868 if (y == 0
5869 || 0 == (inner_const
5870 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5871 || GET_CODE (inner_const) != CONST_INT
5872 /* If we have compiled a statement like
5873 "if (x == (x & mask1))", and now are looking at
5874 "x & mask2", we will have a case where the first operand
5875 of Y is the same as our first operand. Unless we detect
5876 this case, an infinite loop will result. */
5877 || XEXP (y, 0) == folded_arg0)
5878 break;
5879
5880 /* Don't associate these operations if they are a PLUS with the
5881 same constant and it is a power of two. These might be doable
5882 with a pre- or post-increment. Similarly for two subtracts of
5883 identical powers of two with post decrement. */
5884
5885 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
940da324
JL
5886 && ((HAVE_PRE_INCREMENT
5887 && exact_log2 (INTVAL (const_arg1)) >= 0)
5888 || (HAVE_POST_INCREMENT
5889 && exact_log2 (INTVAL (const_arg1)) >= 0)
5890 || (HAVE_PRE_DECREMENT
5891 && exact_log2 (- INTVAL (const_arg1)) >= 0)
5892 || (HAVE_POST_DECREMENT
5893 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
7afe21cc
RK
5894 break;
5895
5896 /* Compute the code used to compose the constants. For example,
5897 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
5898
5899 associate_code
5900 = (code == MULT || code == DIV || code == UDIV ? MULT
5901 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
5902
5903 new_const = simplify_binary_operation (associate_code, mode,
5904 const_arg1, inner_const);
5905
5906 if (new_const == 0)
5907 break;
5908
5909 /* If we are associating shift operations, don't let this
4908e508
RS
5910 produce a shift of the size of the object or larger.
5911 This could occur when we follow a sign-extend by a right
5912 shift on a machine that does a sign-extend as a pair
5913 of shifts. */
7afe21cc
RK
5914
5915 if (is_shift && GET_CODE (new_const) == CONST_INT
4908e508
RS
5916 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
5917 {
5918 /* As an exception, we can turn an ASHIFTRT of this
5919 form into a shift of the number of bits - 1. */
5920 if (code == ASHIFTRT)
5921 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
5922 else
5923 break;
5924 }
7afe21cc
RK
5925
5926 y = copy_rtx (XEXP (y, 0));
5927
5928 /* If Y contains our first operand (the most common way this
5929 can happen is if Y is a MEM), we would do into an infinite
5930 loop if we tried to fold it. So don't in that case. */
5931
5932 if (! reg_mentioned_p (folded_arg0, y))
5933 y = fold_rtx (y, insn);
5934
96b0e481 5935 return cse_gen_binary (code, mode, y, new_const);
7afe21cc 5936 }
e9a25f70
JL
5937 break;
5938
5939 default:
5940 break;
7afe21cc
RK
5941 }
5942
5943 new = simplify_binary_operation (code, mode,
5944 const_arg0 ? const_arg0 : folded_arg0,
5945 const_arg1 ? const_arg1 : folded_arg1);
5946 break;
5947
5948 case 'o':
5949 /* (lo_sum (high X) X) is simply X. */
5950 if (code == LO_SUM && const_arg0 != 0
5951 && GET_CODE (const_arg0) == HIGH
5952 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
5953 return const_arg1;
5954 break;
5955
5956 case '3':
5957 case 'b':
5958 new = simplify_ternary_operation (code, mode, mode_arg0,
5959 const_arg0 ? const_arg0 : folded_arg0,
5960 const_arg1 ? const_arg1 : folded_arg1,
5961 const_arg2 ? const_arg2 : XEXP (x, 2));
5962 break;
ee5332b8
RH
5963
5964 case 'x':
5965 /* Always eliminate CONSTANT_P_RTX at this stage. */
5966 if (code == CONSTANT_P_RTX)
5967 return (const_arg0 ? const1_rtx : const0_rtx);
5968 break;
7afe21cc
RK
5969 }
5970
5971 return new ? new : x;
5972}
5973\f
5974/* Return a constant value currently equivalent to X.
5975 Return 0 if we don't know one. */
5976
5977static rtx
5978equiv_constant (x)
5979 rtx x;
5980{
5981 if (GET_CODE (x) == REG
5982 && REGNO_QTY_VALID_P (REGNO (x))
30f72379
MM
5983 && qty_const[REG_QTY (REGNO (x))])
5984 x = gen_lowpart_if_possible (GET_MODE (x), qty_const[REG_QTY (REGNO (x))]);
7afe21cc 5985
2ce5e1b4 5986 if (x == 0 || CONSTANT_P (x))
7afe21cc
RK
5987 return x;
5988
fc3ffe83
RK
5989 /* If X is a MEM, try to fold it outside the context of any insn to see if
5990 it might be equivalent to a constant. That handles the case where it
5991 is a constant-pool reference. Then try to look it up in the hash table
5992 in case it is something whose value we have seen before. */
5993
5994 if (GET_CODE (x) == MEM)
5995 {
5996 struct table_elt *elt;
5997
906c4e36 5998 x = fold_rtx (x, NULL_RTX);
fc3ffe83
RK
5999 if (CONSTANT_P (x))
6000 return x;
6001
6002 elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
6003 if (elt == 0)
6004 return 0;
6005
6006 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
6007 if (elt->is_const && CONSTANT_P (elt->exp))
6008 return elt->exp;
6009 }
6010
7afe21cc
RK
6011 return 0;
6012}
6013\f
6014/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
6015 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
6016 least-significant part of X.
6017 MODE specifies how big a part of X to return.
6018
6019 If the requested operation cannot be done, 0 is returned.
6020
6021 This is similar to gen_lowpart in emit-rtl.c. */
6022
6023rtx
6024gen_lowpart_if_possible (mode, x)
6025 enum machine_mode mode;
6026 register rtx x;
6027{
6028 rtx result = gen_lowpart_common (mode, x);
6029
6030 if (result)
6031 return result;
6032 else if (GET_CODE (x) == MEM)
6033 {
6034 /* This is the only other case we handle. */
6035 register int offset = 0;
6036 rtx new;
6037
f76b9db2
ILT
6038 if (WORDS_BIG_ENDIAN)
6039 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
6040 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
6041 if (BYTES_BIG_ENDIAN)
6042 /* Adjust the address so that the address-after-the-data is
6043 unchanged. */
6044 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
6045 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
38a448ca 6046 new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
7afe21cc
RK
6047 if (! memory_address_p (mode, XEXP (new, 0)))
6048 return 0;
7afe21cc 6049 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
c6df88cb 6050 MEM_COPY_ATTRIBUTES (new, x);
7afe21cc
RK
6051 return new;
6052 }
6053 else
6054 return 0;
6055}
6056\f
6057/* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
6058 branch. It will be zero if not.
6059
6060 In certain cases, this can cause us to add an equivalence. For example,
6061 if we are following the taken case of
6062 if (i == 2)
6063 we can add the fact that `i' and '2' are now equivalent.
6064
6065 In any case, we can record that this comparison was passed. If the same
6066 comparison is seen later, we will know its value. */
6067
6068static void
6069record_jump_equiv (insn, taken)
6070 rtx insn;
6071 int taken;
6072{
6073 int cond_known_true;
6074 rtx op0, op1;
13c9910f 6075 enum machine_mode mode, mode0, mode1;
7afe21cc
RK
6076 int reversed_nonequality = 0;
6077 enum rtx_code code;
6078
6079 /* Ensure this is the right kind of insn. */
6080 if (! condjump_p (insn) || simplejump_p (insn))
6081 return;
6082
6083 /* See if this jump condition is known true or false. */
6084 if (taken)
6085 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
6086 else
6087 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
6088
6089 /* Get the type of comparison being done and the operands being compared.
6090 If we had to reverse a non-equality condition, record that fact so we
6091 know that it isn't valid for floating-point. */
6092 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
6093 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
6094 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
6095
13c9910f 6096 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
7afe21cc
RK
6097 if (! cond_known_true)
6098 {
6099 reversed_nonequality = (code != EQ && code != NE);
6100 code = reverse_condition (code);
6101 }
6102
6103 /* The mode is the mode of the non-constant. */
13c9910f
RS
6104 mode = mode0;
6105 if (mode1 != VOIDmode)
6106 mode = mode1;
7afe21cc
RK
6107
6108 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
6109}
6110
6111/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
6112 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
6113 Make any useful entries we can with that information. Called from
6114 above function and called recursively. */
6115
static void
record_jump_cond (code, mode, op0, op1, reversed_nonequality)
     enum rtx_code code;
     enum machine_mode mode;
     rtx op0, op1;
     int reversed_nonequality;
{
  unsigned op0_hash, op1_hash;
  int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
  struct table_elt *op0_elt, *op1_elt;

  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
     we know that they are also equal in the smaller mode (this is also
     true for all smaller modes whether or not there is a SUBREG, but
     is not worth testing for with no SUBREG).  */

  /* Note that GET_MODE (op0) may not equal MODE.  */
  if (code == EQ && GET_CODE (op0) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (op0))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = gen_lowpart_if_possible (inner_mode, op1);

      /* Recurse on the inner register; if OP1 has no low part, wrap it
	 in a SUBREG of the inner mode instead.  */
      record_jump_cond (code, mode, SUBREG_REG (op0),
			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
			reversed_nonequality);
    }

  /* Same as above with the roles of OP0 and OP1 exchanged.  */
  if (code == EQ && GET_CODE (op1) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (op1))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = gen_lowpart_if_possible (inner_mode, op0);

      record_jump_cond (code, mode, SUBREG_REG (op1),
			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
			reversed_nonequality);
    }

  /* Similarly, if this is an NE comparison, and either is a SUBREG
     making a smaller mode, we know the whole thing is also NE.  */

  /* Note that GET_MODE (op0) may not equal MODE;
     if we test MODE instead, we can get an infinite recursion
     alternating between two modes each wider than MODE.  */

  if (code == NE && GET_CODE (op0) == SUBREG
      && subreg_lowpart_p (op0)
      && (GET_MODE_SIZE (GET_MODE (op0))
	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = gen_lowpart_if_possible (inner_mode, op1);

      record_jump_cond (code, mode, SUBREG_REG (op0),
			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
			reversed_nonequality);
    }

  /* Same NE narrowing with the roles of OP0 and OP1 exchanged.  */
  if (code == NE && GET_CODE (op1) == SUBREG
      && subreg_lowpart_p (op1)
      && (GET_MODE_SIZE (GET_MODE (op1))
	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = gen_lowpart_if_possible (inner_mode, op0);

      record_jump_cond (code, mode, SUBREG_REG (op1),
			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
			reversed_nonequality);
    }

  /* Hash both operands.  HASH communicates through the file-scope
     flags do_not_record, hash_arg_in_memory and hash_arg_in_struct,
     so each flag must be cleared before, and captured after, each
     call.  */

  do_not_record = 0;
  hash_arg_in_memory = 0;
  hash_arg_in_struct = 0;
  op0_hash = HASH (op0, mode);
  op0_in_memory = hash_arg_in_memory;
  op0_in_struct = hash_arg_in_struct;

  if (do_not_record)
    return;

  do_not_record = 0;
  hash_arg_in_memory = 0;
  hash_arg_in_struct = 0;
  op1_hash = HASH (op1, mode);
  op1_in_memory = hash_arg_in_memory;
  op1_in_struct = hash_arg_in_struct;

  if (do_not_record)
    return;

  /* Look up both operands.  */
  op0_elt = lookup (op0, op0_hash, mode);
  op1_elt = lookup (op1, op1_hash, mode);

  /* If both operands are already equivalent or if they are not in the
     table but are identical, do nothing.  */
  if ((op0_elt != 0 && op1_elt != 0
       && op0_elt->first_same_value == op1_elt->first_same_value)
      || op0 == op1 || rtx_equal_p (op0, op1))
    return;

  /* If we aren't setting two things equal all we can do is save this
     comparison.   Similarly if this is floating-point.  In the latter
     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
     If we record the equality, we might inadvertently delete code
     whose intent was to change -0 to +0.  */

  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
    {
      /* If we reversed a floating-point comparison, if OP0 is not a
	 register, or if OP1 is neither a register or constant, we can't
	 do anything.  */

      if (GET_CODE (op1) != REG)
	op1 = equiv_constant (op1);

      if ((reversed_nonequality && FLOAT_MODE_P (mode))
	  || GET_CODE (op0) != REG || op1 == 0)
	return;

      /* Put OP0 in the hash table if it isn't already.  This gives it a
	 new quantity number.  */
      if (op0_elt == 0)
	{
	  if (insert_regs (op0, NULL_PTR, 0))
	    {
	      rehash_using_reg (op0);
	      op0_hash = HASH (op0, mode);

	      /* If OP0 is contained in OP1, this changes its hash code
		 as well.  Faster to rehash than to check, except
		 for the simple case of a constant.  */
	      if (! CONSTANT_P (op1))
		op1_hash = HASH (op1,mode);
	    }

	  op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
	  op0_elt->in_memory = op0_in_memory;
	  op0_elt->in_struct = op0_in_struct;
	}

      /* Record the comparison result against OP0's quantity so a later
	 identical comparison can be evaluated directly.  */
      qty_comparison_code[REG_QTY (REGNO (op0))] = code;
      if (GET_CODE (op1) == REG)
	{
	  /* Look it up again--in case op0 and op1 are the same.  */
	  op1_elt = lookup (op1, op1_hash, mode);

	  /* Put OP1 in the hash table so it gets a new quantity number.  */
	  if (op1_elt == 0)
	    {
	      if (insert_regs (op1, NULL_PTR, 0))
		{
		  rehash_using_reg (op1);
		  op1_hash = HASH (op1, mode);
		}

	      op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
	      op1_elt->in_memory = op1_in_memory;
	      op1_elt->in_struct = op1_in_struct;
	    }

	  /* The comparison partner is a register: record its quantity
	     and clear any constant partner.  */
	  qty_comparison_qty[REG_QTY (REGNO (op0))] = REG_QTY (REGNO (op1));
	  qty_comparison_const[REG_QTY (REGNO (op0))] = 0;
	}
      else
	{
	  /* The comparison partner is a constant: -1 marks "no quantity".  */
	  qty_comparison_qty[REG_QTY (REGNO (op0))] = -1;
	  qty_comparison_const[REG_QTY (REGNO (op0))] = op1;
	}

      return;
    }

  /* If either side is still missing an equivalence, make it now,
     then merge the equivalences.  */

  if (op0_elt == 0)
    {
      if (insert_regs (op0, NULL_PTR, 0))
	{
	  rehash_using_reg (op0);
	  op0_hash = HASH (op0, mode);
	}

      op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
      op0_elt->in_memory = op0_in_memory;
      op0_elt->in_struct = op0_in_struct;
    }

  if (op1_elt == 0)
    {
      if (insert_regs (op1, NULL_PTR, 0))
	{
	  rehash_using_reg (op1);
	  op1_hash = HASH (op1, mode);
	}

      op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
      op1_elt->in_memory = op1_in_memory;
      op1_elt->in_struct = op1_in_struct;
    }

  merge_equiv_classes (op0_elt, op1_elt);
  last_jump_equiv_class = op0_elt;
}
6327\f
6328/* CSE processing for one instruction.
6329 First simplify sources and addresses of all assignments
6330 in the instruction, using previously-computed equivalents values.
6331 Then install the new sources and destinations in the table
6332 of available values.
6333
1ed0205e
VM
6334 If LIBCALL_INSN is nonzero, don't record any equivalence made in
6335 the insn. It means that INSN is inside libcall block. In this
6336 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
7afe21cc
RK
6337
6338/* Data on one SET contained in the instruction. */
6339
struct set
{
  /* The SET rtx itself.  */
  rtx rtl;
  /* The SET_SRC of the rtx (the original value, if it is changing).  */
  rtx src;
  /* The hash-table element for the SET_SRC of the SET.  */
  struct table_elt *src_elt;
  /* Hash value for the SET_SRC.  */
  unsigned src_hash;
  /* Hash value for the SET_DEST.  */
  unsigned dest_hash;
  /* The SET_DEST, with SUBREG, etc., stripped.  */
  rtx inner_dest;
  /* Place where the pointer to the INNER_DEST was found.  */
  rtx *inner_dest_loc;
  /* Nonzero if the SET_SRC is in memory.  */
  char src_in_memory;
  /* Nonzero if the SET_SRC is in a structure.  */
  char src_in_struct;
  /* Nonzero if the SET_SRC contains something
     whose value cannot be predicted and understood.  */
  char src_volatile;
  /* Original machine mode, in case it becomes a CONST_INT.  */
  enum machine_mode mode;
  /* A constant equivalent for SET_SRC, if any.  */
  rtx src_const;
  /* Hash value of constant equivalent for SET_SRC.  */
  unsigned src_const_hash;
  /* Table entry for constant equivalent for SET_SRC, if any.  */
  struct table_elt *src_const_elt;
};
6372
6373static void
7bd8b2a8 6374cse_insn (insn, libcall_insn)
7afe21cc 6375 rtx insn;
7bd8b2a8 6376 rtx libcall_insn;
7afe21cc
RK
6377{
6378 register rtx x = PATTERN (insn);
7afe21cc 6379 register int i;
92f9aa51 6380 rtx tem;
7afe21cc
RK
6381 register int n_sets = 0;
6382
2d8b0f3a 6383#ifdef HAVE_cc0
7afe21cc
RK
6384 /* Records what this insn does to set CC0. */
6385 rtx this_insn_cc0 = 0;
135d84b8 6386 enum machine_mode this_insn_cc0_mode = VOIDmode;
2d8b0f3a 6387#endif
7afe21cc
RK
6388
6389 rtx src_eqv = 0;
6390 struct table_elt *src_eqv_elt = 0;
6a651371
KG
6391 int src_eqv_volatile = 0;
6392 int src_eqv_in_memory = 0;
6393 int src_eqv_in_struct = 0;
6394 unsigned src_eqv_hash = 0;
7afe21cc 6395
6a651371 6396 struct set *sets = NULL_PTR;
7afe21cc
RK
6397
6398 this_insn = insn;
7afe21cc
RK
6399
6400 /* Find all the SETs and CLOBBERs in this instruction.
6401 Record all the SETs in the array `set' and count them.
6402 Also determine whether there is a CLOBBER that invalidates
6403 all memory references, or all references at varying addresses. */
6404
f1e7c95f
RK
6405 if (GET_CODE (insn) == CALL_INSN)
6406 {
6407 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6408 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
bb4034b3 6409 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
f1e7c95f
RK
6410 }
6411
7afe21cc
RK
6412 if (GET_CODE (x) == SET)
6413 {
6414 sets = (struct set *) alloca (sizeof (struct set));
6415 sets[0].rtl = x;
6416
6417 /* Ignore SETs that are unconditional jumps.
6418 They never need cse processing, so this does not hurt.
6419 The reason is not efficiency but rather
6420 so that we can test at the end for instructions
6421 that have been simplified to unconditional jumps
6422 and not be misled by unchanged instructions
6423 that were unconditional jumps to begin with. */
6424 if (SET_DEST (x) == pc_rtx
6425 && GET_CODE (SET_SRC (x)) == LABEL_REF)
6426 ;
6427
6428 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
6429 The hard function value register is used only once, to copy to
6430 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
6431 Ensure we invalidate the destination register. On the 80386 no
7722328e 6432 other code would invalidate it since it is a fixed_reg.
0f41302f 6433 We need not check the return of apply_change_group; see canon_reg. */
7afe21cc
RK
6434
6435 else if (GET_CODE (SET_SRC (x)) == CALL)
6436 {
6437 canon_reg (SET_SRC (x), insn);
77fa0940 6438 apply_change_group ();
7afe21cc 6439 fold_rtx (SET_SRC (x), insn);
bb4034b3 6440 invalidate (SET_DEST (x), VOIDmode);
7afe21cc
RK
6441 }
6442 else
6443 n_sets = 1;
6444 }
6445 else if (GET_CODE (x) == PARALLEL)
6446 {
6447 register int lim = XVECLEN (x, 0);
6448
6449 sets = (struct set *) alloca (lim * sizeof (struct set));
6450
6451 /* Find all regs explicitly clobbered in this insn,
6452 and ensure they are not replaced with any other regs
6453 elsewhere in this insn.
6454 When a reg that is clobbered is also used for input,
6455 we should presume that that is for a reason,
6456 and we should not substitute some other register
6457 which is not supposed to be clobbered.
6458 Therefore, this loop cannot be merged into the one below
830a38ee 6459 because a CALL may precede a CLOBBER and refer to the
7afe21cc
RK
6460 value clobbered. We must not let a canonicalization do
6461 anything in that case. */
6462 for (i = 0; i < lim; i++)
6463 {
6464 register rtx y = XVECEXP (x, 0, i);
2708da92
RS
6465 if (GET_CODE (y) == CLOBBER)
6466 {
6467 rtx clobbered = XEXP (y, 0);
6468
6469 if (GET_CODE (clobbered) == REG
6470 || GET_CODE (clobbered) == SUBREG)
bb4034b3 6471 invalidate (clobbered, VOIDmode);
2708da92
RS
6472 else if (GET_CODE (clobbered) == STRICT_LOW_PART
6473 || GET_CODE (clobbered) == ZERO_EXTRACT)
bb4034b3 6474 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
2708da92 6475 }
7afe21cc
RK
6476 }
6477
6478 for (i = 0; i < lim; i++)
6479 {
6480 register rtx y = XVECEXP (x, 0, i);
6481 if (GET_CODE (y) == SET)
6482 {
7722328e
RK
6483 /* As above, we ignore unconditional jumps and call-insns and
6484 ignore the result of apply_change_group. */
7afe21cc
RK
6485 if (GET_CODE (SET_SRC (y)) == CALL)
6486 {
6487 canon_reg (SET_SRC (y), insn);
77fa0940 6488 apply_change_group ();
7afe21cc 6489 fold_rtx (SET_SRC (y), insn);
bb4034b3 6490 invalidate (SET_DEST (y), VOIDmode);
7afe21cc
RK
6491 }
6492 else if (SET_DEST (y) == pc_rtx
6493 && GET_CODE (SET_SRC (y)) == LABEL_REF)
6494 ;
6495 else
6496 sets[n_sets++].rtl = y;
6497 }
6498 else if (GET_CODE (y) == CLOBBER)
6499 {
9ae8ffe7 6500 /* If we clobber memory, canon the address.
7afe21cc
RK
6501 This does nothing when a register is clobbered
6502 because we have already invalidated the reg. */
6503 if (GET_CODE (XEXP (y, 0)) == MEM)
9ae8ffe7 6504 canon_reg (XEXP (y, 0), NULL_RTX);
7afe21cc
RK
6505 }
6506 else if (GET_CODE (y) == USE
6507 && ! (GET_CODE (XEXP (y, 0)) == REG
6508 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
906c4e36 6509 canon_reg (y, NULL_RTX);
7afe21cc
RK
6510 else if (GET_CODE (y) == CALL)
6511 {
7722328e
RK
6512 /* The result of apply_change_group can be ignored; see
6513 canon_reg. */
7afe21cc 6514 canon_reg (y, insn);
77fa0940 6515 apply_change_group ();
7afe21cc
RK
6516 fold_rtx (y, insn);
6517 }
6518 }
6519 }
6520 else if (GET_CODE (x) == CLOBBER)
6521 {
6522 if (GET_CODE (XEXP (x, 0)) == MEM)
9ae8ffe7 6523 canon_reg (XEXP (x, 0), NULL_RTX);
7afe21cc
RK
6524 }
6525
6526 /* Canonicalize a USE of a pseudo register or memory location. */
6527 else if (GET_CODE (x) == USE
6528 && ! (GET_CODE (XEXP (x, 0)) == REG
6529 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
906c4e36 6530 canon_reg (XEXP (x, 0), NULL_RTX);
7afe21cc
RK
6531 else if (GET_CODE (x) == CALL)
6532 {
7722328e 6533 /* The result of apply_change_group can be ignored; see canon_reg. */
7afe21cc 6534 canon_reg (x, insn);
77fa0940 6535 apply_change_group ();
7afe21cc
RK
6536 fold_rtx (x, insn);
6537 }
6538
7b3ab05e
JW
6539 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
6540 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
6541 is handled specially for this case, and if it isn't set, then there will
9faa82d8 6542 be no equivalence for the destination. */
92f9aa51
RK
6543 if (n_sets == 1 && REG_NOTES (insn) != 0
6544 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
7b3ab05e
JW
6545 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
6546 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
92f9aa51 6547 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
7afe21cc
RK
6548
6549 /* Canonicalize sources and addresses of destinations.
6550 We do this in a separate pass to avoid problems when a MATCH_DUP is
6551 present in the insn pattern. In that case, we want to ensure that
6552 we don't break the duplicate nature of the pattern. So we will replace
6553 both operands at the same time. Otherwise, we would fail to find an
6554 equivalent substitution in the loop calling validate_change below.
7afe21cc
RK
6555
6556 We used to suppress canonicalization of DEST if it appears in SRC,
77fa0940 6557 but we don't do this any more. */
7afe21cc
RK
6558
6559 for (i = 0; i < n_sets; i++)
6560 {
6561 rtx dest = SET_DEST (sets[i].rtl);
6562 rtx src = SET_SRC (sets[i].rtl);
6563 rtx new = canon_reg (src, insn);
58873255 6564 int insn_code;
7afe21cc 6565
77fa0940
RK
6566 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
6567 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
6568 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
58873255
RK
6569 || (insn_code = recog_memoized (insn)) < 0
6570 || insn_n_dups[insn_code] > 0)
77fa0940 6571 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
7afe21cc
RK
6572 else
6573 SET_SRC (sets[i].rtl) = new;
6574
6575 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
6576 {
6577 validate_change (insn, &XEXP (dest, 1),
77fa0940 6578 canon_reg (XEXP (dest, 1), insn), 1);
7afe21cc 6579 validate_change (insn, &XEXP (dest, 2),
77fa0940 6580 canon_reg (XEXP (dest, 2), insn), 1);
7afe21cc
RK
6581 }
6582
6583 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
6584 || GET_CODE (dest) == ZERO_EXTRACT
6585 || GET_CODE (dest) == SIGN_EXTRACT)
6586 dest = XEXP (dest, 0);
6587
6588 if (GET_CODE (dest) == MEM)
6589 canon_reg (dest, insn);
6590 }
6591
77fa0940
RK
6592 /* Now that we have done all the replacements, we can apply the change
6593 group and see if they all work. Note that this will cause some
6594 canonicalizations that would have worked individually not to be applied
6595 because some other canonicalization didn't work, but this should not
7722328e
RK
6596 occur often.
6597
6598 The result of apply_change_group can be ignored; see canon_reg. */
77fa0940
RK
6599
6600 apply_change_group ();
6601
7afe21cc
RK
6602 /* Set sets[i].src_elt to the class each source belongs to.
6603 Detect assignments from or to volatile things
6604 and set set[i] to zero so they will be ignored
6605 in the rest of this function.
6606
6607 Nothing in this loop changes the hash table or the register chains. */
6608
6609 for (i = 0; i < n_sets; i++)
6610 {
6611 register rtx src, dest;
6612 register rtx src_folded;
6613 register struct table_elt *elt = 0, *p;
6614 enum machine_mode mode;
6615 rtx src_eqv_here;
6616 rtx src_const = 0;
6617 rtx src_related = 0;
6618 struct table_elt *src_const_elt = 0;
6619 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
6620 int src_related_cost = 10000, src_elt_cost = 10000;
6621 /* Set non-zero if we need to call force_const_mem on with the
6622 contents of src_folded before using it. */
6623 int src_folded_force_flag = 0;
6624
6625 dest = SET_DEST (sets[i].rtl);
6626 src = SET_SRC (sets[i].rtl);
6627
6628 /* If SRC is a constant that has no machine mode,
6629 hash it with the destination's machine mode.
6630 This way we can keep different modes separate. */
6631
6632 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6633 sets[i].mode = mode;
6634
6635 if (src_eqv)
6636 {
6637 enum machine_mode eqvmode = mode;
6638 if (GET_CODE (dest) == STRICT_LOW_PART)
6639 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6640 do_not_record = 0;
6641 hash_arg_in_memory = 0;
6642 hash_arg_in_struct = 0;
6643 src_eqv = fold_rtx (src_eqv, insn);
2197a88a 6644 src_eqv_hash = HASH (src_eqv, eqvmode);
7afe21cc
RK
6645
6646 /* Find the equivalence class for the equivalent expression. */
6647
6648 if (!do_not_record)
2197a88a 6649 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
7afe21cc
RK
6650
6651 src_eqv_volatile = do_not_record;
6652 src_eqv_in_memory = hash_arg_in_memory;
6653 src_eqv_in_struct = hash_arg_in_struct;
6654 }
6655
6656 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
6657 value of the INNER register, not the destination. So it is not
3826a3da 6658 a valid substitution for the source. But save it for later. */
7afe21cc
RK
6659 if (GET_CODE (dest) == STRICT_LOW_PART)
6660 src_eqv_here = 0;
6661 else
6662 src_eqv_here = src_eqv;
6663
6664 /* Simplify and foldable subexpressions in SRC. Then get the fully-
6665 simplified result, which may not necessarily be valid. */
6666 src_folded = fold_rtx (src, insn);
6667
e6a125a0
RK
6668#if 0
6669 /* ??? This caused bad code to be generated for the m68k port with -O2.
6670 Suppose src is (CONST_INT -1), and that after truncation src_folded
6671 is (CONST_INT 3). Suppose src_folded is then used for src_const.
6672 At the end we will add src and src_const to the same equivalence
6673 class. We now have 3 and -1 on the same equivalence class. This
6674 causes later instructions to be mis-optimized. */
7afe21cc
RK
6675 /* If storing a constant in a bitfield, pre-truncate the constant
6676 so we will be able to record it later. */
6677 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6678 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6679 {
6680 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6681
6682 if (GET_CODE (src) == CONST_INT
6683 && GET_CODE (width) == CONST_INT
906c4e36
RK
6684 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6685 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6686 src_folded
6687 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
6688 << INTVAL (width)) - 1));
7afe21cc 6689 }
e6a125a0 6690#endif
7afe21cc
RK
6691
6692 /* Compute SRC's hash code, and also notice if it
6693 should not be recorded at all. In that case,
6694 prevent any further processing of this assignment. */
6695 do_not_record = 0;
6696 hash_arg_in_memory = 0;
6697 hash_arg_in_struct = 0;
6698
6699 sets[i].src = src;
2197a88a 6700 sets[i].src_hash = HASH (src, mode);
7afe21cc
RK
6701 sets[i].src_volatile = do_not_record;
6702 sets[i].src_in_memory = hash_arg_in_memory;
6703 sets[i].src_in_struct = hash_arg_in_struct;
6704
50196afa
RK
6705 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
6706 a pseudo that is set more than once, do not record SRC. Using
6707 SRC as a replacement for anything else will be incorrect in that
6708 situation. Note that this usually occurs only for stack slots,
956d6950 6709 in which case all the RTL would be referring to SRC, so we don't
50196afa
RK
6710 lose any optimization opportunities by not having SRC in the
6711 hash table. */
6712
6713 if (GET_CODE (src) == MEM
6714 && find_reg_note (insn, REG_EQUIV, src) != 0
6715 && GET_CODE (dest) == REG
6716 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
b1f21e0a 6717 && REG_N_SETS (REGNO (dest)) != 1)
50196afa
RK
6718 sets[i].src_volatile = 1;
6719
0dadecf6
RK
6720#if 0
6721 /* It is no longer clear why we used to do this, but it doesn't
6722 appear to still be needed. So let's try without it since this
6723 code hurts cse'ing widened ops. */
7afe21cc
RK
6724 /* If source is a perverse subreg (such as QI treated as an SI),
6725 treat it as volatile. It may do the work of an SI in one context
6726 where the extra bits are not being used, but cannot replace an SI
6727 in general. */
6728 if (GET_CODE (src) == SUBREG
6729 && (GET_MODE_SIZE (GET_MODE (src))
6730 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6731 sets[i].src_volatile = 1;
0dadecf6 6732#endif
7afe21cc
RK
6733
6734 /* Locate all possible equivalent forms for SRC. Try to replace
6735 SRC in the insn with each cheaper equivalent.
6736
6737 We have the following types of equivalents: SRC itself, a folded
6738 version, a value given in a REG_EQUAL note, or a value related
6739 to a constant.
6740
6741 Each of these equivalents may be part of an additional class
6742 of equivalents (if more than one is in the table, they must be in
6743 the same class; we check for this).
6744
6745 If the source is volatile, we don't do any table lookups.
6746
6747 We note any constant equivalent for possible later use in a
6748 REG_NOTE. */
6749
6750 if (!sets[i].src_volatile)
2197a88a 6751 elt = lookup (src, sets[i].src_hash, mode);
7afe21cc
RK
6752
6753 sets[i].src_elt = elt;
6754
6755 if (elt && src_eqv_here && src_eqv_elt)
6756 {
6757 if (elt->first_same_value != src_eqv_elt->first_same_value)
6758 {
6759 /* The REG_EQUAL is indicating that two formerly distinct
6760 classes are now equivalent. So merge them. */
6761 merge_equiv_classes (elt, src_eqv_elt);
2197a88a
RK
6762 src_eqv_hash = HASH (src_eqv, elt->mode);
6763 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
7afe21cc
RK
6764 }
6765
6766 src_eqv_here = 0;
6767 }
6768
6769 else if (src_eqv_elt)
6770 elt = src_eqv_elt;
6771
6772 /* Try to find a constant somewhere and record it in `src_const'.
6773 Record its table element, if any, in `src_const_elt'. Look in
6774 any known equivalences first. (If the constant is not in the
2197a88a 6775 table, also set `sets[i].src_const_hash'). */
7afe21cc
RK
6776 if (elt)
6777 for (p = elt->first_same_value; p; p = p->next_same_value)
6778 if (p->is_const)
6779 {
6780 src_const = p->exp;
6781 src_const_elt = elt;
6782 break;
6783 }
6784
6785 if (src_const == 0
6786 && (CONSTANT_P (src_folded)
6787 /* Consider (minus (label_ref L1) (label_ref L2)) as
6788 "constant" here so we will record it. This allows us
6789 to fold switch statements when an ADDR_DIFF_VEC is used. */
6790 || (GET_CODE (src_folded) == MINUS
6791 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6792 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6793 src_const = src_folded, src_const_elt = elt;
6794 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6795 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6796
6797 /* If we don't know if the constant is in the table, get its
6798 hash code and look it up. */
6799 if (src_const && src_const_elt == 0)
6800 {
2197a88a
RK
6801 sets[i].src_const_hash = HASH (src_const, mode);
6802 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
7afe21cc
RK
6803 }
6804
6805 sets[i].src_const = src_const;
6806 sets[i].src_const_elt = src_const_elt;
6807
6808 /* If the constant and our source are both in the table, mark them as
6809 equivalent. Otherwise, if a constant is in the table but the source
6810 isn't, set ELT to it. */
6811 if (src_const_elt && elt
6812 && src_const_elt->first_same_value != elt->first_same_value)
6813 merge_equiv_classes (elt, src_const_elt);
6814 else if (src_const_elt && elt == 0)
6815 elt = src_const_elt;
6816
6817 /* See if there is a register linearly related to a constant
6818 equivalent of SRC. */
6819 if (src_const
6820 && (GET_CODE (src_const) == CONST
6821 || (src_const_elt && src_const_elt->related_value != 0)))
6822 {
6823 src_related = use_related_value (src_const, src_const_elt);
6824 if (src_related)
6825 {
6826 struct table_elt *src_related_elt
6827 = lookup (src_related, HASH (src_related, mode), mode);
6828 if (src_related_elt && elt)
6829 {
6830 if (elt->first_same_value
6831 != src_related_elt->first_same_value)
6832 /* This can occur when we previously saw a CONST
6833 involving a SYMBOL_REF and then see the SYMBOL_REF
6834 twice. Merge the involved classes. */
6835 merge_equiv_classes (elt, src_related_elt);
6836
6837 src_related = 0;
6838 src_related_elt = 0;
6839 }
6840 else if (src_related_elt && elt == 0)
6841 elt = src_related_elt;
6842 }
6843 }
6844
e4600702
RK
6845 /* See if we have a CONST_INT that is already in a register in a
6846 wider mode. */
6847
6848 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6849 && GET_MODE_CLASS (mode) == MODE_INT
6850 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6851 {
6852 enum machine_mode wider_mode;
6853
6854 for (wider_mode = GET_MODE_WIDER_MODE (mode);
6855 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6856 && src_related == 0;
6857 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6858 {
6859 struct table_elt *const_elt
6860 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6861
6862 if (const_elt == 0)
6863 continue;
6864
6865 for (const_elt = const_elt->first_same_value;
6866 const_elt; const_elt = const_elt->next_same_value)
6867 if (GET_CODE (const_elt->exp) == REG)
6868 {
6869 src_related = gen_lowpart_if_possible (mode,
6870 const_elt->exp);
6871 break;
6872 }
6873 }
6874 }
6875
d45cf215
RS
6876 /* Another possibility is that we have an AND with a constant in
6877 a mode narrower than a word. If so, it might have been generated
6878 as part of an "if" which would narrow the AND. If we already
6879 have done the AND in a wider mode, we can use a SUBREG of that
6880 value. */
6881
6882 if (flag_expensive_optimizations && ! src_related
6883 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6884 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6885 {
6886 enum machine_mode tmode;
38a448ca 6887 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
d45cf215
RS
6888
6889 for (tmode = GET_MODE_WIDER_MODE (mode);
6890 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6891 tmode = GET_MODE_WIDER_MODE (tmode))
6892 {
6893 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6894 struct table_elt *larger_elt;
6895
6896 if (inner)
6897 {
6898 PUT_MODE (new_and, tmode);
6899 XEXP (new_and, 0) = inner;
6900 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6901 if (larger_elt == 0)
6902 continue;
6903
6904 for (larger_elt = larger_elt->first_same_value;
6905 larger_elt; larger_elt = larger_elt->next_same_value)
6906 if (GET_CODE (larger_elt->exp) == REG)
6907 {
6908 src_related
6909 = gen_lowpart_if_possible (mode, larger_elt->exp);
6910 break;
6911 }
6912
6913 if (src_related)
6914 break;
6915 }
6916 }
6917 }
7bac1be0
RK
6918
6919#ifdef LOAD_EXTEND_OP
6920 /* See if a MEM has already been loaded with a widening operation;
6921 if it has, we can use a subreg of that. Many CISC machines
6922 also have such operations, but this is only likely to be
6923 beneficial these machines. */
6924
6925 if (flag_expensive_optimizations && src_related == 0
6926 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6927 && GET_MODE_CLASS (mode) == MODE_INT
6928 && GET_CODE (src) == MEM && ! do_not_record
6929 && LOAD_EXTEND_OP (mode) != NIL)
6930 {
6931 enum machine_mode tmode;
6932
6933 /* Set what we are trying to extend and the operation it might
6934 have been extended with. */
6935 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
6936 XEXP (memory_extend_rtx, 0) = src;
6937
6938 for (tmode = GET_MODE_WIDER_MODE (mode);
6939 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6940 tmode = GET_MODE_WIDER_MODE (tmode))
6941 {
6942 struct table_elt *larger_elt;
6943
6944 PUT_MODE (memory_extend_rtx, tmode);
6945 larger_elt = lookup (memory_extend_rtx,
6946 HASH (memory_extend_rtx, tmode), tmode);
6947 if (larger_elt == 0)
6948 continue;
6949
6950 for (larger_elt = larger_elt->first_same_value;
6951 larger_elt; larger_elt = larger_elt->next_same_value)
6952 if (GET_CODE (larger_elt->exp) == REG)
6953 {
6954 src_related = gen_lowpart_if_possible (mode,
6955 larger_elt->exp);
6956 break;
6957 }
6958
6959 if (src_related)
6960 break;
6961 }
6962 }
6963#endif /* LOAD_EXTEND_OP */
6964
7afe21cc
RK
6965 if (src == src_folded)
6966 src_folded = 0;
6967
6968 /* At this point, ELT, if non-zero, points to a class of expressions
6969 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
6970 and SRC_RELATED, if non-zero, each contain additional equivalent
6971 expressions. Prune these latter expressions by deleting expressions
6972 already in the equivalence class.
6973
6974 Check for an equivalent identical to the destination. If found,
6975 this is the preferred equivalent since it will likely lead to
6976 elimination of the insn. Indicate this by placing it in
6977 `src_related'. */
6978
6979 if (elt) elt = elt->first_same_value;
6980 for (p = elt; p; p = p->next_same_value)
6981 {
6982 enum rtx_code code = GET_CODE (p->exp);
6983
6984 /* If the expression is not valid, ignore it. Then we do not
6985 have to check for validity below. In most cases, we can use
6986 `rtx_equal_p', since canonicalization has already been done. */
6987 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
6988 continue;
6989
5a03c8c4
RK
6990 /* Also skip paradoxical subregs, unless that's what we're
6991 looking for. */
6992 if (code == SUBREG
6993 && (GET_MODE_SIZE (GET_MODE (p->exp))
6994 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
6995 && ! (src != 0
6996 && GET_CODE (src) == SUBREG
6997 && GET_MODE (src) == GET_MODE (p->exp)
6998 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
6999 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
7000 continue;
7001
7afe21cc
RK
7002 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
7003 src = 0;
7004 else if (src_folded && GET_CODE (src_folded) == code
7005 && rtx_equal_p (src_folded, p->exp))
7006 src_folded = 0;
7007 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
7008 && rtx_equal_p (src_eqv_here, p->exp))
7009 src_eqv_here = 0;
7010 else if (src_related && GET_CODE (src_related) == code
7011 && rtx_equal_p (src_related, p->exp))
7012 src_related = 0;
7013
7014 /* This is the same as the destination of the insns, we want
7015 to prefer it. Copy it to src_related. The code below will
7016 then give it a negative cost. */
7017 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
7018 src_related = dest;
7019
7020 }
7021
7022 /* Find the cheapest valid equivalent, trying all the available
7023 possibilities. Prefer items not in the hash table to ones
7024 that are when they are equal cost. Note that we can never
7025 worsen an insn as the current contents will also succeed.
05c33dd8 7026 If we find an equivalent identical to the destination, use it as best,
0f41302f 7027 since this insn will probably be eliminated in that case. */
7afe21cc
RK
7028 if (src)
7029 {
7030 if (rtx_equal_p (src, dest))
7031 src_cost = -1;
7032 else
7033 src_cost = COST (src);
7034 }
7035
7036 if (src_eqv_here)
7037 {
7038 if (rtx_equal_p (src_eqv_here, dest))
7039 src_eqv_cost = -1;
7040 else
7041 src_eqv_cost = COST (src_eqv_here);
7042 }
7043
7044 if (src_folded)
7045 {
7046 if (rtx_equal_p (src_folded, dest))
7047 src_folded_cost = -1;
7048 else
7049 src_folded_cost = COST (src_folded);
7050 }
7051
7052 if (src_related)
7053 {
7054 if (rtx_equal_p (src_related, dest))
7055 src_related_cost = -1;
7056 else
7057 src_related_cost = COST (src_related);
7058 }
7059
7060 /* If this was an indirect jump insn, a known label will really be
7061 cheaper even though it looks more expensive. */
7062 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
7063 src_folded = src_const, src_folded_cost = -1;
7064
7065 /* Terminate loop when replacement made. This must terminate since
7066 the current contents will be tested and will always be valid. */
7067 while (1)
7068 {
7bd8b2a8 7069 rtx trial, old_src;
7afe21cc
RK
7070
7071 /* Skip invalid entries. */
7072 while (elt && GET_CODE (elt->exp) != REG
7073 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7074 elt = elt->next_same_value;
5a03c8c4
RK
7075
7076 /* A paradoxical subreg would be bad here: it'll be the right
7077 size, but later may be adjusted so that the upper bits aren't
7078 what we want. So reject it. */
7079 if (elt != 0
7080 && GET_CODE (elt->exp) == SUBREG
7081 && (GET_MODE_SIZE (GET_MODE (elt->exp))
7082 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
7083 /* It is okay, though, if the rtx we're trying to match
7084 will ignore any of the bits we can't predict. */
7085 && ! (src != 0
7086 && GET_CODE (src) == SUBREG
7087 && GET_MODE (src) == GET_MODE (elt->exp)
7088 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
7089 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
7090 {
7091 elt = elt->next_same_value;
7092 continue;
7093 }
7afe21cc
RK
7094
7095 if (elt) src_elt_cost = elt->cost;
7096
7097 /* Find cheapest and skip it for the next time. For items
7098 of equal cost, use this order:
7099 src_folded, src, src_eqv, src_related and hash table entry. */
7100 if (src_folded_cost <= src_cost
7101 && src_folded_cost <= src_eqv_cost
7102 && src_folded_cost <= src_related_cost
7103 && src_folded_cost <= src_elt_cost)
7104 {
7105 trial = src_folded, src_folded_cost = 10000;
7106 if (src_folded_force_flag)
7107 trial = force_const_mem (mode, trial);
7108 }
7109 else if (src_cost <= src_eqv_cost
7110 && src_cost <= src_related_cost
7111 && src_cost <= src_elt_cost)
7112 trial = src, src_cost = 10000;
7113 else if (src_eqv_cost <= src_related_cost
7114 && src_eqv_cost <= src_elt_cost)
0af62b41 7115 trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
7afe21cc 7116 else if (src_related_cost <= src_elt_cost)
0af62b41 7117 trial = copy_rtx (src_related), src_related_cost = 10000;
7afe21cc
RK
7118 else
7119 {
05c33dd8 7120 trial = copy_rtx (elt->exp);
7afe21cc
RK
7121 elt = elt->next_same_value;
7122 src_elt_cost = 10000;
7123 }
7124
7125 /* We don't normally have an insn matching (set (pc) (pc)), so
7126 check for this separately here. We will delete such an
7127 insn below.
7128
7129 Tablejump insns contain a USE of the table, so simply replacing
7130 the operand with the constant won't match. This is simply an
7131 unconditional branch, however, and is therefore valid. Just
7132 insert the substitution here and we will delete and re-emit
7133 the insn later. */
7134
7bd8b2a8
JL
7135 /* Keep track of the original SET_SRC so that we can fix notes
7136 on libcall instructions. */
7137 old_src = SET_SRC (sets[i].rtl);
7138
7afe21cc
RK
7139 if (n_sets == 1 && dest == pc_rtx
7140 && (trial == pc_rtx
7141 || (GET_CODE (trial) == LABEL_REF
7142 && ! condjump_p (insn))))
7143 {
7144 /* If TRIAL is a label in front of a jump table, we are
7145 really falling through the switch (this is how casesi
7146 insns work), so we must branch around the table. */
7147 if (GET_CODE (trial) == CODE_LABEL
7148 && NEXT_INSN (trial) != 0
7149 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
7150 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
7151 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
7152
38a448ca 7153 trial = gen_rtx_LABEL_REF (Pmode, get_label_after (trial));
7afe21cc
RK
7154
7155 SET_SRC (sets[i].rtl) = trial;
44333223 7156 cse_jumps_altered = 1;
7afe21cc
RK
7157 break;
7158 }
7159
7160 /* Look for a substitution that makes a valid insn. */
7161 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
05c33dd8 7162 {
7bd8b2a8
JL
7163 /* If we just made a substitution inside a libcall, then we
7164 need to make the same substitution in any notes attached
7165 to the RETVAL insn. */
1ed0205e
VM
7166 if (libcall_insn
7167 && (GET_CODE (old_src) == REG
7168 || GET_CODE (old_src) == SUBREG
7169 || GET_CODE (old_src) == MEM))
7bd8b2a8
JL
7170 replace_rtx (REG_NOTES (libcall_insn), old_src,
7171 canon_reg (SET_SRC (sets[i].rtl), insn));
7172
7722328e
RK
7173 /* The result of apply_change_group can be ignored; see
7174 canon_reg. */
7175
7176 validate_change (insn, &SET_SRC (sets[i].rtl),
7177 canon_reg (SET_SRC (sets[i].rtl), insn),
7178 1);
6702af89 7179 apply_change_group ();
05c33dd8
RK
7180 break;
7181 }
7afe21cc
RK
7182
7183 /* If we previously found constant pool entries for
7184 constants and this is a constant, try making a
7185 pool entry. Put it in src_folded unless we already have done
7186 this since that is where it likely came from. */
7187
7188 else if (constant_pool_entries_cost
7189 && CONSTANT_P (trial)
1bbd065b
RK
7190 && ! (GET_CODE (trial) == CONST
7191 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
7192 && (src_folded == 0
7193 || (GET_CODE (src_folded) != MEM
7194 && ! src_folded_force_flag))
9ae8ffe7
JL
7195 && GET_MODE_CLASS (mode) != MODE_CC
7196 && mode != VOIDmode)
7afe21cc
RK
7197 {
7198 src_folded_force_flag = 1;
7199 src_folded = trial;
7200 src_folded_cost = constant_pool_entries_cost;
7201 }
7202 }
7203
7204 src = SET_SRC (sets[i].rtl);
7205
7206 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
7207 However, there is an important exception: If both are registers
7208 that are not the head of their equivalence class, replace SET_SRC
7209 with the head of the class. If we do not do this, we will have
7210 both registers live over a portion of the basic block. This way,
7211 their lifetimes will likely abut instead of overlapping. */
7212 if (GET_CODE (dest) == REG
7213 && REGNO_QTY_VALID_P (REGNO (dest))
30f72379
MM
7214 && qty_mode[REG_QTY (REGNO (dest))] == GET_MODE (dest)
7215 && qty_first_reg[REG_QTY (REGNO (dest))] != REGNO (dest)
7afe21cc
RK
7216 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
7217 /* Don't do this if the original insn had a hard reg as
c5c76735 7218 SET_SRC or SET_DEST. */
7afe21cc 7219 && (GET_CODE (sets[i].src) != REG
c5c76735
JL
7220 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
7221 && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
7afe21cc
RK
7222 /* We can't call canon_reg here because it won't do anything if
7223 SRC is a hard register. */
7224 {
30f72379 7225 int first = qty_first_reg[REG_QTY (REGNO (src))];
759bd8b7
R
7226 rtx new_src
7227 = (first >= FIRST_PSEUDO_REGISTER
7228 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
7229
7230 /* We must use validate-change even for this, because this
7231 might be a special no-op instruction, suitable only to
7232 tag notes onto. */
7233 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
7234 {
7235 src = new_src;
7236 /* If we had a constant that is cheaper than what we are now
7237 setting SRC to, use that constant. We ignored it when we
7238 thought we could make this into a no-op. */
7239 if (src_const && COST (src_const) < COST (src)
7240 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const,
7241 0))
7242 src = src_const;
7243 }
7afe21cc
RK
7244 }
7245
7246 /* If we made a change, recompute SRC values. */
7247 if (src != sets[i].src)
7248 {
7249 do_not_record = 0;
7250 hash_arg_in_memory = 0;
7251 hash_arg_in_struct = 0;
7252 sets[i].src = src;
2197a88a 7253 sets[i].src_hash = HASH (src, mode);
7afe21cc
RK
7254 sets[i].src_volatile = do_not_record;
7255 sets[i].src_in_memory = hash_arg_in_memory;
7256 sets[i].src_in_struct = hash_arg_in_struct;
2197a88a 7257 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
7afe21cc
RK
7258 }
7259
7260 /* If this is a single SET, we are setting a register, and we have an
7261 equivalent constant, we want to add a REG_NOTE. We don't want
7262 to write a REG_EQUAL note for a constant pseudo since verifying that
d45cf215 7263 that pseudo hasn't been eliminated is a pain. Such a note also
ac7ef8d5
FS
7264 won't help anything.
7265
7266 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
7267 which can be created for a reference to a compile time computable
7268 entry in a jump table. */
7269
7afe21cc 7270 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
ac7ef8d5
FS
7271 && GET_CODE (src_const) != REG
7272 && ! (GET_CODE (src_const) == CONST
7273 && GET_CODE (XEXP (src_const, 0)) == MINUS
7274 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
7275 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
7afe21cc 7276 {
92f9aa51 7277 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7afe21cc 7278
51e2a951
AS
7279 /* Make sure that the rtx is not shared with any other insn. */
7280 src_const = copy_rtx (src_const);
7281
7afe21cc
RK
7282 /* Record the actual constant value in a REG_EQUAL note, making
7283 a new one if one does not already exist. */
7284 if (tem)
7285 XEXP (tem, 0) = src_const;
7286 else
38a448ca
RH
7287 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
7288 src_const, REG_NOTES (insn));
7afe21cc
RK
7289
7290 /* If storing a constant value in a register that
7291 previously held the constant value 0,
7292 record this fact with a REG_WAS_0 note on this insn.
7293
7294 Note that the *register* is required to have previously held 0,
7295 not just any register in the quantity and we must point to the
7296 insn that set that register to zero.
7297
7298 Rather than track each register individually, we just see if
7299 the last set for this quantity was for this register. */
7300
7301 if (REGNO_QTY_VALID_P (REGNO (dest))
30f72379 7302 && qty_const[REG_QTY (REGNO (dest))] == const0_rtx)
7afe21cc
RK
7303 {
7304 /* See if we previously had a REG_WAS_0 note. */
906c4e36 7305 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
30f72379 7306 rtx const_insn = qty_const_insn[REG_QTY (REGNO (dest))];
7afe21cc
RK
7307
7308 if ((tem = single_set (const_insn)) != 0
7309 && rtx_equal_p (SET_DEST (tem), dest))
7310 {
7311 if (note)
7312 XEXP (note, 0) = const_insn;
7313 else
c5c76735
JL
7314 REG_NOTES (insn)
7315 = gen_rtx_INSN_LIST (REG_WAS_0, const_insn,
7316 REG_NOTES (insn));
7afe21cc
RK
7317 }
7318 }
7319 }
7320
7321 /* Now deal with the destination. */
7322 do_not_record = 0;
7323 sets[i].inner_dest_loc = &SET_DEST (sets[0].rtl);
7324
7325 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
7326 to the MEM or REG within it. */
7327 while (GET_CODE (dest) == SIGN_EXTRACT
7328 || GET_CODE (dest) == ZERO_EXTRACT
7329 || GET_CODE (dest) == SUBREG
7330 || GET_CODE (dest) == STRICT_LOW_PART)
7331 {
7332 sets[i].inner_dest_loc = &XEXP (dest, 0);
7333 dest = XEXP (dest, 0);
7334 }
7335
7336 sets[i].inner_dest = dest;
7337
7338 if (GET_CODE (dest) == MEM)
7339 {
9ae8ffe7
JL
7340#ifdef PUSH_ROUNDING
7341 /* Stack pushes invalidate the stack pointer. */
7342 rtx addr = XEXP (dest, 0);
7343 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7344 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7345 && XEXP (addr, 0) == stack_pointer_rtx)
7346 invalidate (stack_pointer_rtx, Pmode);
7347#endif
7afe21cc 7348 dest = fold_rtx (dest, insn);
7afe21cc
RK
7349 }
7350
7351 /* Compute the hash code of the destination now,
7352 before the effects of this instruction are recorded,
7353 since the register values used in the address computation
7354 are those before this instruction. */
2197a88a 7355 sets[i].dest_hash = HASH (dest, mode);
7afe21cc
RK
7356
7357 /* Don't enter a bit-field in the hash table
7358 because the value in it after the store
7359 may not equal what was stored, due to truncation. */
7360
7361 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
7362 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
7363 {
7364 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
7365
7366 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
7367 && GET_CODE (width) == CONST_INT
906c4e36
RK
7368 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
7369 && ! (INTVAL (src_const)
7370 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
7afe21cc
RK
7371 /* Exception: if the value is constant,
7372 and it won't be truncated, record it. */
7373 ;
7374 else
7375 {
7376 /* This is chosen so that the destination will be invalidated
7377 but no new value will be recorded.
7378 We must invalidate because sometimes constant
7379 values can be recorded for bitfields. */
7380 sets[i].src_elt = 0;
7381 sets[i].src_volatile = 1;
7382 src_eqv = 0;
7383 src_eqv_elt = 0;
7384 }
7385 }
7386
7387 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
7388 the insn. */
7389 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
7390 {
ef178af3
ZW
7391 /* One less use of the label this insn used to jump to. */
7392 if (JUMP_LABEL (insn) != 0)
7393 --LABEL_NUSES (JUMP_LABEL (insn));
7afe21cc
RK
7394 PUT_CODE (insn, NOTE);
7395 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7396 NOTE_SOURCE_FILE (insn) = 0;
7397 cse_jumps_altered = 1;
7afe21cc
RK
7398 /* No more processing for this set. */
7399 sets[i].rtl = 0;
7400 }
7401
7402 /* If this SET is now setting PC to a label, we know it used to
7403 be a conditional or computed branch. So we see if we can follow
7404 it. If it was a computed branch, delete it and re-emit. */
7405 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
7406 {
7407 rtx p;
7408
7409 /* If this is not in the format for a simple branch and
7410 we are the only SET in it, re-emit it. */
7411 if (! simplejump_p (insn) && n_sets == 1)
7412 {
7413 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
7414 JUMP_LABEL (new) = XEXP (src, 0);
7415 LABEL_NUSES (XEXP (src, 0))++;
7416 delete_insn (insn);
7417 insn = new;
7418 }
31dcf83f
RS
7419 else
7420 /* Otherwise, force rerecognition, since it probably had
7421 a different pattern before.
7422 This shouldn't really be necessary, since whatever
7423 changed the source value above should have done this.
7424 Until the right place is found, might as well do this here. */
7425 INSN_CODE (insn) = -1;
7afe21cc
RK
7426
7427 /* Now that we've converted this jump to an unconditional jump,
7428 there is dead code after it. Delete the dead code until we
7429 reach a BARRIER, the end of the function, or a label. Do
7430 not delete NOTEs except for NOTE_INSN_DELETED since later
7431 phases assume these notes are retained. */
7432
312f6255
GK
7433 never_reached_warning (insn);
7434
7afe21cc
RK
7435 p = insn;
7436
7437 while (NEXT_INSN (p) != 0
7438 && GET_CODE (NEXT_INSN (p)) != BARRIER
7439 && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
7440 {
eec9ef57
JL
7441 /* Note, we must update P with the return value from
7442 delete_insn, otherwise we could get an infinite loop
7443 if NEXT_INSN (p) had INSN_DELETED_P set. */
7afe21cc
RK
7444 if (GET_CODE (NEXT_INSN (p)) != NOTE
7445 || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
778e0677 7446 p = PREV_INSN (delete_insn (NEXT_INSN (p)));
7afe21cc
RK
7447 else
7448 p = NEXT_INSN (p);
7449 }
7450
7451 /* If we don't have a BARRIER immediately after INSN, put one there.
7452 Much code assumes that there are no NOTEs between a JUMP_INSN and
7453 BARRIER. */
7454
7455 if (NEXT_INSN (insn) == 0
7456 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
783e5bca 7457 emit_barrier_before (NEXT_INSN (insn));
7afe21cc
RK
7458
7459 /* We might have two BARRIERs separated by notes. Delete the second
7460 one if so. */
7461
538b78e7
RS
7462 if (p != insn && NEXT_INSN (p) != 0
7463 && GET_CODE (NEXT_INSN (p)) == BARRIER)
7afe21cc
RK
7464 delete_insn (NEXT_INSN (p));
7465
7466 cse_jumps_altered = 1;
7467 sets[i].rtl = 0;
7468 }
7469
c2a47e48
RK
7470 /* If destination is volatile, invalidate it and then do no further
7471 processing for this assignment. */
7afe21cc
RK
7472
7473 else if (do_not_record)
c2a47e48
RK
7474 {
7475 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7476 || GET_CODE (dest) == MEM)
bb4034b3 7477 invalidate (dest, VOIDmode);
2708da92
RS
7478 else if (GET_CODE (dest) == STRICT_LOW_PART
7479 || GET_CODE (dest) == ZERO_EXTRACT)
bb4034b3 7480 invalidate (XEXP (dest, 0), GET_MODE (dest));
c2a47e48
RK
7481 sets[i].rtl = 0;
7482 }
7afe21cc
RK
7483
7484 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
2197a88a 7485 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
7afe21cc
RK
7486
7487#ifdef HAVE_cc0
7488 /* If setting CC0, record what it was set to, or a constant, if it
7489 is equivalent to a constant. If it is being set to a floating-point
7490 value, make a COMPARE with the appropriate constant of 0. If we
7491 don't do this, later code can interpret this as a test against
7492 const0_rtx, which can cause problems if we try to put it into an
7493 insn as a floating-point operand. */
7494 if (dest == cc0_rtx)
7495 {
7496 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
7497 this_insn_cc0_mode = mode;
cbf6a543 7498 if (FLOAT_MODE_P (mode))
38a448ca
RH
7499 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
7500 CONST0_RTX (mode));
7afe21cc
RK
7501 }
7502#endif
7503 }
7504
7505 /* Now enter all non-volatile source expressions in the hash table
7506 if they are not already present.
7507 Record their equivalence classes in src_elt.
7508 This way we can insert the corresponding destinations into
7509 the same classes even if the actual sources are no longer in them
7510 (having been invalidated). */
7511
7512 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
7513 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
7514 {
7515 register struct table_elt *elt;
7516 register struct table_elt *classp = sets[0].src_elt;
7517 rtx dest = SET_DEST (sets[0].rtl);
7518 enum machine_mode eqvmode = GET_MODE (dest);
7519
7520 if (GET_CODE (dest) == STRICT_LOW_PART)
7521 {
7522 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
7523 classp = 0;
7524 }
7525 if (insert_regs (src_eqv, classp, 0))
8ae2b8f6
JW
7526 {
7527 rehash_using_reg (src_eqv);
7528 src_eqv_hash = HASH (src_eqv, eqvmode);
7529 }
2197a88a 7530 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7afe21cc
RK
7531 elt->in_memory = src_eqv_in_memory;
7532 elt->in_struct = src_eqv_in_struct;
7533 src_eqv_elt = elt;
f7911249
JW
7534
7535 /* Check to see if src_eqv_elt is the same as a set source which
7536 does not yet have an elt, and if so set the elt of the set source
7537 to src_eqv_elt. */
7538 for (i = 0; i < n_sets; i++)
7539 if (sets[i].rtl && sets[i].src_elt == 0
7540 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
7541 sets[i].src_elt = src_eqv_elt;
7afe21cc
RK
7542 }
7543
7544 for (i = 0; i < n_sets; i++)
7545 if (sets[i].rtl && ! sets[i].src_volatile
7546 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
7547 {
7548 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
7549 {
7550 /* REG_EQUAL in setting a STRICT_LOW_PART
7551 gives an equivalent for the entire destination register,
7552 not just for the subreg being stored in now.
7553 This is a more interesting equivalence, so we arrange later
7554 to treat the entire reg as the destination. */
7555 sets[i].src_elt = src_eqv_elt;
2197a88a 7556 sets[i].src_hash = src_eqv_hash;
7afe21cc
RK
7557 }
7558 else
7559 {
7560 /* Insert source and constant equivalent into hash table, if not
7561 already present. */
7562 register struct table_elt *classp = src_eqv_elt;
7563 register rtx src = sets[i].src;
7564 register rtx dest = SET_DEST (sets[i].rtl);
7565 enum machine_mode mode
7566 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
7567
15c68354
R
7568 /* Don't put a hard register source into the table if this is
7569 the last insn of a libcall. */
7570 if (sets[i].src_elt == 0
7571 && (GET_CODE (src) != REG
7572 || REGNO (src) >= FIRST_PSEUDO_REGISTER
7573 || ! find_reg_note (insn, REG_RETVAL, NULL_RTX)))
7afe21cc
RK
7574 {
7575 register struct table_elt *elt;
7576
7577 /* Note that these insert_regs calls cannot remove
7578 any of the src_elt's, because they would have failed to
7579 match if not still valid. */
7580 if (insert_regs (src, classp, 0))
8ae2b8f6
JW
7581 {
7582 rehash_using_reg (src);
7583 sets[i].src_hash = HASH (src, mode);
7584 }
2197a88a 7585 elt = insert (src, classp, sets[i].src_hash, mode);
7afe21cc
RK
7586 elt->in_memory = sets[i].src_in_memory;
7587 elt->in_struct = sets[i].src_in_struct;
7588 sets[i].src_elt = classp = elt;
7589 }
7590
7591 if (sets[i].src_const && sets[i].src_const_elt == 0
7592 && src != sets[i].src_const
7593 && ! rtx_equal_p (sets[i].src_const, src))
7594 sets[i].src_elt = insert (sets[i].src_const, classp,
2197a88a 7595 sets[i].src_const_hash, mode);
7afe21cc
RK
7596 }
7597 }
7598 else if (sets[i].src_elt == 0)
7599 /* If we did not insert the source into the hash table (e.g., it was
7600 volatile), note the equivalence class for the REG_EQUAL value, if any,
7601 so that the destination goes into that class. */
7602 sets[i].src_elt = src_eqv_elt;
7603
9ae8ffe7 7604 invalidate_from_clobbers (x);
77fa0940
RK
7605
7606 /* Some registers are invalidated by subroutine calls. Memory is
7607 invalidated by non-constant calls. */
7608
7afe21cc
RK
7609 if (GET_CODE (insn) == CALL_INSN)
7610 {
77fa0940 7611 if (! CONST_CALL_P (insn))
9ae8ffe7 7612 invalidate_memory ();
7afe21cc
RK
7613 invalidate_for_call ();
7614 }
7615
7616 /* Now invalidate everything set by this instruction.
7617 If a SUBREG or other funny destination is being set,
7618 sets[i].rtl is still nonzero, so here we invalidate the reg
7619 a part of which is being set. */
7620
7621 for (i = 0; i < n_sets; i++)
7622 if (sets[i].rtl)
7623 {
bb4034b3
JW
7624 /* We can't use the inner dest, because the mode associated with
7625 a ZERO_EXTRACT is significant. */
7626 register rtx dest = SET_DEST (sets[i].rtl);
7afe21cc
RK
7627
7628 /* Needed for registers to remove the register from its
7629 previous quantity's chain.
7630 Needed for memory if this is a nonvarying address, unless
7631 we have just done an invalidate_memory that covers even those. */
7632 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
9ae8ffe7 7633 || GET_CODE (dest) == MEM)
bb4034b3 7634 invalidate (dest, VOIDmode);
2708da92
RS
7635 else if (GET_CODE (dest) == STRICT_LOW_PART
7636 || GET_CODE (dest) == ZERO_EXTRACT)
bb4034b3 7637 invalidate (XEXP (dest, 0), GET_MODE (dest));
7afe21cc
RK
7638 }
7639
01e752d3
JL
7640 /* A volatile ASM invalidates everything. */
7641 if (GET_CODE (insn) == INSN
7642 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
7643 && MEM_VOLATILE_P (PATTERN (insn)))
7644 flush_hash_table ();
7645
7afe21cc
RK
7646 /* Make sure registers mentioned in destinations
7647 are safe for use in an expression to be inserted.
7648 This removes from the hash table
7649 any invalid entry that refers to one of these registers.
7650
7651 We don't care about the return value from mention_regs because
7652 we are going to hash the SET_DEST values unconditionally. */
7653
7654 for (i = 0; i < n_sets; i++)
34c73909
R
7655 {
7656 if (sets[i].rtl)
7657 {
7658 rtx x = SET_DEST (sets[i].rtl);
7659
7660 if (GET_CODE (x) != REG)
7661 mention_regs (x);
7662 else
7663 {
7664 /* We used to rely on all references to a register becoming
7665 inaccessible when a register changes to a new quantity,
7666 since that changes the hash code. However, that is not
7667 safe, since after NBUCKETS new quantities we get a
7668 hash 'collision' of a register with its own invalid
7669 entries. And since SUBREGs have been changed not to
7670 change their hash code with the hash code of the register,
7671 it wouldn't work any longer at all. So we have to check
7672 for any invalid references lying around now.
7673 This code is similar to the REG case in mention_regs,
7674 but it knows that reg_tick has been incremented, and
7675 it leaves reg_in_table as -1 . */
7676 register int regno = REGNO (x);
7677 register int endregno
7678 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
7679 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
7680 int i;
7681
7682 for (i = regno; i < endregno; i++)
7683 {
30f72379 7684 if (REG_IN_TABLE (i) >= 0)
34c73909
R
7685 {
7686 remove_invalid_refs (i);
30f72379 7687 REG_IN_TABLE (i) = -1;
34c73909
R
7688 }
7689 }
7690 }
7691 }
7692 }
7afe21cc
RK
7693
7694 /* We may have just removed some of the src_elt's from the hash table.
7695 So replace each one with the current head of the same class. */
7696
7697 for (i = 0; i < n_sets; i++)
7698 if (sets[i].rtl)
7699 {
7700 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
7701 /* If elt was removed, find current head of same class,
7702 or 0 if nothing remains of that class. */
7703 {
7704 register struct table_elt *elt = sets[i].src_elt;
7705
7706 while (elt && elt->prev_same_value)
7707 elt = elt->prev_same_value;
7708
7709 while (elt && elt->first_same_value == 0)
7710 elt = elt->next_same_value;
7711 sets[i].src_elt = elt ? elt->first_same_value : 0;
7712 }
7713 }
7714
7715 /* Now insert the destinations into their equivalence classes. */
7716
7717 for (i = 0; i < n_sets; i++)
7718 if (sets[i].rtl)
7719 {
7720 register rtx dest = SET_DEST (sets[i].rtl);
9de2c71a 7721 rtx inner_dest = sets[i].inner_dest;
7afe21cc
RK
7722 register struct table_elt *elt;
7723
7724 /* Don't record value if we are not supposed to risk allocating
7725 floating-point values in registers that might be wider than
7726 memory. */
7727 if ((flag_float_store
7728 && GET_CODE (dest) == MEM
cbf6a543 7729 && FLOAT_MODE_P (GET_MODE (dest)))
bc4ddc77
JW
7730 /* Don't record BLKmode values, because we don't know the
7731 size of it, and can't be sure that other BLKmode values
7732 have the same or smaller size. */
7733 || GET_MODE (dest) == BLKmode
7afe21cc
RK
7734 /* Don't record values of destinations set inside a libcall block
7735 since we might delete the libcall. Things should have been set
7736 up so we won't want to reuse such a value, but we play it safe
7737 here. */
7bd8b2a8 7738 || libcall_insn
7afe21cc
RK
7739 /* If we didn't put a REG_EQUAL value or a source into the hash
7740 table, there is no point is recording DEST. */
1a8e9a8e
RK
7741 || sets[i].src_elt == 0
7742 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
7743 or SIGN_EXTEND, don't record DEST since it can cause
7744 some tracking to be wrong.
7745
7746 ??? Think about this more later. */
7747 || (GET_CODE (dest) == SUBREG
7748 && (GET_MODE_SIZE (GET_MODE (dest))
7749 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7750 && (GET_CODE (sets[i].src) == SIGN_EXTEND
7751 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
7afe21cc
RK
7752 continue;
7753
7754 /* STRICT_LOW_PART isn't part of the value BEING set,
7755 and neither is the SUBREG inside it.
7756 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
7757 if (GET_CODE (dest) == STRICT_LOW_PART)
7758 dest = SUBREG_REG (XEXP (dest, 0));
7759
c610adec 7760 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
7afe21cc
RK
7761 /* Registers must also be inserted into chains for quantities. */
7762 if (insert_regs (dest, sets[i].src_elt, 1))
8ae2b8f6
JW
7763 {
7764 /* If `insert_regs' changes something, the hash code must be
7765 recalculated. */
7766 rehash_using_reg (dest);
7767 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
7768 }
7afe21cc 7769
9de2c71a
MM
7770 if (GET_CODE (inner_dest) == MEM
7771 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
7772 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
7773 that (MEM (ADDRESSOF (X))) is equivalent to Y.
7774 Consider the case in which the address of the MEM is
7775 passed to a function, which alters the MEM. Then, if we
7776 later use Y instead of the MEM we'll miss the update. */
7777 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
7778 else
7779 elt = insert (dest, sets[i].src_elt,
7780 sets[i].dest_hash, GET_MODE (dest));
7781
c256df0b 7782 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
9ad91d71
RK
7783 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
7784 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
7785 0))));
c256df0b 7786
7afe21cc
RK
7787 if (elt->in_memory)
7788 {
7789 /* This implicitly assumes a whole struct
7790 need not have MEM_IN_STRUCT_P.
7791 But a whole struct is *supposed* to have MEM_IN_STRUCT_P. */
7792 elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
7793 || sets[i].inner_dest != SET_DEST (sets[i].rtl));
7794 }
7795
fc3ffe83
RK
7796 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
7797 narrower than M2, and both M1 and M2 are the same number of words,
7798 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
7799 make that equivalence as well.
7afe21cc
RK
7800
7801 However, BAR may have equivalences for which gen_lowpart_if_possible
7802 will produce a simpler value than gen_lowpart_if_possible applied to
7803 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
7804 BAR's equivalences. If we don't get a simplified form, make
7805 the SUBREG. It will not be used in an equivalence, but will
7806 cause two similar assignments to be detected.
7807
7808 Note the loop below will find SUBREG_REG (DEST) since we have
7809 already entered SRC and DEST of the SET in the table. */
7810
7811 if (GET_CODE (dest) == SUBREG
6cdbaec4
RK
7812 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
7813 / UNITS_PER_WORD)
7814 == (GET_MODE_SIZE (GET_MODE (dest)) - 1)/ UNITS_PER_WORD)
7afe21cc
RK
7815 && (GET_MODE_SIZE (GET_MODE (dest))
7816 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7817 && sets[i].src_elt != 0)
7818 {
7819 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
7820 struct table_elt *elt, *classp = 0;
7821
7822 for (elt = sets[i].src_elt->first_same_value; elt;
7823 elt = elt->next_same_value)
7824 {
7825 rtx new_src = 0;
2197a88a 7826 unsigned src_hash;
7afe21cc
RK
7827 struct table_elt *src_elt;
7828
7829 /* Ignore invalid entries. */
7830 if (GET_CODE (elt->exp) != REG
7831 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7832 continue;
7833
7834 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
7835 if (new_src == 0)
38a448ca 7836 new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
7afe21cc
RK
7837
7838 src_hash = HASH (new_src, new_mode);
7839 src_elt = lookup (new_src, src_hash, new_mode);
7840
7841 /* Put the new source in the hash table is if isn't
7842 already. */
7843 if (src_elt == 0)
7844 {
7845 if (insert_regs (new_src, classp, 0))
8ae2b8f6
JW
7846 {
7847 rehash_using_reg (new_src);
7848 src_hash = HASH (new_src, new_mode);
7849 }
7afe21cc
RK
7850 src_elt = insert (new_src, classp, src_hash, new_mode);
7851 src_elt->in_memory = elt->in_memory;
7852 src_elt->in_struct = elt->in_struct;
7853 }
7854 else if (classp && classp != src_elt->first_same_value)
7855 /* Show that two things that we've seen before are
7856 actually the same. */
7857 merge_equiv_classes (src_elt, classp);
7858
7859 classp = src_elt->first_same_value;
da932f04
JL
7860 /* Ignore invalid entries. */
7861 while (classp
7862 && GET_CODE (classp->exp) != REG
7863 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
7864 classp = classp->next_same_value;
7afe21cc
RK
7865 }
7866 }
7867 }
7868
7869 /* Special handling for (set REG0 REG1)
7870 where REG0 is the "cheapest", cheaper than REG1.
7871 After cse, REG1 will probably not be used in the sequel,
7872 so (if easily done) change this insn to (set REG1 REG0) and
7873 replace REG1 with REG0 in the previous insn that computed their value.
7874 Then REG1 will become a dead store and won't cloud the situation
7875 for later optimizations.
7876
7877 Do not make this change if REG1 is a hard register, because it will
7878 then be used in the sequel and we may be changing a two-operand insn
7879 into a three-operand insn.
7880
50270076
R
7881 Also do not do this if we are operating on a copy of INSN.
7882
7883 Also don't do this if INSN ends a libcall; this would cause an unrelated
7884 register to be set in the middle of a libcall, and we then get bad code
7885 if the libcall is deleted. */
7afe21cc
RK
7886
7887 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
7888 && NEXT_INSN (PREV_INSN (insn)) == insn
7889 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
7890 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
7891 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
30f72379 7892 && (qty_first_reg[REG_QTY (REGNO (SET_SRC (sets[0].rtl)))]
50270076
R
7893 == REGNO (SET_DEST (sets[0].rtl)))
7894 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
7afe21cc
RK
7895 {
7896 rtx prev = PREV_INSN (insn);
7897 while (prev && GET_CODE (prev) == NOTE)
7898 prev = PREV_INSN (prev);
7899
7900 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
7901 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
7902 {
7903 rtx dest = SET_DEST (sets[0].rtl);
906c4e36 7904 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
7afe21cc
RK
7905
7906 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
7907 validate_change (insn, & SET_DEST (sets[0].rtl),
7908 SET_SRC (sets[0].rtl), 1);
7909 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
7910 apply_change_group ();
7911
7912 /* If REG1 was equivalent to a constant, REG0 is not. */
7913 if (note)
7914 PUT_REG_NOTE_KIND (note, REG_EQUAL);
7915
7916 /* If there was a REG_WAS_0 note on PREV, remove it. Move
7917 any REG_WAS_0 note on INSN to PREV. */
906c4e36 7918 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
7afe21cc
RK
7919 if (note)
7920 remove_note (prev, note);
7921
906c4e36 7922 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7afe21cc
RK
7923 if (note)
7924 {
7925 remove_note (insn, note);
7926 XEXP (note, 1) = REG_NOTES (prev);
7927 REG_NOTES (prev) = note;
7928 }
98369a0f
RK
7929
7930 /* If INSN has a REG_EQUAL note, and this note mentions REG0,
7931 then we must delete it, because the value in REG0 has changed. */
7932 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7933 if (note && reg_mentioned_p (dest, XEXP (note, 0)))
7934 remove_note (insn, note);
7afe21cc
RK
7935 }
7936 }
7937
7938 /* If this is a conditional jump insn, record any known equivalences due to
7939 the condition being tested. */
7940
7941 last_jump_equiv_class = 0;
7942 if (GET_CODE (insn) == JUMP_INSN
7943 && n_sets == 1 && GET_CODE (x) == SET
7944 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
7945 record_jump_equiv (insn, 0);
7946
7947#ifdef HAVE_cc0
7948 /* If the previous insn set CC0 and this insn no longer references CC0,
7949 delete the previous insn. Here we use the fact that nothing expects CC0
7950 to be valid over an insn, which is true until the final pass. */
7951 if (prev_insn && GET_CODE (prev_insn) == INSN
7952 && (tem = single_set (prev_insn)) != 0
7953 && SET_DEST (tem) == cc0_rtx
7954 && ! reg_mentioned_p (cc0_rtx, x))
7955 {
7956 PUT_CODE (prev_insn, NOTE);
7957 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
7958 NOTE_SOURCE_FILE (prev_insn) = 0;
7959 }
7960
7961 prev_insn_cc0 = this_insn_cc0;
7962 prev_insn_cc0_mode = this_insn_cc0_mode;
7963#endif
7964
7965 prev_insn = insn;
7966}
7967\f
a4c6502a 7968/* Remove from the hash table all expressions that reference memory. */
7afe21cc 7969static void
9ae8ffe7 7970invalidate_memory ()
7afe21cc 7971{
9ae8ffe7
JL
7972 register int i;
7973 register struct table_elt *p, *next;
7afe21cc 7974
9ae8ffe7
JL
7975 for (i = 0; i < NBUCKETS; i++)
7976 for (p = table[i]; p; p = next)
7977 {
7978 next = p->next_same_hash;
7979 if (p->in_memory)
7980 remove_from_table (p, i);
7981 }
7982}
7983
7984/* XXX ??? The name of this function bears little resemblance to
7985 what this function actually does. FIXME. */
7986static int
7987note_mem_written (addr)
7988 register rtx addr;
7989{
7990 /* Pushing or popping the stack invalidates just the stack pointer. */
7991 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7992 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7993 && GET_CODE (XEXP (addr, 0)) == REG
7994 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7afe21cc 7995 {
30f72379
MM
7996 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
7997 REG_TICK (STACK_POINTER_REGNUM)++;
9ae8ffe7
JL
7998
7999 /* This should be *very* rare. */
8000 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
8001 invalidate (stack_pointer_rtx, VOIDmode);
8002 return 1;
7afe21cc 8003 }
9ae8ffe7 8004 return 0;
7afe21cc
RK
8005}
8006
8007/* Perform invalidation on the basis of everything about an insn
8008 except for invalidating the actual places that are SET in it.
8009 This includes the places CLOBBERed, and anything that might
8010 alias with something that is SET or CLOBBERed.
8011
7afe21cc
RK
8012 X is the pattern of the insn. */
8013
8014static void
9ae8ffe7 8015invalidate_from_clobbers (x)
7afe21cc
RK
8016 rtx x;
8017{
7afe21cc
RK
8018 if (GET_CODE (x) == CLOBBER)
8019 {
8020 rtx ref = XEXP (x, 0);
9ae8ffe7
JL
8021 if (ref)
8022 {
8023 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
8024 || GET_CODE (ref) == MEM)
8025 invalidate (ref, VOIDmode);
8026 else if (GET_CODE (ref) == STRICT_LOW_PART
8027 || GET_CODE (ref) == ZERO_EXTRACT)
8028 invalidate (XEXP (ref, 0), GET_MODE (ref));
8029 }
7afe21cc
RK
8030 }
8031 else if (GET_CODE (x) == PARALLEL)
8032 {
8033 register int i;
8034 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
8035 {
8036 register rtx y = XVECEXP (x, 0, i);
8037 if (GET_CODE (y) == CLOBBER)
8038 {
8039 rtx ref = XEXP (y, 0);
9ae8ffe7
JL
8040 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
8041 || GET_CODE (ref) == MEM)
8042 invalidate (ref, VOIDmode);
8043 else if (GET_CODE (ref) == STRICT_LOW_PART
8044 || GET_CODE (ref) == ZERO_EXTRACT)
8045 invalidate (XEXP (ref, 0), GET_MODE (ref));
7afe21cc
RK
8046 }
8047 }
8048 }
8049}
8050\f
/* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
   and replace any registers in them with either an equivalent constant
   or the canonical form of the register.  If we are inside an address,
   only do this if the address remains valid.

   OBJECT is 0 except when within a MEM in which case it is the MEM.

   Return the replacement for X.  */

static rtx
cse_process_notes (x, object)
     rtx x;
     rtx object;
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i;

  switch (code)
    {
    /* Constants and other leaf rtx never need substitution.  */
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
    case PC:
    case CC0:
    case LO_SUM:
      return x;

    case MEM:
      /* Recurse into the address, passing X as OBJECT so replacements
	 are validated against the containing MEM.  */
      XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
      return x;

    case EXPR_LIST:
    case INSN_LIST:
      /* Only the value of a REG_EQUAL note is rewritten; other note
	 kinds are left alone, but the rest of the chain is processed.  */
      if (REG_NOTE_KIND (x) == REG_EQUAL)
	XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
      if (XEXP (x, 1))
	XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
      return x;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case SUBREG:
      {
	rtx new = cse_process_notes (XEXP (x, 0), object);
	/* We don't substitute VOIDmode constants into these rtx,
	   since they would impede folding.  */
	if (GET_MODE (new) != VOIDmode)
	  validate_change (object, &XEXP (x, 0), new, 0);
	return x;
      }

    case REG:
      i = REG_QTY (REGNO (x));

      /* Return a constant or a constant register.  */
      if (REGNO_QTY_VALID_P (REGNO (x))
	  && qty_const[i] != 0
	  && (CONSTANT_P (qty_const[i])
	      || GET_CODE (qty_const[i]) == REG))
	{
	  rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
	  if (new)
	    return new;
	}

      /* Otherwise, canonicalize this register.  */
      return canon_reg (x, NULL_RTX);

    default:
      break;
    }

  /* Generic case: recurse into every rtx operand in place.  */
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      validate_change (object, &XEXP (x, i),
		       cse_process_notes (XEXP (x, i), object), 0);

  return x;
}
8133\f
/* Find common subexpressions between the end test of a loop and the beginning
   of the loop.  LOOP_START is the CODE_LABEL at the start of a loop.

   Often we have a loop where an expression in the exit test is used
   in the body of the loop.  For example "while (*p) *q++ = *p++;".
   Because of the way we duplicate the loop exit test in front of the loop,
   however, we don't detect that common subexpression.  This will be caught
   when global cse is implemented, but this is a quite common case.

   This function handles the most common cases of these common expressions.
   It is called after we have processed the basic block ending with the
   NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
   jumps to a label used only once.  */

static void
cse_around_loop (loop_start)
     rtx loop_start;
{
  rtx insn;
  int i;
  struct table_elt *p;

  /* If the jump at the end of the loop doesn't go to the start, we don't
     do anything.  Scan back over line-number notes looking for the
     NOTE_INSN_LOOP_BEG that should immediately precede LOOP_START.  */
  for (insn = PREV_INSN (loop_start);
       insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
       insn = PREV_INSN (insn))
    ;

  if (insn == 0
      || GET_CODE (insn) != NOTE
      || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
    return;

  /* If the last insn of the loop (the end test) was an NE comparison,
     we will interpret it as an EQ comparison, since we fell through
     the loop.  Any equivalences resulting from that comparison are
     therefore not valid and must be invalidated.  */
  if (last_jump_equiv_class)
    for (p = last_jump_equiv_class->first_same_value; p;
	 p = p->next_same_value)
      {
	if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
	    || (GET_CODE (p->exp) == SUBREG
		&& GET_CODE (SUBREG_REG (p->exp)) == REG))
	  invalidate (p->exp, VOIDmode);
	else if (GET_CODE (p->exp) == STRICT_LOW_PART
		 || GET_CODE (p->exp) == ZERO_EXTRACT)
	  invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
      }

  /* Process insns starting after LOOP_START until we hit a CALL_INSN or
     a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).

     The only thing we do with SET_DEST is invalidate entries, so we
     can safely process each SET in order.  It is slightly less efficient
     to do so, but we only want to handle the most common cases.

     The gen_move_insn call in cse_set_around_loop may create new pseudos.
     These pseudos won't have valid entries in any of the tables indexed
     by register number, such as reg_qty.  We avoid out-of-range array
     accesses by not processing any instructions created after cse started.  */

  for (insn = NEXT_INSN (loop_start);
       GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
       && INSN_UID (insn) < max_insn_uid
       && ! (GET_CODE (insn) == NOTE
	     && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
       insn = NEXT_INSN (insn))
    {
      /* A plain SET or CLOBBER pattern is handled directly ...  */
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	  && (GET_CODE (PATTERN (insn)) == SET
	      || GET_CODE (PATTERN (insn)) == CLOBBER))
	cse_set_around_loop (PATTERN (insn), insn, loop_start);
      /* ... while each SET/CLOBBER inside a PARALLEL is handled
	 individually.  */
      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	       && GET_CODE (PATTERN (insn)) == PARALLEL)
	for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	  if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
	      || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
	    cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
				 loop_start);
    }
}
8217\f
8b3686ed
RK
8218/* Process one SET of an insn that was skipped. We ignore CLOBBERs
8219 since they are done elsewhere. This function is called via note_stores. */
8220
8221static void
8222invalidate_skipped_set (dest, set)
8223 rtx set;
8224 rtx dest;
8225{
9ae8ffe7
JL
8226 enum rtx_code code = GET_CODE (dest);
8227
8228 if (code == MEM
8229 && ! note_mem_written (dest) /* If this is not a stack push ... */
8230 /* There are times when an address can appear varying and be a PLUS
8231 during this scan when it would be a fixed address were we to know
8232 the proper equivalences. So invalidate all memory if there is
8233 a BLKmode or nonscalar memory reference or a reference to a
8234 variable address. */
8235 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
8236 || cse_rtx_varies_p (XEXP (dest, 0))))
8237 {
8238 invalidate_memory ();
8239 return;
8240 }
ffcf6393 8241
f47c02fa
RK
8242 if (GET_CODE (set) == CLOBBER
8243#ifdef HAVE_cc0
8244 || dest == cc0_rtx
8245#endif
8246 || dest == pc_rtx)
8247 return;
8248
9ae8ffe7 8249 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
bb4034b3 8250 invalidate (XEXP (dest, 0), GET_MODE (dest));
9ae8ffe7
JL
8251 else if (code == REG || code == SUBREG || code == MEM)
8252 invalidate (dest, VOIDmode);
8b3686ed
RK
8253}
8254
8255/* Invalidate all insns from START up to the end of the function or the
8256 next label. This called when we wish to CSE around a block that is
8257 conditionally executed. */
8258
8259static void
8260invalidate_skipped_block (start)
8261 rtx start;
8262{
8263 rtx insn;
8b3686ed
RK
8264
8265 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
8266 insn = NEXT_INSN (insn))
8267 {
8268 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
8269 continue;
8270
8b3686ed
RK
8271 if (GET_CODE (insn) == CALL_INSN)
8272 {
9ae8ffe7
JL
8273 if (! CONST_CALL_P (insn))
8274 invalidate_memory ();
8b3686ed 8275 invalidate_for_call ();
8b3686ed
RK
8276 }
8277
97577254 8278 invalidate_from_clobbers (PATTERN (insn));
8b3686ed 8279 note_stores (PATTERN (insn), invalidate_skipped_set);
8b3686ed
RK
8280 }
8281}
8282\f
7afe21cc
RK
8283/* Used for communication between the following two routines; contains a
8284 value to be checked for modification. */
8285
8286static rtx cse_check_loop_start_value;
8287
8288/* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
8289 indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0. */
8290
8291static void
8292cse_check_loop_start (x, set)
8293 rtx x;
d6f4ec51 8294 rtx set ATTRIBUTE_UNUSED;
7afe21cc
RK
8295{
8296 if (cse_check_loop_start_value == 0
8297 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
8298 return;
8299
8300 if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
8301 || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
8302 cse_check_loop_start_value = 0;
8303}
8304
/* X is a SET or CLOBBER contained in INSN that was found near the start of
   a loop that starts with the label at LOOP_START.

   If X is a SET, we see if its SET_SRC is currently in our hash table.
   If so, we see if it has a value equal to some register used only in the
   loop exit code (as marked by jump.c).

   If those two conditions are true, we search backwards from the start of
   the loop to see if that same value was loaded into a register that still
   retains its value at the start of the loop.

   If so, we insert an insn after the load to copy the destination of that
   load into the equivalent register and (try to) replace our SET_SRC with that
   register.

   In any event, we invalidate whatever this SET or CLOBBER modifies.  */

static void
cse_set_around_loop (x, insn, loop_start)
     rtx x;
     rtx insn;
     rtx loop_start;
{
  struct table_elt *src_elt;

  /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
     are setting PC or CC0 or whose SET_SRC is already a register.  */
  if (GET_CODE (x) == SET
      && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
      && GET_CODE (SET_SRC (x)) != REG)
    {
      src_elt = lookup (SET_SRC (x),
			HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
			GET_MODE (SET_DEST (x)));

      /* Scan the equivalence class of SET_SRC for a cheaper register
	 that jump.c marked as used only in the loop exit test.  */
      if (src_elt)
	for (src_elt = src_elt->first_same_value; src_elt;
	     src_elt = src_elt->next_same_value)
	  if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
	      && COST (src_elt->exp) < COST (SET_SRC (x)))
	    {
	      rtx p, set;

	      /* Look for an insn in front of LOOP_START that sets
		 something in the desired mode to SET_SRC (x) before we hit
		 a label or CALL_INSN.  */

	      for (p = prev_nonnote_insn (loop_start);
		   p && GET_CODE (p) != CALL_INSN
		   && GET_CODE (p) != CODE_LABEL;
		   p = prev_nonnote_insn (p))
		if ((set = single_set (p)) != 0
		    && GET_CODE (SET_DEST (set)) == REG
		    && GET_MODE (SET_DEST (set)) == src_elt->mode
		    && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
		  {
		    /* We now have to ensure that nothing between P
		       and LOOP_START modified anything referenced in
		       SET_SRC (x).  We know that nothing within the loop
		       can modify it, or we would have invalidated it in
		       the hash table.  */
		    rtx q;

		    /* cse_check_loop_start zeroes the tracked value if
		       any store between P and LOOP_START may clobber it.  */
		    cse_check_loop_start_value = SET_SRC (x);
		    for (q = p; q != loop_start; q = NEXT_INSN (q))
		      if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
			note_stores (PATTERN (q), cse_check_loop_start);

		    /* If nothing was changed and we can replace our
		       SET_SRC, add an insn after P to copy its destination
		       to what we will be replacing SET_SRC with.  */
		    if (cse_check_loop_start_value
			&& validate_change (insn, &SET_SRC (x),
					    src_elt->exp, 0))
		      {
			/* If this creates new pseudos, this is unsafe,
			   because the regno of new pseudo is unsuitable
			   to index into reg_qty when cse_insn processes
			   the new insn.  Therefore, if a new pseudo was
			   created, discard this optimization.  */
			int nregs = max_reg_num ();
			rtx move
			  = gen_move_insn (src_elt->exp, SET_DEST (set));
			if (nregs != max_reg_num ())
			  {
			    /* Undo the replacement made above; abort if
			       even the original SET_SRC won't go back.  */
			    if (! validate_change (insn, &SET_SRC (x),
						   SET_SRC (set), 0))
			      abort ();
			  }
			else
			  emit_insn_after (move, p);
		      }
		    break;
		  }
	    }
    }

  /* Now invalidate anything modified by X.  */
  note_mem_written (SET_DEST (x));

  /* See comment on similar code in cse_insn for explanation of these tests.  */
  if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
      || GET_CODE (SET_DEST (x)) == MEM)
    invalidate (SET_DEST (x), VOIDmode);
  else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
	   || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
    invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
}
8413\f
8414/* Find the end of INSN's basic block and return its range,
8415 the total number of SETs in all the insns of the block, the last insn of the
8416 block, and the branch path.
8417
8418 The branch path indicates which branches should be followed. If a non-zero
8419 path size is specified, the block should be rescanned and a different set
8420 of branches will be taken. The branch path is only used if
8b3686ed 8421 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
7afe21cc
RK
8422
8423 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
8424 used to describe the block. It is filled in with the information about
8425 the current block. The incoming structure's branch path, if any, is used
8426 to construct the output branch path. */
8427
7afe21cc 8428void
8b3686ed 8429cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
7afe21cc
RK
8430 rtx insn;
8431 struct cse_basic_block_data *data;
8432 int follow_jumps;
8433 int after_loop;
8b3686ed 8434 int skip_blocks;
7afe21cc
RK
8435{
8436 rtx p = insn, q;
8437 int nsets = 0;
8438 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
fc3ffe83 8439 rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
7afe21cc
RK
8440 int path_size = data->path_size;
8441 int path_entry = 0;
8442 int i;
8443
8444 /* Update the previous branch path, if any. If the last branch was
8445 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
8446 shorten the path by one and look at the previous branch. We know that
8447 at least one branch must have been taken if PATH_SIZE is non-zero. */
8448 while (path_size > 0)
8449 {
8b3686ed 8450 if (data->path[path_size - 1].status != NOT_TAKEN)
7afe21cc
RK
8451 {
8452 data->path[path_size - 1].status = NOT_TAKEN;
8453 break;
8454 }
8455 else
8456 path_size--;
8457 }
8458
8459 /* Scan to end of this basic block. */
8460 while (p && GET_CODE (p) != CODE_LABEL)
8461 {
8462 /* Don't cse out the end of a loop. This makes a difference
8463 only for the unusual loops that always execute at least once;
8464 all other loops have labels there so we will stop in any case.
8465 Cse'ing out the end of the loop is dangerous because it
8466 might cause an invariant expression inside the loop
8467 to be reused after the end of the loop. This would make it
8468 hard to move the expression out of the loop in loop.c,
8469 especially if it is one of several equivalent expressions
8470 and loop.c would like to eliminate it.
8471
8472 If we are running after loop.c has finished, we can ignore
8473 the NOTE_INSN_LOOP_END. */
8474
8475 if (! after_loop && GET_CODE (p) == NOTE
8476 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
8477 break;
8478
8479 /* Don't cse over a call to setjmp; on some machines (eg vax)
8480 the regs restored by the longjmp come from
8481 a later time than the setjmp. */
8482 if (GET_CODE (p) == NOTE
8483 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
8484 break;
8485
8486 /* A PARALLEL can have lots of SETs in it,
8487 especially if it is really an ASM_OPERANDS. */
8488 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
8489 && GET_CODE (PATTERN (p)) == PARALLEL)
8490 nsets += XVECLEN (PATTERN (p), 0);
8491 else if (GET_CODE (p) != NOTE)
8492 nsets += 1;
8493
164c8956
RK
8494 /* Ignore insns made by CSE; they cannot affect the boundaries of
8495 the basic block. */
8496
8497 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
8b3686ed 8498 high_cuid = INSN_CUID (p);
164c8956
RK
8499 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
8500 low_cuid = INSN_CUID (p);
7afe21cc
RK
8501
8502 /* See if this insn is in our branch path. If it is and we are to
8503 take it, do so. */
8504 if (path_entry < path_size && data->path[path_entry].branch == p)
8505 {
8b3686ed 8506 if (data->path[path_entry].status != NOT_TAKEN)
7afe21cc
RK
8507 p = JUMP_LABEL (p);
8508
8509 /* Point to next entry in path, if any. */
8510 path_entry++;
8511 }
8512
8513 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
8514 was specified, we haven't reached our maximum path length, there are
8515 insns following the target of the jump, this is the only use of the
8b3686ed
RK
8516 jump label, and the target label is preceded by a BARRIER.
8517
8518 Alternatively, we can follow the jump if it branches around a
8519 block of code and there are no other branches into the block.
8520 In this case invalidate_skipped_block will be called to invalidate any
8521 registers set in the block when following the jump. */
8522
8523 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
7afe21cc
RK
8524 && GET_CODE (p) == JUMP_INSN
8525 && GET_CODE (PATTERN (p)) == SET
8526 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
85c3ba60 8527 && JUMP_LABEL (p) != 0
7afe21cc
RK
8528 && LABEL_NUSES (JUMP_LABEL (p)) == 1
8529 && NEXT_INSN (JUMP_LABEL (p)) != 0)
8530 {
/* Scan backward from the jump target for the first significant insn:
   a real insn, a LOOP_END/SETJMP note, or a label that is actually
   used.  What Q ends up being decides which case applies below.  */
8531 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
8532 if ((GET_CODE (q) != NOTE
8533 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
8534 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
8535 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
8536 break;
8537
8538 /* If we ran into a BARRIER, this code is an extension of the
8539 basic block when the branch is taken. */
8b3686ed 8540 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
7afe21cc
RK
8541 {
8542 /* Don't allow ourself to keep walking around an
8543 always-executed loop. */
fc3ffe83
RK
8544 if (next_real_insn (q) == next)
8545 {
8546 p = NEXT_INSN (p);
8547 continue;
8548 }
7afe21cc
RK
8549
8550 /* Similarly, don't put a branch in our path more than once. */
8551 for (i = 0; i < path_entry; i++)
8552 if (data->path[i].branch == p)
8553 break;
8554
8555 if (i != path_entry)
8556 break;
8557
8558 data->path[path_entry].branch = p;
8559 data->path[path_entry++].status = TAKEN;
8560
8561 /* This branch now ends our path. It was possible that we
8562 didn't see this branch the last time around (when the
8563 insn in front of the target was a JUMP_INSN that was
8564 turned into a no-op). */
8565 path_size = path_entry;
8566
8567 p = JUMP_LABEL (p);
8568 /* Mark block so we won't scan it again later. */
8569 PUT_MODE (NEXT_INSN (p), QImode);
8570 }
8b3686ed
RK
8571 /* Detect a branch around a block of code. */
8572 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
8573 {
8574 register rtx tmp;
8575
fc3ffe83
RK
8576 if (next_real_insn (q) == next)
8577 {
8578 p = NEXT_INSN (p);
8579 continue;
8580 }
8b3686ed
RK
8581
8582 for (i = 0; i < path_entry; i++)
8583 if (data->path[i].branch == p)
8584 break;
8585
8586 if (i != path_entry)
8587 break;
8588
8589 /* This is no_labels_between_p (p, q) with an added check for
8590 reaching the end of a function (in case Q precedes P). */
8591 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
8592 if (GET_CODE (tmp) == CODE_LABEL)
8593 break;
8594
8595 if (tmp == q)
8596 {
8597 data->path[path_entry].branch = p;
8598 data->path[path_entry++].status = AROUND;
8599
8600 path_size = path_entry;
8601
8602 p = JUMP_LABEL (p);
8603 /* Mark block so we won't scan it again later. */
8604 PUT_MODE (NEXT_INSN (p), QImode);
8605 }
8606 }
7afe21cc 8607 }
7afe21cc
RK
8608 p = NEXT_INSN (p);
8609 }
8610
8611 data->low_cuid = low_cuid;
8612 data->high_cuid = high_cuid;
8613 data->nsets = nsets;
8614 data->last = p;
8615
8616 /* If all jumps in the path are not taken, set our path length to zero
8617 so a rescan won't be done. */
8618 for (i = path_size - 1; i >= 0; i--)
8b3686ed 8619 if (data->path[i].status != NOT_TAKEN)
7afe21cc
RK
8620 break;
8621
8622 if (i == -1)
8623 data->path_size = 0;
8624 else
8625 data->path_size = path_size;
8626
8627 /* End the current branch path. */
8628 data->path[path_size].branch = 0;
8629}
8630\f
7afe21cc
RK
8631/* Perform cse on the instructions of a function.
8632 F is the first instruction.
8633 NREGS is one plus the highest pseudo-reg number used in the instruction.
8634
8635 AFTER_LOOP is 1 if this is the cse call done after loop optimization
8636 (only if -frerun-cse-after-loop).
8637
8638 Returns 1 if jump_optimize should be redone due to simplifications
8639 in conditional jump instructions. */
8640
8641int
8642cse_main (f, nregs, after_loop, file)
8643 rtx f;
8644 int nregs;
8645 int after_loop;
8646 FILE *file;
8647{
8648 struct cse_basic_block_data val;
8649 register rtx insn = f;
8650 register int i;
8651
8652 cse_jumps_altered = 0;
a5dfb4ee 8653 recorded_label_ref = 0;
7afe21cc
RK
8654 constant_pool_entries_cost = 0;
8655 val.path_size = 0;
8656
8657 init_recog ();
9ae8ffe7 8658 init_alias_analysis ();
7afe21cc
RK
8659
8660 max_reg = nregs;
8661
556c714b
JW
8662 max_insn_uid = get_max_uid ();
8663
/* These equivalence-chain arrays live on the stack (alloca) for the
   duration of this pass only.  */
7afe21cc
RK
8664 reg_next_eqv = (int *) alloca (nregs * sizeof (int));
8665 reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
7afe21cc 8666
7bac1be0
RK
8667#ifdef LOAD_EXTEND_OP
8668
8669 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
8670 and change the code and mode as appropriate. */
38a448ca 8671 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
7bac1be0
RK
8672#endif
8673
7afe21cc
RK
8674 /* Discard all the free elements of the previous function
8675 since they are allocated in the temporarily obstack. */
4c9a05bc 8676 bzero ((char *) table, sizeof table);
7afe21cc
RK
8677 free_element_chain = 0;
8678 n_elements_made = 0;
8679
8680 /* Find the largest uid. */
8681
164c8956
RK
8682 max_uid = get_max_uid ();
8683 uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
4c9a05bc 8684 bzero ((char *) uid_cuid, (max_uid + 1) * sizeof (int));
7afe21cc
RK
8685
8686 /* Compute the mapping from uids to cuids.
8687 CUIDs are numbers assigned to insns, like uids,
8688 except that cuids increase monotonically through the code.
8689 Don't assign cuids to line-number NOTEs, so that the distance in cuids
8690 between two insns is not affected by -g. */
8691
8692 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
8693 {
8694 if (GET_CODE (insn) != NOTE
8695 || NOTE_LINE_NUMBER (insn) < 0)
8696 INSN_CUID (insn) = ++i;
8697 else
8698 /* Give a line number note the same cuid as preceding insn. */
8699 INSN_CUID (insn) = i;
8700 }
8701
8702 /* Initialize which registers are clobbered by calls. */
8703
8704 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
8705
8706 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8707 if ((call_used_regs[i]
8708 /* Used to check !fixed_regs[i] here, but that isn't safe;
8709 fixed regs are still call-clobbered, and sched can get
8710 confused if they can "live across calls".
8711
8712 The frame pointer is always preserved across calls. The arg
8713 pointer is if it is fixed. The stack pointer usually is, unless
8714 RETURN_POPS_ARGS, in which case an explicit CLOBBER
8715 will be present. If we are generating PIC code, the PIC offset
8716 table register is preserved across calls. */
8717
8718 && i != STACK_POINTER_REGNUM
8719 && i != FRAME_POINTER_REGNUM
8bc169f2
DE
8720#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
8721 && i != HARD_FRAME_POINTER_REGNUM
8722#endif
7afe21cc
RK
8723#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
8724 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
8725#endif
be8fe470 8726#if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
7afe21cc
RK
8727 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
8728#endif
8729 )
8730 || global_regs[i])
8731 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
8732
8733 /* Loop over basic blocks.
8734 Compute the maximum number of qty's needed for each basic block
8735 (which is 2 for each SET). */
8736 insn = f;
8737 while (insn)
8738 {
8b3686ed
RK
8739 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
8740 flag_cse_skip_blocks);
7afe21cc
RK
8741
8742 /* If this basic block was already processed or has no sets, skip it. */
/* QImode on an insn is the "already scanned" marker set by
   cse_end_of_basic_block; clear it back to VOIDmode here.  */
8743 if (val.nsets == 0 || GET_MODE (insn) == QImode)
8744 {
8745 PUT_MODE (insn, VOIDmode);
8746 insn = (val.last ? NEXT_INSN (val.last) : 0);
8747 val.path_size = 0;
8748 continue;
8749 }
8750
8751 cse_basic_block_start = val.low_cuid;
8752 cse_basic_block_end = val.high_cuid;
8753 max_qty = val.nsets * 2;
8754
8755 if (file)
ab87f8c8 8756 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
7afe21cc
RK
8757 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
8758 val.nsets);
8759
8760 /* Make MAX_QTY bigger to give us room to optimize
8761 past the end of this basic block, if that should prove useful. */
8762 if (max_qty < 500)
8763 max_qty = 500;
8764
8765 max_qty += max_reg;
8766
8767 /* If this basic block is being extended by following certain jumps,
8768 (see `cse_end_of_basic_block'), we reprocess the code from the start.
8769 Otherwise, we start after this basic block. */
8770 if (val.path_size > 0)
8771 cse_basic_block (insn, val.last, val.path, 0);
8772 else
8773 {
8774 int old_cse_jumps_altered = cse_jumps_altered;
8775 rtx temp;
8776
8777 /* When cse changes a conditional jump to an unconditional
8778 jump, we want to reprocess the block, since it will give
8779 us a new branch path to investigate. */
8780 cse_jumps_altered = 0;
8781 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
8b3686ed
RK
8782 if (cse_jumps_altered == 0
8783 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7afe21cc
RK
8784 insn = temp;
8785
8786 cse_jumps_altered |= old_cse_jumps_altered;
8787 }
8788
8789#ifdef USE_C_ALLOCA
8790 alloca (0);
8791#endif
8792 }
8793
8794 /* Tell refers_to_mem_p that qty_const info is not available. */
8795 qty_const = 0;
8796
8797 if (max_elements_made < n_elements_made)
8798 max_elements_made = n_elements_made;
8799
a5dfb4ee 8800 return cse_jumps_altered || recorded_label_ref;
7afe21cc
RK
8801}
8802
8803/* Process a single basic block. FROM and TO and the limits of the basic
8804 block. NEXT_BRANCH points to the branch path when following jumps or
8805 a null path when not following jumps.
8806
8807 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
8808 loop. This is true when we are being called for the last time on a
8809 block and this CSE pass is before loop.c. */
8810
8811static rtx
8812cse_basic_block (from, to, next_branch, around_loop)
8813 register rtx from, to;
8814 struct branch_path *next_branch;
8815 int around_loop;
8816{
8817 register rtx insn;
8818 int to_usage = 0;
7bd8b2a8 8819 rtx libcall_insn = NULL_RTX;
e9a25f70 8820 int num_insns = 0;
7afe21cc
RK
8821
8822 /* Each of these arrays is undefined before max_reg, so only allocate
8823 the space actually needed and adjust the start below. */
8824
8825 qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8826 qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
c5c76735
JL
8827 qty_mode = (enum machine_mode *) alloca ((max_qty - max_reg)
8828 * sizeof (enum machine_mode));
7afe21cc
RK
8829 qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8830 qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8831 qty_comparison_code
8832 = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
8833 qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8834 qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8835
/* Bias each base pointer down by max_reg so the arrays can be indexed
   directly by quantity number (quantities start at max_reg).  */
8836 qty_first_reg -= max_reg;
8837 qty_last_reg -= max_reg;
8838 qty_mode -= max_reg;
8839 qty_const -= max_reg;
8840 qty_const_insn -= max_reg;
8841 qty_comparison_code -= max_reg;
8842 qty_comparison_qty -= max_reg;
8843 qty_comparison_const -= max_reg;
8844
8845 new_basic_block ();
8846
8847 /* TO might be a label. If so, protect it from being deleted. */
8848 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8849 ++LABEL_NUSES (to);
8850
8851 for (insn = from; insn != to; insn = NEXT_INSN (insn))
8852 {
1d22a2c1 8853 register enum rtx_code code = GET_CODE (insn);
e9a25f70 8854
1d22a2c1
MM
8855 /* If we have processed 1,000 insns, flush the hash table to
8856 avoid extreme quadratic behavior. We must not include NOTEs
8857 in the count since there may be more or them when generating
8858 debugging information. If we clear the table at different
8859 times, code generated with -g -O might be different than code
8860 generated with -O but not -g.
e9a25f70
JL
8861
8862 ??? This is a real kludge and needs to be done some other way.
8863 Perhaps for 2.9. */
1d22a2c1 8864 if (code != NOTE && num_insns++ > 1000)
e9a25f70 8865 {
01e752d3 8866 flush_hash_table ();
e9a25f70
JL
8867 num_insns = 0;
8868 }
7afe21cc
RK
8869
8870 /* See if this is a branch that is part of the path. If so, and it is
8871 to be taken, do so. */
8872 if (next_branch->branch == insn)
8873 {
8b3686ed
RK
8874 enum taken status = next_branch++->status;
8875 if (status != NOT_TAKEN)
7afe21cc 8876 {
8b3686ed
RK
8877 if (status == TAKEN)
8878 record_jump_equiv (insn, 1);
8879 else
/* AROUND: the skipped block's register sets must be invalidated.  */
8880 invalidate_skipped_block (NEXT_INSN (insn));
8881
7afe21cc
RK
8882 /* Set the last insn as the jump insn; it doesn't affect cc0.
8883 Then follow this branch. */
8884#ifdef HAVE_cc0
8885 prev_insn_cc0 = 0;
8886#endif
8887 prev_insn = insn;
8888 insn = JUMP_LABEL (insn);
8889 continue;
8890 }
8891 }
8892
7afe21cc
RK
8893 if (GET_MODE (insn) == QImode)
8894 PUT_MODE (insn, VOIDmode);
8895
8896 if (GET_RTX_CLASS (code) == 'i')
8897 {
7bd8b2a8
JL
8898 rtx p;
8899
7afe21cc
RK
8900 /* Process notes first so we have all notes in canonical forms when
8901 looking for duplicate operations. */
8902
8903 if (REG_NOTES (insn))
906c4e36 8904 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7afe21cc
RK
8905
8906 /* Track when we are inside in LIBCALL block. Inside such a block,
8907 we do not want to record destinations. The last insn of a
8908 LIBCALL block is not considered to be part of the block, since
830a38ee 8909 its destination is the result of the block and hence should be
7afe21cc
RK
8910 recorded. */
8911
63be02db 8912 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7bd8b2a8 8913 libcall_insn = XEXP (p, 0);
906c4e36 8914 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7bd8b2a8 8915 libcall_insn = NULL_RTX;
7afe21cc 8916
7bd8b2a8 8917 cse_insn (insn, libcall_insn);
7afe21cc
RK
8918 }
8919
8920 /* If INSN is now an unconditional jump, skip to the end of our
8921 basic block by pretending that we just did the last insn in the
8922 basic block. If we are jumping to the end of our block, show
8923 that we can have one usage of TO. */
8924
8925 if (simplejump_p (insn))
8926 {
8927 if (to == 0)
8928 return 0;
8929
8930 if (JUMP_LABEL (insn) == to)
8931 to_usage = 1;
8932
6a5293dc
RS
8933 /* Maybe TO was deleted because the jump is unconditional.
8934 If so, there is nothing left in this basic block. */
8935 /* ??? Perhaps it would be smarter to set TO
8936 to whatever follows this insn,
8937 and pretend the basic block had always ended here. */
8938 if (INSN_DELETED_P (to))
8939 break;
8940
7afe21cc
RK
8941 insn = PREV_INSN (to);
8942 }
8943
8944 /* See if it is ok to keep on going past the label
8945 which used to end our basic block. Remember that we incremented
d45cf215 8946 the count of that label, so we decrement it here. If we made
7afe21cc
RK
8947 a jump unconditional, TO_USAGE will be one; in that case, we don't
8948 want to count the use in that jump. */
8949
8950 if (to != 0 && NEXT_INSN (insn) == to
8951 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
8952 {
8953 struct cse_basic_block_data val;
146135d6 8954 rtx prev;
7afe21cc
RK
8955
8956 insn = NEXT_INSN (to);
8957
8958 if (LABEL_NUSES (to) == 0)
146135d6 8959 insn = delete_insn (to);
7afe21cc 8960
146135d6
RK
8961 /* If TO was the last insn in the function, we are done. */
8962 if (insn == 0)
7afe21cc
RK
8963 return 0;
8964
146135d6
RK
8965 /* If TO was preceded by a BARRIER we are done with this block
8966 because it has no continuation. */
8967 prev = prev_nonnote_insn (to);
8968 if (prev && GET_CODE (prev) == BARRIER)
8969 return insn;
8970
8971 /* Find the end of the following block. Note that we won't be
8972 following branches in this case. */
7afe21cc
RK
8973 to_usage = 0;
8974 val.path_size = 0;
8b3686ed 8975 cse_end_of_basic_block (insn, &val, 0, 0, 0);
7afe21cc
RK
8976
8977 /* If the tables we allocated have enough space left
8978 to handle all the SETs in the next basic block,
8979 continue through it. Otherwise, return,
8980 and that block will be scanned individually. */
8981 if (val.nsets * 2 + next_qty > max_qty)
8982 break;
8983
8984 cse_basic_block_start = val.low_cuid;
8985 cse_basic_block_end = val.high_cuid;
8986 to = val.last;
8987
8988 /* Prevent TO from being deleted if it is a label. */
8989 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8990 ++LABEL_NUSES (to);
8991
8992 /* Back up so we process the first insn in the extension. */
8993 insn = PREV_INSN (insn);
8994 }
8995 }
8996
8997 if (next_qty > max_qty)
8998 abort ();
8999
9000 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
9001 the previous insn is the only insn that branches to the head of a loop,
9002 we can cse into the loop. Don't do this if we changed the jump
9003 structure of a loop unless we aren't going to be following jumps. */
9004
8b3686ed
RK
9005 if ((cse_jumps_altered == 0
9006 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7afe21cc
RK
9007 && around_loop && to != 0
9008 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
9009 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
9010 && JUMP_LABEL (PREV_INSN (to)) != 0
9011 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1
9012 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
9013
9014 return to ? NEXT_INSN (to) : 0;
9015}
9016\f
9017/* Count the number of times registers are used (not set) in X.
9018 COUNTS is an array in which we accumulate the count, INCR is how much
79644f06
RK
9019 we count each register usage.
9020
9021 Don't count a usage of DEST, which is the SET_DEST of a SET which
9022 contains X in its SET_SRC. This is because such a SET does not
9023 modify the liveness of DEST. */
7afe21cc
RK
9024
9025static void
79644f06 9026count_reg_usage (x, counts, dest, incr)
7afe21cc
RK
9027 rtx x;
9028 int *counts;
79644f06 9029 rtx dest;
7afe21cc
RK
9030 int incr;
9031{
f1e7c95f 9032 enum rtx_code code;
6f7d635c 9033 const char *fmt;
7afe21cc
RK
9034 int i, j;
9035
f1e7c95f
RK
9036 if (x == 0)
9037 return;
9038
9039 switch (code = GET_CODE (x))
7afe21cc
RK
9040 {
9041 case REG:
/* Skip DEST itself: a SET of DEST does not make DEST live (see the
   function's block comment above).  */
79644f06
RK
9042 if (x != dest)
9043 counts[REGNO (x)] += incr;
7afe21cc
RK
9044 return;
9045
9046 case PC:
9047 case CC0:
9048 case CONST:
9049 case CONST_INT:
9050 case CONST_DOUBLE:
9051 case SYMBOL_REF:
9052 case LABEL_REF:
02e39abc
JL
9053 return;
9054
9055 case CLOBBER:
9056 /* If we are clobbering a MEM, mark any registers inside the address
9057 as being used. */
9058 if (GET_CODE (XEXP (x, 0)) == MEM)
9059 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7afe21cc
RK
9060 return;
9061
9062 case SET:
9063 /* Unless we are setting a REG, count everything in SET_DEST. */
9064 if (GET_CODE (SET_DEST (x)) != REG)
79644f06 9065 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
9ff08f70
RK
9066
9067 /* If SRC has side-effects, then we can't delete this insn, so the
9068 usage of SET_DEST inside SRC counts.
9069
9070 ??? Strictly-speaking, we might be preserving this insn
9071 because some other SET has side-effects, but that's hard
9072 to do and can't happen now. */
9073 count_reg_usage (SET_SRC (x), counts,
9074 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
9075 incr);
7afe21cc
RK
9076 return;
9077
f1e7c95f
RK
9078 case CALL_INSN:
9079 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
9080
9081 /* ... falls through ... */
7afe21cc
RK
9082 case INSN:
9083 case JUMP_INSN:
79644f06 9084 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
7afe21cc
RK
9085
9086 /* Things used in a REG_EQUAL note aren't dead since loop may try to
9087 use them. */
9088
f1e7c95f 9089 count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
7afe21cc
RK
9090 return;
9091
9092 case EXPR_LIST:
9093 case INSN_LIST:
f1e7c95f 9094 if (REG_NOTE_KIND (x) == REG_EQUAL
c6a26dc4 9095 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE))
79644f06 9096 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
/* XEXP (x, 1) is the next note in the chain; walk the whole list.  */
f1e7c95f 9097 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
7afe21cc 9098 return;
e9a25f70
JL
9099
9100 default:
9101 break;
7afe21cc
RK
9102 }
9103
/* Generic case: recurse over X's operands using its rtx format string
   ('e' = expression operand, 'E' = vector of expressions).  */
9104 fmt = GET_RTX_FORMAT (code);
9105 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9106 {
9107 if (fmt[i] == 'e')
79644f06 9108 count_reg_usage (XEXP (x, i), counts, dest, incr);
7afe21cc
RK
9109 else if (fmt[i] == 'E')
9110 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
79644f06 9111 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
7afe21cc
RK
9112 }
9113}
9114\f
9115/* Scan all the insns and delete any that are dead; i.e., they store a register
9116 that is never used or they copy a register to itself.
9117
c6a26dc4
JL
9118 This is used to remove insns made obviously dead by cse, loop or other
9119 optimizations. It improves the heuristics in loop since it won't try to
9120 move dead invariants out of loops or make givs for dead quantities. The
9121 remaining passes of the compilation are also sped up. */
7afe21cc
RK
9122
9123void
c6a26dc4 9124delete_trivially_dead_insns (insns, nreg)
7afe21cc
RK
9125 rtx insns;
9126 int nreg;
9127{
9128 int *counts = (int *) alloca (nreg * sizeof (int));
77fa0940 9129 rtx insn, prev;
51723711 9130#ifdef HAVE_cc0
d45cf215 9131 rtx tem;
51723711 9132#endif
7afe21cc 9133 int i;
614bb5d4 9134 int in_libcall = 0, dead_libcall = 0;
7afe21cc
RK
9135
9136 /* First count the number of times each register is used. */
4c9a05bc 9137 bzero ((char *) counts, sizeof (int) * nreg);
7afe21cc 9138 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
79644f06 9139 count_reg_usage (insn, counts, NULL_RTX, 1);
7afe21cc
RK
9140
9141 /* Go from the last insn to the first and delete insns that only set unused
9142 registers or copy a register to itself. As we delete an insn, remove
8d71a510
JL
9143 usage counts for registers it uses.
9144
9145 The first jump optimization pass may leave a real insn as the last
9146 insn in the function. We must not skip that insn or we may end
9147 up deleting code that is not really dead. */
9148 insn = get_last_insn ();
9149 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
9150 insn = prev_real_insn (insn);
9151
9152 for ( ; insn; insn = prev)
7afe21cc
RK
9153 {
9154 int live_insn = 0;
614bb5d4 9155 rtx note;
7afe21cc 9156
/* Compute PREV now: INSN may be deleted before the loop iterates.  */
77fa0940
RK
9157 prev = prev_real_insn (insn);
9158
614bb5d4
JL
9159 /* Don't delete any insns that are part of a libcall block unless
9160 we can delete the whole libcall block.
9161
77fa0940
RK
9162 Flow or loop might get confused if we did that. Remember
9163 that we are scanning backwards. */
906c4e36 9164 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
614bb5d4
JL
9165 {
9166 in_libcall = 1;
9167 live_insn = 1;
9168 dead_libcall = 0;
e4890d45 9169
614bb5d4
JL
9170 /* See if there's a REG_EQUAL note on this insn and try to
9171 replace the source with the REG_EQUAL expression.
9172
9173 We assume that insns with REG_RETVALs can only be reg->reg
9174 copies at this point. */
9175 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
9176 if (note)
9177 {
9178 rtx set = single_set (insn);
9179 if (set
9180 && validate_change (insn, &SET_SRC (set), XEXP (note, 0), 0))
9181 {
9182 remove_note (insn,
9183 find_reg_note (insn, REG_RETVAL, NULL_RTX));
/* The copy no longer depends on the libcall body, so the whole
   libcall block (scanned next, going backwards) is deletable.  */
9184 dead_libcall = 1;
9185 }
9186 }
9187 }
9188 else if (in_libcall)
9189 live_insn = ! dead_libcall;
e4890d45 9190 else if (GET_CODE (PATTERN (insn)) == SET)
7afe21cc
RK
9191 {
/* A no-op move (reg = same reg) is trivially dead.  */
9192 if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
9193 && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
9194 ;
9195
d45cf215
RS
9196#ifdef HAVE_cc0
9197 else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
9198 && ! side_effects_p (SET_SRC (PATTERN (insn)))
9199 && ((tem = next_nonnote_insn (insn)) == 0
9200 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
9201 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
9202 ;
9203#endif
7afe21cc
RK
9204 else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
9205 || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
9206 || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
61c48fbf
JL
9207 || side_effects_p (SET_SRC (PATTERN (insn)))
9208 /* An ADDRESSOF expression can turn into a use of the
9209 internal arg pointer, so always consider the
9210 internal arg pointer live. If it is truly dead,
9211 flow will delete the initializing insn. */
9212 || (SET_DEST (PATTERN (insn))
9213 == current_function_internal_arg_pointer))
7afe21cc
RK
9214 live_insn = 1;
9215 }
9216 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
/* A PARALLEL is dead only if every SET in it is dead and everything
   else is a CLOBBER or USE.  */
9217 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
9218 {
9219 rtx elt = XVECEXP (PATTERN (insn), 0, i);
9220
9221 if (GET_CODE (elt) == SET)
9222 {
9223 if (GET_CODE (SET_DEST (elt)) == REG
9224 && SET_DEST (elt) == SET_SRC (elt))
9225 ;
9226
d45cf215
RS
9227#ifdef HAVE_cc0
9228 else if (GET_CODE (SET_DEST (elt)) == CC0
9229 && ! side_effects_p (SET_SRC (elt))
9230 && ((tem = next_nonnote_insn (insn)) == 0
9231 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
9232 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
9233 ;
9234#endif
7afe21cc
RK
9235 else if (GET_CODE (SET_DEST (elt)) != REG
9236 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
9237 || counts[REGNO (SET_DEST (elt))] != 0
af37f0dd
JL
9238 || side_effects_p (SET_SRC (elt))
9239 /* An ADDRESSOF expression can turn into a use of the
9240 internal arg pointer, so always consider the
9241 internal arg pointer live. If it is truly dead,
9242 flow will delete the initializing insn. */
9243 || (SET_DEST (elt)
9244 == current_function_internal_arg_pointer))
7afe21cc
RK
9245 live_insn = 1;
9246 }
9247 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
9248 live_insn = 1;
9249 }
9250 else
9251 live_insn = 1;
9252
9253 /* If this is a dead insn, delete it and show registers in it aren't
e4890d45 9254 being used. */
7afe21cc 9255
e4890d45 9256 if (! live_insn)
7afe21cc 9257 {
79644f06 9258 count_reg_usage (insn, counts, NULL_RTX, -1);
77fa0940 9259 delete_insn (insn);
7afe21cc 9260 }
e4890d45 9261
/* REG_LIBCALL marks the first insn of a libcall block; since we scan
   backwards, reaching it means we are leaving the block.  */
906c4e36 9262 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
614bb5d4
JL
9263 {
9264 in_libcall = 0;
9265 dead_libcall = 0;
9266 }
7afe21cc
RK
9267 }
9268}