/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include "config.h"
/* Must precede rtl.h for FFS.  */
#include <stdio.h>

#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"

#include <setjmp.h>

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge; so, at each label, we forget all that is
   known and start fresh.  This can be described as processing each
   basic block separately.  Note, however, that these are not quite
   the same as the basic blocks found by a later pass and used for
   data flow analysis and register packing.  We do not need to start fresh
   after a conditional jump instruction if there is no label there.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and several vectors together
   with "quantity numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_...'
   variables should be referenced with an index below `max_reg'.

   We also maintain a bidirectional chain of registers for each
   quantity number.  `qty_first_reg', `qty_last_reg',
   `reg_next_eqv' and `reg_prev_eqv' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, then REG expressions
   with the mode given by `qty_mode' must be in the hash table for both
   registers and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity of one of the
   registers does not have the same mode as those expressions.

Constants and quantity numbers

   When a quantity has a known constant value, that value is stored
   in the appropriate element of qty_const.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate element
   of qty_const.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and `qty_first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match means the entries are ignored if anyone tries to match them.

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */

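/* Illustrative sketch, not part of the original source.  Under the scheme
   above, a register-to-register copy simply propagates a quantity number,
   while any other store allocates a fresh one:

	(set (reg 101) (reg 100))   =>  reg_qty[101] = reg_qty[100];
					(101 also joins that quantity's
					register chain)
	(set (reg 101) (plus ...))  =>  a new quantity from make_new_qty

   After the copy, both registers hash to the same bucket through their
   shared quantity number, so an expression seen through either register
   is found again and canon_reg can substitute the cheaper one.  */
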
/* One plus largest register number used in this function.  */

static int max_reg;

/* Length of vectors indexed by quantity number.
   We know in advance we will not need a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Indexed by quantity number, gives the first (or last) (pseudo) register
   in the chain of registers that currently contain this quantity.  */

static int *qty_first_reg;
static int *qty_last_reg;

/* Indexed by quantity number, gives the mode of the quantity.  */

static enum machine_mode *qty_mode;

/* Indexed by quantity number, gives the rtx of the constant value of the
   quantity, or zero if it does not have a known value.
   A sum of the frame pointer (or arg pointer) plus a constant
   can also be entered here.  */

static rtx *qty_const;

/* Indexed by qty number, gives the insn that stored the constant value
   recorded in `qty_const'.  */

static rtx *qty_const_insn;

/* The next three variables are used to track when a comparison between a
   quantity and some constant or register has been passed.  In that case, we
   know the results of the comparison in case we see it again.  These variables
   record a comparison that is known to be true.  */

/* Indexed by qty number, gives the rtx code of a comparison with a known
   result involving this quantity.  If none, it is UNKNOWN.  */
static enum rtx_code *qty_comparison_code;

/* Indexed by qty number, gives the constant being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a constant, it is zero.  */

static rtx *qty_comparison_const;

/* Indexed by qty number, gives the quantity being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a register, it is -1.  */

static int *qty_comparison_qty;

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;

/* Indexed by (pseudo) register number, gives the quantity number
   of the register's current contents.  */

static int *reg_qty;

/* Indexed by (pseudo) register number, gives the number of the next (or
   previous) (pseudo) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_next_eqv[N] is undefined.  */

static int *reg_next_eqv;
static int *reg_prev_eqv;

/* Indexed by (pseudo) register number, gives the number of times
   that register has been altered in the current basic block.  */

static int *reg_tick;

/* Indexed by (pseudo) register number, gives the reg_tick value at which
   rtx's containing this register are valid in the hash table.
   If this does not equal the current reg_tick value, such expressions
   existing in the hash table are invalid.
   If this is -1, no expressions containing this register have been
   entered in the table.  */

static int *reg_in_table;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* A HARD_REG_SET containing all the hard registers that are invalidated
   by a CALL_INSN.  */

static HARD_REG_SET regs_invalidated_by_call;

/* Two vectors of ints:
   one containing max_reg -1's; the other max_reg + 500 (an approximation
   for max_qty) elements where element i contains i.
   These are used to initialize various other vectors fast.  */

static int *all_minus_one;
static int *consec_ints;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* canon_hash stores 1 in hash_arg_in_struct
   if it notices a reference to memory that's part of a structure.  */

static int hash_arg_in_struct;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `in_struct' field is nonzero for elements that
   involve any reference to memory inside a structure or array.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */


struct table_elt
{
  rtx exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  enum machine_mode mode;
  char in_memory;
  char in_struct;
  char is_const;
  char flag;
};

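/* A minimal sketch (not in the original source) of walking the two chains
   just described, given some element ELT and its bucket index HASH:

	struct table_elt *p;

	for (p = elt->first_same_value; p; p = p->next_same_value)
	  ...	(every P has the same value as ELT, cheapest first)

	for (p = table[hash]; p; p = p->next_same_hash)
	  ...	(every P merely shares ELT's hash code)
*/
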
/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define NBUCKETS 31

/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M) \
 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
  ? (((unsigned) REG << 7) + (unsigned) reg_qty[REGNO (X)]) % NBUCKETS \
  : canon_hash (X, M) % NBUCKETS)

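/* Worked example (illustrative): for a pseudo register whose quantity
   is, say, 7, the REG arm of HASH computes

	(((unsigned) REG << 7) + 7) % NBUCKETS

   so every register currently holding quantity 7 falls into the same
   bucket regardless of its register number; everything else goes
   through canon_hash.  */
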
/* Determine whether register number N is considered a fixed register for CSE.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed,
   but not if it is an overlapping register.  */
#ifdef OVERLAPPING_REGNO_P
#define FIXED_REGNO_P(N) \
  (((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
    || fixed_regs[N] || global_regs[N]) \
   && ! OVERLAPPING_REGNO_P ((N)))
#else
#define FIXED_REGNO_P(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])
#endif

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
   || ((N) < FIRST_PSEUDO_REGISTER \
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

/* A register is cheap if it is a user variable assigned to the register
   or if its register number always corresponds to a cheap register.  */

#define CHEAP_REG(N) \
  ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER) \
   || CHEAP_REGNO (REGNO (N)))

#define COST(X) \
  (GET_CODE (X) == REG \
   ? (CHEAP_REG (X) ? 0 \
      : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1 \
      : 2) \
   : rtx_cost (X, SET) * 2)

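/* Worked example (illustrative): COST is 0 for the frame pointer and
   other CHEAP_REGs, 1 for a pseudo, 2 for an ordinary hard register,
   and rtx_cost (X, SET) * 2 otherwise, e.g. for (plus (reg) (const_int 4)),
   which tends to keep registers at the heads of value classes.  */
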
/* Determine if the quantity number for register X represents a valid index
   into the `qty_...' variables.  */

#define REGNO_QTY_VALID_P(N) (reg_qty[N] != (N))

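/* For instance (illustrative): new_basic_block resets reg_qty[N] to N for
   every register, so REGNO_QTY_VALID_P (N) is false until some insn gives
   N a real quantity; real quantities are all >= max_reg and therefore
   never equal to N.  */
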
static struct table_elt *table[NBUCKETS];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;

/* Bits describing what kind of values in memory must be invalidated
   for a particular instruction.  If all three bits are zero,
   no memory refs need to be invalidated.  Each bit is more powerful
   than the preceding ones, and if a bit is set then the preceding
   bits are also set.

   Here is how the bits are set:
   Pushing onto the stack invalidates only the stack pointer,
   writing at a fixed address invalidates only variable addresses,
   writing in a structure element at variable address
     invalidates all but scalar variables,
   and writing in anything else at variable address invalidates everything.  */

struct write_data
{
  int sp : 1;			/* Invalidate stack pointer.  */
  int var : 1;			/* Invalidate variable addresses.  */
  int nonscalar : 1;		/* Invalidate all but scalar variables.  */
  int all : 1;			/* Invalidate all memory refs.  */
};

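/* Example (illustrative reading of the comment above): a stack push sets
   only `sp'; a store to a fixed address sets `var' as well; a store into
   a structure element at a varying address sets `nonscalar' too; and a
   store through an arbitrary computed address sets all four bits, making
   invalidate_memory flush every `in_memory' element.  */
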
/* Define maximum length of a branch path.  */

#define PATHLENGTH	10

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data {
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path {
    /* The branch insn.  */
    rtx branch;
    /* Whether it should be taken or not.  AROUND is the same as taken
       except that it is used when the destination label is not preceded
       by a BARRIER.  */
    enum taken {TAKEN, NOT_TAKEN, AROUND} status;
  } path[PATHLENGTH];
};

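/* For example (illustrative): when cse decides to follow a conditional
   branch while extending a basic block, the next path entry records that
   branch insn with status TAKEN; a conditional branch that will fall
   through is recorded as NOT_TAKEN.  At most PATHLENGTH such decisions
   are remembered for one block.  */
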
/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

#define FIXED_BASE_PLUS_P(X) \
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
   || (X) == arg_pointer_rtx \
   || (X) == virtual_stack_vars_rtx \
   || (X) == virtual_incoming_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx \
	   || XEXP (X, 0) == hard_frame_pointer_rtx \
	   || XEXP (X, 0) == arg_pointer_rtx \
	   || XEXP (X, 0) == virtual_stack_vars_rtx \
	   || XEXP (X, 0) == virtual_incoming_args_rtx)))

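/* E.g. (illustrative): frame_pointer_rtx itself, or the stack-slot
   address (plus frame_pointer_rtx (const_int 8)), satisfies
   FIXED_BASE_PLUS_P, while (plus (reg 100) (const_int 8)) does not,
   since pseudo 100 may point anywhere.  */
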
/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P, however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused.  */

#define NONZERO_BASE_PLUS_P(X) \
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
   || (X) == virtual_stack_vars_rtx \
   || (X) == virtual_incoming_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx \
	   || XEXP (X, 0) == hard_frame_pointer_rtx \
	   || XEXP (X, 0) == arg_pointer_rtx \
	   || XEXP (X, 0) == virtual_stack_vars_rtx \
	   || XEXP (X, 0) == virtual_incoming_args_rtx)) \
   || (X) == stack_pointer_rtx \
   || (X) == virtual_stack_dynamic_rtx \
   || (X) == virtual_outgoing_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx \
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx \
	   || XEXP (X, 0) == virtual_outgoing_args_rtx)))

static void new_basic_block PROTO((void));
static void make_new_qty PROTO((int));
static void make_regs_eqv PROTO((int, int));
static void delete_reg_equiv PROTO((int));
static int mention_regs PROTO((rtx));
static int insert_regs PROTO((rtx, struct table_elt *, int));
static void free_element PROTO((struct table_elt *));
static void remove_from_table PROTO((struct table_elt *, unsigned));
static struct table_elt *get_element PROTO((void));
static struct table_elt *lookup PROTO((rtx, unsigned, enum machine_mode)),
	*lookup_for_remove PROTO((rtx, unsigned, enum machine_mode));
static rtx lookup_as_function PROTO((rtx, enum rtx_code));
static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
				       enum machine_mode));
static void merge_equiv_classes PROTO((struct table_elt *,
				       struct table_elt *));
static void invalidate PROTO((rtx));
static void remove_invalid_refs PROTO((int));
static void rehash_using_reg PROTO((rtx));
static void invalidate_memory PROTO((struct write_data *));
static void invalidate_for_call PROTO((void));
static rtx use_related_value PROTO((rtx, struct table_elt *));
static unsigned canon_hash PROTO((rtx, enum machine_mode));
static unsigned safe_hash PROTO((rtx, enum machine_mode));
static int exp_equiv_p PROTO((rtx, rtx, int, int));
static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
						     HOST_WIDE_INT *,
						     HOST_WIDE_INT *));
static int refers_to_p PROTO((rtx, rtx));
static int refers_to_mem_p PROTO((rtx, rtx, HOST_WIDE_INT,
				  HOST_WIDE_INT));
static int cse_rtx_addr_varies_p PROTO((rtx));
static rtx canon_reg PROTO((rtx, rtx));
static void find_best_addr PROTO((rtx, rtx *));
static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
						 enum machine_mode *,
						 enum machine_mode *));
static rtx cse_gen_binary PROTO((enum rtx_code, enum machine_mode,
				 rtx, rtx));
static rtx simplify_plus_minus PROTO((enum rtx_code, enum machine_mode,
				      rtx, rtx));
static rtx fold_rtx PROTO((rtx, rtx));
static rtx equiv_constant PROTO((rtx));
static void record_jump_equiv PROTO((rtx, int));
static void record_jump_cond PROTO((enum rtx_code, enum machine_mode,
				    rtx, rtx, int));
static void cse_insn PROTO((rtx, int));
static void note_mem_written PROTO((rtx, struct write_data *));
static void invalidate_from_clobbers PROTO((struct write_data *, rtx));
static rtx cse_process_notes PROTO((rtx, rtx));
static void cse_around_loop PROTO((rtx));
static void invalidate_skipped_set PROTO((rtx, rtx));
static void invalidate_skipped_block PROTO((rtx));
static void cse_check_loop_start PROTO((rtx, rtx));
static void cse_set_around_loop PROTO((rtx, rtx, rtx));
static rtx cse_basic_block PROTO((rtx, rtx, struct branch_path *, int));
static void count_reg_usage PROTO((rtx, int *, rtx, int));

extern int rtx_equal_function_value_matters;

/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */

#define COSTS_N_INSNS(N) ((N) * 4 - 2)

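/* Worked arithmetic (illustrative): COSTS_N_INSNS (1) == 1*4 - 2 == 2,
   matching the default cost of 2 that rtx_cost gives a simple operation,
   so e.g. COSTS_N_INSNS (5) == 18 charges a multiply as roughly five
   fast register-to-register insns.  */
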
int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code;
{
  register int i, j;
  register enum rtx_code code;
  register char *fmt;
  register int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
	 we have no way of validating the resulting insn.  So assign
	 anything containing an ASM_OPERANDS a very high cost.  */
      total = 1000;
      break;
    default:
      total = 2;
    }

  switch (code)
    {
    case REG:
      return ! CHEAP_REG (x);

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      return 2;
#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
      CONST_COSTS (x, code, outer_code);
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  register int i;

  next_qty = max_reg;

  bzero ((char *) reg_tick, max_reg * sizeof (int));

  bcopy ((char *) all_minus_one, (char *) reg_in_table,
	 max_reg * sizeof (int));
  bcopy ((char *) consec_ints, (char *) reg_qty, max_reg * sizeof (int));
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *this, *next;
      for (this = table[i]; this; this = next)
	{
	  next = this->next_same_hash;
	  free_element (this);
	}
    }

  bzero ((char *) table, sizeof table);

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity not in any register before
   and initialize that quantity.  */

static void
make_new_qty (reg)
     register int reg;
{
  register int q;

  if (next_qty >= max_qty)
    abort ();

  q = reg_qty[reg] = next_qty++;
  qty_first_reg[q] = reg;
  qty_last_reg[q] = reg;
  qty_const[q] = qty_const_insn[q] = 0;
  qty_comparison_code[q] = UNKNOWN;

  reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     register int new, old;
{
  register int lastr, firstr;
  register int q = reg_qty[old];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  reg_qty[new] = q;
  firstr = qty_first_reg[q];
  lastr = qty_last_reg[q];

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[regno_last_uid[new]] > cse_basic_block_end
		       || (uid_cuid[regno_first_uid[new]]
			   < cse_basic_block_start))
		      && (uid_cuid[regno_last_uid[new]]
			  > uid_cuid[regno_last_uid[firstr]]))))))
    {
      reg_prev_eqv[firstr] = new;
      reg_next_eqv[new] = firstr;
      reg_prev_eqv[new] = -1;
      qty_first_reg[q] = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_prev_eqv[lastr];
      reg_next_eqv[new] = reg_next_eqv[lastr];
      if (reg_next_eqv[lastr] >= 0)
	reg_prev_eqv[reg_next_eqv[lastr]] = new;
      else
	qty_last_reg[q] = new;
      reg_next_eqv[lastr] = new;
      reg_prev_eqv[new] = lastr;
    }
}

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     register int reg;
{
  register int q = reg_qty[reg];
  register int p, n;

  /* If invalid, do nothing.  */
  if (q == reg)
    return;

  p = reg_prev_eqv[reg];
  n = reg_next_eqv[reg];

  if (n != -1)
    reg_prev_eqv[n] = p;
  else
    qty_last_reg[q] = p;
  if (p != -1)
    reg_next_eqv[p] = n;
  else
    qty_first_reg[q] = n;

  reg_qty[reg] = reg;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      register int regno = REGNO (x);
      register int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      int i;

      for (i = regno; i < endregno; i++)
	{
	  if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
	    remove_invalid_refs (i);

	  reg_in_table[i] = reg_tick[i];
	}

      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      if (REGNO_QTY_VALID_P (regno)
	  && qty_mode[reg_qty[regno]] != GET_MODE (x))
	return 0;

      if (modified || ! REGNO_QTY_VALID_P (regno))
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  make_new_qty (regno);
	  qty_mode[reg_qty[regno]] = GET_MODE (x);
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL_PTR, 0);
      mention_regs (SUBREG_REG (x));
      return 1;
    }
  else
    return mention_regs (x);
}

/* Look in or update the hash table.  */

/* Put the element ELT on the list of free elements.  */

static void
free_element (elt)
     struct table_elt *elt;
{
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Return an element that is free for use.  */

static struct table_elt *
get_element ()
{
  struct table_elt *elt = free_element_chain;
  if (elt)
    {
      free_element_chain = elt->next_same_hash;
      return elt;
    }
  n_elements_made++;
  return (struct table_elt *) oballoc (sizeof (struct table_elt));
}

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next) next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	register struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next) next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < NBUCKETS; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;
      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  free_element (elt);
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);
      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
					 GET_MODE (x));
  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    {
      if (GET_CODE (p->exp) == code
	  /* Make sure this is a valid entry in the table.  */
	  && exp_equiv_p (p->exp, p->exp, 1, 0))
	return p->exp;
    }

  return 0;
}

/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X,Y)   ((X)->cost < (Y)->cost)

static struct table_elt *
insert (x, classp, hash, mode)
     register rtx x;
     register struct table_elt *classp;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table.  */
  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
      int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }


  /* Put an element for X into the right hash bucket.  */

  elt = get_element ();
  elt->exp = x;
  elt->cost = COST (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x)
		   /* GNU C++ takes advantage of this for `this'
		      (and other const values).  */
		   || (RTX_UNCHANGING_P (x)
		       && GET_CODE (x) == REG
		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
		   || FIXED_BASE_PLUS_P (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  register struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  register struct table_elt *p, *next;
	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next);
	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;
	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update `qty_const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && GET_CODE (classp->exp) == REG)
    {
      qty_const[reg_qty[REGNO (classp->exp)]]
	= gen_lowpart_if_possible (qty_mode[reg_qty[REGNO (classp->exp)]], x);
      qty_const_insn[reg_qty[REGNO (classp->exp)]] = this_insn;
    }

  else if (GET_CODE (x) == REG && classp && ! qty_const[reg_qty[REGNO (x)]])
    {
      register struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const)
	    {
	      qty_const[reg_qty[REGNO (x)]]
		= gen_lowpart_if_possible (GET_MODE (x), p->exp);
	      qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
	      break;
	    }
	}
    }

  else if (GET_CODE (x) == REG && qty_const[reg_qty[REGNO (x)]]
	   && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]])
    qty_const_insn[reg_qty[REGNO (x)]] = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = safe_hash (subexp, mode) % NBUCKETS;
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL_PTR, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}

/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (class1, class2)
     struct table_elt *class1, *class2;
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
	{
	  hash_arg_in_memory = 0;
	  hash_arg_in_struct = 0;
	  hash = HASH (exp, mode);

	  if (GET_CODE (exp) == REG)
	    delete_reg_equiv (REGNO (exp));

	  remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0))
	    hash = HASH (exp, mode);
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	  new->in_struct = hash_arg_in_struct;
	}
    }
}

/* Remove from the hash table, or mark as invalid,
   all expressions whose values could be altered by storing in X.
   X is a register, a subreg, or a memory reference with nonvarying address
   (because, when a memory reference with a varying address is stored in,
   all memory references are removed by invalidate_memory
   so specific invalidation is superfluous).

   A nonvarying address may be just a register or just
   a symbol reference, or it may be either of those plus
   a numeric offset.  */

static void
invalidate (x)
     rtx x;
{
  register int i;
  register struct table_elt *p;
  rtx base;
  HOST_WIDE_INT start, end;

  /* If X is a register, dependencies on its contents
     are recorded through the qty number mechanism.
     Just change the qty number of the register,
     mark it as invalid for expressions that refer to it,
     and remove it itself.  */

  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);
      register unsigned hash = HASH (x, GET_MODE (x));

      /* Remove REGNO from any quantity list it might be on and indicate
	 that its value might have changed.  If it is a pseudo, remove its
	 entry from the hash table.

	 For a hard register, we do the first two actions above for any
	 additional hard registers corresponding to X.  Then, if any of these
	 registers are in the table, we must remove any REG entries that
	 overlap these registers.  */

      delete_reg_equiv (regno);
      reg_tick[regno]++;

      if (regno >= FIRST_PSEUDO_REGISTER)
	remove_from_table (lookup_for_remove (x, hash, GET_MODE (x)), hash);
      else
	{
	  HOST_WIDE_INT in_table
	    = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
	  int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	  int tregno, tendregno;
	  register struct table_elt *p, *next;

	  CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

	  for (i = regno + 1; i < endregno; i++)
	    {
	      in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
	      CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
	      delete_reg_equiv (i);
	      reg_tick[i]++;
	    }

	  if (in_table)
	    for (hash = 0; hash < NBUCKETS; hash++)
	      for (p = table[hash]; p; p = next)
		{
		  next = p->next_same_hash;

		  if (GET_CODE (p->exp) != REG
		      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
		    continue;

		  tregno = REGNO (p->exp);
		  tendregno
		    = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
		  if (tendregno > regno && tregno < endregno)
		    remove_from_table (p, hash);
		}
	}

      return;
    }

  if (GET_CODE (x) == SUBREG)
    {
      if (GET_CODE (SUBREG_REG (x)) != REG)
	abort ();
      invalidate (SUBREG_REG (x));
      return;
    }

  /* X is not a register; it must be a memory reference with
     a nonvarying address.  Remove all hash table elements
     that refer to overlapping pieces of memory.  */

  if (GET_CODE (x) != MEM)
    abort ();

  set_nonvarying_address_components (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x)),
				     &base, &start, &end);

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *next;
      for (p = table[i]; p; p = next)
	{
	  next = p->next_same_hash;
	  if (refers_to_mem_p (p->exp, base, start, end))
	    remove_from_table (p, i);
	}
    }
}

1591/* Remove all expressions that refer to register REGNO,
1592 since they are already invalid, and we are about to
1593 mark that register valid again and don't want the old
1594 expressions to reappear as valid. */
1595
1596static void
1597remove_invalid_refs (regno)
1598 int regno;
1599{
1600 register int i;
1601 register struct table_elt *p, *next;
1602
1603 for (i = 0; i < NBUCKETS; i++)
1604 for (p = table[i]; p; p = next)
1605 {
1606 next = p->next_same_hash;
1607 if (GET_CODE (p->exp) != REG
1608 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1609 remove_from_table (p, i);
1610 }
1611}
1612\f
1613/* Recompute the hash codes of any valid entries in the hash table that
1614 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1615
1616 This is called when we make a jump equivalence. */
1617
1618static void
1619rehash_using_reg (x)
1620 rtx x;
1621{
1622 int i;
1623 struct table_elt *p, *next;
1624 unsigned hash;
1625
1626 if (GET_CODE (x) == SUBREG)
1627 x = SUBREG_REG (x);
1628
1629 /* If X is not a register or if the register is known not to be in any
1630 valid entries in the table, we have no work to do. */
1631
1632 if (GET_CODE (x) != REG
1633 || reg_in_table[REGNO (x)] < 0
1634 || reg_in_table[REGNO (x)] != reg_tick[REGNO (x)])
1635 return;
1636
1637 /* Scan all hash chains looking for valid entries that mention X.
1638 If we find one and it is in the wrong hash chain, move it. We can skip
1639 objects that are registers, since they are handled specially. */
1640
1641 for (i = 0; i < NBUCKETS; i++)
1642 for (p = table[i]; p; p = next)
1643 {
1644 next = p->next_same_hash;
1645 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
1646 && exp_equiv_p (p->exp, p->exp, 1, 0)
1647 && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
1648 {
1649 if (p->next_same_hash)
1650 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1651
1652 if (p->prev_same_hash)
1653 p->prev_same_hash->next_same_hash = p->next_same_hash;
1654 else
1655 table[i] = p->next_same_hash;
1656
1657 p->next_same_hash = table[hash];
1658 p->prev_same_hash = 0;
1659 if (table[hash])
1660 table[hash]->prev_same_hash = p;
1661 table[hash] = p;
1662 }
1663 }
1664}
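/* Worked example (exposition only, hypothetical register numbers):
   suppose a conditional jump teaches us that (reg 70) equals (reg 65),
   so reg 70 is merged into reg 65's quantity.  A valid table entry such
   as (plus (reg 70) (const_int 4)) now hashes through the new quantity
   number and may belong in a different bucket; the scan above unlinks
   it from its old chain and prepends it to table[hash].  */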
1665\f
1666/* Remove from the hash table all expressions that reference memory,
1667 or some of them as specified by *WRITES. */
1668
1669static void
1670invalidate_memory (writes)
1671 struct write_data *writes;
1672{
1673 register int i;
1674 register struct table_elt *p, *next;
1675 int all = writes->all;
1676 int nonscalar = writes->nonscalar;
1677
1678 for (i = 0; i < NBUCKETS; i++)
1679 for (p = table[i]; p; p = next)
1680 {
1681 next = p->next_same_hash;
1682 if (p->in_memory
1683 && (all
1684 || (nonscalar && p->in_struct)
1685 || cse_rtx_addr_varies_p (p->exp)))
1686 remove_from_table (p, i);
1687 }
1688}
1689\f
1690/* Remove from the hash table all expressions that are call-clobbered
1691 registers. Also update their TICK values. */
1692
1693static void
1694invalidate_for_call ()
1695{
1696 int regno, endregno;
1697 int i;
1698 unsigned hash;
1699 struct table_elt *p, *next;
1700 int in_table = 0;
1701
1702 /* Go through all the hard registers. For each that is clobbered in
1703 a CALL_INSN, remove the register from quantity chains and update
1704 reg_tick if defined. Also see if any of these registers is currently
1705 in the table. */
1706
1707 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1708 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1709 {
1710 delete_reg_equiv (regno);
1711 if (reg_tick[regno] >= 0)
1712 reg_tick[regno]++;
1713
1714 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1715 }
1716
1717 /* In the case where we have no call-clobbered hard registers in the
1718 table, we are done. Otherwise, scan the table and remove any
1719 entry that overlaps a call-clobbered register. */
1720
1721 if (in_table)
1722 for (hash = 0; hash < NBUCKETS; hash++)
1723 for (p = table[hash]; p; p = next)
1724 {
1725 next = p->next_same_hash;
1726
1727 if (GET_CODE (p->exp) != REG
1728 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1729 continue;
1730
1731 regno = REGNO (p->exp);
1732 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1733
1734 for (i = regno; i < endregno; i++)
1735 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1736 {
1737 remove_from_table (p, hash);
1738 break;
1739 }
1740 }
1741}
1742\f
1743/* Given an expression X of type CONST,
1744 and ELT which is its table entry (or 0 if it
1745 is not in the hash table),
1746 return an alternate expression for X as a register plus integer.
1747 If none can be found, return 0. */
1748
1749static rtx
1750use_related_value (x, elt)
1751 rtx x;
1752 struct table_elt *elt;
1753{
1754 register struct table_elt *relt = 0;
1755 register struct table_elt *p, *q;
1756 HOST_WIDE_INT offset;
1757
1758 /* First, is there anything related known?
1759 If we have a table element, we can tell from that.
1760 Otherwise, must look it up. */
1761
1762 if (elt != 0 && elt->related_value != 0)
1763 relt = elt;
1764 else if (elt == 0 && GET_CODE (x) == CONST)
1765 {
1766 rtx subexp = get_related_value (x);
1767 if (subexp != 0)
1768 relt = lookup (subexp,
1769 safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
1770 GET_MODE (subexp));
1771 }
1772
1773 if (relt == 0)
1774 return 0;
1775
1776 /* Search all related table entries for one that has an
1777 equivalent register. */
1778
1779 p = relt;
1780 while (1)
1781 {
1782 /* This loop is strange in that it is executed in two different cases.
1783 The first is when X is already in the table. Then it is searching
1784 the RELATED_VALUE list of X's class (RELT). The second case is when
1785 X is not in the table. Then RELT points to a class for the related
1786 value.
1787
1788 Ensure that, whatever case we are in, we ignore classes that have
1789 the same value as X. */
1790
1791 if (rtx_equal_p (x, p->exp))
1792 q = 0;
1793 else
1794 for (q = p->first_same_value; q; q = q->next_same_value)
1795 if (GET_CODE (q->exp) == REG)
1796 break;
1797
1798 if (q)
1799 break;
1800
1801 p = p->related_value;
1802
1803 /* We went all the way around, so there is nothing to be found.
1804 Alternatively, perhaps RELT was in the table for some other reason
1805 and it has no related values recorded. */
1806 if (p == relt || p == 0)
1807 break;
1808 }
1809
1810 if (q == 0)
1811 return 0;
1812
1813 offset = (get_integer_term (x) - get_integer_term (p->exp));
1814 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
1815 return plus_constant (q->exp, offset);
1816}
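/* Worked example (exposition only, hypothetical registers): if X is
   (const (plus (symbol_ref "s") (const_int 8))) and the table holds
   (const (plus (symbol_ref "s") (const_int 4))) in a class containing
   (reg 70), then OFFSET is 8 - 4 = 4 and the result is
   (plus (reg 70) (const_int 4)).  */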
1817\f
1818/* Hash an rtx. We are careful to make sure the value is never negative.
1819 Equivalent registers hash identically.
1820 MODE is used in hashing for CONST_INTs only;
1821 otherwise the mode of X is used.
1822
1823 Store 1 in do_not_record if any subexpression is volatile.
1824
1825 Store 1 in hash_arg_in_memory if X contains a MEM rtx
1826 which does not have the RTX_UNCHANGING_P bit set.
1827 In this case, also store 1 in hash_arg_in_struct
1828 if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.
1829
1830 Note that cse_insn knows that the hash code of a MEM expression
1831 is just (int) MEM plus the hash code of the address. */
1832
1833static unsigned
1834canon_hash (x, mode)
1835 rtx x;
1836 enum machine_mode mode;
1837{
1838 register int i, j;
1839 register unsigned hash = 0;
1840 register enum rtx_code code;
1841 register char *fmt;
1842
1843 /* repeat is used to turn tail-recursion into iteration. */
1844 repeat:
1845 if (x == 0)
1846 return hash;
1847
1848 code = GET_CODE (x);
1849 switch (code)
1850 {
1851 case REG:
1852 {
1853 register int regno = REGNO (x);
1854
1855 /* On some machines, we can't record any non-fixed hard register,
1856 because extending its life will cause reload problems. We
1857 consider ap, fp, and sp to be fixed for this purpose.
1858 On all machines, we can't record any global registers. */
1859
1860 if (regno < FIRST_PSEUDO_REGISTER
1861 && (global_regs[regno]
1862#ifdef SMALL_REGISTER_CLASSES
1863 || (! fixed_regs[regno]
1864 && regno != FRAME_POINTER_REGNUM
1865 && regno != HARD_FRAME_POINTER_REGNUM
1866 && regno != ARG_POINTER_REGNUM
1867 && regno != STACK_POINTER_REGNUM)
1868#endif
1869 ))
1870 {
1871 do_not_record = 1;
1872 return 0;
1873 }
1874 hash += ((unsigned) REG << 7) + (unsigned) reg_qty[regno];
1875 return hash;
1876 }
1877
1878 case CONST_INT:
1879 {
1880 unsigned HOST_WIDE_INT tem = INTVAL (x);
1881 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
1882 return hash;
1883 }
1884
1885 case CONST_DOUBLE:
1886 /* This is like the general case, except that it only counts
1887 the integers representing the constant. */
1888 hash += (unsigned) code + (unsigned) GET_MODE (x);
1889 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1890 {
1891 unsigned tem = XINT (x, i);
1892 hash += tem;
1893 }
1894 return hash;
1895
1896 /* Assume there is only one rtx object for any given label. */
1897 case LABEL_REF:
1898 hash
1899 += ((unsigned) LABEL_REF << 7) + (unsigned HOST_WIDE_INT) XEXP (x, 0);
1900 return hash;
1901
1902 case SYMBOL_REF:
1903 hash
1904 += ((unsigned) SYMBOL_REF << 7) + (unsigned HOST_WIDE_INT) XEXP (x, 0);
1905 return hash;
1906
1907 case MEM:
1908 if (MEM_VOLATILE_P (x))
1909 {
1910 do_not_record = 1;
1911 return 0;
1912 }
1913 if (! RTX_UNCHANGING_P (x))
1914 {
1915 hash_arg_in_memory = 1;
1916 if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
1917 }
1918 /* Now that we have already found this special case,
1919 might as well speed it up as much as possible. */
1920 hash += (unsigned) MEM;
1921 x = XEXP (x, 0);
1922 goto repeat;
1923
1924 case PRE_DEC:
1925 case PRE_INC:
1926 case POST_DEC:
1927 case POST_INC:
1928 case PC:
1929 case CC0:
1930 case CALL:
1931 case UNSPEC_VOLATILE:
1932 do_not_record = 1;
1933 return 0;
1934
1935 case ASM_OPERANDS:
1936 if (MEM_VOLATILE_P (x))
1937 {
1938 do_not_record = 1;
1939 return 0;
1940 }
1941 }
1942
1943 i = GET_RTX_LENGTH (code) - 1;
1944 hash += (unsigned) code + (unsigned) GET_MODE (x);
1945 fmt = GET_RTX_FORMAT (code);
1946 for (; i >= 0; i--)
1947 {
1948 if (fmt[i] == 'e')
1949 {
1950 rtx tem = XEXP (x, i);
1951 rtx tem1;
1952
1953 /* If the operand is a REG that is equivalent to a constant, hash
1954 as if we were hashing the constant, since we will be comparing
1955 that way. */
1956 if (tem != 0 && GET_CODE (tem) == REG
1957 && REGNO_QTY_VALID_P (REGNO (tem))
1958 && qty_mode[reg_qty[REGNO (tem)]] == GET_MODE (tem)
1959 && (tem1 = qty_const[reg_qty[REGNO (tem)]]) != 0
1960 && CONSTANT_P (tem1))
1961 tem = tem1;
1962
1963 /* If we are about to do the last recursive call
1964 needed at this level, change it into iteration.
1965 This function is called enough to be worth it. */
1966 if (i == 0)
1967 {
1968 x = tem;
1969 goto repeat;
1970 }
1971 hash += canon_hash (tem, 0);
1972 }
1973 else if (fmt[i] == 'E')
1974 for (j = 0; j < XVECLEN (x, i); j++)
1975 hash += canon_hash (XVECEXP (x, i, j), 0);
1976 else if (fmt[i] == 's')
1977 {
1978 register unsigned char *p = (unsigned char *) XSTR (x, i);
1979 if (p)
1980 while (*p)
1981 hash += *p++;
1982 }
1983 else if (fmt[i] == 'i')
1984 {
1985 register unsigned tem = XINT (x, i);
1986 hash += tem;
1987 }
1988 else
1989 abort ();
1990 }
1991 return hash;
1992}
1993
1994/* Like canon_hash but with no side effects. */
1995
1996static unsigned
1997safe_hash (x, mode)
1998 rtx x;
1999 enum machine_mode mode;
2000{
2001 int save_do_not_record = do_not_record;
2002 int save_hash_arg_in_memory = hash_arg_in_memory;
2003 int save_hash_arg_in_struct = hash_arg_in_struct;
2004 unsigned hash = canon_hash (x, mode);
2005 hash_arg_in_memory = save_hash_arg_in_memory;
2006 hash_arg_in_struct = save_hash_arg_in_struct;
2007 do_not_record = save_do_not_record;
2008 return hash;
2009}
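/* Exposition only: a side-effect-free probe of the table built from the
   primitives above, mirroring the lookup calls made elsewhere in this
   file.  safe_hash is used so the query cannot clobber do_not_record,
   hash_arg_in_memory or hash_arg_in_struct.  */
#if 0
static struct table_elt *
example_probe (x)
     rtx x;
{
  return lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
}
#endif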
2010\f
2011/* Return 1 iff X and Y would canonicalize into the same thing,
2012 without actually constructing the canonicalization of either one.
2013 If VALIDATE is nonzero,
2014 we assume X is an expression being processed from the rtl
2015 and Y was found in the hash table. We check register refs
2016 in Y for being marked as valid.
2017
2018 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2019 that is known to be in the register. Ordinarily, we don't allow them
2020 to match, because letting them match would cause unpredictable results
2021 in all the places that search a hash table chain for an equivalent
2022 for a given value. A possible equivalent that has different structure
2023 has its hash code computed from different data. Whether the hash code
2024 is the same as that of the given value is pure luck. */
2025
2026static int
2027exp_equiv_p (x, y, validate, equal_values)
2028 rtx x, y;
2029 int validate;
2030 int equal_values;
2031{
2032 register int i, j;
2033 register enum rtx_code code;
2034 register char *fmt;
2035
2036 /* Note: it is incorrect to assume an expression is equivalent to itself
2037 if VALIDATE is nonzero. */
2038 if (x == y && !validate)
2039 return 1;
2040 if (x == 0 || y == 0)
2041 return x == y;
2042
2043 code = GET_CODE (x);
2044 if (code != GET_CODE (y))
2045 {
2046 if (!equal_values)
2047 return 0;
2048
2049 /* If X is a constant and Y is a register or vice versa, they may be
2050 equivalent. We only have to validate if Y is a register. */
2051 if (CONSTANT_P (x) && GET_CODE (y) == REG
2052 && REGNO_QTY_VALID_P (REGNO (y))
2053 && GET_MODE (y) == qty_mode[reg_qty[REGNO (y)]]
2054 && rtx_equal_p (x, qty_const[reg_qty[REGNO (y)]])
2055 && (! validate || reg_in_table[REGNO (y)] == reg_tick[REGNO (y)]))
2056 return 1;
2057
2058 if (CONSTANT_P (y) && code == REG
2059 && REGNO_QTY_VALID_P (REGNO (x))
2060 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
2061 && rtx_equal_p (y, qty_const[reg_qty[REGNO (x)]]))
2062 return 1;
2063
2064 return 0;
2065 }
2066
2067 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2068 if (GET_MODE (x) != GET_MODE (y))
2069 return 0;
2070
2071 switch (code)
2072 {
2073 case PC:
2074 case CC0:
2075 return x == y;
2076
2077 case CONST_INT:
2078 return INTVAL (x) == INTVAL (y);
2079
2080 case LABEL_REF:
2081 case SYMBOL_REF:
2082 return XEXP (x, 0) == XEXP (y, 0);
2083
2084 case REG:
2085 {
2086 int regno = REGNO (y);
2087 int endregno
2088 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2089 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2090 int i;
2091
2092 /* If the quantities are not the same, the expressions are not
2093 equivalent. If they are and we are not to validate, they
2094 are equivalent. Otherwise, ensure all regs are up-to-date. */
2095
2096 if (reg_qty[REGNO (x)] != reg_qty[regno])
2097 return 0;
2098
2099 if (! validate)
2100 return 1;
2101
2102 for (i = regno; i < endregno; i++)
2103 if (reg_in_table[i] != reg_tick[i])
2104 return 0;
2105
2106 return 1;
2107 }
2108
2109 /* For commutative operations, check both orders. */
2110 case PLUS:
2111 case MULT:
2112 case AND:
2113 case IOR:
2114 case XOR:
2115 case NE:
2116 case EQ:
2117 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2118 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2119 validate, equal_values))
2120 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2121 validate, equal_values)
2122 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2123 validate, equal_values)));
2124 }
2125
2126 /* Compare the elements. If any pair of corresponding elements
2127 fails to match, return 0 for the whole thing.
2128
2129 fmt = GET_RTX_FORMAT (code);
2130 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2131 {
2132 switch (fmt[i])
2133 {
2134 case 'e':
2135 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2136 return 0;
2137 break;
2138
2139 case 'E':
2140 if (XVECLEN (x, i) != XVECLEN (y, i))
2141 return 0;
2142 for (j = 0; j < XVECLEN (x, i); j++)
2143 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2144 validate, equal_values))
2145 return 0;
2146 break;
2147
2148 case 's':
2149 if (strcmp (XSTR (x, i), XSTR (y, i)))
2150 return 0;
2151 break;
2152
2153 case 'i':
2154 if (XINT (x, i) != XINT (y, i))
2155 return 0;
2156 break;
2157
2158 case 'w':
2159 if (XWINT (x, i) != XWINT (y, i))
2160 return 0;
2161 break;
2162
2163 case '0':
2164 break;
2165
2166 default:
2167 abort ();
2168 }
2169 }
2170
2171 return 1;
2172}
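/* Worked example (exposition only, hypothetical registers):
   (plus:SI (reg 70) (reg 71)) and (plus:SI (reg 71) (reg 70)) are
   reported equivalent, because the commutative PLUS case above tries
   the operands in both orders.  The registers themselves match through
   their quantity numbers, not their register numbers.  */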
2173\f
2174/* Return 1 iff any subexpression of X matches Y.
2175 Here we do not require that X or Y be valid (for registers referred to)
2176 for being in the hash table. */
2177
2178static int
2179refers_to_p (x, y)
2180 rtx x, y;
2181{
2182 register int i;
2183 register enum rtx_code code;
2184 register char *fmt;
2185
2186 repeat:
2187 if (x == y)
2188 return 1;
2189 if (x == 0 || y == 0)
2190 return 0;
2191
2192 code = GET_CODE (x);
2193 /* If X as a whole has the same code as Y, they may match.
2194 If so, return 1. */
2195 if (code == GET_CODE (y))
2196 {
2197 if (exp_equiv_p (x, y, 0, 1))
2198 return 1;
2199 }
2200
2201 /* X does not match, so try its subexpressions. */
2202
2203 fmt = GET_RTX_FORMAT (code);
2204 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2205 if (fmt[i] == 'e')
2206 {
2207 if (i == 0)
2208 {
2209 x = XEXP (x, 0);
2210 goto repeat;
2211 }
2212 else
2213 if (refers_to_p (XEXP (x, i), y))
2214 return 1;
2215 }
2216 else if (fmt[i] == 'E')
2217 {
2218 int j;
2219 for (j = 0; j < XVECLEN (x, i); j++)
2220 if (refers_to_p (XVECEXP (x, i, j), y))
2221 return 1;
2222 }
2223
2224 return 0;
2225}
2226\f
2227/* Given ADDR and SIZE (a memory address, and the size of the memory reference),
2228 set PBASE, PSTART, and PEND which correspond to the base of the address,
2229 the starting offset, and ending offset respectively.
2230
2231 ADDR is known to be a nonvarying address.
2232
2233 cse_address_varies_p returns zero for nonvarying addresses. */
2234
2235static void
2236set_nonvarying_address_components (addr, size, pbase, pstart, pend)
2237 rtx addr;
2238 int size;
2239 rtx *pbase;
2240 HOST_WIDE_INT *pstart, *pend;
2241{
2242 rtx base;
2243 HOST_WIDE_INT start, end;
2244
2245 base = addr;
2246 start = 0;
2247 end = 0;
2248
2249 /* Registers with nonvarying addresses usually have constant equivalents;
2250 but the frame pointer register is also possible. */
2251 if (GET_CODE (base) == REG
2252 && qty_const != 0
2253 && REGNO_QTY_VALID_P (REGNO (base))
2254 && qty_mode[reg_qty[REGNO (base)]] == GET_MODE (base)
2255 && qty_const[reg_qty[REGNO (base)]] != 0)
2256 base = qty_const[reg_qty[REGNO (base)]];
2257 else if (GET_CODE (base) == PLUS
2258 && GET_CODE (XEXP (base, 1)) == CONST_INT
2259 && GET_CODE (XEXP (base, 0)) == REG
2260 && qty_const != 0
2261 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2262 && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2263 == GET_MODE (XEXP (base, 0)))
2264 && qty_const[reg_qty[REGNO (XEXP (base, 0))]])
2265 {
2266 start = INTVAL (XEXP (base, 1));
2267 base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2268 }
2269
2270 /* Handle everything that we can find inside an address that has been
2271 viewed as constant. */
2272
2273 while (1)
2274 {
2275 /* If no part of this switch does a "continue", the code outside
2276 will exit this loop. */
2277
2278 switch (GET_CODE (base))
2279 {
2280 case LO_SUM:
2281 /* By definition, operand1 of a LO_SUM is the associated constant
2282 address. Use the associated constant address as the base
2283 instead. */
2284 base = XEXP (base, 1);
2285 continue;
2286
2287 case CONST:
2288 /* Strip off CONST. */
2289 base = XEXP (base, 0);
2290 continue;
2291
2292 case PLUS:
2293 if (GET_CODE (XEXP (base, 1)) == CONST_INT)
2294 {
2295 start += INTVAL (XEXP (base, 1));
2296 base = XEXP (base, 0);
2297 continue;
2298 }
2299 break;
2300
2301 case AND:
2302 /* Handle the case of an AND which is the negative of a power of
2303 two. This is used to represent unaligned memory operations. */
2304 if (GET_CODE (XEXP (base, 1)) == CONST_INT
2305 && exact_log2 (- INTVAL (XEXP (base, 1))) > 0)
2306 {
2307 set_nonvarying_address_components (XEXP (base, 0), size,
2308 pbase, pstart, pend);
2309
2310 /* Assume the worst misalignment. START is affected, but not
2311 END, so compensate by adjusting SIZE. Don't lose any
2312 constant we already had. */
2313
2314 size = *pend - *pstart - INTVAL (XEXP (base, 1)) - 1;
2315 start += *pstart - INTVAL (XEXP (base, 1)) - 1;
2316 base = *pbase;
2317 }
2318 break;
2319 }
2320
2321 break;
2322 }
2323
2324 end = start + size;
2325
2326 /* Set the return values. */
2327 *pbase = base;
2328 *pstart = start;
2329 *pend = end;
2330}
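/* Worked example (exposition only): for
   ADDR == (const (plus (symbol_ref "a") (const_int 4))) and SIZE == 8,
   the loop above strips the CONST, folds the constant term of the PLUS
   into START, and returns *PBASE == (symbol_ref "a"), *PSTART == 4,
   *PEND == 4 + 8 == 12.  */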
2331
2332/* Return 1 iff any subexpression of X refers to memory
2333 at an address of BASE plus some offset
2334 such that any of the bytes' offsets fall between START (inclusive)
2335 and END (exclusive).
2336
2337 The value is undefined if X is a varying address (as determined by
2338 cse_rtx_addr_varies_p). This function is not used in such cases.
2339
2340 When used in the cse pass, `qty_const' is nonzero, and it is used
2341 to treat an address that is a register with a known constant value
2342 as if it were that constant value.
2343 In the loop pass, `qty_const' is zero, so this is not done. */
2344
2345static int
2346refers_to_mem_p (x, base, start, end)
2347 rtx x, base;
2348 HOST_WIDE_INT start, end;
2349{
2350 register HOST_WIDE_INT i;
2351 register enum rtx_code code;
2352 register char *fmt;
2353
2354 if (GET_CODE (base) == CONST_INT)
2355 {
2356 start += INTVAL (base);
2357 end += INTVAL (base);
2358 base = const0_rtx;
2359 }
2360
2361 repeat:
2362 if (x == 0)
2363 return 0;
2364
2365 code = GET_CODE (x);
2366 if (code == MEM)
2367 {
2368 register rtx addr = XEXP (x, 0); /* Get the address. */
2369 rtx mybase;
2370 HOST_WIDE_INT mystart, myend;
2371
2372 set_nonvarying_address_components (addr, GET_MODE_SIZE (GET_MODE (x)),
2373 &mybase, &mystart, &myend);
2374
2375
2376 /* refers_to_mem_p is never called with varying addresses.
2377 If the base addresses are not equal, there is no chance
2378 of the memory addresses conflicting. */
2379 if (! rtx_equal_p (mybase, base))
2380 return 0;
2381
2382 return myend > start && mystart < end;
2383 }
2384
2385 /* X does not match, so try its subexpressions. */
2386
2387 fmt = GET_RTX_FORMAT (code);
2388 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2389 if (fmt[i] == 'e')
2390 {
2391 if (i == 0)
2392 {
2393 x = XEXP (x, 0);
2394 goto repeat;
2395 }
2396 else
2397 if (refers_to_mem_p (XEXP (x, i), base, start, end))
2398 return 1;
2399 }
2400 else if (fmt[i] == 'E')
2401 {
2402 int j;
2403 for (j = 0; j < XVECLEN (x, i); j++)
2404 if (refers_to_mem_p (XVECEXP (x, i, j), base, start, end))
2405 return 1;
2406 }
2407
2408 return 0;
2409}
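/* Exposition only: with equal bases the test above is a half-open
   interval intersection; [MYSTART, MYEND) overlaps [START, END) iff
   MYEND > START && MYSTART < END.  For instance, byte ranges [0, 4)
   and [4, 8) of the same base do not conflict.  */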
2410
2411/* Nonzero if X refers to memory at a varying address;
2412 except that a register which has at the moment a known constant value
2413 isn't considered variable. */
2414
2415static int
2416cse_rtx_addr_varies_p (x)
2417 rtx x;
2418{
2419 /* We need not check for X and the equivalence class being of the same
2420 mode because if X is equivalent to a constant in some mode, it
2421 doesn't vary in any mode. */
2422
2423 if (GET_CODE (x) == MEM
2424 && GET_CODE (XEXP (x, 0)) == REG
2425 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2426 && GET_MODE (XEXP (x, 0)) == qty_mode[reg_qty[REGNO (XEXP (x, 0))]]
2427 && qty_const[reg_qty[REGNO (XEXP (x, 0))]] != 0)
2428 return 0;
2429
2430 if (GET_CODE (x) == MEM
2431 && GET_CODE (XEXP (x, 0)) == PLUS
2432 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2433 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2434 && REGNO_QTY_VALID_P (REGNO (XEXP (XEXP (x, 0), 0)))
2435 && (GET_MODE (XEXP (XEXP (x, 0), 0))
2436 == qty_mode[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2437 && qty_const[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2438 return 0;
2439
2440 return rtx_addr_varies_p (x);
2441}
2442\f
2443/* Canonicalize an expression:
2444 replace each register reference inside it
2445 with the "oldest" equivalent register.
2446
2447 If INSN is non-zero and we are replacing a pseudo with a hard register
2448 or vice versa, validate_change is used to ensure that INSN remains valid
2449 after we make our substitution. The calls are made with IN_GROUP non-zero
2450 so apply_change_group must be called upon the outermost return from this
2451 function (unless INSN is zero). The result of apply_change_group can
2452 generally be discarded since the changes we are making are optional. */
2453
2454static rtx
2455canon_reg (x, insn)
2456 rtx x;
2457 rtx insn;
2458{
2459 register int i;
2460 register enum rtx_code code;
2461 register char *fmt;
2462
2463 if (x == 0)
2464 return x;
2465
2466 code = GET_CODE (x);
2467 switch (code)
2468 {
2469 case PC:
2470 case CC0:
2471 case CONST:
2472 case CONST_INT:
2473 case CONST_DOUBLE:
2474 case SYMBOL_REF:
2475 case LABEL_REF:
2476 case ADDR_VEC:
2477 case ADDR_DIFF_VEC:
2478 return x;
2479
2480 case REG:
2481 {
2482 register int first;
2483
2484 /* Never replace a hard reg, because hard regs can appear
2485 in more than one machine mode, and we must preserve the mode
2486 of each occurrence. Also, some hard regs appear in
2487 MEMs that are shared and mustn't be altered. Don't try to
2488 replace any reg that maps to a reg of class NO_REGS. */
2489 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2490 || ! REGNO_QTY_VALID_P (REGNO (x)))
2491 return x;
2492
2493 first = qty_first_reg[reg_qty[REGNO (x)]];
2494 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2495 : REGNO_REG_CLASS (first) == NO_REGS ? x
2496 : gen_rtx (REG, qty_mode[reg_qty[REGNO (x)]], first));
2497 }
2498 }
2499
2500 fmt = GET_RTX_FORMAT (code);
2501 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2502 {
2503 register int j;
2504
2505 if (fmt[i] == 'e')
2506 {
2507 rtx new = canon_reg (XEXP (x, i), insn);
2508
2509 /* If replacing pseudo with hard reg or vice versa, ensure the
2510 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2511 if (insn != 0 && new != 0
2512 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2513 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2514 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2515 || insn_n_dups[recog_memoized (insn)] > 0))
2516 validate_change (insn, &XEXP (x, i), new, 1);
2517 else
2518 XEXP (x, i) = new;
2519 }
2520 else if (fmt[i] == 'E')
2521 for (j = 0; j < XVECLEN (x, i); j++)
2522 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2523 }
2524
2525 return x;
2526}
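/* Exposition only, with a hypothetical SET insn: because canon_reg queues
   its replacements through validate_change with IN_GROUP nonzero, the
   outermost caller must run apply_change_group to commit (or harmlessly
   discard) them, as the comment above describes.  */
#if 0
  SET_SRC (set) = canon_reg (SET_SRC (set), insn);
  apply_change_group ();
#endif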
2527\f
2528/* LOC is a location within INSN that is an operand address (the contents of
2529 a MEM). Find the best equivalent address to use that is valid for this
2530 insn.
2531
2532 On most CISC machines, complicated address modes are costly, and rtx_cost
2533 is a good approximation for that cost. However, most RISC machines have
2534 only a few (usually only one) memory reference formats. If an address is
2535 valid at all, it is often just as cheap as any other address. Hence, for
2536 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2537 costs of various addresses. For two addresses of equal cost, choose the one
2538 with the highest `rtx_cost' value as that has the potential of eliminating
2539 the most insns. For equal costs, we choose the first in the equivalence
2540 class. Note that we ignore the fact that pseudo registers are cheaper
2541 than hard registers here because we would also prefer the pseudo registers.
2542 */
2543
2544static void
2545find_best_addr (insn, loc)
2546 rtx insn;
2547 rtx *loc;
2548{
2549 struct table_elt *elt, *p;
2550 rtx addr = *loc;
2551 int our_cost;
2552 int found_better = 1;
2553 int save_do_not_record = do_not_record;
2554 int save_hash_arg_in_memory = hash_arg_in_memory;
2555 int save_hash_arg_in_struct = hash_arg_in_struct;
2556 int addr_volatile;
2557 int regno;
2558 unsigned hash;
2559
2560 /* Do not try to replace constant addresses or addresses of local and
2561 argument slots. These MEM expressions are made only once and inserted
2562 in many instructions, as well as being used to control symbol table
2563 output. It is not safe to clobber them.
2564
2565 There are some uncommon cases where the address is already in a register
2566 for some reason, but we cannot take advantage of that because we have
2567 no easy way to unshare the MEM. In addition, looking up all stack
2568 addresses is costly. */
2569 if ((GET_CODE (addr) == PLUS
2570 && GET_CODE (XEXP (addr, 0)) == REG
2571 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2572 && (regno = REGNO (XEXP (addr, 0)),
2573 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2574 || regno == ARG_POINTER_REGNUM))
2575 || (GET_CODE (addr) == REG
2576 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2577 || regno == HARD_FRAME_POINTER_REGNUM
2578 || regno == ARG_POINTER_REGNUM))
2579 || CONSTANT_ADDRESS_P (addr))
2580 return;
2581
2582 /* If this address is not simply a register, try to fold it. This will
2583 sometimes simplify the expression. Many simplifications
2584 will not be valid, but some, usually applying the associative rule, will
2585 be valid and produce better code. */
2586 if (GET_CODE (addr) != REG
2587 && validate_change (insn, loc, fold_rtx (addr, insn), 0))
2588 addr = *loc;
2589
2590 /* If this address is not in the hash table, we can't look for equivalences
2591 of the whole address. Also, ignore if volatile. */
2592
2593 do_not_record = 0;
2594 hash = HASH (addr, Pmode);
2595 addr_volatile = do_not_record;
2596 do_not_record = save_do_not_record;
2597 hash_arg_in_memory = save_hash_arg_in_memory;
2598 hash_arg_in_struct = save_hash_arg_in_struct;
2599
2600 if (addr_volatile)
2601 return;
2602
2603 elt = lookup (addr, hash, Pmode);
2604
2605#ifndef ADDRESS_COST
2606 if (elt)
2607 {
2608 our_cost = elt->cost;
2609
2610 /* Find the lowest cost below ours that works. */
2611 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2612 if (elt->cost < our_cost
2613 && (GET_CODE (elt->exp) == REG
2614 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2615 && validate_change (insn, loc,
2616 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2617 return;
2618 }
2619#else
2620
2621 if (elt)
2622 {
2623 /* We need to find the best (under the criteria documented above) entry
2624 in the class that is valid. We use the `flag' field to indicate
2625 choices that were invalid and iterate until we can't find a better
2626 one that hasn't already been tried. */
2627
2628 for (p = elt->first_same_value; p; p = p->next_same_value)
2629 p->flag = 0;
2630
2631 while (found_better)
2632 {
2633 int best_addr_cost = ADDRESS_COST (*loc);
2634 int best_rtx_cost = (elt->cost + 1) >> 1;
2635 struct table_elt *best_elt = elt;
2636
2637 found_better = 0;
2638 for (p = elt->first_same_value; p; p = p->next_same_value)
2639 if (! p->flag
2640 && (GET_CODE (p->exp) == REG
2641 || exp_equiv_p (p->exp, p->exp, 1, 0))
2642 && (ADDRESS_COST (p->exp) < best_addr_cost
2643 || (ADDRESS_COST (p->exp) == best_addr_cost
2644 && (p->cost + 1) >> 1 > best_rtx_cost)))
2645 {
2646 found_better = 1;
2647 best_addr_cost = ADDRESS_COST (p->exp);
2648 best_rtx_cost = (p->cost + 1) >> 1;
2649 best_elt = p;
2650 }
2651
2652 if (found_better)
2653 {
2654 if (validate_change (insn, loc,
2655 canon_reg (copy_rtx (best_elt->exp),
2656 NULL_RTX), 0))
2657 return;
2658 else
2659 best_elt->flag = 1;
2660 }
2661 }
2662 }
2663
2664 /* If the address is a binary operation with the first operand a register
2665 and the second a constant, do the same as above, but looking for
2666 equivalences of the register. Then try to simplify before checking for
2667 the best address to use. This catches a few cases: First is when we
2668 have REG+const and the register is another REG+const. We can often merge
2669 the constants and eliminate one insn and one register. It may also be
2670 that a machine has a cheap REG+REG+const. Finally, this improves the
2671 code on the Alpha for unaligned byte stores. */
2672
2673 if (flag_expensive_optimizations
2674 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2675 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2676 && GET_CODE (XEXP (*loc, 0)) == REG
2677 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2678 {
2679 rtx c = XEXP (*loc, 1);
2680
2681 do_not_record = 0;
2197a88a 2682 hash = HASH (XEXP (*loc, 0), Pmode);
42495ca0
RK
2683 do_not_record = save_do_not_record;
2684 hash_arg_in_memory = save_hash_arg_in_memory;
2685 hash_arg_in_struct = save_hash_arg_in_struct;
2686
2687 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2688 if (elt == 0)
2689 return;
2690
2691 /* We need to find the best (under the criteria documented above) entry
2692 in the class that is valid. We use the `flag' field to indicate
2693 choices that were invalid and iterate until we can't find a better
2694 one that hasn't already been tried. */
2695
2696 for (p = elt->first_same_value; p; p = p->next_same_value)
2697 p->flag = 0;
2698
2699 while (found_better)
2700 {
2701 int best_addr_cost = ADDRESS_COST (*loc);
2702 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2703 struct table_elt *best_elt = elt;
2704 rtx best_rtx = *loc;
2705 int count;
2706
2707 /* This is at worst case an O(n^2) algorithm, so limit our search
2708 to the first 32 elements on the list. This avoids trouble
2709 compiling code with very long basic blocks that can easily
2710 call cse_gen_binary so many times that we run out of memory. */
2711
2712 found_better = 0;
2713 for (p = elt->first_same_value, count = 0;
2714 p && count < 32;
2715 p = p->next_same_value, count++)
2716 if (! p->flag
2717 && (GET_CODE (p->exp) == REG
2718 || exp_equiv_p (p->exp, p->exp, 1, 0)))
2719 {
2720 rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);
2721
2722 if ((ADDRESS_COST (new) < best_addr_cost
2723 || (ADDRESS_COST (new) == best_addr_cost
2724 && (COST (new) + 1) >> 1 > best_rtx_cost)))
2725 {
2726 found_better = 1;
2727 best_addr_cost = ADDRESS_COST (new);
2728 best_rtx_cost = (COST (new) + 1) >> 1;
2729 best_elt = p;
2730 best_rtx = new;
2731 }
2732 }
2733
2734 if (found_better)
2735 {
2736 if (validate_change (insn, loc,
2737 canon_reg (copy_rtx (best_rtx),
2738 NULL_RTX), 0))
2739 return;
2740 else
2741 best_elt->flag = 1;
2742 }
2743 }
2744 }
2745#endif
2746}
2747\f
2748/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2749 operation (EQ, NE, GT, etc.), follow it back through the hash table to
2750 find what values are being compared.
2751
2752 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2753 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2754 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2755 compared to produce cc0.
2756
2757 The return value is the comparison operator and is either the code of
2758 A or the code corresponding to the inverse of the comparison. */
2759
2760static enum rtx_code
13c9910f 2761find_comparison_args (code, parg1, parg2, pmode1, pmode2)
7afe21cc
RK
2762 enum rtx_code code;
2763 rtx *parg1, *parg2;
13c9910f 2764 enum machine_mode *pmode1, *pmode2;
7afe21cc
RK
2765{
2766 rtx arg1, arg2;
2767
2768 arg1 = *parg1, arg2 = *parg2;
2769
2770 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2771
2772 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2773 {
2774 /* Set non-zero when we find something of interest. */
2775 rtx x = 0;
2776 int reverse_code = 0;
2777 struct table_elt *p = 0;
2778
2779 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2780 On machines with CC0, this is the only case that can occur, since
2781 fold_rtx will return the COMPARE or item being compared with zero
2782 when given CC0. */
2783
2784 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2785 x = arg1;
2786
2787 /* If ARG1 is a comparison operator and CODE is testing for
2788 STORE_FLAG_VALUE, get the inner arguments. */
2789
2790 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
2791 {
2792 if (code == NE
2793 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2794 && code == LT && STORE_FLAG_VALUE == -1)
2795#ifdef FLOAT_STORE_FLAG_VALUE
2796 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2797 && FLOAT_STORE_FLAG_VALUE < 0)
2798#endif
2799 )
2800 x = arg1;
2801 else if (code == EQ
2802 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2803 && code == GE && STORE_FLAG_VALUE == -1)
2804#ifdef FLOAT_STORE_FLAG_VALUE
2805 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2806 && FLOAT_STORE_FLAG_VALUE < 0)
2807#endif
2808 )
2809 x = arg1, reverse_code = 1;
2810 }
2811
2812 /* ??? We could also check for
2813
2814 (ne (and (eq (...) (const_int 1))) (const_int 0))
2815
2816 and related forms, but let's wait until we see them occurring. */
2817
2818 if (x == 0)
2819 /* Look up ARG1 in the hash table and see if it has an equivalence
2820 that lets us see what is being compared. */
2821 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
2822 GET_MODE (arg1));
2823 if (p) p = p->first_same_value;
2824
2825 for (; p; p = p->next_same_value)
2826 {
2827 enum machine_mode inner_mode = GET_MODE (p->exp);
2828
2829 /* If the entry isn't valid, skip it. */
2830 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
2831 continue;
2832
2833 if (GET_CODE (p->exp) == COMPARE
2834 /* Another possibility is that this machine has a compare insn
2835 that includes the comparison code. In that case, ARG1 would
2836 be equivalent to a comparison operation that would set ARG1 to
2837 either STORE_FLAG_VALUE or zero. If this is an NE operation,
2838 ORIG_CODE is the actual comparison being done; if it is an EQ,
2839 we must reverse ORIG_CODE. On machines with a negative value
2840 for STORE_FLAG_VALUE, also look at LT and GE operations. */
2841 || ((code == NE
2842 || (code == LT
2843 && GET_MODE_CLASS (inner_mode) == MODE_INT
2844 && (GET_MODE_BITSIZE (inner_mode)
2845 <= HOST_BITS_PER_WIDE_INT)
2846 && (STORE_FLAG_VALUE
2847 & ((HOST_WIDE_INT) 1
2848 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2849#ifdef FLOAT_STORE_FLAG_VALUE
2850 || (code == LT
2851 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2852 && FLOAT_STORE_FLAG_VALUE < 0)
2853#endif
2854 )
2855 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
2856 {
2857 x = p->exp;
2858 break;
2859 }
2860 else if ((code == EQ
2861 || (code == GE
2862 && GET_MODE_CLASS (inner_mode) == MODE_INT
2863 && (GET_MODE_BITSIZE (inner_mode)
2864 <= HOST_BITS_PER_WIDE_INT)
2865 && (STORE_FLAG_VALUE
2866 & ((HOST_WIDE_INT) 1
2867 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2868#ifdef FLOAT_STORE_FLAG_VALUE
2869 || (code == GE
2870 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2871 && FLOAT_STORE_FLAG_VALUE < 0)
2872#endif
2873 )
2874 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
2875 {
2876 reverse_code = 1;
2877 x = p->exp;
2878 break;
2879 }
2880
2881 /* If this is fp + constant, the equivalent is a better operand since
2882 it may let us predict the value of the comparison. */
2883 else if (NONZERO_BASE_PLUS_P (p->exp))
2884 {
2885 arg1 = p->exp;
2886 continue;
2887 }
2888 }
2889
2890 /* If we didn't find a useful equivalence for ARG1, we are done.
2891 Otherwise, set up for the next iteration. */
2892 if (x == 0)
2893 break;
2894
2895 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
2896 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
2897 code = GET_CODE (x);
2898
2899 if (reverse_code)
2900 code = reverse_condition (code);
2901 }
2902
2903 /* Return our results. Return the modes from before fold_rtx
2904 because fold_rtx might produce const_int, and then it's too late. */
2905 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
2906 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
2907
2908 return code;
2909}
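/* Exposition only, assuming COND is the comparison rtx of a jump insn:
   the folded operands and their pre-fold modes come back through the
   pointer arguments, and the returned code reflects any reversal the
   walk above performed.  */
#if 0
  rtx arg1 = XEXP (cond, 0), arg2 = XEXP (cond, 1);
  enum machine_mode mode1, mode2;
  enum rtx_code code = find_comparison_args (GET_CODE (cond), &arg1, &arg2,
					     &mode1, &mode2);
#endif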
2910\f
2911/* Try to simplify a unary operation CODE whose output mode is to be
2912 MODE with input operand OP whose mode was originally OP_MODE.
2913 Return zero if no simplification can be made. */
2914
2915rtx
2916simplify_unary_operation (code, mode, op, op_mode)
2917 enum rtx_code code;
2918 enum machine_mode mode;
2919 rtx op;
2920 enum machine_mode op_mode;
2921{
2922 register int width = GET_MODE_BITSIZE (mode);
2923
2924 /* The order of these tests is critical so that, for example, we don't
2925 check the wrong mode (input vs. output) for a conversion operation,
2926 such as FIX. At some point, this should be simplified. */
2927
2928#if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC)
2929
2930 if (code == FLOAT && GET_MODE (op) == VOIDmode
2931 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
2932 {
2933 HOST_WIDE_INT hv, lv;
2934 REAL_VALUE_TYPE d;
2935
2936 if (GET_CODE (op) == CONST_INT)
2937 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
2938 else
7ac4a266 2939 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
7afe21cc
RK
2940
2941#ifdef REAL_ARITHMETIC
2942 REAL_VALUE_FROM_INT (d, lv, hv);
2943#else
2944 if (hv < 0)
2945 {
2946 d = (double) (~ hv);
2947 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2948 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2949 d += (double) (unsigned HOST_WIDE_INT) (~ lv);
2950 d = (- d - 1.0);
2951 }
2952 else
2953 {
2954 d = (double) hv;
2955 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2956 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2957 d += (double) (unsigned HOST_WIDE_INT) lv;
2958 }
2959#endif /* REAL_ARITHMETIC */
2960
2961 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2962 }
2963 else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
2964 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
7afe21cc 2965 {
2966 HOST_WIDE_INT hv, lv;
2967 REAL_VALUE_TYPE d;
2968
2969 if (GET_CODE (op) == CONST_INT)
2970 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
2971 else
2972 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
2973
2974 if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
2975 ;
2976 else
2977 hv = 0, lv &= GET_MODE_MASK (op_mode);
2978
2979#ifdef REAL_ARITHMETIC
2980 REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv);
2981#else
2982
2983 d = (double) (unsigned HOST_WIDE_INT) hv;
2984 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2985 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2986 d += (double) (unsigned HOST_WIDE_INT) lv;
2987#endif /* REAL_ARITHMETIC */
2988
2989 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2990 }
2991#endif
2992
2993 if (GET_CODE (op) == CONST_INT
2994 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
2995 {
2996 register HOST_WIDE_INT arg0 = INTVAL (op);
2997 register HOST_WIDE_INT val;
2998
2999 switch (code)
3000 {
3001 case NOT:
3002 val = ~ arg0;
3003 break;
3004
3005 case NEG:
3006 val = - arg0;
3007 break;
3008
3009 case ABS:
3010 val = (arg0 >= 0 ? arg0 : - arg0);
3011 break;
3012
3013 case FFS:
3014 /* Don't use ffs here. Instead, get low order bit and then its
3015 number. If arg0 is zero, this will return 0, as desired. */
3016 arg0 &= GET_MODE_MASK (mode);
3017 val = exact_log2 (arg0 & (- arg0)) + 1;
3018 break;
3019
3020 case TRUNCATE:
3021 val = arg0;
3022 break;
3023
3024 case ZERO_EXTEND:
3025 if (op_mode == VOIDmode)
3026 op_mode = mode;
3027 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3028 {
3029 /* If we were really extending the mode,
3030 we would have to distinguish between zero-extension
3031 and sign-extension. */
3032 if (width != GET_MODE_BITSIZE (op_mode))
3033 abort ();
3034 val = arg0;
3035 }
3036 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3037 val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3038 else
3039 return 0;
3040 break;
3041
3042 case SIGN_EXTEND:
3043 if (op_mode == VOIDmode)
3044 op_mode = mode;
3045 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3046 {
3047 /* If we were really extending the mode,
3048 we would have to distinguish between zero-extension
3049 and sign-extension. */
3050 if (width != GET_MODE_BITSIZE (op_mode))
3051 abort ();
3052 val = arg0;
3053 }
3054 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
7afe21cc 3055 {
3056 val
3057 = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3058 if (val
3059 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
3060 val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3061 }
3062 else
3063 return 0;
3064 break;
3065
3066 case SQRT:
3067 return 0;
3068
3069 default:
3070 abort ();
3071 }
3072
3073 /* Clear the bits that don't belong in our mode,
3074 unless they and our sign bit are all one.
3075 So we get either a reasonable negative value or a reasonable
3076 unsigned value for this mode. */
3077 if (width < HOST_BITS_PER_WIDE_INT
3078 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3079 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3080 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3081
3082 return GEN_INT (val);
3083 }
3084
3085 /* We can do some operations on integer CONST_DOUBLEs. Also allow
3086 for a DImode operation on a CONST_INT. */
3087 else if (GET_MODE (op) == VOIDmode && width == HOST_BITS_PER_INT * 2
3088 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3089 {
3090 HOST_WIDE_INT l1, h1, lv, hv;
3091
3092 if (GET_CODE (op) == CONST_DOUBLE)
3093 l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
3094 else
3095 l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;
3096
3097 switch (code)
3098 {
3099 case NOT:
3100 lv = ~ l1;
3101 hv = ~ h1;
3102 break;
3103
3104 case NEG:
3105 neg_double (l1, h1, &lv, &hv);
3106 break;
3107
3108 case ABS:
3109 if (h1 < 0)
3110 neg_double (l1, h1, &lv, &hv);
3111 else
3112 lv = l1, hv = h1;
3113 break;
3114
3115 case FFS:
3116 hv = 0;
3117 if (l1 == 0)
3118 lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
3119 else
3120 lv = exact_log2 (l1 & (-l1)) + 1;
3121 break;
3122
3123 case TRUNCATE:
3124 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3125 return GEN_INT (l1 & GET_MODE_MASK (mode));
3126 else
3127 return 0;
3128 break;
3129
3130 case ZERO_EXTEND:
3131 if (op_mode == VOIDmode
3132 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3133 return 0;
3134
3135 hv = 0;
3136 lv = l1 & GET_MODE_MASK (op_mode);
3137 break;
3138
3139 case SIGN_EXTEND:
3140 if (op_mode == VOIDmode
3141 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3142 return 0;
3143 else
3144 {
3145 lv = l1 & GET_MODE_MASK (op_mode);
3146 if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
3147 && (lv & ((HOST_WIDE_INT) 1
3148 << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
3149 lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3150
3151 hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
3152 }
3153 break;
3154
3155 case SQRT:
3156 return 0;
3157
3158 default:
3159 return 0;
3160 }
3161
3162 return immed_double_const (lv, hv, mode);
3163 }
3164
3165#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3166 else if (GET_CODE (op) == CONST_DOUBLE
3167 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3168 {
3169 REAL_VALUE_TYPE d;
3170 jmp_buf handler;
3171 rtx x;
3172
3173 if (setjmp (handler))
3174 /* There used to be a warning here, but that is inadvisable.
3175 People may want to cause traps, and the natural way
3176 to do it should not get a warning. */
3177 return 0;
3178
3179 set_float_handler (handler);
3180
3181 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3182
3183 switch (code)
3184 {
3185 case NEG:
3186 d = REAL_VALUE_NEGATE (d);
3187 break;
3188
3189 case ABS:
3190 if (REAL_VALUE_NEGATIVE (d))
3191 d = REAL_VALUE_NEGATE (d);
3192 break;
3193
3194 case FLOAT_TRUNCATE:
3195 d = real_value_truncate (mode, d);
3196 break;
3197
3198 case FLOAT_EXTEND:
3199 /* All this does is change the mode. */
3200 break;
3201
3202 case FIX:
3203 d = REAL_VALUE_RNDZINT (d);
3204 break;
3205
3206 case UNSIGNED_FIX:
3207 d = REAL_VALUE_UNSIGNED_RNDZINT (d);
3208 break;
3209
3210 case SQRT:
3211 return 0;
3212
3213 default:
3214 abort ();
3215 }
3216
3217 x = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3218 set_float_handler (NULL_PTR);
3219 return x;
3220 }
3221 else if (GET_CODE (op) == CONST_DOUBLE && GET_MODE_CLASS (mode) == MODE_INT
3222 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3223 {
3224 REAL_VALUE_TYPE d;
3225 jmp_buf handler;
3226 HOST_WIDE_INT val;
3227
3228 if (setjmp (handler))
3229 return 0;
3230
3231 set_float_handler (handler);
3232
3233 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3234
3235 switch (code)
3236 {
3237 case FIX:
3238 val = REAL_VALUE_FIX (d);
3239 break;
3240
3241 case UNSIGNED_FIX:
3242 val = REAL_VALUE_UNSIGNED_FIX (d);
3243 break;
3244
3245 default:
3246 abort ();
3247 }
3248
3249 set_float_handler (NULL_PTR);
3250
3251 /* Clear the bits that don't belong in our mode,
3252 unless they and our sign bit are all one.
3253 So we get either a reasonable negative value or a reasonable
3254 unsigned value for this mode. */
3255 if (width < HOST_BITS_PER_WIDE_INT
3256 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3257 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3258 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3259
3260 return GEN_INT (val);
3261 }
3262#endif
3263 /* This was formerly used only for non-IEEE float.
3264 eggert@twinsun.com says it is safe for IEEE also. */
3265 else
3266 {
3267 /* There are some simplifications we can do even if the operands
3268 aren't constant. */
3269 switch (code)
3270 {
3271 case NEG:
3272 case NOT:
3273 /* (not (not X)) == X, similarly for NEG. */
3274 if (GET_CODE (op) == code)
3275 return XEXP (op, 0);
3276 break;
3277
3278 case SIGN_EXTEND:
3279 /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
3280 becomes just the MINUS if its mode is MODE. This allows
3281 folding switch statements on machines using casesi (such as
3282 the Vax). */
3283 if (GET_CODE (op) == TRUNCATE
3284 && GET_MODE (XEXP (op, 0)) == mode
3285 && GET_CODE (XEXP (op, 0)) == MINUS
3286 && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
3287 && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
3288 return XEXP (op, 0);
3289 break;
3290 }
3291
3292 return 0;
3293 }
3294}
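/* Worked example (exposition only): for
   simplify_unary_operation (SIGN_EXTEND, SImode, GEN_INT (255), QImode)
   the SIGN_EXTEND case above masks to the 8 bits of QImode (val == 255),
   sees the sign bit (1 << 7) set, and subtracts 1 << 8, returning
   (const_int -1).  */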
3295\f
3296/* Simplify a binary operation CODE with result mode MODE, operating on OP0
3297 and OP1. Return 0 if no simplification is possible.
3298
3299 Don't use this for relational operations such as EQ or LT.
3300 Use simplify_relational_operation instead. */
3301
3302rtx
3303simplify_binary_operation (code, mode, op0, op1)
3304 enum rtx_code code;
3305 enum machine_mode mode;
3306 rtx op0, op1;
3307{
3308 register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3309 HOST_WIDE_INT val;
3310 int width = GET_MODE_BITSIZE (mode);
3311 rtx tem;
3312
3313 /* Relational operations don't work here. We must know the mode
3314 of the operands in order to do the comparison correctly.
3315 Assuming a full word can give incorrect results.
3316 Consider comparing 128 with -128 in QImode. */
3317
3318 if (GET_RTX_CLASS (code) == '<')
3319 abort ();
3320
3321#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3322 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3323 && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3324 && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3325 {
3326 REAL_VALUE_TYPE f0, f1, value;
3327 jmp_buf handler;
3328
3329 if (setjmp (handler))
3330 return 0;
3331
3332 set_float_handler (handler);
3333
3334 REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3335 REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
3336 f0 = real_value_truncate (mode, f0);
3337 f1 = real_value_truncate (mode, f1);
3338
3339#ifdef REAL_ARITHMETIC
3340 REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
3341#else
3342 switch (code)
3343 {
3344 case PLUS:
3345 value = f0 + f1;
3346 break;
3347 case MINUS:
3348 value = f0 - f1;
3349 break;
3350 case MULT:
3351 value = f0 * f1;
3352 break;
3353 case DIV:
3354#ifndef REAL_INFINITY
3355 if (f1 == 0)
3356 return 0;
3357#endif
3358 value = f0 / f1;
3359 break;
3360 case SMIN:
3361 value = MIN (f0, f1);
3362 break;
3363 case SMAX:
3364 value = MAX (f0, f1);
3365 break;
3366 default:
3367 abort ();
3368 }
3369#endif
3370
3371 value = real_value_truncate (mode, value);
3372 set_float_handler (NULL_PTR);
3373 return CONST_DOUBLE_FROM_REAL_VALUE (value, mode);
3374 }
3375#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3376
3377 /* We can fold some multi-word operations. */
3378 if (GET_MODE_CLASS (mode) == MODE_INT
3379 && width == HOST_BITS_PER_WIDE_INT * 2
3380 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
3381 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
3382 {
3383 HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
3384
3385 if (GET_CODE (op0) == CONST_DOUBLE)
3386 l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3387 else
3388 l1 = INTVAL (op0), h1 = l1 < 0 ? -1 : 0;
3389
3390 if (GET_CODE (op1) == CONST_DOUBLE)
3391 l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3392 else
3393 l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3394
3395 switch (code)
3396 {
3397 case MINUS:
3398 /* A - B == A + (-B). */
3399 neg_double (l2, h2, &lv, &hv);
3400 l2 = lv, h2 = hv;
3401
3402 /* .. fall through ... */
3403
3404 case PLUS:
3405 add_double (l1, h1, l2, h2, &lv, &hv);
3406 break;
3407
3408 case MULT:
3409 mul_double (l1, h1, l2, h2, &lv, &hv);
3410 break;
3411
3412 case DIV: case MOD: case UDIV: case UMOD:
3413 /* We'd need to include tree.h to do this and it doesn't seem worth
3414 it. */
3415 return 0;
3416
3417 case AND:
3418 lv = l1 & l2, hv = h1 & h2;
3419 break;
3420
3421 case IOR:
3422 lv = l1 | l2, hv = h1 | h2;
3423 break;
3424
3425 case XOR:
3426 lv = l1 ^ l2, hv = h1 ^ h2;
3427 break;
3428
3429 case SMIN:
3430 if (h1 < h2
3431 || (h1 == h2
3432 && ((unsigned HOST_WIDE_INT) l1
3433 < (unsigned HOST_WIDE_INT) l2)))
3434 lv = l1, hv = h1;
3435 else
3436 lv = l2, hv = h2;
3437 break;
3438
3439 case SMAX:
3440 if (h1 > h2
3441 || (h1 == h2
3442 && ((unsigned HOST_WIDE_INT) l1
3443 > (unsigned HOST_WIDE_INT) l2)))
3444 lv = l1, hv = h1;
3445 else
3446 lv = l2, hv = h2;
3447 break;
3448
3449 case UMIN:
3450 if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3451 || (h1 == h2
3452 && ((unsigned HOST_WIDE_INT) l1
3453 < (unsigned HOST_WIDE_INT) l2)))
3454 lv = l1, hv = h1;
3455 else
3456 lv = l2, hv = h2;
3457 break;
3458
3459 case UMAX:
3460 if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3461 || (h1 == h2
3462 && ((unsigned HOST_WIDE_INT) l1
3463 > (unsigned HOST_WIDE_INT) l2)))
3464 lv = l1, hv = h1;
3465 else
3466 lv = l2, hv = h2;
3467 break;
3468
3469 case LSHIFTRT: case ASHIFTRT:
45620ed4 3470 case ASHIFT:
3471 case ROTATE: case ROTATERT:
3472#ifdef SHIFT_COUNT_TRUNCATED
3473 if (SHIFT_COUNT_TRUNCATED)
3474 l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
3475#endif
3476
3477 if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3478 return 0;
3479
3480 if (code == LSHIFTRT || code == ASHIFTRT)
3481 rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3482 code == ASHIFTRT);
3483 else if (code == ASHIFT)
3484 lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
3485 else if (code == ROTATE)
3486 lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3487 else /* code == ROTATERT */
3488 rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3489 break;
3490
3491 default:
3492 return 0;
3493 }
3494
3495 return immed_double_const (lv, hv, mode);
3496 }
3497
3498 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
906c4e36 3499 || width > HOST_BITS_PER_WIDE_INT || width == 0)
3500 {
3501 /* Even if we can't compute a constant result,
3502 there are some cases worth simplifying. */
3503
3504 switch (code)
3505 {
3506 case PLUS:
3507 /* In IEEE floating point, x+0 is not the same as x. Similarly
3508 for the other optimizations below. */
3509 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
a83afb65 3510 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3511 break;
3512
3513 if (op1 == CONST0_RTX (mode))
3514 return op0;
3515
3516 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3517 if (GET_CODE (op0) == NEG)
96b0e481 3518 return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
7afe21cc 3519 else if (GET_CODE (op1) == NEG)
96b0e481 3520 return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
7afe21cc 3521
3522 /* Handle both-operands-constant cases. We can only add
3523 CONST_INTs to constants since the sum of relocatable symbols
3524 can't be handled by most assemblers. Don't add CONST_INT
3525 to CONST_INT since overflow won't be computed properly if wider
3526 than HOST_BITS_PER_WIDE_INT. */
7afe21cc 3527
3528 if (CONSTANT_P (op0) && GET_MODE (op0) != VOIDmode
3529 && GET_CODE (op1) == CONST_INT)
96b0e481 3530 return plus_constant (op0, INTVAL (op1));
3531 else if (CONSTANT_P (op1) && GET_MODE (op1) != VOIDmode
3532 && GET_CODE (op0) == CONST_INT)
96b0e481 3533 return plus_constant (op1, INTVAL (op0));
7afe21cc 3534
3535 /* See if this is something like X * C - X or vice versa or
3536 if the multiplication is written as a shift. If so, we can
3537 distribute and make a new multiply, shift, or maybe just
3538 have X (if C is 2 in the example above). But don't make a
3539 real multiply if we didn't have one before. */
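 /* As an illustration: (plus (mult x 2) x) folds to (mult x 3)
 below, while (plus (ashift x 2) x) is left alone, since its
 folded form (mult x 5) would introduce a real multiply where
 the original had only a shift and an add. */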
3540
3541 if (! FLOAT_MODE_P (mode))
3542 {
3543 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3544 rtx lhs = op0, rhs = op1;
3545 int had_mult = 0;
3546
3547 if (GET_CODE (lhs) == NEG)
3548 coeff0 = -1, lhs = XEXP (lhs, 0);
3549 else if (GET_CODE (lhs) == MULT
3550 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3551 {
3552 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3553 had_mult = 1;
3554 }
3555 else if (GET_CODE (lhs) == ASHIFT
3556 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3557 && INTVAL (XEXP (lhs, 1)) >= 0
3558 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3559 {
3560 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3561 lhs = XEXP (lhs, 0);
3562 }
3563
3564 if (GET_CODE (rhs) == NEG)
3565 coeff1 = -1, rhs = XEXP (rhs, 0);
3566 else if (GET_CODE (rhs) == MULT
3567 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3568 {
3569 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3570 had_mult = 1;
3571 }
3572 else if (GET_CODE (rhs) == ASHIFT
3573 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3574 && INTVAL (XEXP (rhs, 1)) >= 0
3575 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3576 {
3577 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3578 rhs = XEXP (rhs, 0);
3579 }
3580
3581 if (rtx_equal_p (lhs, rhs))
3582 {
3583 tem = cse_gen_binary (MULT, mode, lhs,
3584 GEN_INT (coeff0 + coeff1));
3585 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3586 }
3587 }
3588
3589 /* If one of the operands is a PLUS or a MINUS, see if we can
3590 simplify this by the associative law.
3591 Don't use the associative law for floating point.
3592 The inaccuracy makes it nonassociative,
3593 and subtle programs can break if operations are associated. */
7afe21cc 3594
cbf6a543 3595 if (INTEGRAL_MODE_P (mode)
3596 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3597 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3598 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3599 return tem;
3600 break;
3601
3602 case COMPARE:
3603#ifdef HAVE_cc0
3604 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3605 using cc0, in which case we want to leave it as a COMPARE
3606 so we can distinguish it from a register-register-copy.
3607
3608 In IEEE floating point, x-0 is not the same as x. */
3609
3610 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
a83afb65 3611 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3612 && op1 == CONST0_RTX (mode))
3613 return op0;
3614#else
3615 /* Do nothing here. */
3616#endif
3617 break;
3618
3619 case MINUS:
3620 /* None of these optimizations can be done for IEEE
3621 floating point. */
3622 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
a83afb65 3623 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3624 break;
3625
3626 /* We can't assume x-x is 0 even with non-IEEE floating point,
3627 but since it is zero except in very strange circumstances, we
3628 will treat it as zero with -ffast-math. */
3629 if (rtx_equal_p (op0, op1)
3630 && ! side_effects_p (op0)
3631 && (! FLOAT_MODE_P (mode) || flag_fast_math))
3632 return CONST0_RTX (mode);
3633
3634 /* Change subtraction from zero into negation. */
3635 if (op0 == CONST0_RTX (mode))
3636 return gen_rtx (NEG, mode, op1);
3637
3638 /* (-1 - a) is ~a. */
3639 if (op0 == constm1_rtx)
3640 return gen_rtx (NOT, mode, op1);
3641
3642 /* Subtracting 0 has no effect. */
3643 if (op1 == CONST0_RTX (mode))
3644 return op0;
3645
3646 /* See if this is something like X * C - X or vice versa or
3647 if the multiplication is written as a shift. If so, we can
3648 distribute and make a new multiply, shift, or maybe just
3649 have X (if C is 2 in the example above). But don't make a
3650 real multiply if we didn't have one before. */
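 /* E.g. (minus (mult x 4) x) folds to (mult x 3) below, and
 (minus x (neg x)) gets coefficients 1 - (-1) == 2, which the
 multiply simplifier then turns into (ashift x 1). */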
3651
3652 if (! FLOAT_MODE_P (mode))
3653 {
3654 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3655 rtx lhs = op0, rhs = op1;
3656 int had_mult = 0;
3657
3658 if (GET_CODE (lhs) == NEG)
3659 coeff0 = -1, lhs = XEXP (lhs, 0);
3660 else if (GET_CODE (lhs) == MULT
3661 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3662 {
3663 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3664 had_mult = 1;
3665 }
3666 else if (GET_CODE (lhs) == ASHIFT
3667 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3668 && INTVAL (XEXP (lhs, 1)) >= 0
3669 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3670 {
3671 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3672 lhs = XEXP (lhs, 0);
3673 }
3674
3675 if (GET_CODE (rhs) == NEG)
3676 coeff1 = -1, rhs = XEXP (rhs, 0);
3677 else if (GET_CODE (rhs) == MULT
3678 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3679 {
3680 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3681 had_mult = 1;
3682 }
3683 else if (GET_CODE (rhs) == ASHIFT
3684 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3685 && INTVAL (XEXP (rhs, 1)) >= 0
3686 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3687 {
3688 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3689 rhs = XEXP (rhs, 0);
3690 }
3691
3692 if (rtx_equal_p (lhs, rhs))
3693 {
3694 tem = cse_gen_binary (MULT, mode, lhs,
3695 GEN_INT (coeff0 - coeff1));
3696 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3697 }
3698 }
3699
3700 /* (a - (-b)) -> (a + b). */
3701 if (GET_CODE (op1) == NEG)
96b0e481 3702 return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
7afe21cc 3703
3704 /* If one of the operands is a PLUS or a MINUS, see if we can
3705 simplify this by the associative law.
3706 Don't use the associative law for floating point.
3707 The inaccuracy makes it nonassociative,
3708 and subtle programs can break if operations are associated. */
7afe21cc 3709
cbf6a543 3710 if (INTEGRAL_MODE_P (mode)
3711 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3712 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3713 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3714 return tem;
3715
3716 /* Don't let a relocatable value get a negative coeff. */
b5a09c41 3717 if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
3718 return plus_constant (op0, - INTVAL (op1));
3719 break;
3720
3721 case MULT:
3722 if (op1 == constm1_rtx)
3723 {
96b0e481 3724 tem = simplify_unary_operation (NEG, mode, op0, mode);
3725
3726 return tem ? tem : gen_rtx (NEG, mode, op0);
3727 }
3728
3729 /* In IEEE floating point, x*0 is not always 0. */
3730 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
a83afb65 3731 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3732 && op1 == CONST0_RTX (mode)
3733 && ! side_effects_p (op0))
3734 return op1;
3735
3736 /* In IEEE floating point, x*1 is not equivalent to x for nans.
3737 However, ANSI says we can drop signals,
3738 so we can do this anyway. */
3739 if (op1 == CONST1_RTX (mode))
3740 return op0;
3741
3742 /* Convert multiply by constant power of two into shift unless
3743 we are still generating RTL. This test is a kludge. */
7afe21cc 3744 if (GET_CODE (op1) == CONST_INT
3745 && (val = exact_log2 (INTVAL (op1))) >= 0
3746 && ! rtx_equal_function_value_matters)
906c4e36 3747 return gen_rtx (ASHIFT, mode, op0, GEN_INT (val));
3748
3749 if (GET_CODE (op1) == CONST_DOUBLE
3750 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
3751 {
3752 REAL_VALUE_TYPE d;
3753 jmp_buf handler;
3754 int op1is2, op1ism1;
3755
3756 if (setjmp (handler))
3757 return 0;
3758
3759 set_float_handler (handler);
7afe21cc 3760 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3761 op1is2 = REAL_VALUES_EQUAL (d, dconst2);
3762 op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
3763 set_float_handler (NULL_PTR);
3764
3765 /* x*2 is x+x and x*(-1) is -x */
5a3d4bef 3766 if (op1is2 && GET_MODE (op0) == mode)
3767 return gen_rtx (PLUS, mode, op0, copy_rtx (op0));
3768
5a3d4bef 3769 else if (op1ism1 && GET_MODE (op0) == mode)
3770 return gen_rtx (NEG, mode, op0);
3771 }
3772 break;
3773
3774 case IOR:
3775 if (op1 == const0_rtx)
3776 return op0;
3777 if (GET_CODE (op1) == CONST_INT
3778 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3779 return op1;
3780 if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3781 return op0;
3782 /* A | (~A) -> -1 */
3783 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3784 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
31dcf83f 3785 && ! side_effects_p (op0)
8e7e5365 3786 && GET_MODE_CLASS (mode) != MODE_CC)
3787 return constm1_rtx;
3788 break;
3789
3790 case XOR:
3791 if (op1 == const0_rtx)
3792 return op0;
3793 if (GET_CODE (op1) == CONST_INT
3794 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3795 return gen_rtx (NOT, mode, op0);
31dcf83f 3796 if (op0 == op1 && ! side_effects_p (op0)
8e7e5365 3797 && GET_MODE_CLASS (mode) != MODE_CC)
3798 return const0_rtx;
3799 break;
3800
3801 case AND:
3802 if (op1 == const0_rtx && ! side_effects_p (op0))
3803 return const0_rtx;
3804 if (GET_CODE (op1) == CONST_INT
3805 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3806 return op0;
31dcf83f 3807 if (op0 == op1 && ! side_effects_p (op0)
8e7e5365 3808 && GET_MODE_CLASS (mode) != MODE_CC)
3809 return op0;
3810 /* A & (~A) -> 0 */
3811 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3812 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
709ab4fc 3813 && ! side_effects_p (op0)
8e7e5365 3814 && GET_MODE_CLASS (mode) != MODE_CC)
3815 return const0_rtx;
3816 break;
3817
3818 case UDIV:
3819 /* Convert divide by power of two into shift (divide by 1 handled
3820 below). */
3821 if (GET_CODE (op1) == CONST_INT
3822 && (arg1 = exact_log2 (INTVAL (op1))) > 0)
906c4e36 3823 return gen_rtx (LSHIFTRT, mode, op0, GEN_INT (arg1));
3824
3825 /* ... fall through ... */
3826
3827 case DIV:
3828 if (op1 == CONST1_RTX (mode))
3829 return op0;
3830
3831 /* In IEEE floating point, 0/x is not always 0. */
3832 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
a83afb65 3833 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3834 && op0 == CONST0_RTX (mode)
3835 && ! side_effects_p (op1))
7afe21cc 3836 return op0;
e7a522ba 3837
7afe21cc 3838#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3839 /* Change division by a constant into multiplication. Only do
3840 this with -ffast-math until an expert says it is safe in
3841 general. */
3842 else if (GET_CODE (op1) == CONST_DOUBLE
3843 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
3844 && op1 != CONST0_RTX (mode)
3845 && flag_fast_math)
3846 {
3847 REAL_VALUE_TYPE d;
3848 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3849
3850 if (! REAL_VALUES_EQUAL (d, dconst0))
3851 {
7afe21cc 3852#if defined (REAL_ARITHMETIC)
3853 REAL_ARITHMETIC (d, rtx_to_tree_code (DIV), dconst1, d);
3854 return gen_rtx (MULT, mode, op0,
3855 CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
7afe21cc 3856#else
3857 return gen_rtx (MULT, mode, op0,
3858 CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
7afe21cc 3859#endif
3860 }
3861 }
3862#endif
3863 break;
3864
3865 case UMOD:
3866 /* Handle modulus by power of two (mod with 1 handled below). */
3867 if (GET_CODE (op1) == CONST_INT
3868 && exact_log2 (INTVAL (op1)) > 0)
906c4e36 3869 return gen_rtx (AND, mode, op0, GEN_INT (INTVAL (op1) - 1));
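 /* E.g. (umod x 8) becomes (and x 7); the UDIV case above likewise
 turned (udiv x 8) into (lshiftrt x 3). */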
3870
3871 /* ... fall through ... */
3872
3873 case MOD:
3874 if ((op0 == const0_rtx || op1 == const1_rtx)
3875 && ! side_effects_p (op0) && ! side_effects_p (op1))
3876 return const0_rtx;
3877 break;
3878
3879 case ROTATERT:
3880 case ROTATE:
3881 /* Rotating ~0 always results in ~0. */
906c4e36 3882 if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
3883 && INTVAL (op0) == GET_MODE_MASK (mode)
3884 && ! side_effects_p (op1))
3885 return op0;
3886
3887 /* ... fall through ... */
3888
3889 case ASHIFT:
3890 case ASHIFTRT:
3891 case LSHIFTRT:
3892 if (op1 == const0_rtx)
3893 return op0;
3894 if (op0 == const0_rtx && ! side_effects_p (op1))
3895 return op0;
3896 break;
3897
3898 case SMIN:
3899 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
3900 && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width - 1)
3901 && ! side_effects_p (op0))
3902 return op1;
3903 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3904 return op0;
3905 break;
3906
3907 case SMAX:
906c4e36 3908 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
3909 && (INTVAL (op1)
3910 == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
3911 && ! side_effects_p (op0))
3912 return op1;
3913 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3914 return op0;
3915 break;
3916
3917 case UMIN:
3918 if (op1 == const0_rtx && ! side_effects_p (op0))
3919 return op1;
3920 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3921 return op0;
3922 break;
3923
3924 case UMAX:
3925 if (op1 == constm1_rtx && ! side_effects_p (op0))
3926 return op1;
3927 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3928 return op0;
3929 break;
3930
3931 default:
3932 abort ();
3933 }
3934
3935 return 0;
3936 }
3937
3938 /* Get the integer argument values in two forms:
3939 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
3940
3941 arg0 = INTVAL (op0);
3942 arg1 = INTVAL (op1);
3943
906c4e36 3944 if (width < HOST_BITS_PER_WIDE_INT)
7afe21cc 3945 {
3946 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
3947 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
3948
3949 arg0s = arg0;
3950 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
3951 arg0s |= ((HOST_WIDE_INT) (-1) << width);
3952
3953 arg1s = arg1;
3954 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
3955 arg1s |= ((HOST_WIDE_INT) (-1) << width);
3956 }
3957 else
3958 {
3959 arg0s = arg0;
3960 arg1s = arg1;
3961 }
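 /* For instance, with width == 8, the value 0xff gives arg0 == 0xff
 but arg0s == -1 (the 8-bit sign bit is set, so the high host bits
 are filled with ones), while 0x7f is the same in both forms. */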
3962
3963 /* Compute the value of the arithmetic. */
3964
3965 switch (code)
3966 {
3967 case PLUS:
538b78e7 3968 val = arg0s + arg1s;
3969 break;
3970
3971 case MINUS:
538b78e7 3972 val = arg0s - arg1s;
3973 break;
3974
3975 case MULT:
3976 val = arg0s * arg1s;
3977 break;
3978
3979 case DIV:
3980 if (arg1s == 0)
3981 return 0;
3982 val = arg0s / arg1s;
3983 break;
3984
3985 case MOD:
3986 if (arg1s == 0)
3987 return 0;
3988 val = arg0s % arg1s;
3989 break;
3990
3991 case UDIV:
3992 if (arg1 == 0)
3993 return 0;
906c4e36 3994 val = (unsigned HOST_WIDE_INT) arg0 / arg1;
3995 break;
3996
3997 case UMOD:
3998 if (arg1 == 0)
3999 return 0;
906c4e36 4000 val = (unsigned HOST_WIDE_INT) arg0 % arg1;
4001 break;
4002
4003 case AND:
4004 val = arg0 & arg1;
4005 break;
4006
4007 case IOR:
4008 val = arg0 | arg1;
4009 break;
4010
4011 case XOR:
4012 val = arg0 ^ arg1;
4013 break;
4014
4015 case LSHIFTRT:
4016 /* If shift count is undefined, don't fold it; let the machine do
4017 what it wants. But truncate it if the machine will do that. */
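 /* E.g. on a machine defining SHIFT_COUNT_TRUNCATED, an SImode
 count of 33 is reduced to 33 % 32 == 1 before the shift below. */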
4018 if (arg1 < 0)
4019 return 0;
4020
4021#ifdef SHIFT_COUNT_TRUNCATED
85c0a556 4022 if (SHIFT_COUNT_TRUNCATED)
4d61f8c5 4023 arg1 %= width;
4024#endif
4025
906c4e36 4026 val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
4027 break;
4028
4029 case ASHIFT:
4030 if (arg1 < 0)
4031 return 0;
4032
4033#ifdef SHIFT_COUNT_TRUNCATED
85c0a556 4034 if (SHIFT_COUNT_TRUNCATED)
4d61f8c5 4035 arg1 %= width;
4036#endif
4037
906c4e36 4038 val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
4039 break;
4040
4041 case ASHIFTRT:
4042 if (arg1 < 0)
4043 return 0;
4044
4045#ifdef SHIFT_COUNT_TRUNCATED
85c0a556 4046 if (SHIFT_COUNT_TRUNCATED)
4d61f8c5 4047 arg1 %= width;
4048#endif
4049
7afe21cc 4050 val = arg0s >> arg1;
4051
4052 /* The bootstrap compiler may not have sign-extended the right shift.
4053 Manually extend the sign to ensure the bootstrap cc matches gcc. */
4054 if (arg0s < 0 && arg1 > 0)
4055 val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
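 /* E.g. with a 32-bit HOST_WIDE_INT, arg0s == -8 shifted right by
 arg1 == 2 must give -2; the OR above forces the top two bits to
 ones in case the host's >> shifted zeros in. */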
4056
4057 break;
4058
4059 case ROTATERT:
4060 if (arg1 < 0)
4061 return 0;
4062
4063 arg1 %= width;
4064 val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
4065 | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
4066 break;
4067
4068 case ROTATE:
4069 if (arg1 < 0)
4070 return 0;
4071
4072 arg1 %= width;
4073 val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
4074 | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
4075 break;
4076
4077 case COMPARE:
4078 /* Do nothing here. */
4079 return 0;
4080
4081 case SMIN:
4082 val = arg0s <= arg1s ? arg0s : arg1s;
4083 break;
4084
4085 case UMIN:
4086 val = ((unsigned HOST_WIDE_INT) arg0
4087 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4088 break;
4089
4090 case SMAX:
4091 val = arg0s > arg1s ? arg0s : arg1s;
4092 break;
4093
4094 case UMAX:
4095 val = ((unsigned HOST_WIDE_INT) arg0
4096 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4097 break;
4098
4099 default:
4100 abort ();
4101 }
4102
4103 /* Clear the bits that don't belong in our mode, unless they and our sign
4104 bit are all one. So we get either a reasonable negative value or a
4105 reasonable unsigned value for this mode. */
4106 if (width < HOST_BITS_PER_WIDE_INT
4107 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4108 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4109 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4110
4111 return GEN_INT (val);
4112}
4113\f
4114/* Simplify a PLUS or MINUS, at least one of whose operands may be another
4115 PLUS or MINUS.
4116
4117 Rather than test for specific cases, we do this by a brute-force method
4118 and do all possible simplifications until no more changes occur. Then
4119 we rebuild the operation. */
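/* For instance, simplifying (a + b) - (a - c) first expands the
 operands into the list a, b, -a, c; the pairwise pass then cancels
 a against -a, and the result is rebuilt as the sum of b and c. */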
4120
4121static rtx
4122simplify_plus_minus (code, mode, op0, op1)
4123 enum rtx_code code;
4124 enum machine_mode mode;
4125 rtx op0, op1;
4126{
4127 rtx ops[8];
4128 int negs[8];
4129 rtx result, tem;
fb5c8ce6 4130 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
96b0e481 4131 int first = 1, negate = 0, changed;
fb5c8ce6 4132 int i, j;
96b0e481 4133
4c9a05bc 4134 bzero ((char *) ops, sizeof ops);
4135
4136 /* Set up the two operands and then expand them until nothing has been
4137 changed. If we run out of room in our array, give up; this should
4138 almost never happen. */
4139
4140 ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);
4141
4142 changed = 1;
4143 while (changed)
4144 {
4145 changed = 0;
4146
4147 for (i = 0; i < n_ops; i++)
4148 switch (GET_CODE (ops[i]))
4149 {
4150 case PLUS:
4151 case MINUS:
4152 if (n_ops == 7)
4153 return 0;
4154
4155 ops[n_ops] = XEXP (ops[i], 1);
4156 negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
4157 ops[i] = XEXP (ops[i], 0);
b7d9299b 4158 input_ops++;
4159 changed = 1;
4160 break;
4161
4162 case NEG:
4163 ops[i] = XEXP (ops[i], 0);
4164 negs[i] = ! negs[i];
4165 changed = 1;
4166 break;
4167
4168 case CONST:
4169 ops[i] = XEXP (ops[i], 0);
fb5c8ce6 4170 input_consts++;
4171 changed = 1;
4172 break;
4173
4174 case NOT:
4175 /* ~a -> (-a - 1) */
4176 if (n_ops != 7)
4177 {
4178 ops[n_ops] = constm1_rtx;
5931019b 4179 negs[n_ops++] = negs[i];
4180 ops[i] = XEXP (ops[i], 0);
4181 negs[i] = ! negs[i];
4182 changed = 1;
4183 }
4184 break;
4185
4186 case CONST_INT:
4187 if (negs[i])
4188 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
4189 break;
4190 }
4191 }
4192
4193 /* If we only have two operands, we can't do anything. */
4194 if (n_ops <= 2)
4195 return 0;
4196
4197 /* Now simplify each pair of operands until nothing changes. The first
4198 time through just simplify constants against each other. */
4199
4200 changed = 1;
4201 while (changed)
4202 {
4203 changed = first;
4204
4205 for (i = 0; i < n_ops - 1; i++)
4206 for (j = i + 1; j < n_ops; j++)
4207 if (ops[i] != 0 && ops[j] != 0
4208 && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
4209 {
4210 rtx lhs = ops[i], rhs = ops[j];
4211 enum rtx_code ncode = PLUS;
4212
4213 if (negs[i] && ! negs[j])
4214 lhs = ops[j], rhs = ops[i], ncode = MINUS;
4215 else if (! negs[i] && negs[j])
4216 ncode = MINUS;
4217
4218 tem = simplify_binary_operation (ncode, mode, lhs, rhs);
b7d9299b 4219 if (tem)
4220 {
4221 ops[i] = tem, ops[j] = 0;
4222 negs[i] = negs[i] && negs[j];
4223 if (GET_CODE (tem) == NEG)
4224 ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];
4225
4226 if (GET_CODE (ops[i]) == CONST_INT && negs[i])
4227 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
4228 changed = 1;
4229 }
4230 }
4231
4232 first = 0;
4233 }
4234
4235 /* Pack all the operands to the lower-numbered entries and give up if
91a60f37 4236 we didn't reduce the number of operands we had. Make sure we
4237 count a CONST as two operands. If we have the same number of
4238 operands, but have made more CONSTs than we had, this is also
4239 an improvement, so accept it. */
91a60f37 4240
fb5c8ce6 4241 for (i = 0, j = 0; j < n_ops; j++)
96b0e481 4242 if (ops[j] != 0)
4243 {
4244 ops[i] = ops[j], negs[i++] = negs[j];
4245 if (GET_CODE (ops[j]) == CONST)
fb5c8ce6 4246 n_consts++;
91a60f37 4247 }
96b0e481 4248
4249 if (i + n_consts > input_ops
4250 || (i + n_consts == input_ops && n_consts <= input_consts))
4251 return 0;
4252
4253 n_ops = i;
4254
4255 /* If we have a CONST_INT, put it last. */
4256 for (i = 0; i < n_ops - 1; i++)
4257 if (GET_CODE (ops[i]) == CONST_INT)
4258 {
4259 tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i] , ops[i] = tem;
4260 j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
4261 }
4262
4263 /* Put a non-negated operand first. If there aren't any, make all
4264 operands positive and negate the whole thing later. */
4265 for (i = 0; i < n_ops && negs[i]; i++)
4266 ;
4267
4268 if (i == n_ops)
4269 {
4270 for (i = 0; i < n_ops; i++)
4271 negs[i] = 0;
4272 negate = 1;
4273 }
4274 else if (i != 0)
4275 {
4276 tem = ops[0], ops[0] = ops[i], ops[i] = tem;
4277 j = negs[0], negs[0] = negs[i], negs[i] = j;
4278 }
4279
4280 /* Now make the result by performing the requested operations. */
4281 result = ops[0];
4282 for (i = 1; i < n_ops; i++)
4283 result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);
4284
4285 return negate ? gen_rtx (NEG, mode, result) : result;
4286}
4287\f
4288/* Make a binary operation by properly ordering the operands and
4289 seeing if the expression folds. */
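/* E.g. called as cse_gen_binary (PLUS, SImode, (const_int 4), (reg 65))
 (a hypothetical pseudo), the operands are swapped so the register
 comes first, and plus_constant then builds (plus (reg 65) 4). */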
4290
4291static rtx
4292cse_gen_binary (code, mode, op0, op1)
4293 enum rtx_code code;
4294 enum machine_mode mode;
4295 rtx op0, op1;
4296{
4297 rtx tem;
4298
4299 /* Put complex operands first and constants second if commutative. */
4300 if (GET_RTX_CLASS (code) == 'c'
4301 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4302 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4303 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4304 || (GET_CODE (op0) == SUBREG
4305 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4306 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4307 tem = op0, op0 = op1, op1 = tem;
4308
4309 /* If this simplifies, do it. */
4310 tem = simplify_binary_operation (code, mode, op0, op1);
4311
4312 if (tem)
4313 return tem;
4314
4315 /* Handle addition and subtraction of CONST_INT specially. Otherwise,
4316 just form the operation. */
4317
4318 if (code == PLUS && GET_CODE (op1) == CONST_INT
4319 && GET_MODE (op0) != VOIDmode)
4320 return plus_constant (op0, INTVAL (op1));
4321 else if (code == MINUS && GET_CODE (op1) == CONST_INT
4322 && GET_MODE (op0) != VOIDmode)
4323 return plus_constant (op0, - INTVAL (op1));
4324 else
4325 return gen_rtx (code, mode, op0, op1);
4326}
4327\f
7afe21cc 4328/* Like simplify_binary_operation except used for relational operators.
4329 MODE is the mode of the operands, not that of the result. If MODE
4330 is VOIDmode, both operands must also be VOIDmode and we compare the
4331 operands in "infinite precision".
4332
4333 If no simplification is possible, this function returns zero. Otherwise,
4334 it returns either const_true_rtx or const0_rtx. */
4335
4336rtx
4337simplify_relational_operation (code, mode, op0, op1)
4338 enum rtx_code code;
4339 enum machine_mode mode;
4340 rtx op0, op1;
4341{
4342 int equal, op0lt, op0ltu, op1lt, op1ltu;
4343 rtx tem;
4344
4345 /* If op0 is a compare, extract the comparison arguments from it. */
4346 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4347 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4348
4349 /* We can't simplify MODE_CC values since we don't know what the
4350 actual comparison is. */
4351 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC
4352#ifdef HAVE_cc0
4353 || op0 == cc0_rtx
4354#endif
4355 )
4356 return 0;
4357
4358 /* For integer comparisons of A and B maybe we can simplify A - B and can
4359 then simplify a comparison of that with zero. If A and B are both either
4360 a register or a CONST_INT, this can't help; testing for these cases will
4361 prevent infinite recursion here and speed things up.
4362
4363 If CODE is an unsigned comparison, we can only do this if A - B is a
4364 constant integer, and then we have to compare that integer with zero as a
4365 signed comparison. Note that this will give the incorrect result from
4366 comparisons that overflow. Since these are undefined, this is probably
4367 OK. If it causes a problem, we can check for A or B being an address
4368 (fp + const or SYMBOL_REF) and only do it in that case. */
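 /* For instance, comparing (plus (reg 65) 7) with (plus (reg 65) 3)
 (a hypothetical pseudo): the difference folds to (const_int 4),
 and the signed comparison of 4 against zero decides the result. */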
4369
4370 if (INTEGRAL_MODE_P (mode) && op1 != const0_rtx
4371 && ! ((GET_CODE (op0) == REG || GET_CODE (op0) == CONST_INT)
4372 && (GET_CODE (op1) == REG || GET_CODE (op1) == CONST_INT))
4373 && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
4374 && (GET_CODE (tem) == CONST_INT
4375 || (code != GTU && code != GEU &&
4376 code != LTU && code != LEU)))
4377 return simplify_relational_operation (signed_condition (code),
4378 mode, tem, const0_rtx);
4379
4380 /* For non-IEEE floating-point, if the two operands are equal, we know the
4381 result. */
4382 if (rtx_equal_p (op0, op1)
4383 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4384 || ! FLOAT_MODE_P (GET_MODE (op0)) || flag_fast_math))
4385 equal = 1, op0lt = 0, op0ltu = 0, op1lt = 0, op1ltu = 0;
4386
4387 /* If the operands are floating-point constants, see if we can fold
4388 the result. */
6076248a 4389#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4390 else if (GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
4391 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4392 {
4393 REAL_VALUE_TYPE d0, d1;
4394 jmp_buf handler;
4395
4396 if (setjmp (handler))
4397 return 0;
7afe21cc 4398
4399 set_float_handler (handler);
4400 REAL_VALUE_FROM_CONST_DOUBLE (d0, op0);
4401 REAL_VALUE_FROM_CONST_DOUBLE (d1, op1);
4402 equal = REAL_VALUES_EQUAL (d0, d1);
4403 op0lt = op0ltu = REAL_VALUES_LESS (d0, d1);
4404 op1lt = op1ltu = REAL_VALUES_LESS (d1, d0);
4405 set_float_handler (NULL_PTR);
4406 }
4407#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
7afe21cc 4408
4409 /* Otherwise, see if the operands are both integers. */
4410 else if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
4411 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
4412 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
4413 {
4414 int width = GET_MODE_BITSIZE (mode);
4415 HOST_WIDE_INT l0s, h0s, l1s, h1s;
4416 unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
7afe21cc 4417
4418 /* Get the two words comprising each integer constant. */
4419 if (GET_CODE (op0) == CONST_DOUBLE)
4420 {
4421 l0u = l0s = CONST_DOUBLE_LOW (op0);
4422 h0u = h0s = CONST_DOUBLE_HIGH (op0);
7afe21cc 4423 }
a432f20d 4424 else
6076248a 4425 {
4426 l0u = l0s = INTVAL (op0);
4427 h0u = 0, h0s = l0s < 0 ? -1 : 0;
4428 }
6076248a 4429
4430 if (GET_CODE (op1) == CONST_DOUBLE)
4431 {
4432 l1u = l1s = CONST_DOUBLE_LOW (op1);
4433 h1u = h1s = CONST_DOUBLE_HIGH (op1);
4434 }
4435 else
4436 {
4437 l1u = l1s = INTVAL (op1);
4438 h1u = 0, h1s = l1s < 0 ? -1 : 0;
4439 }
4440
4441 /* If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT,
4442 we have to sign or zero-extend the values. */
4443 if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
4444 h0u = h1u = 0, h0s = l0s < 0 ? -1 : 0, h1s = l1s < 0 ? -1 : 0;
6076248a 4445
4446 if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
4447 {
4448 l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
4449 l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
6076248a 4450
4451 if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4452 l0s |= ((HOST_WIDE_INT) (-1) << width);
6076248a 4453
4454 if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4455 l1s |= ((HOST_WIDE_INT) (-1) << width);
4456 }
4457
4458 equal = (h0u == h1u && l0u == l1u);
4459 op0lt = (h0s < h1s || (h0s == h1s && l0s < l1s));
4460 op1lt = (h1s < h0s || (h1s == h0s && l1s < l0s));
4461 op0ltu = (h0u < h1u || (h0u == h1u && l0u < l1u));
4462 op1ltu = (h1u < h0u || (h1u == h0u && l1u < l0u));
4463 }
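 /* E.g. comparing (const_int -1) with (const_int 1) in SImode gives
 equal == 0 and op0lt == 1, but op0ltu == 0: viewed unsigned, -1 is
 the mode mask 0xffffffff and exceeds 1, so LT and LTU fold to
 opposite answers on the same operands. */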
4464
4465 /* Otherwise, there are some code-specific tests we can make. */
4466 else
4467 {
4468 switch (code)
4469 {
4470 case EQ:
4471 /* References to the frame plus a constant or labels cannot
4472 be zero, but a SYMBOL_REF can due to #pragma weak. */
4473 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4474 || GET_CODE (op0) == LABEL_REF)
1a7c818b 4475#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4476 /* On some machines, the ap reg can be 0 sometimes. */
4477 && op0 != arg_pointer_rtx
7afe21cc 4478#endif
4479 )
4480 return const0_rtx;
4481 break;
4482
4483 case NE:
4484 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4485 || GET_CODE (op0) == LABEL_REF)
1a7c818b 4486#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
a432f20d 4487 && op0 != arg_pointer_rtx
7afe21cc 4488#endif
a432f20d 4489 )
4490 return const_true_rtx;
4491 break;
4492
4493 case GEU:
4494 /* Unsigned values are never negative. */
4495 if (op1 == const0_rtx)
4496 return const_true_rtx;
4497 break;
4498
4499 case LTU:
a432f20d 4500 if (op1 == const0_rtx)
4501 return const0_rtx;
4502 break;
4503
4504 case LEU:
4505 /* Unsigned values are never greater than the largest
4506 unsigned value. */
4507 if (GET_CODE (op1) == CONST_INT
4508 && INTVAL (op1) == GET_MODE_MASK (mode)
4509 && INTEGRAL_MODE_P (mode))
4510 return const_true_rtx;
4511 break;
4512
4513 case GTU:
4514 if (GET_CODE (op1) == CONST_INT
4515 && INTVAL (op1) == GET_MODE_MASK (mode)
cbf6a543 4516 && INTEGRAL_MODE_P (mode))
4517 return const0_rtx;
4518 break;
4519 }
4520
4521 return 0;
4522 }
4523
4524 /* If we reach here, EQUAL, OP0LT, OP0LTU, OP1LT, and OP1LTU are set
4525 as appropriate. */
4526 switch (code)
4527 {
7afe21cc 4528 case EQ:
4529 return equal ? const_true_rtx : const0_rtx;
4530 case NE:
4531 return ! equal ? const_true_rtx : const0_rtx;
7afe21cc 4532 case LT:
a432f20d 4533 return op0lt ? const_true_rtx : const0_rtx;
7afe21cc 4534 case GT:
a432f20d 4535 return op1lt ? const_true_rtx : const0_rtx;
7afe21cc 4536 case LTU:
a432f20d 4537 return op0ltu ? const_true_rtx : const0_rtx;
7afe21cc 4538 case GTU:
4539 return op1ltu ? const_true_rtx : const0_rtx;
4540 case LE:
4541 return equal || op0lt ? const_true_rtx : const0_rtx;
4542 case GE:
4543 return equal || op1lt ? const_true_rtx : const0_rtx;
4544 case LEU:
4545 return equal || op0ltu ? const_true_rtx : const0_rtx;
4546 case GEU:
4547 return equal || op1ltu ? const_true_rtx : const0_rtx;
4548 }
4549
a432f20d 4550 abort ();
4551}
4552\f
4553/* Simplify CODE, an operation with result mode MODE and three operands,
4554 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4555 a constant. Return 0 if no simplification is possible. */
4556
4557rtx
4558simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4559 enum rtx_code code;
4560 enum machine_mode mode, op0_mode;
4561 rtx op0, op1, op2;
4562{
4563 int width = GET_MODE_BITSIZE (mode);
4564
4565 /* VOIDmode means "infinite" precision. */
4566 if (width == 0)
906c4e36 4567 width = HOST_BITS_PER_WIDE_INT;
4568
4569 switch (code)
4570 {
4571 case SIGN_EXTRACT:
4572 case ZERO_EXTRACT:
4573 if (GET_CODE (op0) == CONST_INT
4574 && GET_CODE (op1) == CONST_INT
4575 && GET_CODE (op2) == CONST_INT
4576 && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
906c4e36 4577 && width <= HOST_BITS_PER_WIDE_INT)
4578 {
4579 /* Extracting a bit-field from a constant */
906c4e36 4580 HOST_WIDE_INT val = INTVAL (op0);
4581
4582#if BITS_BIG_ENDIAN
4583 val >>= (GET_MODE_BITSIZE (op0_mode) - INTVAL (op2) - INTVAL (op1));
4584#else
4585 val >>= INTVAL (op2);
4586#endif
906c4e36 4587 if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
4588 {
4589 /* First zero-extend. */
906c4e36 4590 val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
7afe21cc 4591 /* If desired, propagate sign bit. */
4592 if (code == SIGN_EXTRACT
4593 && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4594 val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
4595 }
4596
4597 /* Clear the bits that don't belong in our mode,
4598 unless they and our sign bit are all one.
4599 So we get either a reasonable negative value or a reasonable
4600 unsigned value for this mode. */
4601 if (width < HOST_BITS_PER_WIDE_INT
4602 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4603 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4604 val &= ((HOST_WIDE_INT) 1 << width) - 1;
7afe21cc 4605
906c4e36 4606 return GEN_INT (val);
4607 }
4608 break;
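 /* The case above folds, e.g. with BITS_BIG_ENDIAN 0,
 (zero_extract:SI (const_int 0xa5) (const_int 4) (const_int 4))
 to (const_int 10): bits 4-7 of 0xa5 are 1010. A sign_extract of
 the same field propagates the set top bit and gives (const_int -6). */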
4609
4610 case IF_THEN_ELSE:
4611 if (GET_CODE (op0) == CONST_INT)
4612 return op0 != const0_rtx ? op1 : op2;
4613 break;
4614
4615 default:
4616 abort ();
4617 }
4618
4619 return 0;
4620}
4621\f
4622/* If X is a nontrivial arithmetic operation on an argument
4623 for which a constant value can be determined, return
4624 the result of operating on that value, as a constant.
4625 Otherwise, return X, possibly with one or more operands
4626 modified by recursive calls to this function.
4627
4628 If X is a register whose contents are known, we do NOT
4629 return those contents here. equiv_constant is called to
4630 perform that task.
4631
4632 INSN is the insn that we may be modifying. If it is 0, make a copy
4633 of X before modifying it. */
4634
4635static rtx
4636fold_rtx (x, insn)
4637 rtx x;
4638 rtx insn;
4639{
4640 register enum rtx_code code;
4641 register enum machine_mode mode;
4642 register char *fmt;
906c4e36 4643 register int i;
4644 rtx new = 0;
4645 int copied = 0;
4646 int must_swap = 0;
4647
4648 /* Folded equivalents of first two operands of X. */
4649 rtx folded_arg0;
4650 rtx folded_arg1;
4651
4652 /* Constant equivalents of first three operands of X;
4653 0 when no such equivalent is known. */
4654 rtx const_arg0;
4655 rtx const_arg1;
4656 rtx const_arg2;
4657
4658 /* The mode of the first operand of X. We need this for sign and zero
4659 extends. */
4660 enum machine_mode mode_arg0;
4661
4662 if (x == 0)
4663 return x;
4664
4665 mode = GET_MODE (x);
4666 code = GET_CODE (x);
4667 switch (code)
4668 {
4669 case CONST:
4670 case CONST_INT:
4671 case CONST_DOUBLE:
4672 case SYMBOL_REF:
4673 case LABEL_REF:
4674 case REG:
4675 /* No use simplifying an EXPR_LIST,
4676 since EXPR_LISTs are used only for lists of args
4677 in a function call's REG_EQUAL note. */
4678 case EXPR_LIST:
4679 return x;
4680
4681#ifdef HAVE_cc0
4682 case CC0:
4683 return prev_insn_cc0;
4684#endif
4685
4686 case PC:
4687 /* If the next insn is a CODE_LABEL followed by a jump table,
4688 PC's value is a LABEL_REF pointing to that label. That
4689 lets us fold switch statements on the Vax. */
4690 if (insn && GET_CODE (insn) == JUMP_INSN)
4691 {
4692 rtx next = next_nonnote_insn (insn);
4693
4694 if (next && GET_CODE (next) == CODE_LABEL
4695 && NEXT_INSN (next) != 0
4696 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
4697 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
4698 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
4699 return gen_rtx (LABEL_REF, Pmode, next);
4700 }
4701 break;
4702
4703 case SUBREG:
4704 /* See if we previously assigned a constant value to this SUBREG. */
4705 if ((new = lookup_as_function (x, CONST_INT)) != 0
4706 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
4707 return new;
4708
4709 /* If this is a paradoxical SUBREG, we have no idea what value the
4710 extra bits would have. However, if the operand is equivalent
4711 to a SUBREG whose operand is the same as our mode, and all the
4712 modes are within a word, we can just use the inner operand
4713 because these SUBREGs just say how to treat the register.
4714
4715 Similarly if we find an integer constant. */
4b980e20 4716
e5f6a288 4717 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4718 {
4719 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
4720 struct table_elt *elt;
4721
4722 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
4723 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
4724 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
4725 imode)) != 0)
4726 for (elt = elt->first_same_value;
4727 elt; elt = elt->next_same_value)
4728 {
4729 if (CONSTANT_P (elt->exp)
4730 && GET_MODE (elt->exp) == VOIDmode)
4731 return elt->exp;
4732
4733 if (GET_CODE (elt->exp) == SUBREG
4734 && GET_MODE (SUBREG_REG (elt->exp)) == mode
906c4e36 4735 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4736 return copy_rtx (SUBREG_REG (elt->exp));
4737 }
4738
4739 return x;
4740 }
e5f6a288 4741
4742 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
4743 We might be able to if the SUBREG is extracting a single word in an
4744 integral mode or extracting the low part. */
4745
4746 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
4747 const_arg0 = equiv_constant (folded_arg0);
4748 if (const_arg0)
4749 folded_arg0 = const_arg0;
4750
4751 if (folded_arg0 != SUBREG_REG (x))
4752 {
4753 new = 0;
4754
4755 if (GET_MODE_CLASS (mode) == MODE_INT
4756 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
4757 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
4758 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
4759 GET_MODE (SUBREG_REG (x)));
4760 if (new == 0 && subreg_lowpart_p (x))
4761 new = gen_lowpart_if_possible (mode, folded_arg0);
4762 if (new)
4763 return new;
4764 }
4765
4766 /* If this is a narrowing SUBREG and our operand is a REG, see if
858a47b1 4767 we can find an equivalence for REG that is an arithmetic operation
4768 in a wider mode where both operands are paradoxical SUBREGs
4769 from objects of our result mode. In that case, we couldn't report
4770 an equivalent value for that operation, since we don't know what the
4771 extra bits will be. But we can find an equivalence for this SUBREG
4772 by folding that operation in the narrow mode. This allows us to
4773 fold arithmetic in narrow modes when the machine only supports
4774 word-sized arithmetic.
4775
4776 Also look for a case where we have a SUBREG whose operand is the
4777 same as our result. If both modes are smaller than a word, we
4778 are simply interpreting a register in different modes and we
4779 can use the inner value. */
4780
4781 if (GET_CODE (folded_arg0) == REG
4782 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
4783 && subreg_lowpart_p (x))
4784 {
4785 struct table_elt *elt;
4786
4787 /* We can use HASH here since we know that canon_hash won't be
4788 called. */
4789 elt = lookup (folded_arg0,
4790 HASH (folded_arg0, GET_MODE (folded_arg0)),
4791 GET_MODE (folded_arg0));
4792
4793 if (elt)
4794 elt = elt->first_same_value;
4795
4796 for (; elt; elt = elt->next_same_value)
4797 {
4798 enum rtx_code eltcode = GET_CODE (elt->exp);
4799
4800 /* Just check for unary and binary operations. */
4801 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
4802 && GET_CODE (elt->exp) != SIGN_EXTEND
4803 && GET_CODE (elt->exp) != ZERO_EXTEND
4804 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4805 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
4806 {
4807 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
4808
4809 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
906c4e36 4810 op0 = fold_rtx (op0, NULL_RTX);
4811
4812 op0 = equiv_constant (op0);
4813 if (op0)
4814 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
4815 op0, mode);
4816 }
4817 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
4818 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
4819 && eltcode != DIV && eltcode != MOD
4820 && eltcode != UDIV && eltcode != UMOD
4821 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
4822 && eltcode != ROTATE && eltcode != ROTATERT
4823 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4824 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
4825 == mode))
4826 || CONSTANT_P (XEXP (elt->exp, 0)))
4827 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
4828 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
4829 == mode))
4830 || CONSTANT_P (XEXP (elt->exp, 1))))
4831 {
4832 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
4833 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
4834
4835 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
906c4e36 4836 op0 = fold_rtx (op0, NULL_RTX);
4837
4838 if (op0)
4839 op0 = equiv_constant (op0);
4840
4841 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
906c4e36 4842 op1 = fold_rtx (op1, NULL_RTX);
4843
4844 if (op1)
4845 op1 = equiv_constant (op1);
4846
4847 /* If we are looking for the low SImode part of
4848 (ashift:DI c (const_int 32)), it doesn't work
4849 to compute that in SImode, because a 32-bit shift
4850 in SImode is unpredictable. We know the value is 0. */
4851 if (op0 && op1
45620ed4 4852 && GET_CODE (elt->exp) == ASHIFT
4853 && GET_CODE (op1) == CONST_INT
4854 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
4855 {
4856 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
4857
4858 /* If the count fits in the inner mode's width,
4859 but exceeds the outer mode's width,
4860 the value will get truncated to 0
4861 by the subreg. */
4862 new = const0_rtx;
4863 else
4864 /* If the count exceeds even the inner mode's width,
4865 don't fold this expression. */
4866 new = 0;
4867 }
4868 else if (op0 && op1)
4869 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
4870 op0, op1);
4871 }
4872
4873 else if (GET_CODE (elt->exp) == SUBREG
4874 && GET_MODE (SUBREG_REG (elt->exp)) == mode
4875 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
4876 <= UNITS_PER_WORD)
906c4e36 4877 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4878 new = copy_rtx (SUBREG_REG (elt->exp));
4879
4880 if (new)
4881 return new;
4882 }
4883 }
4884
4885 return x;
4886
4887 case NOT:
4888 case NEG:
4889 /* If we have (NOT Y), see if Y is known to be (NOT Z).
4890 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
4891 new = lookup_as_function (XEXP (x, 0), code);
4892 if (new)
4893 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
4894 break;
13c9910f 4895
4896 case MEM:
4897 /* If we are not actually processing an insn, don't try to find the
4898 best address. Not only don't we care, but we could modify the
4899 MEM in an invalid way since we have no insn to validate against. */
4900 if (insn != 0)
4901 find_best_addr (insn, &XEXP (x, 0));
4902
4903 {
4904 /* Even if we don't fold in the insn itself,
4905 we can safely do so here, in hopes of getting a constant. */
906c4e36 4906 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
7afe21cc 4907 rtx base = 0;
906c4e36 4908 HOST_WIDE_INT offset = 0;
4909
4910 if (GET_CODE (addr) == REG
4911 && REGNO_QTY_VALID_P (REGNO (addr))
4912 && GET_MODE (addr) == qty_mode[reg_qty[REGNO (addr)]]
4913 && qty_const[reg_qty[REGNO (addr)]] != 0)
4914 addr = qty_const[reg_qty[REGNO (addr)]];
4915
4916 /* If address is constant, split it into a base and integer offset. */
4917 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
4918 base = addr;
4919 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
4920 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
4921 {
4922 base = XEXP (XEXP (addr, 0), 0);
4923 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
4924 }
4925 else if (GET_CODE (addr) == LO_SUM
4926 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
4927 base = XEXP (addr, 1);
4928
4929 /* If this is a constant pool reference, we can fold it into its
4930 constant to allow better value tracking. */
4931 if (base && GET_CODE (base) == SYMBOL_REF
4932 && CONSTANT_POOL_ADDRESS_P (base))
4933 {
4934 rtx constant = get_pool_constant (base);
4935 enum machine_mode const_mode = get_pool_mode (base);
4936 rtx new;
4937
4938 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
4939 constant_pool_entries_cost = COST (constant);
4940
4941 /* If we are loading the full constant, we have an equivalence. */
4942 if (offset == 0 && mode == const_mode)
4943 return constant;
4944
4945 /* If this actually isn't a constant (weird!), we can't do
4946 anything. Otherwise, handle the two most common cases:
4947 extracting a word from a multi-word constant, and extracting
4948 the low-order bits. Other cases don't seem common enough to
4949 worry about. */
4950 if (! CONSTANT_P (constant))
4951 return x;
4952
4953 if (GET_MODE_CLASS (mode) == MODE_INT
4954 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
4955 && offset % UNITS_PER_WORD == 0
4956 && (new = operand_subword (constant,
4957 offset / UNITS_PER_WORD,
4958 0, const_mode)) != 0)
4959 return new;
4960
4961 if (((BYTES_BIG_ENDIAN
4962 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
4963 || (! BYTES_BIG_ENDIAN && offset == 0))
4964 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
4965 return new;
4966 }
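 /* So a load of the full pooled mode is replaced by the constant
 itself, while (on a machine where SImode is a word) an SImode
 load at a word offset into a wider pool entry folds to that
 word via operand_subword. */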
4967
4968 /* If this is a reference to a label at a known position in a jump
4969 table, we also know its value. */
4970 if (base && GET_CODE (base) == LABEL_REF)
4971 {
4972 rtx label = XEXP (base, 0);
4973 rtx table_insn = NEXT_INSN (label);
4974
4975 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
4976 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
4977 {
4978 rtx table = PATTERN (table_insn);
4979
4980 if (offset >= 0
4981 && (offset / GET_MODE_SIZE (GET_MODE (table))
4982 < XVECLEN (table, 0)))
4983 return XVECEXP (table, 0,
4984 offset / GET_MODE_SIZE (GET_MODE (table)));
4985 }
4986 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
4987 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
4988 {
4989 rtx table = PATTERN (table_insn);
4990
4991 if (offset >= 0
4992 && (offset / GET_MODE_SIZE (GET_MODE (table))
4993 < XVECLEN (table, 1)))
4994 {
4995 offset /= GET_MODE_SIZE (GET_MODE (table));
4996 new = gen_rtx (MINUS, Pmode, XVECEXP (table, 1, offset),
4997 XEXP (table, 0));
4998
4999 if (GET_MODE (table) != Pmode)
5000 new = gen_rtx (TRUNCATE, GET_MODE (table), new);
5001
5002 return new;
5003 }
5004 }
5005 }
5006
5007 return x;
5008 }
5009 }
5010
5011 const_arg0 = 0;
5012 const_arg1 = 0;
5013 const_arg2 = 0;
5014 mode_arg0 = VOIDmode;
5015
5016 /* Try folding our operands.
5017 Then see which ones have constant values known. */
5018
5019 fmt = GET_RTX_FORMAT (code);
5020 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5021 if (fmt[i] == 'e')
5022 {
5023 rtx arg = XEXP (x, i);
5024 rtx folded_arg = arg, const_arg = 0;
5025 enum machine_mode mode_arg = GET_MODE (arg);
5026 rtx cheap_arg, expensive_arg;
5027 rtx replacements[2];
5028 int j;
5029
5030 /* Most arguments are cheap, so handle them specially. */
5031 switch (GET_CODE (arg))
5032 {
5033 case REG:
5034 /* This is the same as calling equiv_constant; it is duplicated
5035 here for speed. */
5036 if (REGNO_QTY_VALID_P (REGNO (arg))
5037 && qty_const[reg_qty[REGNO (arg)]] != 0
5038 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != REG
5039 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != PLUS)
5040 const_arg
5041 = gen_lowpart_if_possible (GET_MODE (arg),
5042 qty_const[reg_qty[REGNO (arg)]]);
5043 break;
5044
5045 case CONST:
5046 case CONST_INT:
5047 case SYMBOL_REF:
5048 case LABEL_REF:
5049 case CONST_DOUBLE:
5050 const_arg = arg;
5051 break;
5052
5053#ifdef HAVE_cc0
5054 case CC0:
5055 folded_arg = prev_insn_cc0;
5056 mode_arg = prev_insn_cc0_mode;
5057 const_arg = equiv_constant (folded_arg);
5058 break;
5059#endif
5060
5061 default:
5062 folded_arg = fold_rtx (arg, insn);
5063 const_arg = equiv_constant (folded_arg);
5064 }
5065
5066 /* For the first three operands, see if the operand
5067 is constant or equivalent to a constant. */
5068 switch (i)
5069 {
5070 case 0:
5071 folded_arg0 = folded_arg;
5072 const_arg0 = const_arg;
5073 mode_arg0 = mode_arg;
5074 break;
5075 case 1:
5076 folded_arg1 = folded_arg;
5077 const_arg1 = const_arg;
5078 break;
5079 case 2:
5080 const_arg2 = const_arg;
5081 break;
5082 }
5083
5084 /* Pick the least expensive of the folded argument and an
5085 equivalent constant argument. */
5086 if (const_arg == 0 || const_arg == folded_arg
5087 || COST (const_arg) > COST (folded_arg))
5088 cheap_arg = folded_arg, expensive_arg = const_arg;
5089 else
5090 cheap_arg = const_arg, expensive_arg = folded_arg;
5091
5092 /* Try to replace the operand with the cheapest of the two
5093 possibilities. If it doesn't work and this is either of the first
5094 two operands of a commutative operation, try swapping them.
5095 If THAT fails, try the more expensive, provided it is cheaper
5096 than what is already there. */
5097
5098 if (cheap_arg == XEXP (x, i))
5099 continue;
5100
5101 if (insn == 0 && ! copied)
5102 {
5103 x = copy_rtx (x);
5104 copied = 1;
5105 }
5106
5107 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
5108 for (j = 0;
5109 j < 2 && replacements[j]
5110 && COST (replacements[j]) < COST (XEXP (x, i));
5111 j++)
5112 {
5113 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
5114 break;
5115
5116 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
5117 {
5118 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
5119 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
5120
5121 if (apply_change_group ())
5122 {
5123 /* Swap them back to their original order so that this loop can
5124 continue; MUST_SWAP flags them to be swapped again later. */
5125 rtx tem;
5126
5127 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
5128 XEXP (x, 1) = tem;
5129 must_swap = 1;
5130 break;
5131 }
5132 }
5133 }
5134 }
5135
5136 else if (fmt[i] == 'E')
5137 /* Don't try to fold inside of a vector of expressions.
5138 Doing nothing is harmless. */
5139 ;
5140
5141 /* If a commutative operation, place a constant integer as the second
5142 operand unless the first operand is also a constant integer. Otherwise,
5143 place any constant second unless the first operand is also a constant. */
5144
5145 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5146 {
5147 if (must_swap || (const_arg0
5148 && (const_arg1 == 0
5149 || (GET_CODE (const_arg0) == CONST_INT
5150 && GET_CODE (const_arg1) != CONST_INT))))
5151 {
5152 register rtx tem = XEXP (x, 0);
5153
5154 if (insn == 0 && ! copied)
5155 {
5156 x = copy_rtx (x);
5157 copied = 1;
5158 }
5159
5160 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
5161 validate_change (insn, &XEXP (x, 1), tem, 1);
5162 if (apply_change_group ())
5163 {
5164 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
5165 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
5166 }
5167 }
5168 }
5169
5170 /* If X is an arithmetic operation, see if we can simplify it. */
5171
5172 switch (GET_RTX_CLASS (code))
5173 {
5174 case '1':
5175 /* We can't simplify extension ops unless we know the original mode. */
5176 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
5177 && mode_arg0 == VOIDmode)
5178 break;
5179 new = simplify_unary_operation (code, mode,
5180 const_arg0 ? const_arg0 : folded_arg0,
5181 mode_arg0);
5182 break;
5183
5184 case '<':
5185 /* See what items are actually being compared and set FOLDED_ARG[01]
5186 to those values and CODE to the actual comparison code. If any are
5187 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
5188 do anything if both operands are already known to be constant. */
5189
5190 if (const_arg0 == 0 || const_arg1 == 0)
5191 {
5192 struct table_elt *p0, *p1;
c610adec 5193 rtx true = const_true_rtx, false = const0_rtx;
13c9910f 5194 enum machine_mode mode_arg1;
5195
5196#ifdef FLOAT_STORE_FLAG_VALUE
c7c955ee 5197 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
c610adec 5198 {
5199 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5200 mode);
5201 false = CONST0_RTX (mode);
5202 }
5203#endif
7afe21cc 5204
5205 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5206 &mode_arg0, &mode_arg1);
5207 const_arg0 = equiv_constant (folded_arg0);
5208 const_arg1 = equiv_constant (folded_arg1);
5209
5210 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5211 what kinds of things are being compared, so we can't do
5212 anything with this comparison. */
5213
5214 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5215 break;
5216
5217 /* If we do not now have two constants being compared, see if we
5218 can nevertheless deduce some things about the comparison. */
5219 if (const_arg0 == 0 || const_arg1 == 0)
5220 {
5221 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or non-explicit
5222 constant? These aren't zero, but we don't know their sign. */
5223 if (const_arg1 == const0_rtx
5224 && (NONZERO_BASE_PLUS_P (folded_arg0)
5225#if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5226 come out as 0. */
5227 || GET_CODE (folded_arg0) == SYMBOL_REF
5228#endif
5229 || GET_CODE (folded_arg0) == LABEL_REF
5230 || GET_CODE (folded_arg0) == CONST))
5231 {
5232 if (code == EQ)
c610adec 5233 return false;
7afe21cc 5234 else if (code == NE)
c610adec 5235 return true;
5236 }
5237
5238 /* See if the two operands are the same. We don't do this
5239	 for IEEE floating-point, since we can't assume x == x
5240	 when x might be a NaN.  */
5241
5242 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
a83afb65 5243 || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
5244 && (folded_arg0 == folded_arg1
5245 || (GET_CODE (folded_arg0) == REG
5246 && GET_CODE (folded_arg1) == REG
5247 && (reg_qty[REGNO (folded_arg0)]
5248 == reg_qty[REGNO (folded_arg1)]))
5249 || ((p0 = lookup (folded_arg0,
5250 (safe_hash (folded_arg0, mode_arg0)
5251 % NBUCKETS), mode_arg0))
5252 && (p1 = lookup (folded_arg1,
5253 (safe_hash (folded_arg1, mode_arg0)
5254 % NBUCKETS), mode_arg0))
5255 && p0->first_same_value == p1->first_same_value)))
5256 return ((code == EQ || code == LE || code == GE
5257 || code == LEU || code == GEU)
c610adec 5258 ? true : false);
5259
5260 /* If FOLDED_ARG0 is a register, see if the comparison we are
5261 doing now is either the same as we did before or the reverse
5262 (we only check the reverse if not floating-point). */
5263 else if (GET_CODE (folded_arg0) == REG)
5264 {
5265 int qty = reg_qty[REGNO (folded_arg0)];
5266
5267 if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5268 && (comparison_dominates_p (qty_comparison_code[qty], code)
5269 || (comparison_dominates_p (qty_comparison_code[qty],
5270 reverse_condition (code))
cbf6a543 5271 && ! FLOAT_MODE_P (mode_arg0)))
5272 && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5273 || (const_arg1
5274 && rtx_equal_p (qty_comparison_const[qty],
5275 const_arg1))
5276 || (GET_CODE (folded_arg1) == REG
5277 && (reg_qty[REGNO (folded_arg1)]
5278 == qty_comparison_qty[qty]))))
5279 return (comparison_dominates_p (qty_comparison_code[qty],
5280 code)
c610adec 5281 ? true : false);
5282 }
5283 }
5284 }
5285
5286 /* If we are comparing against zero, see if the first operand is
5287 equivalent to an IOR with a constant. If so, we may be able to
5288 determine the result of this comparison. */
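	 /* Sketch of the deduction made below (illustrative operands):
	    if FOLDED_ARG0 is known to equal (ior:SI (reg:SI 65)
	    (const_int 4)), bit 2 of the value is certainly set, so
	    (eq ... (const_int 0)) folds to false and (ne ...) to true;
	    if instead the known IOR constant has its sign bit set, the
	    value is negative and LT/LE/GT/GE can be decided too.  */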
5289
5290 if (const_arg1 == const0_rtx)
5291 {
5292 rtx y = lookup_as_function (folded_arg0, IOR);
5293 rtx inner_const;
5294
5295 if (y != 0
5296 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5297 && GET_CODE (inner_const) == CONST_INT
5298 && INTVAL (inner_const) != 0)
5299 {
5300 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
5301 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
5302 && (INTVAL (inner_const)
5303 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
5304 rtx true = const_true_rtx, false = const0_rtx;
5305
5306#ifdef FLOAT_STORE_FLAG_VALUE
c7c955ee 5307 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
c610adec 5308 {
5309 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5310 mode);
5311 false = CONST0_RTX (mode);
5312 }
5313#endif
5314
5315 switch (code)
5316 {
5317 case EQ:
c610adec 5318 return false;
7afe21cc 5319 case NE:
c610adec 5320 return true;
5321 case LT: case LE:
5322 if (has_sign)
c610adec 5323 return true;
5324 break;
5325 case GT: case GE:
5326 if (has_sign)
c610adec 5327 return false;
5328 break;
5329 }
5330 }
5331 }
5332
5333 new = simplify_relational_operation (code, mode_arg0,
5334 const_arg0 ? const_arg0 : folded_arg0,
5335 const_arg1 ? const_arg1 : folded_arg1);
5336#ifdef FLOAT_STORE_FLAG_VALUE
5337 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5338 new = ((new == const0_rtx) ? CONST0_RTX (mode)
560c94a2 5339 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE, mode));
c610adec 5340#endif
5341 break;
5342
5343 case '2':
5344 case 'c':
5345 switch (code)
5346 {
5347 case PLUS:
5348 /* If the second operand is a LABEL_REF, see if the first is a MINUS
5349 with that LABEL_REF as its second operand. If so, the result is
5350 the first operand of that MINUS. This handles switches with an
5351 ADDR_DIFF_VEC table. */
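	  /* Worked instance (hypothetical labels): with
	     FOLDED_ARG0 == (minus (label_ref L2) (label_ref L1)) and
	     CONST_ARG1 == (label_ref L1), the sum collapses to
	     (label_ref L2), the first operand of the MINUS.  */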
5352 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5353 {
5354 rtx y = lookup_as_function (folded_arg0, MINUS);
5355
5356 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5357 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5358 return XEXP (y, 0);
5359 }
5360
5361 /* If second operand is a register equivalent to a negative
5362 CONST_INT, see if we can find a register equivalent to the
5363 positive constant. Make a MINUS if so. Don't do this for
5364 a negative constant since we might then alternate between
5365	     choosing positive and negative constants.  Having the positive
5366 constant previously-used is the more common case. */
5367 if (const_arg1 && GET_CODE (const_arg1) == CONST_INT
5368 && INTVAL (const_arg1) < 0 && GET_CODE (folded_arg1) == REG)
5369 {
5370 rtx new_const = GEN_INT (- INTVAL (const_arg1));
5371 struct table_elt *p
5372 = lookup (new_const, safe_hash (new_const, mode) % NBUCKETS,
5373 mode);
5374
5375 if (p)
5376 for (p = p->first_same_value; p; p = p->next_same_value)
5377 if (GET_CODE (p->exp) == REG)
5378 return cse_gen_binary (MINUS, mode, folded_arg0,
5379 canon_reg (p->exp, NULL_RTX));
5380 }
5381 goto from_plus;
5382
5383 case MINUS:
5384 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5385 If so, produce (PLUS Z C2-C). */
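	  /* Worked instance (illustrative): if Y is known to be
	     (plus (reg:SI 65) (const_int 10)) and C is (const_int 4),
	     the code below rewrites (minus Y C) as
	     (plus (reg:SI 65) (const_int 6)).  */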
5386 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5387 {
5388 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5389 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
5390 return fold_rtx (plus_constant (copy_rtx (y),
5391 -INTVAL (const_arg1)),
a3b5c94a 5392 NULL_RTX);
13c9910f 5393 }
5394
5395 /* ... fall through ... */
5396
13c9910f 5397 from_plus:
5398 case SMIN: case SMAX: case UMIN: case UMAX:
5399 case IOR: case AND: case XOR:
5400 case MULT: case DIV: case UDIV:
5401 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
5402 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5403 is known to be of similar form, we may be able to replace the
5404 operation with a combined operation. This may eliminate the
5405 intermediate operation if every use is simplified in this way.
5406 Note that the similar optimization done by combine.c only works
5407 if the intermediate operation's result has only one reference. */
5408
5409 if (GET_CODE (folded_arg0) == REG
5410 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5411 {
5412 int is_shift
5413 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5414 rtx y = lookup_as_function (folded_arg0, code);
5415 rtx inner_const;
5416 enum rtx_code associate_code;
5417 rtx new_const;
5418
5419 if (y == 0
5420 || 0 == (inner_const
5421 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5422 || GET_CODE (inner_const) != CONST_INT
5423 /* If we have compiled a statement like
5424 "if (x == (x & mask1))", and now are looking at
5425 "x & mask2", we will have a case where the first operand
5426 of Y is the same as our first operand. Unless we detect
5427 this case, an infinite loop will result. */
5428 || XEXP (y, 0) == folded_arg0)
5429 break;
5430
5431 /* Don't associate these operations if they are a PLUS with the
5432 same constant and it is a power of two. These might be doable
5433 with a pre- or post-increment. Similarly for two subtracts of
5434 identical powers of two with post decrement. */
5435
5436 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
5437 && (0
5438#if defined(HAVE_PRE_INCREMENT) || defined(HAVE_POST_INCREMENT)
5439 || exact_log2 (INTVAL (const_arg1)) >= 0
5440#endif
5441#if defined(HAVE_PRE_DECREMENT) || defined(HAVE_POST_DECREMENT)
5442 || exact_log2 (- INTVAL (const_arg1)) >= 0
5443#endif
5444 ))
5445 break;
5446
5447 /* Compute the code used to compose the constants. For example,
5448 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
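	  /* Further hedged examples of the composition rule: two
	     constant shifts compose with PLUS, so (ashift (ashift X
	     (const_int 2)) (const_int 3)) acts like (ashift X
	     (const_int 5)); likewise (plus (plus X (const_int 5))
	     (const_int 7)) acts like (plus X (const_int 12)).  */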
5449
5450 associate_code
5451 = (code == MULT || code == DIV || code == UDIV ? MULT
5452 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
5453
5454 new_const = simplify_binary_operation (associate_code, mode,
5455 const_arg1, inner_const);
5456
5457 if (new_const == 0)
5458 break;
5459
5460 /* If we are associating shift operations, don't let this
5461 produce a shift of the size of the object or larger.
5462 This could occur when we follow a sign-extend by a right
5463 shift on a machine that does a sign-extend as a pair
5464 of shifts. */
5465
5466 if (is_shift && GET_CODE (new_const) == CONST_INT
5467 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
5468 {
5469 /* As an exception, we can turn an ASHIFTRT of this
5470 form into a shift of the number of bits - 1. */
5471 if (code == ASHIFTRT)
5472 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
5473 else
5474 break;
5475 }
5476
5477 y = copy_rtx (XEXP (y, 0));
5478
5479 /* If Y contains our first operand (the most common way this
5480	     can happen is if Y is a MEM), we would go into an infinite
5481 loop if we tried to fold it. So don't in that case. */
5482
5483 if (! reg_mentioned_p (folded_arg0, y))
5484 y = fold_rtx (y, insn);
5485
96b0e481 5486 return cse_gen_binary (code, mode, y, new_const);
5487 }
5488 }
5489
5490 new = simplify_binary_operation (code, mode,
5491 const_arg0 ? const_arg0 : folded_arg0,
5492 const_arg1 ? const_arg1 : folded_arg1);
5493 break;
5494
5495 case 'o':
5496 /* (lo_sum (high X) X) is simply X. */
5497 if (code == LO_SUM && const_arg0 != 0
5498 && GET_CODE (const_arg0) == HIGH
5499 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
5500 return const_arg1;
5501 break;
5502
5503 case '3':
5504 case 'b':
5505 new = simplify_ternary_operation (code, mode, mode_arg0,
5506 const_arg0 ? const_arg0 : folded_arg0,
5507 const_arg1 ? const_arg1 : folded_arg1,
5508 const_arg2 ? const_arg2 : XEXP (x, 2));
5509 break;
5510 }
5511
5512 return new ? new : x;
5513}
5514\f
5515/* Return a constant value currently equivalent to X.
5516 Return 0 if we don't know one. */
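/* Illustrative use (register numbers hypothetical): if reg 65 has a
   valid quantity whose qty_const is (const_int 7), then
   equiv_constant (reg:SI 65) returns (const_int 7); for a register
   with no known constant it returns 0.  */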
5517
5518static rtx
5519equiv_constant (x)
5520 rtx x;
5521{
5522 if (GET_CODE (x) == REG
5523 && REGNO_QTY_VALID_P (REGNO (x))
5524 && qty_const[reg_qty[REGNO (x)]])
5525 x = gen_lowpart_if_possible (GET_MODE (x), qty_const[reg_qty[REGNO (x)]]);
5526
5527 if (x != 0 && CONSTANT_P (x))
5528 return x;
5529
5530 /* If X is a MEM, try to fold it outside the context of any insn to see if
5531 it might be equivalent to a constant. That handles the case where it
5532 is a constant-pool reference. Then try to look it up in the hash table
5533 in case it is something whose value we have seen before. */
5534
5535 if (GET_CODE (x) == MEM)
5536 {
5537 struct table_elt *elt;
5538
906c4e36 5539 x = fold_rtx (x, NULL_RTX);
5540 if (CONSTANT_P (x))
5541 return x;
5542
5543 elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
5544 if (elt == 0)
5545 return 0;
5546
5547 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
5548 if (elt->is_const && CONSTANT_P (elt->exp))
5549 return elt->exp;
5550 }
5551
5552 return 0;
5553}
5554\f
5555/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
5556 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
5557 least-significant part of X.
5558 MODE specifies how big a part of X to return.
5559
5560 If the requested operation cannot be done, 0 is returned.
5561
5562 This is similar to gen_lowpart in emit-rtl.c. */
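/* Hedged example of the MEM case below: the QImode low part of
   (mem:SI A) is (mem:QI A) when neither WORDS_BIG_ENDIAN nor
   BYTES_BIG_ENDIAN holds, but (mem:QI (plus A 3)) on a 32-bit
   big-endian target, and only if the adjusted address is valid.  */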
5563
5564rtx
5565gen_lowpart_if_possible (mode, x)
5566 enum machine_mode mode;
5567 register rtx x;
5568{
5569 rtx result = gen_lowpart_common (mode, x);
5570
5571 if (result)
5572 return result;
5573 else if (GET_CODE (x) == MEM)
5574 {
5575 /* This is the only other case we handle. */
5576 register int offset = 0;
5577 rtx new;
5578
5579#if WORDS_BIG_ENDIAN
5580 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
5581 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
5582#endif
5583#if BYTES_BIG_ENDIAN
5584 /* Adjust the address so that the address-after-the-data
5585 is unchanged. */
5586 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
5587 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
5588#endif
5589 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
5590 if (! memory_address_p (mode, XEXP (new, 0)))
5591 return 0;
5592 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
5593 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
5594 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
5595 return new;
5596 }
5597 else
5598 return 0;
5599}
5600\f
5601/* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
5602 branch. It will be zero if not.
5603
5604 In certain cases, this can cause us to add an equivalence. For example,
5605 if we are following the taken case of
5606 if (i == 2)
5607 we can add the fact that `i' and '2' are now equivalent.
5608
5609 In any case, we can record that this comparison was passed. If the same
5610 comparison is seen later, we will know its value. */
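/* Example of the equivalence recorded (illustrative insn): following
   the taken arm of
     (set (pc) (if_then_else (eq (reg:SI 65) (const_int 2))
			     (label_ref L) (pc)))
   lets us treat (reg:SI 65) and (const_int 2) as equal until the end
   of the current basic block.  */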
5611
5612static void
5613record_jump_equiv (insn, taken)
5614 rtx insn;
5615 int taken;
5616{
5617 int cond_known_true;
5618 rtx op0, op1;
13c9910f 5619 enum machine_mode mode, mode0, mode1;
5620 int reversed_nonequality = 0;
5621 enum rtx_code code;
5622
5623 /* Ensure this is the right kind of insn. */
5624 if (! condjump_p (insn) || simplejump_p (insn))
5625 return;
5626
5627 /* See if this jump condition is known true or false. */
5628 if (taken)
5629 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
5630 else
5631 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
5632
5633 /* Get the type of comparison being done and the operands being compared.
5634 If we had to reverse a non-equality condition, record that fact so we
5635 know that it isn't valid for floating-point. */
5636 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
5637 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
5638 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
5639
13c9910f 5640 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
5641 if (! cond_known_true)
5642 {
5643 reversed_nonequality = (code != EQ && code != NE);
5644 code = reverse_condition (code);
5645 }
5646
5647 /* The mode is the mode of the non-constant. */
5648 mode = mode0;
5649 if (mode1 != VOIDmode)
5650 mode = mode1;
5651
5652 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
5653}
5654
5655/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
5656 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
5657 Make any useful entries we can with that information. Called from
5658 above function and called recursively. */
5659
5660static void
5661record_jump_cond (code, mode, op0, op1, reversed_nonequality)
5662 enum rtx_code code;
5663 enum machine_mode mode;
5664 rtx op0, op1;
5665 int reversed_nonequality;
5666{
2197a88a 5667 unsigned op0_hash, op1_hash;
5668 int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
5669 struct table_elt *op0_elt, *op1_elt;
5670
5671 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
5672 we know that they are also equal in the smaller mode (this is also
5673 true for all smaller modes whether or not there is a SUBREG, but
5674	 is not worth testing for with no SUBREG).  */
5675
2e794ee8 5676 /* Note that GET_MODE (op0) may not equal MODE. */
7afe21cc 5677 if (code == EQ && GET_CODE (op0) == SUBREG
5678 && (GET_MODE_SIZE (GET_MODE (op0))
5679 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
5680 {
5681 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5682 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5683
5684 record_jump_cond (code, mode, SUBREG_REG (op0),
5685 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
5686 reversed_nonequality);
5687 }
5688
5689 if (code == EQ && GET_CODE (op1) == SUBREG
5690 && (GET_MODE_SIZE (GET_MODE (op1))
5691 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
5692 {
5693 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5694 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5695
5696 record_jump_cond (code, mode, SUBREG_REG (op1),
5697 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
5698 reversed_nonequality);
5699 }
5700
5701 /* Similarly, if this is an NE comparison, and either is a SUBREG
5702 making a smaller mode, we know the whole thing is also NE. */
5703
5704 /* Note that GET_MODE (op0) may not equal MODE;
5705 if we test MODE instead, we can get an infinite recursion
5706 alternating between two modes each wider than MODE. */
5707
5708 if (code == NE && GET_CODE (op0) == SUBREG
5709 && subreg_lowpart_p (op0)
5710 && (GET_MODE_SIZE (GET_MODE (op0))
5711 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
5712 {
5713 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5714 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5715
5716 record_jump_cond (code, mode, SUBREG_REG (op0),
5717 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
5718 reversed_nonequality);
5719 }
5720
5721 if (code == NE && GET_CODE (op1) == SUBREG
5722 && subreg_lowpart_p (op1)
5723 && (GET_MODE_SIZE (GET_MODE (op1))
5724 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
5725 {
5726 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5727 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5728
5729 record_jump_cond (code, mode, SUBREG_REG (op1),
5730 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
5731 reversed_nonequality);
5732 }
5733
5734 /* Hash both operands. */
5735
5736 do_not_record = 0;
5737 hash_arg_in_memory = 0;
5738 hash_arg_in_struct = 0;
2197a88a 5739 op0_hash = HASH (op0, mode);
5740 op0_in_memory = hash_arg_in_memory;
5741 op0_in_struct = hash_arg_in_struct;
5742
5743 if (do_not_record)
5744 return;
5745
5746 do_not_record = 0;
5747 hash_arg_in_memory = 0;
5748 hash_arg_in_struct = 0;
2197a88a 5749 op1_hash = HASH (op1, mode);
5750 op1_in_memory = hash_arg_in_memory;
5751 op1_in_struct = hash_arg_in_struct;
5752
5753 if (do_not_record)
5754 return;
5755
5756 /* Look up both operands. */
5757 op0_elt = lookup (op0, op0_hash, mode);
5758 op1_elt = lookup (op1, op1_hash, mode);
5759
5760 /* If we aren't setting two things equal all we can do is save this
5761 comparison. Similarly if this is floating-point. In the latter
5762 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
5763 If we record the equality, we might inadvertently delete code
5764 whose intent was to change -0 to +0. */
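  /* Illustrative hazard: in "if (x == 0.0) x = 0.0;" the store
     rewrites -0.0 to +0.0 whenever x is -0.0, since -0.0 == 0.0 under
     IEEE rules.  Recording x == 0.0 on the taken arm could let that
     store be deleted as redundant, which is why equality is not
     recorded for floating-point modes.  */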
5765
cbf6a543 5766 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
5767 {
5768 /* If we reversed a floating-point comparison, if OP0 is not a
5769	 register, or if OP1 is neither a register nor a constant, we can't
5770 do anything. */
5771
5772 if (GET_CODE (op1) != REG)
5773 op1 = equiv_constant (op1);
5774
cbf6a543 5775 if ((reversed_nonequality && FLOAT_MODE_P (mode))
5776 || GET_CODE (op0) != REG || op1 == 0)
5777 return;
5778
5779 /* Put OP0 in the hash table if it isn't already. This gives it a
5780 new quantity number. */
5781 if (op0_elt == 0)
5782 {
906c4e36 5783 if (insert_regs (op0, NULL_PTR, 0))
5784 {
5785 rehash_using_reg (op0);
2197a88a 5786 op0_hash = HASH (op0, mode);
5787
5788 /* If OP0 is contained in OP1, this changes its hash code
5789 as well. Faster to rehash than to check, except
5790 for the simple case of a constant. */
5791 if (! CONSTANT_P (op1))
2197a88a 5792 op1_hash = HASH (op1,mode);
5793 }
5794
2197a88a 5795 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
5796 op0_elt->in_memory = op0_in_memory;
5797 op0_elt->in_struct = op0_in_struct;
5798 }
5799
5800 qty_comparison_code[reg_qty[REGNO (op0)]] = code;
5801 if (GET_CODE (op1) == REG)
5802 {
5d5ea909 5803 /* Look it up again--in case op0 and op1 are the same. */
2197a88a 5804 op1_elt = lookup (op1, op1_hash, mode);
5d5ea909 5805
5806 /* Put OP1 in the hash table so it gets a new quantity number. */
5807 if (op1_elt == 0)
5808 {
906c4e36 5809 if (insert_regs (op1, NULL_PTR, 0))
5810 {
5811 rehash_using_reg (op1);
2197a88a 5812 op1_hash = HASH (op1, mode);
5813 }
5814
2197a88a 5815 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
5816 op1_elt->in_memory = op1_in_memory;
5817 op1_elt->in_struct = op1_in_struct;
5818 }
5819
5820 qty_comparison_qty[reg_qty[REGNO (op0)]] = reg_qty[REGNO (op1)];
5821 qty_comparison_const[reg_qty[REGNO (op0)]] = 0;
5822 }
5823 else
5824 {
5825 qty_comparison_qty[reg_qty[REGNO (op0)]] = -1;
5826 qty_comparison_const[reg_qty[REGNO (op0)]] = op1;
5827 }
5828
5829 return;
5830 }
5831
5832 /* If either side is still missing an equivalence, make it now,
5833 then merge the equivalences. */
7afe21cc 5834
5835 if (op0_elt == 0)
5836 {
eb5ad42a 5837 if (insert_regs (op0, NULL_PTR, 0))
5838 {
5839 rehash_using_reg (op0);
2197a88a 5840 op0_hash = HASH (op0, mode);
5841 }
5842
2197a88a 5843 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
5844 op0_elt->in_memory = op0_in_memory;
5845 op0_elt->in_struct = op0_in_struct;
5846 }
5847
5848 if (op1_elt == 0)
5849 {
eb5ad42a 5850 if (insert_regs (op1, NULL_PTR, 0))
5851 {
5852 rehash_using_reg (op1);
2197a88a 5853 op1_hash = HASH (op1, mode);
5854 }
5855
2197a88a 5856 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
5857 op1_elt->in_memory = op1_in_memory;
5858 op1_elt->in_struct = op1_in_struct;
7afe21cc 5859 }
5860
5861 merge_equiv_classes (op0_elt, op1_elt);
5862 last_jump_equiv_class = op0_elt;
5863}
5864\f
5865/* CSE processing for one instruction.
5866 First simplify sources and addresses of all assignments
5867	 in the instruction, using previously-computed equivalent values.
5868 Then install the new sources and destinations in the table
5869 of available values.
5870
5871 If IN_LIBCALL_BLOCK is nonzero, don't record any equivalence made in
5872 the insn. */
5873
5874/* Data on one SET contained in the instruction. */
5875
5876struct set
5877{
5878 /* The SET rtx itself. */
5879 rtx rtl;
5880 /* The SET_SRC of the rtx (the original value, if it is changing). */
5881 rtx src;
5882 /* The hash-table element for the SET_SRC of the SET. */
5883 struct table_elt *src_elt;
5884 /* Hash value for the SET_SRC. */
5885 unsigned src_hash;
5886 /* Hash value for the SET_DEST. */
5887 unsigned dest_hash;
5888 /* The SET_DEST, with SUBREG, etc., stripped. */
5889 rtx inner_dest;
5890 /* Place where the pointer to the INNER_DEST was found. */
5891 rtx *inner_dest_loc;
5892 /* Nonzero if the SET_SRC is in memory. */
5893 char src_in_memory;
5894 /* Nonzero if the SET_SRC is in a structure. */
5895 char src_in_struct;
5896 /* Nonzero if the SET_SRC contains something
5897 whose value cannot be predicted and understood. */
5898 char src_volatile;
5899 /* Original machine mode, in case it becomes a CONST_INT. */
5900 enum machine_mode mode;
5901 /* A constant equivalent for SET_SRC, if any. */
5902 rtx src_const;
5903 /* Hash value of constant equivalent for SET_SRC. */
5904 unsigned src_const_hash;
5905 /* Table entry for constant equivalent for SET_SRC, if any. */
5906 struct table_elt *src_const_elt;
5907};
5908
5909static void
5910cse_insn (insn, in_libcall_block)
5911 rtx insn;
5912 int in_libcall_block;
5913{
5914 register rtx x = PATTERN (insn);
7afe21cc 5915 register int i;
92f9aa51 5916 rtx tem;
5917 register int n_sets = 0;
5918
5919 /* Records what this insn does to set CC0. */
5920 rtx this_insn_cc0 = 0;
5921 enum machine_mode this_insn_cc0_mode;
5922 struct write_data writes_memory;
5923 static struct write_data init = {0, 0, 0, 0};
5924
5925 rtx src_eqv = 0;
5926 struct table_elt *src_eqv_elt = 0;
5927 int src_eqv_volatile;
5928 int src_eqv_in_memory;
5929 int src_eqv_in_struct;
2197a88a 5930 unsigned src_eqv_hash;
5931
5932 struct set *sets;
5933
5934 this_insn = insn;
5935 writes_memory = init;
5936
5937 /* Find all the SETs and CLOBBERs in this instruction.
5938 Record all the SETs in the array `set' and count them.
5939 Also determine whether there is a CLOBBER that invalidates
5940 all memory references, or all references at varying addresses. */
5941
5942 if (GET_CODE (insn) == CALL_INSN)
5943 {
5944 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
5945 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
5946 invalidate (SET_DEST (XEXP (tem, 0)));
5947 }
5948
5949 if (GET_CODE (x) == SET)
5950 {
5951 sets = (struct set *) alloca (sizeof (struct set));
5952 sets[0].rtl = x;
5953
5954 /* Ignore SETs that are unconditional jumps.
5955 They never need cse processing, so this does not hurt.
5956 The reason is not efficiency but rather
5957 so that we can test at the end for instructions
5958 that have been simplified to unconditional jumps
5959 and not be misled by unchanged instructions
5960 that were unconditional jumps to begin with. */
5961 if (SET_DEST (x) == pc_rtx
5962 && GET_CODE (SET_SRC (x)) == LABEL_REF)
5963 ;
5964
5965 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
5966 The hard function value register is used only once, to copy to
5967 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
5968 Ensure we invalidate the destination register. On the 80386 no
5969 other code would invalidate it since it is a fixed_reg.
5970 We need not check the return of apply_change_group; see canon_reg. */
5971
5972 else if (GET_CODE (SET_SRC (x)) == CALL)
5973 {
5974 canon_reg (SET_SRC (x), insn);
77fa0940 5975 apply_change_group ();
5976 fold_rtx (SET_SRC (x), insn);
5977 invalidate (SET_DEST (x));
5978 }
5979 else
5980 n_sets = 1;
5981 }
5982 else if (GET_CODE (x) == PARALLEL)
5983 {
5984 register int lim = XVECLEN (x, 0);
5985
5986 sets = (struct set *) alloca (lim * sizeof (struct set));
5987
5988 /* Find all regs explicitly clobbered in this insn,
5989 and ensure they are not replaced with any other regs
5990 elsewhere in this insn.
5991 When a reg that is clobbered is also used for input,
5992 we should presume that that is for a reason,
5993 and we should not substitute some other register
5994 which is not supposed to be clobbered.
5995 Therefore, this loop cannot be merged into the one below
830a38ee 5996 because a CALL may precede a CLOBBER and refer to the
5997 value clobbered. We must not let a canonicalization do
5998 anything in that case. */
5999 for (i = 0; i < lim; i++)
6000 {
6001 register rtx y = XVECEXP (x, 0, i);
6002 if (GET_CODE (y) == CLOBBER)
6003 {
6004 rtx clobbered = XEXP (y, 0);
6005
6006 if (GET_CODE (clobbered) == REG
6007 || GET_CODE (clobbered) == SUBREG)
6008 invalidate (clobbered);
6009 else if (GET_CODE (clobbered) == STRICT_LOW_PART
6010 || GET_CODE (clobbered) == ZERO_EXTRACT)
6011 invalidate (XEXP (clobbered, 0));
6012 }
6013 }
6014
6015 for (i = 0; i < lim; i++)
6016 {
6017 register rtx y = XVECEXP (x, 0, i);
6018 if (GET_CODE (y) == SET)
6019 {
6020 /* As above, we ignore unconditional jumps and call-insns and
6021 ignore the result of apply_change_group. */
6022 if (GET_CODE (SET_SRC (y)) == CALL)
6023 {
6024 canon_reg (SET_SRC (y), insn);
77fa0940 6025 apply_change_group ();
6026 fold_rtx (SET_SRC (y), insn);
6027 invalidate (SET_DEST (y));
6028 }
6029 else if (SET_DEST (y) == pc_rtx
6030 && GET_CODE (SET_SRC (y)) == LABEL_REF)
6031 ;
6032 else
6033 sets[n_sets++].rtl = y;
6034 }
6035 else if (GET_CODE (y) == CLOBBER)
6036 {
6037 /* If we clobber memory, take note of that,
6038 and canon the address.
6039 This does nothing when a register is clobbered
6040 because we have already invalidated the reg. */
6041 if (GET_CODE (XEXP (y, 0)) == MEM)
6042 {
906c4e36 6043 canon_reg (XEXP (y, 0), NULL_RTX);
6044 note_mem_written (XEXP (y, 0), &writes_memory);
6045 }
6046 }
6047 else if (GET_CODE (y) == USE
6048 && ! (GET_CODE (XEXP (y, 0)) == REG
6049 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
906c4e36 6050 canon_reg (y, NULL_RTX);
6051 else if (GET_CODE (y) == CALL)
6052 {
6053 /* The result of apply_change_group can be ignored; see
6054 canon_reg. */
7afe21cc 6055 canon_reg (y, insn);
77fa0940 6056 apply_change_group ();
6057 fold_rtx (y, insn);
6058 }
6059 }
6060 }
6061 else if (GET_CODE (x) == CLOBBER)
6062 {
6063 if (GET_CODE (XEXP (x, 0)) == MEM)
6064 {
906c4e36 6065 canon_reg (XEXP (x, 0), NULL_RTX);
6066 note_mem_written (XEXP (x, 0), &writes_memory);
6067 }
6068 }
6069
6070 /* Canonicalize a USE of a pseudo register or memory location. */
6071 else if (GET_CODE (x) == USE
6072 && ! (GET_CODE (XEXP (x, 0)) == REG
6073 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
906c4e36 6074 canon_reg (XEXP (x, 0), NULL_RTX);
6075 else if (GET_CODE (x) == CALL)
6076 {
7722328e 6077 /* The result of apply_change_group can be ignored; see canon_reg. */
7afe21cc 6078 canon_reg (x, insn);
77fa0940 6079 apply_change_group ();
6080 fold_rtx (x, insn);
6081 }
6082
6083 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
6084 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
6085 is handled specially for this case, and if it isn't set, then there will
6086	 be no equivalence for the destination.  */
6087 if (n_sets == 1 && REG_NOTES (insn) != 0
6088 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
6089 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
6090 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
92f9aa51 6091 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
6092
6093 /* Canonicalize sources and addresses of destinations.
6094 We do this in a separate pass to avoid problems when a MATCH_DUP is
6095 present in the insn pattern. In that case, we want to ensure that
6096 we don't break the duplicate nature of the pattern. So we will replace
6097 both operands at the same time. Otherwise, we would fail to find an
6098 equivalent substitution in the loop calling validate_change below.
6099
6100 We used to suppress canonicalization of DEST if it appears in SRC,
77fa0940 6101 but we don't do this any more. */
6102
6103 for (i = 0; i < n_sets; i++)
6104 {
6105 rtx dest = SET_DEST (sets[i].rtl);
6106 rtx src = SET_SRC (sets[i].rtl);
6107 rtx new = canon_reg (src, insn);
6108
6109 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
6110 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
6111 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
6112 || insn_n_dups[recog_memoized (insn)] > 0)
6113 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
6114 else
6115 SET_SRC (sets[i].rtl) = new;
6116
6117 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
6118 {
6119 validate_change (insn, &XEXP (dest, 1),
77fa0940 6120 canon_reg (XEXP (dest, 1), insn), 1);
7afe21cc 6121 validate_change (insn, &XEXP (dest, 2),
77fa0940 6122 canon_reg (XEXP (dest, 2), insn), 1);
6123 }
6124
6125 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
6126 || GET_CODE (dest) == ZERO_EXTRACT
6127 || GET_CODE (dest) == SIGN_EXTRACT)
6128 dest = XEXP (dest, 0);
6129
6130 if (GET_CODE (dest) == MEM)
6131 canon_reg (dest, insn);
6132 }
6133
6134 /* Now that we have done all the replacements, we can apply the change
6135 group and see if they all work. Note that this will cause some
6136 canonicalizations that would have worked individually not to be applied
6137 because some other canonicalization didn't work, but this should not
6138 occur often.
6139
6140 The result of apply_change_group can be ignored; see canon_reg. */
6141
6142 apply_change_group ();
6143
6144 /* Set sets[i].src_elt to the class each source belongs to.
6145 Detect assignments from or to volatile things
6146 and set set[i] to zero so they will be ignored
6147 in the rest of this function.
6148
6149 Nothing in this loop changes the hash table or the register chains. */
6150
6151 for (i = 0; i < n_sets; i++)
6152 {
6153 register rtx src, dest;
6154 register rtx src_folded;
6155 register struct table_elt *elt = 0, *p;
6156 enum machine_mode mode;
6157 rtx src_eqv_here;
6158 rtx src_const = 0;
6159 rtx src_related = 0;
6160 struct table_elt *src_const_elt = 0;
6161 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
6162 int src_related_cost = 10000, src_elt_cost = 10000;
6163	 /* Set non-zero if we need to call force_const_mem on the
6164 contents of src_folded before using it. */
6165 int src_folded_force_flag = 0;
6166
6167 dest = SET_DEST (sets[i].rtl);
6168 src = SET_SRC (sets[i].rtl);
6169
6170 /* If SRC is a constant that has no machine mode,
6171 hash it with the destination's machine mode.
6172 This way we can keep different modes separate. */
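      /* Hedged example: (const_int 1) carries VOIDmode, so when it is
	 stored to an SImode register it is hashed with SImode, and when
	 stored to a DImode register it is hashed with DImode; the two
	 land in separate equivalence classes.  */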
6173
6174 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6175 sets[i].mode = mode;
6176
6177 if (src_eqv)
6178 {
6179 enum machine_mode eqvmode = mode;
6180 if (GET_CODE (dest) == STRICT_LOW_PART)
6181 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6182 do_not_record = 0;
6183 hash_arg_in_memory = 0;
6184 hash_arg_in_struct = 0;
6185 src_eqv = fold_rtx (src_eqv, insn);
2197a88a 6186 src_eqv_hash = HASH (src_eqv, eqvmode);
6187
6188 /* Find the equivalence class for the equivalent expression. */
6189
6190 if (!do_not_record)
2197a88a 6191 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
6192
6193 src_eqv_volatile = do_not_record;
6194 src_eqv_in_memory = hash_arg_in_memory;
6195 src_eqv_in_struct = hash_arg_in_struct;
6196 }
6197
6198 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
6199 value of the INNER register, not the destination. So it is not
6200 a legal substitution for the source. But save it for later. */
6201 if (GET_CODE (dest) == STRICT_LOW_PART)
6202 src_eqv_here = 0;
6203 else
6204 src_eqv_here = src_eqv;
6205
6206	 /* Simplify any foldable subexpressions in SRC.  Then get the fully-
6207 simplified result, which may not necessarily be valid. */
6208 src_folded = fold_rtx (src, insn);
6209
6210 /* If storing a constant in a bitfield, pre-truncate the constant
6211 so we will be able to record it later. */
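      /* Worked instance: storing (const_int 0x1f) into a 4-bit
	 ZERO_EXTRACT is pre-truncated below to (const_int 0xf), the
	 value the field will actually hold after the store.  */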
6212 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6213 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6214 {
6215 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6216
6217 if (GET_CODE (src) == CONST_INT
6218 && GET_CODE (width) == CONST_INT
6219 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6220 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6221 src_folded
6222 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
6223 << INTVAL (width)) - 1));
6224 }
6225
6226 /* Compute SRC's hash code, and also notice if it
6227 should not be recorded at all. In that case,
6228 prevent any further processing of this assignment. */
6229 do_not_record = 0;
6230 hash_arg_in_memory = 0;
6231 hash_arg_in_struct = 0;
6232
6233 sets[i].src = src;
2197a88a 6234 sets[i].src_hash = HASH (src, mode);
6235 sets[i].src_volatile = do_not_record;
6236 sets[i].src_in_memory = hash_arg_in_memory;
6237 sets[i].src_in_struct = hash_arg_in_struct;
6238
6239#if 0
6240 /* It is no longer clear why we used to do this, but it doesn't
6241 appear to still be needed. So let's try without it since this
6242 code hurts cse'ing widened ops. */
6243 /* If source is a perverse subreg (such as QI treated as an SI),
6244 treat it as volatile. It may do the work of an SI in one context
6245 where the extra bits are not being used, but cannot replace an SI
6246 in general. */
6247 if (GET_CODE (src) == SUBREG
6248 && (GET_MODE_SIZE (GET_MODE (src))
6249 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6250 sets[i].src_volatile = 1;
0dadecf6 6251#endif
6252
6253 /* Locate all possible equivalent forms for SRC. Try to replace
6254 SRC in the insn with each cheaper equivalent.
6255
6256 We have the following types of equivalents: SRC itself, a folded
6257 version, a value given in a REG_EQUAL note, or a value related
6258 to a constant.
6259
6260 Each of these equivalents may be part of an additional class
6261 of equivalents (if more than one is in the table, they must be in
6262 the same class; we check for this).
6263
6264 If the source is volatile, we don't do any table lookups.
6265
6266 We note any constant equivalent for possible later use in a
6267 REG_NOTE. */
6268
6269 if (!sets[i].src_volatile)
2197a88a 6270 elt = lookup (src, sets[i].src_hash, mode);
6271
6272 sets[i].src_elt = elt;
6273
6274 if (elt && src_eqv_here && src_eqv_elt)
6275 {
6276 if (elt->first_same_value != src_eqv_elt->first_same_value)
6277 {
6278 /* The REG_EQUAL is indicating that two formerly distinct
6279 classes are now equivalent. So merge them. */
6280 merge_equiv_classes (elt, src_eqv_elt);
6281 src_eqv_hash = HASH (src_eqv, elt->mode);
6282 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
6283 }
6284
6285 src_eqv_here = 0;
6286 }
6287
6288 else if (src_eqv_elt)
6289 elt = src_eqv_elt;
6290
6291 /* Try to find a constant somewhere and record it in `src_const'.
6292 Record its table element, if any, in `src_const_elt'. Look in
6293 any known equivalences first. (If the constant is not in the
2197a88a 6294 table, also set `sets[i].src_const_hash'). */
6295 if (elt)
6296 for (p = elt->first_same_value; p; p = p->next_same_value)
6297 if (p->is_const)
6298 {
6299 src_const = p->exp;
6300 src_const_elt = elt;
6301 break;
6302 }
6303
6304 if (src_const == 0
6305 && (CONSTANT_P (src_folded)
6306 /* Consider (minus (label_ref L1) (label_ref L2)) as
6307 "constant" here so we will record it. This allows us
6308 to fold switch statements when an ADDR_DIFF_VEC is used. */
6309 || (GET_CODE (src_folded) == MINUS
6310 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6311 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6312 src_const = src_folded, src_const_elt = elt;
6313 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6314 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6315
6316 /* If we don't know if the constant is in the table, get its
6317 hash code and look it up. */
6318 if (src_const && src_const_elt == 0)
6319 {
6320 sets[i].src_const_hash = HASH (src_const, mode);
6321 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
6322 }
6323
6324 sets[i].src_const = src_const;
6325 sets[i].src_const_elt = src_const_elt;
6326
6327 /* If the constant and our source are both in the table, mark them as
6328 equivalent. Otherwise, if a constant is in the table but the source
6329 isn't, set ELT to it. */
6330 if (src_const_elt && elt
6331 && src_const_elt->first_same_value != elt->first_same_value)
6332 merge_equiv_classes (elt, src_const_elt);
6333 else if (src_const_elt && elt == 0)
6334 elt = src_const_elt;
6335
6336 /* See if there is a register linearly related to a constant
6337 equivalent of SRC. */
6338 if (src_const
6339 && (GET_CODE (src_const) == CONST
6340 || (src_const_elt && src_const_elt->related_value != 0)))
6341 {
6342 src_related = use_related_value (src_const, src_const_elt);
6343 if (src_related)
6344 {
6345 struct table_elt *src_related_elt
6346 = lookup (src_related, HASH (src_related, mode), mode);
6347 if (src_related_elt && elt)
6348 {
6349 if (elt->first_same_value
6350 != src_related_elt->first_same_value)
6351 /* This can occur when we previously saw a CONST
6352 involving a SYMBOL_REF and then see the SYMBOL_REF
6353 twice. Merge the involved classes. */
6354 merge_equiv_classes (elt, src_related_elt);
6355
6356 src_related = 0;
6357 src_related_elt = 0;
6358 }
6359 else if (src_related_elt && elt == 0)
6360 elt = src_related_elt;
6361 }
6362 }
6363
6364 /* See if we have a CONST_INT that is already in a register in a
6365 wider mode. */
6366
6367 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6368 && GET_MODE_CLASS (mode) == MODE_INT
6369 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6370 {
6371 enum machine_mode wider_mode;
6372
6373 for (wider_mode = GET_MODE_WIDER_MODE (mode);
6374 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6375 && src_related == 0;
6376 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6377 {
6378 struct table_elt *const_elt
6379 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6380
6381 if (const_elt == 0)
6382 continue;
6383
6384 for (const_elt = const_elt->first_same_value;
6385 const_elt; const_elt = const_elt->next_same_value)
6386 if (GET_CODE (const_elt->exp) == REG)
6387 {
6388 src_related = gen_lowpart_if_possible (mode,
6389 const_elt->exp);
6390 break;
6391 }
6392 }
6393 }
6394
6395 /* Another possibility is that we have an AND with a constant in
6396 a mode narrower than a word. If so, it might have been generated
6397 as part of an "if" which would narrow the AND. If we already
6398 have done the AND in a wider mode, we can use a SUBREG of that
6399 value. */
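      /* Sketch with hypothetical registers: when computing
	 (and:QI X (const_int 15)) and the table already holds
	 (and:SI X' (const_int 15)) -- X' being an SImode reg whose low
	 part is X -- with (reg:SI 70) as an equivalent, the loop below
	 tries the QImode low part of (reg:SI 70) as SRC_RELATED.  */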
6400
6401 if (flag_expensive_optimizations && ! src_related
6402 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6403 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6404 {
6405 enum machine_mode tmode;
906c4e36 6406 rtx new_and = gen_rtx (AND, VOIDmode, NULL_RTX, XEXP (src, 1));
6407
6408 for (tmode = GET_MODE_WIDER_MODE (mode);
6409 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6410 tmode = GET_MODE_WIDER_MODE (tmode))
6411 {
6412 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6413 struct table_elt *larger_elt;
6414
6415 if (inner)
6416 {
6417 PUT_MODE (new_and, tmode);
6418 XEXP (new_and, 0) = inner;
6419 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6420 if (larger_elt == 0)
6421 continue;
6422
6423 for (larger_elt = larger_elt->first_same_value;
6424 larger_elt; larger_elt = larger_elt->next_same_value)
6425 if (GET_CODE (larger_elt->exp) == REG)
6426 {
6427 src_related
6428 = gen_lowpart_if_possible (mode, larger_elt->exp);
6429 break;
6430 }
6431
6432 if (src_related)
6433 break;
6434 }
6435 }
6436 }
6437
6438#ifdef LOAD_EXTEND_OP
6439 /* See if a MEM has already been loaded with a widening operation;
6440 if it has, we can use a subreg of that. Many CISC machines
6441 also have such operations, but this is only likely to be
6442	 beneficial on these machines.  */
6443
6444 if (flag_expensive_optimizations && src_related == 0
6445 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6446 && GET_MODE_CLASS (mode) == MODE_INT
6447 && GET_CODE (src) == MEM && ! do_not_record
6448 && LOAD_EXTEND_OP (mode) != NIL)
6449 {
6450 enum machine_mode tmode;
6451
6452 /* Set what we are trying to extend and the operation it might
6453 have been extended with. */
6454 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
6455 XEXP (memory_extend_rtx, 0) = src;
6456
6457 for (tmode = GET_MODE_WIDER_MODE (mode);
6458 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6459 tmode = GET_MODE_WIDER_MODE (tmode))
6460 {
6461 struct table_elt *larger_elt;
6462
6463 PUT_MODE (memory_extend_rtx, tmode);
6464 larger_elt = lookup (memory_extend_rtx,
6465 HASH (memory_extend_rtx, tmode), tmode);
6466 if (larger_elt == 0)
6467 continue;
6468
6469 for (larger_elt = larger_elt->first_same_value;
6470 larger_elt; larger_elt = larger_elt->next_same_value)
6471 if (GET_CODE (larger_elt->exp) == REG)
6472 {
6473 src_related = gen_lowpart_if_possible (mode,
6474 larger_elt->exp);
6475 break;
6476 }
6477
6478 if (src_related)
6479 break;
6480 }
6481 }
6482#endif /* LOAD_EXTEND_OP */
6483
6484 if (src == src_folded)
6485 src_folded = 0;
6486
6487 /* At this point, ELT, if non-zero, points to a class of expressions
6488 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
6489 and SRC_RELATED, if non-zero, each contain additional equivalent
6490 expressions. Prune these latter expressions by deleting expressions
6491 already in the equivalence class.
6492
6493 Check for an equivalent identical to the destination. If found,
6494 this is the preferred equivalent since it will likely lead to
6495 elimination of the insn. Indicate this by placing it in
6496 `src_related'. */
6497
6498 if (elt) elt = elt->first_same_value;
6499 for (p = elt; p; p = p->next_same_value)
6500 {
6501 enum rtx_code code = GET_CODE (p->exp);
6502
6503 /* If the expression is not valid, ignore it. Then we do not
6504 have to check for validity below. In most cases, we can use
6505 `rtx_equal_p', since canonicalization has already been done. */
6506 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
6507 continue;
6508
6509 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
6510 src = 0;
6511 else if (src_folded && GET_CODE (src_folded) == code
6512 && rtx_equal_p (src_folded, p->exp))
6513 src_folded = 0;
6514 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
6515 && rtx_equal_p (src_eqv_here, p->exp))
6516 src_eqv_here = 0;
6517 else if (src_related && GET_CODE (src_related) == code
6518 && rtx_equal_p (src_related, p->exp))
6519 src_related = 0;
6520
6521	 /* If this is the same as the destination of the insn, we want
6522 to prefer it. Copy it to src_related. The code below will
6523 then give it a negative cost. */
6524 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
6525 src_related = dest;
6526
6527 }
6528
6529 /* Find the cheapest valid equivalent, trying all the available
6530 possibilities. Prefer items not in the hash table to ones
6531 that are when they are equal cost. Note that we can never
6532 worsen an insn as the current contents will also succeed.
05c33dd8 6533 If we find an equivalent identical to the destination, use it as best,
6534 since this insn will probably be eliminated in that case. */
6535 if (src)
6536 {
6537 if (rtx_equal_p (src, dest))
6538 src_cost = -1;
6539 else
6540 src_cost = COST (src);
6541 }
6542
6543 if (src_eqv_here)
6544 {
6545 if (rtx_equal_p (src_eqv_here, dest))
6546 src_eqv_cost = -1;
6547 else
6548 src_eqv_cost = COST (src_eqv_here);
6549 }
6550
6551 if (src_folded)
6552 {
6553 if (rtx_equal_p (src_folded, dest))
6554 src_folded_cost = -1;
6555 else
6556 src_folded_cost = COST (src_folded);
6557 }
6558
6559 if (src_related)
6560 {
6561 if (rtx_equal_p (src_related, dest))
6562 src_related_cost = -1;
6563 else
6564 src_related_cost = COST (src_related);
6565 }
6566
6567 /* If this was an indirect jump insn, a known label will really be
6568 cheaper even though it looks more expensive. */
6569 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
6570 src_folded = src_const, src_folded_cost = -1;
6571
6572 /* Terminate loop when replacement made. This must terminate since
6573 the current contents will be tested and will always be valid. */
6574 while (1)
6575 {
6576 rtx trial;
6577
6578 /* Skip invalid entries. */
6579 while (elt && GET_CODE (elt->exp) != REG
6580 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6581 elt = elt->next_same_value;
6582
6583 if (elt) src_elt_cost = elt->cost;
6584
6585 /* Find cheapest and skip it for the next time. For items
6586 of equal cost, use this order:
6587 src_folded, src, src_eqv, src_related and hash table entry. */
6588 if (src_folded_cost <= src_cost
6589 && src_folded_cost <= src_eqv_cost
6590 && src_folded_cost <= src_related_cost
6591 && src_folded_cost <= src_elt_cost)
6592 {
6593 trial = src_folded, src_folded_cost = 10000;
6594 if (src_folded_force_flag)
6595 trial = force_const_mem (mode, trial);
6596 }
6597 else if (src_cost <= src_eqv_cost
6598 && src_cost <= src_related_cost
6599 && src_cost <= src_elt_cost)
6600 trial = src, src_cost = 10000;
6601 else if (src_eqv_cost <= src_related_cost
6602 && src_eqv_cost <= src_elt_cost)
0af62b41 6603 trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
7afe21cc 6604 else if (src_related_cost <= src_elt_cost)
0af62b41 6605 trial = copy_rtx (src_related), src_related_cost = 10000;
6606 else
6607 {
05c33dd8 6608 trial = copy_rtx (elt->exp);
6609 elt = elt->next_same_value;
6610 src_elt_cost = 10000;
6611 }
6612
6613 /* We don't normally have an insn matching (set (pc) (pc)), so
6614 check for this separately here. We will delete such an
6615 insn below.
6616
6617 Tablejump insns contain a USE of the table, so simply replacing
6618 the operand with the constant won't match. This is simply an
6619 unconditional branch, however, and is therefore valid. Just
6620 insert the substitution here and we will delete and re-emit
6621 the insn later. */
6622
6623 if (n_sets == 1 && dest == pc_rtx
6624 && (trial == pc_rtx
6625 || (GET_CODE (trial) == LABEL_REF
6626 && ! condjump_p (insn))))
6627 {
6628 /* If TRIAL is a label in front of a jump table, we are
6629 really falling through the switch (this is how casesi
6630 insns work), so we must branch around the table. */
6631 if (GET_CODE (trial) == CODE_LABEL
6632 && NEXT_INSN (trial) != 0
6633 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
6634 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
6635 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
6636
6637 trial = gen_rtx (LABEL_REF, Pmode, get_label_after (trial));
6638
6639 SET_SRC (sets[i].rtl) = trial;
44333223 6640 cse_jumps_altered = 1;
6641 break;
6642 }
6643
6644 /* Look for a substitution that makes a valid insn. */
6645 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
05c33dd8 6646 {
6647 /* The result of apply_change_group can be ignored; see
6648 canon_reg. */
6649
6650 validate_change (insn, &SET_SRC (sets[i].rtl),
6651 canon_reg (SET_SRC (sets[i].rtl), insn),
6652 1);
6702af89 6653 apply_change_group ();
6654 break;
6655 }
6656
6657 /* If we previously found constant pool entries for
6658 constants and this is a constant, try making a
6659 pool entry. Put it in src_folded unless we already have done
6660 this since that is where it likely came from. */
6661
6662 else if (constant_pool_entries_cost
6663 && CONSTANT_P (trial)
6664 && (src_folded == 0 || GET_CODE (src_folded) != MEM)
6665 && GET_MODE_CLASS (mode) != MODE_CC)
6666 {
6667 src_folded_force_flag = 1;
6668 src_folded = trial;
6669 src_folded_cost = constant_pool_entries_cost;
6670 }
6671 }
6672
6673 src = SET_SRC (sets[i].rtl);
6674
6675 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
6676 However, there is an important exception: If both are registers
6677 that are not the head of their equivalence class, replace SET_SRC
6678 with the head of the class. If we do not do this, we will have
6679 both registers live over a portion of the basic block. This way,
6680 their lifetimes will likely abut instead of overlapping. */
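      /* Hedged example: if this insn has become (set (reg 70) (reg 70))
	 and reg 70's class is headed by reg 69, SET_SRC is rewritten to
	 (reg 69); reg 70's lifetime can then end here rather than
	 overlap reg 69's.  */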
6681 if (GET_CODE (dest) == REG
6682 && REGNO_QTY_VALID_P (REGNO (dest))
6683 && qty_mode[reg_qty[REGNO (dest)]] == GET_MODE (dest)
6684 && qty_first_reg[reg_qty[REGNO (dest)]] != REGNO (dest)
6685 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
6686 /* Don't do this if the original insn had a hard reg as
6687 SET_SRC. */
6688 && (GET_CODE (sets[i].src) != REG
6689 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
6690 /* We can't call canon_reg here because it won't do anything if
6691 SRC is a hard register. */
6692 {
6693 int first = qty_first_reg[reg_qty[REGNO (src)]];
6694
6695 src = SET_SRC (sets[i].rtl)
6696 = first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
6697 : gen_rtx (REG, GET_MODE (src), first);
6698
6699 /* If we had a constant that is cheaper than what we are now
6700 setting SRC to, use that constant. We ignored it when we
6701 thought we could make this into a no-op. */
6702 if (src_const && COST (src_const) < COST (src)
6703 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const, 0))
6704 src = src_const;
6705 }
6706
6707 /* If we made a change, recompute SRC values. */
6708 if (src != sets[i].src)
6709 {
6710 do_not_record = 0;
6711 hash_arg_in_memory = 0;
6712 hash_arg_in_struct = 0;
6713 sets[i].src = src;
2197a88a 6714 sets[i].src_hash = HASH (src, mode);
6715 sets[i].src_volatile = do_not_record;
6716 sets[i].src_in_memory = hash_arg_in_memory;
6717 sets[i].src_in_struct = hash_arg_in_struct;
2197a88a 6718 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
6719 }
6720
6721 /* If this is a single SET, we are setting a register, and we have an
6722 equivalent constant, we want to add a REG_NOTE. We don't want
6723 to write a REG_EQUAL note for a constant pseudo since verifying that
d45cf215 6724 that pseudo hasn't been eliminated is a pain. Such a note also
6725 won't help anything. */
6726 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
6727 && GET_CODE (src_const) != REG)
6728 {
92f9aa51 6729 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6730
6731 /* Record the actual constant value in a REG_EQUAL note, making
6732 a new one if one does not already exist. */
6733 if (tem)
6734 XEXP (tem, 0) = src_const;
6735 else
6736 REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL,
6737 src_const, REG_NOTES (insn));
6738
6739 /* If storing a constant value in a register that
6740 previously held the constant value 0,
6741 record this fact with a REG_WAS_0 note on this insn.
6742
6743 Note that the *register* is required to have previously held 0,
6744 not just any register in the quantity and we must point to the
6745 insn that set that register to zero.
6746
6747 Rather than track each register individually, we just see if
6748 the last set for this quantity was for this register. */
6749
6750 if (REGNO_QTY_VALID_P (REGNO (dest))
6751 && qty_const[reg_qty[REGNO (dest)]] == const0_rtx)
6752 {
6753 /* See if we previously had a REG_WAS_0 note. */
906c4e36 6754 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6755 rtx const_insn = qty_const_insn[reg_qty[REGNO (dest)]];
6756
6757 if ((tem = single_set (const_insn)) != 0
6758 && rtx_equal_p (SET_DEST (tem), dest))
6759 {
6760 if (note)
6761 XEXP (note, 0) = const_insn;
6762 else
6763 REG_NOTES (insn) = gen_rtx (INSN_LIST, REG_WAS_0,
6764 const_insn, REG_NOTES (insn));
6765 }
6766 }
6767 }
6768
6769 /* Now deal with the destination. */
6770 do_not_record = 0;
6771	  sets[i].inner_dest_loc = &SET_DEST (sets[i].rtl);
6772
6773 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
6774 to the MEM or REG within it. */
6775 while (GET_CODE (dest) == SIGN_EXTRACT
6776 || GET_CODE (dest) == ZERO_EXTRACT
6777 || GET_CODE (dest) == SUBREG
6778 || GET_CODE (dest) == STRICT_LOW_PART)
6779 {
6780 sets[i].inner_dest_loc = &XEXP (dest, 0);
6781 dest = XEXP (dest, 0);
6782 }
6783
6784 sets[i].inner_dest = dest;
6785
6786 if (GET_CODE (dest) == MEM)
6787 {
6788 dest = fold_rtx (dest, insn);
6789
6790 /* Decide whether we invalidate everything in memory,
6791 or just things at non-fixed places.
6792 Writing a large aggregate must invalidate everything
6793 because we don't know how long it is. */
6794 note_mem_written (dest, &writes_memory);
6795 }
6796
6797 /* Compute the hash code of the destination now,
6798 before the effects of this instruction are recorded,
6799 since the register values used in the address computation
6800 are those before this instruction. */
2197a88a 6801 sets[i].dest_hash = HASH (dest, mode);
6802
6803 /* Don't enter a bit-field in the hash table
6804 because the value in it after the store
6805 may not equal what was stored, due to truncation. */
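  /* E.g., storing (const_int 5) in a 2-bit field leaves the field
     holding 1, not 5, so recording the source as the field's value would
     be wrong; the test below admits only constants that fit in WIDTH bits.  */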
6806
6807 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6808 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6809 {
6810 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6811
6812 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
6813 && GET_CODE (width) == CONST_INT
6814 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6815 && ! (INTVAL (src_const)
6816 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6817 /* Exception: if the value is constant,
6818 and it won't be truncated, record it. */
6819 ;
6820 else
6821 {
6822 /* This is chosen so that the destination will be invalidated
6823 but no new value will be recorded.
6824 We must invalidate because sometimes constant
6825 values can be recorded for bitfields. */
6826 sets[i].src_elt = 0;
6827 sets[i].src_volatile = 1;
6828 src_eqv = 0;
6829 src_eqv_elt = 0;
6830 }
6831 }
6832
6833 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
6834 the insn. */
6835 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
6836 {
6837 PUT_CODE (insn, NOTE);
6838 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
6839 NOTE_SOURCE_FILE (insn) = 0;
6840 cse_jumps_altered = 1;
6841 /* One less use of the label this insn used to jump to. */
6842 --LABEL_NUSES (JUMP_LABEL (insn));
6843 /* No more processing for this set. */
6844 sets[i].rtl = 0;
6845 }
6846
6847 /* If this SET is now setting PC to a label, we know it used to
6848 be a conditional or computed branch. So we see if we can follow
6849 it. If it was a computed branch, delete it and re-emit. */
6850 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
6851 {
6852 rtx p;
6853
6854 /* If this is not in the format for a simple branch and
 6855 this is the only SET in the insn, re-emit it. */
6856 if (! simplejump_p (insn) && n_sets == 1)
6857 {
6858 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
6859 JUMP_LABEL (new) = XEXP (src, 0);
6860 LABEL_NUSES (XEXP (src, 0))++;
6861 delete_insn (insn);
6862 insn = new;
6863 }
6864 else
6865 /* Otherwise, force rerecognition, since it probably had
6866 a different pattern before.
6867 This shouldn't really be necessary, since whatever
6868 changed the source value above should have done this.
6869 Until the right place is found, might as well do this here. */
6870 INSN_CODE (insn) = -1;
6871
6872 /* Now that we've converted this jump to an unconditional jump,
6873 there is dead code after it. Delete the dead code until we
6874 reach a BARRIER, the end of the function, or a label. Do
6875 not delete NOTEs except for NOTE_INSN_DELETED since later
6876 phases assume these notes are retained. */
6877
6878 p = insn;
6879
6880 while (NEXT_INSN (p) != 0
6881 && GET_CODE (NEXT_INSN (p)) != BARRIER
6882 && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
6883 {
6884 if (GET_CODE (NEXT_INSN (p)) != NOTE
6885 || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
6886 delete_insn (NEXT_INSN (p));
6887 else
6888 p = NEXT_INSN (p);
6889 }
6890
6891 /* If we don't have a BARRIER immediately after INSN, put one there.
6892 Much code assumes that there are no NOTEs between a JUMP_INSN and
6893 BARRIER. */
6894
6895 if (NEXT_INSN (insn) == 0
6896 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
6897 emit_barrier_after (insn);
6898
6899 /* We might have two BARRIERs separated by notes. Delete the second
6900 one if so. */
6901
6902 if (p != insn && NEXT_INSN (p) != 0
6903 && GET_CODE (NEXT_INSN (p)) == BARRIER)
6904 delete_insn (NEXT_INSN (p));
6905
6906 cse_jumps_altered = 1;
6907 sets[i].rtl = 0;
6908 }
6909
6910 /* If destination is volatile, invalidate it and then do no further
6911 processing for this assignment. */
6912
6913 else if (do_not_record)
6914 {
6915 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
6916 || GET_CODE (dest) == MEM)
6917 invalidate (dest);
6918 else if (GET_CODE (dest) == STRICT_LOW_PART
6919 || GET_CODE (dest) == ZERO_EXTRACT)
6920 invalidate (XEXP (dest, 0));
6921 sets[i].rtl = 0;
6922 }
6923
6924 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
2197a88a 6925 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
6926
6927#ifdef HAVE_cc0
6928 /* If setting CC0, record what it was set to, or a constant, if it
6929 is equivalent to a constant. If it is being set to a floating-point
6930 value, make a COMPARE with the appropriate constant of 0. If we
6931 don't do this, later code can interpret this as a test against
6932 const0_rtx, which can cause problems if we try to put it into an
6933 insn as a floating-point operand. */
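  /* E.g., (set (cc0) (reg:SF 100)) is recorded roughly as
       (compare (reg:SF 100) (const_double 0.0))
     rather than as the bare register (pseudo number hypothetical).  */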
6934 if (dest == cc0_rtx)
6935 {
6936 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
6937 this_insn_cc0_mode = mode;
cbf6a543 6938 if (FLOAT_MODE_P (mode))
6939 this_insn_cc0 = gen_rtx (COMPARE, VOIDmode, this_insn_cc0,
6940 CONST0_RTX (mode));
6941 }
6942#endif
6943 }
6944
6945 /* Now enter all non-volatile source expressions in the hash table
6946 if they are not already present.
6947 Record their equivalence classes in src_elt.
6948 This way we can insert the corresponding destinations into
6949 the same classes even if the actual sources are no longer in them
6950 (having been invalidated). */
6951
6952 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
6953 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
6954 {
6955 register struct table_elt *elt;
6956 register struct table_elt *classp = sets[0].src_elt;
6957 rtx dest = SET_DEST (sets[0].rtl);
6958 enum machine_mode eqvmode = GET_MODE (dest);
6959
6960 if (GET_CODE (dest) == STRICT_LOW_PART)
6961 {
6962 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6963 classp = 0;
6964 }
6965 if (insert_regs (src_eqv, classp, 0))
6966 src_eqv_hash = HASH (src_eqv, eqvmode);
6967 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
6968 elt->in_memory = src_eqv_in_memory;
6969 elt->in_struct = src_eqv_in_struct;
6970 src_eqv_elt = elt;
6971
6972 /* Check to see if src_eqv_elt is the same as a set source which
6973 does not yet have an elt, and if so set the elt of the set source
6974 to src_eqv_elt. */
6975 for (i = 0; i < n_sets; i++)
6976 if (sets[i].rtl && sets[i].src_elt == 0
6977 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
6978 sets[i].src_elt = src_eqv_elt;
6979 }
6980
6981 for (i = 0; i < n_sets; i++)
6982 if (sets[i].rtl && ! sets[i].src_volatile
6983 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
6984 {
6985 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
6986 {
6987 /* REG_EQUAL in setting a STRICT_LOW_PART
6988 gives an equivalent for the entire destination register,
6989 not just for the subreg being stored in now.
6990 This is a more interesting equivalence, so we arrange later
6991 to treat the entire reg as the destination. */
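	  /* E.g., for (set (strict_low_part (subreg:QI (reg:SI 100) 0)) ...)
	     the REG_EQUAL value describes all of reg 100, so the equivalence
	     is recorded for the full register (pseudo number hypothetical).  */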
6992 sets[i].src_elt = src_eqv_elt;
2197a88a 6993 sets[i].src_hash = src_eqv_hash;
6994 }
6995 else
6996 {
6997 /* Insert source and constant equivalent into hash table, if not
6998 already present. */
6999 register struct table_elt *classp = src_eqv_elt;
7000 register rtx src = sets[i].src;
7001 register rtx dest = SET_DEST (sets[i].rtl);
7002 enum machine_mode mode
7003 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
7004
7005 if (sets[i].src_elt == 0)
7006 {
7007 register struct table_elt *elt;
7008
7009 /* Note that these insert_regs calls cannot remove
7010 any of the src_elt's, because they would have failed to
7011 match if not still valid. */
7012 if (insert_regs (src, classp, 0))
7013 sets[i].src_hash = HASH (src, mode);
7014 elt = insert (src, classp, sets[i].src_hash, mode);
7015 elt->in_memory = sets[i].src_in_memory;
7016 elt->in_struct = sets[i].src_in_struct;
7017 sets[i].src_elt = classp = elt;
7018 }
7019
7020 if (sets[i].src_const && sets[i].src_const_elt == 0
7021 && src != sets[i].src_const
7022 && ! rtx_equal_p (sets[i].src_const, src))
7023 sets[i].src_elt = insert (sets[i].src_const, classp,
2197a88a 7024 sets[i].src_const_hash, mode);
7025 }
7026 }
7027 else if (sets[i].src_elt == 0)
7028 /* If we did not insert the source into the hash table (e.g., it was
7029 volatile), note the equivalence class for the REG_EQUAL value, if any,
7030 so that the destination goes into that class. */
7031 sets[i].src_elt = src_eqv_elt;
7032
7033 invalidate_from_clobbers (&writes_memory, x);
7034
7035 /* Some registers are invalidated by subroutine calls. Memory is
7036 invalidated by non-constant calls. */
7037
7038 if (GET_CODE (insn) == CALL_INSN)
7039 {
7040 static struct write_data everything = {0, 1, 1, 1};
7041
7042 if (! CONST_CALL_P (insn))
7043 invalidate_memory (&everything);
7044 invalidate_for_call ();
7045 }
7046
7047 /* Now invalidate everything set by this instruction.
7048 If a SUBREG or other funny destination is being set,
7049 sets[i].rtl is still nonzero, so here we invalidate the reg
7050 a part of which is being set. */
7051
7052 for (i = 0; i < n_sets; i++)
7053 if (sets[i].rtl)
7054 {
7055 register rtx dest = sets[i].inner_dest;
7056
7057 /* Needed for registers to remove the register from its
7058 previous quantity's chain.
7059 Needed for memory if this is a nonvarying address, unless
7060 we have just done an invalidate_memory that covers even those. */
7061 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7062 || (! writes_memory.all && ! cse_rtx_addr_varies_p (dest)))
7063 invalidate (dest);
7064 else if (GET_CODE (dest) == STRICT_LOW_PART
7065 || GET_CODE (dest) == ZERO_EXTRACT)
7066 invalidate (XEXP (dest, 0));
7067 }
7068
7069 /* Make sure registers mentioned in destinations
7070 are safe for use in an expression to be inserted.
7071 This removes from the hash table
7072 any invalid entry that refers to one of these registers.
7073
7074 We don't care about the return value from mention_regs because
7075 we are going to hash the SET_DEST values unconditionally. */
7076
7077 for (i = 0; i < n_sets; i++)
7078 if (sets[i].rtl && GET_CODE (SET_DEST (sets[i].rtl)) != REG)
7079 mention_regs (SET_DEST (sets[i].rtl));
7080
7081 /* We may have just removed some of the src_elt's from the hash table.
7082 So replace each one with the current head of the same class. */
7083
7084 for (i = 0; i < n_sets; i++)
7085 if (sets[i].rtl)
7086 {
7087 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
7088 /* If elt was removed, find current head of same class,
7089 or 0 if nothing remains of that class. */
7090 {
7091 register struct table_elt *elt = sets[i].src_elt;
7092
7093 while (elt && elt->prev_same_value)
7094 elt = elt->prev_same_value;
7095
7096 while (elt && elt->first_same_value == 0)
7097 elt = elt->next_same_value;
7098 sets[i].src_elt = elt ? elt->first_same_value : 0;
7099 }
7100 }
7101
7102 /* Now insert the destinations into their equivalence classes. */
7103
7104 for (i = 0; i < n_sets; i++)
7105 if (sets[i].rtl)
7106 {
7107 register rtx dest = SET_DEST (sets[i].rtl);
7108 register struct table_elt *elt;
7109
7110 /* Don't record value if we are not supposed to risk allocating
7111 floating-point values in registers that might be wider than
7112 memory. */
7113 if ((flag_float_store
7114 && GET_CODE (dest) == MEM
cbf6a543 7115 && FLOAT_MODE_P (GET_MODE (dest)))
7116 /* Don't record values of destinations set inside a libcall block
7117 since we might delete the libcall. Things should have been set
7118 up so we won't want to reuse such a value, but we play it safe
7119 here. */
7120 || in_libcall_block
7121 /* If we didn't put a REG_EQUAL value or a source into the hash
 7122 table, there is no point in recording DEST. */
7123 || sets[i].src_elt == 0)
7124 continue;
7125
7126 /* STRICT_LOW_PART isn't part of the value BEING set,
7127 and neither is the SUBREG inside it.
7128 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
7129 if (GET_CODE (dest) == STRICT_LOW_PART)
7130 dest = SUBREG_REG (XEXP (dest, 0));
7131
c610adec 7132 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
7133 /* Registers must also be inserted into chains for quantities. */
7134 if (insert_regs (dest, sets[i].src_elt, 1))
7135 /* If `insert_regs' changes something, the hash code must be
7136 recalculated. */
2197a88a 7137 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
7138
7139 elt = insert (dest, sets[i].src_elt,
2197a88a 7140 sets[i].dest_hash, GET_MODE (dest));
7141 elt->in_memory = GET_CODE (sets[i].inner_dest) == MEM;
7142 if (elt->in_memory)
7143 {
7144 /* This implicitly assumes a whole struct
7145 need not have MEM_IN_STRUCT_P.
7146 But a whole struct is *supposed* to have MEM_IN_STRUCT_P. */
7147 elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
7148 || sets[i].inner_dest != SET_DEST (sets[i].rtl));
7149 }
7150
7151 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
7152 narrower than M2, and both M1 and M2 are the same number of words,
7153 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
7154 make that equivalence as well.
7155
7156 However, BAR may have equivalences for which gen_lowpart_if_possible
7157 will produce a simpler value than gen_lowpart_if_possible applied to
7158 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
7159 BAR's equivalences. If we don't get a simplified form, make
7160 the SUBREG. It will not be used in an equivalence, but will
7161 cause two similar assignments to be detected.
7162
7163 Note the loop below will find SUBREG_REG (DEST) since we have
7164 already entered SRC and DEST of the SET in the table. */
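      /* E.g., on a 32-bit target (hypothetical pseudos), from
	   (set (subreg:SI (reg:HI 100) 0) (reg:SI 101))
	 we may also record
	   (set (reg:HI 100) (subreg:HI (reg:SI 101) 0))
	 since SImode and HImode each occupy a single word there.  */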
7165
7166 if (GET_CODE (dest) == SUBREG
7167 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
7168 / UNITS_PER_WORD)
7169 == (GET_MODE_SIZE (GET_MODE (dest)) - 1)/ UNITS_PER_WORD)
7170 && (GET_MODE_SIZE (GET_MODE (dest))
7171 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7172 && sets[i].src_elt != 0)
7173 {
7174 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
7175 struct table_elt *elt, *classp = 0;
7176
7177 for (elt = sets[i].src_elt->first_same_value; elt;
7178 elt = elt->next_same_value)
7179 {
7180 rtx new_src = 0;
2197a88a 7181 unsigned src_hash;
7182 struct table_elt *src_elt;
7183
7184 /* Ignore invalid entries. */
7185 if (GET_CODE (elt->exp) != REG
7186 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7187 continue;
7188
7189 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
7190 if (new_src == 0)
7191 new_src = gen_rtx (SUBREG, new_mode, elt->exp, 0);
7192
7193 src_hash = HASH (new_src, new_mode);
7194 src_elt = lookup (new_src, src_hash, new_mode);
7195
 7196 /* Put the new source in the hash table if it isn't
 7197 already there. */
7198 if (src_elt == 0)
7199 {
7200 if (insert_regs (new_src, classp, 0))
7201 src_hash = HASH (new_src, new_mode);
7202 src_elt = insert (new_src, classp, src_hash, new_mode);
7203 src_elt->in_memory = elt->in_memory;
7204 src_elt->in_struct = elt->in_struct;
7205 }
7206 else if (classp && classp != src_elt->first_same_value)
7207 /* Show that two things that we've seen before are
7208 actually the same. */
7209 merge_equiv_classes (src_elt, classp);
7210
7211 classp = src_elt->first_same_value;
7212 }
7213 }
7214 }
7215
7216 /* Special handling for (set REG0 REG1)
7217 where REG0 is the "cheapest", cheaper than REG1.
7218 After cse, REG1 will probably not be used in the sequel,
7219 so (if easily done) change this insn to (set REG1 REG0) and
7220 replace REG1 with REG0 in the previous insn that computed their value.
7221 Then REG1 will become a dead store and won't cloud the situation
7222 for later optimizations.
7223
7224 Do not make this change if REG1 is a hard register, because it will
7225 then be used in the sequel and we may be changing a two-operand insn
7226 into a three-operand insn.
7227
7228 Also do not do this if we are operating on a copy of INSN. */
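  /* Sketch of the transformation (hypothetical pseudos; reg 100 cheaper):
	(set (reg 101) (plus ...))	;; previous insn
	(set (reg 100) (reg 101))	;; this insn
     becomes
	(set (reg 100) (plus ...))
	(set (reg 101) (reg 100))	;; now a dead store.  */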
7229
7230 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
7231 && NEXT_INSN (PREV_INSN (insn)) == insn
7232 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
7233 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
7234 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
7235 && (qty_first_reg[reg_qty[REGNO (SET_SRC (sets[0].rtl))]]
7236 == REGNO (SET_DEST (sets[0].rtl))))
7237 {
7238 rtx prev = PREV_INSN (insn);
7239 while (prev && GET_CODE (prev) == NOTE)
7240 prev = PREV_INSN (prev);
7241
7242 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
7243 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
7244 {
7245 rtx dest = SET_DEST (sets[0].rtl);
906c4e36 7246 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
7247
7248 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
7249 validate_change (insn, & SET_DEST (sets[0].rtl),
7250 SET_SRC (sets[0].rtl), 1);
7251 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
7252 apply_change_group ();
7253
7254 /* If REG1 was equivalent to a constant, REG0 is not. */
7255 if (note)
7256 PUT_REG_NOTE_KIND (note, REG_EQUAL);
7257
7258 /* If there was a REG_WAS_0 note on PREV, remove it. Move
7259 any REG_WAS_0 note on INSN to PREV. */
906c4e36 7260 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
7261 if (note)
7262 remove_note (prev, note);
7263
906c4e36 7264 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7265 if (note)
7266 {
7267 remove_note (insn, note);
7268 XEXP (note, 1) = REG_NOTES (prev);
7269 REG_NOTES (prev) = note;
7270 }
7271 }
7272 }
7273
7274 /* If this is a conditional jump insn, record any known equivalences due to
7275 the condition being tested. */
7276
7277 last_jump_equiv_class = 0;
7278 if (GET_CODE (insn) == JUMP_INSN
7279 && n_sets == 1 && GET_CODE (x) == SET
7280 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
7281 record_jump_equiv (insn, 0);
7282
7283#ifdef HAVE_cc0
7284 /* If the previous insn set CC0 and this insn no longer references CC0,
7285 delete the previous insn. Here we use the fact that nothing expects CC0
7286 to be valid over an insn, which is true until the final pass. */
7287 if (prev_insn && GET_CODE (prev_insn) == INSN
7288 && (tem = single_set (prev_insn)) != 0
7289 && SET_DEST (tem) == cc0_rtx
7290 && ! reg_mentioned_p (cc0_rtx, x))
7291 {
7292 PUT_CODE (prev_insn, NOTE);
7293 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
7294 NOTE_SOURCE_FILE (prev_insn) = 0;
7295 }
7296
7297 prev_insn_cc0 = this_insn_cc0;
7298 prev_insn_cc0_mode = this_insn_cc0_mode;
7299#endif
7300
7301 prev_insn = insn;
7302}
7303\f
7304/* Store 1 in *WRITES_PTR for those categories of memory ref
7305 that must be invalidated when the expression WRITTEN is stored in.
7306 If WRITTEN is null, say everything must be invalidated. */
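/* As this function uses them, the flags mean roughly: `sp' for a push or
   pop of the stack pointer, `var' for any other memory write, `nonscalar'
   for a write that may alias array or structure elements, and `all' when
   every memory reference must be invalidated.  */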
7307
7308static void
7309note_mem_written (written, writes_ptr)
7310 rtx written;
7311 struct write_data *writes_ptr;
7312{
7313 static struct write_data everything = {0, 1, 1, 1};
7314
7315 if (written == 0)
7316 *writes_ptr = everything;
7317 else if (GET_CODE (written) == MEM)
7318 {
7319 /* Pushing or popping the stack invalidates just the stack pointer. */
7320 rtx addr = XEXP (written, 0);
7321 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7322 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7323 && GET_CODE (XEXP (addr, 0)) == REG
7324 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7325 {
7326 writes_ptr->sp = 1;
7327 return;
7328 }
7329 else if (GET_MODE (written) == BLKmode)
7330 *writes_ptr = everything;
7331 /* (mem (scratch)) means clobber everything. */
7332 else if (GET_CODE (addr) == SCRATCH)
7333 *writes_ptr = everything;
7334 else if (cse_rtx_addr_varies_p (written))
7335 {
7336 /* A varying address that is a sum indicates an array element,
7337 and that's just as good as a structure element
7338 in implying that we need not invalidate scalar variables.
7339 However, we must allow QImode aliasing of scalars, because the
7340 ANSI C standard allows character pointers to alias anything. */
7341 if (! ((MEM_IN_STRUCT_P (written)
7342 || GET_CODE (XEXP (written, 0)) == PLUS)
7343 && GET_MODE (written) != QImode))
7344 writes_ptr->all = 1;
7345 writes_ptr->nonscalar = 1;
7346 }
7347 writes_ptr->var = 1;
7348 }
7349}
7350
7351/* Perform invalidation on the basis of everything about an insn
7352 except for invalidating the actual places that are SET in it.
7353 This includes the places CLOBBERed, and anything that might
7354 alias with something that is SET or CLOBBERed.
7355
7356 W points to the writes_memory for this insn, a struct write_data
7357 saying which kinds of memory references must be invalidated.
7358 X is the pattern of the insn. */
7359
7360static void
7361invalidate_from_clobbers (w, x)
7362 struct write_data *w;
7363 rtx x;
7364{
7365 /* If W->var is not set, W specifies no action.
7366 If W->all is set, this step gets all memory refs
7367 so they can be ignored in the rest of this function. */
7368 if (w->var)
7369 invalidate_memory (w);
7370
7371 if (w->sp)
7372 {
7373 if (reg_tick[STACK_POINTER_REGNUM] >= 0)
7374 reg_tick[STACK_POINTER_REGNUM]++;
7375
7376 /* This should be *very* rare. */
7377 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
7378 invalidate (stack_pointer_rtx);
7379 }
7380
7381 if (GET_CODE (x) == CLOBBER)
7382 {
7383 rtx ref = XEXP (x, 0);
7384 if (ref)
7385 {
7386 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7387 || (GET_CODE (ref) == MEM && ! w->all))
7388 invalidate (ref);
7389 else if (GET_CODE (ref) == STRICT_LOW_PART
7390 || GET_CODE (ref) == ZERO_EXTRACT)
7391 invalidate (XEXP (ref, 0));
7392 }
7393 }
7394 else if (GET_CODE (x) == PARALLEL)
7395 {
7396 register int i;
7397 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
7398 {
7399 register rtx y = XVECEXP (x, 0, i);
7400 if (GET_CODE (y) == CLOBBER)
7401 {
7402 rtx ref = XEXP (y, 0);
7403 if (ref)
7404 {
7405 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
 7406 || (GET_CODE (ref) == MEM && ! w->all))
7407 invalidate (ref);
7408 else if (GET_CODE (ref) == STRICT_LOW_PART
7409 || GET_CODE (ref) == ZERO_EXTRACT)
7410 invalidate (XEXP (ref, 0));
7411 }
7412 }
7413 }
7414 }
7415}
7416\f
7417/* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
7418 and replace any registers in them with either an equivalent constant
7419 or the canonical form of the register. If we are inside an address,
7420 only do this if the address remains valid.
7421
7422 OBJECT is 0 except when within a MEM in which case it is the MEM.
7423
7424 Return the replacement for X. */
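/* For example (hypothetical pseudos), a REG_EQUAL note of
     (plus:SI (reg:SI 100) (const_int 4))
   may become (plus:SI (reg:SI 99) (const_int 4)) if reg 99 is the
   canonical register for reg 100's quantity, or may have the register
   replaced outright by a known constant value.  */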
7425
7426static rtx
7427cse_process_notes (x, object)
7428 rtx x;
7429 rtx object;
7430{
7431 enum rtx_code code = GET_CODE (x);
7432 char *fmt = GET_RTX_FORMAT (code);
7433 int i;
7434
7435 switch (code)
7436 {
7437 case CONST_INT:
7438 case CONST:
7439 case SYMBOL_REF:
7440 case LABEL_REF:
7441 case CONST_DOUBLE:
7442 case PC:
7443 case CC0:
7444 case LO_SUM:
7445 return x;
7446
7447 case MEM:
7448 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
7449 return x;
7450
7451 case EXPR_LIST:
7452 case INSN_LIST:
7453 if (REG_NOTE_KIND (x) == REG_EQUAL)
906c4e36 7454 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
7afe21cc 7455 if (XEXP (x, 1))
906c4e36 7456 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
7457 return x;
7458
7459 case SIGN_EXTEND:
7460 case ZERO_EXTEND:
7461 {
7462 rtx new = cse_process_notes (XEXP (x, 0), object);
7463 /* We don't substitute VOIDmode constants into these rtx,
7464 since they would impede folding. */
7465 if (GET_MODE (new) != VOIDmode)
7466 validate_change (object, &XEXP (x, 0), new, 0);
7467 return x;
7468 }
7469
7470 case REG:
7471 i = reg_qty[REGNO (x)];
7472
7473 /* Return a constant or a constant register. */
7474 if (REGNO_QTY_VALID_P (REGNO (x))
7475 && qty_const[i] != 0
7476 && (CONSTANT_P (qty_const[i])
7477 || GET_CODE (qty_const[i]) == REG))
7478 {
7479 rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
7480 if (new)
7481 return new;
7482 }
7483
7484 /* Otherwise, canonicalize this register. */
906c4e36 7485 return canon_reg (x, NULL_RTX);
7486 }
7487
7488 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7489 if (fmt[i] == 'e')
7490 validate_change (object, &XEXP (x, i),
7fe34fdf 7491 cse_process_notes (XEXP (x, i), object), 0);
7492
7493 return x;
7494}
7495\f
7496/* Find common subexpressions between the end test of a loop and the beginning
7497 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
7498
7499 Often we have a loop where an expression in the exit test is used
7500 in the body of the loop. For example "while (*p) *q++ = *p++;".
7501 Because of the way we duplicate the loop exit test in front of the loop,
7502 however, we don't detect that common subexpression. This will be caught
7503 when global cse is implemented, but this is a quite common case.
7504
7505 This function handles the most common cases of these common expressions.
7506 It is called after we have processed the basic block ending with the
7507 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
7508 jumps to a label used only once. */
7509
7510static void
7511cse_around_loop (loop_start)
7512 rtx loop_start;
7513{
7514 rtx insn;
7515 int i;
7516 struct table_elt *p;
7517
7518 /* If the jump at the end of the loop doesn't go to the start, we don't
7519 do anything. */
7520 for (insn = PREV_INSN (loop_start);
7521 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
7522 insn = PREV_INSN (insn))
7523 ;
7524
7525 if (insn == 0
7526 || GET_CODE (insn) != NOTE
7527 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
7528 return;
7529
7530 /* If the last insn of the loop (the end test) was an NE comparison,
7531 we will interpret it as an EQ comparison, since we fell through
f72aed24 7532 the loop. Any equivalences resulting from that comparison are
7533 therefore not valid and must be invalidated. */
7534 if (last_jump_equiv_class)
7535 for (p = last_jump_equiv_class->first_same_value; p;
7536 p = p->next_same_value)
7537 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
7538 || GET_CODE (p->exp) == SUBREG)
7539 invalidate (p->exp);
7540 else if (GET_CODE (p->exp) == STRICT_LOW_PART
7541 || GET_CODE (p->exp) == ZERO_EXTRACT)
7542 invalidate (XEXP (p->exp, 0));
7543
7544 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
7545 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
7546
7547 The only thing we do with SET_DEST is invalidate entries, so we
7548 can safely process each SET in order. It is slightly less efficient
7549 to do so, but we only want to handle the most common cases. */
7550
7551 for (insn = NEXT_INSN (loop_start);
7552 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
7553 && ! (GET_CODE (insn) == NOTE
7554 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
7555 insn = NEXT_INSN (insn))
7556 {
7557 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7558 && (GET_CODE (PATTERN (insn)) == SET
7559 || GET_CODE (PATTERN (insn)) == CLOBBER))
7560 cse_set_around_loop (PATTERN (insn), insn, loop_start);
7561 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7562 && GET_CODE (PATTERN (insn)) == PARALLEL)
7563 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7564 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
7565 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
7566 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
7567 loop_start);
7568 }
7569}
7570\f
7571/* Variable used for communications between the next two routines. */
7572
7573static struct write_data skipped_writes_memory;
7574
7575/* Process one SET of an insn that was skipped. We ignore CLOBBERs
7576 since they are done elsewhere. This function is called via note_stores. */
7577
7578static void
7579invalidate_skipped_set (dest, set)
7580 rtx set;
7581 rtx dest;
7582{
7583 if (GET_CODE (set) == CLOBBER
7584#ifdef HAVE_cc0
7585 || dest == cc0_rtx
7586#endif
7587 || dest == pc_rtx)
7588 return;
7589
7590 if (GET_CODE (dest) == MEM)
7591 note_mem_written (dest, &skipped_writes_memory);
7592
7593 /* There are times when an address can appear varying and be a PLUS
7594 during this scan when it would be a fixed address were we to know
7595 the proper equivalences. So promote "nonscalar" to be "all". */
7596 if (skipped_writes_memory.nonscalar)
7597 skipped_writes_memory.all = 1;
7598
7599 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7600 || (! skipped_writes_memory.all && ! cse_rtx_addr_varies_p (dest)))
7601 invalidate (dest);
7602 else if (GET_CODE (dest) == STRICT_LOW_PART
7603 || GET_CODE (dest) == ZERO_EXTRACT)
7604 invalidate (XEXP (dest, 0));
7605}
7606
7607/* Invalidate all insns from START up to the end of the function or the
 7608 next label. This is called when we wish to CSE around a block that is
7609 conditionally executed. */
7610
7611static void
7612invalidate_skipped_block (start)
7613 rtx start;
7614{
7615 rtx insn;
7616 static struct write_data init = {0, 0, 0, 0};
7617 static struct write_data everything = {0, 1, 1, 1};
7618
7619 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
7620 insn = NEXT_INSN (insn))
7621 {
7622 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7623 continue;
7624
7625 skipped_writes_memory = init;
7626
7627 if (GET_CODE (insn) == CALL_INSN)
7628 {
7629 invalidate_for_call ();
7630 skipped_writes_memory = everything;
7631 }
7632
7633 note_stores (PATTERN (insn), invalidate_skipped_set);
7634 invalidate_from_clobbers (&skipped_writes_memory, PATTERN (insn));
7635 }
7636}
7637\f
7638/* Used for communication between the following two routines; contains a
7639 value to be checked for modification. */
7640
7641static rtx cse_check_loop_start_value;
7642
7643/* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
7644 indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0. */
7645
7646static void
7647cse_check_loop_start (x, set)
7648 rtx x;
7649 rtx set;
7650{
7651 if (cse_check_loop_start_value == 0
7652 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
7653 return;
7654
7655 if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
7656 || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
7657 cse_check_loop_start_value = 0;
7658}
7659
7660/* X is a SET or CLOBBER contained in INSN that was found near the start of
7661 a loop that starts with the label at LOOP_START.
7662
7663 If X is a SET, we see if its SET_SRC is currently in our hash table.
7664 If so, we see if it has a value equal to some register used only in the
7665 loop exit code (as marked by jump.c).
7666
7667 If those two conditions are true, we search backwards from the start of
7668 the loop to see if that same value was loaded into a register that still
7669 retains its value at the start of the loop.
7670
7671 If so, we insert an insn after the load to copy the destination of that
7672 load into the equivalent register and (try to) replace our SET_SRC with that
7673 register.
7674
7675 In any event, we invalidate whatever this SET or CLOBBER modifies. */
7676
7677static void
7678cse_set_around_loop (x, insn, loop_start)
7679 rtx x;
7680 rtx insn;
7681 rtx loop_start;
7682{
7683 struct table_elt *src_elt;
7684 static struct write_data init = {0, 0, 0, 0};
7685 struct write_data writes_memory;
7686
7687 writes_memory = init;
7688
7689 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
7690 are setting PC or CC0 or whose SET_SRC is already a register. */
7691 if (GET_CODE (x) == SET
7692 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
7693 && GET_CODE (SET_SRC (x)) != REG)
7694 {
7695 src_elt = lookup (SET_SRC (x),
7696 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
7697 GET_MODE (SET_DEST (x)));
7698
7699 if (src_elt)
7700 for (src_elt = src_elt->first_same_value; src_elt;
7701 src_elt = src_elt->next_same_value)
7702 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
7703 && COST (src_elt->exp) < COST (SET_SRC (x)))
7704 {
7705 rtx p, set;
7706
7707 /* Look for an insn in front of LOOP_START that sets
7708 something in the desired mode to SET_SRC (x) before we hit
7709 a label or CALL_INSN. */
7710
7711 for (p = prev_nonnote_insn (loop_start);
7712 p && GET_CODE (p) != CALL_INSN
7713 && GET_CODE (p) != CODE_LABEL;
7714 p = prev_nonnote_insn (p))
7715 if ((set = single_set (p)) != 0
7716 && GET_CODE (SET_DEST (set)) == REG
7717 && GET_MODE (SET_DEST (set)) == src_elt->mode
7718 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
7719 {
7720 /* We now have to ensure that nothing between P
7721 and LOOP_START modified anything referenced in
7722 SET_SRC (x). We know that nothing within the loop
7723 can modify it, or we would have invalidated it in
7724 the hash table. */
7725 rtx q;
7726
7727 cse_check_loop_start_value = SET_SRC (x);
7728 for (q = p; q != loop_start; q = NEXT_INSN (q))
7729 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
7730 note_stores (PATTERN (q), cse_check_loop_start);
7731
7732 /* If nothing was changed and we can replace our
7733 SET_SRC, add an insn after P to copy its destination
7734 to what we will be replacing SET_SRC with. */
7735 if (cse_check_loop_start_value
7736 && validate_change (insn, &SET_SRC (x),
7737 src_elt->exp, 0))
7738 emit_insn_after (gen_move_insn (src_elt->exp,
7739 SET_DEST (set)),
7740 p);
7741 break;
7742 }
7743 }
7744 }
7745
7746 /* Now invalidate anything modified by X. */
7747 note_mem_written (SET_DEST (x), &writes_memory);
7748
7749 if (writes_memory.var)
7750 invalidate_memory (&writes_memory);
7751
7752 /* See comment on similar code in cse_insn for explanation of these tests. */
7753 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
7754 || (GET_CODE (SET_DEST (x)) == MEM && ! writes_memory.all
7755 && ! cse_rtx_addr_varies_p (SET_DEST (x))))
7756 invalidate (SET_DEST (x));
7757 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
7758 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
7759 invalidate (XEXP (SET_DEST (x), 0));
7760}
7761\f
7762/* Find the end of INSN's basic block and return its range,
7763 the total number of SETs in all the insns of the block, the last insn of the
7764 block, and the branch path.
7765
7766 The branch path indicates which branches should be followed. If a non-zero
7767 path size is specified, the block should be rescanned and a different set
7768 of branches will be taken. The branch path is only used if
8b3686ed 7769 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
7770
7771 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
7772 used to describe the block. It is filled in with the information about
7773 the current block. The incoming structure's branch path, if any, is used
7774 to construct the output branch path. */
7775
7afe21cc 7776void
8b3686ed 7777cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
7778 rtx insn;
7779 struct cse_basic_block_data *data;
7780 int follow_jumps;
7781 int after_loop;
8b3686ed 7782 int skip_blocks;
7783{
7784 rtx p = insn, q;
7785 int nsets = 0;
7786 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
fc3ffe83 7787 rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
7788 int path_size = data->path_size;
7789 int path_entry = 0;
7790 int i;
7791
7792 /* Update the previous branch path, if any. If the last branch was
7793 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
7794 shorten the path by one and look at the previous branch. We know that
7795 at least one branch must have been taken if PATH_SIZE is non-zero. */
7796 while (path_size > 0)
7797 {
8b3686ed 7798 if (data->path[path_size - 1].status != NOT_TAKEN)
7799 {
7800 data->path[path_size - 1].status = NOT_TAKEN;
7801 break;
7802 }
7803 else
7804 path_size--;
7805 }
7806
7807 /* Scan to end of this basic block. */
7808 while (p && GET_CODE (p) != CODE_LABEL)
7809 {
7810 /* Don't cse out the end of a loop. This makes a difference
7811 only for the unusual loops that always execute at least once;
7812 all other loops have labels there so we will stop in any case.
7813 Cse'ing out the end of the loop is dangerous because it
7814 might cause an invariant expression inside the loop
7815 to be reused after the end of the loop. This would make it
7816 hard to move the expression out of the loop in loop.c,
7817 especially if it is one of several equivalent expressions
7818 and loop.c would like to eliminate it.
7819
7820 If we are running after loop.c has finished, we can ignore
7821 the NOTE_INSN_LOOP_END. */
7822
7823 if (! after_loop && GET_CODE (p) == NOTE
7824 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
7825 break;
7826
 7827 /* Don't cse over a call to setjmp; on some machines (e.g., the VAX)
7828 the regs restored by the longjmp come from
7829 a later time than the setjmp. */
7830 if (GET_CODE (p) == NOTE
7831 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
7832 break;
7833
7834 /* A PARALLEL can have lots of SETs in it,
7835 especially if it is really an ASM_OPERANDS. */
7836 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
7837 && GET_CODE (PATTERN (p)) == PARALLEL)
7838 nsets += XVECLEN (PATTERN (p), 0);
7839 else if (GET_CODE (p) != NOTE)
7840 nsets += 1;
7841
7842 /* Ignore insns made by CSE; they cannot affect the boundaries of
7843 the basic block. */
7844
7845 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
8b3686ed 7846 high_cuid = INSN_CUID (p);
7847 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
7848 low_cuid = INSN_CUID (p);
7849
7850 /* See if this insn is in our branch path. If it is and we are to
7851 take it, do so. */
7852 if (path_entry < path_size && data->path[path_entry].branch == p)
7853 {
8b3686ed 7854 if (data->path[path_entry].status != NOT_TAKEN)
7855 p = JUMP_LABEL (p);
7856
7857 /* Point to next entry in path, if any. */
7858 path_entry++;
7859 }
7860
7861 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
7862 was specified, we haven't reached our maximum path length, there are
7863 insns following the target of the jump, this is the only use of the
7864 jump label, and the target label is preceded by a BARRIER.
7865
7866 Alternatively, we can follow the jump if it branches around a
7867 block of code and there are no other branches into the block.
7868 In this case invalidate_skipped_block will be called to invalidate any
7869 registers set in the block when following the jump. */
7870
7871 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
7872 && GET_CODE (p) == JUMP_INSN
7873 && GET_CODE (PATTERN (p)) == SET
7874 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
7875 && LABEL_NUSES (JUMP_LABEL (p)) == 1
7876 && NEXT_INSN (JUMP_LABEL (p)) != 0)
7877 {
7878 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
7879 if ((GET_CODE (q) != NOTE
7880 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
7881 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
7882 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
7883 break;
7884
7885 /* If we ran into a BARRIER, this code is an extension of the
7886 basic block when the branch is taken. */
8b3686ed 7887 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
7888 {
7889 /* Don't allow ourself to keep walking around an
7890 always-executed loop. */
7891 if (next_real_insn (q) == next)
7892 {
7893 p = NEXT_INSN (p);
7894 continue;
7895 }
7896
7897 /* Similarly, don't put a branch in our path more than once. */
7898 for (i = 0; i < path_entry; i++)
7899 if (data->path[i].branch == p)
7900 break;
7901
7902 if (i != path_entry)
7903 break;
7904
7905 data->path[path_entry].branch = p;
7906 data->path[path_entry++].status = TAKEN;
7907
7908 /* This branch now ends our path. It was possible that we
7909 didn't see this branch the last time around (when the
7910 insn in front of the target was a JUMP_INSN that was
7911 turned into a no-op). */
7912 path_size = path_entry;
7913
7914 p = JUMP_LABEL (p);
7915 /* Mark block so we won't scan it again later. */
7916 PUT_MODE (NEXT_INSN (p), QImode);
7917 }
7918 /* Detect a branch around a block of code. */
7919 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
7920 {
7921 register rtx tmp;
7922
7923 if (next_real_insn (q) == next)
7924 {
7925 p = NEXT_INSN (p);
7926 continue;
7927 }
7928
7929 for (i = 0; i < path_entry; i++)
7930 if (data->path[i].branch == p)
7931 break;
7932
7933 if (i != path_entry)
7934 break;
7935
7936 /* This is no_labels_between_p (p, q) with an added check for
7937 reaching the end of a function (in case Q precedes P). */
7938 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
7939 if (GET_CODE (tmp) == CODE_LABEL)
7940 break;
7941
7942 if (tmp == q)
7943 {
7944 data->path[path_entry].branch = p;
7945 data->path[path_entry++].status = AROUND;
7946
7947 path_size = path_entry;
7948
7949 p = JUMP_LABEL (p);
7950 /* Mark block so we won't scan it again later. */
7951 PUT_MODE (NEXT_INSN (p), QImode);
7952 }
7953 }
7afe21cc 7954 }
7955 p = NEXT_INSN (p);
7956 }
7957
7958 data->low_cuid = low_cuid;
7959 data->high_cuid = high_cuid;
7960 data->nsets = nsets;
7961 data->last = p;
7962
7963 /* If all jumps in the path are not taken, set our path length to zero
7964 so a rescan won't be done. */
7965 for (i = path_size - 1; i >= 0; i--)
8b3686ed 7966 if (data->path[i].status != NOT_TAKEN)
7967 break;
7968
7969 if (i == -1)
7970 data->path_size = 0;
7971 else
7972 data->path_size = path_size;
7973
7974 /* End the current branch path. */
7975 data->path[path_size].branch = 0;
7976}
7977\f
7978/* Perform cse on the instructions of a function.
7979 F is the first instruction.
7980 NREGS is one plus the highest pseudo-reg number used in the instruction.
7981
7982 AFTER_LOOP is 1 if this is the cse call done after loop optimization
7983 (only if -frerun-cse-after-loop).
7984
7985 Returns 1 if jump_optimize should be redone due to simplifications
7986 in conditional jump instructions. */
7987
7988int
7989cse_main (f, nregs, after_loop, file)
7990 rtx f;
7991 int nregs;
7992 int after_loop;
7993 FILE *file;
7994{
7995 struct cse_basic_block_data val;
7996 register rtx insn = f;
7997 register int i;
7998
7999 cse_jumps_altered = 0;
8000 constant_pool_entries_cost = 0;
8001 val.path_size = 0;
8002
8003 init_recog ();
8004
8005 max_reg = nregs;
8006
8007 all_minus_one = (int *) alloca (nregs * sizeof (int));
8008 consec_ints = (int *) alloca (nregs * sizeof (int));
8009
8010 for (i = 0; i < nregs; i++)
8011 {
8012 all_minus_one[i] = -1;
8013 consec_ints[i] = i;
8014 }
8015
8016 reg_next_eqv = (int *) alloca (nregs * sizeof (int));
8017 reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
8018 reg_qty = (int *) alloca (nregs * sizeof (int));
8019 reg_in_table = (int *) alloca (nregs * sizeof (int));
8020 reg_tick = (int *) alloca (nregs * sizeof (int));
8021
8022#ifdef LOAD_EXTEND_OP
8023
8024 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
8025 and change the code and mode as appropriate. */
8026 memory_extend_rtx = gen_rtx (ZERO_EXTEND, VOIDmode, 0);
8027#endif
8028
8029 /* Discard all the free elements of the previous function
 8030 since they are allocated in the temporary obstack. */
4c9a05bc 8031 bzero ((char *) table, sizeof table);
8032 free_element_chain = 0;
8033 n_elements_made = 0;
8034
8035 /* Find the largest uid. */
8036
8037 max_uid = get_max_uid ();
8038 uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
4c9a05bc 8039 bzero ((char *) uid_cuid, (max_uid + 1) * sizeof (int));
8040
8041 /* Compute the mapping from uids to cuids.
8042 CUIDs are numbers assigned to insns, like uids,
8043 except that cuids increase monotonically through the code.
8044 Don't assign cuids to line-number NOTEs, so that the distance in cuids
8045 between two insns is not affected by -g. */
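/* E.g., insns with uids 40, 12 and 57 appearing in that order get
   cuids 1, 2 and 3; a line-number note between them reuses the cuid of
   the preceding insn (uids hypothetical).  */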
8046
8047 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
8048 {
8049 if (GET_CODE (insn) != NOTE
8050 || NOTE_LINE_NUMBER (insn) < 0)
8051 INSN_CUID (insn) = ++i;
8052 else
8053 /* Give a line number note the same cuid as preceding insn. */
8054 INSN_CUID (insn) = i;
8055 }
8056
8057 /* Initialize which registers are clobbered by calls. */
8058
8059 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
8060
8061 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8062 if ((call_used_regs[i]
8063 /* Used to check !fixed_regs[i] here, but that isn't safe;
8064 fixed regs are still call-clobbered, and sched can get
8065 confused if they can "live across calls".
8066
8067 The frame pointer is always preserved across calls. The arg
8068 pointer is if it is fixed. The stack pointer usually is, unless
8069 RETURN_POPS_ARGS, in which case an explicit CLOBBER
8070 will be present. If we are generating PIC code, the PIC offset
8071 table register is preserved across calls. */
8072
8073 && i != STACK_POINTER_REGNUM
8074 && i != FRAME_POINTER_REGNUM
8075#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
8076 && i != HARD_FRAME_POINTER_REGNUM
8077#endif
8078#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
8079 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
8080#endif
be8fe470 8081#if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
8082 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
8083#endif
8084 )
8085 || global_regs[i])
8086 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
8087
8088 /* Loop over basic blocks.
8089 Compute the maximum number of qty's needed for each basic block
8090 (which is 2 for each SET). */
8091 insn = f;
8092 while (insn)
8093 {
8094 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
8095 flag_cse_skip_blocks);
8096
8097 /* If this basic block was already processed or has no sets, skip it. */
8098 if (val.nsets == 0 || GET_MODE (insn) == QImode)
8099 {
8100 PUT_MODE (insn, VOIDmode);
8101 insn = (val.last ? NEXT_INSN (val.last) : 0);
8102 val.path_size = 0;
8103 continue;
8104 }
8105
8106 cse_basic_block_start = val.low_cuid;
8107 cse_basic_block_end = val.high_cuid;
8108 max_qty = val.nsets * 2;
8109
8110 if (file)
8111 fprintf (file, ";; Processing block from %d to %d, %d sets.\n",
8112 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
8113 val.nsets);
8114
8115 /* Make MAX_QTY bigger to give us room to optimize
8116 past the end of this basic block, if that should prove useful. */
8117 if (max_qty < 500)
8118 max_qty = 500;
8119
8120 max_qty += max_reg;
8121
8122 /* If this basic block is being extended by following certain jumps,
8123 (see `cse_end_of_basic_block'), we reprocess the code from the start.
8124 Otherwise, we start after this basic block. */
8125 if (val.path_size > 0)
8126 cse_basic_block (insn, val.last, val.path, 0);
8127 else
8128 {
8129 int old_cse_jumps_altered = cse_jumps_altered;
8130 rtx temp;
8131
8132 /* When cse changes a conditional jump to an unconditional
8133 jump, we want to reprocess the block, since it will give
8134 us a new branch path to investigate. */
8135 cse_jumps_altered = 0;
8136 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
8137 if (cse_jumps_altered == 0
8138 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8139 insn = temp;
8140
8141 cse_jumps_altered |= old_cse_jumps_altered;
8142 }
8143
8144#ifdef USE_C_ALLOCA
8145 alloca (0);
8146#endif
8147 }
8148
8149 /* Tell refers_to_mem_p that qty_const info is not available. */
8150 qty_const = 0;
8151
8152 if (max_elements_made < n_elements_made)
8153 max_elements_made = n_elements_made;
8154
8155 return cse_jumps_altered;
8156}
8157
8158/* Process a single basic block. FROM and TO are the limits of the basic
8159 block. NEXT_BRANCH points to the branch path when following jumps or
8160 a null path when not following jumps.
8161
8162 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
8163 loop. This is true when we are being called for the last time on a
8164 block and this CSE pass is before loop.c. */
8165
8166static rtx
8167cse_basic_block (from, to, next_branch, around_loop)
8168 register rtx from, to;
8169 struct branch_path *next_branch;
8170 int around_loop;
8171{
8172 register rtx insn;
8173 int to_usage = 0;
8174 int in_libcall_block = 0;
8175
8176 /* Each of these arrays is undefined before max_reg, so only allocate
8177 the space actually needed and adjust the start below. */
8178
8179 qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8180 qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
 8181 qty_mode = (enum machine_mode *) alloca ((max_qty - max_reg) * sizeof (enum machine_mode));
8182 qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8183 qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8184 qty_comparison_code
8185 = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
8186 qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8187 qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8188
8189 qty_first_reg -= max_reg;
8190 qty_last_reg -= max_reg;
8191 qty_mode -= max_reg;
8192 qty_const -= max_reg;
8193 qty_const_insn -= max_reg;
8194 qty_comparison_code -= max_reg;
8195 qty_comparison_qty -= max_reg;
8196 qty_comparison_const -= max_reg;
8197
8198 new_basic_block ();
8199
8200 /* TO might be a label. If so, protect it from being deleted. */
8201 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8202 ++LABEL_NUSES (to);
8203
8204 for (insn = from; insn != to; insn = NEXT_INSN (insn))
8205 {
8206 register enum rtx_code code;
8207
8208 /* See if this is a branch that is part of the path. If so, and it is
8209 to be taken, do so. */
8210 if (next_branch->branch == insn)
8211 {
8212 enum taken status = next_branch++->status;
8213 if (status != NOT_TAKEN)
7afe21cc 8214 {
8215 if (status == TAKEN)
8216 record_jump_equiv (insn, 1);
8217 else
8218 invalidate_skipped_block (NEXT_INSN (insn));
8219
8220 /* Set the last insn as the jump insn; it doesn't affect cc0.
8221 Then follow this branch. */
8222#ifdef HAVE_cc0
8223 prev_insn_cc0 = 0;
8224#endif
8225 prev_insn = insn;
8226 insn = JUMP_LABEL (insn);
8227 continue;
8228 }
8229 }
8230
8231 code = GET_CODE (insn);
8232 if (GET_MODE (insn) == QImode)
8233 PUT_MODE (insn, VOIDmode);
8234
8235 if (GET_RTX_CLASS (code) == 'i')
8236 {
8237 /* Process notes first so we have all notes in canonical forms when
8238 looking for duplicate operations. */
8239
8240 if (REG_NOTES (insn))
906c4e36 8241 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
8242
 8243 /* Track when we are inside a LIBCALL block. Inside such a block,
8244 we do not want to record destinations. The last insn of a
8245 LIBCALL block is not considered to be part of the block, since
830a38ee 8246 its destination is the result of the block and hence should be
8247 recorded. */
8248
906c4e36 8249 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7afe21cc 8250 in_libcall_block = 1;
906c4e36 8251 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8252 in_libcall_block = 0;
8253
8254 cse_insn (insn, in_libcall_block);
8255 }
8256
8257 /* If INSN is now an unconditional jump, skip to the end of our
8258 basic block by pretending that we just did the last insn in the
8259 basic block. If we are jumping to the end of our block, show
8260 that we can have one usage of TO. */
8261
8262 if (simplejump_p (insn))
8263 {
8264 if (to == 0)
8265 return 0;
8266
8267 if (JUMP_LABEL (insn) == to)
8268 to_usage = 1;
8269
8270 /* Maybe TO was deleted because the jump is unconditional.
8271 If so, there is nothing left in this basic block. */
8272 /* ??? Perhaps it would be smarter to set TO
8273 to whatever follows this insn,
8274 and pretend the basic block had always ended here. */
8275 if (INSN_DELETED_P (to))
8276 break;
8277
8278 insn = PREV_INSN (to);
8279 }
8280
8281 /* See if it is ok to keep on going past the label
8282 which used to end our basic block. Remember that we incremented
d45cf215 8283 the count of that label, so we decrement it here. If we made
8284 a jump unconditional, TO_USAGE will be one; in that case, we don't
8285 want to count the use in that jump. */
8286
8287 if (to != 0 && NEXT_INSN (insn) == to
8288 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
8289 {
8290 struct cse_basic_block_data val;
8291
8292 insn = NEXT_INSN (to);
8293
8294 if (LABEL_NUSES (to) == 0)
8295 delete_insn (to);
8296
8297 /* Find the end of the following block. Note that we won't be
8298 following branches in this case. If TO was the last insn
8299 in the function, we are done. Similarly, if we deleted the
d45cf215 8300 insn after TO, it must have been because it was preceded by
7afe21cc
RK
8301 a BARRIER. In that case, we are done with this block because it
8302 has no continuation. */
8303
8304 if (insn == 0 || INSN_DELETED_P (insn))
8305 return 0;
8306
8307 to_usage = 0;
8308 val.path_size = 0;
8b3686ed 8309 cse_end_of_basic_block (insn, &val, 0, 0, 0);
8310
8311 /* If the tables we allocated have enough space left
8312 to handle all the SETs in the next basic block,
8313 continue through it. Otherwise, return,
8314 and that block will be scanned individually. */
8315 if (val.nsets * 2 + next_qty > max_qty)
8316 break;
8317
8318 cse_basic_block_start = val.low_cuid;
8319 cse_basic_block_end = val.high_cuid;
8320 to = val.last;
8321
8322 /* Prevent TO from being deleted if it is a label. */
8323 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8324 ++LABEL_NUSES (to);
8325
8326 /* Back up so we process the first insn in the extension. */
8327 insn = PREV_INSN (insn);
8328 }
8329 }
8330
8331 if (next_qty > max_qty)
8332 abort ();
8333
8334 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
8335 the previous insn is the only insn that branches to the head of a loop,
8336 we can cse into the loop. Don't do this if we changed the jump
8337 structure of a loop unless we aren't going to be following jumps. */
8338
8339 if ((cse_jumps_altered == 0
8340 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8341 && around_loop && to != 0
8342 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
8343 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
8344 && JUMP_LABEL (PREV_INSN (to)) != 0
8345 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
8346 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
8347
8348 return to ? NEXT_INSN (to) : 0;
8349}
8350\f
8351/* Count the number of times registers are used (not set) in X.
8352 COUNTS is an array in which we accumulate the count, INCR is how much
8353 we count each register usage.
8354
8355 Don't count a usage of DEST, which is the SET_DEST of a SET which
8356 contains X in its SET_SRC. This is because such a SET does not
8357 modify the liveness of DEST. */
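/* E.g., in (set (reg 100) (plus (reg 100) (const_int 1))) the use of
   reg 100 in the source is not counted, so the insn can still be found
   dead if nothing else reads reg 100 (pseudo number hypothetical).  */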
8358
8359static void
79644f06 8360count_reg_usage (x, counts, dest, incr)
8361 rtx x;
8362 int *counts;
79644f06 8363 rtx dest;
8364 int incr;
8365{
f1e7c95f 8366 enum rtx_code code;
8367 char *fmt;
8368 int i, j;
8369
8370 if (x == 0)
8371 return;
8372
8373 switch (code = GET_CODE (x))
8374 {
8375 case REG:
8376 if (x != dest)
8377 counts[REGNO (x)] += incr;
8378 return;
8379
8380 case PC:
8381 case CC0:
8382 case CONST:
8383 case CONST_INT:
8384 case CONST_DOUBLE:
8385 case SYMBOL_REF:
8386 case LABEL_REF:
8387 case CLOBBER:
8388 return;
8389
8390 case SET:
8391 /* Unless we are setting a REG, count everything in SET_DEST. */
8392 if (GET_CODE (SET_DEST (x)) != REG)
79644f06 8393 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
8394
8395 /* If SRC has side-effects, then we can't delete this insn, so the
8396 usage of SET_DEST inside SRC counts.
8397
8398 ??? Strictly-speaking, we might be preserving this insn
8399 because some other SET has side-effects, but that's hard
8400 to do and can't happen now. */
8401 count_reg_usage (SET_SRC (x), counts,
8402 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
8403 incr);
7afe21cc
RK
8404 return;
8405
f1e7c95f
RK
8406 case CALL_INSN:
8407 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
8408
8409 /* ... falls through ... */
7afe21cc
RK
8410 case INSN:
8411 case JUMP_INSN:
79644f06 8412 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
7afe21cc
RK
8413
8414 /* Things used in a REG_EQUAL note aren't dead since loop may try to
8415 use them. */
8416
f1e7c95f 8417 count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
7afe21cc
RK
8418 return;
8419
8420 case EXPR_LIST:
8421 case INSN_LIST:
f1e7c95f
RK
8422 if (REG_NOTE_KIND (x) == REG_EQUAL
8423 || GET_CODE (XEXP (x,0)) == USE)
79644f06 8424 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
f1e7c95f 8425 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
7afe21cc
RK
8426 return;
8427 }
8428
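  /* For any other rtx code, recursively scan the operands, as described
     by the code's format string: 'e' marks a subexpression and 'E' a
     vector of subexpressions.  DEST is passed down unchanged, so a use
     of DEST anywhere inside X remains suppressed.  */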
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        count_reg_usage (XEXP (x, i), counts, dest, incr);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
    }
}
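
/* For illustration only (not part of the compiler): given an insn such as

       (insn ... (set (reg 70) (plus:SI (reg 70) (reg 71))))

   the call count_reg_usage (insn, counts, NULL_RTX, 1) increments
   counts[71] but leaves counts[70] untouched: pseudo registers are
   shared rtx's, so the (reg 70) inside the PLUS is the very SET_DEST
   being suppressed.  Hence, if no other insn reads reg 70, this insn
   still looks dead to delete_dead_from_cse below.  */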
\f
/* Scan all the insns and delete any that are dead; i.e., they store a
   register that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse.  It improves
   the heuristics in loop.c since that pass won't try to move dead
   invariants out of loops or make givs for dead quantities.  The
   remaining passes of the compilation are also sped up.  */

void
delete_dead_from_cse (insns, nreg)
     rtx insns;
     int nreg;
{
  int *counts = (int *) alloca (nreg * sizeof (int));
  rtx insn, prev;
  rtx tem;
  int i;
  int in_libcall = 0;

  /* First count the number of times each register is used.  */
  bzero ((char *) counts, sizeof (int) * nreg);
  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
    count_reg_usage (insn, counts, NULL_RTX, 1);

  /* Go from the last insn to the first and delete insns that only set
     unused registers or copy a register to itself.  As we delete an
     insn, remove usage counts for registers it uses.  */
  for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
    {
      int live_insn = 0;

      prev = prev_real_insn (insn);

      /* Don't delete any insns that are part of a libcall block.
         The flow or loop passes might get confused if we did that.
         Remember that we are scanning backwards.  */
      if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
        in_libcall = 1;

      if (in_libcall)
        live_insn = 1;
      else if (GET_CODE (PATTERN (insn)) == SET)
        {
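          /* A register copied to itself changes nothing, so the insn is
             a candidate for deletion unless one of the tests below marks
             it live.  */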
          if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
              && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
            ;

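          /* A SET of cc0 is dead if its value is not examined by the
             immediately following insn; cc0 is only valid between the
             insn that sets it and the next one.  */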
#ifdef HAVE_cc0
          else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
                   && ! side_effects_p (SET_SRC (PATTERN (insn)))
                   && ((tem = next_nonnote_insn (insn)) == 0
                       || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
                       || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
            ;
#endif
          else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
                   || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
                   || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
                   || side_effects_p (SET_SRC (PATTERN (insn))))
            live_insn = 1;
        }
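      /* For a PARALLEL, the insn is dead only if every SET in it is dead
         by the same tests, and every other element is a CLOBBER or USE.  */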
      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
        for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
          {
            rtx elt = XVECEXP (PATTERN (insn), 0, i);

            if (GET_CODE (elt) == SET)
              {
                if (GET_CODE (SET_DEST (elt)) == REG
                    && SET_DEST (elt) == SET_SRC (elt))
                  ;

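                /* Same cc0 and liveness tests as in the single-SET
                   case above, applied to this element.  */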
#ifdef HAVE_cc0
                else if (GET_CODE (SET_DEST (elt)) == CC0
                         && ! side_effects_p (SET_SRC (elt))
                         && ((tem = next_nonnote_insn (insn)) == 0
                             || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
                             || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
                  ;
#endif
                else if (GET_CODE (SET_DEST (elt)) != REG
                         || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
                         || counts[REGNO (SET_DEST (elt))] != 0
                         || side_effects_p (SET_SRC (elt)))
                  live_insn = 1;
              }
            else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
              live_insn = 1;
          }
      else
        live_insn = 1;

      /* If this is a dead insn, delete it and show registers in it
         aren't being used.  */

      if (! live_insn)
        {
          count_reg_usage (insn, counts, NULL_RTX, -1);
          delete_insn (insn);
        }

      if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
        in_libcall = 0;
    }
}
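
/* For illustration only: a minimal standalone model of the two-pass
   scheme above, using a toy insn record instead of rtl.  Everything in
   this block is hypothetical and guarded by #if 0 so it has no effect
   on the surrounding file; extracted on its own it is a complete C
   program.  */
#if 0
#include <stdio.h>
#include <string.h>

#define NREG 8
#define NINSN 4

struct toy_insn
{
  int dest;			/* register set by the insn, -1 if none */
  int src[2];			/* source registers, -1 if unused */
  int deleted;
};

/* Add INCR to the use count of each source register of INSN,
   mirroring what count_reg_usage does for real rtl.  */
static void
toy_count (insn, counts, incr)
     struct toy_insn *insn;
     int *counts;
     int incr;
{
  int i;

  for (i = 0; i < 2; i++)
    if (insn->src[i] >= 0)
      counts[insn->src[i]] += incr;
}

int
main ()
{
  /* r2 = r0 + r1;  r3 = r2;  r4 = r3;  use r3.
     Only r3 is read afterward, so the insn setting r4 is dead.  */
  struct toy_insn insns[NINSN] =
    {
      { 2, { 0, 1 }, 0 },
      { 3, { 2, -1 }, 0 },
      { 4, { 3, -1 }, 0 },
      { -1, { 3, -1 }, 0 },	/* stands for a real use of r3 */
    };
  int counts[NREG];
  int i;

  /* Pass 1: count every register use.  */
  memset (counts, 0, sizeof counts);
  for (i = 0; i < NINSN; i++)
    toy_count (&insns[i], counts, 1);

  /* Pass 2: scan backwards.  An insn whose destination is never used
     is dead; removing it decrements the counts of its sources, which
     can expose further dead insns earlier in the stream.  */
  for (i = NINSN - 1; i >= 0; i--)
    if (insns[i].dest >= 0 && counts[insns[i].dest] == 0)
      {
        toy_count (&insns[i], counts, -1);
        insns[i].deleted = 1;
        printf ("insn %d (sets r%d) is dead\n", i, insns[i].dest);
      }

  return 0;
}
#endif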