]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cse.c
*** empty log message ***
[thirdparty/gcc.git] / gcc / cse.c
CommitLineData
7afe21cc
RK
1/* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21#include "config.h"
22#include "rtl.h"
23#include "regs.h"
24#include "hard-reg-set.h"
25#include "flags.h"
26#include "real.h"
27#include "insn-config.h"
28#include "recog.h"
29
30#include <stdio.h>
31#include <setjmp.h>
32
33/* The basic idea of common subexpression elimination is to go
34 through the code, keeping a record of expressions that would
35 have the same value at the current scan point, and replacing
36 expressions encountered with the cheapest equivalent expression.
37
38 It is too complicated to keep track of the different possibilities
39 when control paths merge; so, at each label, we forget all that is
40 known and start fresh. This can be described as processing each
41 basic block separately. Note, however, that these are not quite
42 the same as the basic blocks found by a later pass and used for
43 data flow analysis and register packing. We do not need to start fresh
44 after a conditional jump instruction if there is no label there.
45
46 We use two data structures to record the equivalent expressions:
47 a hash table for most expressions, and several vectors together
48 with "quantity numbers" to record equivalent (pseudo) registers.
49
50 The use of the special data structure for registers is desirable
 51 because it is faster. It is possible because register references
52 contain a fairly small number, the register number, taken from
53 a contiguously allocated series, and two register references are
54 identical if they have the same number. General expressions
55 do not have any such thing, so the only way to retrieve the
56 information recorded on an expression other than a register
57 is to keep it in a hash table.
58
59Registers and "quantity numbers":
60
61 At the start of each basic block, all of the (hardware and pseudo)
62 registers used in the function are given distinct quantity
63 numbers to indicate their contents. During scan, when the code
64 copies one register into another, we copy the quantity number.
65 When a register is loaded in any other way, we allocate a new
66 quantity number to describe the value generated by this operation.
67 `reg_qty' records what quantity a register is currently thought
68 of as containing.
69
70 All real quantity numbers are greater than or equal to `max_reg'.
71 If register N has not been assigned a quantity, reg_qty[N] will equal N.
72
73 Quantity numbers below `max_reg' do not exist and none of the `qty_...'
74 variables should be referenced with an index below `max_reg'.
75
76 We also maintain a bidirectional chain of registers for each
77 quantity number. `qty_first_reg', `qty_last_reg',
78 `reg_next_eqv' and `reg_prev_eqv' hold these chains.
79
80 The first register in a chain is the one whose lifespan is least local.
81 Among equals, it is the one that was seen first.
82 We replace any equivalent register with that one.
83
84 If two registers have the same quantity number, it must be true that
85 REG expressions with `qty_mode' must be in the hash table for both
86 registers and must be in the same class.
87
88 The converse is not true. Since hard registers may be referenced in
89 any mode, two REG expressions might be equivalent in the hash table
90 but not have the same quantity number if the quantity number of one
91 of the registers is not the same mode as those expressions.
92
93Constants and quantity numbers
94
95 When a quantity has a known constant value, that value is stored
96 in the appropriate element of qty_const. This is in addition to
97 putting the constant in the hash table as is usual for non-regs.
98
d45cf215 99 Whether a reg or a constant is preferred is determined by the configuration
7afe21cc
RK
100 macro CONST_COSTS and will often depend on the constant value. In any
101 event, expressions containing constants can be simplified, by fold_rtx.
102
103 When a quantity has a known nearly constant value (such as an address
104 of a stack slot), that value is stored in the appropriate element
105 of qty_const.
106
107 Integer constants don't have a machine mode. However, cse
108 determines the intended machine mode from the destination
109 of the instruction that moves the constant. The machine mode
110 is recorded in the hash table along with the actual RTL
111 constant expression so that different modes are kept separate.
112
113Other expressions:
114
115 To record known equivalences among expressions in general
116 we use a hash table called `table'. It has a fixed number of buckets
117 that contain chains of `struct table_elt' elements for expressions.
118 These chains connect the elements whose expressions have the same
119 hash codes.
120
121 Other chains through the same elements connect the elements which
122 currently have equivalent values.
123
124 Register references in an expression are canonicalized before hashing
125 the expression. This is done using `reg_qty' and `qty_first_reg'.
126 The hash code of a register reference is computed using the quantity
127 number, not the register number.
128
129 When the value of an expression changes, it is necessary to remove from the
130 hash table not just that expression but all expressions whose values
131 could be different as a result.
132
133 1. If the value changing is in memory, except in special cases
134 ANYTHING referring to memory could be changed. That is because
135 nobody knows where a pointer does not point.
136 The function `invalidate_memory' removes what is necessary.
137
138 The special cases are when the address is constant or is
139 a constant plus a fixed register such as the frame pointer
140 or a static chain pointer. When such addresses are stored in,
141 we can tell exactly which other such addresses must be invalidated
142 due to overlap. `invalidate' does this.
143 All expressions that refer to non-constant
144 memory addresses are also invalidated. `invalidate_memory' does this.
145
146 2. If the value changing is a register, all expressions
147 containing references to that register, and only those,
148 must be removed.
149
150 Because searching the entire hash table for expressions that contain
151 a register is very slow, we try to figure out when it isn't necessary.
152 Precisely, this is necessary only when expressions have been
153 entered in the hash table using this register, and then the value has
154 changed, and then another expression wants to be added to refer to
155 the register's new value. This sequence of circumstances is rare
156 within any one basic block.
157
158 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
159 reg_tick[i] is incremented whenever a value is stored in register i.
160 reg_in_table[i] holds -1 if no references to register i have been
161 entered in the table; otherwise, it contains the value reg_tick[i] had
162 when the references were entered. If we want to enter a reference
163 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
164 Until we want to enter a new entry, the mere fact that the two vectors
165 don't match makes the entries be ignored if anyone tries to match them.
166
167 Registers themselves are entered in the hash table as well as in
168 the equivalent-register chains. However, the vectors `reg_tick'
169 and `reg_in_table' do not apply to expressions which are simple
170 register references. These expressions are removed from the table
171 immediately when they become invalid, and this can be done even if
172 we do not immediately search for all the expressions that refer to
173 the register.
174
175 A CLOBBER rtx in an instruction invalidates its operand for further
176 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
177 invalidates everything that resides in memory.
178
179Related expressions:
180
181 Constant expressions that differ only by an additive integer
182 are called related. When a constant expression is put in
183 the table, the related expression with no constant term
184 is also entered. These are made to point at each other
185 so that it is possible to find out if there exists any
186 register equivalent to an expression related to a given expression. */
187
188/* One plus largest register number used in this function. */
189
190static int max_reg;
191
192/* Length of vectors indexed by quantity number.
193 We know in advance we will not need a quantity number this big. */
194
195static int max_qty;
196
197/* Next quantity number to be allocated.
198 This is 1 + the largest number needed so far. */
199
200static int next_qty;
201
202/* Indexed by quantity number, gives the first (or last) (pseudo) register
203 in the chain of registers that currently contain this quantity. */
204
205static int *qty_first_reg;
206static int *qty_last_reg;
207
208/* Index by quantity number, gives the mode of the quantity. */
209
210static enum machine_mode *qty_mode;
211
212/* Indexed by quantity number, gives the rtx of the constant value of the
213 quantity, or zero if it does not have a known value.
214 A sum of the frame pointer (or arg pointer) plus a constant
215 can also be entered here. */
216
217static rtx *qty_const;
218
219/* Indexed by qty number, gives the insn that stored the constant value
220 recorded in `qty_const'. */
221
222static rtx *qty_const_insn;
223
224/* The next three variables are used to track when a comparison between a
225 quantity and some constant or register has been passed. In that case, we
226 know the results of the comparison in case we see it again. These variables
227 record a comparison that is known to be true. */
228
229/* Indexed by qty number, gives the rtx code of a comparison with a known
230 result involving this quantity. If none, it is UNKNOWN. */
231static enum rtx_code *qty_comparison_code;
232
233/* Indexed by qty number, gives the constant being compared against in a
234 comparison of known result. If no such comparison, it is undefined.
235 If the comparison is not with a constant, it is zero. */
236
237static rtx *qty_comparison_const;
238
239/* Indexed by qty number, gives the quantity being compared against in a
 240 comparison of known result. If no such comparison, it is undefined.
241 If the comparison is not with a register, it is -1. */
242
243static int *qty_comparison_qty;
244
245#ifdef HAVE_cc0
246/* For machines that have a CC0, we do not record its value in the hash
247 table since its use is guaranteed to be the insn immediately following
248 its definition and any other insn is presumed to invalidate it.
249
250 Instead, we store below the value last assigned to CC0. If it should
251 happen to be a constant, it is stored in preference to the actual
252 assigned value. In case it is a constant, we store the mode in which
253 the constant should be interpreted. */
254
255static rtx prev_insn_cc0;
256static enum machine_mode prev_insn_cc0_mode;
257#endif
258
259/* Previous actual insn. 0 if at first insn of basic block. */
260
261static rtx prev_insn;
262
263/* Insn being scanned. */
264
265static rtx this_insn;
266
267/* Index by (pseudo) register number, gives the quantity number
268 of the register's current contents. */
269
270static int *reg_qty;
271
272/* Index by (pseudo) register number, gives the number of the next (or
273 previous) (pseudo) register in the chain of registers sharing the same
274 value.
275
276 Or -1 if this register is at the end of the chain.
277
278 If reg_qty[N] == N, reg_next_eqv[N] is undefined. */
279
280static int *reg_next_eqv;
281static int *reg_prev_eqv;
282
283/* Index by (pseudo) register number, gives the number of times
284 that register has been altered in the current basic block. */
285
286static int *reg_tick;
287
288/* Index by (pseudo) register number, gives the reg_tick value at which
289 rtx's containing this register are valid in the hash table.
290 If this does not equal the current reg_tick value, such expressions
291 existing in the hash table are invalid.
292 If this is -1, no expressions containing this register have been
293 entered in the table. */
294
295static int *reg_in_table;
296
297/* A HARD_REG_SET containing all the hard registers for which there is
298 currently a REG expression in the hash table. Note the difference
299 from the above variables, which indicate if the REG is mentioned in some
300 expression in the table. */
301
302static HARD_REG_SET hard_regs_in_table;
303
304/* A HARD_REG_SET containing all the hard registers that are invalidated
305 by a CALL_INSN. */
306
307static HARD_REG_SET regs_invalidated_by_call;
308
309/* Two vectors of ints:
310 one containing max_reg -1's; the other max_reg + 500 (an approximation
311 for max_qty) elements where element i contains i.
312 These are used to initialize various other vectors fast. */
313
314static int *all_minus_one;
315static int *consec_ints;
316
317/* CUID of insn that starts the basic block currently being cse-processed. */
318
319static int cse_basic_block_start;
320
321/* CUID of insn that ends the basic block currently being cse-processed. */
322
323static int cse_basic_block_end;
324
325/* Vector mapping INSN_UIDs to cuids.
d45cf215 326 The cuids are like uids but increase monotonically always.
7afe21cc
RK
327 We use them to see whether a reg is used outside a given basic block. */
328
329static short *uid_cuid;
330
331/* Get the cuid of an insn. */
332
333#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
334
335/* Nonzero if cse has altered conditional jump insns
336 in such a way that jump optimization should be redone. */
337
338static int cse_jumps_altered;
339
340/* canon_hash stores 1 in do_not_record
341 if it notices a reference to CC0, PC, or some other volatile
342 subexpression. */
343
344static int do_not_record;
345
346/* canon_hash stores 1 in hash_arg_in_memory
347 if it notices a reference to memory within the expression being hashed. */
348
349static int hash_arg_in_memory;
350
351/* canon_hash stores 1 in hash_arg_in_struct
352 if it notices a reference to memory that's part of a structure. */
353
354static int hash_arg_in_struct;
355
356/* The hash table contains buckets which are chains of `struct table_elt's,
357 each recording one expression's information.
358 That expression is in the `exp' field.
359
360 Those elements with the same hash code are chained in both directions
361 through the `next_same_hash' and `prev_same_hash' fields.
362
363 Each set of expressions with equivalent values
364 are on a two-way chain through the `next_same_value'
365 and `prev_same_value' fields, and all point with
366 the `first_same_value' field at the first element in
367 that chain. The chain is in order of increasing cost.
368 Each element's cost value is in its `cost' field.
369
370 The `in_memory' field is nonzero for elements that
371 involve any reference to memory. These elements are removed
372 whenever a write is done to an unidentified location in memory.
373 To be safe, we assume that a memory address is unidentified unless
374 the address is either a symbol constant or a constant plus
375 the frame pointer or argument pointer.
376
377 The `in_struct' field is nonzero for elements that
378 involve any reference to memory inside a structure or array.
379
380 The `related_value' field is used to connect related expressions
381 (that differ by adding an integer).
382 The related expressions are chained in a circular fashion.
383 `related_value' is zero for expressions for which this
384 chain is not useful.
385
386 The `cost' field stores the cost of this element's expression.
387
388 The `is_const' flag is set if the element is a constant (including
389 a fixed address).
390
391 The `flag' field is used as a temporary during some search routines.
392
393 The `mode' field is usually the same as GET_MODE (`exp'), but
394 if `exp' is a CONST_INT and has no machine mode then the `mode'
395 field is the mode it was being used as. Each constant is
396 recorded separately for each mode it is used with. */
397
398
399struct table_elt
400{
401 rtx exp;
402 struct table_elt *next_same_hash;
403 struct table_elt *prev_same_hash;
404 struct table_elt *next_same_value;
405 struct table_elt *prev_same_value;
406 struct table_elt *first_same_value;
407 struct table_elt *related_value;
408 int cost;
409 enum machine_mode mode;
410 char in_memory;
411 char in_struct;
412 char is_const;
413 char flag;
414};
415
416#define HASHBITS 16
417
418/* We don't want a lot of buckets, because we rarely have very many
419 things stored in the hash table, and a lot of buckets slows
420 down a lot of loops that happen frequently. */
421#define NBUCKETS 31
422
423/* Compute hash code of X in mode M. Special-case case where X is a pseudo
424 register (hard registers may require `do_not_record' to be set). */
425
426#define HASH(X, M) \
427 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
428 ? ((((int) REG << 7) + reg_qty[REGNO (X)]) % NBUCKETS) \
429 : canon_hash (X, M) % NBUCKETS)
430
431/* Determine whether register number N is considered a fixed register for CSE.
432 It is desirable to replace other regs with fixed regs, to reduce need for
433 non-fixed hard regs.
434 A reg wins if it is either the frame pointer or designated as fixed,
435 but not if it is an overlapping register. */
436#ifdef OVERLAPPING_REGNO_P
437#define FIXED_REGNO_P(N) \
438 (((N) == FRAME_POINTER_REGNUM || fixed_regs[N]) \
439 && ! OVERLAPPING_REGNO_P ((N)))
440#else
441#define FIXED_REGNO_P(N) \
442 ((N) == FRAME_POINTER_REGNUM || fixed_regs[N])
443#endif
444
445/* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
446 hard registers are the cheapest with a cost of 0. Next come pseudos
447 with a cost of one and other hard registers with a cost of 2. Aside
448 from these special cases, call `rtx_cost'. */
449
450#define COST(X) \
451 (GET_CODE (X) == REG \
452 ? (REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1 \
453 : (FIXED_REGNO_P (REGNO (X)) \
454 && REGNO_REG_CLASS (REGNO (X)) != NO_REGS) ? 0 \
455 : 2) \
e5f6a288 456 : rtx_cost (X, SET) * 2)
7afe21cc
RK
457
458/* Determine if the quantity number for register X represents a valid index
459 into the `qty_...' variables. */
460
461#define REGNO_QTY_VALID_P(N) (reg_qty[N] != (N))
462
463static struct table_elt *table[NBUCKETS];
464
465/* Chain of `struct table_elt's made so far for this function
466 but currently removed from the table. */
467
468static struct table_elt *free_element_chain;
469
470/* Number of `struct table_elt' structures made so far for this function. */
471
472static int n_elements_made;
473
474/* Maximum value `n_elements_made' has had so far in this compilation
475 for functions previously processed. */
476
477static int max_elements_made;
478
479/* Surviving equivalence class when two equivalence classes are merged
480 by recording the effects of a jump in the last insn. Zero if the
481 last insn was not a conditional jump. */
482
483static struct table_elt *last_jump_equiv_class;
484
485/* Set to the cost of a constant pool reference if one was found for a
486 symbolic constant. If this was found, it means we should try to
487 convert constants into constant pool entries if they don't fit in
488 the insn. */
489
490static int constant_pool_entries_cost;
491
492/* Bits describing what kind of values in memory must be invalidated
493 for a particular instruction. If all three bits are zero,
494 no memory refs need to be invalidated. Each bit is more powerful
495 than the preceding ones, and if a bit is set then the preceding
496 bits are also set.
497
498 Here is how the bits are set:
499 Pushing onto the stack invalidates only the stack pointer,
500 writing at a fixed address invalidates only variable addresses,
501 writing in a structure element at variable address
502 invalidates all but scalar variables,
503 and writing in anything else at variable address invalidates everything. */
504
505struct write_data
506{
507 int sp : 1; /* Invalidate stack pointer. */
508 int var : 1; /* Invalidate variable addresses. */
509 int nonscalar : 1; /* Invalidate all but scalar variables. */
510 int all : 1; /* Invalidate all memory refs. */
511};
512
513/* Nonzero if X has the form (PLUS frame-pointer integer). We check for
514 virtual regs here because the simplify_*_operation routines are called
515 by integrate.c, which is called before virtual register instantiation. */
516
517#define FIXED_BASE_PLUS_P(X) \
518 ((X) == frame_pointer_rtx || (X) == arg_pointer_rtx \
519 || (X) == virtual_stack_vars_rtx \
520 || (X) == virtual_incoming_args_rtx \
521 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
522 && (XEXP (X, 0) == frame_pointer_rtx \
523 || XEXP (X, 0) == arg_pointer_rtx \
524 || XEXP (X, 0) == virtual_stack_vars_rtx \
525 || XEXP (X, 0) == virtual_incoming_args_rtx)))
526
6f90e075
JW
527/* Similar, but also allows reference to the stack pointer.
528
529 This used to include FIXED_BASE_PLUS_P, however, we can't assume that
530 arg_pointer_rtx by itself is nonzero, because on at least one machine,
531 the i960, the arg pointer is zero when it is unused. */
7afe21cc
RK
532
533#define NONZERO_BASE_PLUS_P(X) \
6f90e075
JW
534 ((X) == frame_pointer_rtx \
535 || (X) == virtual_stack_vars_rtx \
536 || (X) == virtual_incoming_args_rtx \
537 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
538 && (XEXP (X, 0) == frame_pointer_rtx \
539 || XEXP (X, 0) == arg_pointer_rtx \
540 || XEXP (X, 0) == virtual_stack_vars_rtx \
541 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
7afe21cc
RK
542 || (X) == stack_pointer_rtx \
543 || (X) == virtual_stack_dynamic_rtx \
544 || (X) == virtual_outgoing_args_rtx \
545 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
546 && (XEXP (X, 0) == stack_pointer_rtx \
547 || XEXP (X, 0) == virtual_stack_dynamic_rtx \
548 || XEXP (X, 0) == virtual_outgoing_args_rtx)))
549
550static struct table_elt *lookup ();
551static void free_element ();
552
553static int insert_regs ();
554static void rehash_using_reg ();
555static void remove_invalid_refs ();
556static int exp_equiv_p ();
557int refers_to_p ();
558int refers_to_mem_p ();
559static void invalidate_from_clobbers ();
560static int safe_hash ();
561static int canon_hash ();
562static rtx fold_rtx ();
563static rtx equiv_constant ();
564static void record_jump_cond ();
565static void note_mem_written ();
566static int cse_rtx_addr_varies_p ();
567static enum rtx_code find_comparison_args ();
568static void cse_insn ();
569static void cse_set_around_loop ();
570\f
571/* Return an estimate of the cost of computing rtx X.
572 One use is in cse, to decide which expression to keep in the hash table.
573 Another is in rtl generation, to pick the cheapest way to multiply.
574 Other uses like the latter are expected in the future. */
575
576/* Return the right cost to give to an operation
577 to make the cost of the corresponding register-to-register instruction
578 N times that of a fast register-to-register instruction. */
579
580#define COSTS_N_INSNS(N) ((N) * 4 - 2)
581
int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code;
{
  register int i, j;
  register enum rtx_code code;
  register char *fmt;
  register int total;

  /* A null rtx costs nothing.  */
  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
	 we have no way of validating the resulting insn.  So assign
	 anything containing an ASM_OPERANDS a very high cost.  */
      total = 1000;
      break;
    default:
      total = 2;
    }

  /* Second switch: codes whose cost is final, with no recursion on
     operands.  NOTE: RTX_COSTS and CONST_COSTS are target-description
     macros that expand to additional `case' labels (normally ending in
     `return') INSIDE this switch; falling through past `case SUBREG'
     into them is intentional, not a bug.  */
  switch (code)
    {
    case REG:
      return 1;
    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      return 2;
#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
      CONST_COSTS (x, code, outer_code);
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
660\f
661/* Clear the hash table and initialize each register with its own quantity,
662 for a new basic block. */
663
664static void
665new_basic_block ()
666{
667 register int i;
668
669 next_qty = max_reg;
670
671 bzero (reg_tick, max_reg * sizeof (int));
672
673 bcopy (all_minus_one, reg_in_table, max_reg * sizeof (int));
674 bcopy (consec_ints, reg_qty, max_reg * sizeof (int));
675 CLEAR_HARD_REG_SET (hard_regs_in_table);
676
677 /* The per-quantity values used to be initialized here, but it is
678 much faster to initialize each as it is made in `make_new_qty'. */
679
680 for (i = 0; i < NBUCKETS; i++)
681 {
682 register struct table_elt *this, *next;
683 for (this = table[i]; this; this = next)
684 {
685 next = this->next_same_hash;
686 free_element (this);
687 }
688 }
689
690 bzero (table, sizeof table);
691
692 prev_insn = 0;
693
694#ifdef HAVE_cc0
695 prev_insn_cc0 = 0;
696#endif
697}
698
699/* Say that register REG contains a quantity not in any register before
700 and initialize that quantity. */
701
702static void
703make_new_qty (reg)
704 register int reg;
705{
706 register int q;
707
708 if (next_qty >= max_qty)
709 abort ();
710
711 q = reg_qty[reg] = next_qty++;
712 qty_first_reg[q] = reg;
713 qty_last_reg[q] = reg;
714 qty_const[q] = qty_const_insn[q] = 0;
715 qty_comparison_code[q] = UNKNOWN;
716
717 reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
718}
719
720/* Make reg NEW equivalent to reg OLD.
721 OLD is not changing; NEW is. */
722
static void
make_regs_eqv (new, old)
     register int new, old;
{
  register int lastr, firstr;
  register int q = reg_qty[old];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  /* NEW now shares OLD's quantity.  What remains is to decide where NEW
     goes in the quantity's register chain, since the chain head is the
     preferred replacement register.  */
  reg_qty[new] = q;
  firstr = qty_first_reg[q];
  lastr = qty_last_reg[q];

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[regno_last_uid[new]] > cse_basic_block_end
		       || (uid_cuid[regno_first_uid[new]]
			   < cse_basic_block_start))
		      && (uid_cuid[regno_last_uid[new]]
			  > uid_cuid[regno_last_uid[firstr]]))))))
    {
      /* NEW wins: link it in at the head so it becomes the canonical
	 replacement register for this quantity.  */
      reg_prev_eqv[firstr] = new;
      reg_next_eqv[new] = firstr;
      reg_prev_eqv[new] = -1;
      qty_first_reg[q] = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_prev_eqv[lastr];
      /* Splice NEW into the doubly-linked chain just after LASTR,
	 updating the chain tail if NEW becomes the last element.  */
      reg_next_eqv[new] = reg_next_eqv[lastr];
      if (reg_next_eqv[lastr] >= 0)
	reg_prev_eqv[reg_next_eqv[lastr]] = new;
      else
	qty_last_reg[q] = new;
      reg_next_eqv[lastr] = new;
      reg_prev_eqv[new] = lastr;
    }
}
781
782/* Remove REG from its equivalence class. */
783
784static void
785delete_reg_equiv (reg)
786 register int reg;
787{
788 register int n = reg_next_eqv[reg];
789 register int p = reg_prev_eqv[reg];
790 register int q = reg_qty[reg];
791
792 /* If invalid, do nothing. N and P above are undefined in that case. */
793 if (q == reg)
794 return;
795
796 if (n != -1)
797 reg_prev_eqv[n] = p;
798 else
799 qty_last_reg[q] = p;
800 if (p != -1)
801 reg_next_eqv[p] = n;
802 else
803 qty_first_reg[q] = n;
804
805 reg_qty[reg] = reg;
806}
807
808/* Remove any invalid expressions from the hash table
809 that refer to any of the registers contained in expression X.
810
811 Make sure that newly inserted references to those registers
812 as subexpressions will be considered valid.
813
814 mention_regs is not called when a register itself
815 is being stored in the table.
816
817 Return 1 if we have done something that may have changed the hash code
818 of X. */
819
static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      register int regno = REGNO (x);
      /* A hard register may span several consecutive register numbers;
	 a pseudo always occupies exactly one.  */
      register int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      int i;

      for (i = regno; i < endregno; i++)
	{
	  /* Table entries made when reg_tick had an older value mention
	     this register with a stale value; purge them before marking
	     new references valid.  */
	  if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
	    remove_invalid_refs (i);

	  reg_in_table[i] = reg_tick[i];
	}

      /* A plain REG never changes the hash code of X.  */
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), 0, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), 0, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  /* Recurse on all subexpressions of X, OR-ing together their
     "hash code may have changed" results.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}
891
892/* Update the register quantities for inserting X into the hash table
893 with a value equivalent to CLASSP.
894 (If the class does not contain a REG, it is irrelevant.)
895 If MODIFIED is nonzero, X is a destination; it is being modified.
896 Note that delete_reg_equiv should be called on a register
897 before insert_regs is done on that register with MODIFIED != 0.
898
899 Nonzero value means that elements of reg_qty have changed
900 so X's hash code may be different. */
901
902static int
903insert_regs (x, classp, modified)
904 rtx x;
905 struct table_elt *classp;
906 int modified;
907{
908 if (GET_CODE (x) == REG)
909 {
910 register int regno = REGNO (x);
911
912 if (modified
913 || ! (REGNO_QTY_VALID_P (regno)
914 && qty_mode[reg_qty[regno]] == GET_MODE (x)))
915 {
916 if (classp)
917 for (classp = classp->first_same_value;
918 classp != 0;
919 classp = classp->next_same_value)
920 if (GET_CODE (classp->exp) == REG
921 && GET_MODE (classp->exp) == GET_MODE (x))
922 {
923 make_regs_eqv (regno, REGNO (classp->exp));
924 return 1;
925 }
926
927 make_new_qty (regno);
928 qty_mode[reg_qty[regno]] = GET_MODE (x);
929 return 1;
930 }
931 }
932 else
933 return mention_regs (x);
934}
935\f
936/* Look in or update the hash table. */
937
938/* Put the element ELT on the list of free elements. */
939
940static void
941free_element (elt)
942 struct table_elt *elt;
943{
944 elt->next_same_hash = free_element_chain;
945 free_element_chain = elt;
946}
947
948/* Return an element that is free for use. */
949
950static struct table_elt *
951get_element ()
952{
953 struct table_elt *elt = free_element_chain;
954 if (elt)
955 {
956 free_element_chain = elt->next_same_hash;
957 return elt;
958 }
959 n_elements_made++;
960 return (struct table_elt *) oballoc (sizeof (struct table_elt));
961}
962
/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.

   ELT is unlinked from its equivalence class, its hash bucket, and its
   related-value chain, and finally returned to the free list.  */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     int hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next) next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	/* ELT was the head of the class; every remaining element must
	   now point at the new head (NEXT, possibly 0).  */
	register struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next) next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < NBUCKETS; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.
     (A chain of one element is represented by related_value == elt;
     when the predecessor becomes the only member, clear its link.)  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;
      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  free_element (elt);
}
1038
1039/* Look up X in the hash table and return its table element,
1040 or 0 if X is not in the table.
1041
1042 MODE is the machine-mode of X, or if X is an integer constant
1043 with VOIDmode then MODE is the mode with which X will be used.
1044
1045 Here we are satisfied to find an expression whose tree structure
1046 looks like X. */
1047
1048static struct table_elt *
1049lookup (x, hash, mode)
1050 rtx x;
1051 int hash;
1052 enum machine_mode mode;
1053{
1054 register struct table_elt *p;
1055
1056 for (p = table[hash]; p; p = p->next_same_hash)
1057 if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1058 || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1059 return p;
1060
1061 return 0;
1062}
1063
1064/* Like `lookup' but don't care whether the table element uses invalid regs.
1065 Also ignore discrepancies in the machine mode of a register. */
1066
1067static struct table_elt *
1068lookup_for_remove (x, hash, mode)
1069 rtx x;
1070 int hash;
1071 enum machine_mode mode;
1072{
1073 register struct table_elt *p;
1074
1075 if (GET_CODE (x) == REG)
1076 {
1077 int regno = REGNO (x);
1078 /* Don't check the machine mode when comparing registers;
1079 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1080 for (p = table[hash]; p; p = p->next_same_hash)
1081 if (GET_CODE (p->exp) == REG
1082 && REGNO (p->exp) == regno)
1083 return p;
1084 }
1085 else
1086 {
1087 for (p = table[hash]; p; p = p->next_same_hash)
1088 if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1089 return p;
1090 }
1091
1092 return 0;
1093}
1094
1095/* Look for an expression equivalent to X and with code CODE.
1096 If one is found, return that expression. */
1097
1098static rtx
1099lookup_as_function (x, code)
1100 rtx x;
1101 enum rtx_code code;
1102{
1103 register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
1104 GET_MODE (x));
1105 if (p == 0)
1106 return 0;
1107
1108 for (p = p->first_same_value; p; p = p->next_same_value)
1109 {
1110 if (GET_CODE (p->exp) == code
1111 /* Make sure this is a valid entry in the table. */
1112 && exp_equiv_p (p->exp, p->exp, 1, 0))
1113 return p->exp;
1114 }
1115
1116 return 0;
1117}
1118
/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X,Y) ((X)->cost < (Y)->cost)

static struct table_elt *
insert (x, classp, hash, mode)
     register rtx x;
     register struct table_elt *classp;
     int hash;
     enum machine_mode mode;
{
  register struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table.  */
  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
      int i;

      /* Mark every hard register X occupies as present in the table.  */
      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }


  /* Put an element for X into the right hash bucket.  */

  elt = get_element ();
  elt->exp = x;
  elt->cost = COST (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x)
		   /* GNU C++ takes advantage of this for `this'
		      (and other const values).  */
		   || (RTX_UNCHANGING_P (x)
		       && GET_CODE (x) == REG
		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
		   || FIXED_BASE_PLUS_P (x));

  /* Link ELT at the head of its hash bucket.  */
  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class */
	{
	  register struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  /* Every member of the class must now point at the new head.  */
	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  register struct table_elt *p, *next;
	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next);
	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;
	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update `qty_const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && GET_CODE (classp->exp) == REG)
    {
      /* A constant was inserted into a register's class: record the
	 constant (lowparted to the quantity's mode) for that quantity.  */
      qty_const[reg_qty[REGNO (classp->exp)]]
	= gen_lowpart_if_possible (qty_mode[reg_qty[REGNO (classp->exp)]], x);
      qty_const_insn[reg_qty[REGNO (classp->exp)]] = this_insn;
    }

  else if (GET_CODE (x) == REG && classp && ! qty_const[reg_qty[REGNO (x)]])
    {
      register struct table_elt *p;

      /* A register joined a class; scan the class for a constant member
	 and, if found, record it as the register's constant value.  */
      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const)
	    {
	      qty_const[reg_qty[REGNO (x)]]
		= gen_lowpart_if_possible (GET_MODE (x), p->exp);
	      qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
	      break;
	    }
	}
    }

  else if (GET_CODE (x) == REG && qty_const[reg_qty[REGNO (x)]]
	   && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]])
    qty_const_insn[reg_qty[REGNO (x)]] = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      int subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = safe_hash (subexp, mode) % NBUCKETS;
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, 0, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}
1303\f
1304/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1305 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1306 the two classes equivalent.
1307
1308 CLASS1 will be the surviving class; CLASS2 should not be used after this
1309 call.
1310
1311 Any invalid entries in CLASS2 will not be copied. */
1312
1313static void
1314merge_equiv_classes (class1, class2)
1315 struct table_elt *class1, *class2;
1316{
1317 struct table_elt *elt, *next, *new;
1318
1319 /* Ensure we start with the head of the classes. */
1320 class1 = class1->first_same_value;
1321 class2 = class2->first_same_value;
1322
1323 /* If they were already equal, forget it. */
1324 if (class1 == class2)
1325 return;
1326
1327 for (elt = class2; elt; elt = next)
1328 {
1329 int hash;
1330 rtx exp = elt->exp;
1331 enum machine_mode mode = elt->mode;
1332
1333 next = elt->next_same_value;
1334
1335 /* Remove old entry, make a new one in CLASS1's class.
1336 Don't do this for invalid entries as we cannot find their
1337 hash code (it also isn't necessary). */
1338 if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1339 {
1340 hash_arg_in_memory = 0;
1341 hash_arg_in_struct = 0;
1342 hash = HASH (exp, mode);
1343
1344 if (GET_CODE (exp) == REG)
1345 delete_reg_equiv (REGNO (exp));
1346
1347 remove_from_table (elt, hash);
1348
1349 if (insert_regs (exp, class1, 0))
1350 hash = HASH (exp, mode);
1351 new = insert (exp, class1, hash, mode);
1352 new->in_memory = hash_arg_in_memory;
1353 new->in_struct = hash_arg_in_struct;
1354 }
1355 }
1356}
1357\f
/* Remove from the hash table, or mark as invalid,
   all expressions whose values could be altered by storing in X.
   X is a register, a subreg, or a memory reference with nonvarying address
   (because, when a memory reference with a varying address is stored in,
   all memory references are removed by invalidate_memory
   so specific invalidation is superfluous).

   A nonvarying address may be just a register or just
   a symbol reference, or it may be either of those plus
   a numeric offset.  */

static void
invalidate (x)
     rtx x;
{
  register int i;
  register struct table_elt *p;
  register rtx base;
  register int start, end;

  /* If X is a register, dependencies on its contents
     are recorded through the qty number mechanism.
     Just change the qty number of the register,
     mark it as invalid for expressions that refer to it,
     and remove it itself.  */

  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);
      register int hash = HASH (x, GET_MODE (x));

      /* Remove REGNO from any quantity list it might be on and indicate
	 that it's value might have changed.  If it is a pseudo, remove its
	 entry from the hash table.

	 For a hard register, we do the first two actions above for any
	 additional hard registers corresponding to X.  Then, if any of these
	 registers are in the table, we must remove any REG entries that
	 overlap these registers.  */

      delete_reg_equiv (regno);
      reg_tick[regno]++;

      if (regno >= FIRST_PSEUDO_REGISTER)
	remove_from_table (lookup_for_remove (x, hash, GET_MODE (x)), hash);
      else
	{
	  int in_table = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
	  int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	  int tregno, tendregno;
	  register struct table_elt *p, *next;

	  CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

	  /* Invalidate the additional hard registers X occupies.  */
	  for (i = regno + 1; i < endregno; i++)
	    {
	      in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
	      CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
	      delete_reg_equiv (i);
	      reg_tick[i]++;
	    }

	  /* Scan the whole table for REG entries overlapping
	     [regno, endregno).  */
	  if (in_table)
	    for (hash = 0; hash < NBUCKETS; hash++)
	      for (p = table[hash]; p; p = next)
		{
		  next = p->next_same_hash;

		  if (GET_CODE (p->exp) != REG
		      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
		    continue;

		  tregno = REGNO (p->exp);
		  tendregno
		    = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
		  if (tendregno > regno && tregno < endregno)
		    remove_from_table (p, hash);
		}
	}

      return;
    }

  if (GET_CODE (x) == SUBREG)
    {
      /* A store into a SUBREG clobbers the underlying register.  */
      if (GET_CODE (SUBREG_REG (x)) != REG)
	abort ();
      invalidate (SUBREG_REG (x));
      return;
    }

  /* X is not a register; it must be a memory reference with
     a nonvarying address.  Remove all hash table elements
     that refer to overlapping pieces of memory.  */

  if (GET_CODE (x) != MEM)
    abort ();
  base = XEXP (x, 0);
  start = 0;

  /* Registers with nonvarying addresses usually have constant equivalents;
     but the frame pointer register is also possible.  */
  if (GET_CODE (base) == REG
      && REGNO_QTY_VALID_P (REGNO (base))
      && qty_mode[reg_qty[REGNO (base)]] == GET_MODE (base)
      && qty_const[reg_qty[REGNO (base)]] != 0)
    base = qty_const[reg_qty[REGNO (base)]];
  else if (GET_CODE (base) == PLUS
	   && GET_CODE (XEXP (base, 1)) == CONST_INT
	   && GET_CODE (XEXP (base, 0)) == REG
	   && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
	   && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
	       == GET_MODE (XEXP (base, 0)))
	   && qty_const[reg_qty[REGNO (XEXP (base, 0))]])
    {
      start = INTVAL (XEXP (base, 1));
      base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
    }

  /* Strip CONST wrappers and fold a constant offset into START,
     so BASE is the bare symbolic/register part of the address.  */
  if (GET_CODE (base) == CONST)
    base = XEXP (base, 0);
  if (GET_CODE (base) == PLUS
      && GET_CODE (XEXP (base, 1)) == CONST_INT)
    {
      start += INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);
    }

  /* Remove every entry that may refer to bytes [START, END) from BASE.  */
  end = start + GET_MODE_SIZE (GET_MODE (x));
  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *next;
      for (p = table[i]; p; p = next)
	{
	  next = p->next_same_hash;
	  if (refers_to_mem_p (p->exp, base, start, end))
	    remove_from_table (p, i);
	}
    }
}
1498
1499/* Remove all expressions that refer to register REGNO,
1500 since they are already invalid, and we are about to
1501 mark that register valid again and don't want the old
1502 expressions to reappear as valid. */
1503
1504static void
1505remove_invalid_refs (regno)
1506 int regno;
1507{
1508 register int i;
1509 register struct table_elt *p, *next;
1510
1511 for (i = 0; i < NBUCKETS; i++)
1512 for (p = table[i]; p; p = next)
1513 {
1514 next = p->next_same_hash;
1515 if (GET_CODE (p->exp) != REG
1516 && refers_to_regno_p (regno, regno + 1, p->exp, 0))
1517 remove_from_table (p, i);
1518 }
1519}
1520\f
1521/* Recompute the hash codes of any valid entries in the hash table that
1522 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1523
1524 This is called when we make a jump equivalence. */
1525
1526static void
1527rehash_using_reg (x)
1528 rtx x;
1529{
1530 int i;
1531 struct table_elt *p, *next;
1532 int hash;
1533
1534 if (GET_CODE (x) == SUBREG)
1535 x = SUBREG_REG (x);
1536
1537 /* If X is not a register or if the register is known not to be in any
1538 valid entries in the table, we have no work to do. */
1539
1540 if (GET_CODE (x) != REG
1541 || reg_in_table[REGNO (x)] < 0
1542 || reg_in_table[REGNO (x)] != reg_tick[REGNO (x)])
1543 return;
1544
1545 /* Scan all hash chains looking for valid entries that mention X.
1546 If we find one and it is in the wrong hash chain, move it. We can skip
1547 objects that are registers, since they are handled specially. */
1548
1549 for (i = 0; i < NBUCKETS; i++)
1550 for (p = table[i]; p; p = next)
1551 {
1552 next = p->next_same_hash;
1553 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
538b78e7 1554 && exp_equiv_p (p->exp, p->exp, 1, 0)
7afe21cc
RK
1555 && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
1556 {
1557 if (p->next_same_hash)
1558 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1559
1560 if (p->prev_same_hash)
1561 p->prev_same_hash->next_same_hash = p->next_same_hash;
1562 else
1563 table[i] = p->next_same_hash;
1564
1565 p->next_same_hash = table[hash];
1566 p->prev_same_hash = 0;
1567 if (table[hash])
1568 table[hash]->prev_same_hash = p;
1569 table[hash] = p;
1570 }
1571 }
1572}
1573\f
1574/* Remove from the hash table all expressions that reference memory,
1575 or some of them as specified by *WRITES. */
1576
1577static void
1578invalidate_memory (writes)
1579 struct write_data *writes;
1580{
1581 register int i;
1582 register struct table_elt *p, *next;
1583 int all = writes->all;
1584 int nonscalar = writes->nonscalar;
1585
1586 for (i = 0; i < NBUCKETS; i++)
1587 for (p = table[i]; p; p = next)
1588 {
1589 next = p->next_same_hash;
1590 if (p->in_memory
1591 && (all
1592 || (nonscalar && p->in_struct)
1593 || cse_rtx_addr_varies_p (p->exp)))
1594 remove_from_table (p, i);
1595 }
1596}
1597\f
1598/* Remove from the hash table any expression that is a call-clobbered
1599 register. Also update their TICK values. */
1600
1601static void
1602invalidate_for_call ()
1603{
1604 int regno, endregno;
1605 int i;
1606 int hash;
1607 struct table_elt *p, *next;
1608 int in_table = 0;
1609
1610 /* Go through all the hard registers. For each that is clobbered in
1611 a CALL_INSN, remove the register from quantity chains and update
1612 reg_tick if defined. Also see if any of these registers is currently
1613 in the table. */
1614
1615 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1616 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1617 {
1618 delete_reg_equiv (regno);
1619 if (reg_tick[regno] >= 0)
1620 reg_tick[regno]++;
1621
1622 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1623 }
1624
1625 /* In the case where we have no call-clobbered hard registers in the
1626 table, we are done. Otherwise, scan the table and remove any
1627 entry that overlaps a call-clobbered register. */
1628
1629 if (in_table)
1630 for (hash = 0; hash < NBUCKETS; hash++)
1631 for (p = table[hash]; p; p = next)
1632 {
1633 next = p->next_same_hash;
1634
1635 if (GET_CODE (p->exp) != REG
1636 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1637 continue;
1638
1639 regno = REGNO (p->exp);
1640 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1641
1642 for (i = regno; i < endregno; i++)
1643 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1644 {
1645 remove_from_table (p, hash);
1646 break;
1647 }
1648 }
1649}
1650\f
/* Given an expression X of type CONST,
   and ELT which is its table entry (or 0 if it
   is not in the hash table),
   return an alternate expression for X as a register plus integer.
   If none can be found, return 0.  */

static rtx
use_related_value (x, elt)
     rtx x;
     struct table_elt *elt;
{
  register struct table_elt *relt = 0;
  register struct table_elt *p, *q;
  int offset;

  /* First, is there anything related known?
     If we have a table element, we can tell from that.
     Otherwise, must look it up.  */

  if (elt != 0 && elt->related_value != 0)
    relt = elt;
  else if (elt == 0 && GET_CODE (x) == CONST)
    {
      /* X itself is not in the table; look up its integer-free
	 subexpression, whose class anchors the related-value chain.  */
      rtx subexp = get_related_value (x);
      if (subexp != 0)
	relt = lookup (subexp,
		       safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
		       GET_MODE (subexp));
    }

  if (relt == 0)
    return 0;

  /* Search all related table entries for one that has an
     equivalent register.  */

  p = relt;
  while (1)
    {
      /* This loop is strange in that it is executed in two different cases.
	 The first is when X is already in the table.  Then it is searching
	 the RELATED_VALUE list of X's class (RELT).  The second case is when
	 X is not in the table.  Then RELT points to a class for the related
	 value.

	 Ensure that, whatever case we are in, that we ignore classes that have
	 the same value as X.  */

      if (rtx_equal_p (x, p->exp))
	q = 0;
      else
	for (q = p->first_same_value; q; q = q->next_same_value)
	  if (GET_CODE (q->exp) == REG)
	    break;

      if (q)
	break;

      p = p->related_value;

      /* We went all the way around, so there is nothing to be found.
	 Alternatively, perhaps RELT was in the table for some other reason
	 and it has no related values recorded.  */
      if (p == relt || p == 0)
	break;
    }

  if (q == 0)
    return 0;

  /* Rebuild X as the found register plus the difference of the
     integer terms.  */
  offset = (get_integer_term (x) - get_integer_term (p->exp));
  /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
  return plus_constant (q->exp, offset);
}
1725\f
/* Hash an rtx.  We are careful to make sure the value is never negative.
   Equivalent registers hash identically.
   MODE is used in hashing for CONST_INTs only;
   otherwise the mode of X is used.

   Store 1 in do_not_record if any subexpression is volatile.

   Store 1 in hash_arg_in_memory if X contains a MEM rtx
   which does not have the RTX_UNCHANGING_P bit set.
   In this case, also store 1 in hash_arg_in_struct
   if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.

   Note that cse_insn knows that the hash code of a MEM expression
   is just (int) MEM plus the hash code of the address.  */

static int
canon_hash (x, mode)
     rtx x;
     enum machine_mode mode;
{
  register int i, j;
  register int hash = 0;
  register enum rtx_code code;
  register char *fmt;

  /* repeat is used to turn tail-recursion into iteration.  */
 repeat:
  if (x == 0)
    return hash;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	register int regno = REGNO (x);

	/* On some machines, we can't record any non-fixed hard register,
	   because extending its life will cause reload problems.  We
	   consider ap, fp, and sp to be fixed for this purpose.
	   On all machines, we can't record any global registers.  */

	if (regno < FIRST_PSEUDO_REGISTER
	    && (global_regs[regno]
#ifdef SMALL_REGISTER_CLASSES
		|| (! fixed_regs[regno]
		    && regno != FRAME_POINTER_REGNUM
		    && regno != ARG_POINTER_REGNUM
		    && regno != STACK_POINTER_REGNUM)
#endif
		))
	  {
	    do_not_record = 1;
	    return 0;
	  }
	/* Hash by quantity number so equivalent registers hash alike.  */
	return hash + ((int) REG << 7) + reg_qty[regno];
      }

    case CONST_INT:
      hash += ((int) mode + ((int) CONST_INT << 7)
	       + INTVAL (x) + (INTVAL (x) >> HASHBITS));
      /* Mask to HASHBITS bits to keep the result non-negative.  */
      return ((1 << HASHBITS) - 1) & hash;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (int) code + (int) GET_MODE (x);
      {
	int i;
	for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
	  {
	    int tem = XINT (x, i);
	    hash += ((1 << HASHBITS) - 1) & (tem + (tem >> HASHBITS));
	  }
      }
      return hash;

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* Use `and' to ensure a positive number.  */
      return (hash + ((int) LABEL_REF << 7)
	      + ((int) XEXP (x, 0) & ((1 << HASHBITS) - 1)));

    case SYMBOL_REF:
      return (hash + ((int) SYMBOL_REF << 7)
	      + ((int) XEXP (x, 0) & ((1 << HASHBITS) - 1)));

    case MEM:
      if (MEM_VOLATILE_P (x))
	{
	  do_not_record = 1;
	  return 0;
	}
      if (! RTX_UNCHANGING_P (x))
	{
	  hash_arg_in_memory = 1;
	  if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
	}
      /* Now that we have already found this special case,
	 might as well speed it up as much as possible.  */
      hash += (int) MEM;
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      /* These have side effects or are unpredictable: never record.  */
      do_not_record = 1;
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	{
	  do_not_record = 1;
	  return 0;
	}
      /* Non-volatile ASM_OPERANDS falls through to the generic code.  */
    }

  /* Generic case: hash the code, the mode, and every operand
     according to the rtx format string.  */
  i = GET_RTX_LENGTH (code) - 1;
  hash += (int) code + (int) GET_MODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx tem = XEXP (x, i);
	  rtx tem1;

	  /* If the operand is a REG that is equivalent to a constant, hash
	     as if we were hashing the constant, since we will be comparing
	     that way.  */
	  if (tem != 0 && GET_CODE (tem) == REG
	      && REGNO_QTY_VALID_P (REGNO (tem))
	      && qty_mode[reg_qty[REGNO (tem)]] == GET_MODE (tem)
	      && (tem1 = qty_const[reg_qty[REGNO (tem)]]) != 0
	      && CONSTANT_P (tem1))
	    tem = tem1;

	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = tem;
	      goto repeat;
	    }
	  hash += canon_hash (tem, 0);
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  hash += canon_hash (XVECEXP (x, i, j), 0);
      else if (fmt[i] == 's')
	{
	  register char *p = XSTR (x, i);
	  if (p)
	    while (*p)
	      {
		register int tem = *p++;
		hash += ((1 << HASHBITS) - 1) & (tem + (tem >> HASHBITS));
	      }
	}
      else if (fmt[i] == 'i')
	{
	  register int tem = XINT (x, i);
	  hash += ((1 << HASHBITS) - 1) & (tem + (tem >> HASHBITS));
	}
      else
	abort ();
    }
  return hash;
}
1902
1903/* Like canon_hash but with no side effects. */
1904
1905static int
1906safe_hash (x, mode)
1907 rtx x;
1908 enum machine_mode mode;
1909{
1910 int save_do_not_record = do_not_record;
1911 int save_hash_arg_in_memory = hash_arg_in_memory;
1912 int save_hash_arg_in_struct = hash_arg_in_struct;
1913 int hash = canon_hash (x, mode);
1914 hash_arg_in_memory = save_hash_arg_in_memory;
1915 hash_arg_in_struct = save_hash_arg_in_struct;
1916 do_not_record = save_do_not_record;
1917 return hash;
1918}
1919\f
1920/* Return 1 iff X and Y would canonicalize into the same thing,
1921 without actually constructing the canonicalization of either one.
1922 If VALIDATE is nonzero,
1923 we assume X is an expression being processed from the rtl
1924 and Y was found in the hash table. We check register refs
1925 in Y for being marked as valid.
1926
1927 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
1928 that is known to be in the register. Ordinarily, we don't allow them
1929 to match, because letting them match would cause unpredictable results
1930 in all the places that search a hash table chain for an equivalent
1931 for a given value. A possible equivalent that has different structure
1932 has its hash code computed from different data. Whether the hash code
   is the same as that of the given value is pure luck.  */
1934
1935static int
1936exp_equiv_p (x, y, validate, equal_values)
1937 rtx x, y;
1938 int validate;
1939 int equal_values;
1940{
1941 register int i;
1942 register enum rtx_code code;
1943 register char *fmt;
1944
1945 /* Note: it is incorrect to assume an expression is equivalent to itself
1946 if VALIDATE is nonzero. */
1947 if (x == y && !validate)
1948 return 1;
1949 if (x == 0 || y == 0)
1950 return x == y;
1951
1952 code = GET_CODE (x);
1953 if (code != GET_CODE (y))
1954 {
1955 if (!equal_values)
1956 return 0;
1957
1958 /* If X is a constant and Y is a register or vice versa, they may be
1959 equivalent. We only have to validate if Y is a register. */
1960 if (CONSTANT_P (x) && GET_CODE (y) == REG
1961 && REGNO_QTY_VALID_P (REGNO (y))
1962 && GET_MODE (y) == qty_mode[reg_qty[REGNO (y)]]
1963 && rtx_equal_p (x, qty_const[reg_qty[REGNO (y)]])
1964 && (! validate || reg_in_table[REGNO (y)] == reg_tick[REGNO (y)]))
1965 return 1;
1966
1967 if (CONSTANT_P (y) && code == REG
1968 && REGNO_QTY_VALID_P (REGNO (x))
1969 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
1970 && rtx_equal_p (y, qty_const[reg_qty[REGNO (x)]]))
1971 return 1;
1972
1973 return 0;
1974 }
1975
1976 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1977 if (GET_MODE (x) != GET_MODE (y))
1978 return 0;
1979
1980 switch (code)
1981 {
1982 case PC:
1983 case CC0:
1984 return x == y;
1985
1986 case CONST_INT:
1987 return XINT (x, 0) == XINT (y, 0);
1988
1989 case LABEL_REF:
1990 case SYMBOL_REF:
1991 return XEXP (x, 0) == XEXP (y, 0);
1992
1993 case REG:
1994 {
1995 int regno = REGNO (y);
1996 int endregno
1997 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1998 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
1999 int i;
2000
2001 /* If the quantities are not the same, the expressions are not
2002 equivalent. If there are and we are not to validate, they
2003 are equivalent. Otherwise, ensure all regs are up-to-date. */
2004
2005 if (reg_qty[REGNO (x)] != reg_qty[regno])
2006 return 0;
2007
2008 if (! validate)
2009 return 1;
2010
2011 for (i = regno; i < endregno; i++)
2012 if (reg_in_table[i] != reg_tick[i])
2013 return 0;
2014
2015 return 1;
2016 }
2017
2018 /* For commutative operations, check both orders. */
2019 case PLUS:
2020 case MULT:
2021 case AND:
2022 case IOR:
2023 case XOR:
2024 case NE:
2025 case EQ:
2026 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2027 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2028 validate, equal_values))
2029 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2030 validate, equal_values)
2031 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2032 validate, equal_values)));
2033 }
2034
2035 /* Compare the elements. If any pair of corresponding elements
2036 fail to match, return 0 for the whole things. */
2037
2038 fmt = GET_RTX_FORMAT (code);
2039 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2040 {
2041 if (fmt[i] == 'e')
2042 {
2043 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2044 return 0;
2045 }
2046 else if (fmt[i] == 'E')
2047 {
2048 int j;
2049 if (XVECLEN (x, i) != XVECLEN (y, i))
2050 return 0;
2051 for (j = 0; j < XVECLEN (x, i); j++)
2052 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2053 validate, equal_values))
2054 return 0;
2055 }
2056 else if (fmt[i] == 's')
2057 {
2058 if (strcmp (XSTR (x, i), XSTR (y, i)))
2059 return 0;
2060 }
2061 else if (fmt[i] == 'i')
2062 {
2063 if (XINT (x, i) != XINT (y, i))
2064 return 0;
2065 }
2066 else if (fmt[i] != '0')
2067 abort ();
2068 }
2069 return 1;
2070}
2071\f
2072/* Return 1 iff any subexpression of X matches Y.
2073 Here we do not require that X or Y be valid (for registers referred to)
2074 for being in the hash table. */
2075
2076int
2077refers_to_p (x, y)
2078 rtx x, y;
2079{
2080 register int i;
2081 register enum rtx_code code;
2082 register char *fmt;
2083
2084 repeat:
2085 if (x == y)
2086 return 1;
2087 if (x == 0 || y == 0)
2088 return 0;
2089
2090 code = GET_CODE (x);
2091 /* If X as a whole has the same code as Y, they may match.
2092 If so, return 1. */
2093 if (code == GET_CODE (y))
2094 {
2095 if (exp_equiv_p (x, y, 0, 1))
2096 return 1;
2097 }
2098
2099 /* X does not match, so try its subexpressions. */
2100
2101 fmt = GET_RTX_FORMAT (code);
2102 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2103 if (fmt[i] == 'e')
2104 {
2105 if (i == 0)
2106 {
2107 x = XEXP (x, 0);
2108 goto repeat;
2109 }
2110 else
2111 if (refers_to_p (XEXP (x, i), y))
2112 return 1;
2113 }
2114 else if (fmt[i] == 'E')
2115 {
2116 int j;
2117 for (j = 0; j < XVECLEN (x, i); j++)
2118 if (refers_to_p (XVECEXP (x, i, j), y))
2119 return 1;
2120 }
2121
2122 return 0;
2123}
2124\f
/* Return 1 iff any subexpression of X refers to memory
   at an address of BASE plus some offset
   such that any of the bytes' offsets fall between START (inclusive)
   and END (exclusive).

   The value is undefined if X is a varying address.
   This function is not used in such cases.

   When used in the cse pass, `qty_const' is nonzero, and it is used
   to treat an address that is a register with a known constant value
   as if it were that constant value.
   In the loop pass, `qty_const' is zero, so this is not done.  */

int
refers_to_mem_p (x, base, start, end)
     rtx x, base;
     int start, end;
{
  register int i;
  register enum rtx_code code;
  register char *fmt;

  /* Normalize a constant BASE to const0_rtx plus an offset folded
     into START and END, so the comparisons below need only one form.  */
  if (GET_CODE (base) == CONST_INT)
    {
      start += INTVAL (base);
      end += INTVAL (base);
      base = const0_rtx;
    }

 repeat:
  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    {
      register rtx addr = XEXP (x, 0);	/* Get the address.  */
      int myend;

      /* I accumulates the constant byte offset of ADDR from BASE.  */
      i = 0;
      /* Replace a register address by its known constant value, if any
	 (cse pass only; qty_const is 0 in the loop pass).  */
      if (GET_CODE (addr) == REG
	  /* qty_const is 0 when outside the cse pass;
	     at such times, this info is not available.  */
	  && qty_const != 0
	  && REGNO_QTY_VALID_P (REGNO (addr))
	  && GET_MODE (addr) == qty_mode[reg_qty[REGNO (addr)]]
	  && qty_const[reg_qty[REGNO (addr)]] != 0)
	addr = qty_const[reg_qty[REGNO (addr)]];
      /* Likewise for (plus (reg) (const_int)): fold the constant into I
	 and substitute the register's known constant value.  */
      else if (GET_CODE (addr) == PLUS
	       && GET_CODE (XEXP (addr, 1)) == CONST_INT
	       && GET_CODE (XEXP (addr, 0)) == REG
	       && qty_const != 0
	       && REGNO_QTY_VALID_P (REGNO (XEXP (addr, 0)))
	       && (GET_MODE (XEXP (addr, 0))
		   == qty_mode[reg_qty[REGNO (XEXP (addr, 0))]])
	       && qty_const[reg_qty[REGNO (XEXP (addr, 0))]])
	{
	  i = INTVAL (XEXP (addr, 1));
	  addr = qty_const[reg_qty[REGNO (XEXP (addr, 0))]];
	}

    check_addr:
      /* Strip a CONST wrapper so PLUS inside it is visible.  */
      if (GET_CODE (addr) == CONST)
	addr = XEXP (addr, 0);

      /* If ADDR is BASE, or BASE plus an integer, put
	 the integer in I.  */
      if (GET_CODE (addr) == PLUS
	  && XEXP (addr, 0) == base
	  && GET_CODE (XEXP (addr, 1)) == CONST_INT)
	i += INTVAL (XEXP (addr, 1));
      else if (GET_CODE (addr) == LO_SUM)
	{
	  /* Both must be LO_SUMs for a meaningful comparison; a LO_SUM
	     against any other form is conservatively assumed to overlap.  */
	  if (GET_CODE (base) != LO_SUM)
	    return 1;
	  /* The REG component of the LO_SUM is known by the
	     const value in the XEXP part.  */
	  addr = XEXP (addr, 1);
	  base = XEXP (base, 1);
	  i = 0;
	  if (GET_CODE (base) == CONST)
	    base = XEXP (base, 0);
	  /* Fold a constant term of BASE into the START/END window.  */
	  if (GET_CODE (base) == PLUS
	      && GET_CODE (XEXP (base, 1)) == CONST_INT)
	    {
	      int tem = INTVAL (XEXP (base, 1));
	      start += tem;
	      end += tem;
	      base = XEXP (base, 0);
	    }
	  goto check_addr;
	}
      else if (GET_CODE (base) == LO_SUM)
	{
	  /* ADDR is not a LO_SUM but BASE is; unwrap BASE the same way
	     and retry the comparison.  */
	  base = XEXP (base, 1);
	  if (GET_CODE (base) == CONST)
	    base = XEXP (base, 0);
	  if (GET_CODE (base) == PLUS
	      && GET_CODE (XEXP (base, 1)) == CONST_INT)
	    {
	      int tem = INTVAL (XEXP (base, 1));
	      start += tem;
	      end += tem;
	      base = XEXP (base, 0);
	    }
	  goto check_addr;
	}
      else if (GET_CODE (addr) == CONST_INT && base == const0_rtx)
	i = INTVAL (addr);
      else if (addr != base)
	/* ADDR has no constant relation to BASE; no overlap provable.  */
	return 0;

      /* The MEM covers bytes [I, MYEND); report overlap with [START, END).  */
      myend = i + GET_MODE_SIZE (GET_MODE (x));
      return myend > start && i < end;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
	if (i == 0)
	  {
	    /* Tail position: iterate instead of recursing.  */
	    x = XEXP (x, 0);
	    goto repeat;
	  }
	else
	  if (refers_to_mem_p (XEXP (x, i), base, start, end))
	    return 1;
      }
    else if (fmt[i] == 'E')
      {
	int j;
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (refers_to_mem_p (XVECEXP (x, i, j), base, start, end))
	    return 1;
      }

  return 0;
}
2266
2267/* Nonzero if X refers to memory at a varying address;
2268 except that a register which has at the moment a known constant value
2269 isn't considered variable. */
2270
2271static int
2272cse_rtx_addr_varies_p (x)
2273 rtx x;
2274{
2275 /* We need not check for X and the equivalence class being of the same
2276 mode because if X is equivalent to a constant in some mode, it
2277 doesn't vary in any mode. */
2278
2279 if (GET_CODE (x) == MEM
2280 && GET_CODE (XEXP (x, 0)) == REG
2281 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2282 && GET_MODE (XEXP (x, 0)) == qty_mode[reg_qty[REGNO (XEXP (x, 0))]]
2283 && qty_const[reg_qty[REGNO (XEXP (x, 0))]] != 0)
2284 return 0;
2285
2286 if (GET_CODE (x) == MEM
2287 && GET_CODE (XEXP (x, 0)) == PLUS
2288 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2289 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2290 && REGNO_QTY_VALID_P (REGNO (XEXP (XEXP (x, 0), 0)))
2291 && (GET_MODE (XEXP (XEXP (x, 0), 0))
2292 == qty_mode[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2293 && qty_const[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2294 return 0;
2295
2296 return rtx_addr_varies_p (x);
2297}
2298\f
2299/* Canonicalize an expression:
2300 replace each register reference inside it
2301 with the "oldest" equivalent register.
2302
2303 If INSN is non-zero and we are replacing a pseudo with a hard register
2304 or vice versa, verify that INSN remains valid after we make our
2305 substitution. */
2306
2307static rtx
2308canon_reg (x, insn)
2309 rtx x;
2310 rtx insn;
2311{
2312 register int i;
2313 register enum rtx_code code;
2314 register char *fmt;
2315
2316 if (x == 0)
2317 return x;
2318
2319 code = GET_CODE (x);
2320 switch (code)
2321 {
2322 case PC:
2323 case CC0:
2324 case CONST:
2325 case CONST_INT:
2326 case CONST_DOUBLE:
2327 case SYMBOL_REF:
2328 case LABEL_REF:
2329 case ADDR_VEC:
2330 case ADDR_DIFF_VEC:
2331 return x;
2332
2333 case REG:
2334 {
2335 register int first;
2336
2337 /* Never replace a hard reg, because hard regs can appear
2338 in more than one machine mode, and we must preserve the mode
2339 of each occurrence. Also, some hard regs appear in
2340 MEMs that are shared and mustn't be altered. Don't try to
2341 replace any reg that maps to a reg of class NO_REGS. */
2342 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2343 || ! REGNO_QTY_VALID_P (REGNO (x)))
2344 return x;
2345
2346 first = qty_first_reg[reg_qty[REGNO (x)]];
2347 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2348 : REGNO_REG_CLASS (first) == NO_REGS ? x
2349 : gen_rtx (REG, qty_mode[reg_qty[REGNO (x)]], first));
2350 }
2351 }
2352
2353 fmt = GET_RTX_FORMAT (code);
2354 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2355 {
2356 register int j;
2357
2358 if (fmt[i] == 'e')
2359 {
2360 rtx new = canon_reg (XEXP (x, i), insn);
2361
2362 /* If replacing pseudo with hard reg or vice versa, ensure the
2363 insn remains valid. */
2364 if (new && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2365 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
2366 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER)))
2367 validate_change (insn, &XEXP (x, i), new, 0);
2368 else
2369 XEXP (x, i) = new;
2370 }
2371 else if (fmt[i] == 'E')
2372 for (j = 0; j < XVECLEN (x, i); j++)
2373 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2374 }
2375
2376 return x;
2377}
2378\f
/* LOC is a location with INSN that is an operand address (the contents of
   a MEM).  Find the best equivalent address to use that is valid for this
   insn.

   On most CISC machines, complicated address modes are costly, and rtx_cost
   is a good approximation for that cost.  However, most RISC machines have
   only a few (usually only one) memory reference formats.  If an address is
   valid at all, it is often just as cheap as any other address.  Hence, for
   RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
   costs of various addresses.  For two addresses of equal cost, choose the one
   with the highest `rtx_cost' value as that has the potential of eliminating
   the most insns.  For equal costs, we choose the first in the equivalence
   class.  Note that we ignore the fact that pseudo registers are cheaper
   than hard registers here because we would also prefer the pseudo registers.
   */

void
find_best_addr (insn, loc)
     rtx insn;
     rtx *loc;
{
  struct table_elt *elt, *p;
  rtx addr = *loc;
  int our_cost;
  int found_better = 1;
  int save_do_not_record = do_not_record;
  int save_hash_arg_in_memory = hash_arg_in_memory;
  int save_hash_arg_in_struct = hash_arg_in_struct;
  int hash_code;
  int addr_volatile;
  int regno;

  /* Do not try to replace constant addresses or addresses of local and
     argument slots.  These MEM expressions are made only once and inserted
     in many instructions, as well as being used to control symbol table
     output.  It is not safe to clobber them.

     There are some uncommon cases where the address is already in a register
     for some reason, but we cannot take advantage of that because we have
     no easy way to unshare the MEM.  In addition, looking up all stack
     addresses is costly.  */
  if ((GET_CODE (addr) == PLUS
       && GET_CODE (XEXP (addr, 0)) == REG
       && GET_CODE (XEXP (addr, 1)) == CONST_INT
       && (regno = REGNO (XEXP (addr, 0)),
	   regno == FRAME_POINTER_REGNUM || regno == ARG_POINTER_REGNUM))
      || (GET_CODE (addr) == REG
	  && (regno = REGNO (addr),
	      regno == FRAME_POINTER_REGNUM || regno == ARG_POINTER_REGNUM))
      || CONSTANT_ADDRESS_P (addr))
    return;

  /* If this address is not simply a register, try to fold it.  This will
     sometimes simplify the expression.  Many simplifications
     will not be valid, but some, usually applying the associative rule, will
     be valid and produce better code.  */
  if (GET_CODE (addr) != REG
      && validate_change (insn, loc, fold_rtx (addr, insn), 0))
    addr = *loc;

  /* If this address is not in the hash table, we can't do any better.
     Also, ignore if volatile.  (HASH may set do_not_record, so save
     and restore the hashing globals around the call.)  */
  do_not_record = 0;
  hash_code = HASH (addr, Pmode);
  addr_volatile = do_not_record;
  do_not_record = save_do_not_record;
  hash_arg_in_memory = save_hash_arg_in_memory;
  hash_arg_in_struct = save_hash_arg_in_struct;

  if (addr_volatile)
    return;

  elt = lookup (addr, hash_code, Pmode);

  if (elt == 0)
    return;

#ifndef ADDRESS_COST
  /* CISC-style: pick the cheapest equivalent by rtx cost alone.  */
  our_cost = elt->cost;

  /* Find the lowest cost below ours that works.  */
  for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
    if (elt->cost < our_cost
	&& (GET_CODE (elt->exp) == REG || exp_equiv_p (elt->exp, elt->exp, 1, 0))
	&& validate_change (insn, loc, canon_reg (copy_rtx (elt->exp), 0), 0))
      return;

#else

  /* We need to find the best (under the criteria documented above) entry in
     the class that is valid.  We use the `flag' field to indicate choices
     that were invalid and iterate until we can't find a better one that
     hasn't already been tried.  */

  for (p = elt->first_same_value; p; p = p->next_same_value)
    p->flag = 0;

  while (found_better)
    {
      /* (elt->cost + 1) >> 1 undoes the doubling COST applies to
	 non-register expressions, giving a comparable rtx cost.  */
      int best_addr_cost = ADDRESS_COST (*loc);
      int best_rtx_cost = (elt->cost + 1) >> 1;
      struct table_elt *best_elt = elt;

      found_better = 0;
      for (p = elt->first_same_value; p; p = p->next_same_value)
	if (! p->flag
	    && (GET_CODE (p->exp) == REG || exp_equiv_p (p->exp, p->exp, 1, 0))
	    && (ADDRESS_COST (p->exp) < best_addr_cost
		|| (ADDRESS_COST (p->exp) == best_addr_cost
		    && (p->cost + 1) >> 1 > best_rtx_cost)))
	  {
	    found_better = 1;
	    best_addr_cost = ADDRESS_COST (p->exp);
	    best_rtx_cost = (p->cost + 1) >> 1;
	    best_elt = p;
	  }

      if (found_better)
	{
	  if (validate_change (insn, loc,
			       canon_reg (copy_rtx (best_elt->exp), 0), 0))
	    return;
	  else
	    /* Mark it tried so the next pass skips it.  */
	    best_elt->flag = 1;
	}
    }
#endif
}
2507\f
/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
   operation (EQ, NE, GT, etc.), follow it back through the hash table and
   what values are being compared.

   *PARG1 and *PARG2 are updated to contain the rtx representing the values
   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
   compared to produce cc0.

   The return value is the comparison operator and is either the code of
   A or the code corresponding to the inverse of the comparison.  */

static enum rtx_code
find_comparison_args (code, parg1, parg2)
     enum rtx_code code;
     rtx *parg1, *parg2;
{
  rtx arg1, arg2;

  arg1 = *parg1, arg2 = *parg2;

  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */

  while (arg2 == const0_rtx)
    {
      /* Set non-zero when we find something of interest.  */
      rtx x = 0;
      int reverse_code = 0;
      struct table_elt *p = 0;

      /* If arg1 is a COMPARE, extract the comparison arguments from it.
	 On machines with CC0, this is the only case that can occur, since
	 fold_rtx will return the COMPARE or item being compared with zero
	 when given CC0.  */

      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
	x = arg1;

      /* If ARG1 is a comparison operator and CODE is testing for
	 STORE_FLAG_VALUE, get the inner arguments.  */

      else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
	{
	  if (code == NE || (code == LT && STORE_FLAG_VALUE == -1))
	    x = arg1;
	  else if (code == EQ || (code == GE && STORE_FLAG_VALUE == -1))
	    x = arg1, reverse_code = 1;
	}

      /* ??? We could also check for

	 (ne (and (eq (...) (const_int 1))) (const_int 0))

	 and related forms, but let's wait until we see them occurring.  */

      if (x == 0)
	/* Look up ARG1 in the hash table and see if it has an equivalence
	   that lets us see what is being compared.  */
	p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
		    GET_MODE (arg1));
      if (p) p = p->first_same_value;

      for (; p; p = p->next_same_value)
	{
	  enum machine_mode inner_mode = GET_MODE (p->exp);

	  /* If the entry isn't valid, skip it.  */
	  if (! exp_equiv_p (p->exp, p->exp, 1, 0))
	    continue;

	  if (GET_CODE (p->exp) == COMPARE
	      /* Another possibility is that this machine has a compare insn
		 that includes the comparison code.  In that case, ARG1 would
		 be equivalent to a comparison operation that would set ARG1 to
		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
		 ORIG_CODE is the actual comparison being done; if it is an EQ,
		 we must reverse ORIG_CODE.  On machine with a negative value
		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
	      || ((code == NE
		   || (code == LT
		       && inner_mode != VOIDmode
		       && GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_INT
		       && (STORE_FLAG_VALUE
			   & (1 << (GET_MODE_BITSIZE (inner_mode) - 1)))))
		  && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
	    {
	      x = p->exp;
	      break;
	    }
	  else if ((code == EQ
		    || (code == GE
			&& inner_mode != VOIDmode
			&& GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_INT
			&& (STORE_FLAG_VALUE
			    & (1 << (GET_MODE_BITSIZE (inner_mode) - 1)))))
		   && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
	    {
	      reverse_code = 1;
	      x = p->exp;
	      break;
	    }

	  /* If this is fp + constant, the equivalent is a better operand since
	     it may let us predict the value of the comparison.  */
	  else if (NONZERO_BASE_PLUS_P (p->exp))
	    {
	      arg1 = p->exp;
	      continue;
	    }
	}

      /* If we didn't find a useful equivalence for ARG1, we are done.
	 Otherwise, set up for the next iteration.  */
      if (x == 0)
	break;

      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
      if (GET_RTX_CLASS (GET_CODE (x)) == '<')
	code = GET_CODE (x);

      /* An EQ-style equivalence means the truth sense is inverted.  */
      if (reverse_code)
	code = reverse_condition (code);
    }

  /* Return our results.  */
  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);

  return code;
}
2637\f
/* Try to simplify a unary operation CODE whose output mode is to be
   MODE with input operand OP whose mode was originally OP_MODE.
   Return zero if no simplification can be made.  */

rtx
simplify_unary_operation (code, mode, op, op_mode)
     enum rtx_code code;
     enum machine_mode mode;
     rtx op;
     enum machine_mode op_mode;
{
  register int width = GET_MODE_BITSIZE (mode);

  /* The order of these tests is critical so that, for example, we don't
     check the wrong mode (input vs. output) for a conversion operation,
     such as FIX.  At some point, this should be simplified.  */

  /* NOTE(review): if REAL_IS_NOT_DOUBLE is defined without
     REAL_ARITHMETIC, this whole #if block is compiled out and the
     `else if' that follows the #endif dangles -- verify that no
     supported configuration uses that combination.  */
#if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  if (code == FLOAT && GET_CODE (op) == CONST_INT)
    {
      REAL_VALUE_TYPE d;

#ifdef REAL_ARITHMETIC
      REAL_VALUE_FROM_INT (d, INTVAL (op), INTVAL (op) < 0 ? ~0 : 0);
#else
      d = (double) INTVAL (op);
#endif
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
    }
  else if (code == UNSIGNED_FLOAT && GET_CODE (op) == CONST_INT)
    {
      REAL_VALUE_TYPE d;

#ifdef REAL_ARITHMETIC
      REAL_VALUE_FROM_INT (d, INTVAL (op), 0);
#else
      d = (double) (unsigned int) INTVAL (op);
#endif
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
    }

  else if (code == FLOAT && GET_CODE (op) == CONST_DOUBLE
	   && GET_MODE (op) == VOIDmode)
    {
      /* A VOIDmode CONST_DOUBLE holds a two-word integer value.  */
      REAL_VALUE_TYPE d;

#ifdef REAL_ARITHMETIC
      REAL_VALUE_FROM_INT (d, CONST_DOUBLE_LOW (op), CONST_DOUBLE_HIGH (op));
#else
      if (CONST_DOUBLE_HIGH (op) < 0)
	{
	  /* Convert the negative two-word value via its complement to
	     avoid overflow in intermediate host arithmetic.  */
	  d = (double) (~ CONST_DOUBLE_HIGH (op));
	  d *= ((double) (1 << (HOST_BITS_PER_INT / 2))
		* (double) (1 << (HOST_BITS_PER_INT / 2)));
	  d += (double) (unsigned) (~ CONST_DOUBLE_LOW (op));
	  d = (- d - 1.0);
	}
      else
	{
	  d = (double) CONST_DOUBLE_HIGH (op);
	  d *= ((double) (1 << (HOST_BITS_PER_INT / 2))
		* (double) (1 << (HOST_BITS_PER_INT / 2)));
	  d += (double) (unsigned) CONST_DOUBLE_LOW (op);
	}
#endif  /* REAL_ARITHMETIC */
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
    }
  else if (code == UNSIGNED_FLOAT && GET_CODE (op) == CONST_DOUBLE
	   && GET_MODE (op) == VOIDmode)
    {
      REAL_VALUE_TYPE d;

#ifdef REAL_ARITHMETIC
      REAL_VALUE_FROM_UNSIGNED_INT (d, CONST_DOUBLE_LOW (op),
				    CONST_DOUBLE_HIGH (op));
#else
      d = (double) CONST_DOUBLE_HIGH (op);
      d *= ((double) (1 << (HOST_BITS_PER_INT / 2))
	    * (double) (1 << (HOST_BITS_PER_INT / 2)));
      d += (double) (unsigned) CONST_DOUBLE_LOW (op);
#endif  /* REAL_ARITHMETIC */
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
    }
#endif

  /* Fold a unary operation on a CONST_INT that fits in a host int.  */
  else if (GET_CODE (op) == CONST_INT
	   && width <= HOST_BITS_PER_INT && width > 0)
    {
      register int arg0 = INTVAL (op);
      register int val;

      switch (code)
	{
	case NOT:
	  val = ~ arg0;
	  break;

	case NEG:
	  val = - arg0;
	  break;

	case ABS:
	  val = (arg0 >= 0 ? arg0 : - arg0);
	  break;

	case FFS:
	  /* Don't use ffs here.  Instead, get low order bit and then its
	     number.  If arg0 is zero, this will return 0, as desired.  */
	  arg0 &= GET_MODE_MASK (mode);
	  val = exact_log2 (arg0 & (- arg0)) + 1;
	  break;

	case TRUNCATE:
	  val = arg0;
	  break;

	case ZERO_EXTEND:
	  if (op_mode == VOIDmode)
	    op_mode = mode;
	  if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_INT)
	    val = arg0;
	  else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_INT)
	    /* Mask down to the width of the source mode.  */
	    val = arg0 & ~((-1) << GET_MODE_BITSIZE (op_mode));
	  else
	    return 0;
	  break;

	case SIGN_EXTEND:
	  if (op_mode == VOIDmode)
	    op_mode = mode;
	  if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_INT)
	    val = arg0;
	  else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_INT)
	    {
	      /* Mask down, then subtract out the sign bit's weight if
		 the source value was negative in OP_MODE.  */
	      val = arg0 & ~((-1) << GET_MODE_BITSIZE (op_mode));
	      if (val & (1 << (GET_MODE_BITSIZE (op_mode) - 1)))
		val -= 1 << GET_MODE_BITSIZE (op_mode);
	    }
	  else
	    return 0;
	  break;

	case SQRT:
	  return 0;

	default:
	  abort ();
	}

      /* Clear the bits that don't belong in our mode,
	 unless they and our sign bit are all one.
	 So we get either a reasonable negative value or a reasonable
	 unsigned value for this mode.  */
      if (width < HOST_BITS_PER_INT
	  && ((val & ((-1) << (width - 1))) != ((-1) << (width - 1))))
	val &= (1 << width) - 1;

      return gen_rtx (CONST_INT, VOIDmode, val);
    }

  /* We can do some operations on integer CONST_DOUBLEs.  Also allow
     for a DImode operation on a CONST_INT.  */
  else if (GET_MODE (op) == VOIDmode
	   && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
    {
      int l1, h1, lv, hv;

      if (GET_CODE (op) == CONST_DOUBLE)
	l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
      else
	/* Sign-extend the CONST_INT into a two-word value.  */
	l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;

      switch (code)
	{
	case NOT:
	  lv = ~ l1;
	  hv = ~ h1;
	  break;

	case NEG:
	  neg_double (l1, h1, &lv, &hv);
	  break;

	case ABS:
	  if (h1 < 0)
	    neg_double (l1, h1, &lv, &hv);
	  else
	    lv = l1, hv = h1;
	  break;

	case FFS:
	  hv = 0;
	  if (l1 == 0)
	    /* Low word is zero; the first set bit is in the high word.  */
	    lv = HOST_BITS_PER_INT + exact_log2 (h1 & (-h1)) + 1;
	  else
	    lv = exact_log2 (l1 & (-l1)) + 1;
	  break;

	case TRUNCATE:
	  if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT)
	    return gen_rtx (CONST_INT, VOIDmode, l1 & GET_MODE_MASK (mode));
	  else
	    return 0;
	  break;

	case SQRT:
	  return 0;

	default:
	  return 0;
	}

      return immed_double_const (lv, hv, mode);
    }

#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  /* Fold a unary operation on a floating-point constant, guarding
     against host floating traps with a setjmp handler.  */
  else if (GET_CODE (op) == CONST_DOUBLE
	   && GET_MODE_CLASS (mode) == MODE_FLOAT)
    {
      REAL_VALUE_TYPE d;
      jmp_buf handler;
      rtx x;

      if (setjmp (handler))
	/* There used to be a warning here, but that is inadvisable.
	   People may want to cause traps, and the natural way
	   to do it should not get a warning.  */
	return 0;

      set_float_handler (handler);

      REAL_VALUE_FROM_CONST_DOUBLE (d, op);

      switch (code)
	{
	case NEG:
	  d = REAL_VALUE_NEGATE (d);
	  break;

	case ABS:
	  if (REAL_VALUE_NEGATIVE (d))
	    d = REAL_VALUE_NEGATE (d);
	  break;

	case FLOAT_TRUNCATE:
	  d = (double) REAL_VALUE_TRUNCATE (mode, d);
	  break;

	case FLOAT_EXTEND:
	  /* All this does is change the mode.  */
	  break;

	case FIX:
	  d = (double) REAL_VALUE_FIX_TRUNCATE (d);
	  break;

	case UNSIGNED_FIX:
	  d = (double) REAL_VALUE_UNSIGNED_FIX_TRUNCATE (d);
	  break;

	case SQRT:
	  return 0;

	default:
	  abort ();
	}

      x = immed_real_const_1 (d, mode);
      set_float_handler (0);
      return x;
    }
  /* Fold FIX/UNSIGNED_FIX of a floating constant to an integer mode.  */
  else if (GET_CODE (op) == CONST_DOUBLE && GET_MODE_CLASS (mode) == MODE_INT
	   && width <= HOST_BITS_PER_INT && width > 0)
    {
      REAL_VALUE_TYPE d;
      jmp_buf handler;
      rtx x;
      int val;

      if (setjmp (handler))
	return 0;

      set_float_handler (handler);

      REAL_VALUE_FROM_CONST_DOUBLE (d, op);

      switch (code)
	{
	case FIX:
	  val = REAL_VALUE_FIX (d);
	  break;

	case UNSIGNED_FIX:
	  val = REAL_VALUE_UNSIGNED_FIX (d);
	  break;

	default:
	  abort ();
	}

      set_float_handler (0);

      /* Clear the bits that don't belong in our mode,
	 unless they and our sign bit are all one.
	 So we get either a reasonable negative value or a reasonable
	 unsigned value for this mode.  */
      if (width < HOST_BITS_PER_INT
	  && ((val & ((-1) << (width - 1))) != ((-1) << (width - 1))))
	val &= (1 << width) - 1;

      return gen_rtx (CONST_INT, VOIDmode, val);
    }
#endif
  /* This was formerly used only for non-IEEE float.
     eggert@twinsun.com says it is safe for IEEE also.  */
  else
    {
      /* There are some simplifications we can do even if the operands
	 aren't constant.  */
      switch (code)
	{
	case NEG:
	case NOT:
	  /* (not (not X)) == X, similarly for NEG.  */
	  if (GET_CODE (op) == code)
	    return XEXP (op, 0);
	  break;

	case SIGN_EXTEND:
	  /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
	     becomes just the MINUS if its mode is MODE.  This allows
	     folding switch statements on machines using casesi (such as
	     the Vax).  */
	  if (GET_CODE (op) == TRUNCATE
	      && GET_MODE (XEXP (op, 0)) == mode
	      && GET_CODE (XEXP (op, 0)) == MINUS
	      && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
	      && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
	    return XEXP (op, 0);
	  break;
	}

      return 0;
    }
}
2983\f
2984/* Simplify a binary operation CODE with result mode MODE, operating on OP0
2985 and OP1. Return 0 if no simplification is possible.
2986
2987 Don't use this for relational operations such as EQ or LT.
2988 Use simplify_relational_operation instead. */
2989
2990rtx
2991simplify_binary_operation (code, mode, op0, op1)
2992 enum rtx_code code;
2993 enum machine_mode mode;
2994 rtx op0, op1;
2995{
2996 register int arg0, arg1, arg0s, arg1s;
2997 int val;
2998 int width = GET_MODE_BITSIZE (mode);
2999
3000 /* Relational operations don't work here. We must know the mode
3001 of the operands in order to do the comparison correctly.
3002 Assuming a full word can give incorrect results.
3003 Consider comparing 128 with -128 in QImode. */
3004
3005 if (GET_RTX_CLASS (code) == '<')
3006 abort ();
3007
3008#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3009 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3010 && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3011 && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3012 {
3013 REAL_VALUE_TYPE f0, f1, value;
3014 jmp_buf handler;
3015
3016 if (setjmp (handler))
3017 return 0;
3018
3019 set_float_handler (handler);
3020
3021 REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3022 REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
3023 f0 = REAL_VALUE_TRUNCATE (mode, f0);
3024 f1 = REAL_VALUE_TRUNCATE (mode, f1);
3025
3026#ifdef REAL_ARITHMETIC
3027 REAL_ARITHMETIC (value, code, f0, f1);
3028#else
3029 switch (code)
3030 {
3031 case PLUS:
3032 value = f0 + f1;
3033 break;
3034 case MINUS:
3035 value = f0 - f1;
3036 break;
3037 case MULT:
3038 value = f0 * f1;
3039 break;
3040 case DIV:
3041#ifndef REAL_INFINITY
3042 if (f1 == 0)
3043 abort ();
3044#endif
3045 value = f0 / f1;
3046 break;
3047 case SMIN:
3048 value = MIN (f0, f1);
3049 break;
3050 case SMAX:
3051 value = MAX (f0, f1);
3052 break;
3053 default:
3054 abort ();
3055 }
3056#endif
3057
3058 set_float_handler (0);
3059 value = REAL_VALUE_TRUNCATE (mode, value);
3060 return immed_real_const_1 (value, mode);
3061 }
3062
3063 /* We can fold some multi-word operations. */
3064 else if (GET_MODE_CLASS (mode) == MODE_INT
3065 && GET_CODE (op0) == CONST_DOUBLE
3066 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
3067 {
3068 int l1, l2, h1, h2, lv, hv;
3069
3070 l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3071
3072 if (GET_CODE (op1) == CONST_DOUBLE)
3073 l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3074 else
3075 l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3076
3077 switch (code)
3078 {
3079 case MINUS:
3080 /* A - B == A + (-B). */
3081 neg_double (l2, h2, &lv, &hv);
3082 l2 = lv, h2 = hv;
3083
3084 /* .. fall through ... */
3085
3086 case PLUS:
3087 add_double (l1, h1, l2, h2, &lv, &hv);
3088 break;
3089
3090 case MULT:
3091 mul_double (l1, h1, l2, h2, &lv, &hv);
3092 break;
3093
3094 case DIV: case MOD: case UDIV: case UMOD:
3095 /* We'd need to include tree.h to do this and it doesn't seem worth
3096 it. */
3097 return 0;
3098
3099 case AND:
3100 lv = l1 & l2, hv = h1 & h2;
3101 break;
3102
3103 case IOR:
3104 lv = l1 | l2, hv = h1 | h2;
3105 break;
3106
3107 case XOR:
3108 lv = l1 ^ l2, hv = h1 ^ h2;
3109 break;
3110
3111 case SMIN:
3112 if (h1 < h2 || (h1 == h2 && (unsigned) l1 < (unsigned) l2))
3113 lv = l1, hv = h1;
3114 else
3115 lv = l2, hv = h2;
3116 break;
3117
3118 case SMAX:
3119 if (h1 > h2 || (h1 == h2 && (unsigned) l1 > (unsigned) l2))
3120 lv = l1, hv = h1;
3121 else
3122 lv = l2, hv = h2;
3123 break;
3124
3125 case UMIN:
3126 if ((unsigned) h1 < (unsigned) h2
3127 || (h1 == h2 && (unsigned) l1 < (unsigned) l2))
3128 lv = l1, hv = h1;
3129 else
3130 lv = l2, hv = h2;
3131 break;
3132
3133 case UMAX:
3134 if ((unsigned) h1 > (unsigned) h2
3135 || (h1 == h2 && (unsigned) l1 > (unsigned) l2))
3136 lv = l1, hv = h1;
3137 else
3138 lv = l2, hv = h2;
3139 break;
3140
3141 case LSHIFTRT: case ASHIFTRT:
3142 case ASHIFT: case LSHIFT:
3143 case ROTATE: case ROTATERT:
3144#ifdef SHIFT_COUNT_TRUNCATED
3145 l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
3146#endif
3147
3148 if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3149 return 0;
3150
3151 if (code == LSHIFTRT || code == ASHIFTRT)
3152 rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3153 code == ASHIFTRT);
3154 else if (code == ASHIFT || code == LSHIFT)
3155 lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3156 code == ASHIFT);
3157 else if (code == ROTATE)
3158 lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3159 else /* code == ROTATERT */
3160 rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3161 break;
3162
3163 default:
3164 return 0;
3165 }
3166
3167 return immed_double_const (lv, hv, mode);
3168 }
3169#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3170
3171 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
3172 || width > HOST_BITS_PER_INT || width == 0)
3173 {
3174 /* Even if we can't compute a constant result,
3175 there are some cases worth simplifying. */
3176
3177 switch (code)
3178 {
3179 case PLUS:
3180 /* In IEEE floating point, x+0 is not the same as x. Similarly
3181 for the other optimizations below. */
3182 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3183 && GET_MODE_CLASS (mode) != MODE_INT)
3184 break;
3185
3186 if (op1 == CONST0_RTX (mode))
3187 return op0;
3188
3189 /* Strip off any surrounding CONSTs. They don't matter in any of
3190 the cases below. */
3191 if (GET_CODE (op0) == CONST)
3192 op0 = XEXP (op0, 0);
3193 if (GET_CODE (op1) == CONST)
3194 op1 = XEXP (op1, 0);
3195
3196 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3197 if (GET_CODE (op0) == NEG)
3198 {
3199 rtx tem = simplify_binary_operation (MINUS, mode,
3200 op1, XEXP (op0, 0));
3201 return tem ? tem : gen_rtx (MINUS, mode, op1, XEXP (op0, 0));
3202 }
3203 else if (GET_CODE (op1) == NEG)
3204 {
3205 rtx tem = simplify_binary_operation (MINUS, mode,
3206 op0, XEXP (op1, 0));
3207 return tem ? tem : gen_rtx (MINUS, mode, op0, XEXP (op1, 0));
3208 }
3209
3210 /* Don't use the associative law for floating point.
3211 The inaccuracy makes it nonassociative,
3212 and subtle programs can break if operations are associated. */
3213 if (GET_MODE_CLASS (mode) != MODE_INT)
3214 break;
3215
3216 /* (a - b) + b -> a, similarly a + (b - a) -> a */
3217 if (GET_CODE (op0) == MINUS
3218 && rtx_equal_p (XEXP (op0, 1), op1) && ! side_effects_p (op1))
3219 return XEXP (op0, 0);
3220
3221 if (GET_CODE (op1) == MINUS
3222 && rtx_equal_p (XEXP (op1, 1), op0) && ! side_effects_p (op0))
3223 return XEXP (op1, 0);
3224
3225 /* (c1 - a) + c2 becomes (c1 + c2) - a. */
3226 if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == MINUS
3227 && GET_CODE (XEXP (op0, 0)) == CONST_INT)
3228 {
3229 rtx tem = simplify_binary_operation (PLUS, mode, op1,
3230 XEXP (op0, 0));
3231
3232 return tem ? gen_rtx (MINUS, mode, tem, XEXP (op0, 1)) : 0;
3233 }
3234
3235 /* Handle both-operands-constant cases. */
3236 if (CONSTANT_P (op0) && CONSTANT_P (op1)
3237 && GET_CODE (op0) != CONST_DOUBLE
3238 && GET_CODE (op1) != CONST_DOUBLE
3239 && GET_MODE_CLASS (mode) == MODE_INT)
3240 {
3241 if (GET_CODE (op1) == CONST_INT)
3242 return plus_constant (op0, INTVAL (op1));
3243 else if (GET_CODE (op0) == CONST_INT)
3244 return plus_constant (op1, INTVAL (op0));
3245 else
3246 return gen_rtx (CONST, mode,
3247 gen_rtx (PLUS, mode,
3248 GET_CODE (op0) == CONST
3249 ? XEXP (op0, 0) : op0,
3250 GET_CODE (op1) == CONST
3251 ? XEXP (op1, 0) : op1));
3252 }
3253 else if (GET_CODE (op1) == CONST_INT
3254 && GET_CODE (op0) == PLUS
3255 && (CONSTANT_P (XEXP (op0, 0))
3256 || CONSTANT_P (XEXP (op0, 1))))
3257 /* constant + (variable + constant)
3258 can result if an index register is made constant.
3259 We simplify this by adding the constants.
3260 If we did not, it would become an invalid address. */
3261 return plus_constant (op0, INTVAL (op1));
3262 break;
3263
3264 case COMPARE:
3265#ifdef HAVE_cc0
3266 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3267 using cc0, in which case we want to leave it as a COMPARE
3268 so we can distinguish it from a register-register-copy.
3269
3270 In IEEE floating point, x-0 is not the same as x. */
3271
3272 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3273 || GET_MODE_CLASS (mode) == MODE_INT)
3274 && op1 == CONST0_RTX (mode))
3275 return op0;
3276#else
3277 /* Do nothing here. */
3278#endif
3279 break;
3280
3281 case MINUS:
21648b45
RK
3282 /* None of these optimizations can be done for IEEE
3283 floating point. */
3284 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3285 && GET_MODE_CLASS (mode) != MODE_INT)
3286 break;
3287
3288 /* We can't assume x-x is 0 even with non-IEEE floating point. */
7afe21cc
RK
3289 if (rtx_equal_p (op0, op1)
3290 && ! side_effects_p (op0)
7afe21cc
RK
3291 && GET_MODE_CLASS (mode) != MODE_FLOAT)
3292 return const0_rtx;
3293
3294 /* Change subtraction from zero into negation. */
3295 if (op0 == CONST0_RTX (mode))
3296 return gen_rtx (NEG, mode, op1);
3297
7afe21cc
RK
3298 /* Subtracting 0 has no effect. */
3299 if (op1 == CONST0_RTX (mode))
3300 return op0;
3301
3302 /* Strip off any surrounding CONSTs. They don't matter in any of
3303 the cases below. */
3304 if (GET_CODE (op0) == CONST)
3305 op0 = XEXP (op0, 0);
3306 if (GET_CODE (op1) == CONST)
3307 op1 = XEXP (op1, 0);
3308
3309 /* (a - (-b)) -> (a + b). */
3310 if (GET_CODE (op1) == NEG)
3311 {
3312 rtx tem = simplify_binary_operation (PLUS, mode,
3313 op0, XEXP (op1, 0));
3314 return tem ? tem : gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
3315 }
3316
3317 /* Don't use the associative law for floating point.
3318 The inaccuracy makes it nonassociative,
3319 and subtle programs can break if operations are associated. */
3320 if (GET_MODE_CLASS (mode) != MODE_INT)
3321 break;
3322
3323 /* (a + b) - a -> b, and (b - (a + b)) -> -a */
3324 if (GET_CODE (op0) == PLUS
3325 && rtx_equal_p (XEXP (op0, 0), op1)
3326 && ! side_effects_p (op1))
3327 return XEXP (op0, 1);
3328 else if (GET_CODE (op0) == PLUS
3329 && rtx_equal_p (XEXP (op0, 1), op1)
3330 && ! side_effects_p (op1))
3331 return XEXP (op0, 0);
3332
3333 if (GET_CODE (op1) == PLUS
3334 && rtx_equal_p (XEXP (op1, 0), op0)
3335 && ! side_effects_p (op0))
3336 {
3337 rtx tem = simplify_unary_operation (NEG, mode, XEXP (op1, 1),
3338 mode);
3339
3340 return tem ? tem : gen_rtx (NEG, mode, XEXP (op1, 1));
3341 }
3342 else if (GET_CODE (op1) == PLUS
3343 && rtx_equal_p (XEXP (op1, 1), op0)
3344 && ! side_effects_p (op0))
3345 {
3346 rtx tem = simplify_unary_operation (NEG, mode, XEXP (op1, 0),
3347 mode);
3348
3349 return tem ? tem : gen_rtx (NEG, mode, XEXP (op1, 0));
3350 }
3351
3352 /* a - (a - b) -> b */
3353 if (GET_CODE (op1) == MINUS && rtx_equal_p (op0, XEXP (op1, 0))
3354 && ! side_effects_p (op0))
3355 return XEXP (op1, 1);
3356
3357 /* (a +/- b) - (a +/- c) can be simplified. Do variants of
3358 this involving commutativity. The most common case is
3359 (a + C1) - (a + C2), but it's not hard to do all the cases. */
3360 if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS)
3361 && (GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS))
3362 {
3363 rtx lhs0 = XEXP (op0, 0), lhs1 = XEXP (op0, 1);
3364 rtx rhs0 = XEXP (op1, 0), rhs1 = XEXP (op1, 1);
3365 int lhs_neg = GET_CODE (op0) == MINUS;
3366 int rhs_neg = GET_CODE (op1) == MINUS;
3367 rtx lhs = 0, rhs = 0;
3368
3369 /* Set LHS and RHS to the two different terms. */
3370 if (rtx_equal_p (lhs0, rhs0) && ! side_effects_p (lhs0))
3371 lhs = lhs1, rhs = rhs1;
3372 else if (! rhs_neg && rtx_equal_p (lhs0, rhs1)
3373 && ! side_effects_p (lhs0))
3374 lhs = lhs1, rhs = rhs0;
3375 else if (! lhs_neg && rtx_equal_p (lhs1, rhs0)
3376 && ! side_effects_p (lhs1))
3377 lhs = lhs0, rhs = rhs1;
3378 else if (! lhs_neg && ! rhs_neg && rtx_equal_p (lhs1, rhs1)
3379 && ! side_effects_p (lhs1))
3380 lhs = lhs0, rhs = rhs0;
3381
3382 /* The RHS is the operand of a MINUS, so its negation
3383 status should be complemented. */
3384 rhs_neg = ! rhs_neg;
3385
3386 /* If we found two values equal, form the sum or difference
3387 of the remaining two terms. */
3388 if (lhs)
3389 {
3390 rtx tem = simplify_binary_operation (lhs_neg == rhs_neg
3391 ? PLUS : MINUS,
3392 mode,
3393 lhs_neg ? rhs : lhs,
3394 lhs_neg ? lhs : rhs);
3395 if (tem == 0)
3396 tem = gen_rtx (lhs_neg == rhs_neg
3397 ? PLUS : MINUS,
3398 mode, lhs_neg ? rhs : lhs,
3399 lhs_neg ? lhs : rhs);
3400
3401 /* If both sides negated, negate result. */
3402 if (lhs_neg && rhs_neg)
3403 {
3404 rtx tem1
3405 = simplify_unary_operation (NEG, mode, tem, mode);
3406 if (tem1 == 0)
3407 tem1 = gen_rtx (NEG, mode, tem);
3408 tem = tem1;
3409 }
3410
3411 return tem;
3412 }
3413
3414 return 0;
3415 }
3416
3417 /* c1 - (a + c2) becomes (c1 - c2) - a. */
3418 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == PLUS
3419 && GET_CODE (XEXP (op1, 1)) == CONST_INT)
3420 {
3421 rtx tem = simplify_binary_operation (MINUS, mode, op0,
3422 XEXP (op1, 1));
3423
3424 return tem ? gen_rtx (MINUS, mode, tem, XEXP (op1, 0)) : 0;
3425 }
3426
3427 /* c1 - (c2 - a) becomes (c1 - c2) + a. */
3428 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == MINUS
3429 && GET_CODE (XEXP (op1, 0)) == CONST_INT)
3430 {
3431 rtx tem = simplify_binary_operation (MINUS, mode, op0,
3432 XEXP (op1, 0));
3433
3434 return (tem && GET_CODE (tem) == CONST_INT
3435 ? plus_constant (XEXP (op1, 1), INTVAL (tem))
3436 : 0);
3437 }
3438
3439 /* Don't let a relocatable value get a negative coeff. */
3440 if (GET_CODE (op1) == CONST_INT)
3441 return plus_constant (op0, - INTVAL (op1));
3442 break;
3443
3444 case MULT:
3445 if (op1 == constm1_rtx)
3446 {
3447 rtx tem = simplify_unary_operation (NEG, mode, op0, mode);
3448
3449 return tem ? tem : gen_rtx (NEG, mode, op0);
3450 }
3451
3452 /* In IEEE floating point, x*0 is not always 0. */
3453 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3454 || GET_MODE_CLASS (mode) == MODE_INT)
3455 && op1 == CONST0_RTX (mode)
3456 && ! side_effects_p (op0))
3457 return op1;
3458
3459 /* In IEEE floating point, x*1 is not equivalent to x for nans.
3460 However, ANSI says we can drop signals,
3461 so we can do this anyway. */
3462 if (op1 == CONST1_RTX (mode))
3463 return op0;
3464
3465 /* Convert multiply by constant power of two into shift. */
3466 if (GET_CODE (op1) == CONST_INT
3467 && (val = exact_log2 (INTVAL (op1))) >= 0)
3468 return gen_rtx (ASHIFT, mode, op0,
3469 gen_rtx (CONST_INT, VOIDmode, val));
3470
3471 if (GET_CODE (op1) == CONST_DOUBLE
3472 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
3473 {
3474 REAL_VALUE_TYPE d;
3475 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3476
3477 /* x*2 is x+x and x*(-1) is -x */
3478 if (REAL_VALUES_EQUAL (d, dconst2)
3479 && GET_MODE (op0) == mode)
3480 return gen_rtx (PLUS, mode, op0, copy_rtx (op0));
3481
3482 else if (REAL_VALUES_EQUAL (d, dconstm1)
3483 && GET_MODE (op0) == mode)
3484 return gen_rtx (NEG, mode, op0);
3485 }
3486 break;
3487
3488 case IOR:
3489 if (op1 == const0_rtx)
3490 return op0;
3491 if (GET_CODE (op1) == CONST_INT
3492 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3493 return op1;
3494 if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3495 return op0;
3496 /* A | (~A) -> -1 */
3497 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3498 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3499 && ! side_effects_p (op0))
3500 return constm1_rtx;
3501 break;
3502
3503 case XOR:
3504 if (op1 == const0_rtx)
3505 return op0;
3506 if (GET_CODE (op1) == CONST_INT
3507 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3508 return gen_rtx (NOT, mode, op0);
3509 if (op0 == op1 && ! side_effects_p (op0))
3510 return const0_rtx;
3511 break;
3512
3513 case AND:
3514 if (op1 == const0_rtx && ! side_effects_p (op0))
3515 return const0_rtx;
3516 if (GET_CODE (op1) == CONST_INT
3517 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3518 return op0;
3519 if (op0 == op1 && ! side_effects_p (op0))
3520 return op0;
3521 /* A & (~A) -> 0 */
3522 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3523 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3524 && ! side_effects_p (op0))
3525 return const0_rtx;
3526 break;
3527
3528 case UDIV:
3529 /* Convert divide by power of two into shift (divide by 1 handled
3530 below). */
3531 if (GET_CODE (op1) == CONST_INT
3532 && (arg1 = exact_log2 (INTVAL (op1))) > 0)
3533 return gen_rtx (LSHIFTRT, mode, op0,
3534 gen_rtx (CONST_INT, VOIDmode, arg1));
3535
3536 /* ... fall through ... */
3537
3538 case DIV:
3539 if (op1 == CONST1_RTX (mode))
3540 return op0;
3541 else if (op0 == CONST0_RTX (mode)
3542 && ! side_effects_p (op1))
3543 return op0;
3544#if 0 /* Turned off till an expert says this is a safe thing to do. */
3545#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3546 /* Change division by a constant into multiplication. */
3547 else if (GET_CODE (op1) == CONST_DOUBLE
3548 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
3549 && op1 != CONST0_RTX (mode))
3550 {
3551 REAL_VALUE_TYPE d;
3552 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3553 if (REAL_VALUES_EQUAL (d, dconst0))
3554 abort();
3555#if defined (REAL_ARITHMETIC)
3556 REAL_ARITHMETIC (d, RDIV_EXPR, dconst1, d);
3557 return gen_rtx (MULT, mode, op0,
3558 CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
3559#else
3560 return gen_rtx (MULT, mode, op0,
3561 CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
3562 }
3563#endif
3564#endif
3565#endif
3566 break;
3567
3568 case UMOD:
3569 /* Handle modulus by power of two (mod with 1 handled below). */
3570 if (GET_CODE (op1) == CONST_INT
3571 && exact_log2 (INTVAL (op1)) > 0)
3572 return gen_rtx (AND, mode, op0,
3573 gen_rtx (CONST_INT, VOIDmode, INTVAL (op1) - 1));
3574
3575 /* ... fall through ... */
3576
3577 case MOD:
3578 if ((op0 == const0_rtx || op1 == const1_rtx)
3579 && ! side_effects_p (op0) && ! side_effects_p (op1))
3580 return const0_rtx;
3581 break;
3582
3583 case ROTATERT:
3584 case ROTATE:
3585 /* Rotating ~0 always results in ~0. */
3586 if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_INT
3587 && INTVAL (op0) == GET_MODE_MASK (mode)
3588 && ! side_effects_p (op1))
3589 return op0;
3590
3591 /* ... fall through ... */
3592
3593 case LSHIFT:
3594 case ASHIFT:
3595 case ASHIFTRT:
3596 case LSHIFTRT:
3597 if (op1 == const0_rtx)
3598 return op0;
3599 if (op0 == const0_rtx && ! side_effects_p (op1))
3600 return op0;
3601 break;
3602
3603 case SMIN:
3604 if (width <= HOST_BITS_PER_INT && GET_CODE (op1) == CONST_INT
3605 && INTVAL (op1) == 1 << (width -1)
3606 && ! side_effects_p (op0))
3607 return op1;
3608 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3609 return op0;
3610 break;
3611
3612 case SMAX:
3613 if (width <= HOST_BITS_PER_INT && GET_CODE (op1) == CONST_INT
3614 && INTVAL (op1) == GET_MODE_MASK (mode) >> 1
3615 && ! side_effects_p (op0))
3616 return op1;
3617 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3618 return op0;
3619 break;
3620
3621 case UMIN:
3622 if (op1 == const0_rtx && ! side_effects_p (op0))
3623 return op1;
3624 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3625 return op0;
3626 break;
3627
3628 case UMAX:
3629 if (op1 == constm1_rtx && ! side_effects_p (op0))
3630 return op1;
3631 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3632 return op0;
3633 break;
3634
3635 default:
3636 abort ();
3637 }
3638
3639 return 0;
3640 }
3641
3642 /* Get the integer argument values in two forms:
3643 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
3644
3645 arg0 = INTVAL (op0);
3646 arg1 = INTVAL (op1);
3647
3648 if (width < HOST_BITS_PER_INT)
3649 {
3650 arg0 &= (1 << width) - 1;
3651 arg1 &= (1 << width) - 1;
3652
3653 arg0s = arg0;
3654 if (arg0s & (1 << (width - 1)))
3655 arg0s |= ((-1) << width);
3656
3657 arg1s = arg1;
3658 if (arg1s & (1 << (width - 1)))
3659 arg1s |= ((-1) << width);
3660 }
3661 else
3662 {
3663 arg0s = arg0;
3664 arg1s = arg1;
3665 }
3666
3667 /* Compute the value of the arithmetic. */
3668
3669 switch (code)
3670 {
3671 case PLUS:
538b78e7 3672 val = arg0s + arg1s;
7afe21cc
RK
3673 break;
3674
3675 case MINUS:
538b78e7 3676 val = arg0s - arg1s;
7afe21cc
RK
3677 break;
3678
3679 case MULT:
3680 val = arg0s * arg1s;
3681 break;
3682
3683 case DIV:
3684 if (arg1s == 0)
3685 return 0;
3686 val = arg0s / arg1s;
3687 break;
3688
3689 case MOD:
3690 if (arg1s == 0)
3691 return 0;
3692 val = arg0s % arg1s;
3693 break;
3694
3695 case UDIV:
3696 if (arg1 == 0)
3697 return 0;
3698 val = (unsigned) arg0 / arg1;
3699 break;
3700
3701 case UMOD:
3702 if (arg1 == 0)
3703 return 0;
3704 val = (unsigned) arg0 % arg1;
3705 break;
3706
3707 case AND:
3708 val = arg0 & arg1;
3709 break;
3710
3711 case IOR:
3712 val = arg0 | arg1;
3713 break;
3714
3715 case XOR:
3716 val = arg0 ^ arg1;
3717 break;
3718
3719 case LSHIFTRT:
3720 /* If shift count is undefined, don't fold it; let the machine do
3721 what it wants. But truncate it if the machine will do that. */
3722 if (arg1 < 0)
3723 return 0;
3724
3725#ifdef SHIFT_COUNT_TRUNCATED
3726 arg1 &= (BITS_PER_WORD - 1);
3727#endif
3728
3729 if (arg1 >= width)
3730 return 0;
3731
3732 val = ((unsigned) arg0) >> arg1;
3733 break;
3734
3735 case ASHIFT:
3736 case LSHIFT:
3737 if (arg1 < 0)
3738 return 0;
3739
3740#ifdef SHIFT_COUNT_TRUNCATED
3741 arg1 &= (BITS_PER_WORD - 1);
3742#endif
3743
3744 if (arg1 >= width)
3745 return 0;
3746
3747 val = ((unsigned) arg0) << arg1;
3748 break;
3749
3750 case ASHIFTRT:
3751 if (arg1 < 0)
3752 return 0;
3753
3754#ifdef SHIFT_COUNT_TRUNCATED
3755 arg1 &= (BITS_PER_WORD - 1);
3756#endif
3757
3758 if (arg1 >= width)
3759 return 0;
3760
3761 val = arg0s >> arg1;
3762 break;
3763
3764 case ROTATERT:
3765 if (arg1 < 0)
3766 return 0;
3767
3768 arg1 %= width;
3769 val = ((((unsigned) arg0) << (width - arg1))
3770 | (((unsigned) arg0) >> arg1));
3771 break;
3772
3773 case ROTATE:
3774 if (arg1 < 0)
3775 return 0;
3776
3777 arg1 %= width;
3778 val = ((((unsigned) arg0) << arg1)
3779 | (((unsigned) arg0) >> (width - arg1)));
3780 break;
3781
3782 case COMPARE:
3783 /* Do nothing here. */
3784 return 0;
3785
830a38ee
RS
3786 case SMIN:
3787 val = arg0s <= arg1s ? arg0s : arg1s;
3788 break;
3789
3790 case UMIN:
3791 val = (unsigned int)arg0 <= (unsigned int)arg1 ? arg0 : arg1;
3792 break;
3793
3794 case SMAX:
3795 val = arg0s > arg1s ? arg0s : arg1s;
3796 break;
3797
3798 case UMAX:
3799 val = (unsigned int)arg0 > (unsigned int)arg1 ? arg0 : arg1;
3800 break;
3801
7afe21cc
RK
3802 default:
3803 abort ();
3804 }
3805
3806 /* Clear the bits that don't belong in our mode, unless they and our sign
3807 bit are all one. So we get either a reasonable negative value or a
3808 reasonable unsigned value for this mode. */
3809 if (width < HOST_BITS_PER_INT
3810 && ((val & ((-1) << (width - 1))) != ((-1) << (width - 1))))
3811 val &= (1 << width) - 1;
3812
3813 return gen_rtx (CONST_INT, VOIDmode, val);
3814}
3815\f
3816/* Like simplify_binary_operation except used for relational operators.
3817 MODE is the mode of the operands, not that of the result. */
3818
3819rtx
3820simplify_relational_operation (code, mode, op0, op1)
3821 enum rtx_code code;
3822 enum machine_mode mode;
3823 rtx op0, op1;
3824{
3825 register int arg0, arg1, arg0s, arg1s;
3826 int val;
3827 int width = GET_MODE_BITSIZE (mode);
3828
3829 /* If op0 is a compare, extract the comparison arguments from it. */
3830 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
3831 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3832
3833 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
3834 || width > HOST_BITS_PER_INT || width == 0)
3835 {
3836 /* Even if we can't compute a constant result,
3837 there are some cases worth simplifying. */
3838
3839 /* For non-IEEE floating-point, if the two operands are equal, we know
3840 the result. */
3841 if (rtx_equal_p (op0, op1)
3842 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3843 || GET_MODE_CLASS (GET_MODE (op0)) != MODE_FLOAT))
3844 return (code == EQ || code == GE || code == LE || code == LEU
3845 || code == GEU) ? const_true_rtx : const0_rtx;
3846 else if (GET_CODE (op0) == CONST_DOUBLE
3847 && GET_CODE (op1) == CONST_DOUBLE
3848 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
3849 {
3850 REAL_VALUE_TYPE d0, d1;
3851 int value;
3852 jmp_buf handler;
3853 int op0lt, op1lt, equal;
3854
3855 if (setjmp (handler))
3856 return 0;
3857
3858 set_float_handler (handler);
3859 REAL_VALUE_FROM_CONST_DOUBLE (d0, op0);
3860 REAL_VALUE_FROM_CONST_DOUBLE (d1, op1);
3861 equal = REAL_VALUES_EQUAL (d0, d1);
3862 op0lt = REAL_VALUES_LESS (d0, d1);
3863 op1lt = REAL_VALUES_LESS (d1, d0);
3864 set_float_handler (0);
3865
3866 switch (code)
3867 {
3868 case EQ:
3869 return equal ? const_true_rtx : const0_rtx;
3870 case NE:
3871 return !equal ? const_true_rtx : const0_rtx;
3872 case LE:
3873 return equal || op0lt ? const_true_rtx : const0_rtx;
3874 case LT:
3875 return op0lt ? const_true_rtx : const0_rtx;
3876 case GE:
3877 return equal || op1lt ? const_true_rtx : const0_rtx;
3878 case GT:
3879 return op1lt ? const_true_rtx : const0_rtx;
3880 }
3881 }
3882
3883 switch (code)
3884 {
3885 case EQ:
3886 {
3887#if 0
3888 /* We can't make this assumption due to #pragma weak */
3889 if (CONSTANT_P (op0) && op1 == const0_rtx)
3890 return const0_rtx;
3891#endif
8b3686ed
RK
3892 if (NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx
3893 /* On some machines, the ap reg can be 0 sometimes. */
3894 && op0 != arg_pointer_rtx)
7afe21cc
RK
3895 return const0_rtx;
3896 break;
3897 }
3898
3899 case NE:
3900#if 0
3901 /* We can't make this assumption due to #pragma weak */
3902 if (CONSTANT_P (op0) && op1 == const0_rtx)
3903 return const_true_rtx;
3904#endif
8b3686ed
RK
3905 if (NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx
3906 /* On some machines, the ap reg can be 0 sometimes. */
3907 && op0 != arg_pointer_rtx)
7afe21cc
RK
3908 return const_true_rtx;
3909 break;
3910
3911 case GEU:
3912 /* Unsigned values are never negative, but we must be sure we are
3913 actually comparing a value, not a CC operand. */
3914 if (op1 == const0_rtx
3915 && GET_MODE_CLASS (mode) == MODE_INT)
3916 return const_true_rtx;
3917 break;
3918
3919 case LTU:
3920 if (op1 == const0_rtx
3921 && GET_MODE_CLASS (mode) == MODE_INT)
3922 return const0_rtx;
3923 break;
3924
3925 case LEU:
3926 /* Unsigned values are never greater than the largest
3927 unsigned value. */
3928 if (GET_CODE (op1) == CONST_INT
3929 && INTVAL (op1) == GET_MODE_MASK (mode)
3930 && GET_MODE_CLASS (mode) == MODE_INT)
3931 return const_true_rtx;
3932 break;
3933
3934 case GTU:
3935 if (GET_CODE (op1) == CONST_INT
3936 && INTVAL (op1) == GET_MODE_MASK (mode)
3937 && GET_MODE_CLASS (mode) == MODE_INT)
3938 return const0_rtx;
3939 break;
3940 }
3941
3942 return 0;
3943 }
3944
3945 /* Get the integer argument values in two forms:
3946 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
3947
3948 arg0 = INTVAL (op0);
3949 arg1 = INTVAL (op1);
3950
3951 if (width < HOST_BITS_PER_INT)
3952 {
3953 arg0 &= (1 << width) - 1;
3954 arg1 &= (1 << width) - 1;
3955
3956 arg0s = arg0;
3957 if (arg0s & (1 << (width - 1)))
3958 arg0s |= ((-1) << width);
3959
3960 arg1s = arg1;
3961 if (arg1s & (1 << (width - 1)))
3962 arg1s |= ((-1) << width);
3963 }
3964 else
3965 {
3966 arg0s = arg0;
3967 arg1s = arg1;
3968 }
3969
3970 /* Compute the value of the arithmetic. */
3971
3972 switch (code)
3973 {
3974 case NE:
3975 val = arg0 != arg1 ? STORE_FLAG_VALUE : 0;
3976 break;
3977
3978 case EQ:
3979 val = arg0 == arg1 ? STORE_FLAG_VALUE : 0;
3980 break;
3981
3982 case LE:
3983 val = arg0s <= arg1s ? STORE_FLAG_VALUE : 0;
3984 break;
3985
3986 case LT:
3987 val = arg0s < arg1s ? STORE_FLAG_VALUE : 0;
3988 break;
3989
3990 case GE:
3991 val = arg0s >= arg1s ? STORE_FLAG_VALUE : 0;
3992 break;
3993
3994 case GT:
3995 val = arg0s > arg1s ? STORE_FLAG_VALUE : 0;
3996 break;
3997
3998 case LEU:
3999 val = ((unsigned) arg0) <= ((unsigned) arg1) ? STORE_FLAG_VALUE : 0;
4000 break;
4001
4002 case LTU:
4003 val = ((unsigned) arg0) < ((unsigned) arg1) ? STORE_FLAG_VALUE : 0;
4004 break;
4005
4006 case GEU:
4007 val = ((unsigned) arg0) >= ((unsigned) arg1) ? STORE_FLAG_VALUE : 0;
4008 break;
4009
4010 case GTU:
4011 val = ((unsigned) arg0) > ((unsigned) arg1) ? STORE_FLAG_VALUE : 0;
4012 break;
4013
4014 default:
4015 abort ();
4016 }
4017
4018 /* Clear the bits that don't belong in our mode, unless they and our sign
4019 bit are all one. So we get either a reasonable negative value or a
4020 reasonable unsigned value for this mode. */
4021 if (width < HOST_BITS_PER_INT
4022 && ((val & ((-1) << (width - 1))) != ((-1) << (width - 1))))
4023 val &= (1 << width) - 1;
4024
4025 return gen_rtx (CONST_INT, VOIDmode, val);
4026}
4027\f
4028/* Simplify CODE, an operation with result mode MODE and three operands,
4029 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4030 a constant. Return 0 if no simplifications is possible. */
4031
4032rtx
4033simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4034 enum rtx_code code;
4035 enum machine_mode mode, op0_mode;
4036 rtx op0, op1, op2;
4037{
4038 int width = GET_MODE_BITSIZE (mode);
4039
4040 /* VOIDmode means "infinite" precision. */
4041 if (width == 0)
4042 width = HOST_BITS_PER_INT;
4043
4044 switch (code)
4045 {
4046 case SIGN_EXTRACT:
4047 case ZERO_EXTRACT:
4048 if (GET_CODE (op0) == CONST_INT
4049 && GET_CODE (op1) == CONST_INT
4050 && GET_CODE (op2) == CONST_INT
4051 && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
4052 && width <= HOST_BITS_PER_INT)
4053 {
4054 /* Extracting a bit-field from a constant */
4055 int val = INTVAL (op0);
4056
4057#if BITS_BIG_ENDIAN
4058 val >>= (GET_MODE_BITSIZE (op0_mode) - INTVAL (op2) - INTVAL (op1));
4059#else
4060 val >>= INTVAL (op2);
4061#endif
4062 if (HOST_BITS_PER_INT != INTVAL (op1))
4063 {
4064 /* First zero-extend. */
4065 val &= (1 << INTVAL (op1)) - 1;
4066 /* If desired, propagate sign bit. */
4067 if (code == SIGN_EXTRACT && (val & (1 << (INTVAL (op1) - 1))))
fc3ffe83 4068 val |= ~ ((1 << INTVAL (op1)) - 1);
7afe21cc
RK
4069 }
4070
4071 /* Clear the bits that don't belong in our mode,
4072 unless they and our sign bit are all one.
4073 So we get either a reasonable negative value or a reasonable
4074 unsigned value for this mode. */
4075 if (width < HOST_BITS_PER_INT
4076 && ((val & ((-1) << (width - 1))) != ((-1) << (width - 1))))
4077 val &= (1 << width) - 1;
4078
4079 return gen_rtx (CONST_INT, VOIDmode, val);
4080 }
4081 break;
4082
4083 case IF_THEN_ELSE:
4084 if (GET_CODE (op0) == CONST_INT)
4085 return op0 != const0_rtx ? op1 : op2;
4086 break;
4087
4088 default:
4089 abort ();
4090 }
4091
4092 return 0;
4093}
4094\f
4095/* If X is a nontrivial arithmetic operation on an argument
4096 for which a constant value can be determined, return
4097 the result of operating on that value, as a constant.
4098 Otherwise, return X, possibly with one or more operands
4099 modified by recursive calls to this function.
4100
4101 If X is a register whose contents are known, we do NOT
4102 return those contents. This is because an instruction that
4103 uses a register is usually faster than one that uses a constant.
4104
4105 INSN is the insn that we may be modifying. If it is 0, make a copy
4106 of X before modifying it. */
4107
4108static rtx
4109fold_rtx (x, insn)
4110 rtx x;
4111 rtx insn;
4112{
4113 register enum rtx_code code;
4114 register enum machine_mode mode;
4115 register char *fmt;
4116 register int i, val;
4117 rtx new = 0;
4118 int copied = 0;
4119 int must_swap = 0;
4120
4121 /* Folded equivalents of first two operands of X. */
4122 rtx folded_arg0;
4123 rtx folded_arg1;
4124
4125 /* Constant equivalents of first three operands of X;
4126 0 when no such equivalent is known. */
4127 rtx const_arg0;
4128 rtx const_arg1;
4129 rtx const_arg2;
4130
4131 /* The mode of the first operand of X. We need this for sign and zero
4132 extends. */
4133 enum machine_mode mode_arg0;
4134
4135 if (x == 0)
4136 return x;
4137
4138 mode = GET_MODE (x);
4139 code = GET_CODE (x);
4140 switch (code)
4141 {
4142 case CONST:
4143 case CONST_INT:
4144 case CONST_DOUBLE:
4145 case SYMBOL_REF:
4146 case LABEL_REF:
4147 case REG:
4148 /* No use simplifying an EXPR_LIST
4149 since they are used only for lists of args
4150 in a function call's REG_EQUAL note. */
4151 case EXPR_LIST:
4152 return x;
4153
4154#ifdef HAVE_cc0
4155 case CC0:
4156 return prev_insn_cc0;
4157#endif
4158
4159 case PC:
4160 /* If the next insn is a CODE_LABEL followed by a jump table,
4161 PC's value is a LABEL_REF pointing to that label. That
4162 lets us fold switch statements on the Vax. */
4163 if (insn && GET_CODE (insn) == JUMP_INSN)
4164 {
4165 rtx next = next_nonnote_insn (insn);
4166
4167 if (next && GET_CODE (next) == CODE_LABEL
4168 && NEXT_INSN (next) != 0
4169 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
4170 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
4171 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
4172 return gen_rtx (LABEL_REF, Pmode, next);
4173 }
4174 break;
4175
4176 case SUBREG:
4177 /* If this is a single word of a multi-word value, see if we previously
4178 assigned a value to that word. */
4179 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
4180 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
4181 && (new = lookup_as_function (x, CONST_INT)) != 0)
4182 return new;
4183
e5f6a288
RK
4184 /* If this is a paradoxical SUBREG, we can't do anything with
4185 it because we have no idea what value the extra bits would have. */
4186 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4187 return x;
4188
7afe21cc
RK
4189 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
4190 We might be able to if the SUBREG is extracting a single word in an
4191 integral mode or extracting the low part. */
4192
4193 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
4194 const_arg0 = equiv_constant (folded_arg0);
4195 if (const_arg0)
4196 folded_arg0 = const_arg0;
4197
4198 if (folded_arg0 != SUBREG_REG (x))
4199 {
4200 new = 0;
4201
4202 if (GET_MODE_CLASS (mode) == MODE_INT
4203 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
4204 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
4205 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
4206 GET_MODE (SUBREG_REG (x)));
4207 if (new == 0 && subreg_lowpart_p (x))
4208 new = gen_lowpart_if_possible (mode, folded_arg0);
4209 if (new)
4210 return new;
4211 }
e5f6a288
RK
4212
4213 /* If this is a narrowing SUBREG and our operand is a REG, see if
4214 we can find an equivalence for REG that is a arithmetic operation
4215 in a wider mode where both operands are paradoxical SUBREGs
4216 from objects of our result mode. In that case, we couldn't report
4217 an equivalent value for that operation, since we don't know what the
4218 extra bits will be. But we can find an equivalence for this SUBREG
4219 by folding that operation is the narrow mode. This allows us to
4220 fold arithmetic in narrow modes when the machine only supports
4221 word-sized arithmetic. */
4222
4223 if (GET_CODE (folded_arg0) == REG
4224 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0)))
4225 {
4226 struct table_elt *elt;
4227
4228 /* We can use HASH here since we know that canon_hash won't be
4229 called. */
4230 elt = lookup (folded_arg0,
4231 HASH (folded_arg0, GET_MODE (folded_arg0)),
4232 GET_MODE (folded_arg0));
4233
4234 if (elt)
4235 elt = elt->first_same_value;
4236
4237 for (; elt; elt = elt->next_same_value)
4238 {
4239 /* Just check for unary and binary operations. */
4240 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
4241 && GET_CODE (elt->exp) != SIGN_EXTEND
4242 && GET_CODE (elt->exp) != ZERO_EXTEND
4243 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4244 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
4245 {
4246 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
4247
4248 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4249 op0 = fold_rtx (op0, 0);
4250
4251 op0 = equiv_constant (op0);
4252 if (op0)
4253 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
4254 op0, mode);
4255 }
4256 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
4257 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
4258 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4259 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
4260 == mode))
4261 || CONSTANT_P (XEXP (elt->exp, 0)))
4262 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
4263 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
4264 == mode))
4265 || CONSTANT_P (XEXP (elt->exp, 1))))
4266 {
4267 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
4268 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
4269
4270 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4271 op0 = fold_rtx (op0, 0);
4272
4273 if (op0)
4274 op0 = equiv_constant (op0);
4275
4276 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
4277 op1 = fold_rtx (op1, 0);
4278
4279 if (op1)
4280 op1 = equiv_constant (op1);
4281
4282 if (op0 && op1)
4283 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
4284 op0, op1);
4285 }
4286
4287 if (new)
4288 return new;
4289 }
4290 }
4291
7afe21cc
RK
4292 return x;
4293
4294 case NOT:
4295 case NEG:
4296 /* If we have (NOT Y), see if Y is known to be (NOT Z).
4297 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
4298 new = lookup_as_function (XEXP (x, 0), code);
4299 if (new)
4300 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
4301 break;
4302
4303 case MEM:
4304 /* If we are not actually processing an insn, don't try to find the
4305 best address. Not only don't we care, but we could modify the
4306 MEM in an invalid way since we have no insn to validate against. */
4307 if (insn != 0)
4308 find_best_addr (insn, &XEXP (x, 0));
4309
4310 {
4311 /* Even if we don't fold in the insn itself,
4312 we can safely do so here, in hopes of getting a constant. */
4313 rtx addr = fold_rtx (XEXP (x, 0), 0);
4314 rtx base = 0;
4315 int offset = 0;
4316
4317 if (GET_CODE (addr) == REG
4318 && REGNO_QTY_VALID_P (REGNO (addr))
4319 && GET_MODE (addr) == qty_mode[reg_qty[REGNO (addr)]]
4320 && qty_const[reg_qty[REGNO (addr)]] != 0)
4321 addr = qty_const[reg_qty[REGNO (addr)]];
4322
4323 /* If address is constant, split it into a base and integer offset. */
4324 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
4325 base = addr;
4326 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
4327 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
4328 {
4329 base = XEXP (XEXP (addr, 0), 0);
4330 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
4331 }
4332 else if (GET_CODE (addr) == LO_SUM
4333 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
4334 base = XEXP (addr, 1);
4335
4336 /* If this is a constant pool reference, we can fold it into its
4337 constant to allow better value tracking. */
4338 if (base && GET_CODE (base) == SYMBOL_REF
4339 && CONSTANT_POOL_ADDRESS_P (base))
4340 {
4341 rtx constant = get_pool_constant (base);
4342 enum machine_mode const_mode = get_pool_mode (base);
4343 rtx new;
4344
4345 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
4346 constant_pool_entries_cost = COST (constant);
4347
4348 /* If we are loading the full constant, we have an equivalence. */
4349 if (offset == 0 && mode == const_mode)
4350 return constant;
4351
4352 /* If this actually isn't a constant (wierd!), we can't do
4353 anything. Otherwise, handle the two most common cases:
4354 extracting a word from a multi-word constant, and extracting
4355 the low-order bits. Other cases don't seem common enough to
4356 worry about. */
4357 if (! CONSTANT_P (constant))
4358 return x;
4359
4360 if (GET_MODE_CLASS (mode) == MODE_INT
4361 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
4362 && offset % UNITS_PER_WORD == 0
4363 && (new = operand_subword (constant,
4364 offset / UNITS_PER_WORD,
4365 0, const_mode)) != 0)
4366 return new;
4367
4368 if (((BYTES_BIG_ENDIAN
4369 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
4370 || (! BYTES_BIG_ENDIAN && offset == 0))
4371 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
4372 return new;
4373 }
4374
4375 /* If this is a reference to a label at a known position in a jump
4376 table, we also know its value. */
4377 if (base && GET_CODE (base) == LABEL_REF)
4378 {
4379 rtx label = XEXP (base, 0);
4380 rtx table_insn = NEXT_INSN (label);
4381
4382 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
4383 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
4384 {
4385 rtx table = PATTERN (table_insn);
4386
4387 if (offset >= 0
4388 && (offset / GET_MODE_SIZE (GET_MODE (table))
4389 < XVECLEN (table, 0)))
4390 return XVECEXP (table, 0,
4391 offset / GET_MODE_SIZE (GET_MODE (table)));
4392 }
4393 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
4394 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
4395 {
4396 rtx table = PATTERN (table_insn);
4397
4398 if (offset >= 0
4399 && (offset / GET_MODE_SIZE (GET_MODE (table))
4400 < XVECLEN (table, 1)))
4401 {
4402 offset /= GET_MODE_SIZE (GET_MODE (table));
4403 new = gen_rtx (MINUS, Pmode, XVECEXP (table, 1, offset),
4404 XEXP (table, 0));
4405
4406 if (GET_MODE (table) != Pmode)
4407 new = gen_rtx (TRUNCATE, GET_MODE (table), new);
4408
4409 return new;
4410 }
4411 }
4412 }
4413
4414 return x;
4415 }
4416 }
4417
4418 const_arg0 = 0;
4419 const_arg1 = 0;
4420 const_arg2 = 0;
4421 mode_arg0 = VOIDmode;
4422
4423 /* Try folding our operands.
4424 Then see which ones have constant values known. */
4425
4426 fmt = GET_RTX_FORMAT (code);
4427 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4428 if (fmt[i] == 'e')
4429 {
4430 rtx arg = XEXP (x, i);
4431 rtx folded_arg = arg, const_arg = 0;
4432 enum machine_mode mode_arg = GET_MODE (arg);
4433 rtx cheap_arg, expensive_arg;
4434 rtx replacements[2];
4435 int j;
4436
4437 /* Most arguments are cheap, so handle them specially. */
4438 switch (GET_CODE (arg))
4439 {
4440 case REG:
4441 /* This is the same as calling equiv_constant; it is duplicated
4442 here for speed. */
4443 if (REGNO_QTY_VALID_P (REGNO (arg))
4444 && qty_const[reg_qty[REGNO (arg)]] != 0
4445 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != REG
4446 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != PLUS)
4447 const_arg
4448 = gen_lowpart_if_possible (GET_MODE (arg),
4449 qty_const[reg_qty[REGNO (arg)]]);
4450 break;
4451
4452 case CONST:
4453 case CONST_INT:
4454 case SYMBOL_REF:
4455 case LABEL_REF:
4456 case CONST_DOUBLE:
4457 const_arg = arg;
4458 break;
4459
4460#ifdef HAVE_cc0
4461 case CC0:
4462 folded_arg = prev_insn_cc0;
4463 mode_arg = prev_insn_cc0_mode;
4464 const_arg = equiv_constant (folded_arg);
4465 break;
4466#endif
4467
4468 default:
4469 folded_arg = fold_rtx (arg, insn);
4470 const_arg = equiv_constant (folded_arg);
4471 }
4472
4473 /* For the first three operands, see if the operand
4474 is constant or equivalent to a constant. */
4475 switch (i)
4476 {
4477 case 0:
4478 folded_arg0 = folded_arg;
4479 const_arg0 = const_arg;
4480 mode_arg0 = mode_arg;
4481 break;
4482 case 1:
4483 folded_arg1 = folded_arg;
4484 const_arg1 = const_arg;
4485 break;
4486 case 2:
4487 const_arg2 = const_arg;
4488 break;
4489 }
4490
4491 /* Pick the least expensive of the folded argument and an
4492 equivalent constant argument. */
4493 if (const_arg == 0 || const_arg == folded_arg
4494 || COST (const_arg) > COST (folded_arg))
4495 cheap_arg = folded_arg, expensive_arg = const_arg;
4496 else
4497 cheap_arg = const_arg, expensive_arg = folded_arg;
4498
4499 /* Try to replace the operand with the cheapest of the two
4500 possibilities. If it doesn't work and this is either of the first
4501 two operands of a commutative operation, try swapping them.
4502 If THAT fails, try the more expensive, provided it is cheaper
4503 than what is already there. */
4504
4505 if (cheap_arg == XEXP (x, i))
4506 continue;
4507
4508 if (insn == 0 && ! copied)
4509 {
4510 x = copy_rtx (x);
4511 copied = 1;
4512 }
4513
4514 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
4515 for (j = 0;
4516 j < 2 && replacements[j]
4517 && COST (replacements[j]) < COST (XEXP (x, i));
4518 j++)
4519 {
4520 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
4521 break;
4522
4523 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
4524 {
4525 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
4526 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
4527
4528 if (apply_change_group ())
4529 {
4530 /* Swap them back to be invalid so that this loop can
4531 continue and flag them to be swapped back later. */
4532 rtx tem;
4533
4534 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
4535 XEXP (x, 1) = tem;
4536 must_swap = 1;
4537 break;
4538 }
4539 }
4540 }
4541 }
4542
4543 else if (fmt[i] == 'E')
4544 /* Don't try to fold inside of a vector of expressions.
4545 Doing nothing is harmless. */
4546 ;
4547
4548 /* If a commutative operation, place a constant integer as the second
4549 operand unless the first operand is also a constant integer. Otherwise,
4550 place any constant second unless the first operand is also a constant. */
4551
4552 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4553 {
4554 if (must_swap || (const_arg0
4555 && (const_arg1 == 0
4556 || (GET_CODE (const_arg0) == CONST_INT
4557 && GET_CODE (const_arg1) != CONST_INT))))
4558 {
4559 register rtx tem = XEXP (x, 0);
4560
4561 if (insn == 0 && ! copied)
4562 {
4563 x = copy_rtx (x);
4564 copied = 1;
4565 }
4566
4567 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
4568 validate_change (insn, &XEXP (x, 1), tem, 1);
4569 if (apply_change_group ())
4570 {
4571 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
4572 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
4573 }
4574 }
4575 }
4576
4577 /* If X is an arithmetic operation, see if we can simplify it. */
4578
4579 switch (GET_RTX_CLASS (code))
4580 {
4581 case '1':
4582 new = simplify_unary_operation (code, mode,
4583 const_arg0 ? const_arg0 : folded_arg0,
4584 mode_arg0);
4585 break;
4586
4587 case '<':
4588 /* See what items are actually being compared and set FOLDED_ARG[01]
4589 to those values and CODE to the actual comparison code. If any are
4590 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
4591 do anything if both operands are already known to be constant. */
4592
4593 if (const_arg0 == 0 || const_arg1 == 0)
4594 {
4595 struct table_elt *p0, *p1;
4596
4597 code = find_comparison_args (code, &folded_arg0, &folded_arg1);
4598 const_arg0 = equiv_constant (folded_arg0);
4599 const_arg1 = equiv_constant (folded_arg1);
4600
4601 /* Get a mode from the values actually being compared, or from the
4602 old value of MODE_ARG0 if both are constants. If the resulting
4603 mode is VOIDmode or a MODE_CC mode, we don't know what kinds
4604 of things are being compared, so we can't do anything with this
4605 comparison. */
4606
4607 if (GET_MODE (folded_arg0) != VOIDmode
4608 && GET_MODE_CLASS (GET_MODE (folded_arg0)) != MODE_CC)
4609 mode_arg0 = GET_MODE (folded_arg0);
4610
4611 else if (GET_MODE (folded_arg1) != VOIDmode
4612 && GET_MODE_CLASS (GET_MODE (folded_arg1)) != MODE_CC)
4613 mode_arg0 = GET_MODE (folded_arg1);
4614
4615 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
4616 break;
4617
4618 /* If we do not now have two constants being compared, see if we
4619 can nevertheless deduce some things about the comparison. */
4620 if (const_arg0 == 0 || const_arg1 == 0)
4621 {
4622 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or non-explicit
4623 constant? These aren't zero, but we don't know their sign. */
4624 if (const_arg1 == const0_rtx
4625 && (NONZERO_BASE_PLUS_P (folded_arg0)
4626#if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
4627 come out as 0. */
4628 || GET_CODE (folded_arg0) == SYMBOL_REF
4629#endif
4630 || GET_CODE (folded_arg0) == LABEL_REF
4631 || GET_CODE (folded_arg0) == CONST))
4632 {
4633 if (code == EQ)
4634 return const0_rtx;
4635 else if (code == NE)
4636 return const_true_rtx;
4637 }
4638
4639 /* See if the two operands are the same. We don't do this
4640 for IEEE floating-point since we can't assume x == x
4641 since x might be a NaN. */
4642
4643 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4644 || GET_MODE_CLASS (mode_arg0) != MODE_FLOAT)
4645 && (folded_arg0 == folded_arg1
4646 || (GET_CODE (folded_arg0) == REG
4647 && GET_CODE (folded_arg1) == REG
4648 && (reg_qty[REGNO (folded_arg0)]
4649 == reg_qty[REGNO (folded_arg1)]))
4650 || ((p0 = lookup (folded_arg0,
4651 (safe_hash (folded_arg0, mode_arg0)
4652 % NBUCKETS), mode_arg0))
4653 && (p1 = lookup (folded_arg1,
4654 (safe_hash (folded_arg1, mode_arg0)
4655 % NBUCKETS), mode_arg0))
4656 && p0->first_same_value == p1->first_same_value)))
4657 return ((code == EQ || code == LE || code == GE
4658 || code == LEU || code == GEU)
4659 ? const_true_rtx : const0_rtx);
4660
4661 /* If FOLDED_ARG0 is a register, see if the comparison we are
4662 doing now is either the same as we did before or the reverse
4663 (we only check the reverse if not floating-point). */
4664 else if (GET_CODE (folded_arg0) == REG)
4665 {
4666 int qty = reg_qty[REGNO (folded_arg0)];
4667
4668 if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
4669 && (comparison_dominates_p (qty_comparison_code[qty], code)
4670 || (comparison_dominates_p (qty_comparison_code[qty],
4671 reverse_condition (code))
4672 && GET_MODE_CLASS (mode_arg0) == MODE_INT))
4673 && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
4674 || (const_arg1
4675 && rtx_equal_p (qty_comparison_const[qty],
4676 const_arg1))
4677 || (GET_CODE (folded_arg1) == REG
4678 && (reg_qty[REGNO (folded_arg1)]
4679 == qty_comparison_qty[qty]))))
4680 return (comparison_dominates_p (qty_comparison_code[qty],
4681 code)
4682 ? const_true_rtx : const0_rtx);
4683 }
4684 }
4685 }
4686
4687 /* If we are comparing against zero, see if the first operand is
4688 equivalent to an IOR with a constant. If so, we may be able to
4689 determine the result of this comparison. */
4690
4691 if (const_arg1 == const0_rtx)
4692 {
4693 rtx y = lookup_as_function (folded_arg0, IOR);
4694 rtx inner_const;
4695
4696 if (y != 0
4697 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
4698 && GET_CODE (inner_const) == CONST_INT
4699 && INTVAL (inner_const) != 0)
4700 {
4701 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
4702 int has_sign = (HOST_BITS_PER_INT >= sign_bitnum
4703 && (INTVAL (inner_const) & (1 << sign_bitnum)));
4704
4705 switch (code)
4706 {
4707 case EQ:
4708 return const0_rtx;
4709 case NE:
4710 return const_true_rtx;
4711 case LT: case LE:
4712 if (has_sign)
4713 return const_true_rtx;
4714 break;
4715 case GT: case GE:
4716 if (has_sign)
4717 return const0_rtx;
4718 break;
4719 }
4720 }
4721 }
4722
4723 new = simplify_relational_operation (code, mode_arg0,
4724 const_arg0 ? const_arg0 : folded_arg0,
4725 const_arg1 ? const_arg1 : folded_arg1);
4726 break;
4727
4728 case '2':
4729 case 'c':
4730 switch (code)
4731 {
4732 case PLUS:
4733 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4734 with that LABEL_REF as its second operand. If so, the result is
4735 the first operand of that MINUS. This handles switches with an
4736 ADDR_DIFF_VEC table. */
4737 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4738 {
4739 rtx y = lookup_as_function (folded_arg0, MINUS);
4740
4741 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4742 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4743 return XEXP (y, 0);
4744 }
4745
4746 /* ... fall through ... */
4747
4748 case MINUS:
4749 case SMIN: case SMAX: case UMIN: case UMAX:
4750 case IOR: case AND: case XOR:
4751 case MULT: case DIV: case UDIV:
4752 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4753 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4754 is known to be of similar form, we may be able to replace the
4755 operation with a combined operation. This may eliminate the
4756 intermediate operation if every use is simplified in this way.
4757 Note that the similar optimization done by combine.c only works
4758 if the intermediate operation's result has only one reference. */
4759
4760 if (GET_CODE (folded_arg0) == REG
4761 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4762 {
4763 int is_shift
4764 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4765 rtx y = lookup_as_function (folded_arg0, code);
4766 rtx inner_const;
4767 enum rtx_code associate_code;
4768 rtx new_const;
4769
4770 if (y == 0
4771 || 0 == (inner_const
4772 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4773 || GET_CODE (inner_const) != CONST_INT
4774 /* If we have compiled a statement like
4775 "if (x == (x & mask1))", and now are looking at
4776 "x & mask2", we will have a case where the first operand
4777 of Y is the same as our first operand. Unless we detect
4778 this case, an infinite loop will result. */
4779 || XEXP (y, 0) == folded_arg0)
4780 break;
4781
4782 /* Don't associate these operations if they are a PLUS with the
4783 same constant and it is a power of two. These might be doable
4784 with a pre- or post-increment. Similarly for two subtracts of
4785 identical powers of two with post decrement. */
4786
4787 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
4788 && (0
4789#if defined(HAVE_PRE_INCREMENT) || defined(HAVE_POST_INCREMENT)
4790 || exact_log2 (INTVAL (const_arg1)) >= 0
4791#endif
4792#if defined(HAVE_PRE_DECREMENT) || defined(HAVE_POST_DECREMENT)
4793 || exact_log2 (- INTVAL (const_arg1)) >= 0
4794#endif
4795 ))
4796 break;
4797
4798 /* Compute the code used to compose the constants. For example,
4799 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
4800
4801 associate_code
4802 = (code == MULT || code == DIV || code == UDIV ? MULT
4803 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
4804
4805 new_const = simplify_binary_operation (associate_code, mode,
4806 const_arg1, inner_const);
4807
4808 if (new_const == 0)
4809 break;
4810
4811 /* If we are associating shift operations, don't let this
4812 produce a shift of larger than the object. This could
4813 occur when we following a sign-extend by a right shift on
4814 a machine that does a sign-extend as a pair of shifts. */
4815
4816 if (is_shift && GET_CODE (new_const) == CONST_INT
4817 && INTVAL (new_const) > GET_MODE_BITSIZE (mode))
4818 break;
4819
4820 y = copy_rtx (XEXP (y, 0));
4821
4822 /* If Y contains our first operand (the most common way this
4823 can happen is if Y is a MEM), we would do into an infinite
4824 loop if we tried to fold it. So don't in that case. */
4825
4826 if (! reg_mentioned_p (folded_arg0, y))
4827 y = fold_rtx (y, insn);
4828
4829 new = simplify_binary_operation (code, mode, y, new_const);
4830 if (new)
4831 return new;
4832
4833 return gen_rtx (code, mode, y, new_const);
4834 }
4835 }
4836
4837 new = simplify_binary_operation (code, mode,
4838 const_arg0 ? const_arg0 : folded_arg0,
4839 const_arg1 ? const_arg1 : folded_arg1);
4840 break;
4841
4842 case 'o':
4843 /* (lo_sum (high X) X) is simply X. */
4844 if (code == LO_SUM && const_arg0 != 0
4845 && GET_CODE (const_arg0) == HIGH
4846 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4847 return const_arg1;
4848 break;
4849
4850 case '3':
4851 case 'b':
4852 new = simplify_ternary_operation (code, mode, mode_arg0,
4853 const_arg0 ? const_arg0 : folded_arg0,
4854 const_arg1 ? const_arg1 : folded_arg1,
4855 const_arg2 ? const_arg2 : XEXP (x, 2));
4856 break;
4857 }
4858
4859 return new ? new : x;
4860}
4861\f
4862/* Return a constant value currently equivalent to X.
4863 Return 0 if we don't know one. */
4864
4865static rtx
4866equiv_constant (x)
4867 rtx x;
4868{
4869 if (GET_CODE (x) == REG
4870 && REGNO_QTY_VALID_P (REGNO (x))
4871 && qty_const[reg_qty[REGNO (x)]])
4872 x = gen_lowpart_if_possible (GET_MODE (x), qty_const[reg_qty[REGNO (x)]]);
4873
4874 if (x != 0 && CONSTANT_P (x))
4875 return x;
4876
fc3ffe83
RK
4877 /* If X is a MEM, try to fold it outside the context of any insn to see if
4878 it might be equivalent to a constant. That handles the case where it
4879 is a constant-pool reference. Then try to look it up in the hash table
4880 in case it is something whose value we have seen before. */
4881
4882 if (GET_CODE (x) == MEM)
4883 {
4884 struct table_elt *elt;
4885
4886 x = fold_rtx (x, 0);
4887 if (CONSTANT_P (x))
4888 return x;
4889
4890 elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
4891 if (elt == 0)
4892 return 0;
4893
4894 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4895 if (elt->is_const && CONSTANT_P (elt->exp))
4896 return elt->exp;
4897 }
4898
7afe21cc
RK
4899 return 0;
4900}
4901\f
4902/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4903 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4904 least-significant part of X.
4905 MODE specifies how big a part of X to return.
4906
4907 If the requested operation cannot be done, 0 is returned.
4908
4909 This is similar to gen_lowpart in emit-rtl.c. */
4910
4911rtx
4912gen_lowpart_if_possible (mode, x)
4913 enum machine_mode mode;
4914 register rtx x;
4915{
4916 rtx result = gen_lowpart_common (mode, x);
4917
4918 if (result)
4919 return result;
4920 else if (GET_CODE (x) == MEM)
4921 {
4922 /* This is the only other case we handle. */
4923 register int offset = 0;
4924 rtx new;
4925
4926#if WORDS_BIG_ENDIAN
4927 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4928 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4929#endif
4930#if BYTES_BIG_ENDIAN
4931 /* Adjust the address so that the address-after-the-data
4932 is unchanged. */
4933 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4934 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4935#endif
4936 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
4937 if (! memory_address_p (mode, XEXP (new, 0)))
4938 return 0;
4939 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
4940 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
4941 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
4942 return new;
4943 }
4944 else
4945 return 0;
4946}
4947\f
4948/* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4949 branch. It will be zero if not.
4950
4951 In certain cases, this can cause us to add an equivalence. For example,
4952 if we are following the taken case of
4953 if (i == 2)
4954 we can add the fact that `i' and '2' are now equivalent.
4955
4956 In any case, we can record that this comparison was passed. If the same
4957 comparison is seen later, we will know its value. */
4958
4959static void
4960record_jump_equiv (insn, taken)
4961 rtx insn;
4962 int taken;
4963{
4964 int cond_known_true;
4965 rtx op0, op1;
4966 enum machine_mode mode;
4967 int reversed_nonequality = 0;
4968 enum rtx_code code;
4969
4970 /* Ensure this is the right kind of insn. */
4971 if (! condjump_p (insn) || simplejump_p (insn))
4972 return;
4973
4974 /* See if this jump condition is known true or false. */
4975 if (taken)
4976 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
4977 else
4978 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
4979
4980 /* Get the type of comparison being done and the operands being compared.
4981 If we had to reverse a non-equality condition, record that fact so we
4982 know that it isn't valid for floating-point. */
4983 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
4984 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
4985 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
4986
4987 code = find_comparison_args (code, &op0, &op1);
4988 if (! cond_known_true)
4989 {
4990 reversed_nonequality = (code != EQ && code != NE);
4991 code = reverse_condition (code);
4992 }
4993
4994 /* The mode is the mode of the non-constant. */
4995 mode = GET_MODE (op0);
4996 if (mode == VOIDmode) mode = GET_MODE (op1);
4997
4998 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4999}
5000
/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
   REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
   Make any useful entries we can with that information.  Called from
   above function and called recursively.

   For CODE == EQ we merge the equivalence classes of OP0 and OP1; for
   any other CODE we can only record the comparison itself against OP0's
   quantity number for later reuse by fold_rtx.  */

static void
record_jump_cond (code, mode, op0, op1, reversed_nonequality)
     enum rtx_code code;
     enum machine_mode mode;
     rtx op0, op1;
     int reversed_nonequality;
{
  int op0_hash_code, op1_hash_code;
  int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
  struct table_elt *op0_elt, *op1_elt;

  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
     we know that they are also equal in the smaller mode (this is also
     true for all smaller modes whether or not there is a SUBREG, but
     is not worth testing for with no SUBREG.  */

  /* Recurse on the inner register of OP0 paired with the low part of OP1
     (or a SUBREG of OP1 if no low part can be made).  */
  if (code == EQ && GET_CODE (op0) == SUBREG
      && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = gen_lowpart_if_possible (inner_mode, op1);

      record_jump_cond (code, mode, SUBREG_REG (op0),
			tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
			reversed_nonequality);
    }

  /* Same as above with the roles of OP0 and OP1 exchanged.  */
  if (code == EQ && GET_CODE (op1) == SUBREG
      && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = gen_lowpart_if_possible (inner_mode, op0);

      record_jump_cond (code, mode, SUBREG_REG (op1),
			tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
			reversed_nonequality);
    }

  /* Similarly, if this is an NE comparison, and either is a SUBREG
     making a smaller mode, we know the whole thing is also NE.  */

  if (code == NE && GET_CODE (op0) == SUBREG
      && subreg_lowpart_p (op0)
      && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = gen_lowpart_if_possible (inner_mode, op1);

      record_jump_cond (code, mode, SUBREG_REG (op0),
			tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
			reversed_nonequality);
    }

  if (code == NE && GET_CODE (op1) == SUBREG
      && subreg_lowpart_p (op1)
      && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = gen_lowpart_if_possible (inner_mode, op0);

      record_jump_cond (code, mode, SUBREG_REG (op1),
			tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
			reversed_nonequality);
    }

  /* Hash both operands.  HASH communicates through the file-scope flags
     do_not_record, hash_arg_in_memory and hash_arg_in_struct, so each
     flag must be cleared before each call and sampled right after.  */

  do_not_record = 0;
  hash_arg_in_memory = 0;
  hash_arg_in_struct = 0;
  op0_hash_code = HASH (op0, mode);
  op0_in_memory = hash_arg_in_memory;
  op0_in_struct = hash_arg_in_struct;

  /* An unhashable operand (e.g. something volatile) means we can record
     nothing at all.  */
  if (do_not_record)
    return;

  do_not_record = 0;
  hash_arg_in_memory = 0;
  hash_arg_in_struct = 0;
  op1_hash_code = HASH (op1, mode);
  op1_in_memory = hash_arg_in_memory;
  op1_in_struct = hash_arg_in_struct;

  if (do_not_record)
    return;

  /* Look up both operands.  */
  op0_elt = lookup (op0, op0_hash_code, mode);
  op1_elt = lookup (op1, op1_hash_code, mode);

  /* If we aren't setting two things equal all we can do is save this
     comparison.   */
  if (code != EQ)
    {
      /* If we reversed a floating-point comparison, if OP0 is not a
	 register, or if OP1 is neither a register or constant, we can't
	 do anything.  */

      if (GET_CODE (op1) != REG)
	op1 = equiv_constant (op1);

      if ((reversed_nonequality && GET_MODE_CLASS (mode) != MODE_INT)
	  || GET_CODE (op0) != REG || op1 == 0)
	return;

      /* Put OP0 in the hash table if it isn't already.  This gives it a
	 new quantity number.  */
      if (op0_elt == 0)
	{
	  if (insert_regs (op0, 0, 0))
	    {
	      /* Registers moved; recompute the hash code before insert.  */
	      rehash_using_reg (op0);
	      op0_hash_code = HASH (op0, mode);
	    }

	  op0_elt = insert (op0, 0, op0_hash_code, mode);
	  op0_elt->in_memory = op0_in_memory;
	  op0_elt->in_struct = op0_in_struct;
	}

      /* Record the comparison against OP0's quantity, and whether the
	 other operand is another quantity or a constant.  */
      qty_comparison_code[reg_qty[REGNO (op0)]] = code;
      if (GET_CODE (op1) == REG)
	{
	  /* Put OP1 in the hash table so it gets a new quantity number.  */
	  if (op1_elt == 0)
	    {
	      if (insert_regs (op1, 0, 0))
		{
		  rehash_using_reg (op1);
		  op1_hash_code = HASH (op1, mode);
		}

	      op1_elt = insert (op1, 0, op1_hash_code, mode);
	      op1_elt->in_memory = op1_in_memory;
	      op1_elt->in_struct = op1_in_struct;
	    }

	  qty_comparison_qty[reg_qty[REGNO (op0)]] = reg_qty[REGNO (op1)];
	  qty_comparison_const[reg_qty[REGNO (op0)]] = 0;
	}
      else
	{
	  /* -1 marks "compared against a constant, not a quantity".  */
	  qty_comparison_qty[reg_qty[REGNO (op0)]] = -1;
	  qty_comparison_const[reg_qty[REGNO (op0)]] = op1;
	}

      return;
    }

  /* If both are equivalent, merge the two classes.  Save this class for
     `cse_set_around_loop'.  */
  if (op0_elt && op1_elt)
    {
      merge_equiv_classes (op0_elt, op1_elt);
      last_jump_equiv_class = op0_elt;
    }

  /* For whichever side doesn't have an equivalence, make one.  */
  if (op0_elt == 0)
    {
      if (insert_regs (op0, op1_elt, 0))
	{
	  rehash_using_reg (op0);
	  op0_hash_code = HASH (op0, mode);
	}

      op0_elt = insert (op0, op1_elt, op0_hash_code, mode);
      op0_elt->in_memory = op0_in_memory;
      op0_elt->in_struct = op0_in_struct;
      last_jump_equiv_class = op0_elt;
    }

  if (op1_elt == 0)
    {
      if (insert_regs (op1, op0_elt, 0))
	{
	  rehash_using_reg (op1);
	  op1_hash_code = HASH (op1, mode);
	}

      op1_elt = insert (op1, op0_elt, op1_hash_code, mode);
      op1_elt->in_memory = op1_in_memory;
      op1_elt->in_struct = op1_in_struct;
      last_jump_equiv_class = op1_elt;
    }
}
5193\f
5194/* CSE processing for one instruction.
5195 First simplify sources and addresses of all assignments
5196 in the instruction, using previously-computed equivalents values.
5197 Then install the new sources and destinations in the table
5198 of available values.
5199
5200 If IN_LIBCALL_BLOCK is nonzero, don't record any equivalence made in
5201 the insn. */
5202
/* Data on one SET contained in the instruction.  One element of the
   `sets' array built by cse_insn for each SET in the insn (several
   when the pattern is a PARALLEL).  */

struct set
{
  /* The SET rtx itself.  */
  rtx rtl;
  /* The SET_SRC of the rtx (the original value, if it is changing).  */
  rtx src;
  /* The hash-table element for the SET_SRC of the SET.  */
  struct table_elt *src_elt;
  /* Hash code for the SET_SRC.  */
  int src_hash_code;
  /* Hash code for the SET_DEST.  */
  int dest_hash_code;
  /* The SET_DEST, with SUBREG, etc., stripped.  */
  rtx inner_dest;
  /* Place where the pointer to the INNER_DEST was found.  */
  rtx *inner_dest_loc;
  /* Nonzero if the SET_SRC is in memory.  (Copied from hash_arg_in_memory
     after hashing the source.)  */
  char src_in_memory;
  /* Nonzero if the SET_SRC is in a structure.  (Copied from
     hash_arg_in_struct after hashing the source.)  */
  char src_in_struct;
  /* Nonzero if the SET_SRC contains something
     whose value cannot be predicted and understood.
     (Copied from do_not_record; also forced on for perverse SUBREGs
     and for bit-field destinations that would truncate the value.)  */
  char src_volatile;
  /* Original machine mode, in case it becomes a CONST_INT.
     This is the destination's mode when the source has VOIDmode.  */
  enum machine_mode mode;
  /* A constant equivalent for SET_SRC, if any.  */
  rtx src_const;
  /* Hash code of constant equivalent for SET_SRC.  */
  int src_const_hash_code;
  /* Table entry for constant equivalent for SET_SRC, if any.  */
  struct table_elt *src_const_elt;
};
5237
5238static void
5239cse_insn (insn, in_libcall_block)
5240 rtx insn;
5241 int in_libcall_block;
5242{
5243 register rtx x = PATTERN (insn);
5244 rtx tem;
5245 register int i;
5246 register int n_sets = 0;
5247
5248 /* Records what this insn does to set CC0. */
5249 rtx this_insn_cc0 = 0;
5250 enum machine_mode this_insn_cc0_mode;
5251 struct write_data writes_memory;
5252 static struct write_data init = {0, 0, 0, 0};
5253
5254 rtx src_eqv = 0;
5255 struct table_elt *src_eqv_elt = 0;
5256 int src_eqv_volatile;
5257 int src_eqv_in_memory;
5258 int src_eqv_in_struct;
5259 int src_eqv_hash_code;
5260
5261 struct set *sets;
5262
5263 this_insn = insn;
5264 writes_memory = init;
5265
5266 /* Find all the SETs and CLOBBERs in this instruction.
5267 Record all the SETs in the array `set' and count them.
5268 Also determine whether there is a CLOBBER that invalidates
5269 all memory references, or all references at varying addresses. */
5270
5271 if (GET_CODE (x) == SET)
5272 {
5273 sets = (struct set *) alloca (sizeof (struct set));
5274 sets[0].rtl = x;
5275
5276 /* Ignore SETs that are unconditional jumps.
5277 They never need cse processing, so this does not hurt.
5278 The reason is not efficiency but rather
5279 so that we can test at the end for instructions
5280 that have been simplified to unconditional jumps
5281 and not be misled by unchanged instructions
5282 that were unconditional jumps to begin with. */
5283 if (SET_DEST (x) == pc_rtx
5284 && GET_CODE (SET_SRC (x)) == LABEL_REF)
5285 ;
5286
5287 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
5288 The hard function value register is used only once, to copy to
5289 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
5290 Ensure we invalidate the destination register. On the 80386 no
5291 other code would invalidate it since it is a fixed_reg. */
5292
5293 else if (GET_CODE (SET_SRC (x)) == CALL)
5294 {
5295 canon_reg (SET_SRC (x), insn);
5296 fold_rtx (SET_SRC (x), insn);
5297 invalidate (SET_DEST (x));
5298 }
5299 else
5300 n_sets = 1;
5301 }
5302 else if (GET_CODE (x) == PARALLEL)
5303 {
5304 register int lim = XVECLEN (x, 0);
5305
5306 sets = (struct set *) alloca (lim * sizeof (struct set));
5307
5308 /* Find all regs explicitly clobbered in this insn,
5309 and ensure they are not replaced with any other regs
5310 elsewhere in this insn.
5311 When a reg that is clobbered is also used for input,
5312 we should presume that that is for a reason,
5313 and we should not substitute some other register
5314 which is not supposed to be clobbered.
5315 Therefore, this loop cannot be merged into the one below
 5316	 because a CALL may precede a CLOBBER and refer to the
5317 value clobbered. We must not let a canonicalization do
5318 anything in that case. */
5319 for (i = 0; i < lim; i++)
5320 {
5321 register rtx y = XVECEXP (x, 0, i);
5322 if (GET_CODE (y) == CLOBBER
5323 && (GET_CODE (XEXP (y, 0)) == REG
5324 || GET_CODE (XEXP (y, 0)) == SUBREG))
5325 invalidate (XEXP (y, 0));
5326 }
5327
5328 for (i = 0; i < lim; i++)
5329 {
5330 register rtx y = XVECEXP (x, 0, i);
5331 if (GET_CODE (y) == SET)
5332 {
5333 /* As above, we ignore unconditional jumps and call-insns. */
5334 if (GET_CODE (SET_SRC (y)) == CALL)
5335 {
5336 canon_reg (SET_SRC (y), insn);
5337 fold_rtx (SET_SRC (y), insn);
5338 invalidate (SET_DEST (y));
5339 }
5340 else if (SET_DEST (y) == pc_rtx
5341 && GET_CODE (SET_SRC (y)) == LABEL_REF)
5342 ;
5343 else
5344 sets[n_sets++].rtl = y;
5345 }
5346 else if (GET_CODE (y) == CLOBBER)
5347 {
5348 /* If we clobber memory, take note of that,
5349 and canon the address.
5350 This does nothing when a register is clobbered
5351 because we have already invalidated the reg. */
5352 if (GET_CODE (XEXP (y, 0)) == MEM)
5353 {
5354 canon_reg (XEXP (y, 0), 0);
5355 note_mem_written (XEXP (y, 0), &writes_memory);
5356 }
5357 }
5358 else if (GET_CODE (y) == USE
5359 && ! (GET_CODE (XEXP (y, 0)) == REG
5360 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
5361 canon_reg (y, 0);
5362 else if (GET_CODE (y) == CALL)
5363 {
5364 canon_reg (y, insn);
5365 fold_rtx (y, insn);
5366 }
5367 }
5368 }
5369 else if (GET_CODE (x) == CLOBBER)
5370 {
5371 if (GET_CODE (XEXP (x, 0)) == MEM)
5372 {
5373 canon_reg (XEXP (x, 0), 0);
5374 note_mem_written (XEXP (x, 0), &writes_memory);
5375 }
5376 }
5377
5378 /* Canonicalize a USE of a pseudo register or memory location. */
5379 else if (GET_CODE (x) == USE
5380 && ! (GET_CODE (XEXP (x, 0)) == REG
5381 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
5382 canon_reg (XEXP (x, 0), 0);
5383 else if (GET_CODE (x) == CALL)
5384 {
5385 canon_reg (x, insn);
5386 fold_rtx (x, insn);
5387 }
5388
5389 if (n_sets == 1 && REG_NOTES (insn) != 0)
5390 {
5391 /* Store the equivalent value in SRC_EQV, if different. */
5392 rtx tem = find_reg_note (insn, REG_EQUAL, 0);
5393
5394 if (tem && ! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl)))
5395 src_eqv = canon_reg (XEXP (tem, 0), 0);
5396 }
5397
5398 /* Canonicalize sources and addresses of destinations.
5399 We do this in a separate pass to avoid problems when a MATCH_DUP is
5400 present in the insn pattern. In that case, we want to ensure that
5401 we don't break the duplicate nature of the pattern. So we will replace
5402 both operands at the same time. Otherwise, we would fail to find an
5403 equivalent substitution in the loop calling validate_change below.
5404 (We also speed up that loop when a canonicalization was done since
5405 recog_memoized need not be called for just a canonicalization unless
5406 a pseudo register is being replaced by a hard reg of vice versa.)
5407
5408 We used to suppress canonicalization of DEST if it appears in SRC,
5409 but we don't do this any more.
5410
5411 ??? The way this code is written now, if we have a MATCH_DUP between
5412 two operands that are pseudos and we would want to canonicalize them
5413 to a hard register, we won't do that. The only time this would happen
5414 is if the hard reg was a fixed register, and this should be rare.
5415
5416 ??? This won't work if there is a MATCH_DUP between an input and an
5417 output, but these never worked and must be declared invalid. */
5418
5419 for (i = 0; i < n_sets; i++)
5420 {
5421 rtx dest = SET_DEST (sets[i].rtl);
5422 rtx src = SET_SRC (sets[i].rtl);
5423 rtx new = canon_reg (src, insn);
5424
5425 if (GET_CODE (new) == REG && GET_CODE (src) == REG
5426 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
5427 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
5428 validate_change (insn, &SET_SRC (sets[i].rtl), new, 0);
5429 else
5430 SET_SRC (sets[i].rtl) = new;
5431
5432 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
5433 {
5434 validate_change (insn, &XEXP (dest, 1),
5435 canon_reg (XEXP (dest, 1), insn), 0);
5436 validate_change (insn, &XEXP (dest, 2),
5437 canon_reg (XEXP (dest, 2), insn), 0);
5438 }
5439
5440 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
5441 || GET_CODE (dest) == ZERO_EXTRACT
5442 || GET_CODE (dest) == SIGN_EXTRACT)
5443 dest = XEXP (dest, 0);
5444
5445 if (GET_CODE (dest) == MEM)
5446 canon_reg (dest, insn);
5447 }
5448
5449 /* Set sets[i].src_elt to the class each source belongs to.
5450 Detect assignments from or to volatile things
5451 and set set[i] to zero so they will be ignored
5452 in the rest of this function.
5453
5454 Nothing in this loop changes the hash table or the register chains. */
5455
5456 for (i = 0; i < n_sets; i++)
5457 {
5458 register rtx src, dest;
5459 register rtx src_folded;
5460 register struct table_elt *elt = 0, *p;
5461 enum machine_mode mode;
5462 rtx src_eqv_here;
5463 rtx src_const = 0;
5464 rtx src_related = 0;
5465 struct table_elt *src_const_elt = 0;
5466 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
5467 int src_related_cost = 10000, src_elt_cost = 10000;
5468 /* Set non-zero if we need to call force_const_mem on with the
5469 contents of src_folded before using it. */
5470 int src_folded_force_flag = 0;
5471
5472 dest = SET_DEST (sets[i].rtl);
5473 src = SET_SRC (sets[i].rtl);
5474
5475 /* If SRC is a constant that has no machine mode,
5476 hash it with the destination's machine mode.
5477 This way we can keep different modes separate. */
5478
5479 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5480 sets[i].mode = mode;
5481
5482 if (src_eqv)
5483 {
5484 enum machine_mode eqvmode = mode;
5485 if (GET_CODE (dest) == STRICT_LOW_PART)
5486 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5487 do_not_record = 0;
5488 hash_arg_in_memory = 0;
5489 hash_arg_in_struct = 0;
5490 src_eqv = fold_rtx (src_eqv, insn);
5491 src_eqv_hash_code = HASH (src_eqv, eqvmode);
5492
5493 /* Find the equivalence class for the equivalent expression. */
5494
5495 if (!do_not_record)
5496 src_eqv_elt = lookup (src_eqv, src_eqv_hash_code, eqvmode);
5497
5498 src_eqv_volatile = do_not_record;
5499 src_eqv_in_memory = hash_arg_in_memory;
5500 src_eqv_in_struct = hash_arg_in_struct;
5501 }
5502
5503 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5504 value of the INNER register, not the destination. So it is not
5505 a legal substitution for the source. But save it for later. */
5506 if (GET_CODE (dest) == STRICT_LOW_PART)
5507 src_eqv_here = 0;
5508 else
5509 src_eqv_here = src_eqv;
5510
5511 /* Simplify and foldable subexpressions in SRC. Then get the fully-
5512 simplified result, which may not necessarily be valid. */
5513 src_folded = fold_rtx (src, insn);
5514
5515 /* If storing a constant in a bitfield, pre-truncate the constant
5516 so we will be able to record it later. */
5517 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5518 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5519 {
5520 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5521
5522 if (GET_CODE (src) == CONST_INT
5523 && GET_CODE (width) == CONST_INT
5524 && INTVAL (width) < HOST_BITS_PER_INT
5525 && (INTVAL (src) & ((-1) << INTVAL (width))))
5526 src_folded = gen_rtx (CONST_INT, VOIDmode,
5527 INTVAL (src) & ((1 << INTVAL (width)) - 1));
5528 }
5529
5530 /* Compute SRC's hash code, and also notice if it
5531 should not be recorded at all. In that case,
5532 prevent any further processing of this assignment. */
5533 do_not_record = 0;
5534 hash_arg_in_memory = 0;
5535 hash_arg_in_struct = 0;
5536
5537 sets[i].src = src;
5538 sets[i].src_hash_code = HASH (src, mode);
5539 sets[i].src_volatile = do_not_record;
5540 sets[i].src_in_memory = hash_arg_in_memory;
5541 sets[i].src_in_struct = hash_arg_in_struct;
5542
5543 /* If source is a perverse subreg (such as QI treated as an SI),
5544 treat it as volatile. It may do the work of an SI in one context
5545 where the extra bits are not being used, but cannot replace an SI
5546 in general. */
5547 if (GET_CODE (src) == SUBREG
5548 && (GET_MODE_SIZE (GET_MODE (src))
5549 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5550 sets[i].src_volatile = 1;
5551
5552 /* Locate all possible equivalent forms for SRC. Try to replace
5553 SRC in the insn with each cheaper equivalent.
5554
5555 We have the following types of equivalents: SRC itself, a folded
5556 version, a value given in a REG_EQUAL note, or a value related
5557 to a constant.
5558
5559 Each of these equivalents may be part of an additional class
5560 of equivalents (if more than one is in the table, they must be in
5561 the same class; we check for this).
5562
5563 If the source is volatile, we don't do any table lookups.
5564
5565 We note any constant equivalent for possible later use in a
5566 REG_NOTE. */
5567
5568 if (!sets[i].src_volatile)
5569 elt = lookup (src, sets[i].src_hash_code, mode);
5570
5571 sets[i].src_elt = elt;
5572
5573 if (elt && src_eqv_here && src_eqv_elt)
5574 {
5575 if (elt->first_same_value != src_eqv_elt->first_same_value)
5576 {
5577 /* The REG_EQUAL is indicating that two formerly distinct
5578 classes are now equivalent. So merge them. */
5579 merge_equiv_classes (elt, src_eqv_elt);
5580 src_eqv_hash_code = HASH (src_eqv, elt->mode);
5581 src_eqv_elt = lookup (src_eqv, src_eqv_hash_code, elt->mode);
5582 }
5583
5584 src_eqv_here = 0;
5585 }
5586
5587 else if (src_eqv_elt)
5588 elt = src_eqv_elt;
5589
5590 /* Try to find a constant somewhere and record it in `src_const'.
5591 Record its table element, if any, in `src_const_elt'. Look in
5592 any known equivalences first. (If the constant is not in the
5593 table, also set `sets[i].src_const_hash_code'). */
5594 if (elt)
5595 for (p = elt->first_same_value; p; p = p->next_same_value)
5596 if (p->is_const)
5597 {
5598 src_const = p->exp;
5599 src_const_elt = elt;
5600 break;
5601 }
5602
5603 if (src_const == 0
5604 && (CONSTANT_P (src_folded)
5605 /* Consider (minus (label_ref L1) (label_ref L2)) as
5606 "constant" here so we will record it. This allows us
5607 to fold switch statements when an ADDR_DIFF_VEC is used. */
5608 || (GET_CODE (src_folded) == MINUS
5609 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5610 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5611 src_const = src_folded, src_const_elt = elt;
5612 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5613 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5614
5615 /* If we don't know if the constant is in the table, get its
5616 hash code and look it up. */
5617 if (src_const && src_const_elt == 0)
5618 {
5619 sets[i].src_const_hash_code = HASH (src_const, mode);
5620 src_const_elt = lookup (src_const, sets[i].src_const_hash_code,
5621 mode);
5622 }
5623
5624 sets[i].src_const = src_const;
5625 sets[i].src_const_elt = src_const_elt;
5626
5627 /* If the constant and our source are both in the table, mark them as
5628 equivalent. Otherwise, if a constant is in the table but the source
5629 isn't, set ELT to it. */
5630 if (src_const_elt && elt
5631 && src_const_elt->first_same_value != elt->first_same_value)
5632 merge_equiv_classes (elt, src_const_elt);
5633 else if (src_const_elt && elt == 0)
5634 elt = src_const_elt;
5635
5636 /* See if there is a register linearly related to a constant
5637 equivalent of SRC. */
5638 if (src_const
5639 && (GET_CODE (src_const) == CONST
5640 || (src_const_elt && src_const_elt->related_value != 0)))
5641 {
5642 src_related = use_related_value (src_const, src_const_elt);
5643 if (src_related)
5644 {
5645 struct table_elt *src_related_elt
5646 = lookup (src_related, HASH (src_related, mode), mode);
5647 if (src_related_elt && elt)
5648 {
5649 if (elt->first_same_value
5650 != src_related_elt->first_same_value)
5651 /* This can occur when we previously saw a CONST
5652 involving a SYMBOL_REF and then see the SYMBOL_REF
5653 twice. Merge the involved classes. */
5654 merge_equiv_classes (elt, src_related_elt);
5655
5656 src_related = 0;
5657 src_related_elt = 0;
5658 }
5659 else if (src_related_elt && elt == 0)
5660 elt = src_related_elt;
5661 }
5662 }
5663
5664 /* Another possibility is that we have an AND with a constant in
5665 a mode narrower than a word. If so, it might have been generated
5666 as part of an "if" which would narrow the AND. If we already
5667 have done the AND in a wider mode, we can use a SUBREG of that
5668 value. */
5669
5670 if (flag_expensive_optimizations && ! src_related
5671 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5672 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5673 {
5674 enum machine_mode tmode;
5675 rtx new_and = gen_rtx (AND, VOIDmode, 0, XEXP (src, 1));
5676
5677 for (tmode = GET_MODE_WIDER_MODE (mode);
5678 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5679 tmode = GET_MODE_WIDER_MODE (tmode))
5680 {
5681 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
5682 struct table_elt *larger_elt;
5683
5684 if (inner)
5685 {
5686 PUT_MODE (new_and, tmode);
5687 XEXP (new_and, 0) = inner;
5688 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5689 if (larger_elt == 0)
5690 continue;
5691
5692 for (larger_elt = larger_elt->first_same_value;
5693 larger_elt; larger_elt = larger_elt->next_same_value)
5694 if (GET_CODE (larger_elt->exp) == REG)
5695 {
5696 src_related
5697 = gen_lowpart_if_possible (mode, larger_elt->exp);
5698 break;
5699 }
5700
5701 if (src_related)
5702 break;
5703 }
5704 }
5705 }
5706
5707 if (src == src_folded)
5708 src_folded = 0;
5709
5710 /* At this point, ELT, if non-zero, points to a class of expressions
5711 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5712 and SRC_RELATED, if non-zero, each contain additional equivalent
5713 expressions. Prune these latter expressions by deleting expressions
5714 already in the equivalence class.
5715
5716 Check for an equivalent identical to the destination. If found,
5717 this is the preferred equivalent since it will likely lead to
5718 elimination of the insn. Indicate this by placing it in
5719 `src_related'. */
5720
5721 if (elt) elt = elt->first_same_value;
5722 for (p = elt; p; p = p->next_same_value)
5723 {
5724 enum rtx_code code = GET_CODE (p->exp);
5725
5726 /* If the expression is not valid, ignore it. Then we do not
5727 have to check for validity below. In most cases, we can use
5728 `rtx_equal_p', since canonicalization has already been done. */
5729 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5730 continue;
5731
5732 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5733 src = 0;
5734 else if (src_folded && GET_CODE (src_folded) == code
5735 && rtx_equal_p (src_folded, p->exp))
5736 src_folded = 0;
5737 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5738 && rtx_equal_p (src_eqv_here, p->exp))
5739 src_eqv_here = 0;
5740 else if (src_related && GET_CODE (src_related) == code
5741 && rtx_equal_p (src_related, p->exp))
5742 src_related = 0;
5743
5744 /* This is the same as the destination of the insns, we want
5745 to prefer it. Copy it to src_related. The code below will
5746 then give it a negative cost. */
5747 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5748 src_related = dest;
5749
5750 }
5751
5752 /* Find the cheapest valid equivalent, trying all the available
5753 possibilities. Prefer items not in the hash table to ones
5754 that are when they are equal cost. Note that we can never
5755 worsen an insn as the current contents will also succeed.
5756 If we find an equivalent identical to the source, use it as best,
5757 since this insn will probably be eliminated in that case. */
5758 if (src)
5759 {
5760 if (rtx_equal_p (src, dest))
5761 src_cost = -1;
5762 else
5763 src_cost = COST (src);
5764 }
5765
5766 if (src_eqv_here)
5767 {
5768 if (rtx_equal_p (src_eqv_here, dest))
5769 src_eqv_cost = -1;
5770 else
5771 src_eqv_cost = COST (src_eqv_here);
5772 }
5773
5774 if (src_folded)
5775 {
5776 if (rtx_equal_p (src_folded, dest))
5777 src_folded_cost = -1;
5778 else
5779 src_folded_cost = COST (src_folded);
5780 }
5781
5782 if (src_related)
5783 {
5784 if (rtx_equal_p (src_related, dest))
5785 src_related_cost = -1;
5786 else
5787 src_related_cost = COST (src_related);
5788 }
5789
5790 /* If this was an indirect jump insn, a known label will really be
5791 cheaper even though it looks more expensive. */
5792 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5793 src_folded = src_const, src_folded_cost = -1;
5794
5795 /* Terminate loop when replacement made. This must terminate since
5796 the current contents will be tested and will always be valid. */
5797 while (1)
5798 {
5799 rtx trial;
5800
5801 /* Skip invalid entries. */
5802 while (elt && GET_CODE (elt->exp) != REG
5803 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5804 elt = elt->next_same_value;
5805
5806 if (elt) src_elt_cost = elt->cost;
5807
5808 /* Find cheapest and skip it for the next time. For items
5809 of equal cost, use this order:
5810 src_folded, src, src_eqv, src_related and hash table entry. */
5811 if (src_folded_cost <= src_cost
5812 && src_folded_cost <= src_eqv_cost
5813 && src_folded_cost <= src_related_cost
5814 && src_folded_cost <= src_elt_cost)
5815 {
5816 trial = src_folded, src_folded_cost = 10000;
5817 if (src_folded_force_flag)
5818 trial = force_const_mem (mode, trial);
5819 }
5820 else if (src_cost <= src_eqv_cost
5821 && src_cost <= src_related_cost
5822 && src_cost <= src_elt_cost)
5823 trial = src, src_cost = 10000;
5824 else if (src_eqv_cost <= src_related_cost
5825 && src_eqv_cost <= src_elt_cost)
5826 trial = src_eqv_here, src_eqv_cost = 10000;
5827 else if (src_related_cost <= src_elt_cost)
5828 trial = src_related, src_related_cost = 10000;
5829 else
5830 {
5831 trial = canon_reg (copy_rtx (elt->exp), 0);
5832 elt = elt->next_same_value;
5833 src_elt_cost = 10000;
5834 }
5835
5836 /* We don't normally have an insn matching (set (pc) (pc)), so
5837 check for this separately here. We will delete such an
5838 insn below.
5839
5840 Tablejump insns contain a USE of the table, so simply replacing
5841 the operand with the constant won't match. This is simply an
5842 unconditional branch, however, and is therefore valid. Just
5843 insert the substitution here and we will delete and re-emit
5844 the insn later. */
5845
5846 if (n_sets == 1 && dest == pc_rtx
5847 && (trial == pc_rtx
5848 || (GET_CODE (trial) == LABEL_REF
5849 && ! condjump_p (insn))))
5850 {
5851 /* If TRIAL is a label in front of a jump table, we are
5852 really falling through the switch (this is how casesi
5853 insns work), so we must branch around the table. */
5854 if (GET_CODE (trial) == CODE_LABEL
5855 && NEXT_INSN (trial) != 0
5856 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
5857 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
5858 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
5859
5860 trial = gen_rtx (LABEL_REF, Pmode, get_label_after (trial));
5861
5862 SET_SRC (sets[i].rtl) = trial;
5863 break;
5864 }
5865
5866 /* Look for a substitution that makes a valid insn. */
5867 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5868 break;
5869
5870 /* If we previously found constant pool entries for
5871 constants and this is a constant, try making a
5872 pool entry. Put it in src_folded unless we already have done
5873 this since that is where it likely came from. */
5874
5875 else if (constant_pool_entries_cost
5876 && CONSTANT_P (trial)
5877 && (src_folded == 0 || GET_CODE (src_folded) != MEM)
5878 && GET_MODE_CLASS (mode) != MODE_CC)
5879 {
5880 src_folded_force_flag = 1;
5881 src_folded = trial;
5882 src_folded_cost = constant_pool_entries_cost;
5883 }
5884 }
5885
5886 src = SET_SRC (sets[i].rtl);
5887
5888 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5889 However, there is an important exception: If both are registers
5890 that are not the head of their equivalence class, replace SET_SRC
5891 with the head of the class. If we do not do this, we will have
5892 both registers live over a portion of the basic block. This way,
5893 their lifetimes will likely abut instead of overlapping. */
5894 if (GET_CODE (dest) == REG
5895 && REGNO_QTY_VALID_P (REGNO (dest))
5896 && qty_mode[reg_qty[REGNO (dest)]] == GET_MODE (dest)
5897 && qty_first_reg[reg_qty[REGNO (dest)]] != REGNO (dest)
5898 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5899 /* Don't do this if the original insn had a hard reg as
5900 SET_SRC. */
5901 && (GET_CODE (sets[i].src) != REG
5902 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
5903 /* We can't call canon_reg here because it won't do anything if
5904 SRC is a hard register. */
5905 {
5906 int first = qty_first_reg[reg_qty[REGNO (src)]];
5907
5908 src = SET_SRC (sets[i].rtl)
5909 = first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
5910 : gen_rtx (REG, GET_MODE (src), first);
5911
5912 /* If we had a constant that is cheaper than what we are now
5913 setting SRC to, use that constant. We ignored it when we
5914 thought we could make this into a no-op. */
5915 if (src_const && COST (src_const) < COST (src)
5916 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const, 0))
5917 src = src_const;
5918 }
5919
5920 /* If we made a change, recompute SRC values. */
5921 if (src != sets[i].src)
5922 {
5923 do_not_record = 0;
5924 hash_arg_in_memory = 0;
5925 hash_arg_in_struct = 0;
5926 sets[i].src = src;
5927 sets[i].src_hash_code = HASH (src, mode);
5928 sets[i].src_volatile = do_not_record;
5929 sets[i].src_in_memory = hash_arg_in_memory;
5930 sets[i].src_in_struct = hash_arg_in_struct;
5931 sets[i].src_elt = lookup (src, sets[i].src_hash_code, mode);
5932 }
5933
5934 /* If this is a single SET, we are setting a register, and we have an
5935 equivalent constant, we want to add a REG_NOTE. We don't want
5936 to write a REG_EQUAL note for a constant pseudo since verifying that
 5937	 that pseudo hasn't been eliminated is a pain. Such a note also
5938 won't help anything. */
5939 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5940 && GET_CODE (src_const) != REG)
5941 {
5942 rtx tem = find_reg_note (insn, REG_EQUAL, 0);
5943
5944 /* Record the actual constant value in a REG_EQUAL note, making
5945 a new one if one does not already exist. */
5946 if (tem)
5947 XEXP (tem, 0) = src_const;
5948 else
5949 REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL,
5950 src_const, REG_NOTES (insn));
5951
5952 /* If storing a constant value in a register that
5953 previously held the constant value 0,
5954 record this fact with a REG_WAS_0 note on this insn.
5955
5956 Note that the *register* is required to have previously held 0,
5957 not just any register in the quantity and we must point to the
5958 insn that set that register to zero.
5959
5960 Rather than track each register individually, we just see if
5961 the last set for this quantity was for this register. */
5962
5963 if (REGNO_QTY_VALID_P (REGNO (dest))
5964 && qty_const[reg_qty[REGNO (dest)]] == const0_rtx)
5965 {
5966 /* See if we previously had a REG_WAS_0 note. */
5967 rtx note = find_reg_note (insn, REG_WAS_0, 0);
5968 rtx const_insn = qty_const_insn[reg_qty[REGNO (dest)]];
5969
5970 if ((tem = single_set (const_insn)) != 0
5971 && rtx_equal_p (SET_DEST (tem), dest))
5972 {
5973 if (note)
5974 XEXP (note, 0) = const_insn;
5975 else
5976 REG_NOTES (insn) = gen_rtx (INSN_LIST, REG_WAS_0,
5977 const_insn, REG_NOTES (insn));
5978 }
5979 }
5980 }
5981
5982 /* Now deal with the destination. */
5983 do_not_record = 0;
5984 sets[i].inner_dest_loc = &SET_DEST (sets[0].rtl);
5985
5986 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5987 to the MEM or REG within it. */
5988 while (GET_CODE (dest) == SIGN_EXTRACT
5989 || GET_CODE (dest) == ZERO_EXTRACT
5990 || GET_CODE (dest) == SUBREG
5991 || GET_CODE (dest) == STRICT_LOW_PART)
5992 {
5993 sets[i].inner_dest_loc = &XEXP (dest, 0);
5994 dest = XEXP (dest, 0);
5995 }
5996
5997 sets[i].inner_dest = dest;
5998
5999 if (GET_CODE (dest) == MEM)
6000 {
6001 dest = fold_rtx (dest, insn);
6002
6003 /* Decide whether we invalidate everything in memory,
6004 or just things at non-fixed places.
6005 Writing a large aggregate must invalidate everything
6006 because we don't know how long it is. */
6007 note_mem_written (dest, &writes_memory);
6008 }
6009
6010 /* Compute the hash code of the destination now,
6011 before the effects of this instruction are recorded,
6012 since the register values used in the address computation
6013 are those before this instruction. */
6014 sets[i].dest_hash_code = HASH (dest, mode);
6015
6016 /* Don't enter a bit-field in the hash table
6017 because the value in it after the store
6018 may not equal what was stored, due to truncation. */
6019
6020 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6021 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6022 {
6023 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6024
6025 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
6026 && GET_CODE (width) == CONST_INT
6027 && INTVAL (width) < HOST_BITS_PER_INT
6028 && ! (INTVAL (src_const) & ((-1) << INTVAL (width))))
6029 /* Exception: if the value is constant,
6030 and it won't be truncated, record it. */
6031 ;
6032 else
6033 {
6034 /* This is chosen so that the destination will be invalidated
6035 but no new value will be recorded.
6036 We must invalidate because sometimes constant
6037 values can be recorded for bitfields. */
6038 sets[i].src_elt = 0;
6039 sets[i].src_volatile = 1;
6040 src_eqv = 0;
6041 src_eqv_elt = 0;
6042 }
6043 }
6044
6045 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
6046 the insn. */
6047 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
6048 {
6049 PUT_CODE (insn, NOTE);
6050 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
6051 NOTE_SOURCE_FILE (insn) = 0;
6052 cse_jumps_altered = 1;
6053 /* One less use of the label this insn used to jump to. */
6054 --LABEL_NUSES (JUMP_LABEL (insn));
6055 /* No more processing for this set. */
6056 sets[i].rtl = 0;
6057 }
6058
6059 /* If this SET is now setting PC to a label, we know it used to
6060 be a conditional or computed branch. So we see if we can follow
6061 it. If it was a computed branch, delete it and re-emit. */
6062 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
6063 {
6064 rtx p;
6065
6066 /* If this is not in the format for a simple branch and
6067 we are the only SET in it, re-emit it. */
6068 if (! simplejump_p (insn) && n_sets == 1)
6069 {
6070 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
6071 JUMP_LABEL (new) = XEXP (src, 0);
6072 LABEL_NUSES (XEXP (src, 0))++;
6073 delete_insn (insn);
6074 insn = new;
6075 }
6076
6077 /* Now that we've converted this jump to an unconditional jump,
6078 there is dead code after it. Delete the dead code until we
6079 reach a BARRIER, the end of the function, or a label. Do
6080 not delete NOTEs except for NOTE_INSN_DELETED since later
6081 phases assume these notes are retained. */
6082
6083 p = insn;
6084
6085 while (NEXT_INSN (p) != 0
6086 && GET_CODE (NEXT_INSN (p)) != BARRIER
6087 && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
6088 {
6089 if (GET_CODE (NEXT_INSN (p)) != NOTE
6090 || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
6091 delete_insn (NEXT_INSN (p));
6092 else
6093 p = NEXT_INSN (p);
6094 }
6095
6096 /* If we don't have a BARRIER immediately after INSN, put one there.
6097 Much code assumes that there are no NOTEs between a JUMP_INSN and
6098 BARRIER. */
6099
6100 if (NEXT_INSN (insn) == 0
6101 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
6102 emit_barrier_after (insn);
6103
6104 /* We might have two BARRIERs separated by notes. Delete the second
6105 one if so. */
6106
538b78e7
RS
6107 if (p != insn && NEXT_INSN (p) != 0
6108 && GET_CODE (NEXT_INSN (p)) == BARRIER)
7afe21cc
RK
6109 delete_insn (NEXT_INSN (p));
6110
6111 cse_jumps_altered = 1;
6112 sets[i].rtl = 0;
6113 }
6114
6115 /* No further processing for this assignment if destination
6116 is volatile. */
6117
6118 else if (do_not_record)
6119 sets[i].rtl = 0;
6120
6121 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
6122 sets[i].dest_hash_code = HASH (SET_DEST (sets[i].rtl), mode);
6123
6124#ifdef HAVE_cc0
6125 /* If setting CC0, record what it was set to, or a constant, if it
6126 is equivalent to a constant. If it is being set to a floating-point
6127 value, make a COMPARE with the appropriate constant of 0. If we
6128 don't do this, later code can interpret this as a test against
6129 const0_rtx, which can cause problems if we try to put it into an
6130 insn as a floating-point operand. */
6131 if (dest == cc0_rtx)
6132 {
6133 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
6134 this_insn_cc0_mode = mode;
6135 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
6136 this_insn_cc0 = gen_rtx (COMPARE, VOIDmode, this_insn_cc0,
6137 CONST0_RTX (mode));
6138 }
6139#endif
6140 }
6141
6142 /* Now enter all non-volatile source expressions in the hash table
6143 if they are not already present.
6144 Record their equivalence classes in src_elt.
6145 This way we can insert the corresponding destinations into
6146 the same classes even if the actual sources are no longer in them
6147 (having been invalidated). */
6148
6149 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
6150 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
6151 {
6152 register struct table_elt *elt;
6153 register struct table_elt *classp = sets[0].src_elt;
6154 rtx dest = SET_DEST (sets[0].rtl);
6155 enum machine_mode eqvmode = GET_MODE (dest);
6156
6157 if (GET_CODE (dest) == STRICT_LOW_PART)
6158 {
6159 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6160 classp = 0;
6161 }
6162 if (insert_regs (src_eqv, classp, 0))
6163 src_eqv_hash_code = HASH (src_eqv, eqvmode);
6164 elt = insert (src_eqv, classp, src_eqv_hash_code, eqvmode);
6165 elt->in_memory = src_eqv_in_memory;
6166 elt->in_struct = src_eqv_in_struct;
6167 src_eqv_elt = elt;
6168 }
6169
6170 for (i = 0; i < n_sets; i++)
6171 if (sets[i].rtl && ! sets[i].src_volatile
6172 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
6173 {
6174 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
6175 {
6176 /* REG_EQUAL in setting a STRICT_LOW_PART
6177 gives an equivalent for the entire destination register,
6178 not just for the subreg being stored in now.
6179 This is a more interesting equivalence, so we arrange later
6180 to treat the entire reg as the destination. */
6181 sets[i].src_elt = src_eqv_elt;
6182 sets[i].src_hash_code = src_eqv_hash_code;
6183 }
6184 else
6185 {
6186 /* Insert source and constant equivalent into hash table, if not
6187 already present. */
6188 register struct table_elt *classp = src_eqv_elt;
6189 register rtx src = sets[i].src;
6190 register rtx dest = SET_DEST (sets[i].rtl);
6191 enum machine_mode mode
6192 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6193
6194 if (sets[i].src_elt == 0)
6195 {
6196 register struct table_elt *elt;
6197
6198 /* Note that these insert_regs calls cannot remove
6199 any of the src_elt's, because they would have failed to
6200 match if not still valid. */
6201 if (insert_regs (src, classp, 0))
6202 sets[i].src_hash_code = HASH (src, mode);
6203 elt = insert (src, classp, sets[i].src_hash_code, mode);
6204 elt->in_memory = sets[i].src_in_memory;
6205 elt->in_struct = sets[i].src_in_struct;
6206 sets[i].src_elt = classp = elt;
6207 }
6208
6209 if (sets[i].src_const && sets[i].src_const_elt == 0
6210 && src != sets[i].src_const
6211 && ! rtx_equal_p (sets[i].src_const, src))
6212 sets[i].src_elt = insert (sets[i].src_const, classp,
6213 sets[i].src_const_hash_code, mode);
6214 }
6215 }
6216 else if (sets[i].src_elt == 0)
6217 /* If we did not insert the source into the hash table (e.g., it was
6218 volatile), note the equivalence class for the REG_EQUAL value, if any,
6219 so that the destination goes into that class. */
6220 sets[i].src_elt = src_eqv_elt;
6221
6222 invalidate_from_clobbers (&writes_memory, x);
  /* Memory, and some registers, are invalidated by subroutine calls.  */
6224 if (GET_CODE (insn) == CALL_INSN)
6225 {
6226 static struct write_data everything = {0, 1, 1, 1};
6227 invalidate_memory (&everything);
6228 invalidate_for_call ();
6229 }
6230
6231 /* Now invalidate everything set by this instruction.
6232 If a SUBREG or other funny destination is being set,
6233 sets[i].rtl is still nonzero, so here we invalidate the reg
6234 a part of which is being set. */
6235
6236 for (i = 0; i < n_sets; i++)
6237 if (sets[i].rtl)
6238 {
6239 register rtx dest = sets[i].inner_dest;
6240
6241 /* Needed for registers to remove the register from its
6242 previous quantity's chain.
6243 Needed for memory if this is a nonvarying address, unless
6244 we have just done an invalidate_memory that covers even those. */
6245 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
6246 || (! writes_memory.all && ! cse_rtx_addr_varies_p (dest)))
6247 invalidate (dest);
6248 }
6249
6250 /* Make sure registers mentioned in destinations
6251 are safe for use in an expression to be inserted.
6252 This removes from the hash table
6253 any invalid entry that refers to one of these registers.
6254
6255 We don't care about the return value from mention_regs because
6256 we are going to hash the SET_DEST values unconditionally. */
6257
6258 for (i = 0; i < n_sets; i++)
6259 if (sets[i].rtl && GET_CODE (SET_DEST (sets[i].rtl)) != REG)
6260 mention_regs (SET_DEST (sets[i].rtl));
6261
6262 /* We may have just removed some of the src_elt's from the hash table.
6263 So replace each one with the current head of the same class. */
6264
6265 for (i = 0; i < n_sets; i++)
6266 if (sets[i].rtl)
6267 {
6268 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6269 /* If elt was removed, find current head of same class,
6270 or 0 if nothing remains of that class. */
6271 {
6272 register struct table_elt *elt = sets[i].src_elt;
6273
6274 while (elt && elt->prev_same_value)
6275 elt = elt->prev_same_value;
6276
6277 while (elt && elt->first_same_value == 0)
6278 elt = elt->next_same_value;
6279 sets[i].src_elt = elt ? elt->first_same_value : 0;
6280 }
6281 }
6282
6283 /* Now insert the destinations into their equivalence classes. */
6284
6285 for (i = 0; i < n_sets; i++)
6286 if (sets[i].rtl)
6287 {
6288 register rtx dest = SET_DEST (sets[i].rtl);
6289 register struct table_elt *elt;
6290
6291 /* Don't record value if we are not supposed to risk allocating
6292 floating-point values in registers that might be wider than
6293 memory. */
6294 if ((flag_float_store
6295 && GET_CODE (dest) == MEM
6296 && GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
6297 /* Don't record values of destinations set inside a libcall block
6298 since we might delete the libcall. Things should have been set
6299 up so we won't want to reuse such a value, but we play it safe
6300 here. */
6301 || in_libcall_block
6302 /* If we didn't put a REG_EQUAL value or a source into the hash
6303 table, there is no point is recording DEST. */
6304 || sets[i].src_elt == 0)
6305 continue;
6306
6307 /* STRICT_LOW_PART isn't part of the value BEING set,
6308 and neither is the SUBREG inside it.
6309 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6310 if (GET_CODE (dest) == STRICT_LOW_PART)
6311 dest = SUBREG_REG (XEXP (dest, 0));
6312
6313 if (GET_CODE (dest) == REG)
6314 /* Registers must also be inserted into chains for quantities. */
6315 if (insert_regs (dest, sets[i].src_elt, 1))
6316 /* If `insert_regs' changes something, the hash code must be
6317 recalculated. */
6318 sets[i].dest_hash_code = HASH (dest, GET_MODE (dest));
6319
6320 elt = insert (dest, sets[i].src_elt,
6321 sets[i].dest_hash_code, GET_MODE (dest));
6322 elt->in_memory = GET_CODE (sets[i].inner_dest) == MEM;
6323 if (elt->in_memory)
6324 {
6325 /* This implicitly assumes a whole struct
6326 need not have MEM_IN_STRUCT_P.
6327 But a whole struct is *supposed* to have MEM_IN_STRUCT_P. */
6328 elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
6329 || sets[i].inner_dest != SET_DEST (sets[i].rtl));
6330 }
6331
fc3ffe83
RK
6332 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6333 narrower than M2, and both M1 and M2 are the same number of words,
6334 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6335 make that equivalence as well.
7afe21cc
RK
6336
6337 However, BAR may have equivalences for which gen_lowpart_if_possible
6338 will produce a simpler value than gen_lowpart_if_possible applied to
6339 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6340 BAR's equivalences. If we don't get a simplified form, make
6341 the SUBREG. It will not be used in an equivalence, but will
6342 cause two similar assignments to be detected.
6343
6344 Note the loop below will find SUBREG_REG (DEST) since we have
6345 already entered SRC and DEST of the SET in the table. */
6346
6347 if (GET_CODE (dest) == SUBREG
fc3ffe83
RK
6348 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) / UNITS_PER_WORD
6349 == GET_MODE_SIZE (GET_MODE (dest)) / UNITS_PER_WORD)
7afe21cc
RK
6350 && (GET_MODE_SIZE (GET_MODE (dest))
6351 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6352 && sets[i].src_elt != 0)
6353 {
6354 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6355 struct table_elt *elt, *classp = 0;
6356
6357 for (elt = sets[i].src_elt->first_same_value; elt;
6358 elt = elt->next_same_value)
6359 {
6360 rtx new_src = 0;
6361 int src_hash;
6362 struct table_elt *src_elt;
6363
6364 /* Ignore invalid entries. */
6365 if (GET_CODE (elt->exp) != REG
6366 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6367 continue;
6368
6369 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
6370 if (new_src == 0)
6371 new_src = gen_rtx (SUBREG, new_mode, elt->exp, 0);
6372
6373 src_hash = HASH (new_src, new_mode);
6374 src_elt = lookup (new_src, src_hash, new_mode);
6375
		  /* Put the new source in the hash table if it isn't
		     already there.  */
6378 if (src_elt == 0)
6379 {
6380 if (insert_regs (new_src, classp, 0))
6381 src_hash = HASH (new_src, new_mode);
6382 src_elt = insert (new_src, classp, src_hash, new_mode);
6383 src_elt->in_memory = elt->in_memory;
6384 src_elt->in_struct = elt->in_struct;
6385 }
6386 else if (classp && classp != src_elt->first_same_value)
6387 /* Show that two things that we've seen before are
6388 actually the same. */
6389 merge_equiv_classes (src_elt, classp);
6390
6391 classp = src_elt->first_same_value;
6392 }
6393 }
6394 }
6395
6396 /* Special handling for (set REG0 REG1)
6397 where REG0 is the "cheapest", cheaper than REG1.
6398 After cse, REG1 will probably not be used in the sequel,
6399 so (if easily done) change this insn to (set REG1 REG0) and
6400 replace REG1 with REG0 in the previous insn that computed their value.
6401 Then REG1 will become a dead store and won't cloud the situation
6402 for later optimizations.
6403
6404 Do not make this change if REG1 is a hard register, because it will
6405 then be used in the sequel and we may be changing a two-operand insn
6406 into a three-operand insn.
6407
6408 Also do not do this if we are operating on a copy of INSN. */
6409
6410 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6411 && NEXT_INSN (PREV_INSN (insn)) == insn
6412 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6413 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6414 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
6415 && (qty_first_reg[reg_qty[REGNO (SET_SRC (sets[0].rtl))]]
6416 == REGNO (SET_DEST (sets[0].rtl))))
6417 {
6418 rtx prev = PREV_INSN (insn);
6419 while (prev && GET_CODE (prev) == NOTE)
6420 prev = PREV_INSN (prev);
6421
6422 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
6423 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
6424 {
6425 rtx dest = SET_DEST (sets[0].rtl);
6426 rtx note = find_reg_note (prev, REG_EQUIV, 0);
6427
6428 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
6429 validate_change (insn, & SET_DEST (sets[0].rtl),
6430 SET_SRC (sets[0].rtl), 1);
6431 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
6432 apply_change_group ();
6433
6434 /* If REG1 was equivalent to a constant, REG0 is not. */
6435 if (note)
6436 PUT_REG_NOTE_KIND (note, REG_EQUAL);
6437
6438 /* If there was a REG_WAS_0 note on PREV, remove it. Move
6439 any REG_WAS_0 note on INSN to PREV. */
6440 note = find_reg_note (prev, REG_WAS_0, 0);
6441 if (note)
6442 remove_note (prev, note);
6443
6444 note = find_reg_note (insn, REG_WAS_0, 0);
6445 if (note)
6446 {
6447 remove_note (insn, note);
6448 XEXP (note, 1) = REG_NOTES (prev);
6449 REG_NOTES (prev) = note;
6450 }
6451 }
6452 }
6453
6454 /* If this is a conditional jump insn, record any known equivalences due to
6455 the condition being tested. */
6456
6457 last_jump_equiv_class = 0;
6458 if (GET_CODE (insn) == JUMP_INSN
6459 && n_sets == 1 && GET_CODE (x) == SET
6460 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6461 record_jump_equiv (insn, 0);
6462
6463#ifdef HAVE_cc0
6464 /* If the previous insn set CC0 and this insn no longer references CC0,
6465 delete the previous insn. Here we use the fact that nothing expects CC0
6466 to be valid over an insn, which is true until the final pass. */
6467 if (prev_insn && GET_CODE (prev_insn) == INSN
6468 && (tem = single_set (prev_insn)) != 0
6469 && SET_DEST (tem) == cc0_rtx
6470 && ! reg_mentioned_p (cc0_rtx, x))
6471 {
6472 PUT_CODE (prev_insn, NOTE);
6473 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
6474 NOTE_SOURCE_FILE (prev_insn) = 0;
6475 }
6476
6477 prev_insn_cc0 = this_insn_cc0;
6478 prev_insn_cc0_mode = this_insn_cc0_mode;
6479#endif
6480
6481 prev_insn = insn;
6482}
6483\f
/* Store 1 in *WRITES_PTR for those categories of memory ref
   that must be invalidated when the expression WRITTEN is stored in.
   If WRITTEN is null, say everything must be invalidated.

   WRITTEN is the destination of a store (or 0); WRITES_PTR is the
   per-insn struct write_data being accumulated by the caller.  */

static void
note_mem_written (written, writes_ptr)
     rtx written;
     struct write_data *writes_ptr;
{
  /* The most conservative answer: invalidate every category of memory.
     NOTE(review): the {0, 1, 1, 1} initializer is assumed to match the
     field order of struct write_data (declared elsewhere in this file);
     the same initializer is used by the other routines below.  */
  static struct write_data everything = {0, 1, 1, 1};

  if (written == 0)
    *writes_ptr = everything;
  else if (GET_CODE (written) == MEM)
    {
      /* Pushing or popping the stack invalidates just the stack pointer. */
      rtx addr = XEXP (written, 0);
      if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
	   || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
	  && GET_CODE (XEXP (addr, 0)) == REG
	  && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
	{
	  /* Only the stack pointer is affected; skip the ->var setting
	     below, which applies to ordinary variable stores.  */
	  writes_ptr->sp = 1;
	  return;
	}
      else if (GET_MODE (written) == BLKmode)
	/* A store of unknown (block) size may overlap anything.  */
	*writes_ptr = everything;
      else if (cse_rtx_addr_varies_p (written))
	{
	  /* A varying address that is a sum indicates an array element,
	     and that's just as good as a structure element
	     in implying that we need not invalidate scalar variables. */
	  if (!(MEM_IN_STRUCT_P (written)
		|| GET_CODE (XEXP (written, 0)) == PLUS))
	    writes_ptr->all = 1;
	  writes_ptr->nonscalar = 1;
	}
      /* Every MEM store except a pure stack push/pop (handled above)
	 can clobber a variable.  */
      writes_ptr->var = 1;
    }
}
6524
6525/* Perform invalidation on the basis of everything about an insn
6526 except for invalidating the actual places that are SET in it.
6527 This includes the places CLOBBERed, and anything that might
6528 alias with something that is SET or CLOBBERed.
6529
6530 W points to the writes_memory for this insn, a struct write_data
6531 saying which kinds of memory references must be invalidated.
6532 X is the pattern of the insn. */
6533
6534static void
6535invalidate_from_clobbers (w, x)
6536 struct write_data *w;
6537 rtx x;
6538{
6539 /* If W->var is not set, W specifies no action.
6540 If W->all is set, this step gets all memory refs
6541 so they can be ignored in the rest of this function. */
6542 if (w->var)
6543 invalidate_memory (w);
6544
6545 if (w->sp)
6546 {
6547 if (reg_tick[STACK_POINTER_REGNUM] >= 0)
6548 reg_tick[STACK_POINTER_REGNUM]++;
6549
6550 /* This should be *very* rare. */
6551 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6552 invalidate (stack_pointer_rtx);
6553 }
6554
6555 if (GET_CODE (x) == CLOBBER)
6556 {
6557 rtx ref = XEXP (x, 0);
6558 if (ref
6559 && (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6560 || (GET_CODE (ref) == MEM && ! w->all)))
6561 invalidate (ref);
6562 }
6563 else if (GET_CODE (x) == PARALLEL)
6564 {
6565 register int i;
6566 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6567 {
6568 register rtx y = XVECEXP (x, 0, i);
6569 if (GET_CODE (y) == CLOBBER)
6570 {
6571 rtx ref = XEXP (y, 0);
6572 if (ref
6573 &&(GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6574 || (GET_CODE (ref) == MEM && !w->all)))
6575 invalidate (ref);
6576 }
6577 }
6578 }
6579}
6580\f
6581/* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6582 and replace any registers in them with either an equivalent constant
6583 or the canonical form of the register. If we are inside an address,
6584 only do this if the address remains valid.
6585
6586 OBJECT is 0 except when within a MEM in which case it is the MEM.
6587
6588 Return the replacement for X. */
6589
6590static rtx
6591cse_process_notes (x, object)
6592 rtx x;
6593 rtx object;
6594{
6595 enum rtx_code code = GET_CODE (x);
6596 char *fmt = GET_RTX_FORMAT (code);
6597 int qty;
6598 int i;
6599
6600 switch (code)
6601 {
6602 case CONST_INT:
6603 case CONST:
6604 case SYMBOL_REF:
6605 case LABEL_REF:
6606 case CONST_DOUBLE:
6607 case PC:
6608 case CC0:
6609 case LO_SUM:
6610 return x;
6611
6612 case MEM:
6613 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
6614 return x;
6615
6616 case EXPR_LIST:
6617 case INSN_LIST:
6618 if (REG_NOTE_KIND (x) == REG_EQUAL)
6619 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), 0);
6620 if (XEXP (x, 1))
6621 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), 0);
6622 return x;
6623
6624 case REG:
6625 i = reg_qty[REGNO (x)];
6626
6627 /* Return a constant or a constant register. */
6628 if (REGNO_QTY_VALID_P (REGNO (x))
6629 && qty_const[i] != 0
6630 && (CONSTANT_P (qty_const[i])
6631 || GET_CODE (qty_const[i]) == REG))
6632 {
6633 rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
6634 if (new)
6635 return new;
6636 }
6637
6638 /* Otherwise, canonicalize this register. */
6639 return canon_reg (x, 0);
6640 }
6641
6642 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6643 if (fmt[i] == 'e')
6644 validate_change (object, &XEXP (x, i),
6645 cse_process_notes (XEXP (x, i), object), 0);
6646
6647 return x;
6648}
6649\f
/* Find common subexpressions between the end test of a loop and the beginning
   of the loop.  LOOP_START is the CODE_LABEL at the start of a loop.

   Often we have a loop where an expression in the exit test is used
   in the body of the loop.  For example "while (*p) *q++ = *p++;".
   Because of the way we duplicate the loop exit test in front of the loop,
   however, we don't detect that common subexpression.  This will be caught
   when global cse is implemented, but this is a quite common case.

   This function handles the most common cases of these common expressions.
   It is called after we have processed the basic block ending with the
   NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
   jumps to a label used only once.  */

static void
cse_around_loop (loop_start)
     rtx loop_start;
{
  rtx insn;
  int i;
  struct table_elt *p;

  /* If the jump at the end of the loop doesn't go to the start, we don't
     do anything.  Scan back over line-number NOTEs (NOTE_LINE_NUMBER >= 0)
     immediately before LOOP_START; a NOTE_INSN_LOOP_BEG must be there.  */
  for (insn = PREV_INSN (loop_start);
       insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
       insn = PREV_INSN (insn))
    ;

  if (insn == 0
      || GET_CODE (insn) != NOTE
      || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
    return;

  /* If the last insn of the loop (the end test) was an NE comparison,
     we will interpret it as an EQ comparison, since we fell through
     the loop.  Any equivalences resulting from that comparison are
     therefore not valid and must be invalidated.  */
  if (last_jump_equiv_class)
    for (p = last_jump_equiv_class->first_same_value; p;
	 p = p->next_same_value)
      if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
	  || GET_CODE (p->exp) == SUBREG)
	invalidate (p->exp);

  /* Process insns starting after LOOP_START until we hit a CALL_INSN or
     a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).

     The only thing we do with SET_DEST is invalidate entries, so we
     can safely process each SET in order.  It is slightly less efficient
     to do so, but we only want to handle the most common cases.  */

  for (insn = NEXT_INSN (loop_start);
       GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
       && ! (GET_CODE (insn) == NOTE
	     && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
       insn = NEXT_INSN (insn))
    {
      /* A single SET or CLOBBER pattern is handled directly; a PARALLEL
	 has each of its SET/CLOBBER elements handled one at a time.  */
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	  && (GET_CODE (PATTERN (insn)) == SET
	      || GET_CODE (PATTERN (insn)) == CLOBBER))
	cse_set_around_loop (PATTERN (insn), insn, loop_start);
      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	       && GET_CODE (PATTERN (insn)) == PARALLEL)
	for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	  if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
	      || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
	    cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
				 loop_start);
    }
}
6721\f
8b3686ed
RK
6722/* Variable used for communications between the next two routines. */
6723
6724static struct write_data skipped_writes_memory;
6725
6726/* Process one SET of an insn that was skipped. We ignore CLOBBERs
6727 since they are done elsewhere. This function is called via note_stores. */
6728
6729static void
6730invalidate_skipped_set (dest, set)
6731 rtx set;
6732 rtx dest;
6733{
6734 if (GET_CODE (set) == CLOBBER
6735#ifdef HAVE_cc0
6736 || dest == cc0_rtx
6737#endif
6738 || dest == pc_rtx)
6739 return;
6740
6741 if (GET_CODE (dest) == MEM)
6742 note_mem_written (dest, &skipped_writes_memory);
6743
6744 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
6745 || (! skipped_writes_memory.all && ! cse_rtx_addr_varies_p (dest)))
6746 invalidate (dest);
6747}
6748
6749/* Invalidate all insns from START up to the end of the function or the
6750 next label. This called when we wish to CSE around a block that is
6751 conditionally executed. */
6752
6753static void
6754invalidate_skipped_block (start)
6755 rtx start;
6756{
6757 rtx insn;
6758 int i;
6759 static struct write_data init = {0, 0, 0, 0};
6760 static struct write_data everything = {0, 1, 1, 1};
6761
6762 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6763 insn = NEXT_INSN (insn))
6764 {
6765 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
6766 continue;
6767
6768 skipped_writes_memory = init;
6769
6770 if (GET_CODE (insn) == CALL_INSN)
6771 {
6772 invalidate_for_call ();
6773 skipped_writes_memory = everything;
6774 }
6775
6776 note_stores (PATTERN (insn), invalidate_skipped_set);
6777 invalidate_from_clobbers (&skipped_writes_memory, PATTERN (insn));
6778 }
6779}
6780\f
7afe21cc
RK
6781/* Used for communication between the following two routines; contains a
6782 value to be checked for modification. */
6783
6784static rtx cse_check_loop_start_value;
6785
6786/* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
6787 indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0. */
6788
6789static void
6790cse_check_loop_start (x, set)
6791 rtx x;
6792 rtx set;
6793{
6794 if (cse_check_loop_start_value == 0
6795 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6796 return;
6797
6798 if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
6799 || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
6800 cse_check_loop_start_value = 0;
6801}
6802
6803/* X is a SET or CLOBBER contained in INSN that was found near the start of
6804 a loop that starts with the label at LOOP_START.
6805
6806 If X is a SET, we see if its SET_SRC is currently in our hash table.
6807 If so, we see if it has a value equal to some register used only in the
6808 loop exit code (as marked by jump.c).
6809
6810 If those two conditions are true, we search backwards from the start of
6811 the loop to see if that same value was loaded into a register that still
6812 retains its value at the start of the loop.
6813
6814 If so, we insert an insn after the load to copy the destination of that
6815 load into the equivalent register and (try to) replace our SET_SRC with that
6816 register.
6817
6818 In any event, we invalidate whatever this SET or CLOBBER modifies. */
6819
6820static void
6821cse_set_around_loop (x, insn, loop_start)
6822 rtx x;
6823 rtx insn;
6824 rtx loop_start;
6825{
6826 rtx p;
6827 struct table_elt *src_elt;
6828 static struct write_data init = {0, 0, 0, 0};
6829 struct write_data writes_memory;
6830
6831 writes_memory = init;
6832
6833 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6834 are setting PC or CC0 or whose SET_SRC is already a register. */
6835 if (GET_CODE (x) == SET
6836 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6837 && GET_CODE (SET_SRC (x)) != REG)
6838 {
6839 src_elt = lookup (SET_SRC (x),
6840 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6841 GET_MODE (SET_DEST (x)));
6842
6843 if (src_elt)
6844 for (src_elt = src_elt->first_same_value; src_elt;
6845 src_elt = src_elt->next_same_value)
6846 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6847 && COST (src_elt->exp) < COST (SET_SRC (x)))
6848 {
6849 rtx p, set;
6850
6851 /* Look for an insn in front of LOOP_START that sets
6852 something in the desired mode to SET_SRC (x) before we hit
6853 a label or CALL_INSN. */
6854
6855 for (p = prev_nonnote_insn (loop_start);
6856 p && GET_CODE (p) != CALL_INSN
6857 && GET_CODE (p) != CODE_LABEL;
6858 p = prev_nonnote_insn (p))
6859 if ((set = single_set (p)) != 0
6860 && GET_CODE (SET_DEST (set)) == REG
6861 && GET_MODE (SET_DEST (set)) == src_elt->mode
6862 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6863 {
6864 /* We now have to ensure that nothing between P
6865 and LOOP_START modified anything referenced in
6866 SET_SRC (x). We know that nothing within the loop
6867 can modify it, or we would have invalidated it in
6868 the hash table. */
6869 rtx q;
6870
6871 cse_check_loop_start_value = SET_SRC (x);
6872 for (q = p; q != loop_start; q = NEXT_INSN (q))
6873 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
6874 note_stores (PATTERN (q), cse_check_loop_start);
6875
6876 /* If nothing was changed and we can replace our
6877 SET_SRC, add an insn after P to copy its destination
6878 to what we will be replacing SET_SRC with. */
6879 if (cse_check_loop_start_value
6880 && validate_change (insn, &SET_SRC (x),
6881 src_elt->exp, 0))
6882 emit_insn_after (gen_move_insn (src_elt->exp,
6883 SET_DEST (set)),
6884 p);
6885 break;
6886 }
6887 }
6888 }
6889
6890 /* Now invalidate anything modified by X. */
6891 note_mem_written (SET_DEST (x), &writes_memory);
6892
6893 if (writes_memory.var)
6894 invalidate_memory (&writes_memory);
6895
6896 /* See comment on similar code in cse_insn for explanation of these tests. */
6897 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6898 || (GET_CODE (SET_DEST (x)) == MEM && ! writes_memory.all
6899 && ! cse_rtx_addr_varies_p (SET_DEST (x))))
6900 invalidate (SET_DEST (x));
6901}
6902\f
6903/* Find the end of INSN's basic block and return its range,
6904 the total number of SETs in all the insns of the block, the last insn of the
6905 block, and the branch path.
6906
6907 The branch path indicates which branches should be followed. If a non-zero
6908 path size is specified, the block should be rescanned and a different set
6909 of branches will be taken. The branch path is only used if
   FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
6911
6912 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6913 used to describe the block. It is filled in with the information about
6914 the current block. The incoming structure's branch path, if any, is used
6915 to construct the output branch path. */
6916
6917/* Define maximum length of a branch path. */
6918
6919#define PATHLENGTH 20
6920
6921struct cse_basic_block_data {
6922 /* Lowest CUID value of insns in block. */
6923 int low_cuid;
6924 /* Highest CUID value of insns in block. */
6925 int high_cuid;
6926 /* Total number of SETs in block. */
6927 int nsets;
6928 /* Last insn in the block. */
6929 rtx last;
6930 /* Size of current branch path, if any. */
6931 int path_size;
6932 /* Current branch path, indicating which branches will be taken. */
6933 struct branch_path {
6934 /* The branch insn. */
6935 rtx branch;
8b3686ed
RK
6936 /* Whether it should be taken or not. AROUND is the same as taken
6937 except that it is used when the destination label is not preceded
6938 by a BARRIER. */
6939 enum taken {TAKEN, NOT_TAKEN, AROUND} status;
7afe21cc
RK
6940 } path[PATHLENGTH];
6941};
6942
6943void
8b3686ed 6944cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
7afe21cc
RK
6945 rtx insn;
6946 struct cse_basic_block_data *data;
6947 int follow_jumps;
6948 int after_loop;
8b3686ed 6949 int skip_blocks;
7afe21cc
RK
6950{
6951 rtx p = insn, q;
6952 int nsets = 0;
6953 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
fc3ffe83 6954 rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
7afe21cc
RK
6955 int path_size = data->path_size;
6956 int path_entry = 0;
6957 int i;
6958
6959 /* Update the previous branch path, if any. If the last branch was
6960 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
6961 shorten the path by one and look at the previous branch. We know that
6962 at least one branch must have been taken if PATH_SIZE is non-zero. */
6963 while (path_size > 0)
6964 {
8b3686ed 6965 if (data->path[path_size - 1].status != NOT_TAKEN)
7afe21cc
RK
6966 {
6967 data->path[path_size - 1].status = NOT_TAKEN;
6968 break;
6969 }
6970 else
6971 path_size--;
6972 }
6973
6974 /* Scan to end of this basic block. */
6975 while (p && GET_CODE (p) != CODE_LABEL)
6976 {
6977 /* Don't cse out the end of a loop. This makes a difference
6978 only for the unusual loops that always execute at least once;
6979 all other loops have labels there so we will stop in any case.
6980 Cse'ing out the end of the loop is dangerous because it
6981 might cause an invariant expression inside the loop
6982 to be reused after the end of the loop. This would make it
6983 hard to move the expression out of the loop in loop.c,
6984 especially if it is one of several equivalent expressions
6985 and loop.c would like to eliminate it.
6986
6987 If we are running after loop.c has finished, we can ignore
6988 the NOTE_INSN_LOOP_END. */
6989
6990 if (! after_loop && GET_CODE (p) == NOTE
6991 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6992 break;
6993
6994 /* Don't cse over a call to setjmp; on some machines (eg vax)
6995 the regs restored by the longjmp come from
6996 a later time than the setjmp. */
6997 if (GET_CODE (p) == NOTE
6998 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
6999 break;
7000
7001 /* A PARALLEL can have lots of SETs in it,
7002 especially if it is really an ASM_OPERANDS. */
7003 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
7004 && GET_CODE (PATTERN (p)) == PARALLEL)
7005 nsets += XVECLEN (PATTERN (p), 0);
7006 else if (GET_CODE (p) != NOTE)
7007 nsets += 1;
7008
7009 if (INSN_CUID (p) > high_cuid)
8b3686ed 7010 high_cuid = INSN_CUID (p);
7afe21cc 7011 if (INSN_CUID (p) < low_cuid)
8b3686ed 7012 low_cuid = INSN_CUID(p);
7afe21cc
RK
7013
7014 /* See if this insn is in our branch path. If it is and we are to
7015 take it, do so. */
7016 if (path_entry < path_size && data->path[path_entry].branch == p)
7017 {
8b3686ed 7018 if (data->path[path_entry].status != NOT_TAKEN)
7afe21cc
RK
7019 p = JUMP_LABEL (p);
7020
7021 /* Point to next entry in path, if any. */
7022 path_entry++;
7023 }
7024
7025 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
7026 was specified, we haven't reached our maximum path length, there are
7027 insns following the target of the jump, this is the only use of the
8b3686ed
RK
7028 jump label, and the target label is preceded by a BARRIER.
7029
7030 Alternatively, we can follow the jump if it branches around a
7031 block of code and there are no other branches into the block.
7032 In this case invalidate_skipped_block will be called to invalidate any
7033 registers set in the block when following the jump. */
7034
7035 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
7afe21cc
RK
7036 && GET_CODE (p) == JUMP_INSN
7037 && GET_CODE (PATTERN (p)) == SET
7038 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
7039 && LABEL_NUSES (JUMP_LABEL (p)) == 1
7040 && NEXT_INSN (JUMP_LABEL (p)) != 0)
7041 {
7042 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
7043 if ((GET_CODE (q) != NOTE
7044 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
7045 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
7046 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
7047 break;
7048
7049 /* If we ran into a BARRIER, this code is an extension of the
7050 basic block when the branch is taken. */
8b3686ed 7051 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
7afe21cc
RK
7052 {
7053 /* Don't allow ourself to keep walking around an
7054 always-executed loop. */
fc3ffe83
RK
7055 if (next_real_insn (q) == next)
7056 {
7057 p = NEXT_INSN (p);
7058 continue;
7059 }
7afe21cc
RK
7060
7061 /* Similarly, don't put a branch in our path more than once. */
7062 for (i = 0; i < path_entry; i++)
7063 if (data->path[i].branch == p)
7064 break;
7065
7066 if (i != path_entry)
7067 break;
7068
7069 data->path[path_entry].branch = p;
7070 data->path[path_entry++].status = TAKEN;
7071
7072 /* This branch now ends our path. It was possible that we
7073 didn't see this branch the last time around (when the
7074 insn in front of the target was a JUMP_INSN that was
7075 turned into a no-op). */
7076 path_size = path_entry;
7077
7078 p = JUMP_LABEL (p);
7079 /* Mark block so we won't scan it again later. */
7080 PUT_MODE (NEXT_INSN (p), QImode);
7081 }
8b3686ed
RK
7082 /* Detect a branch around a block of code. */
7083 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
7084 {
7085 register rtx tmp;
7086
fc3ffe83
RK
7087 if (next_real_insn (q) == next)
7088 {
7089 p = NEXT_INSN (p);
7090 continue;
7091 }
8b3686ed
RK
7092
7093 for (i = 0; i < path_entry; i++)
7094 if (data->path[i].branch == p)
7095 break;
7096
7097 if (i != path_entry)
7098 break;
7099
7100 /* This is no_labels_between_p (p, q) with an added check for
7101 reaching the end of a function (in case Q precedes P). */
7102 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
7103 if (GET_CODE (tmp) == CODE_LABEL)
7104 break;
7105
7106 if (tmp == q)
7107 {
7108 data->path[path_entry].branch = p;
7109 data->path[path_entry++].status = AROUND;
7110
7111 path_size = path_entry;
7112
7113 p = JUMP_LABEL (p);
7114 /* Mark block so we won't scan it again later. */
7115 PUT_MODE (NEXT_INSN (p), QImode);
7116 }
7117 }
7afe21cc 7118 }
7afe21cc
RK
7119 p = NEXT_INSN (p);
7120 }
7121
7122 data->low_cuid = low_cuid;
7123 data->high_cuid = high_cuid;
7124 data->nsets = nsets;
7125 data->last = p;
7126
7127 /* If all jumps in the path are not taken, set our path length to zero
7128 so a rescan won't be done. */
7129 for (i = path_size - 1; i >= 0; i--)
8b3686ed 7130 if (data->path[i].status != NOT_TAKEN)
7afe21cc
RK
7131 break;
7132
7133 if (i == -1)
7134 data->path_size = 0;
7135 else
7136 data->path_size = path_size;
7137
7138 /* End the current branch path. */
7139 data->path[path_size].branch = 0;
7140}
7141\f
7142static rtx cse_basic_block ();
7143
7144/* Perform cse on the instructions of a function.
7145 F is the first instruction.
7146 NREGS is one plus the highest pseudo-reg number used in the instruction.
7147
7148 AFTER_LOOP is 1 if this is the cse call done after loop optimization
7149 (only if -frerun-cse-after-loop).
7150
7151 Returns 1 if jump_optimize should be redone due to simplifications
7152 in conditional jump instructions. */
7153
7154int
7155cse_main (f, nregs, after_loop, file)
7156 rtx f;
7157 int nregs;
7158 int after_loop;
7159 FILE *file;
7160{
7161 struct cse_basic_block_data val;
7162 register rtx insn = f;
7163 register int i;
7164
7165 cse_jumps_altered = 0;
7166 constant_pool_entries_cost = 0;
7167 val.path_size = 0;
7168
7169 init_recog ();
7170
7171 max_reg = nregs;
7172
 /* Per-register scratch tables for this pass.  They are alloca'd, so the
    storage vanishes when cse_main returns and nothing below frees them.  */
7173 all_minus_one = (int *) alloca (nregs * sizeof (int));
7174 consec_ints = (int *) alloca (nregs * sizeof (int));
7175
7176 for (i = 0; i < nregs; i++)
7177 {
7178 all_minus_one[i] = -1;
7179 consec_ints[i] = i;
7180 }
7181
7182 reg_next_eqv = (int *) alloca (nregs * sizeof (int));
7183 reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
7184 reg_qty = (int *) alloca (nregs * sizeof (int));
7185 reg_in_table = (int *) alloca (nregs * sizeof (int));
7186 reg_tick = (int *) alloca (nregs * sizeof (int));
7187
7188 /* Discard all the free elements of the previous function
7189 since they are allocated in the temporarily obstack. */
7190 bzero (table, sizeof table);
7191 free_element_chain = 0;
7192 n_elements_made = 0;
7193
7194 /* Find the largest uid. */
7195
7196 i = get_max_uid ();
7197 uid_cuid = (short *) alloca ((i + 1) * sizeof (short));
7198 bzero (uid_cuid, (i + 1) * sizeof (short));
7199
7200 /* Compute the mapping from uids to cuids.
7201 CUIDs are numbers assigned to insns, like uids,
7202 except that cuids increase monotonically through the code.
7203 Don't assign cuids to line-number NOTEs, so that the distance in cuids
7204 between two insns is not affected by -g. */
7205
7206 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
7207 {
7208 if (GET_CODE (insn) != NOTE
7209 || NOTE_LINE_NUMBER (insn) < 0)
7210 INSN_CUID (insn) = ++i;
7211 else
7212 /* Give a line number note the same cuid as preceding insn. */
7213 INSN_CUID (insn) = i;
7214 }
7215
7216 /* Initialize which registers are clobbered by calls. */
7217
7218 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
7219
7220 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7221 if ((call_used_regs[i]
7222 /* Used to check !fixed_regs[i] here, but that isn't safe;
7223 fixed regs are still call-clobbered, and sched can get
7224 confused if they can "live across calls".
7225
7226 The frame pointer is always preserved across calls. The arg
7227 pointer is if it is fixed. The stack pointer usually is, unless
7228 RETURN_POPS_ARGS, in which case an explicit CLOBBER
7229 will be present. If we are generating PIC code, the PIC offset
7230 table register is preserved across calls. */
7231
7232 && i != STACK_POINTER_REGNUM
7233 && i != FRAME_POINTER_REGNUM
7234#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
7235 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
7236#endif
7237#ifdef PIC_OFFSET_TABLE_REGNUM
7238 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
7239#endif
7240 )
7241 || global_regs[i])
7242 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
7243
7244 /* Loop over basic blocks.
7245 Compute the maximum number of qty's needed for each basic block
7246 (which is 2 for each SET). */
7247 insn = f;
7248 while (insn)
7249 {
8b3686ed
RK
7250 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
7251 flag_cse_skip_blocks);
7afe21cc
RK
7252
7253 /* If this basic block was already processed or has no sets, skip it. */
7254 if (val.nsets == 0 || GET_MODE (insn) == QImode)
7255 {
 /* VOIDmode clears the QImode mark that cse_end_of_basic_block uses to
    flag a block already scanned via a followed branch.  */
7256 PUT_MODE (insn, VOIDmode);
7257 insn = (val.last ? NEXT_INSN (val.last) : 0);
7258 val.path_size = 0;
7259 continue;
7260 }
7261
7262 cse_basic_block_start = val.low_cuid;
7263 cse_basic_block_end = val.high_cuid;
7264 max_qty = val.nsets * 2;
7265
7266 if (file)
7267 fprintf (file, ";; Processing block from %d to %d, %d sets.\n",
7268 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7269 val.nsets);
7270
7271 /* Make MAX_QTY bigger to give us room to optimize
7272 past the end of this basic block, if that should prove useful. */
7273 if (max_qty < 500)
7274 max_qty = 500;
7275
7276 max_qty += max_reg;
7277
7278 /* If this basic block is being extended by following certain jumps,
7279 (see `cse_end_of_basic_block'), we reprocess the code from the start.
7280 Otherwise, we start after this basic block. */
7281 if (val.path_size > 0)
7282 cse_basic_block (insn, val.last, val.path, 0);
7283 else
7284 {
7285 int old_cse_jumps_altered = cse_jumps_altered;
7286 rtx temp;
7287
7288 /* When cse changes a conditional jump to an unconditional
7289 jump, we want to reprocess the block, since it will give
7290 us a new branch path to investigate. */
7291 cse_jumps_altered = 0;
7292 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
 /* Only advance past this block if its jump structure is unchanged,
    or if we are not following jumps anyway; otherwise leave INSN in
    place so the block is rescanned with the new branch paths.  */
8b3686ed
RK
7293 if (cse_jumps_altered == 0
7294 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7afe21cc
RK
7295 insn = temp;
7296
7297 cse_jumps_altered |= old_cse_jumps_altered;
7298 }
7299
7300#ifdef USE_C_ALLOCA
 /* With the C alloca emulation, alloca (0) reclaims storage freed by
    returning functions; do it once per block to bound memory use.  */
7301 alloca (0);
7302#endif
7303 }
7304
7305 /* Tell refers_to_mem_p that qty_const info is not available. */
7306 qty_const = 0;
7307
7308 if (max_elements_made < n_elements_made)
7309 max_elements_made = n_elements_made;
7310
7311 return cse_jumps_altered;
7312}
7313
7314/* Process a single basic block. FROM and TO and the limits of the basic
7315 block. NEXT_BRANCH points to the branch path when following jumps or
7316 a null path when not following jumps.
7317
7318 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
7319 loop. This is true when we are being called for the last time on a
7320 block and this CSE pass is before loop.c. */
7321
7322static rtx
7323cse_basic_block (from, to, next_branch, around_loop)
7324 register rtx from, to;
7325 struct branch_path *next_branch;
7326 int around_loop;
7327{
7328 register rtx insn;
7329 int to_usage = 0;
7330 int in_libcall_block = 0;
7331
7332 /* Each of these arrays is undefined before max_reg, so only allocate
7333 the space actually needed and adjust the start below. */
7334
7335 qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
7336 qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
7337 qty_mode= (enum machine_mode *) alloca ((max_qty - max_reg) * sizeof (enum machine_mode));
7338 qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
7339 qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
7340 qty_comparison_code
7341 = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
7342 qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
7343 qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
7344
 /* Bias the base pointers so the arrays can be indexed directly by
    quantity number, which starts at max_reg (see comment above).  */
7345 qty_first_reg -= max_reg;
7346 qty_last_reg -= max_reg;
7347 qty_mode -= max_reg;
7348 qty_const -= max_reg;
7349 qty_const_insn -= max_reg;
7350 qty_comparison_code -= max_reg;
7351 qty_comparison_qty -= max_reg;
7352 qty_comparison_const -= max_reg;
7353
7354 new_basic_block ();
7355
7356 /* TO might be a label. If so, protect it from being deleted. */
7357 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7358 ++LABEL_NUSES (to);
7359
7360 for (insn = from; insn != to; insn = NEXT_INSN (insn))
7361 {
7362 register enum rtx_code code;
7363
7364 /* See if this is a branch that is part of the path. If so, and it is
7365 to be taken, do so. */
7366 if (next_branch->branch == insn)
7367 {
8b3686ed
RK
7368 enum taken status = next_branch++->status;
7369 if (status != NOT_TAKEN)
7afe21cc 7370 {
 /* A TAKEN branch yields equivalence info from the jump condition;
    an AROUND branch means we skip the intervening block, so anything
    it sets must be invalidated.  */
8b3686ed
RK
7371 if (status == TAKEN)
7372 record_jump_equiv (insn, 1);
7373 else
7374 invalidate_skipped_block (NEXT_INSN (insn));
7375
7afe21cc
RK
7376 /* Set the last insn as the jump insn; it doesn't affect cc0.
7377 Then follow this branch. */
7378#ifdef HAVE_cc0
7379 prev_insn_cc0 = 0;
7380#endif
7381 prev_insn = insn;
7382 insn = JUMP_LABEL (insn);
7383 continue;
7384 }
7385 }
7386
7387 code = GET_CODE (insn);
 /* Clear the QImode "already scanned" mark as we pass each insn.  */
7388 if (GET_MODE (insn) == QImode)
7389 PUT_MODE (insn, VOIDmode);
7390
7391 if (GET_RTX_CLASS (code) == 'i')
7392 {
7393 /* Process notes first so we have all notes in canonical forms when
7394 looking for duplicate operations. */
7395
7396 if (REG_NOTES (insn))
7397 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), 0);
7398
7399 /* Track when we are inside in LIBCALL block. Inside such a block,
7400 we do not want to record destinations. The last insn of a
7401 LIBCALL block is not considered to be part of the block, since
830a38ee 7402 its destination is the result of the block and hence should be
7afe21cc
RK
7403 recorded. */
7404
7405 if (find_reg_note (insn, REG_LIBCALL, 0))
7406 in_libcall_block = 1;
7407 else if (find_reg_note (insn, REG_RETVAL, 0))
7408 in_libcall_block = 0;
7409
7410 cse_insn (insn, in_libcall_block);
7411 }
7412
7413 /* If INSN is now an unconditional jump, skip to the end of our
7414 basic block by pretending that we just did the last insn in the
7415 basic block. If we are jumping to the end of our block, show
7416 that we can have one usage of TO. */
7417
7418 if (simplejump_p (insn))
7419 {
7420 if (to == 0)
7421 return 0;
7422
7423 if (JUMP_LABEL (insn) == to)
7424 to_usage = 1;
7425
7426 insn = PREV_INSN (to);
7427 }
7428
7429 /* See if it is ok to keep on going past the label
7430 which used to end our basic block. Remember that we incremented
d45cf215 7431 the count of that label, so we decrement it here. If we made
7afe21cc
RK
7432 a jump unconditional, TO_USAGE will be one; in that case, we don't
7433 want to count the use in that jump. */
7434
7435 if (to != 0 && NEXT_INSN (insn) == to
7436 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7437 {
7438 struct cse_basic_block_data val;
7439
7440 insn = NEXT_INSN (to);
7441
7442 if (LABEL_NUSES (to) == 0)
7443 delete_insn (to);
7444
7445 /* Find the end of the following block. Note that we won't be
7446 following branches in this case. If TO was the last insn
7447 in the function, we are done. Similarly, if we deleted the
d45cf215 7448 insn after TO, it must have been because it was preceded by
7afe21cc
RK
7449 a BARRIER. In that case, we are done with this block because it
7450 has no continuation. */
7451
7452 if (insn == 0 || INSN_DELETED_P (insn))
7453 return 0;
7454
7455 to_usage = 0;
7456 val.path_size = 0;
8b3686ed 7457 cse_end_of_basic_block (insn, &val, 0, 0, 0);
7afe21cc
RK
7458
7459 /* If the tables we allocated have enough space left
7460 to handle all the SETs in the next basic block,
7461 continue through it. Otherwise, return,
7462 and that block will be scanned individually. */
7463 if (val.nsets * 2 + next_qty > max_qty)
7464 break;
7465
7466 cse_basic_block_start = val.low_cuid;
7467 cse_basic_block_end = val.high_cuid;
7468 to = val.last;
7469
7470 /* Prevent TO from being deleted if it is a label. */
7471 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7472 ++LABEL_NUSES (to);
7473
7474 /* Back up so we process the first insn in the extension. */
7475 insn = PREV_INSN (insn);
7476 }
7477 }
7478
 /* Consistency check: we must never have created more quantities than
    the tables allocated above were sized for.  */
7479 if (next_qty > max_qty)
7480 abort ();
7481
7482 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7483 the previous insn is the only insn that branches to the head of a loop,
7484 we can cse into the loop. Don't do this if we changed the jump
7485 structure of a loop unless we aren't going to be following jumps. */
7486
8b3686ed
RK
7487 if ((cse_jumps_altered == 0
7488 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7afe21cc
RK
7489 && around_loop && to != 0
7490 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7491 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
7492 && JUMP_LABEL (PREV_INSN (to)) != 0
7493 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
7494 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
7495
7496 return to ? NEXT_INSN (to) : 0;
7497}
7498\f
7499/* Count the number of times registers are used (not set) in X.
7500 COUNTS is an array in which we accumulate the count, INCR is how much
7501 we count each register usage. */
7502
7503static void
7504count_reg_usage (x, counts, incr)
7505 rtx x;
7506 int *counts;
7507 int incr;
7508{
7509 enum rtx_code code = GET_CODE (x);
7510 char *fmt;
7511 int i, j;
7512
7513 switch (code)
7514 {
7515 case REG:
7516 counts[REGNO (x)] += incr;
7517 return;
7518
7519 case PC:
7520 case CC0:
7521 case CONST:
7522 case CONST_INT:
7523 case CONST_DOUBLE:
7524 case SYMBOL_REF:
7525 case LABEL_REF:
7526 case CLOBBER:
7527 return;
7528
7529 case SET:
7530 /* Unless we are setting a REG, count everything in SET_DEST. */
7531 if (GET_CODE (SET_DEST (x)) != REG)
7532 count_reg_usage (SET_DEST (x), counts, incr);
7533 count_reg_usage (SET_SRC (x), counts, incr);
7534 return;
7535
7536 case INSN:
7537 case JUMP_INSN:
7538 case CALL_INSN:
7539 count_reg_usage (PATTERN (x), counts, incr);
7540
7541 /* Things used in a REG_EQUAL note aren't dead since loop may try to
7542 use them. */
7543
7544 if (REG_NOTES (x))
7545 count_reg_usage (REG_NOTES (x), counts, incr);
7546 return;
7547
7548 case EXPR_LIST:
7549 case INSN_LIST:
7550 if (REG_NOTE_KIND (x) == REG_EQUAL)
7551 count_reg_usage (XEXP (x, 0), counts, incr);
7552 if (XEXP (x, 1))
7553 count_reg_usage (XEXP (x, 1), counts, incr);
7554 return;
7555 }
7556
7557 fmt = GET_RTX_FORMAT (code);
7558 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7559 {
7560 if (fmt[i] == 'e')
7561 count_reg_usage (XEXP (x, i), counts, incr);
7562 else if (fmt[i] == 'E')
7563 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7564 count_reg_usage (XVECEXP (x, i, j), counts, incr);
7565 }
7566}
7567\f
7568/* Scan all the insns and delete any that are dead; i.e., they store a register
7569 that is never used or they copy a register to itself.
7570
7571 This is used to remove insns made obviously dead by cse. It improves the
7572 heuristics in loop since it won't try to move dead invariants out of loops
7573 or make givs for dead quantities. The remaining passes of the compilation
7574 are also sped up. */
7575
7576void
7577delete_dead_from_cse (insns, nreg)
7578 rtx insns;
7579 int nreg;
7580{
7581 int *counts = (int *) alloca (nreg * sizeof (int));
7582 rtx insn;
d45cf215 7583 rtx tem;
7afe21cc
RK
7584 int i;
7585
7586 /* First count the number of times each register is used. */
7587 bzero (counts, sizeof (int) * nreg);
7588 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7589 count_reg_usage (insn, counts, 1);
7590
7591 /* Go from the last insn to the first and delete insns that only set unused
7592 registers or copy a register to itself. As we delete an insn, remove
7593 usage counts for registers it uses. */
7594 for (insn = prev_real_insn (get_last_insn ());
7595 insn; insn = prev_real_insn (insn))
7596 {
7597 int live_insn = 0;
7598
7599 if (GET_CODE (PATTERN (insn)) == SET)
7600 {
7601 if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
7602 && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
7603 ;
7604
d45cf215
RS
7605#ifdef HAVE_cc0
7606 else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
7607 && ! side_effects_p (SET_SRC (PATTERN (insn)))
7608 && ((tem = next_nonnote_insn (insn)) == 0
7609 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
7610 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
7611 ;
7612#endif
7afe21cc
RK
7613 else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
7614 || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
7615 || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
7616 || side_effects_p (SET_SRC (PATTERN (insn))))
7617 live_insn = 1;
7618 }
7619 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7620 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7621 {
7622 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7623
7624 if (GET_CODE (elt) == SET)
7625 {
7626 if (GET_CODE (SET_DEST (elt)) == REG
7627 && SET_DEST (elt) == SET_SRC (elt))
7628 ;
7629
d45cf215
RS
7630#ifdef HAVE_cc0
7631 else if (GET_CODE (SET_DEST (elt)) == CC0
7632 && ! side_effects_p (SET_SRC (elt))
7633 && ((tem = next_nonnote_insn (insn)) == 0
7634 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
7635 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
7636 ;
7637#endif
7afe21cc
RK
7638 else if (GET_CODE (SET_DEST (elt)) != REG
7639 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
7640 || counts[REGNO (SET_DEST (elt))] != 0
7641 || side_effects_p (SET_SRC (elt)))
7642 live_insn = 1;
7643 }
7644 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7645 live_insn = 1;
7646 }
7647 else
7648 live_insn = 1;
7649
7650 /* If this is a dead insn, delete it and show registers in it aren't
7651 being used. If this is the last insn of a libcall sequence, don't
7652 delete it even if it is dead because we don't know how to do so
7653 here. */
7654
7655 if (! live_insn && ! find_reg_note (insn, REG_RETVAL, 0))
7656 {
7657 count_reg_usage (insn, counts, -1);
7658 PUT_CODE (insn, NOTE);
7659 NOTE_SOURCE_FILE (insn) = 0;
7660 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7661 }
7662 }
7663}