]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/cse.c
Merge in gcc2 snapshot 19980929. See gcc/ChangeLog and gcc/FSFChangeLog for
[thirdparty/gcc.git] / gcc / cse.c
1 /* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 88, 89, 92-7, 1998, 1999 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 /* stdio.h must precede rtl.h for FFS. */
24 #include "system.h"
25 #include <setjmp.h>
26
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "flags.h"
31 #include "real.h"
32 #include "insn-config.h"
33 #include "recog.h"
34 #include "expr.h"
35 #include "toplev.h"
36 #include "output.h"
37
38 /* The basic idea of common subexpression elimination is to go
39 through the code, keeping a record of expressions that would
40 have the same value at the current scan point, and replacing
41 expressions encountered with the cheapest equivalent expression.
42
43 It is too complicated to keep track of the different possibilities
44 when control paths merge; so, at each label, we forget all that is
45 known and start fresh. This can be described as processing each
46 basic block separately. Note, however, that these are not quite
47 the same as the basic blocks found by a later pass and used for
48 data flow analysis and register packing. We do not need to start fresh
49 after a conditional jump instruction if there is no label there.
50
51 We use two data structures to record the equivalent expressions:
52 a hash table for most expressions, and several vectors together
53 with "quantity numbers" to record equivalent (pseudo) registers.
54
55 The use of the special data structure for registers is desirable
56 because it is faster. It is possible because registers references
57 contain a fairly small number, the register number, taken from
58 a contiguously allocated series, and two register references are
59 identical if they have the same number. General expressions
60 do not have any such thing, so the only way to retrieve the
61 information recorded on an expression other than a register
62 is to keep it in a hash table.
63
64 Registers and "quantity numbers":
65
66 At the start of each basic block, all of the (hardware and pseudo)
67 registers used in the function are given distinct quantity
68 numbers to indicate their contents. During scan, when the code
69 copies one register into another, we copy the quantity number.
70 When a register is loaded in any other way, we allocate a new
71 quantity number to describe the value generated by this operation.
72 `reg_qty' records what quantity a register is currently thought
73 of as containing.
74
75 All real quantity numbers are greater than or equal to `max_reg'.
76 If register N has not been assigned a quantity, reg_qty[N] will equal N.
77
78 Quantity numbers below `max_reg' do not exist and none of the `qty_...'
79 variables should be referenced with an index below `max_reg'.
80
81 We also maintain a bidirectional chain of registers for each
82 quantity number. `qty_first_reg', `qty_last_reg',
83 `reg_next_eqv' and `reg_prev_eqv' hold these chains.
84
85 The first register in a chain is the one whose lifespan is least local.
86 Among equals, it is the one that was seen first.
87 We replace any equivalent register with that one.
88
89 If two registers have the same quantity number, it must be true that
90 REG expressions with `qty_mode' must be in the hash table for both
91 registers and must be in the same class.
92
93 The converse is not true. Since hard registers may be referenced in
94 any mode, two REG expressions might be equivalent in the hash table
95 but not have the same quantity number if the quantity number of one
96 of the registers is not the same mode as those expressions.
97
98 Constants and quantity numbers
99
100 When a quantity has a known constant value, that value is stored
101 in the appropriate element of qty_const. This is in addition to
102 putting the constant in the hash table as is usual for non-regs.
103
104 Whether a reg or a constant is preferred is determined by the configuration
105 macro CONST_COSTS and will often depend on the constant value. In any
106 event, expressions containing constants can be simplified, by fold_rtx.
107
108 When a quantity has a known nearly constant value (such as an address
109 of a stack slot), that value is stored in the appropriate element
110 of qty_const.
111
112 Integer constants don't have a machine mode. However, cse
113 determines the intended machine mode from the destination
114 of the instruction that moves the constant. The machine mode
115 is recorded in the hash table along with the actual RTL
116 constant expression so that different modes are kept separate.
117
118 Other expressions:
119
120 To record known equivalences among expressions in general
121 we use a hash table called `table'. It has a fixed number of buckets
122 that contain chains of `struct table_elt' elements for expressions.
123 These chains connect the elements whose expressions have the same
124 hash codes.
125
126 Other chains through the same elements connect the elements which
127 currently have equivalent values.
128
129 Register references in an expression are canonicalized before hashing
130 the expression. This is done using `reg_qty' and `qty_first_reg'.
131 The hash code of a register reference is computed using the quantity
132 number, not the register number.
133
134 When the value of an expression changes, it is necessary to remove from the
135 hash table not just that expression but all expressions whose values
136 could be different as a result.
137
138 1. If the value changing is in memory, except in special cases
139 ANYTHING referring to memory could be changed. That is because
140 nobody knows where a pointer does not point.
141 The function `invalidate_memory' removes what is necessary.
142
143 The special cases are when the address is constant or is
144 a constant plus a fixed register such as the frame pointer
145 or a static chain pointer. When such addresses are stored in,
146 we can tell exactly which other such addresses must be invalidated
147 due to overlap. `invalidate' does this.
148 All expressions that refer to non-constant
149 memory addresses are also invalidated. `invalidate_memory' does this.
150
151 2. If the value changing is a register, all expressions
152 containing references to that register, and only those,
153 must be removed.
154
155 Because searching the entire hash table for expressions that contain
156 a register is very slow, we try to figure out when it isn't necessary.
157 Precisely, this is necessary only when expressions have been
158 entered in the hash table using this register, and then the value has
159 changed, and then another expression wants to be added to refer to
160 the register's new value. This sequence of circumstances is rare
161 within any one basic block.
162
163 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
164 reg_tick[i] is incremented whenever a value is stored in register i.
165 reg_in_table[i] holds -1 if no references to register i have been
166 entered in the table; otherwise, it contains the value reg_tick[i] had
167 when the references were entered. If we want to enter a reference
168 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
169 Until we want to enter a new entry, the mere fact that the two vectors
170 don't match makes the entries be ignored if anyone tries to match them.
171
172 Registers themselves are entered in the hash table as well as in
173 the equivalent-register chains. However, the vectors `reg_tick'
174 and `reg_in_table' do not apply to expressions which are simple
175 register references. These expressions are removed from the table
176 immediately when they become invalid, and this can be done even if
177 we do not immediately search for all the expressions that refer to
178 the register.
179
180 A CLOBBER rtx in an instruction invalidates its operand for further
181 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
182 invalidates everything that resides in memory.
183
184 Related expressions:
185
186 Constant expressions that differ only by an additive integer
187 are called related. When a constant expression is put in
188 the table, the related expression with no constant term
189 is also entered. These are made to point at each other
190 so that it is possible to find out if there exists any
191 register equivalent to an expression related to a given expression. */
192
193 /* One plus largest register number used in this function. */
194
195 static int max_reg;
196
197 /* One plus largest instruction UID used in this function at time of
198 cse_main call. */
199
200 static int max_insn_uid;
201
202 /* Length of vectors indexed by quantity number.
203 We know in advance we will not need a quantity number this big. */
204
205 static int max_qty;
206
207 /* Next quantity number to be allocated.
208 This is 1 + the largest number needed so far. */
209
210 static int next_qty;
211
212 /* Indexed by quantity number, gives the first (or last) register
213 in the chain of registers that currently contain this quantity. */
214
215 static int *qty_first_reg;
216 static int *qty_last_reg;
217
218 /* Index by quantity number, gives the mode of the quantity. */
219
220 static enum machine_mode *qty_mode;
221
222 /* Indexed by quantity number, gives the rtx of the constant value of the
223 quantity, or zero if it does not have a known value.
224 A sum of the frame pointer (or arg pointer) plus a constant
225 can also be entered here. */
226
227 static rtx *qty_const;
228
229 /* Indexed by qty number, gives the insn that stored the constant value
230 recorded in `qty_const'. */
231
232 static rtx *qty_const_insn;
233
234 /* The next three variables are used to track when a comparison between a
235 quantity and some constant or register has been passed. In that case, we
236 know the results of the comparison in case we see it again. These variables
237 record a comparison that is known to be true. */
238
239 /* Indexed by qty number, gives the rtx code of a comparison with a known
240 result involving this quantity. If none, it is UNKNOWN. */
241 static enum rtx_code *qty_comparison_code;
242
243 /* Indexed by qty number, gives the constant being compared against in a
244 comparison of known result. If no such comparison, it is undefined.
245 If the comparison is not with a constant, it is zero. */
246
247 static rtx *qty_comparison_const;
248
249 /* Indexed by qty number, gives the quantity being compared against in a
250 comparison of known result. If no such comparison, if it undefined.
251 If the comparison is not with a register, it is -1. */
252
253 static int *qty_comparison_qty;
254
255 #ifdef HAVE_cc0
256 /* For machines that have a CC0, we do not record its value in the hash
257 table since its use is guaranteed to be the insn immediately following
258 its definition and any other insn is presumed to invalidate it.
259
260 Instead, we store below the value last assigned to CC0. If it should
261 happen to be a constant, it is stored in preference to the actual
262 assigned value. In case it is a constant, we store the mode in which
263 the constant should be interpreted. */
264
265 static rtx prev_insn_cc0;
266 static enum machine_mode prev_insn_cc0_mode;
267 #endif
268
269 /* Previous actual insn. 0 if at first insn of basic block. */
270
271 static rtx prev_insn;
272
273 /* Insn being scanned. */
274
275 static rtx this_insn;
276
277 /* Index by register number, gives the quantity number
278 of the register's current contents. */
279
280 static int *reg_qty;
281
282 /* Index by register number, gives the number of the next (or
283 previous) register in the chain of registers sharing the same
284 value.
285
286 Or -1 if this register is at the end of the chain.
287
288 If reg_qty[N] == N, reg_next_eqv[N] is undefined. */
289
290 static int *reg_next_eqv;
291 static int *reg_prev_eqv;
292
293 /* Index by register number, gives the number of times
294 that register has been altered in the current basic block. */
295
296 static int *reg_tick;
297
298 /* Index by register number, gives the reg_tick value at which
299 rtx's containing this register are valid in the hash table.
300 If this does not equal the current reg_tick value, such expressions
301 existing in the hash table are invalid.
302 If this is -1, no expressions containing this register have been
303 entered in the table. */
304
305 static int *reg_in_table;
306
307 /* A HARD_REG_SET containing all the hard registers for which there is
308 currently a REG expression in the hash table. Note the difference
309 from the above variables, which indicate if the REG is mentioned in some
310 expression in the table. */
311
312 static HARD_REG_SET hard_regs_in_table;
313
314 /* A HARD_REG_SET containing all the hard registers that are invalidated
315 by a CALL_INSN. */
316
317 static HARD_REG_SET regs_invalidated_by_call;
318
319 /* Two vectors of ints:
320 one containing max_reg -1's; the other max_reg + 500 (an approximation
321 for max_qty) elements where element i contains i.
322 These are used to initialize various other vectors fast. */
323
324 static int *all_minus_one;
325 static int *consec_ints;
326
327 /* CUID of insn that starts the basic block currently being cse-processed. */
328
329 static int cse_basic_block_start;
330
331 /* CUID of insn that ends the basic block currently being cse-processed. */
332
333 static int cse_basic_block_end;
334
335 /* Vector mapping INSN_UIDs to cuids.
336 The cuids are like uids but increase monotonically always.
337 We use them to see whether a reg is used outside a given basic block. */
338
339 static int *uid_cuid;
340
341 /* Highest UID in UID_CUID. */
342 static int max_uid;
343
344 /* Get the cuid of an insn. */
345
346 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
347
348 /* Nonzero if cse has altered conditional jump insns
349 in such a way that jump optimization should be redone. */
350
351 static int cse_jumps_altered;
352
353 /* Nonzero if we put a LABEL_REF into the hash table. Since we may have put
354 it into an INSN without a REG_LABEL, we have to rerun jump after CSE
355 to put in the note. */
356 static int recorded_label_ref;
357
358 /* canon_hash stores 1 in do_not_record
359 if it notices a reference to CC0, PC, or some other volatile
360 subexpression. */
361
362 static int do_not_record;
363
364 #ifdef LOAD_EXTEND_OP
365
366 /* Scratch rtl used when looking for load-extended copy of a MEM. */
367 static rtx memory_extend_rtx;
368 #endif
369
370 /* canon_hash stores 1 in hash_arg_in_memory
371 if it notices a reference to memory within the expression being hashed. */
372
373 static int hash_arg_in_memory;
374
375 /* canon_hash stores 1 in hash_arg_in_struct
376 if it notices a reference to memory that's part of a structure. */
377
378 static int hash_arg_in_struct;
379
380 /* The hash table contains buckets which are chains of `struct table_elt's,
381 each recording one expression's information.
382 That expression is in the `exp' field.
383
384 Those elements with the same hash code are chained in both directions
385 through the `next_same_hash' and `prev_same_hash' fields.
386
387 Each set of expressions with equivalent values
388 are on a two-way chain through the `next_same_value'
389 and `prev_same_value' fields, and all point with
390 the `first_same_value' field at the first element in
391 that chain. The chain is in order of increasing cost.
392 Each element's cost value is in its `cost' field.
393
394 The `in_memory' field is nonzero for elements that
395 involve any reference to memory. These elements are removed
396 whenever a write is done to an unidentified location in memory.
397 To be safe, we assume that a memory address is unidentified unless
398 the address is either a symbol constant or a constant plus
399 the frame pointer or argument pointer.
400
401 The `in_struct' field is nonzero for elements that
402 involve any reference to memory inside a structure or array.
403
404 The `related_value' field is used to connect related expressions
405 (that differ by adding an integer).
406 The related expressions are chained in a circular fashion.
407 `related_value' is zero for expressions for which this
408 chain is not useful.
409
410 The `cost' field stores the cost of this element's expression.
411
412 The `is_const' flag is set if the element is a constant (including
413 a fixed address).
414
415 The `flag' field is used as a temporary during some search routines.
416
417 The `mode' field is usually the same as GET_MODE (`exp'), but
418 if `exp' is a CONST_INT and has no machine mode then the `mode'
419 field is the mode it was being used as. Each constant is
420 recorded separately for each mode it is used with. */
421
422
423 struct table_elt
424 {
425 rtx exp;
426 struct table_elt *next_same_hash;
427 struct table_elt *prev_same_hash;
428 struct table_elt *next_same_value;
429 struct table_elt *prev_same_value;
430 struct table_elt *first_same_value;
431 struct table_elt *related_value;
432 int cost;
433 enum machine_mode mode;
434 char in_memory;
435 char in_struct;
436 char is_const;
437 char flag;
438 };
439
440 /* We don't want a lot of buckets, because we rarely have very many
441 things stored in the hash table, and a lot of buckets slows
442 down a lot of loops that happen frequently. */
443 #define NBUCKETS 31
444
445 /* Compute hash code of X in mode M. Special-case case where X is a pseudo
446 register (hard registers may require `do_not_record' to be set). */
447
448 #define HASH(X, M) \
449 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
450 ? (((unsigned) REG << 7) + (unsigned) reg_qty[REGNO (X)]) % NBUCKETS \
451 : canon_hash (X, M) % NBUCKETS)
452
453 /* Determine whether register number N is considered a fixed register for CSE.
454 It is desirable to replace other regs with fixed regs, to reduce need for
455 non-fixed hard regs.
456 A reg wins if it is either the frame pointer or designated as fixed,
457 but not if it is an overlapping register. */
458 #ifdef OVERLAPPING_REGNO_P
459 #define FIXED_REGNO_P(N) \
460 (((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
461 || fixed_regs[N] || global_regs[N]) \
462 && ! OVERLAPPING_REGNO_P ((N)))
463 #else
464 #define FIXED_REGNO_P(N) \
465 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
466 || fixed_regs[N] || global_regs[N])
467 #endif
468
469 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
470 hard registers and pointers into the frame are the cheapest with a cost
471 of 0. Next come pseudos with a cost of one and other hard registers with
472 a cost of 2. Aside from these special cases, call `rtx_cost'. */
473
474 #define CHEAP_REGNO(N) \
475 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
476 || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
477 || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
478 || ((N) < FIRST_PSEUDO_REGISTER \
479 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
480
481 /* A register is cheap if it is a user variable assigned to the register
482 or if its register number always corresponds to a cheap register. */
483
484 #define CHEAP_REG(N) \
485 ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER) \
486 || CHEAP_REGNO (REGNO (N)))
487
488 #define COST(X) \
489 (GET_CODE (X) == REG \
490 ? (CHEAP_REG (X) ? 0 \
491 : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1 \
492 : 2) \
493 : notreg_cost(X))
494
495 /* Determine if the quantity number for register X represents a valid index
496 into the `qty_...' variables. */
497
498 #define REGNO_QTY_VALID_P(N) (reg_qty[N] != (N))
499
500 #ifdef ADDRESS_COST
501 /* The ADDRESS_COST macro does not deal with ADDRESSOF nodes. But,
502 during CSE, such nodes are present. Using an ADDRESSOF node which
503 refers to the address of a REG is a good thing because we can then
504 turn (MEM (ADDRESSSOF (REG))) into just plain REG. */
505 #define CSE_ADDRESS_COST(RTX) \
506 ((GET_CODE (RTX) == ADDRESSOF && REG_P (XEXP ((RTX), 0))) \
507 ? -1 : ADDRESS_COST(RTX))
508 #endif
509
510 static struct table_elt *table[NBUCKETS];
511
512 /* Chain of `struct table_elt's made so far for this function
513 but currently removed from the table. */
514
515 static struct table_elt *free_element_chain;
516
517 /* Number of `struct table_elt' structures made so far for this function. */
518
519 static int n_elements_made;
520
521 /* Maximum value `n_elements_made' has had so far in this compilation
522 for functions previously processed. */
523
524 static int max_elements_made;
525
526 /* Surviving equivalence class when two equivalence classes are merged
527 by recording the effects of a jump in the last insn. Zero if the
528 last insn was not a conditional jump. */
529
530 static struct table_elt *last_jump_equiv_class;
531
532 /* Set to the cost of a constant pool reference if one was found for a
533 symbolic constant. If this was found, it means we should try to
534 convert constants into constant pool entries if they don't fit in
535 the insn. */
536
537 static int constant_pool_entries_cost;
538
539 /* Define maximum length of a branch path. */
540
541 #define PATHLENGTH 10
542
543 /* This data describes a block that will be processed by cse_basic_block. */
544
545 struct cse_basic_block_data {
546 /* Lowest CUID value of insns in block. */
547 int low_cuid;
548 /* Highest CUID value of insns in block. */
549 int high_cuid;
550 /* Total number of SETs in block. */
551 int nsets;
552 /* Last insn in the block. */
553 rtx last;
554 /* Size of current branch path, if any. */
555 int path_size;
556 /* Current branch path, indicating which branches will be taken. */
557 struct branch_path {
558 /* The branch insn. */
559 rtx branch;
560 /* Whether it should be taken or not. AROUND is the same as taken
561 except that it is used when the destination label is not preceded
562 by a BARRIER. */
563 enum taken {TAKEN, NOT_TAKEN, AROUND} status;
564 } path[PATHLENGTH];
565 };
566
567 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
568 virtual regs here because the simplify_*_operation routines are called
569 by integrate.c, which is called before virtual register instantiation. */
570
571 #define FIXED_BASE_PLUS_P(X) \
572 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
573 || (X) == arg_pointer_rtx \
574 || (X) == virtual_stack_vars_rtx \
575 || (X) == virtual_incoming_args_rtx \
576 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
577 && (XEXP (X, 0) == frame_pointer_rtx \
578 || XEXP (X, 0) == hard_frame_pointer_rtx \
579 || XEXP (X, 0) == arg_pointer_rtx \
580 || XEXP (X, 0) == virtual_stack_vars_rtx \
581 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
582 || GET_CODE (X) == ADDRESSOF)
583
584 /* Similar, but also allows reference to the stack pointer.
585
586 This used to include FIXED_BASE_PLUS_P, however, we can't assume that
587 arg_pointer_rtx by itself is nonzero, because on at least one machine,
588 the i960, the arg pointer is zero when it is unused. */
589
590 #define NONZERO_BASE_PLUS_P(X) \
591 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
592 || (X) == virtual_stack_vars_rtx \
593 || (X) == virtual_incoming_args_rtx \
594 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
595 && (XEXP (X, 0) == frame_pointer_rtx \
596 || XEXP (X, 0) == hard_frame_pointer_rtx \
597 || XEXP (X, 0) == arg_pointer_rtx \
598 || XEXP (X, 0) == virtual_stack_vars_rtx \
599 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
600 || (X) == stack_pointer_rtx \
601 || (X) == virtual_stack_dynamic_rtx \
602 || (X) == virtual_outgoing_args_rtx \
603 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
604 && (XEXP (X, 0) == stack_pointer_rtx \
605 || XEXP (X, 0) == virtual_stack_dynamic_rtx \
606 || XEXP (X, 0) == virtual_outgoing_args_rtx)) \
607 || GET_CODE (X) == ADDRESSOF)
608
609 static int notreg_cost PROTO((rtx));
610 static void new_basic_block PROTO((void));
611 static void make_new_qty PROTO((int));
612 static void make_regs_eqv PROTO((int, int));
613 static void delete_reg_equiv PROTO((int));
614 static int mention_regs PROTO((rtx));
615 static int insert_regs PROTO((rtx, struct table_elt *, int));
616 static void free_element PROTO((struct table_elt *));
617 static void remove_from_table PROTO((struct table_elt *, unsigned));
618 static struct table_elt *get_element PROTO((void));
619 static struct table_elt *lookup PROTO((rtx, unsigned, enum machine_mode)),
620 *lookup_for_remove PROTO((rtx, unsigned, enum machine_mode));
621 static rtx lookup_as_function PROTO((rtx, enum rtx_code));
622 static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
623 enum machine_mode));
624 static void merge_equiv_classes PROTO((struct table_elt *,
625 struct table_elt *));
626 static void invalidate PROTO((rtx, enum machine_mode));
627 static int cse_rtx_varies_p PROTO((rtx));
628 static void remove_invalid_refs PROTO((int));
629 static void remove_invalid_subreg_refs PROTO((int, int, enum machine_mode));
630 static void rehash_using_reg PROTO((rtx));
631 static void invalidate_memory PROTO((void));
632 static void invalidate_for_call PROTO((void));
633 static rtx use_related_value PROTO((rtx, struct table_elt *));
634 static unsigned canon_hash PROTO((rtx, enum machine_mode));
635 static unsigned safe_hash PROTO((rtx, enum machine_mode));
636 static int exp_equiv_p PROTO((rtx, rtx, int, int));
637 static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
638 HOST_WIDE_INT *,
639 HOST_WIDE_INT *));
640 static int refers_to_p PROTO((rtx, rtx));
641 static rtx canon_reg PROTO((rtx, rtx));
642 static void find_best_addr PROTO((rtx, rtx *));
643 static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
644 enum machine_mode *,
645 enum machine_mode *));
646 static rtx cse_gen_binary PROTO((enum rtx_code, enum machine_mode,
647 rtx, rtx));
648 static rtx simplify_plus_minus PROTO((enum rtx_code, enum machine_mode,
649 rtx, rtx));
650 static rtx fold_rtx PROTO((rtx, rtx));
651 static rtx equiv_constant PROTO((rtx));
652 static void record_jump_equiv PROTO((rtx, int));
653 static void record_jump_cond PROTO((enum rtx_code, enum machine_mode,
654 rtx, rtx, int));
655 static void cse_insn PROTO((rtx, rtx));
656 static int note_mem_written PROTO((rtx));
657 static void invalidate_from_clobbers PROTO((rtx));
658 static rtx cse_process_notes PROTO((rtx, rtx));
659 static void cse_around_loop PROTO((rtx));
660 static void invalidate_skipped_set PROTO((rtx, rtx));
661 static void invalidate_skipped_block PROTO((rtx));
662 static void cse_check_loop_start PROTO((rtx, rtx));
663 static void cse_set_around_loop PROTO((rtx, rtx, rtx));
664 static rtx cse_basic_block PROTO((rtx, rtx, struct branch_path *, int));
665 static void count_reg_usage PROTO((rtx, int *, rtx, int));
666
667 extern int rtx_equal_function_value_matters;
668 \f
669 /* Return an estimate of the cost of computing rtx X.
670 One use is in cse, to decide which expression to keep in the hash table.
671 Another is in rtl generation, to pick the cheapest way to multiply.
672 Other uses like the latter are expected in the future. */
673
674 /* Internal function, to compute cost when X is not a register; called
675 from COST macro to keep it simple. */
676
677 static int
678 notreg_cost (x)
679 rtx x;
680 {
681 return ((GET_CODE (x) == SUBREG
682 && GET_CODE (SUBREG_REG (x)) == REG
683 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
684 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
685 && (GET_MODE_SIZE (GET_MODE (x))
686 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
687 && subreg_lowpart_p (x)
688 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
689 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
690 ? (CHEAP_REG (SUBREG_REG (x)) ? 0
691 : (REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER ? 1
692 : 2))
693 : rtx_cost (x, SET) * 2);
694 }
695
696 /* Return the right cost to give to an operation
697 to make the cost of the corresponding register-to-register instruction
698 N times that of a fast register-to-register instruction. */
699
700 #define COSTS_N_INSNS(N) ((N) * 4 - 2)
701
702 int
703 rtx_cost (x, outer_code)
704 rtx x;
705 enum rtx_code outer_code ATTRIBUTE_UNUSED;
706 {
707 register int i, j;
708 register enum rtx_code code;
709 register char *fmt;
710 register int total;
711
712 if (x == 0)
713 return 0;
714
715 /* Compute the default costs of certain things.
716 Note that RTX_COSTS can override the defaults. */
717
718 code = GET_CODE (x);
719 switch (code)
720 {
721 case MULT:
722 /* Count multiplication by 2**n as a shift,
723 because if we are considering it, we would output it as a shift. */
724 if (GET_CODE (XEXP (x, 1)) == CONST_INT
725 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
726 total = 2;
727 else
728 total = COSTS_N_INSNS (5);
729 break;
730 case DIV:
731 case UDIV:
732 case MOD:
733 case UMOD:
734 total = COSTS_N_INSNS (7);
735 break;
736 case USE:
737 /* Used in loop.c and combine.c as a marker. */
738 total = 0;
739 break;
740 case ASM_OPERANDS:
741 /* We don't want these to be used in substitutions because
742 we have no way of validating the resulting insn. So assign
743 anything containing an ASM_OPERANDS a very high cost. */
744 total = 1000;
745 break;
746 default:
747 total = 2;
748 }
749
750 switch (code)
751 {
752 case REG:
753 return ! CHEAP_REG (x);
754
755 case SUBREG:
756 /* If we can't tie these modes, make this expensive. The larger
757 the mode, the more expensive it is. */
758 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
759 return COSTS_N_INSNS (2
760 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
761 return 2;
762 #ifdef RTX_COSTS
763 RTX_COSTS (x, code, outer_code);
764 #endif
765 #ifdef CONST_COSTS
766 CONST_COSTS (x, code, outer_code);
767 #endif
768
769 default:
770 #ifdef DEFAULT_RTX_COSTS
771 DEFAULT_RTX_COSTS(x, code, outer_code);
772 #endif
773 break;
774 }
775
776 /* Sum the costs of the sub-rtx's, plus cost of this operation,
777 which is already in total. */
778
779 fmt = GET_RTX_FORMAT (code);
780 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
781 if (fmt[i] == 'e')
782 total += rtx_cost (XEXP (x, i), code);
783 else if (fmt[i] == 'E')
784 for (j = 0; j < XVECLEN (x, i); j++)
785 total += rtx_cost (XVECEXP (x, i, j), code);
786
787 return total;
788 }
789 \f
790 /* Clear the hash table and initialize each register with its own quantity,
791 for a new basic block. */
792
793 static void
794 new_basic_block ()
795 {
796 register int i;
797
798 next_qty = max_reg;
799
800 bzero ((char *) reg_tick, max_reg * sizeof (int));
801
802 bcopy ((char *) all_minus_one, (char *) reg_in_table,
803 max_reg * sizeof (int));
804 bcopy ((char *) consec_ints, (char *) reg_qty, max_reg * sizeof (int));
805 CLEAR_HARD_REG_SET (hard_regs_in_table);
806
807 /* The per-quantity values used to be initialized here, but it is
808 much faster to initialize each as it is made in `make_new_qty'. */
809
810 for (i = 0; i < NBUCKETS; i++)
811 {
812 register struct table_elt *this, *next;
813 for (this = table[i]; this; this = next)
814 {
815 next = this->next_same_hash;
816 free_element (this);
817 }
818 }
819
820 bzero ((char *) table, sizeof table);
821
822 prev_insn = 0;
823
824 #ifdef HAVE_cc0
825 prev_insn_cc0 = 0;
826 #endif
827 }
828
829 /* Say that register REG contains a quantity not in any register before
830 and initialize that quantity. */
831
832 static void
833 make_new_qty (reg)
834 register int reg;
835 {
836 register int q;
837
838 if (next_qty >= max_qty)
839 abort ();
840
841 q = reg_qty[reg] = next_qty++;
842 qty_first_reg[q] = reg;
843 qty_last_reg[q] = reg;
844 qty_const[q] = qty_const_insn[q] = 0;
845 qty_comparison_code[q] = UNKNOWN;
846
847 reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
848 }
849
850 /* Make reg NEW equivalent to reg OLD.
851 OLD is not changing; NEW is. */
852
853 static void
854 make_regs_eqv (new, old)
855 register int new, old;
856 {
857 register int lastr, firstr;
858 register int q = reg_qty[old];
859
860 /* Nothing should become eqv until it has a "non-invalid" qty number. */
861 if (! REGNO_QTY_VALID_P (old))
862 abort ();
863
864 reg_qty[new] = q;
865 firstr = qty_first_reg[q];
866 lastr = qty_last_reg[q];
867
868 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
869 hard regs. Among pseudos, if NEW will live longer than any other reg
870 of the same qty, and that is beyond the current basic block,
871 make it the new canonical replacement for this qty. */
872 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
873 /* Certain fixed registers might be of the class NO_REGS. This means
874 that not only can they not be allocated by the compiler, but
875 they cannot be used in substitutions or canonicalizations
876 either. */
877 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
878 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
879 || (new >= FIRST_PSEUDO_REGISTER
880 && (firstr < FIRST_PSEUDO_REGISTER
881 || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
882 || (uid_cuid[REGNO_FIRST_UID (new)]
883 < cse_basic_block_start))
884 && (uid_cuid[REGNO_LAST_UID (new)]
885 > uid_cuid[REGNO_LAST_UID (firstr)]))))))
886 {
887 reg_prev_eqv[firstr] = new;
888 reg_next_eqv[new] = firstr;
889 reg_prev_eqv[new] = -1;
890 qty_first_reg[q] = new;
891 }
892 else
893 {
894 /* If NEW is a hard reg (known to be non-fixed), insert at end.
895 Otherwise, insert before any non-fixed hard regs that are at the
896 end. Registers of class NO_REGS cannot be used as an
897 equivalent for anything. */
898 while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
899 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
900 && new >= FIRST_PSEUDO_REGISTER)
901 lastr = reg_prev_eqv[lastr];
902 reg_next_eqv[new] = reg_next_eqv[lastr];
903 if (reg_next_eqv[lastr] >= 0)
904 reg_prev_eqv[reg_next_eqv[lastr]] = new;
905 else
906 qty_last_reg[q] = new;
907 reg_next_eqv[lastr] = new;
908 reg_prev_eqv[new] = lastr;
909 }
910 }
911
912 /* Remove REG from its equivalence class. */
913
914 static void
915 delete_reg_equiv (reg)
916 register int reg;
917 {
918 register int q = reg_qty[reg];
919 register int p, n;
920
921 /* If invalid, do nothing. */
922 if (q == reg)
923 return;
924
925 p = reg_prev_eqv[reg];
926 n = reg_next_eqv[reg];
927
928 if (n != -1)
929 reg_prev_eqv[n] = p;
930 else
931 qty_last_reg[q] = p;
932 if (p != -1)
933 reg_next_eqv[p] = n;
934 else
935 qty_first_reg[q] = n;
936
937 reg_qty[reg] = reg;
938 }
939
940 /* Remove any invalid expressions from the hash table
941 that refer to any of the registers contained in expression X.
942
943 Make sure that newly inserted references to those registers
944 as subexpressions will be considered valid.
945
946 mention_regs is not called when a register itself
947 is being stored in the table.
948
949 Return 1 if we have done something that may have changed the hash code
950 of X. */
951
952 static int
953 mention_regs (x)
954 rtx x;
955 {
956 register enum rtx_code code;
957 register int i, j;
958 register char *fmt;
959 register int changed = 0;
960
961 if (x == 0)
962 return 0;
963
964 code = GET_CODE (x);
965 if (code == REG)
966 {
967 register int regno = REGNO (x);
968 register int endregno
969 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
970 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
971 int i;
972
973 for (i = regno; i < endregno; i++)
974 {
975 if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
976 remove_invalid_refs (i);
977
978 reg_in_table[i] = reg_tick[i];
979 }
980
981 return 0;
982 }
983
984 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
985 pseudo if they don't use overlapping words. We handle only pseudos
986 here for simplicity. */
987 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
988 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
989 {
990 int i = REGNO (SUBREG_REG (x));
991
992 if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
993 {
994 /* If reg_tick has been incremented more than once since
995 reg_in_table was last set, that means that the entire
996 register has been set before, so discard anything memorized
997 for the entrire register, including all SUBREG expressions. */
998 if (reg_in_table[i] != reg_tick[i] - 1)
999 remove_invalid_refs (i);
1000 else
1001 remove_invalid_subreg_refs (i, SUBREG_WORD (x), GET_MODE (x));
1002 }
1003
1004 reg_in_table[i] = reg_tick[i];
1005 return 0;
1006 }
1007
1008 /* If X is a comparison or a COMPARE and either operand is a register
1009 that does not have a quantity, give it one. This is so that a later
1010 call to record_jump_equiv won't cause X to be assigned a different
1011 hash code and not found in the table after that call.
1012
1013 It is not necessary to do this here, since rehash_using_reg can
1014 fix up the table later, but doing this here eliminates the need to
1015 call that expensive function in the most common case where the only
1016 use of the register is in the comparison. */
1017
1018 if (code == COMPARE || GET_RTX_CLASS (code) == '<')
1019 {
1020 if (GET_CODE (XEXP (x, 0)) == REG
1021 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1022 if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
1023 {
1024 rehash_using_reg (XEXP (x, 0));
1025 changed = 1;
1026 }
1027
1028 if (GET_CODE (XEXP (x, 1)) == REG
1029 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1030 if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
1031 {
1032 rehash_using_reg (XEXP (x, 1));
1033 changed = 1;
1034 }
1035 }
1036
1037 fmt = GET_RTX_FORMAT (code);
1038 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1039 if (fmt[i] == 'e')
1040 changed |= mention_regs (XEXP (x, i));
1041 else if (fmt[i] == 'E')
1042 for (j = 0; j < XVECLEN (x, i); j++)
1043 changed |= mention_regs (XVECEXP (x, i, j));
1044
1045 return changed;
1046 }
1047
1048 /* Update the register quantities for inserting X into the hash table
1049 with a value equivalent to CLASSP.
1050 (If the class does not contain a REG, it is irrelevant.)
1051 If MODIFIED is nonzero, X is a destination; it is being modified.
1052 Note that delete_reg_equiv should be called on a register
1053 before insert_regs is done on that register with MODIFIED != 0.
1054
1055 Nonzero value means that elements of reg_qty have changed
1056 so X's hash code may be different. */
1057
1058 static int
1059 insert_regs (x, classp, modified)
1060 rtx x;
1061 struct table_elt *classp;
1062 int modified;
1063 {
1064 if (GET_CODE (x) == REG)
1065 {
1066 register int regno = REGNO (x);
1067
1068 /* If REGNO is in the equivalence table already but is of the
1069 wrong mode for that equivalence, don't do anything here. */
1070
1071 if (REGNO_QTY_VALID_P (regno)
1072 && qty_mode[reg_qty[regno]] != GET_MODE (x))
1073 return 0;
1074
1075 if (modified || ! REGNO_QTY_VALID_P (regno))
1076 {
1077 if (classp)
1078 for (classp = classp->first_same_value;
1079 classp != 0;
1080 classp = classp->next_same_value)
1081 if (GET_CODE (classp->exp) == REG
1082 && GET_MODE (classp->exp) == GET_MODE (x))
1083 {
1084 make_regs_eqv (regno, REGNO (classp->exp));
1085 return 1;
1086 }
1087
1088 make_new_qty (regno);
1089 qty_mode[reg_qty[regno]] = GET_MODE (x);
1090 return 1;
1091 }
1092
1093 return 0;
1094 }
1095
1096 /* If X is a SUBREG, we will likely be inserting the inner register in the
1097 table. If that register doesn't have an assigned quantity number at
1098 this point but does later, the insertion that we will be doing now will
1099 not be accessible because its hash code will have changed. So assign
1100 a quantity number now. */
1101
1102 else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1103 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1104 {
1105 int regno = REGNO (SUBREG_REG (x));
1106
1107 insert_regs (SUBREG_REG (x), NULL_PTR, 0);
1108 /* Mention_regs checks if REG_TICK is exactly one larger than
1109 REG_IN_TABLE to find out if there was only a single preceding
1110 invalidation - for the SUBREG - or another one, which would be
1111 for the full register. Since we don't invalidate the SUBREG
1112 here first, we might have to bump up REG_TICK so that mention_regs
1113 will do the right thing. */
1114 if (reg_in_table[regno] >= 0
1115 && reg_tick[regno] == reg_in_table[regno] + 1)
1116 reg_tick[regno]++;
1117 mention_regs (x);
1118 return 1;
1119 }
1120 else
1121 return mention_regs (x);
1122 }
1123 \f
1124 /* Look in or update the hash table. */
1125
1126 /* Put the element ELT on the list of free elements. */
1127
1128 static void
1129 free_element (elt)
1130 struct table_elt *elt;
1131 {
1132 elt->next_same_hash = free_element_chain;
1133 free_element_chain = elt;
1134 }
1135
1136 /* Return an element that is free for use. */
1137
1138 static struct table_elt *
1139 get_element ()
1140 {
1141 struct table_elt *elt = free_element_chain;
1142 if (elt)
1143 {
1144 free_element_chain = elt->next_same_hash;
1145 return elt;
1146 }
1147 n_elements_made++;
1148 return (struct table_elt *) oballoc (sizeof (struct table_elt));
1149 }
1150
1151 /* Remove table element ELT from use in the table.
1152 HASH is its hash code, made using the HASH macro.
1153 It's an argument because often that is known in advance
1154 and we save much time not recomputing it. */
1155
1156 static void
1157 remove_from_table (elt, hash)
1158 register struct table_elt *elt;
1159 unsigned hash;
1160 {
1161 if (elt == 0)
1162 return;
1163
1164 /* Mark this element as removed. See cse_insn. */
1165 elt->first_same_value = 0;
1166
1167 /* Remove the table element from its equivalence class. */
1168
1169 {
1170 register struct table_elt *prev = elt->prev_same_value;
1171 register struct table_elt *next = elt->next_same_value;
1172
1173 if (next) next->prev_same_value = prev;
1174
1175 if (prev)
1176 prev->next_same_value = next;
1177 else
1178 {
1179 register struct table_elt *newfirst = next;
1180 while (next)
1181 {
1182 next->first_same_value = newfirst;
1183 next = next->next_same_value;
1184 }
1185 }
1186 }
1187
1188 /* Remove the table element from its hash bucket. */
1189
1190 {
1191 register struct table_elt *prev = elt->prev_same_hash;
1192 register struct table_elt *next = elt->next_same_hash;
1193
1194 if (next) next->prev_same_hash = prev;
1195
1196 if (prev)
1197 prev->next_same_hash = next;
1198 else if (table[hash] == elt)
1199 table[hash] = next;
1200 else
1201 {
1202 /* This entry is not in the proper hash bucket. This can happen
1203 when two classes were merged by `merge_equiv_classes'. Search
1204 for the hash bucket that it heads. This happens only very
1205 rarely, so the cost is acceptable. */
1206 for (hash = 0; hash < NBUCKETS; hash++)
1207 if (table[hash] == elt)
1208 table[hash] = next;
1209 }
1210 }
1211
1212 /* Remove the table element from its related-value circular chain. */
1213
1214 if (elt->related_value != 0 && elt->related_value != elt)
1215 {
1216 register struct table_elt *p = elt->related_value;
1217 while (p->related_value != elt)
1218 p = p->related_value;
1219 p->related_value = elt->related_value;
1220 if (p->related_value == p)
1221 p->related_value = 0;
1222 }
1223
1224 free_element (elt);
1225 }
1226
1227 /* Look up X in the hash table and return its table element,
1228 or 0 if X is not in the table.
1229
1230 MODE is the machine-mode of X, or if X is an integer constant
1231 with VOIDmode then MODE is the mode with which X will be used.
1232
1233 Here we are satisfied to find an expression whose tree structure
1234 looks like X. */
1235
1236 static struct table_elt *
1237 lookup (x, hash, mode)
1238 rtx x;
1239 unsigned hash;
1240 enum machine_mode mode;
1241 {
1242 register struct table_elt *p;
1243
1244 for (p = table[hash]; p; p = p->next_same_hash)
1245 if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1246 || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1247 return p;
1248
1249 return 0;
1250 }
1251
1252 /* Like `lookup' but don't care whether the table element uses invalid regs.
1253 Also ignore discrepancies in the machine mode of a register. */
1254
1255 static struct table_elt *
1256 lookup_for_remove (x, hash, mode)
1257 rtx x;
1258 unsigned hash;
1259 enum machine_mode mode;
1260 {
1261 register struct table_elt *p;
1262
1263 if (GET_CODE (x) == REG)
1264 {
1265 int regno = REGNO (x);
1266 /* Don't check the machine mode when comparing registers;
1267 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1268 for (p = table[hash]; p; p = p->next_same_hash)
1269 if (GET_CODE (p->exp) == REG
1270 && REGNO (p->exp) == regno)
1271 return p;
1272 }
1273 else
1274 {
1275 for (p = table[hash]; p; p = p->next_same_hash)
1276 if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1277 return p;
1278 }
1279
1280 return 0;
1281 }
1282
1283 /* Look for an expression equivalent to X and with code CODE.
1284 If one is found, return that expression. */
1285
1286 static rtx
1287 lookup_as_function (x, code)
1288 rtx x;
1289 enum rtx_code code;
1290 {
1291 register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
1292 GET_MODE (x));
1293 /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1294 long as we are narrowing. So if we looked in vain for a mode narrower
1295 than word_mode before, look for word_mode now. */
1296 if (p == 0 && code == CONST_INT
1297 && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1298 {
1299 x = copy_rtx (x);
1300 PUT_MODE (x, word_mode);
1301 p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS, word_mode);
1302 }
1303
1304 if (p == 0)
1305 return 0;
1306
1307 for (p = p->first_same_value; p; p = p->next_same_value)
1308 {
1309 if (GET_CODE (p->exp) == code
1310 /* Make sure this is a valid entry in the table. */
1311 && exp_equiv_p (p->exp, p->exp, 1, 0))
1312 return p->exp;
1313 }
1314
1315 return 0;
1316 }
1317
1318 /* Insert X in the hash table, assuming HASH is its hash code
1319 and CLASSP is an element of the class it should go in
1320 (or 0 if a new class should be made).
1321 It is inserted at the proper position to keep the class in
1322 the order cheapest first.
1323
1324 MODE is the machine-mode of X, or if X is an integer constant
1325 with VOIDmode then MODE is the mode with which X will be used.
1326
1327 For elements of equal cheapness, the most recent one
1328 goes in front, except that the first element in the list
1329 remains first unless a cheaper element is added. The order of
1330 pseudo-registers does not matter, as canon_reg will be called to
1331 find the cheapest when a register is retrieved from the table.
1332
1333 The in_memory field in the hash table element is set to 0.
1334 The caller must set it nonzero if appropriate.
1335
1336 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1337 and if insert_regs returns a nonzero value
1338 you must then recompute its hash code before calling here.
1339
1340 If necessary, update table showing constant values of quantities. */
1341
1342 #define CHEAPER(X,Y) ((X)->cost < (Y)->cost)
1343
1344 static struct table_elt *
1345 insert (x, classp, hash, mode)
1346 register rtx x;
1347 register struct table_elt *classp;
1348 unsigned hash;
1349 enum machine_mode mode;
1350 {
1351 register struct table_elt *elt;
1352
1353 /* If X is a register and we haven't made a quantity for it,
1354 something is wrong. */
1355 if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1356 abort ();
1357
1358 /* If X is a hard register, show it is being put in the table. */
1359 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1360 {
1361 int regno = REGNO (x);
1362 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1363 int i;
1364
1365 for (i = regno; i < endregno; i++)
1366 SET_HARD_REG_BIT (hard_regs_in_table, i);
1367 }
1368
1369 /* If X is a label, show we recorded it. */
1370 if (GET_CODE (x) == LABEL_REF
1371 || (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
1372 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF))
1373 recorded_label_ref = 1;
1374
1375 /* Put an element for X into the right hash bucket. */
1376
1377 elt = get_element ();
1378 elt->exp = x;
1379 elt->cost = COST (x);
1380 elt->next_same_value = 0;
1381 elt->prev_same_value = 0;
1382 elt->next_same_hash = table[hash];
1383 elt->prev_same_hash = 0;
1384 elt->related_value = 0;
1385 elt->in_memory = 0;
1386 elt->mode = mode;
1387 elt->is_const = (CONSTANT_P (x)
1388 /* GNU C++ takes advantage of this for `this'
1389 (and other const values). */
1390 || (RTX_UNCHANGING_P (x)
1391 && GET_CODE (x) == REG
1392 && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1393 || FIXED_BASE_PLUS_P (x));
1394
1395 if (table[hash])
1396 table[hash]->prev_same_hash = elt;
1397 table[hash] = elt;
1398
1399 /* Put it into the proper value-class. */
1400 if (classp)
1401 {
1402 classp = classp->first_same_value;
1403 if (CHEAPER (elt, classp))
1404 /* Insert at the head of the class */
1405 {
1406 register struct table_elt *p;
1407 elt->next_same_value = classp;
1408 classp->prev_same_value = elt;
1409 elt->first_same_value = elt;
1410
1411 for (p = classp; p; p = p->next_same_value)
1412 p->first_same_value = elt;
1413 }
1414 else
1415 {
1416 /* Insert not at head of the class. */
1417 /* Put it after the last element cheaper than X. */
1418 register struct table_elt *p, *next;
1419 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1420 p = next);
1421 /* Put it after P and before NEXT. */
1422 elt->next_same_value = next;
1423 if (next)
1424 next->prev_same_value = elt;
1425 elt->prev_same_value = p;
1426 p->next_same_value = elt;
1427 elt->first_same_value = classp;
1428 }
1429 }
1430 else
1431 elt->first_same_value = elt;
1432
1433 /* If this is a constant being set equivalent to a register or a register
1434 being set equivalent to a constant, note the constant equivalence.
1435
1436 If this is a constant, it cannot be equivalent to a different constant,
1437 and a constant is the only thing that can be cheaper than a register. So
1438 we know the register is the head of the class (before the constant was
1439 inserted).
1440
1441 If this is a register that is not already known equivalent to a
1442 constant, we must check the entire class.
1443
1444 If this is a register that is already known equivalent to an insn,
1445 update `qty_const_insn' to show that `this_insn' is the latest
1446 insn making that quantity equivalent to the constant. */
1447
1448 if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1449 && GET_CODE (x) != REG)
1450 {
1451 qty_const[reg_qty[REGNO (classp->exp)]]
1452 = gen_lowpart_if_possible (qty_mode[reg_qty[REGNO (classp->exp)]], x);
1453 qty_const_insn[reg_qty[REGNO (classp->exp)]] = this_insn;
1454 }
1455
1456 else if (GET_CODE (x) == REG && classp && ! qty_const[reg_qty[REGNO (x)]]
1457 && ! elt->is_const)
1458 {
1459 register struct table_elt *p;
1460
1461 for (p = classp; p != 0; p = p->next_same_value)
1462 {
1463 if (p->is_const && GET_CODE (p->exp) != REG)
1464 {
1465 qty_const[reg_qty[REGNO (x)]]
1466 = gen_lowpart_if_possible (GET_MODE (x), p->exp);
1467 qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
1468 break;
1469 }
1470 }
1471 }
1472
1473 else if (GET_CODE (x) == REG && qty_const[reg_qty[REGNO (x)]]
1474 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]])
1475 qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
1476
1477 /* If this is a constant with symbolic value,
1478 and it has a term with an explicit integer value,
1479 link it up with related expressions. */
1480 if (GET_CODE (x) == CONST)
1481 {
1482 rtx subexp = get_related_value (x);
1483 unsigned subhash;
1484 struct table_elt *subelt, *subelt_prev;
1485
1486 if (subexp != 0)
1487 {
1488 /* Get the integer-free subexpression in the hash table. */
1489 subhash = safe_hash (subexp, mode) % NBUCKETS;
1490 subelt = lookup (subexp, subhash, mode);
1491 if (subelt == 0)
1492 subelt = insert (subexp, NULL_PTR, subhash, mode);
1493 /* Initialize SUBELT's circular chain if it has none. */
1494 if (subelt->related_value == 0)
1495 subelt->related_value = subelt;
1496 /* Find the element in the circular chain that precedes SUBELT. */
1497 subelt_prev = subelt;
1498 while (subelt_prev->related_value != subelt)
1499 subelt_prev = subelt_prev->related_value;
1500 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1501 This way the element that follows SUBELT is the oldest one. */
1502 elt->related_value = subelt_prev->related_value;
1503 subelt_prev->related_value = elt;
1504 }
1505 }
1506
1507 return elt;
1508 }
1509 \f
1510 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1511 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1512 the two classes equivalent.
1513
1514 CLASS1 will be the surviving class; CLASS2 should not be used after this
1515 call.
1516
1517 Any invalid entries in CLASS2 will not be copied. */
1518
1519 static void
1520 merge_equiv_classes (class1, class2)
1521 struct table_elt *class1, *class2;
1522 {
1523 struct table_elt *elt, *next, *new;
1524
1525 /* Ensure we start with the head of the classes. */
1526 class1 = class1->first_same_value;
1527 class2 = class2->first_same_value;
1528
1529 /* If they were already equal, forget it. */
1530 if (class1 == class2)
1531 return;
1532
1533 for (elt = class2; elt; elt = next)
1534 {
1535 unsigned hash;
1536 rtx exp = elt->exp;
1537 enum machine_mode mode = elt->mode;
1538
1539 next = elt->next_same_value;
1540
1541 /* Remove old entry, make a new one in CLASS1's class.
1542 Don't do this for invalid entries as we cannot find their
1543 hash code (it also isn't necessary). */
1544 if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1545 {
1546 hash_arg_in_memory = 0;
1547 hash_arg_in_struct = 0;
1548 hash = HASH (exp, mode);
1549
1550 if (GET_CODE (exp) == REG)
1551 delete_reg_equiv (REGNO (exp));
1552
1553 remove_from_table (elt, hash);
1554
1555 if (insert_regs (exp, class1, 0))
1556 {
1557 rehash_using_reg (exp);
1558 hash = HASH (exp, mode);
1559 }
1560 new = insert (exp, class1, hash, mode);
1561 new->in_memory = hash_arg_in_memory;
1562 new->in_struct = hash_arg_in_struct;
1563 }
1564 }
1565 }
1566 \f
1567 /* Remove from the hash table, or mark as invalid,
1568 all expressions whose values could be altered by storing in X.
1569 X is a register, a subreg, or a memory reference with nonvarying address
1570 (because, when a memory reference with a varying address is stored in,
1571 all memory references are removed by invalidate_memory
1572 so specific invalidation is superfluous).
1573 FULL_MODE, if not VOIDmode, indicates that this much should be invalidated
1574 instead of just the amount indicated by the mode of X. This is only used
1575 for bitfield stores into memory.
1576
1577 A nonvarying address may be just a register or just
1578 a symbol reference, or it may be either of those plus
1579 a numeric offset. */
1580
1581 static void
1582 invalidate (x, full_mode)
1583 rtx x;
1584 enum machine_mode full_mode;
1585 {
1586 register int i;
1587 register struct table_elt *p;
1588
1589 /* If X is a register, dependencies on its contents
1590 are recorded through the qty number mechanism.
1591 Just change the qty number of the register,
1592 mark it as invalid for expressions that refer to it,
1593 and remove it itself. */
1594
1595 if (GET_CODE (x) == REG)
1596 {
1597 register int regno = REGNO (x);
1598 register unsigned hash = HASH (x, GET_MODE (x));
1599
1600 /* Remove REGNO from any quantity list it might be on and indicate
1601 that its value might have changed. If it is a pseudo, remove its
1602 entry from the hash table.
1603
1604 For a hard register, we do the first two actions above for any
1605 additional hard registers corresponding to X. Then, if any of these
1606 registers are in the table, we must remove any REG entries that
1607 overlap these registers. */
1608
1609 delete_reg_equiv (regno);
1610 reg_tick[regno]++;
1611
1612 if (regno >= FIRST_PSEUDO_REGISTER)
1613 {
1614 /* Because a register can be referenced in more than one mode,
1615 we might have to remove more than one table entry. */
1616
1617 struct table_elt *elt;
1618
1619 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1620 remove_from_table (elt, hash);
1621 }
1622 else
1623 {
1624 HOST_WIDE_INT in_table
1625 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1626 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1627 int tregno, tendregno;
1628 register struct table_elt *p, *next;
1629
1630 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1631
1632 for (i = regno + 1; i < endregno; i++)
1633 {
1634 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
1635 CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
1636 delete_reg_equiv (i);
1637 reg_tick[i]++;
1638 }
1639
1640 if (in_table)
1641 for (hash = 0; hash < NBUCKETS; hash++)
1642 for (p = table[hash]; p; p = next)
1643 {
1644 next = p->next_same_hash;
1645
1646 if (GET_CODE (p->exp) != REG
1647 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1648 continue;
1649
1650 tregno = REGNO (p->exp);
1651 tendregno
1652 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1653 if (tendregno > regno && tregno < endregno)
1654 remove_from_table (p, hash);
1655 }
1656 }
1657
1658 return;
1659 }
1660
1661 if (GET_CODE (x) == SUBREG)
1662 {
1663 if (GET_CODE (SUBREG_REG (x)) != REG)
1664 abort ();
1665 invalidate (SUBREG_REG (x), VOIDmode);
1666 return;
1667 }
1668
1669 /* If X is a parallel, invalidate all of its elements. */
1670
1671 if (GET_CODE (x) == PARALLEL)
1672 {
1673 for (i = XVECLEN (x, 0) - 1; i >= 0 ; --i)
1674 invalidate (XVECEXP (x, 0, i), VOIDmode);
1675 return;
1676 }
1677
1678 /* If X is an expr_list, this is part of a disjoint return value;
1679 extract the location in question ignoring the offset. */
1680
1681 if (GET_CODE (x) == EXPR_LIST)
1682 {
1683 invalidate (XEXP (x, 0), VOIDmode);
1684 return;
1685 }
1686
1687 /* X is not a register; it must be a memory reference with
1688 a nonvarying address. Remove all hash table elements
1689 that refer to overlapping pieces of memory. */
1690
1691 if (GET_CODE (x) != MEM)
1692 abort ();
1693
1694 if (full_mode == VOIDmode)
1695 full_mode = GET_MODE (x);
1696
1697 for (i = 0; i < NBUCKETS; i++)
1698 {
1699 register struct table_elt *next;
1700 for (p = table[i]; p; p = next)
1701 {
1702 next = p->next_same_hash;
1703 /* Invalidate ASM_OPERANDS which reference memory (this is easier
1704 than checking all the aliases). */
1705 if (p->in_memory
1706 && (GET_CODE (p->exp) != MEM
1707 || true_dependence (x, full_mode, p->exp, cse_rtx_varies_p)))
1708 remove_from_table (p, i);
1709 }
1710 }
1711 }
1712
1713 /* Remove all expressions that refer to register REGNO,
1714 since they are already invalid, and we are about to
1715 mark that register valid again and don't want the old
1716 expressions to reappear as valid. */
1717
1718 static void
1719 remove_invalid_refs (regno)
1720 int regno;
1721 {
1722 register int i;
1723 register struct table_elt *p, *next;
1724
1725 for (i = 0; i < NBUCKETS; i++)
1726 for (p = table[i]; p; p = next)
1727 {
1728 next = p->next_same_hash;
1729 if (GET_CODE (p->exp) != REG
1730 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1731 remove_from_table (p, i);
1732 }
1733 }
1734
1735 /* Likewise for a subreg with subreg_reg WORD and mode MODE. */
1736 static void
1737 remove_invalid_subreg_refs (regno, word, mode)
1738 int regno;
1739 int word;
1740 enum machine_mode mode;
1741 {
1742 register int i;
1743 register struct table_elt *p, *next;
1744 int end = word + (GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD;
1745
1746 for (i = 0; i < NBUCKETS; i++)
1747 for (p = table[i]; p; p = next)
1748 {
1749 rtx exp;
1750 next = p->next_same_hash;
1751
1752 exp = p->exp;
1753 if (GET_CODE (p->exp) != REG
1754 && (GET_CODE (exp) != SUBREG
1755 || GET_CODE (SUBREG_REG (exp)) != REG
1756 || REGNO (SUBREG_REG (exp)) != regno
1757 || (((SUBREG_WORD (exp)
1758 + (GET_MODE_SIZE (GET_MODE (exp)) - 1) / UNITS_PER_WORD)
1759 >= word)
1760 && SUBREG_WORD (exp) <= end))
1761 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1762 remove_from_table (p, i);
1763 }
1764 }
1765 \f
1766 /* Recompute the hash codes of any valid entries in the hash table that
1767 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1768
1769 This is called when we make a jump equivalence. */
1770
1771 static void
1772 rehash_using_reg (x)
1773 rtx x;
1774 {
1775 unsigned int i;
1776 struct table_elt *p, *next;
1777 unsigned hash;
1778
1779 if (GET_CODE (x) == SUBREG)
1780 x = SUBREG_REG (x);
1781
1782 /* If X is not a register or if the register is known not to be in any
1783 valid entries in the table, we have no work to do. */
1784
1785 if (GET_CODE (x) != REG
1786 || reg_in_table[REGNO (x)] < 0
1787 || reg_in_table[REGNO (x)] != reg_tick[REGNO (x)])
1788 return;
1789
1790 /* Scan all hash chains looking for valid entries that mention X.
1791 If we find one and it is in the wrong hash chain, move it. We can skip
1792 objects that are registers, since they are handled specially. */
1793
1794 for (i = 0; i < NBUCKETS; i++)
1795 for (p = table[i]; p; p = next)
1796 {
1797 next = p->next_same_hash;
1798 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
1799 && exp_equiv_p (p->exp, p->exp, 1, 0)
1800 && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
1801 {
1802 if (p->next_same_hash)
1803 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1804
1805 if (p->prev_same_hash)
1806 p->prev_same_hash->next_same_hash = p->next_same_hash;
1807 else
1808 table[i] = p->next_same_hash;
1809
1810 p->next_same_hash = table[hash];
1811 p->prev_same_hash = 0;
1812 if (table[hash])
1813 table[hash]->prev_same_hash = p;
1814 table[hash] = p;
1815 }
1816 }
1817 }
1818 \f
1819 /* Remove from the hash table any expression that is a call-clobbered
1820 register. Also update their TICK values. */
1821
1822 static void
1823 invalidate_for_call ()
1824 {
1825 int regno, endregno;
1826 int i;
1827 unsigned hash;
1828 struct table_elt *p, *next;
1829 int in_table = 0;
1830
1831 /* Go through all the hard registers. For each that is clobbered in
1832 a CALL_INSN, remove the register from quantity chains and update
1833 reg_tick if defined. Also see if any of these registers is currently
1834 in the table. */
1835
1836 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1837 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1838 {
1839 delete_reg_equiv (regno);
1840 if (reg_tick[regno] >= 0)
1841 reg_tick[regno]++;
1842
1843 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
1844 }
1845
1846 /* In the case where we have no call-clobbered hard registers in the
1847 table, we are done. Otherwise, scan the table and remove any
1848 entry that overlaps a call-clobbered register. */
1849
1850 if (in_table)
1851 for (hash = 0; hash < NBUCKETS; hash++)
1852 for (p = table[hash]; p; p = next)
1853 {
1854 next = p->next_same_hash;
1855
1856 if (p->in_memory)
1857 {
1858 remove_from_table (p, hash);
1859 continue;
1860 }
1861
1862 if (GET_CODE (p->exp) != REG
1863 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1864 continue;
1865
1866 regno = REGNO (p->exp);
1867 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1868
1869 for (i = regno; i < endregno; i++)
1870 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1871 {
1872 remove_from_table (p, hash);
1873 break;
1874 }
1875 }
1876 }
1877 \f
1878 /* Given an expression X of type CONST,
1879 and ELT which is its table entry (or 0 if it
1880 is not in the hash table),
1881 return an alternate expression for X as a register plus integer.
1882 If none can be found, return 0. */
1883
1884 static rtx
1885 use_related_value (x, elt)
1886 rtx x;
1887 struct table_elt *elt;
1888 {
1889 register struct table_elt *relt = 0;
1890 register struct table_elt *p, *q;
1891 HOST_WIDE_INT offset;
1892
1893 /* First, is there anything related known?
1894 If we have a table element, we can tell from that.
1895 Otherwise, must look it up. */
1896
1897 if (elt != 0 && elt->related_value != 0)
1898 relt = elt;
1899 else if (elt == 0 && GET_CODE (x) == CONST)
1900 {
1901 rtx subexp = get_related_value (x);
1902 if (subexp != 0)
1903 relt = lookup (subexp,
1904 safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
1905 GET_MODE (subexp));
1906 }
1907
1908 if (relt == 0)
1909 return 0;
1910
1911 /* Search all related table entries for one that has an
1912 equivalent register. */
1913
1914 p = relt;
1915 while (1)
1916 {
1917 /* This loop is strange in that it is executed in two different cases.
1918 The first is when X is already in the table. Then it is searching
1919 the RELATED_VALUE list of X's class (RELT). The second case is when
1920 X is not in the table. Then RELT points to a class for the related
1921 value.
1922
1923 Ensure that, whatever case we are in, that we ignore classes that have
1924 the same value as X. */
1925
1926 if (rtx_equal_p (x, p->exp))
1927 q = 0;
1928 else
1929 for (q = p->first_same_value; q; q = q->next_same_value)
1930 if (GET_CODE (q->exp) == REG)
1931 break;
1932
1933 if (q)
1934 break;
1935
1936 p = p->related_value;
1937
1938 /* We went all the way around, so there is nothing to be found.
1939 Alternatively, perhaps RELT was in the table for some other reason
1940 and it has no related values recorded. */
1941 if (p == relt || p == 0)
1942 break;
1943 }
1944
1945 if (q == 0)
1946 return 0;
1947
1948 offset = (get_integer_term (x) - get_integer_term (p->exp));
1949 /* Note: OFFSET may be 0 if P->xexp and X are related by commutativity. */
1950 return plus_constant (q->exp, offset);
1951 }
1952 \f
1953 /* Hash an rtx. We are careful to make sure the value is never negative.
1954 Equivalent registers hash identically.
1955 MODE is used in hashing for CONST_INTs only;
1956 otherwise the mode of X is used.
1957
1958 Store 1 in do_not_record if any subexpression is volatile.
1959
1960 Store 1 in hash_arg_in_memory if X contains a MEM rtx
1961 which does not have the RTX_UNCHANGING_P bit set.
1962 In this case, also store 1 in hash_arg_in_struct
1963 if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.
1964
1965 Note that cse_insn knows that the hash code of a MEM expression
1966 is just (int) MEM plus the hash code of the address. */
1967
1968 static unsigned
1969 canon_hash (x, mode)
1970 rtx x;
1971 enum machine_mode mode;
1972 {
1973 register int i, j;
1974 register unsigned hash = 0;
1975 register enum rtx_code code;
1976 register char *fmt;
1977
1978 /* repeat is used to turn tail-recursion into iteration. */
1979 repeat:
1980 if (x == 0)
1981 return hash;
1982
1983 code = GET_CODE (x);
1984 switch (code)
1985 {
1986 case REG:
1987 {
1988 register int regno = REGNO (x);
1989
1990 /* On some machines, we can't record any non-fixed hard register,
1991 because extending its life will cause reload problems. We
1992 consider ap, fp, and sp to be fixed for this purpose.
1993 On all machines, we can't record any global registers. */
1994
1995 if (regno < FIRST_PSEUDO_REGISTER
1996 && (global_regs[regno]
1997 || (SMALL_REGISTER_CLASSES
1998 && ! fixed_regs[regno]
1999 && regno != FRAME_POINTER_REGNUM
2000 && regno != HARD_FRAME_POINTER_REGNUM
2001 && regno != ARG_POINTER_REGNUM
2002 && regno != STACK_POINTER_REGNUM)))
2003 {
2004 do_not_record = 1;
2005 return 0;
2006 }
2007 hash += ((unsigned) REG << 7) + (unsigned) reg_qty[regno];
2008 return hash;
2009 }
2010
2011 /* We handle SUBREG of a REG specially because the underlying
2012 reg changes its hash value with every value change; we don't
2013 want to have to forget unrelated subregs when one subreg changes. */
2014 case SUBREG:
2015 {
2016 if (GET_CODE (SUBREG_REG (x)) == REG)
2017 {
2018 hash += (((unsigned) SUBREG << 7)
2019 + REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
2020 return hash;
2021 }
2022 break;
2023 }
2024
2025 case CONST_INT:
2026 {
2027 unsigned HOST_WIDE_INT tem = INTVAL (x);
2028 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2029 return hash;
2030 }
2031
2032 case CONST_DOUBLE:
2033 /* This is like the general case, except that it only counts
2034 the integers representing the constant. */
2035 hash += (unsigned) code + (unsigned) GET_MODE (x);
2036 if (GET_MODE (x) != VOIDmode)
2037 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
2038 {
2039 unsigned tem = XINT (x, i);
2040 hash += tem;
2041 }
2042 else
2043 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2044 + (unsigned) CONST_DOUBLE_HIGH (x));
2045 return hash;
2046
2047 /* Assume there is only one rtx object for any given label. */
2048 case LABEL_REF:
2049 hash
2050 += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2051 return hash;
2052
2053 case SYMBOL_REF:
2054 hash
2055 += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2056 return hash;
2057
2058 case MEM:
2059 if (MEM_VOLATILE_P (x))
2060 {
2061 do_not_record = 1;
2062 return 0;
2063 }
2064 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2065 {
2066 hash_arg_in_memory = 1;
2067 if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
2068 }
2069 /* Now that we have already found this special case,
2070 might as well speed it up as much as possible. */
2071 hash += (unsigned) MEM;
2072 x = XEXP (x, 0);
2073 goto repeat;
2074
2075 case PRE_DEC:
2076 case PRE_INC:
2077 case POST_DEC:
2078 case POST_INC:
2079 case PC:
2080 case CC0:
2081 case CALL:
2082 case UNSPEC_VOLATILE:
2083 do_not_record = 1;
2084 return 0;
2085
2086 case ASM_OPERANDS:
2087 if (MEM_VOLATILE_P (x))
2088 {
2089 do_not_record = 1;
2090 return 0;
2091 }
2092 break;
2093
2094 default:
2095 break;
2096 }
2097
2098 i = GET_RTX_LENGTH (code) - 1;
2099 hash += (unsigned) code + (unsigned) GET_MODE (x);
2100 fmt = GET_RTX_FORMAT (code);
2101 for (; i >= 0; i--)
2102 {
2103 if (fmt[i] == 'e')
2104 {
2105 rtx tem = XEXP (x, i);
2106
2107 /* If we are about to do the last recursive call
2108 needed at this level, change it into iteration.
2109 This function is called enough to be worth it. */
2110 if (i == 0)
2111 {
2112 x = tem;
2113 goto repeat;
2114 }
2115 hash += canon_hash (tem, 0);
2116 }
2117 else if (fmt[i] == 'E')
2118 for (j = 0; j < XVECLEN (x, i); j++)
2119 hash += canon_hash (XVECEXP (x, i, j), 0);
2120 else if (fmt[i] == 's')
2121 {
2122 register unsigned char *p = (unsigned char *) XSTR (x, i);
2123 if (p)
2124 while (*p)
2125 hash += *p++;
2126 }
2127 else if (fmt[i] == 'i')
2128 {
2129 register unsigned tem = XINT (x, i);
2130 hash += tem;
2131 }
2132 else if (fmt[i] == '0')
2133 /* unused */;
2134 else
2135 abort ();
2136 }
2137 return hash;
2138 }
2139
2140 /* Like canon_hash but with no side effects. */
2141
2142 static unsigned
2143 safe_hash (x, mode)
2144 rtx x;
2145 enum machine_mode mode;
2146 {
2147 int save_do_not_record = do_not_record;
2148 int save_hash_arg_in_memory = hash_arg_in_memory;
2149 int save_hash_arg_in_struct = hash_arg_in_struct;
2150 unsigned hash = canon_hash (x, mode);
2151 hash_arg_in_memory = save_hash_arg_in_memory;
2152 hash_arg_in_struct = save_hash_arg_in_struct;
2153 do_not_record = save_do_not_record;
2154 return hash;
2155 }
2156 \f
2157 /* Return 1 iff X and Y would canonicalize into the same thing,
2158 without actually constructing the canonicalization of either one.
2159 If VALIDATE is nonzero,
2160 we assume X is an expression being processed from the rtl
2161 and Y was found in the hash table. We check register refs
2162 in Y for being marked as valid.
2163
2164 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2165 that is known to be in the register. Ordinarily, we don't allow them
2166 to match, because letting them match would cause unpredictable results
2167 in all the places that search a hash table chain for an equivalent
2168 for a given value. A possible equivalent that has different structure
2169 has its hash code computed from different data. Whether the hash code
2170 is the same as that of the given value is pure luck. */
2171
2172 static int
2173 exp_equiv_p (x, y, validate, equal_values)
2174 rtx x, y;
2175 int validate;
2176 int equal_values;
2177 {
2178 register int i, j;
2179 register enum rtx_code code;
2180 register char *fmt;
2181
2182 /* Note: it is incorrect to assume an expression is equivalent to itself
2183 if VALIDATE is nonzero. */
2184 if (x == y && !validate)
2185 return 1;
2186 if (x == 0 || y == 0)
2187 return x == y;
2188
2189 code = GET_CODE (x);
2190 if (code != GET_CODE (y))
2191 {
2192 if (!equal_values)
2193 return 0;
2194
2195 /* If X is a constant and Y is a register or vice versa, they may be
2196 equivalent. We only have to validate if Y is a register. */
2197 if (CONSTANT_P (x) && GET_CODE (y) == REG
2198 && REGNO_QTY_VALID_P (REGNO (y))
2199 && GET_MODE (y) == qty_mode[reg_qty[REGNO (y)]]
2200 && rtx_equal_p (x, qty_const[reg_qty[REGNO (y)]])
2201 && (! validate || reg_in_table[REGNO (y)] == reg_tick[REGNO (y)]))
2202 return 1;
2203
2204 if (CONSTANT_P (y) && code == REG
2205 && REGNO_QTY_VALID_P (REGNO (x))
2206 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
2207 && rtx_equal_p (y, qty_const[reg_qty[REGNO (x)]]))
2208 return 1;
2209
2210 return 0;
2211 }
2212
2213 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2214 if (GET_MODE (x) != GET_MODE (y))
2215 return 0;
2216
2217 switch (code)
2218 {
2219 case PC:
2220 case CC0:
2221 return x == y;
2222
2223 case CONST_INT:
2224 return INTVAL (x) == INTVAL (y);
2225
2226 case LABEL_REF:
2227 return XEXP (x, 0) == XEXP (y, 0);
2228
2229 case SYMBOL_REF:
2230 return XSTR (x, 0) == XSTR (y, 0);
2231
2232 case REG:
2233 {
2234 int regno = REGNO (y);
2235 int endregno
2236 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2237 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2238 int i;
2239
2240 /* If the quantities are not the same, the expressions are not
2241 equivalent. If there are and we are not to validate, they
2242 are equivalent. Otherwise, ensure all regs are up-to-date. */
2243
2244 if (reg_qty[REGNO (x)] != reg_qty[regno])
2245 return 0;
2246
2247 if (! validate)
2248 return 1;
2249
2250 for (i = regno; i < endregno; i++)
2251 if (reg_in_table[i] != reg_tick[i])
2252 return 0;
2253
2254 return 1;
2255 }
2256
2257 /* For commutative operations, check both orders. */
2258 case PLUS:
2259 case MULT:
2260 case AND:
2261 case IOR:
2262 case XOR:
2263 case NE:
2264 case EQ:
2265 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2266 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2267 validate, equal_values))
2268 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2269 validate, equal_values)
2270 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2271 validate, equal_values)));
2272
2273 default:
2274 break;
2275 }
2276
2277 /* Compare the elements. If any pair of corresponding elements
2278 fail to match, return 0 for the whole things. */
2279
2280 fmt = GET_RTX_FORMAT (code);
2281 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2282 {
2283 switch (fmt[i])
2284 {
2285 case 'e':
2286 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2287 return 0;
2288 break;
2289
2290 case 'E':
2291 if (XVECLEN (x, i) != XVECLEN (y, i))
2292 return 0;
2293 for (j = 0; j < XVECLEN (x, i); j++)
2294 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2295 validate, equal_values))
2296 return 0;
2297 break;
2298
2299 case 's':
2300 if (strcmp (XSTR (x, i), XSTR (y, i)))
2301 return 0;
2302 break;
2303
2304 case 'i':
2305 if (XINT (x, i) != XINT (y, i))
2306 return 0;
2307 break;
2308
2309 case 'w':
2310 if (XWINT (x, i) != XWINT (y, i))
2311 return 0;
2312 break;
2313
2314 case '0':
2315 break;
2316
2317 default:
2318 abort ();
2319 }
2320 }
2321
2322 return 1;
2323 }
2324 \f
2325 /* Return 1 iff any subexpression of X matches Y.
2326 Here we do not require that X or Y be valid (for registers referred to)
2327 for being in the hash table. */
2328
2329 static int
2330 refers_to_p (x, y)
2331 rtx x, y;
2332 {
2333 register int i;
2334 register enum rtx_code code;
2335 register char *fmt;
2336
2337 repeat:
2338 if (x == y)
2339 return 1;
2340 if (x == 0 || y == 0)
2341 return 0;
2342
2343 code = GET_CODE (x);
2344 /* If X as a whole has the same code as Y, they may match.
2345 If so, return 1. */
2346 if (code == GET_CODE (y))
2347 {
2348 if (exp_equiv_p (x, y, 0, 1))
2349 return 1;
2350 }
2351
2352 /* X does not match, so try its subexpressions. */
2353
2354 fmt = GET_RTX_FORMAT (code);
2355 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2356 if (fmt[i] == 'e')
2357 {
2358 if (i == 0)
2359 {
2360 x = XEXP (x, 0);
2361 goto repeat;
2362 }
2363 else
2364 if (refers_to_p (XEXP (x, i), y))
2365 return 1;
2366 }
2367 else if (fmt[i] == 'E')
2368 {
2369 int j;
2370 for (j = 0; j < XVECLEN (x, i); j++)
2371 if (refers_to_p (XVECEXP (x, i, j), y))
2372 return 1;
2373 }
2374
2375 return 0;
2376 }
2377 \f
2378 /* Given ADDR and SIZE (a memory address, and the size of the memory reference),
2379 set PBASE, PSTART, and PEND which correspond to the base of the address,
2380 the starting offset, and ending offset respectively.
2381
2382 ADDR is known to be a nonvarying address. */
2383
2384 /* ??? Despite what the comments say, this function is in fact frequently
2385 passed varying addresses. This does not appear to cause any problems. */
2386
2387 static void
2388 set_nonvarying_address_components (addr, size, pbase, pstart, pend)
2389 rtx addr;
2390 int size;
2391 rtx *pbase;
2392 HOST_WIDE_INT *pstart, *pend;
2393 {
2394 rtx base;
2395 HOST_WIDE_INT start, end;
2396
2397 base = addr;
2398 start = 0;
2399 end = 0;
2400
2401 if (flag_pic && GET_CODE (base) == PLUS
2402 && XEXP (base, 0) == pic_offset_table_rtx)
2403 base = XEXP (base, 1);
2404
2405 /* Registers with nonvarying addresses usually have constant equivalents;
2406 but the frame pointer register is also possible. */
2407 if (GET_CODE (base) == REG
2408 && qty_const != 0
2409 && REGNO_QTY_VALID_P (REGNO (base))
2410 && qty_mode[reg_qty[REGNO (base)]] == GET_MODE (base)
2411 && qty_const[reg_qty[REGNO (base)]] != 0)
2412 base = qty_const[reg_qty[REGNO (base)]];
2413 else if (GET_CODE (base) == PLUS
2414 && GET_CODE (XEXP (base, 1)) == CONST_INT
2415 && GET_CODE (XEXP (base, 0)) == REG
2416 && qty_const != 0
2417 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2418 && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2419 == GET_MODE (XEXP (base, 0)))
2420 && qty_const[reg_qty[REGNO (XEXP (base, 0))]])
2421 {
2422 start = INTVAL (XEXP (base, 1));
2423 base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2424 }
2425 /* This can happen as the result of virtual register instantiation,
2426 if the initial offset is too large to be a valid address. */
2427 else if (GET_CODE (base) == PLUS
2428 && GET_CODE (XEXP (base, 0)) == REG
2429 && GET_CODE (XEXP (base, 1)) == REG
2430 && qty_const != 0
2431 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2432 && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2433 == GET_MODE (XEXP (base, 0)))
2434 && qty_const[reg_qty[REGNO (XEXP (base, 0))]]
2435 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 1)))
2436 && (qty_mode[reg_qty[REGNO (XEXP (base, 1))]]
2437 == GET_MODE (XEXP (base, 1)))
2438 && qty_const[reg_qty[REGNO (XEXP (base, 1))]])
2439 {
2440 rtx tem = qty_const[reg_qty[REGNO (XEXP (base, 1))]];
2441 base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2442
2443 /* One of the two values must be a constant. */
2444 if (GET_CODE (base) != CONST_INT)
2445 {
2446 if (GET_CODE (tem) != CONST_INT)
2447 abort ();
2448 start = INTVAL (tem);
2449 }
2450 else
2451 {
2452 start = INTVAL (base);
2453 base = tem;
2454 }
2455 }
2456
2457 /* Handle everything that we can find inside an address that has been
2458 viewed as constant. */
2459
2460 while (1)
2461 {
2462 /* If no part of this switch does a "continue", the code outside
2463 will exit this loop. */
2464
2465 switch (GET_CODE (base))
2466 {
2467 case LO_SUM:
2468 /* By definition, operand1 of a LO_SUM is the associated constant
2469 address. Use the associated constant address as the base
2470 instead. */
2471 base = XEXP (base, 1);
2472 continue;
2473
2474 case CONST:
2475 /* Strip off CONST. */
2476 base = XEXP (base, 0);
2477 continue;
2478
2479 case PLUS:
2480 if (GET_CODE (XEXP (base, 1)) == CONST_INT)
2481 {
2482 start += INTVAL (XEXP (base, 1));
2483 base = XEXP (base, 0);
2484 continue;
2485 }
2486 break;
2487
2488 case AND:
2489 /* Handle the case of an AND which is the negative of a power of
2490 two. This is used to represent unaligned memory operations. */
2491 if (GET_CODE (XEXP (base, 1)) == CONST_INT
2492 && exact_log2 (- INTVAL (XEXP (base, 1))) > 0)
2493 {
2494 set_nonvarying_address_components (XEXP (base, 0), size,
2495 pbase, pstart, pend);
2496
2497 /* Assume the worst misalignment. START is affected, but not
2498 END, so compensate but adjusting SIZE. Don't lose any
2499 constant we already had. */
2500
2501 size = *pend - *pstart - INTVAL (XEXP (base, 1)) - 1;
2502 start += *pstart + INTVAL (XEXP (base, 1)) + 1;
2503 end += *pend;
2504 base = *pbase;
2505 }
2506 break;
2507
2508 default:
2509 break;
2510 }
2511
2512 break;
2513 }
2514
2515 if (GET_CODE (base) == CONST_INT)
2516 {
2517 start += INTVAL (base);
2518 base = const0_rtx;
2519 }
2520
2521 end = start + size;
2522
2523 /* Set the return values. */
2524 *pbase = base;
2525 *pstart = start;
2526 *pend = end;
2527 }
2528
2529 /* Return 1 if X has a value that can vary even between two
2530 executions of the program. 0 means X can be compared reliably
2531 against certain constants or near-constants. */
2532
2533 static int
2534 cse_rtx_varies_p (x)
2535 register rtx x;
2536 {
2537 /* We need not check for X and the equivalence class being of the same
2538 mode because if X is equivalent to a constant in some mode, it
2539 doesn't vary in any mode. */
2540
2541 if (GET_CODE (x) == REG
2542 && REGNO_QTY_VALID_P (REGNO (x))
2543 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
2544 && qty_const[reg_qty[REGNO (x)]] != 0)
2545 return 0;
2546
2547 if (GET_CODE (x) == PLUS
2548 && GET_CODE (XEXP (x, 1)) == CONST_INT
2549 && GET_CODE (XEXP (x, 0)) == REG
2550 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2551 && (GET_MODE (XEXP (x, 0))
2552 == qty_mode[reg_qty[REGNO (XEXP (x, 0))]])
2553 && qty_const[reg_qty[REGNO (XEXP (x, 0))]])
2554 return 0;
2555
2556 /* This can happen as the result of virtual register instantiation, if
2557 the initial constant is too large to be a valid address. This gives
2558 us a three instruction sequence, load large offset into a register,
2559 load fp minus a constant into a register, then a MEM which is the
2560 sum of the two `constant' registers. */
2561 if (GET_CODE (x) == PLUS
2562 && GET_CODE (XEXP (x, 0)) == REG
2563 && GET_CODE (XEXP (x, 1)) == REG
2564 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2565 && (GET_MODE (XEXP (x, 0))
2566 == qty_mode[reg_qty[REGNO (XEXP (x, 0))]])
2567 && qty_const[reg_qty[REGNO (XEXP (x, 0))]]
2568 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1)))
2569 && (GET_MODE (XEXP (x, 1))
2570 == qty_mode[reg_qty[REGNO (XEXP (x, 1))]])
2571 && qty_const[reg_qty[REGNO (XEXP (x, 1))]])
2572 return 0;
2573
2574 return rtx_varies_p (x);
2575 }
2576 \f
2577 /* Canonicalize an expression:
2578 replace each register reference inside it
2579 with the "oldest" equivalent register.
2580
2581 If INSN is non-zero and we are replacing a pseudo with a hard register
2582 or vice versa, validate_change is used to ensure that INSN remains valid
2583 after we make our substitution. The calls are made with IN_GROUP non-zero
2584 so apply_change_group must be called upon the outermost return from this
2585 function (unless INSN is zero). The result of apply_change_group can
2586 generally be discarded since the changes we are making are optional. */
2587
2588 static rtx
2589 canon_reg (x, insn)
2590 rtx x;
2591 rtx insn;
2592 {
2593 register int i;
2594 register enum rtx_code code;
2595 register char *fmt;
2596
2597 if (x == 0)
2598 return x;
2599
2600 code = GET_CODE (x);
2601 switch (code)
2602 {
2603 case PC:
2604 case CC0:
2605 case CONST:
2606 case CONST_INT:
2607 case CONST_DOUBLE:
2608 case SYMBOL_REF:
2609 case LABEL_REF:
2610 case ADDR_VEC:
2611 case ADDR_DIFF_VEC:
2612 return x;
2613
2614 case REG:
2615 {
2616 register int first;
2617
2618 /* Never replace a hard reg, because hard regs can appear
2619 in more than one machine mode, and we must preserve the mode
2620 of each occurrence. Also, some hard regs appear in
2621 MEMs that are shared and mustn't be altered. Don't try to
2622 replace any reg that maps to a reg of class NO_REGS. */
2623 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2624 || ! REGNO_QTY_VALID_P (REGNO (x)))
2625 return x;
2626
2627 first = qty_first_reg[reg_qty[REGNO (x)]];
2628 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2629 : REGNO_REG_CLASS (first) == NO_REGS ? x
2630 : gen_rtx_REG (qty_mode[reg_qty[REGNO (x)]], first));
2631 }
2632
2633 default:
2634 break;
2635 }
2636
2637 fmt = GET_RTX_FORMAT (code);
2638 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2639 {
2640 register int j;
2641
2642 if (fmt[i] == 'e')
2643 {
2644 rtx new = canon_reg (XEXP (x, i), insn);
2645 int insn_code;
2646
2647 /* If replacing pseudo with hard reg or vice versa, ensure the
2648 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2649 if (insn != 0 && new != 0
2650 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2651 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2652 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2653 || (insn_code = recog_memoized (insn)) < 0
2654 || insn_n_dups[insn_code] > 0))
2655 validate_change (insn, &XEXP (x, i), new, 1);
2656 else
2657 XEXP (x, i) = new;
2658 }
2659 else if (fmt[i] == 'E')
2660 for (j = 0; j < XVECLEN (x, i); j++)
2661 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2662 }
2663
2664 return x;
2665 }
2666 \f
2667 /* LOC is a location within INSN that is an operand address (the contents of
2668 a MEM). Find the best equivalent address to use that is valid for this
2669 insn.
2670
2671 On most CISC machines, complicated address modes are costly, and rtx_cost
2672 is a good approximation for that cost. However, most RISC machines have
2673 only a few (usually only one) memory reference formats. If an address is
2674 valid at all, it is often just as cheap as any other address. Hence, for
2675 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2676 costs of various addresses. For two addresses of equal cost, choose the one
2677 with the highest `rtx_cost' value as that has the potential of eliminating
2678 the most insns. For equal costs, we choose the first in the equivalence
2679 class. Note that we ignore the fact that pseudo registers are cheaper
2680 than hard registers here because we would also prefer the pseudo registers.
2681 */
2682
2683 static void
2684 find_best_addr (insn, loc)
2685 rtx insn;
2686 rtx *loc;
2687 {
2688 struct table_elt *elt;
2689 rtx addr = *loc;
2690 #ifdef ADDRESS_COST
2691 struct table_elt *p;
2692 int found_better = 1;
2693 #endif
2694 int save_do_not_record = do_not_record;
2695 int save_hash_arg_in_memory = hash_arg_in_memory;
2696 int save_hash_arg_in_struct = hash_arg_in_struct;
2697 int addr_volatile;
2698 int regno;
2699 unsigned hash;
2700
2701 /* Do not try to replace constant addresses or addresses of local and
2702 argument slots. These MEM expressions are made only once and inserted
2703 in many instructions, as well as being used to control symbol table
2704 output. It is not safe to clobber them.
2705
2706 There are some uncommon cases where the address is already in a register
2707 for some reason, but we cannot take advantage of that because we have
2708 no easy way to unshare the MEM. In addition, looking up all stack
2709 addresses is costly. */
2710 if ((GET_CODE (addr) == PLUS
2711 && GET_CODE (XEXP (addr, 0)) == REG
2712 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2713 && (regno = REGNO (XEXP (addr, 0)),
2714 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2715 || regno == ARG_POINTER_REGNUM))
2716 || (GET_CODE (addr) == REG
2717 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2718 || regno == HARD_FRAME_POINTER_REGNUM
2719 || regno == ARG_POINTER_REGNUM))
2720 || GET_CODE (addr) == ADDRESSOF
2721 || CONSTANT_ADDRESS_P (addr))
2722 return;
2723
2724 /* If this address is not simply a register, try to fold it. This will
2725 sometimes simplify the expression. Many simplifications
2726 will not be valid, but some, usually applying the associative rule, will
2727 be valid and produce better code. */
2728 if (GET_CODE (addr) != REG)
2729 {
2730 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2731
2732 if (1
2733 #ifdef ADDRESS_COST
2734 && (CSE_ADDRESS_COST (folded) < CSE_ADDRESS_COST (addr)
2735 || (CSE_ADDRESS_COST (folded) == CSE_ADDRESS_COST (addr)
2736 && rtx_cost (folded, MEM) > rtx_cost (addr, MEM)))
2737 #else
2738 && rtx_cost (folded, MEM) < rtx_cost (addr, MEM)
2739 #endif
2740 && validate_change (insn, loc, folded, 0))
2741 addr = folded;
2742 }
2743
2744 /* If this address is not in the hash table, we can't look for equivalences
2745 of the whole address. Also, ignore if volatile. */
2746
2747 do_not_record = 0;
2748 hash = HASH (addr, Pmode);
2749 addr_volatile = do_not_record;
2750 do_not_record = save_do_not_record;
2751 hash_arg_in_memory = save_hash_arg_in_memory;
2752 hash_arg_in_struct = save_hash_arg_in_struct;
2753
2754 if (addr_volatile)
2755 return;
2756
2757 elt = lookup (addr, hash, Pmode);
2758
2759 #ifndef ADDRESS_COST
2760 if (elt)
2761 {
2762 int our_cost = elt->cost;
2763
2764 /* Find the lowest cost below ours that works. */
2765 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2766 if (elt->cost < our_cost
2767 && (GET_CODE (elt->exp) == REG
2768 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2769 && validate_change (insn, loc,
2770 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2771 return;
2772 }
2773 #else
2774
2775 if (elt)
2776 {
2777 /* We need to find the best (under the criteria documented above) entry
2778 in the class that is valid. We use the `flag' field to indicate
2779 choices that were invalid and iterate until we can't find a better
2780 one that hasn't already been tried. */
2781
2782 for (p = elt->first_same_value; p; p = p->next_same_value)
2783 p->flag = 0;
2784
2785 while (found_better)
2786 {
2787 int best_addr_cost = CSE_ADDRESS_COST (*loc);
2788 int best_rtx_cost = (elt->cost + 1) >> 1;
2789 struct table_elt *best_elt = elt;
2790
2791 found_better = 0;
2792 for (p = elt->first_same_value; p; p = p->next_same_value)
2793 if (! p->flag)
2794 {
2795 if ((GET_CODE (p->exp) == REG
2796 || exp_equiv_p (p->exp, p->exp, 1, 0))
2797 && (CSE_ADDRESS_COST (p->exp) < best_addr_cost
2798 || (CSE_ADDRESS_COST (p->exp) == best_addr_cost
2799 && (p->cost + 1) >> 1 > best_rtx_cost)))
2800 {
2801 found_better = 1;
2802 best_addr_cost = CSE_ADDRESS_COST (p->exp);
2803 best_rtx_cost = (p->cost + 1) >> 1;
2804 best_elt = p;
2805 }
2806 }
2807
2808 if (found_better)
2809 {
2810 if (validate_change (insn, loc,
2811 canon_reg (copy_rtx (best_elt->exp),
2812 NULL_RTX), 0))
2813 return;
2814 else
2815 best_elt->flag = 1;
2816 }
2817 }
2818 }
2819
2820 /* If the address is a binary operation with the first operand a register
2821 and the second a constant, do the same as above, but looking for
2822 equivalences of the register. Then try to simplify before checking for
2823 the best address to use. This catches a few cases: First is when we
2824 have REG+const and the register is another REG+const. We can often merge
2825 the constants and eliminate one insn and one register. It may also be
2826 that a machine has a cheap REG+REG+const. Finally, this improves the
2827 code on the Alpha for unaligned byte stores. */
2828
2829 if (flag_expensive_optimizations
2830 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2831 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2832 && GET_CODE (XEXP (*loc, 0)) == REG
2833 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2834 {
2835 rtx c = XEXP (*loc, 1);
2836
2837 do_not_record = 0;
2838 hash = HASH (XEXP (*loc, 0), Pmode);
2839 do_not_record = save_do_not_record;
2840 hash_arg_in_memory = save_hash_arg_in_memory;
2841 hash_arg_in_struct = save_hash_arg_in_struct;
2842
2843 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2844 if (elt == 0)
2845 return;
2846
2847 /* We need to find the best (under the criteria documented above) entry
2848 in the class that is valid. We use the `flag' field to indicate
2849 choices that were invalid and iterate until we can't find a better
2850 one that hasn't already been tried. */
2851
2852 for (p = elt->first_same_value; p; p = p->next_same_value)
2853 p->flag = 0;
2854
2855 while (found_better)
2856 {
2857 int best_addr_cost = CSE_ADDRESS_COST (*loc);
2858 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2859 struct table_elt *best_elt = elt;
2860 rtx best_rtx = *loc;
2861 int count;
2862
2863 /* This is at worst case an O(n^2) algorithm, so limit our search
2864 to the first 32 elements on the list. This avoids trouble
2865 compiling code with very long basic blocks that can easily
2866 call cse_gen_binary so many times that we run out of memory. */
2867
2868 found_better = 0;
2869 for (p = elt->first_same_value, count = 0;
2870 p && count < 32;
2871 p = p->next_same_value, count++)
2872 if (! p->flag
2873 && (GET_CODE (p->exp) == REG
2874 || exp_equiv_p (p->exp, p->exp, 1, 0)))
2875 {
2876 rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);
2877
2878 if ((CSE_ADDRESS_COST (new) < best_addr_cost
2879 || (CSE_ADDRESS_COST (new) == best_addr_cost
2880 && (COST (new) + 1) >> 1 > best_rtx_cost)))
2881 {
2882 found_better = 1;
2883 best_addr_cost = CSE_ADDRESS_COST (new);
2884 best_rtx_cost = (COST (new) + 1) >> 1;
2885 best_elt = p;
2886 best_rtx = new;
2887 }
2888 }
2889
2890 if (found_better)
2891 {
2892 if (validate_change (insn, loc,
2893 canon_reg (copy_rtx (best_rtx),
2894 NULL_RTX), 0))
2895 return;
2896 else
2897 best_elt->flag = 1;
2898 }
2899 }
2900 }
2901 #endif
2902 }
2903 \f
2904 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2905 operation (EQ, NE, GT, etc.), follow it back through the hash table and
2906 what values are being compared.
2907
2908 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2909 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2910 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2911 compared to produce cc0.
2912
2913 The return value is the comparison operator and is either the code of
2914 A or the code corresponding to the inverse of the comparison. */
2915
2916 static enum rtx_code
2917 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
2918 enum rtx_code code;
2919 rtx *parg1, *parg2;
2920 enum machine_mode *pmode1, *pmode2;
2921 {
2922 rtx arg1, arg2;
2923
2924 arg1 = *parg1, arg2 = *parg2;
2925
2926 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2927
2928 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2929 {
2930 /* Set non-zero when we find something of interest. */
2931 rtx x = 0;
2932 int reverse_code = 0;
2933 struct table_elt *p = 0;
2934
2935 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2936 On machines with CC0, this is the only case that can occur, since
2937 fold_rtx will return the COMPARE or item being compared with zero
2938 when given CC0. */
2939
2940 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2941 x = arg1;
2942
2943 /* If ARG1 is a comparison operator and CODE is testing for
2944 STORE_FLAG_VALUE, get the inner arguments. */
2945
2946 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
2947 {
2948 if (code == NE
2949 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2950 && code == LT && STORE_FLAG_VALUE == -1)
2951 #ifdef FLOAT_STORE_FLAG_VALUE
2952 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2953 && FLOAT_STORE_FLAG_VALUE < 0)
2954 #endif
2955 )
2956 x = arg1;
2957 else if (code == EQ
2958 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2959 && code == GE && STORE_FLAG_VALUE == -1)
2960 #ifdef FLOAT_STORE_FLAG_VALUE
2961 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2962 && FLOAT_STORE_FLAG_VALUE < 0)
2963 #endif
2964 )
2965 x = arg1, reverse_code = 1;
2966 }
2967
2968 /* ??? We could also check for
2969
2970 (ne (and (eq (...) (const_int 1))) (const_int 0))
2971
2972 and related forms, but let's wait until we see them occurring. */
2973
2974 if (x == 0)
2975 /* Look up ARG1 in the hash table and see if it has an equivalence
2976 that lets us see what is being compared. */
2977 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
2978 GET_MODE (arg1));
2979 if (p) p = p->first_same_value;
2980
2981 for (; p; p = p->next_same_value)
2982 {
2983 enum machine_mode inner_mode = GET_MODE (p->exp);
2984
2985 /* If the entry isn't valid, skip it. */
2986 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
2987 continue;
2988
2989 if (GET_CODE (p->exp) == COMPARE
2990 /* Another possibility is that this machine has a compare insn
2991 that includes the comparison code. In that case, ARG1 would
2992 be equivalent to a comparison operation that would set ARG1 to
2993 either STORE_FLAG_VALUE or zero. If this is an NE operation,
2994 ORIG_CODE is the actual comparison being done; if it is an EQ,
2995 we must reverse ORIG_CODE. On machine with a negative value
2996 for STORE_FLAG_VALUE, also look at LT and GE operations. */
2997 || ((code == NE
2998 || (code == LT
2999 && GET_MODE_CLASS (inner_mode) == MODE_INT
3000 && (GET_MODE_BITSIZE (inner_mode)
3001 <= HOST_BITS_PER_WIDE_INT)
3002 && (STORE_FLAG_VALUE
3003 & ((HOST_WIDE_INT) 1
3004 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3005 #ifdef FLOAT_STORE_FLAG_VALUE
3006 || (code == LT
3007 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3008 && FLOAT_STORE_FLAG_VALUE < 0)
3009 #endif
3010 )
3011 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3012 {
3013 x = p->exp;
3014 break;
3015 }
3016 else if ((code == EQ
3017 || (code == GE
3018 && GET_MODE_CLASS (inner_mode) == MODE_INT
3019 && (GET_MODE_BITSIZE (inner_mode)
3020 <= HOST_BITS_PER_WIDE_INT)
3021 && (STORE_FLAG_VALUE
3022 & ((HOST_WIDE_INT) 1
3023 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3024 #ifdef FLOAT_STORE_FLAG_VALUE
3025 || (code == GE
3026 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3027 && FLOAT_STORE_FLAG_VALUE < 0)
3028 #endif
3029 )
3030 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3031 {
3032 reverse_code = 1;
3033 x = p->exp;
3034 break;
3035 }
3036
3037 /* If this is fp + constant, the equivalent is a better operand since
3038 it may let us predict the value of the comparison. */
3039 else if (NONZERO_BASE_PLUS_P (p->exp))
3040 {
3041 arg1 = p->exp;
3042 continue;
3043 }
3044 }
3045
3046 /* If we didn't find a useful equivalence for ARG1, we are done.
3047 Otherwise, set up for the next iteration. */
3048 if (x == 0)
3049 break;
3050
3051 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3052 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3053 code = GET_CODE (x);
3054
3055 if (reverse_code)
3056 code = reverse_condition (code);
3057 }
3058
3059 /* Return our results. Return the modes from before fold_rtx
3060 because fold_rtx might produce const_int, and then it's too late. */
3061 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3062 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3063
3064 return code;
3065 }
3066 \f
3067 /* Try to simplify a unary operation CODE whose output mode is to be
3068 MODE with input operand OP whose mode was originally OP_MODE.
3069 Return zero if no simplification can be made. */
3070
3071 rtx
3072 simplify_unary_operation (code, mode, op, op_mode)
3073 enum rtx_code code;
3074 enum machine_mode mode;
3075 rtx op;
3076 enum machine_mode op_mode;
3077 {
3078 register int width = GET_MODE_BITSIZE (mode);
3079
3080 /* The order of these tests is critical so that, for example, we don't
3081 check the wrong mode (input vs. output) for a conversion operation,
3082 such as FIX. At some point, this should be simplified. */
3083
3084 #if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC)
3085
3086 if (code == FLOAT && GET_MODE (op) == VOIDmode
3087 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3088 {
3089 HOST_WIDE_INT hv, lv;
3090 REAL_VALUE_TYPE d;
3091
3092 if (GET_CODE (op) == CONST_INT)
3093 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
3094 else
3095 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
3096
3097 #ifdef REAL_ARITHMETIC
3098 REAL_VALUE_FROM_INT (d, lv, hv, mode);
3099 #else
3100 if (hv < 0)
3101 {
3102 d = (double) (~ hv);
3103 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3104 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3105 d += (double) (unsigned HOST_WIDE_INT) (~ lv);
3106 d = (- d - 1.0);
3107 }
3108 else
3109 {
3110 d = (double) hv;
3111 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3112 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3113 d += (double) (unsigned HOST_WIDE_INT) lv;
3114 }
3115 #endif /* REAL_ARITHMETIC */
3116 d = real_value_truncate (mode, d);
3117 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3118 }
3119 else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
3120 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3121 {
3122 HOST_WIDE_INT hv, lv;
3123 REAL_VALUE_TYPE d;
3124
3125 if (GET_CODE (op) == CONST_INT)
3126 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
3127 else
3128 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
3129
3130 if (op_mode == VOIDmode)
3131 {
3132 /* We don't know how to interpret negative-looking numbers in
3133 this case, so don't try to fold those. */
3134 if (hv < 0)
3135 return 0;
3136 }
3137 else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
3138 ;
3139 else
3140 hv = 0, lv &= GET_MODE_MASK (op_mode);
3141
3142 #ifdef REAL_ARITHMETIC
3143 REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv, mode);
3144 #else
3145
3146 d = (double) (unsigned HOST_WIDE_INT) hv;
3147 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3148 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3149 d += (double) (unsigned HOST_WIDE_INT) lv;
3150 #endif /* REAL_ARITHMETIC */
3151 d = real_value_truncate (mode, d);
3152 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3153 }
3154 #endif
3155
3156 if (GET_CODE (op) == CONST_INT
3157 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3158 {
3159 register HOST_WIDE_INT arg0 = INTVAL (op);
3160 register HOST_WIDE_INT val;
3161
3162 switch (code)
3163 {
3164 case NOT:
3165 val = ~ arg0;
3166 break;
3167
3168 case NEG:
3169 val = - arg0;
3170 break;
3171
3172 case ABS:
3173 val = (arg0 >= 0 ? arg0 : - arg0);
3174 break;
3175
3176 case FFS:
3177 /* Don't use ffs here. Instead, get low order bit and then its
3178 number. If arg0 is zero, this will return 0, as desired. */
3179 arg0 &= GET_MODE_MASK (mode);
3180 val = exact_log2 (arg0 & (- arg0)) + 1;
3181 break;
3182
3183 case TRUNCATE:
3184 val = arg0;
3185 break;
3186
3187 case ZERO_EXTEND:
3188 if (op_mode == VOIDmode)
3189 op_mode = mode;
3190 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3191 {
3192 /* If we were really extending the mode,
3193 we would have to distinguish between zero-extension
3194 and sign-extension. */
3195 if (width != GET_MODE_BITSIZE (op_mode))
3196 abort ();
3197 val = arg0;
3198 }
3199 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3200 val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3201 else
3202 return 0;
3203 break;
3204
3205 case SIGN_EXTEND:
3206 if (op_mode == VOIDmode)
3207 op_mode = mode;
3208 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3209 {
3210 /* If we were really extending the mode,
3211 we would have to distinguish between zero-extension
3212 and sign-extension. */
3213 if (width != GET_MODE_BITSIZE (op_mode))
3214 abort ();
3215 val = arg0;
3216 }
3217 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3218 {
3219 val
3220 = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3221 if (val
3222 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
3223 val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3224 }
3225 else
3226 return 0;
3227 break;
3228
3229 case SQRT:
3230 return 0;
3231
3232 default:
3233 abort ();
3234 }
3235
3236 /* Clear the bits that don't belong in our mode,
3237 unless they and our sign bit are all one.
3238 So we get either a reasonable negative value or a reasonable
3239 unsigned value for this mode. */
3240 if (width < HOST_BITS_PER_WIDE_INT
3241 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3242 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3243 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3244
3245 /* If this would be an entire word for the target, but is not for
3246 the host, then sign-extend on the host so that the number will look
3247 the same way on the host that it would on the target.
3248
3249 For example, when building a 64 bit alpha hosted 32 bit sparc
3250 targeted compiler, then we want the 32 bit unsigned value -1 to be
3251 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
3252 The later confuses the sparc backend. */
3253
3254 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
3255 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
3256 val |= ((HOST_WIDE_INT) (-1) << width);
3257
3258 return GEN_INT (val);
3259 }
3260
3261 /* We can do some operations on integer CONST_DOUBLEs. Also allow
3262 for a DImode operation on a CONST_INT. */
3263 else if (GET_MODE (op) == VOIDmode && width <= HOST_BITS_PER_INT * 2
3264 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3265 {
3266 HOST_WIDE_INT l1, h1, lv, hv;
3267
3268 if (GET_CODE (op) == CONST_DOUBLE)
3269 l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
3270 else
3271 l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;
3272
3273 switch (code)
3274 {
3275 case NOT:
3276 lv = ~ l1;
3277 hv = ~ h1;
3278 break;
3279
3280 case NEG:
3281 neg_double (l1, h1, &lv, &hv);
3282 break;
3283
3284 case ABS:
3285 if (h1 < 0)
3286 neg_double (l1, h1, &lv, &hv);
3287 else
3288 lv = l1, hv = h1;
3289 break;
3290
3291 case FFS:
3292 hv = 0;
3293 if (l1 == 0)
3294 lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
3295 else
3296 lv = exact_log2 (l1 & (-l1)) + 1;
3297 break;
3298
3299 case TRUNCATE:
3300 /* This is just a change-of-mode, so do nothing. */
3301 lv = l1, hv = h1;
3302 break;
3303
3304 case ZERO_EXTEND:
3305 if (op_mode == VOIDmode
3306 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3307 return 0;
3308
3309 hv = 0;
3310 lv = l1 & GET_MODE_MASK (op_mode);
3311 break;
3312
3313 case SIGN_EXTEND:
3314 if (op_mode == VOIDmode
3315 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3316 return 0;
3317 else
3318 {
3319 lv = l1 & GET_MODE_MASK (op_mode);
3320 if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
3321 && (lv & ((HOST_WIDE_INT) 1
3322 << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
3323 lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3324
3325 hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
3326 }
3327 break;
3328
3329 case SQRT:
3330 return 0;
3331
3332 default:
3333 return 0;
3334 }
3335
3336 return immed_double_const (lv, hv, mode);
3337 }
3338
3339 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3340 else if (GET_CODE (op) == CONST_DOUBLE
3341 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3342 {
3343 REAL_VALUE_TYPE d;
3344 jmp_buf handler;
3345 rtx x;
3346
3347 if (setjmp (handler))
3348 /* There used to be a warning here, but that is inadvisable.
3349 People may want to cause traps, and the natural way
3350 to do it should not get a warning. */
3351 return 0;
3352
3353 set_float_handler (handler);
3354
3355 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3356
3357 switch (code)
3358 {
3359 case NEG:
3360 d = REAL_VALUE_NEGATE (d);
3361 break;
3362
3363 case ABS:
3364 if (REAL_VALUE_NEGATIVE (d))
3365 d = REAL_VALUE_NEGATE (d);
3366 break;
3367
3368 case FLOAT_TRUNCATE:
3369 d = real_value_truncate (mode, d);
3370 break;
3371
3372 case FLOAT_EXTEND:
3373 /* All this does is change the mode. */
3374 break;
3375
3376 case FIX:
3377 d = REAL_VALUE_RNDZINT (d);
3378 break;
3379
3380 case UNSIGNED_FIX:
3381 d = REAL_VALUE_UNSIGNED_RNDZINT (d);
3382 break;
3383
3384 case SQRT:
3385 return 0;
3386
3387 default:
3388 abort ();
3389 }
3390
3391 x = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3392 set_float_handler (NULL_PTR);
3393 return x;
3394 }
3395
3396 else if (GET_CODE (op) == CONST_DOUBLE
3397 && GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
3398 && GET_MODE_CLASS (mode) == MODE_INT
3399 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3400 {
3401 REAL_VALUE_TYPE d;
3402 jmp_buf handler;
3403 HOST_WIDE_INT val;
3404
3405 if (setjmp (handler))
3406 return 0;
3407
3408 set_float_handler (handler);
3409
3410 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3411
3412 switch (code)
3413 {
3414 case FIX:
3415 val = REAL_VALUE_FIX (d);
3416 break;
3417
3418 case UNSIGNED_FIX:
3419 val = REAL_VALUE_UNSIGNED_FIX (d);
3420 break;
3421
3422 default:
3423 abort ();
3424 }
3425
3426 set_float_handler (NULL_PTR);
3427
3428 /* Clear the bits that don't belong in our mode,
3429 unless they and our sign bit are all one.
3430 So we get either a reasonable negative value or a reasonable
3431 unsigned value for this mode. */
3432 if (width < HOST_BITS_PER_WIDE_INT
3433 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3434 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3435 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3436
3437 /* If this would be an entire word for the target, but is not for
3438 the host, then sign-extend on the host so that the number will look
3439 the same way on the host that it would on the target.
3440
3441 For example, when building a 64 bit alpha hosted 32 bit sparc
3442 targeted compiler, then we want the 32 bit unsigned value -1 to be
3443 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
3444 The later confuses the sparc backend. */
3445
3446 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
3447 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
3448 val |= ((HOST_WIDE_INT) (-1) << width);
3449
3450 return GEN_INT (val);
3451 }
3452 #endif
3453 /* This was formerly used only for non-IEEE float.
3454 eggert@twinsun.com says it is safe for IEEE also. */
3455 else
3456 {
3457 /* There are some simplifications we can do even if the operands
3458 aren't constant. */
3459 switch (code)
3460 {
3461 case NEG:
3462 case NOT:
3463 /* (not (not X)) == X, similarly for NEG. */
3464 if (GET_CODE (op) == code)
3465 return XEXP (op, 0);
3466 break;
3467
3468 case SIGN_EXTEND:
3469 /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
3470 becomes just the MINUS if its mode is MODE. This allows
3471 folding switch statements on machines using casesi (such as
3472 the Vax). */
3473 if (GET_CODE (op) == TRUNCATE
3474 && GET_MODE (XEXP (op, 0)) == mode
3475 && GET_CODE (XEXP (op, 0)) == MINUS
3476 && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
3477 && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
3478 return XEXP (op, 0);
3479
3480 #ifdef POINTERS_EXTEND_UNSIGNED
3481 if (! POINTERS_EXTEND_UNSIGNED
3482 && mode == Pmode && GET_MODE (op) == ptr_mode
3483 && CONSTANT_P (op))
3484 return convert_memory_address (Pmode, op);
3485 #endif
3486 break;
3487
3488 #ifdef POINTERS_EXTEND_UNSIGNED
3489 case ZERO_EXTEND:
3490 if (POINTERS_EXTEND_UNSIGNED
3491 && mode == Pmode && GET_MODE (op) == ptr_mode
3492 && CONSTANT_P (op))
3493 return convert_memory_address (Pmode, op);
3494 break;
3495 #endif
3496
3497 default:
3498 break;
3499 }
3500
3501 return 0;
3502 }
3503 }
3504 \f
3505 /* Simplify a binary operation CODE with result mode MODE, operating on OP0
3506 and OP1. Return 0 if no simplification is possible.
3507
3508 Don't use this for relational operations such as EQ or LT.
3509 Use simplify_relational_operation instead. */
3510
3511 rtx
3512 simplify_binary_operation (code, mode, op0, op1)
3513 enum rtx_code code;
3514 enum machine_mode mode;
3515 rtx op0, op1;
3516 {
3517 register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3518 HOST_WIDE_INT val;
3519 int width = GET_MODE_BITSIZE (mode);
3520 rtx tem;
3521
3522 /* Relational operations don't work here. We must know the mode
3523 of the operands in order to do the comparison correctly.
3524 Assuming a full word can give incorrect results.
3525 Consider comparing 128 with -128 in QImode. */
3526
3527 if (GET_RTX_CLASS (code) == '<')
3528 abort ();
3529
3530 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3531 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3532 && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3533 && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3534 {
3535 REAL_VALUE_TYPE f0, f1, value;
3536 jmp_buf handler;
3537
3538 if (setjmp (handler))
3539 return 0;
3540
3541 set_float_handler (handler);
3542
3543 REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3544 REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
3545 f0 = real_value_truncate (mode, f0);
3546 f1 = real_value_truncate (mode, f1);
3547
3548 #ifdef REAL_ARITHMETIC
3549 #ifndef REAL_INFINITY
3550 if (code == DIV && REAL_VALUES_EQUAL (f1, dconst0))
3551 return 0;
3552 #endif
3553 REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
3554 #else
3555 switch (code)
3556 {
3557 case PLUS:
3558 value = f0 + f1;
3559 break;
3560 case MINUS:
3561 value = f0 - f1;
3562 break;
3563 case MULT:
3564 value = f0 * f1;
3565 break;
3566 case DIV:
3567 #ifndef REAL_INFINITY
3568 if (f1 == 0)
3569 return 0;
3570 #endif
3571 value = f0 / f1;
3572 break;
3573 case SMIN:
3574 value = MIN (f0, f1);
3575 break;
3576 case SMAX:
3577 value = MAX (f0, f1);
3578 break;
3579 default:
3580 abort ();
3581 }
3582 #endif
3583
3584 value = real_value_truncate (mode, value);
3585 set_float_handler (NULL_PTR);
3586 return CONST_DOUBLE_FROM_REAL_VALUE (value, mode);
3587 }
3588 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3589
3590 /* We can fold some multi-word operations. */
3591 if (GET_MODE_CLASS (mode) == MODE_INT
3592 && width == HOST_BITS_PER_WIDE_INT * 2
3593 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
3594 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
3595 {
3596 HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
3597
3598 if (GET_CODE (op0) == CONST_DOUBLE)
3599 l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3600 else
3601 l1 = INTVAL (op0), h1 = l1 < 0 ? -1 : 0;
3602
3603 if (GET_CODE (op1) == CONST_DOUBLE)
3604 l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3605 else
3606 l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3607
3608 switch (code)
3609 {
3610 case MINUS:
3611 /* A - B == A + (-B). */
3612 neg_double (l2, h2, &lv, &hv);
3613 l2 = lv, h2 = hv;
3614
3615 /* .. fall through ... */
3616
3617 case PLUS:
3618 add_double (l1, h1, l2, h2, &lv, &hv);
3619 break;
3620
3621 case MULT:
3622 mul_double (l1, h1, l2, h2, &lv, &hv);
3623 break;
3624
3625 case DIV: case MOD: case UDIV: case UMOD:
3626 /* We'd need to include tree.h to do this and it doesn't seem worth
3627 it. */
3628 return 0;
3629
3630 case AND:
3631 lv = l1 & l2, hv = h1 & h2;
3632 break;
3633
3634 case IOR:
3635 lv = l1 | l2, hv = h1 | h2;
3636 break;
3637
3638 case XOR:
3639 lv = l1 ^ l2, hv = h1 ^ h2;
3640 break;
3641
3642 case SMIN:
3643 if (h1 < h2
3644 || (h1 == h2
3645 && ((unsigned HOST_WIDE_INT) l1
3646 < (unsigned HOST_WIDE_INT) l2)))
3647 lv = l1, hv = h1;
3648 else
3649 lv = l2, hv = h2;
3650 break;
3651
3652 case SMAX:
3653 if (h1 > h2
3654 || (h1 == h2
3655 && ((unsigned HOST_WIDE_INT) l1
3656 > (unsigned HOST_WIDE_INT) l2)))
3657 lv = l1, hv = h1;
3658 else
3659 lv = l2, hv = h2;
3660 break;
3661
3662 case UMIN:
3663 if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3664 || (h1 == h2
3665 && ((unsigned HOST_WIDE_INT) l1
3666 < (unsigned HOST_WIDE_INT) l2)))
3667 lv = l1, hv = h1;
3668 else
3669 lv = l2, hv = h2;
3670 break;
3671
3672 case UMAX:
3673 if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3674 || (h1 == h2
3675 && ((unsigned HOST_WIDE_INT) l1
3676 > (unsigned HOST_WIDE_INT) l2)))
3677 lv = l1, hv = h1;
3678 else
3679 lv = l2, hv = h2;
3680 break;
3681
3682 case LSHIFTRT: case ASHIFTRT:
3683 case ASHIFT:
3684 case ROTATE: case ROTATERT:
3685 #ifdef SHIFT_COUNT_TRUNCATED
3686 if (SHIFT_COUNT_TRUNCATED)
3687 l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
3688 #endif
3689
3690 if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3691 return 0;
3692
3693 if (code == LSHIFTRT || code == ASHIFTRT)
3694 rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3695 code == ASHIFTRT);
3696 else if (code == ASHIFT)
3697 lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
3698 else if (code == ROTATE)
3699 lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3700 else /* code == ROTATERT */
3701 rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3702 break;
3703
3704 default:
3705 return 0;
3706 }
3707
3708 return immed_double_const (lv, hv, mode);
3709 }
3710
3711 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
3712 || width > HOST_BITS_PER_WIDE_INT || width == 0)
3713 {
3714 /* Even if we can't compute a constant result,
3715 there are some cases worth simplifying. */
3716
3717 switch (code)
3718 {
3719 case PLUS:
3720 /* In IEEE floating point, x+0 is not the same as x. Similarly
3721 for the other optimizations below. */
3722 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3723 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3724 break;
3725
3726 if (op1 == CONST0_RTX (mode))
3727 return op0;
3728
3729 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3730 if (GET_CODE (op0) == NEG)
3731 return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
3732 else if (GET_CODE (op1) == NEG)
3733 return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
3734
3735 /* Handle both-operands-constant cases. We can only add
3736 CONST_INTs to constants since the sum of relocatable symbols
3737 can't be handled by most assemblers. Don't add CONST_INT
3738 to CONST_INT since overflow won't be computed properly if wider
3739 than HOST_BITS_PER_WIDE_INT. */
3740
3741 if (CONSTANT_P (op0) && GET_MODE (op0) != VOIDmode
3742 && GET_CODE (op1) == CONST_INT)
3743 return plus_constant (op0, INTVAL (op1));
3744 else if (CONSTANT_P (op1) && GET_MODE (op1) != VOIDmode
3745 && GET_CODE (op0) == CONST_INT)
3746 return plus_constant (op1, INTVAL (op0));
3747
3748 /* See if this is something like X * C - X or vice versa or
3749 if the multiplication is written as a shift. If so, we can
3750 distribute and make a new multiply, shift, or maybe just
3751 have X (if C is 2 in the example above). But don't make
3752 real multiply if we didn't have one before. */
3753
3754 if (! FLOAT_MODE_P (mode))
3755 {
3756 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3757 rtx lhs = op0, rhs = op1;
3758 int had_mult = 0;
3759
3760 if (GET_CODE (lhs) == NEG)
3761 coeff0 = -1, lhs = XEXP (lhs, 0);
3762 else if (GET_CODE (lhs) == MULT
3763 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3764 {
3765 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3766 had_mult = 1;
3767 }
3768 else if (GET_CODE (lhs) == ASHIFT
3769 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3770 && INTVAL (XEXP (lhs, 1)) >= 0
3771 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3772 {
3773 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3774 lhs = XEXP (lhs, 0);
3775 }
3776
3777 if (GET_CODE (rhs) == NEG)
3778 coeff1 = -1, rhs = XEXP (rhs, 0);
3779 else if (GET_CODE (rhs) == MULT
3780 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3781 {
3782 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3783 had_mult = 1;
3784 }
3785 else if (GET_CODE (rhs) == ASHIFT
3786 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3787 && INTVAL (XEXP (rhs, 1)) >= 0
3788 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3789 {
3790 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3791 rhs = XEXP (rhs, 0);
3792 }
3793
3794 if (rtx_equal_p (lhs, rhs))
3795 {
3796 tem = cse_gen_binary (MULT, mode, lhs,
3797 GEN_INT (coeff0 + coeff1));
3798 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3799 }
3800 }
3801
3802 /* If one of the operands is a PLUS or a MINUS, see if we can
3803 simplify this by the associative law.
3804 Don't use the associative law for floating point.
3805 The inaccuracy makes it nonassociative,
3806 and subtle programs can break if operations are associated. */
3807
3808 if (INTEGRAL_MODE_P (mode)
3809 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3810 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3811 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3812 return tem;
3813 break;
3814
3815 case COMPARE:
3816 #ifdef HAVE_cc0
3817 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3818 using cc0, in which case we want to leave it as a COMPARE
3819 so we can distinguish it from a register-register-copy.
3820
3821 In IEEE floating point, x-0 is not the same as x. */
3822
3823 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3824 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3825 && op1 == CONST0_RTX (mode))
3826 return op0;
3827 #else
3828 /* Do nothing here. */
3829 #endif
3830 break;
3831
3832 case MINUS:
3833 /* None of these optimizations can be done for IEEE
3834 floating point. */
3835 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3836 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3837 break;
3838
3839 /* We can't assume x-x is 0 even with non-IEEE floating point,
3840 but since it is zero except in very strange circumstances, we
3841 will treat it as zero with -ffast-math. */
3842 if (rtx_equal_p (op0, op1)
3843 && ! side_effects_p (op0)
3844 && (! FLOAT_MODE_P (mode) || flag_fast_math))
3845 return CONST0_RTX (mode);
3846
3847 /* Change subtraction from zero into negation. */
3848 if (op0 == CONST0_RTX (mode))
3849 return gen_rtx_NEG (mode, op1);
3850
3851 /* (-1 - a) is ~a. */
3852 if (op0 == constm1_rtx)
3853 return gen_rtx_NOT (mode, op1);
3854
3855 /* Subtracting 0 has no effect. */
3856 if (op1 == CONST0_RTX (mode))
3857 return op0;
3858
3859 /* See if this is something like X * C - X or vice versa or
3860 if the multiplication is written as a shift. If so, we can
3861 distribute and make a new multiply, shift, or maybe just
3862 have X (if C is 2 in the example above). But don't make
3863 real multiply if we didn't have one before. */
3864
3865 if (! FLOAT_MODE_P (mode))
3866 {
3867 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3868 rtx lhs = op0, rhs = op1;
3869 int had_mult = 0;
3870
3871 if (GET_CODE (lhs) == NEG)
3872 coeff0 = -1, lhs = XEXP (lhs, 0);
3873 else if (GET_CODE (lhs) == MULT
3874 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3875 {
3876 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3877 had_mult = 1;
3878 }
3879 else if (GET_CODE (lhs) == ASHIFT
3880 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3881 && INTVAL (XEXP (lhs, 1)) >= 0
3882 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3883 {
3884 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3885 lhs = XEXP (lhs, 0);
3886 }
3887
3888 if (GET_CODE (rhs) == NEG)
3889 coeff1 = - 1, rhs = XEXP (rhs, 0);
3890 else if (GET_CODE (rhs) == MULT
3891 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3892 {
3893 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3894 had_mult = 1;
3895 }
3896 else if (GET_CODE (rhs) == ASHIFT
3897 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3898 && INTVAL (XEXP (rhs, 1)) >= 0
3899 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3900 {
3901 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3902 rhs = XEXP (rhs, 0);
3903 }
3904
3905 if (rtx_equal_p (lhs, rhs))
3906 {
3907 tem = cse_gen_binary (MULT, mode, lhs,
3908 GEN_INT (coeff0 - coeff1));
3909 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3910 }
3911 }
3912
3913 /* (a - (-b)) -> (a + b). */
3914 if (GET_CODE (op1) == NEG)
3915 return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
3916
3917 /* If one of the operands is a PLUS or a MINUS, see if we can
3918 simplify this by the associative law.
3919 Don't use the associative law for floating point.
3920 The inaccuracy makes it nonassociative,
3921 and subtle programs can break if operations are associated. */
3922
3923 if (INTEGRAL_MODE_P (mode)
3924 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3925 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3926 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3927 return tem;
3928
3929 /* Don't let a relocatable value get a negative coeff. */
3930 if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
3931 return plus_constant (op0, - INTVAL (op1));
3932
3933 /* (x - (x & y)) -> (x & ~y) */
3934 if (GET_CODE (op1) == AND)
3935 {
3936 if (rtx_equal_p (op0, XEXP (op1, 0)))
3937 return cse_gen_binary (AND, mode, op0, gen_rtx_NOT (mode, XEXP (op1, 1)));
3938 if (rtx_equal_p (op0, XEXP (op1, 1)))
3939 return cse_gen_binary (AND, mode, op0, gen_rtx_NOT (mode, XEXP (op1, 0)));
3940 }
3941 break;
3942
3943 case MULT:
3944 if (op1 == constm1_rtx)
3945 {
3946 tem = simplify_unary_operation (NEG, mode, op0, mode);
3947
3948 return tem ? tem : gen_rtx_NEG (mode, op0);
3949 }
3950
3951 /* In IEEE floating point, x*0 is not always 0. */
3952 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3953 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3954 && op1 == CONST0_RTX (mode)
3955 && ! side_effects_p (op0))
3956 return op1;
3957
3958 /* In IEEE floating point, x*1 is not equivalent to x for nans.
3959 However, ANSI says we can drop signals,
3960 so we can do this anyway. */
3961 if (op1 == CONST1_RTX (mode))
3962 return op0;
3963
3964 /* Convert multiply by constant power of two into shift unless
3965 we are still generating RTL. This test is a kludge. */
3966 if (GET_CODE (op1) == CONST_INT
3967 && (val = exact_log2 (INTVAL (op1))) >= 0
3968 /* If the mode is larger than the host word size, and the
3969 uppermost bit is set, then this isn't a power of two due
3970 to implicit sign extension. */
3971 && (width <= HOST_BITS_PER_WIDE_INT
3972 || val != HOST_BITS_PER_WIDE_INT - 1)
3973 && ! rtx_equal_function_value_matters)
3974 return gen_rtx_ASHIFT (mode, op0, GEN_INT (val));
3975
3976 if (GET_CODE (op1) == CONST_DOUBLE
3977 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
3978 {
3979 REAL_VALUE_TYPE d;
3980 jmp_buf handler;
3981 int op1is2, op1ism1;
3982
3983 if (setjmp (handler))
3984 return 0;
3985
3986 set_float_handler (handler);
3987 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3988 op1is2 = REAL_VALUES_EQUAL (d, dconst2);
3989 op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
3990 set_float_handler (NULL_PTR);
3991
3992 /* x*2 is x+x and x*(-1) is -x */
3993 if (op1is2 && GET_MODE (op0) == mode)
3994 return gen_rtx_PLUS (mode, op0, copy_rtx (op0));
3995
3996 else if (op1ism1 && GET_MODE (op0) == mode)
3997 return gen_rtx_NEG (mode, op0);
3998 }
3999 break;
4000
4001 case IOR:
4002 if (op1 == const0_rtx)
4003 return op0;
4004 if (GET_CODE (op1) == CONST_INT
4005 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4006 return op1;
4007 if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4008 return op0;
4009 /* A | (~A) -> -1 */
4010 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
4011 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
4012 && ! side_effects_p (op0)
4013 && GET_MODE_CLASS (mode) != MODE_CC)
4014 return constm1_rtx;
4015 break;
4016
4017 case XOR:
4018 if (op1 == const0_rtx)
4019 return op0;
4020 if (GET_CODE (op1) == CONST_INT
4021 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4022 return gen_rtx_NOT (mode, op0);
4023 if (op0 == op1 && ! side_effects_p (op0)
4024 && GET_MODE_CLASS (mode) != MODE_CC)
4025 return const0_rtx;
4026 break;
4027
4028 case AND:
4029 if (op1 == const0_rtx && ! side_effects_p (op0))
4030 return const0_rtx;
4031 if (GET_CODE (op1) == CONST_INT
4032 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4033 return op0;
4034 if (op0 == op1 && ! side_effects_p (op0)
4035 && GET_MODE_CLASS (mode) != MODE_CC)
4036 return op0;
4037 /* A & (~A) -> 0 */
4038 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
4039 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
4040 && ! side_effects_p (op0)
4041 && GET_MODE_CLASS (mode) != MODE_CC)
4042 return const0_rtx;
4043 break;
4044
4045 case UDIV:
4046 /* Convert divide by power of two into shift (divide by 1 handled
4047 below). */
4048 if (GET_CODE (op1) == CONST_INT
4049 && (arg1 = exact_log2 (INTVAL (op1))) > 0)
4050 return gen_rtx_LSHIFTRT (mode, op0, GEN_INT (arg1));
4051
4052 /* ... fall through ... */
4053
4054 case DIV:
4055 if (op1 == CONST1_RTX (mode))
4056 return op0;
4057
4058 /* In IEEE floating point, 0/x is not always 0. */
4059 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4060 || ! FLOAT_MODE_P (mode) || flag_fast_math)
4061 && op0 == CONST0_RTX (mode)
4062 && ! side_effects_p (op1))
4063 return op0;
4064
4065 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4066 /* Change division by a constant into multiplication. Only do
4067 this with -ffast-math until an expert says it is safe in
4068 general. */
4069 else if (GET_CODE (op1) == CONST_DOUBLE
4070 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
4071 && op1 != CONST0_RTX (mode)
4072 && flag_fast_math)
4073 {
4074 REAL_VALUE_TYPE d;
4075 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
4076
4077 if (! REAL_VALUES_EQUAL (d, dconst0))
4078 {
4079 #if defined (REAL_ARITHMETIC)
4080 REAL_ARITHMETIC (d, rtx_to_tree_code (DIV), dconst1, d);
4081 return gen_rtx_MULT (mode, op0,
4082 CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
4083 #else
4084 return gen_rtx_MULT (mode, op0,
4085 CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
4086 #endif
4087 }
4088 }
4089 #endif
4090 break;
4091
4092 case UMOD:
4093 /* Handle modulus by power of two (mod with 1 handled below). */
4094 if (GET_CODE (op1) == CONST_INT
4095 && exact_log2 (INTVAL (op1)) > 0)
4096 return gen_rtx_AND (mode, op0, GEN_INT (INTVAL (op1) - 1));
4097
4098 /* ... fall through ... */
4099
4100 case MOD:
4101 if ((op0 == const0_rtx || op1 == const1_rtx)
4102 && ! side_effects_p (op0) && ! side_effects_p (op1))
4103 return const0_rtx;
4104 break;
4105
4106 case ROTATERT:
4107 case ROTATE:
4108 /* Rotating ~0 always results in ~0. */
4109 if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
4110 && INTVAL (op0) == GET_MODE_MASK (mode)
4111 && ! side_effects_p (op1))
4112 return op0;
4113
4114 /* ... fall through ... */
4115
4116 case ASHIFT:
4117 case ASHIFTRT:
4118 case LSHIFTRT:
4119 if (op1 == const0_rtx)
4120 return op0;
4121 if (op0 == const0_rtx && ! side_effects_p (op1))
4122 return op0;
4123 break;
4124
4125 case SMIN:
4126 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
4127 && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width -1)
4128 && ! side_effects_p (op0))
4129 return op1;
4130 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4131 return op0;
4132 break;
4133
4134 case SMAX:
4135 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
4136 && (INTVAL (op1)
4137 == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
4138 && ! side_effects_p (op0))
4139 return op1;
4140 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4141 return op0;
4142 break;
4143
4144 case UMIN:
4145 if (op1 == const0_rtx && ! side_effects_p (op0))
4146 return op1;
4147 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4148 return op0;
4149 break;
4150
4151 case UMAX:
4152 if (op1 == constm1_rtx && ! side_effects_p (op0))
4153 return op1;
4154 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4155 return op0;
4156 break;
4157
4158 default:
4159 abort ();
4160 }
4161
4162 return 0;
4163 }
4164
4165 /* Get the integer argument values in two forms:
4166 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
4167
4168 arg0 = INTVAL (op0);
4169 arg1 = INTVAL (op1);
4170
4171 if (width < HOST_BITS_PER_WIDE_INT)
4172 {
4173 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
4174 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
4175
4176 arg0s = arg0;
4177 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4178 arg0s |= ((HOST_WIDE_INT) (-1) << width);
4179
4180 arg1s = arg1;
4181 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4182 arg1s |= ((HOST_WIDE_INT) (-1) << width);
4183 }
4184 else
4185 {
4186 arg0s = arg0;
4187 arg1s = arg1;
4188 }
4189
4190 /* Compute the value of the arithmetic. */
4191
4192 switch (code)
4193 {
4194 case PLUS:
4195 val = arg0s + arg1s;
4196 break;
4197
4198 case MINUS:
4199 val = arg0s - arg1s;
4200 break;
4201
4202 case MULT:
4203 val = arg0s * arg1s;
4204 break;
4205
4206 case DIV:
4207 if (arg1s == 0)
4208 return 0;
4209 val = arg0s / arg1s;
4210 break;
4211
4212 case MOD:
4213 if (arg1s == 0)
4214 return 0;
4215 val = arg0s % arg1s;
4216 break;
4217
4218 case UDIV:
4219 if (arg1 == 0)
4220 return 0;
4221 val = (unsigned HOST_WIDE_INT) arg0 / arg1;
4222 break;
4223
4224 case UMOD:
4225 if (arg1 == 0)
4226 return 0;
4227 val = (unsigned HOST_WIDE_INT) arg0 % arg1;
4228 break;
4229
4230 case AND:
4231 val = arg0 & arg1;
4232 break;
4233
4234 case IOR:
4235 val = arg0 | arg1;
4236 break;
4237
4238 case XOR:
4239 val = arg0 ^ arg1;
4240 break;
4241
4242 case LSHIFTRT:
4243 /* If shift count is undefined, don't fold it; let the machine do
4244 what it wants. But truncate it if the machine will do that. */
4245 if (arg1 < 0)
4246 return 0;
4247
4248 #ifdef SHIFT_COUNT_TRUNCATED
4249 if (SHIFT_COUNT_TRUNCATED)
4250 arg1 %= width;
4251 #endif
4252
4253 val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
4254 break;
4255
4256 case ASHIFT:
4257 if (arg1 < 0)
4258 return 0;
4259
4260 #ifdef SHIFT_COUNT_TRUNCATED
4261 if (SHIFT_COUNT_TRUNCATED)
4262 arg1 %= width;
4263 #endif
4264
4265 val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
4266 break;
4267
4268 case ASHIFTRT:
4269 if (arg1 < 0)
4270 return 0;
4271
4272 #ifdef SHIFT_COUNT_TRUNCATED
4273 if (SHIFT_COUNT_TRUNCATED)
4274 arg1 %= width;
4275 #endif
4276
4277 val = arg0s >> arg1;
4278
4279 /* Bootstrap compiler may not have sign extended the right shift.
4280 Manually extend the sign to insure bootstrap cc matches gcc. */
4281 if (arg0s < 0 && arg1 > 0)
4282 val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
4283
4284 break;
4285
4286 case ROTATERT:
4287 if (arg1 < 0)
4288 return 0;
4289
4290 arg1 %= width;
4291 val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
4292 | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
4293 break;
4294
4295 case ROTATE:
4296 if (arg1 < 0)
4297 return 0;
4298
4299 arg1 %= width;
4300 val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
4301 | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
4302 break;
4303
4304 case COMPARE:
4305 /* Do nothing here. */
4306 return 0;
4307
4308 case SMIN:
4309 val = arg0s <= arg1s ? arg0s : arg1s;
4310 break;
4311
4312 case UMIN:
4313 val = ((unsigned HOST_WIDE_INT) arg0
4314 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4315 break;
4316
4317 case SMAX:
4318 val = arg0s > arg1s ? arg0s : arg1s;
4319 break;
4320
4321 case UMAX:
4322 val = ((unsigned HOST_WIDE_INT) arg0
4323 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4324 break;
4325
4326 default:
4327 abort ();
4328 }
4329
4330 /* Clear the bits that don't belong in our mode, unless they and our sign
4331 bit are all one. So we get either a reasonable negative value or a
4332 reasonable unsigned value for this mode. */
4333 if (width < HOST_BITS_PER_WIDE_INT
4334 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4335 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4336 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4337
4338 /* If this would be an entire word for the target, but is not for
4339 the host, then sign-extend on the host so that the number will look
4340 the same way on the host that it would on the target.
4341
4342 For example, when building a 64 bit alpha hosted 32 bit sparc
4343 targeted compiler, then we want the 32 bit unsigned value -1 to be
4344 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
4345 The later confuses the sparc backend. */
4346
4347 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
4348 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
4349 val |= ((HOST_WIDE_INT) (-1) << width);
4350
4351 return GEN_INT (val);
4352 }
4353 \f
4354 /* Simplify a PLUS or MINUS, at least one of whose operands may be another
4355 PLUS or MINUS.
4356
4357 Rather than test for specific case, we do this by a brute-force method
4358 and do all possible simplifications until no more changes occur. Then
4359 we rebuild the operation. */
4360
4361 static rtx
4362 simplify_plus_minus (code, mode, op0, op1)
4363 enum rtx_code code;
4364 enum machine_mode mode;
4365 rtx op0, op1;
4366 {
4367 rtx ops[8];
4368 int negs[8];
4369 rtx result, tem;
4370 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
4371 int first = 1, negate = 0, changed;
4372 int i, j;
4373
4374 bzero ((char *) ops, sizeof ops);
4375
4376 /* Set up the two operands and then expand them until nothing has been
4377 changed. If we run out of room in our array, give up; this should
4378 almost never happen. */
4379
4380 ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);
4381
4382 changed = 1;
4383 while (changed)
4384 {
4385 changed = 0;
4386
4387 for (i = 0; i < n_ops; i++)
4388 switch (GET_CODE (ops[i]))
4389 {
4390 case PLUS:
4391 case MINUS:
4392 if (n_ops == 7)
4393 return 0;
4394
4395 ops[n_ops] = XEXP (ops[i], 1);
4396 negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
4397 ops[i] = XEXP (ops[i], 0);
4398 input_ops++;
4399 changed = 1;
4400 break;
4401
4402 case NEG:
4403 ops[i] = XEXP (ops[i], 0);
4404 negs[i] = ! negs[i];
4405 changed = 1;
4406 break;
4407
4408 case CONST:
4409 ops[i] = XEXP (ops[i], 0);
4410 input_consts++;
4411 changed = 1;
4412 break;
4413
4414 case NOT:
4415 /* ~a -> (-a - 1) */
4416 if (n_ops != 7)
4417 {
4418 ops[n_ops] = constm1_rtx;
4419 negs[n_ops++] = negs[i];
4420 ops[i] = XEXP (ops[i], 0);
4421 negs[i] = ! negs[i];
4422 changed = 1;
4423 }
4424 break;
4425
4426 case CONST_INT:
4427 if (negs[i])
4428 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
4429 break;
4430
4431 default:
4432 break;
4433 }
4434 }
4435
4436 /* If we only have two operands, we can't do anything. */
4437 if (n_ops <= 2)
4438 return 0;
4439
4440 /* Now simplify each pair of operands until nothing changes. The first
4441 time through just simplify constants against each other. */
4442
4443 changed = 1;
4444 while (changed)
4445 {
4446 changed = first;
4447
4448 for (i = 0; i < n_ops - 1; i++)
4449 for (j = i + 1; j < n_ops; j++)
4450 if (ops[i] != 0 && ops[j] != 0
4451 && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
4452 {
4453 rtx lhs = ops[i], rhs = ops[j];
4454 enum rtx_code ncode = PLUS;
4455
4456 if (negs[i] && ! negs[j])
4457 lhs = ops[j], rhs = ops[i], ncode = MINUS;
4458 else if (! negs[i] && negs[j])
4459 ncode = MINUS;
4460
4461 tem = simplify_binary_operation (ncode, mode, lhs, rhs);
4462 if (tem)
4463 {
4464 ops[i] = tem, ops[j] = 0;
4465 negs[i] = negs[i] && negs[j];
4466 if (GET_CODE (tem) == NEG)
4467 ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];
4468
4469 if (GET_CODE (ops[i]) == CONST_INT && negs[i])
4470 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
4471 changed = 1;
4472 }
4473 }
4474
4475 first = 0;
4476 }
4477
4478 /* Pack all the operands to the lower-numbered entries and give up if
4479 we didn't reduce the number of operands we had. Make sure we
4480 count a CONST as two operands. If we have the same number of
4481 operands, but have made more CONSTs than we had, this is also
4482 an improvement, so accept it. */
4483
4484 for (i = 0, j = 0; j < n_ops; j++)
4485 if (ops[j] != 0)
4486 {
4487 ops[i] = ops[j], negs[i++] = negs[j];
4488 if (GET_CODE (ops[j]) == CONST)
4489 n_consts++;
4490 }
4491
4492 if (i + n_consts > input_ops
4493 || (i + n_consts == input_ops && n_consts <= input_consts))
4494 return 0;
4495
4496 n_ops = i;
4497
4498 /* If we have a CONST_INT, put it last. */
4499 for (i = 0; i < n_ops - 1; i++)
4500 if (GET_CODE (ops[i]) == CONST_INT)
4501 {
4502 tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i] , ops[i] = tem;
4503 j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
4504 }
4505
4506 /* Put a non-negated operand first. If there aren't any, make all
4507 operands positive and negate the whole thing later. */
4508 for (i = 0; i < n_ops && negs[i]; i++)
4509 ;
4510
4511 if (i == n_ops)
4512 {
4513 for (i = 0; i < n_ops; i++)
4514 negs[i] = 0;
4515 negate = 1;
4516 }
4517 else if (i != 0)
4518 {
4519 tem = ops[0], ops[0] = ops[i], ops[i] = tem;
4520 j = negs[0], negs[0] = negs[i], negs[i] = j;
4521 }
4522
4523 /* Now make the result by performing the requested operations. */
4524 result = ops[0];
4525 for (i = 1; i < n_ops; i++)
4526 result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);
4527
4528 return negate ? gen_rtx_NEG (mode, result) : result;
4529 }
4530 \f
4531 /* Make a binary operation by properly ordering the operands and
4532 seeing if the expression folds. */
4533
4534 static rtx
4535 cse_gen_binary (code, mode, op0, op1)
4536 enum rtx_code code;
4537 enum machine_mode mode;
4538 rtx op0, op1;
4539 {
4540 rtx tem;
4541
4542 /* Put complex operands first and constants second if commutative. */
4543 if (GET_RTX_CLASS (code) == 'c'
4544 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4545 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4546 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4547 || (GET_CODE (op0) == SUBREG
4548 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4549 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4550 tem = op0, op0 = op1, op1 = tem;
4551
4552 /* If this simplifies, do it. */
4553 tem = simplify_binary_operation (code, mode, op0, op1);
4554
4555 if (tem)
4556 return tem;
4557
4558 /* Handle addition and subtraction of CONST_INT specially. Otherwise,
4559 just form the operation. */
4560
4561 if (code == PLUS && GET_CODE (op1) == CONST_INT
4562 && GET_MODE (op0) != VOIDmode)
4563 return plus_constant (op0, INTVAL (op1));
4564 else if (code == MINUS && GET_CODE (op1) == CONST_INT
4565 && GET_MODE (op0) != VOIDmode)
4566 return plus_constant (op0, - INTVAL (op1));
4567 else
4568 return gen_rtx_fmt_ee (code, mode, op0, op1);
4569 }
4570 \f
4571 /* Like simplify_binary_operation except used for relational operators.
4572 MODE is the mode of the operands, not that of the result. If MODE
4573 is VOIDmode, both operands must also be VOIDmode and we compare the
4574 operands in "infinite precision".
4575
4576 If no simplification is possible, this function returns zero. Otherwise,
4577 it returns either const_true_rtx or const0_rtx. */
4578
4579 rtx
4580 simplify_relational_operation (code, mode, op0, op1)
4581 enum rtx_code code;
4582 enum machine_mode mode;
4583 rtx op0, op1;
4584 {
4585 int equal, op0lt, op0ltu, op1lt, op1ltu;
4586 rtx tem;
4587
4588 /* If op0 is a compare, extract the comparison arguments from it. */
4589 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4590 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4591
4592 /* We can't simplify MODE_CC values since we don't know what the
4593 actual comparison is. */
4594 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC
4595 #ifdef HAVE_cc0
4596 || op0 == cc0_rtx
4597 #endif
4598 )
4599 return 0;
4600
4601 /* For integer comparisons of A and B maybe we can simplify A - B and can
4602 then simplify a comparison of that with zero. If A and B are both either
4603 a register or a CONST_INT, this can't help; testing for these cases will
4604 prevent infinite recursion here and speed things up.
4605
4606 If CODE is an unsigned comparison, then we can never do this optimization,
4607 because it gives an incorrect result if the subtraction wraps around zero.
4608 ANSI C defines unsigned operations such that they never overflow, and
4609 thus such cases can not be ignored. */
4610
4611 if (INTEGRAL_MODE_P (mode) && op1 != const0_rtx
4612 && ! ((GET_CODE (op0) == REG || GET_CODE (op0) == CONST_INT)
4613 && (GET_CODE (op1) == REG || GET_CODE (op1) == CONST_INT))
4614 && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
4615 && code != GTU && code != GEU && code != LTU && code != LEU)
4616 return simplify_relational_operation (signed_condition (code),
4617 mode, tem, const0_rtx);
4618
4619 /* For non-IEEE floating-point, if the two operands are equal, we know the
4620 result. */
4621 if (rtx_equal_p (op0, op1)
4622 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4623 || ! FLOAT_MODE_P (GET_MODE (op0)) || flag_fast_math))
4624 equal = 1, op0lt = 0, op0ltu = 0, op1lt = 0, op1ltu = 0;
4625
4626 /* If the operands are floating-point constants, see if we can fold
4627 the result. */
4628 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4629 else if (GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
4630 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4631 {
4632 REAL_VALUE_TYPE d0, d1;
4633 jmp_buf handler;
4634
4635 if (setjmp (handler))
4636 return 0;
4637
4638 set_float_handler (handler);
4639 REAL_VALUE_FROM_CONST_DOUBLE (d0, op0);
4640 REAL_VALUE_FROM_CONST_DOUBLE (d1, op1);
4641 equal = REAL_VALUES_EQUAL (d0, d1);
4642 op0lt = op0ltu = REAL_VALUES_LESS (d0, d1);
4643 op1lt = op1ltu = REAL_VALUES_LESS (d1, d0);
4644 set_float_handler (NULL_PTR);
4645 }
4646 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4647
4648 /* Otherwise, see if the operands are both integers. */
4649 else if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
4650 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
4651 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
4652 {
4653 int width = GET_MODE_BITSIZE (mode);
4654 HOST_WIDE_INT l0s, h0s, l1s, h1s;
4655 unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
4656
4657 /* Get the two words comprising each integer constant. */
4658 if (GET_CODE (op0) == CONST_DOUBLE)
4659 {
4660 l0u = l0s = CONST_DOUBLE_LOW (op0);
4661 h0u = h0s = CONST_DOUBLE_HIGH (op0);
4662 }
4663 else
4664 {
4665 l0u = l0s = INTVAL (op0);
4666 h0u = h0s = l0s < 0 ? -1 : 0;
4667 }
4668
4669 if (GET_CODE (op1) == CONST_DOUBLE)
4670 {
4671 l1u = l1s = CONST_DOUBLE_LOW (op1);
4672 h1u = h1s = CONST_DOUBLE_HIGH (op1);
4673 }
4674 else
4675 {
4676 l1u = l1s = INTVAL (op1);
4677 h1u = h1s = l1s < 0 ? -1 : 0;
4678 }
4679
4680 /* If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT,
4681 we have to sign or zero-extend the values. */
4682 if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
4683 h0u = h1u = 0, h0s = l0s < 0 ? -1 : 0, h1s = l1s < 0 ? -1 : 0;
4684
4685 if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
4686 {
4687 l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
4688 l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
4689
4690 if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4691 l0s |= ((HOST_WIDE_INT) (-1) << width);
4692
4693 if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4694 l1s |= ((HOST_WIDE_INT) (-1) << width);
4695 }
4696
4697 equal = (h0u == h1u && l0u == l1u);
4698 op0lt = (h0s < h1s || (h0s == h1s && l0s < l1s));
4699 op1lt = (h1s < h0s || (h1s == h0s && l1s < l0s));
4700 op0ltu = (h0u < h1u || (h0u == h1u && l0u < l1u));
4701 op1ltu = (h1u < h0u || (h1u == h0u && l1u < l0u));
4702 }
4703
4704 /* Otherwise, there are some code-specific tests we can make. */
4705 else
4706 {
4707 switch (code)
4708 {
4709 case EQ:
4710 /* References to the frame plus a constant or labels cannot
4711 be zero, but a SYMBOL_REF can due to #pragma weak. */
4712 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4713 || GET_CODE (op0) == LABEL_REF)
4714 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4715 /* On some machines, the ap reg can be 0 sometimes. */
4716 && op0 != arg_pointer_rtx
4717 #endif
4718 )
4719 return const0_rtx;
4720 break;
4721
4722 case NE:
4723 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4724 || GET_CODE (op0) == LABEL_REF)
4725 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4726 && op0 != arg_pointer_rtx
4727 #endif
4728 )
4729 return const_true_rtx;
4730 break;
4731
4732 case GEU:
4733 /* Unsigned values are never negative. */
4734 if (op1 == const0_rtx)
4735 return const_true_rtx;
4736 break;
4737
4738 case LTU:
4739 if (op1 == const0_rtx)
4740 return const0_rtx;
4741 break;
4742
4743 case LEU:
4744 /* Unsigned values are never greater than the largest
4745 unsigned value. */
4746 if (GET_CODE (op1) == CONST_INT
4747 && INTVAL (op1) == GET_MODE_MASK (mode)
4748 && INTEGRAL_MODE_P (mode))
4749 return const_true_rtx;
4750 break;
4751
4752 case GTU:
4753 if (GET_CODE (op1) == CONST_INT
4754 && INTVAL (op1) == GET_MODE_MASK (mode)
4755 && INTEGRAL_MODE_P (mode))
4756 return const0_rtx;
4757 break;
4758
4759 default:
4760 break;
4761 }
4762
4763 return 0;
4764 }
4765
4766 /* If we reach here, EQUAL, OP0LT, OP0LTU, OP1LT, and OP1LTU are set
4767 as appropriate. */
4768 switch (code)
4769 {
4770 case EQ:
4771 return equal ? const_true_rtx : const0_rtx;
4772 case NE:
4773 return ! equal ? const_true_rtx : const0_rtx;
4774 case LT:
4775 return op0lt ? const_true_rtx : const0_rtx;
4776 case GT:
4777 return op1lt ? const_true_rtx : const0_rtx;
4778 case LTU:
4779 return op0ltu ? const_true_rtx : const0_rtx;
4780 case GTU:
4781 return op1ltu ? const_true_rtx : const0_rtx;
4782 case LE:
4783 return equal || op0lt ? const_true_rtx : const0_rtx;
4784 case GE:
4785 return equal || op1lt ? const_true_rtx : const0_rtx;
4786 case LEU:
4787 return equal || op0ltu ? const_true_rtx : const0_rtx;
4788 case GEU:
4789 return equal || op1ltu ? const_true_rtx : const0_rtx;
4790 default:
4791 abort ();
4792 }
4793 }
4794 \f
4795 /* Simplify CODE, an operation with result mode MODE and three operands,
4796 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4797 a constant. Return 0 if no simplifications is possible. */
4798
4799 rtx
4800 simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4801 enum rtx_code code;
4802 enum machine_mode mode, op0_mode;
4803 rtx op0, op1, op2;
4804 {
4805 int width = GET_MODE_BITSIZE (mode);
4806
4807 /* VOIDmode means "infinite" precision. */
4808 if (width == 0)
4809 width = HOST_BITS_PER_WIDE_INT;
4810
4811 switch (code)
4812 {
4813 case SIGN_EXTRACT:
4814 case ZERO_EXTRACT:
4815 if (GET_CODE (op0) == CONST_INT
4816 && GET_CODE (op1) == CONST_INT
4817 && GET_CODE (op2) == CONST_INT
4818 && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
4819 && width <= HOST_BITS_PER_WIDE_INT)
4820 {
4821 /* Extracting a bit-field from a constant */
4822 HOST_WIDE_INT val = INTVAL (op0);
4823
4824 if (BITS_BIG_ENDIAN)
4825 val >>= (GET_MODE_BITSIZE (op0_mode)
4826 - INTVAL (op2) - INTVAL (op1));
4827 else
4828 val >>= INTVAL (op2);
4829
4830 if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
4831 {
4832 /* First zero-extend. */
4833 val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
4834 /* If desired, propagate sign bit. */
4835 if (code == SIGN_EXTRACT
4836 && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4837 val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
4838 }
4839
4840 /* Clear the bits that don't belong in our mode,
4841 unless they and our sign bit are all one.
4842 So we get either a reasonable negative value or a reasonable
4843 unsigned value for this mode. */
4844 if (width < HOST_BITS_PER_WIDE_INT
4845 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4846 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4847 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4848
4849 return GEN_INT (val);
4850 }
4851 break;
4852
4853 case IF_THEN_ELSE:
4854 if (GET_CODE (op0) == CONST_INT)
4855 return op0 != const0_rtx ? op1 : op2;
4856
4857 /* Convert a == b ? b : a to "a". */
4858 if (GET_CODE (op0) == NE && ! side_effects_p (op0)
4859 && rtx_equal_p (XEXP (op0, 0), op1)
4860 && rtx_equal_p (XEXP (op0, 1), op2))
4861 return op1;
4862 else if (GET_CODE (op0) == EQ && ! side_effects_p (op0)
4863 && rtx_equal_p (XEXP (op0, 1), op1)
4864 && rtx_equal_p (XEXP (op0, 0), op2))
4865 return op2;
4866 else if (GET_RTX_CLASS (GET_CODE (op0)) == '<' && ! side_effects_p (op0))
4867 {
4868 rtx temp;
4869 temp = simplify_relational_operation (GET_CODE (op0), op0_mode,
4870 XEXP (op0, 0), XEXP (op0, 1));
4871 /* See if any simplifications were possible. */
4872 if (temp == const0_rtx)
4873 return op2;
4874 else if (temp == const1_rtx)
4875 return op1;
4876 }
4877 break;
4878
4879 default:
4880 abort ();
4881 }
4882
4883 return 0;
4884 }
4885 \f
4886 /* If X is a nontrivial arithmetic operation on an argument
4887 for which a constant value can be determined, return
4888 the result of operating on that value, as a constant.
4889 Otherwise, return X, possibly with one or more operands
4890 modified by recursive calls to this function.
4891
4892 If X is a register whose contents are known, we do NOT
4893 return those contents here. equiv_constant is called to
4894 perform that task.
4895
4896 INSN is the insn that we may be modifying. If it is 0, make a copy
4897 of X before modifying it. */
4898
4899 static rtx
4900 fold_rtx (x, insn)
4901 rtx x;
4902 rtx insn;
4903 {
4904 register enum rtx_code code;
4905 register enum machine_mode mode;
4906 register char *fmt;
4907 register int i;
4908 rtx new = 0;
4909 int copied = 0;
4910 int must_swap = 0;
4911
4912 /* Folded equivalents of first two operands of X. */
4913 rtx folded_arg0;
4914 rtx folded_arg1;
4915
4916 /* Constant equivalents of first three operands of X;
4917 0 when no such equivalent is known. */
4918 rtx const_arg0;
4919 rtx const_arg1;
4920 rtx const_arg2;
4921
4922 /* The mode of the first operand of X. We need this for sign and zero
4923 extends. */
4924 enum machine_mode mode_arg0;
4925
4926 if (x == 0)
4927 return x;
4928
4929 mode = GET_MODE (x);
4930 code = GET_CODE (x);
4931 switch (code)
4932 {
4933 case CONST:
4934 case CONST_INT:
4935 case CONST_DOUBLE:
4936 case SYMBOL_REF:
4937 case LABEL_REF:
4938 case REG:
4939 /* No use simplifying an EXPR_LIST
4940 since they are used only for lists of args
4941 in a function call's REG_EQUAL note. */
4942 case EXPR_LIST:
4943 /* Changing anything inside an ADDRESSOF is incorrect; we don't
4944 want to (e.g.,) make (addressof (const_int 0)) just because
4945 the location is known to be zero. */
4946 case ADDRESSOF:
4947 return x;
4948
4949 #ifdef HAVE_cc0
4950 case CC0:
4951 return prev_insn_cc0;
4952 #endif
4953
4954 case PC:
4955 /* If the next insn is a CODE_LABEL followed by a jump table,
4956 PC's value is a LABEL_REF pointing to that label. That
4957 lets us fold switch statements on the Vax. */
4958 if (insn && GET_CODE (insn) == JUMP_INSN)
4959 {
4960 rtx next = next_nonnote_insn (insn);
4961
4962 if (next && GET_CODE (next) == CODE_LABEL
4963 && NEXT_INSN (next) != 0
4964 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
4965 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
4966 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
4967 return gen_rtx_LABEL_REF (Pmode, next);
4968 }
4969 break;
4970
4971 case SUBREG:
4972 /* See if we previously assigned a constant value to this SUBREG. */
4973 if ((new = lookup_as_function (x, CONST_INT)) != 0
4974 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
4975 return new;
4976
4977 /* If this is a paradoxical SUBREG, we have no idea what value the
4978 extra bits would have. However, if the operand is equivalent
4979 to a SUBREG whose operand is the same as our mode, and all the
4980 modes are within a word, we can just use the inner operand
4981 because these SUBREGs just say how to treat the register.
4982
4983 Similarly if we find an integer constant. */
4984
4985 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4986 {
4987 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
4988 struct table_elt *elt;
4989
4990 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
4991 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
4992 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
4993 imode)) != 0)
4994 for (elt = elt->first_same_value;
4995 elt; elt = elt->next_same_value)
4996 {
4997 if (CONSTANT_P (elt->exp)
4998 && GET_MODE (elt->exp) == VOIDmode)
4999 return elt->exp;
5000
5001 if (GET_CODE (elt->exp) == SUBREG
5002 && GET_MODE (SUBREG_REG (elt->exp)) == mode
5003 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
5004 return copy_rtx (SUBREG_REG (elt->exp));
5005 }
5006
5007 return x;
5008 }
5009
5010 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
5011 We might be able to if the SUBREG is extracting a single word in an
5012 integral mode or extracting the low part. */
5013
5014 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
5015 const_arg0 = equiv_constant (folded_arg0);
5016 if (const_arg0)
5017 folded_arg0 = const_arg0;
5018
5019 if (folded_arg0 != SUBREG_REG (x))
5020 {
5021 new = 0;
5022
5023 if (GET_MODE_CLASS (mode) == MODE_INT
5024 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5025 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
5026 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
5027 GET_MODE (SUBREG_REG (x)));
5028 if (new == 0 && subreg_lowpart_p (x))
5029 new = gen_lowpart_if_possible (mode, folded_arg0);
5030 if (new)
5031 return new;
5032 }
5033
5034 /* If this is a narrowing SUBREG and our operand is a REG, see if
5035 we can find an equivalence for REG that is an arithmetic operation
5036 in a wider mode where both operands are paradoxical SUBREGs
5037 from objects of our result mode. In that case, we couldn't report
5038 an equivalent value for that operation, since we don't know what the
5039 extra bits will be. But we can find an equivalence for this SUBREG
5040 by folding that operation is the narrow mode. This allows us to
5041 fold arithmetic in narrow modes when the machine only supports
5042 word-sized arithmetic.
5043
5044 Also look for a case where we have a SUBREG whose operand is the
5045 same as our result. If both modes are smaller than a word, we
5046 are simply interpreting a register in different modes and we
5047 can use the inner value. */
5048
5049 if (GET_CODE (folded_arg0) == REG
5050 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
5051 && subreg_lowpart_p (x))
5052 {
5053 struct table_elt *elt;
5054
5055 /* We can use HASH here since we know that canon_hash won't be
5056 called. */
5057 elt = lookup (folded_arg0,
5058 HASH (folded_arg0, GET_MODE (folded_arg0)),
5059 GET_MODE (folded_arg0));
5060
5061 if (elt)
5062 elt = elt->first_same_value;
5063
5064 for (; elt; elt = elt->next_same_value)
5065 {
5066 enum rtx_code eltcode = GET_CODE (elt->exp);
5067
5068 /* Just check for unary and binary operations. */
5069 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
5070 && GET_CODE (elt->exp) != SIGN_EXTEND
5071 && GET_CODE (elt->exp) != ZERO_EXTEND
5072 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
5073 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
5074 {
5075 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
5076
5077 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
5078 op0 = fold_rtx (op0, NULL_RTX);
5079
5080 op0 = equiv_constant (op0);
5081 if (op0)
5082 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
5083 op0, mode);
5084 }
5085 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
5086 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
5087 && eltcode != DIV && eltcode != MOD
5088 && eltcode != UDIV && eltcode != UMOD
5089 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
5090 && eltcode != ROTATE && eltcode != ROTATERT
5091 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
5092 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
5093 == mode))
5094 || CONSTANT_P (XEXP (elt->exp, 0)))
5095 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
5096 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
5097 == mode))
5098 || CONSTANT_P (XEXP (elt->exp, 1))))
5099 {
5100 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
5101 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
5102
5103 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
5104 op0 = fold_rtx (op0, NULL_RTX);
5105
5106 if (op0)
5107 op0 = equiv_constant (op0);
5108
5109 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
5110 op1 = fold_rtx (op1, NULL_RTX);
5111
5112 if (op1)
5113 op1 = equiv_constant (op1);
5114
5115 /* If we are looking for the low SImode part of
5116 (ashift:DI c (const_int 32)), it doesn't work
5117 to compute that in SImode, because a 32-bit shift
5118 in SImode is unpredictable. We know the value is 0. */
5119 if (op0 && op1
5120 && GET_CODE (elt->exp) == ASHIFT
5121 && GET_CODE (op1) == CONST_INT
5122 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
5123 {
5124 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
5125
5126 /* If the count fits in the inner mode's width,
5127 but exceeds the outer mode's width,
5128 the value will get truncated to 0
5129 by the subreg. */
5130 new = const0_rtx;
5131 else
5132 /* If the count exceeds even the inner mode's width,
5133 don't fold this expression. */
5134 new = 0;
5135 }
5136 else if (op0 && op1)
5137 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
5138 op0, op1);
5139 }
5140
5141 else if (GET_CODE (elt->exp) == SUBREG
5142 && GET_MODE (SUBREG_REG (elt->exp)) == mode
5143 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
5144 <= UNITS_PER_WORD)
5145 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
5146 new = copy_rtx (SUBREG_REG (elt->exp));
5147
5148 if (new)
5149 return new;
5150 }
5151 }
5152
5153 return x;
5154
5155 case NOT:
5156 case NEG:
5157 /* If we have (NOT Y), see if Y is known to be (NOT Z).
5158 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
5159 new = lookup_as_function (XEXP (x, 0), code);
5160 if (new)
5161 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
5162 break;
5163
5164 case MEM:
5165 /* If we are not actually processing an insn, don't try to find the
5166 best address. Not only don't we care, but we could modify the
5167 MEM in an invalid way since we have no insn to validate against. */
5168 if (insn != 0)
5169 find_best_addr (insn, &XEXP (x, 0));
5170
5171 {
5172 /* Even if we don't fold in the insn itself,
5173 we can safely do so here, in hopes of getting a constant. */
5174 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
5175 rtx base = 0;
5176 HOST_WIDE_INT offset = 0;
5177
5178 if (GET_CODE (addr) == REG
5179 && REGNO_QTY_VALID_P (REGNO (addr))
5180 && GET_MODE (addr) == qty_mode[reg_qty[REGNO (addr)]]
5181 && qty_const[reg_qty[REGNO (addr)]] != 0)
5182 addr = qty_const[reg_qty[REGNO (addr)]];
5183
5184 /* If address is constant, split it into a base and integer offset. */
5185 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
5186 base = addr;
5187 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
5188 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
5189 {
5190 base = XEXP (XEXP (addr, 0), 0);
5191 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
5192 }
5193 else if (GET_CODE (addr) == LO_SUM
5194 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
5195 base = XEXP (addr, 1);
5196 else if (GET_CODE (addr) == ADDRESSOF)
5197 return change_address (x, VOIDmode, addr);
5198
5199 /* If this is a constant pool reference, we can fold it into its
5200 constant to allow better value tracking. */
5201 if (base && GET_CODE (base) == SYMBOL_REF
5202 && CONSTANT_POOL_ADDRESS_P (base))
5203 {
5204 rtx constant = get_pool_constant (base);
5205 enum machine_mode const_mode = get_pool_mode (base);
5206 rtx new;
5207
5208 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
5209 constant_pool_entries_cost = COST (constant);
5210
5211 /* If we are loading the full constant, we have an equivalence. */
5212 if (offset == 0 && mode == const_mode)
5213 return constant;
5214
5215 /* If this actually isn't a constant (weird!), we can't do
5216 anything. Otherwise, handle the two most common cases:
5217 extracting a word from a multi-word constant, and extracting
5218 the low-order bits. Other cases don't seem common enough to
5219 worry about. */
5220 if (! CONSTANT_P (constant))
5221 return x;
5222
5223 if (GET_MODE_CLASS (mode) == MODE_INT
5224 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5225 && offset % UNITS_PER_WORD == 0
5226 && (new = operand_subword (constant,
5227 offset / UNITS_PER_WORD,
5228 0, const_mode)) != 0)
5229 return new;
5230
5231 if (((BYTES_BIG_ENDIAN
5232 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
5233 || (! BYTES_BIG_ENDIAN && offset == 0))
5234 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
5235 return new;
5236 }
5237
5238 /* If this is a reference to a label at a known position in a jump
5239 table, we also know its value. */
5240 if (base && GET_CODE (base) == LABEL_REF)
5241 {
5242 rtx label = XEXP (base, 0);
5243 rtx table_insn = NEXT_INSN (label);
5244
5245 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5246 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
5247 {
5248 rtx table = PATTERN (table_insn);
5249
5250 if (offset >= 0
5251 && (offset / GET_MODE_SIZE (GET_MODE (table))
5252 < XVECLEN (table, 0)))
5253 return XVECEXP (table, 0,
5254 offset / GET_MODE_SIZE (GET_MODE (table)));
5255 }
5256 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5257 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
5258 {
5259 rtx table = PATTERN (table_insn);
5260
5261 if (offset >= 0
5262 && (offset / GET_MODE_SIZE (GET_MODE (table))
5263 < XVECLEN (table, 1)))
5264 {
5265 offset /= GET_MODE_SIZE (GET_MODE (table));
5266 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
5267 XEXP (table, 0));
5268
5269 if (GET_MODE (table) != Pmode)
5270 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
5271
5272 /* Indicate this is a constant. This isn't a
5273 valid form of CONST, but it will only be used
5274 to fold the next insns and then discarded, so
5275 it should be safe. */
5276 return gen_rtx_CONST (GET_MODE (new), new);
5277 }
5278 }
5279 }
5280
5281 return x;
5282 }
5283
5284 case ASM_OPERANDS:
5285 for (i = XVECLEN (x, 3) - 1; i >= 0; i--)
5286 validate_change (insn, &XVECEXP (x, 3, i),
5287 fold_rtx (XVECEXP (x, 3, i), insn), 0);
5288 break;
5289
5290 default:
5291 break;
5292 }
5293
5294 const_arg0 = 0;
5295 const_arg1 = 0;
5296 const_arg2 = 0;
5297 mode_arg0 = VOIDmode;
5298
5299 /* Try folding our operands.
5300 Then see which ones have constant values known. */
5301
5302 fmt = GET_RTX_FORMAT (code);
5303 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5304 if (fmt[i] == 'e')
5305 {
5306 rtx arg = XEXP (x, i);
5307 rtx folded_arg = arg, const_arg = 0;
5308 enum machine_mode mode_arg = GET_MODE (arg);
5309 rtx cheap_arg, expensive_arg;
5310 rtx replacements[2];
5311 int j;
5312
5313 /* Most arguments are cheap, so handle them specially. */
5314 switch (GET_CODE (arg))
5315 {
5316 case REG:
5317 /* This is the same as calling equiv_constant; it is duplicated
5318 here for speed. */
5319 if (REGNO_QTY_VALID_P (REGNO (arg))
5320 && qty_const[reg_qty[REGNO (arg)]] != 0
5321 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != REG
5322 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != PLUS)
5323 const_arg
5324 = gen_lowpart_if_possible (GET_MODE (arg),
5325 qty_const[reg_qty[REGNO (arg)]]);
5326 break;
5327
5328 case CONST:
5329 case CONST_INT:
5330 case SYMBOL_REF:
5331 case LABEL_REF:
5332 case CONST_DOUBLE:
5333 const_arg = arg;
5334 break;
5335
5336 #ifdef HAVE_cc0
5337 case CC0:
5338 folded_arg = prev_insn_cc0;
5339 mode_arg = prev_insn_cc0_mode;
5340 const_arg = equiv_constant (folded_arg);
5341 break;
5342 #endif
5343
5344 default:
5345 folded_arg = fold_rtx (arg, insn);
5346 const_arg = equiv_constant (folded_arg);
5347 }
5348
5349 /* For the first three operands, see if the operand
5350 is constant or equivalent to a constant. */
5351 switch (i)
5352 {
5353 case 0:
5354 folded_arg0 = folded_arg;
5355 const_arg0 = const_arg;
5356 mode_arg0 = mode_arg;
5357 break;
5358 case 1:
5359 folded_arg1 = folded_arg;
5360 const_arg1 = const_arg;
5361 break;
5362 case 2:
5363 const_arg2 = const_arg;
5364 break;
5365 }
5366
5367 /* Pick the least expensive of the folded argument and an
5368 equivalent constant argument. */
5369 if (const_arg == 0 || const_arg == folded_arg
5370 || COST (const_arg) > COST (folded_arg))
5371 cheap_arg = folded_arg, expensive_arg = const_arg;
5372 else
5373 cheap_arg = const_arg, expensive_arg = folded_arg;
5374
5375 /* Try to replace the operand with the cheapest of the two
5376 possibilities. If it doesn't work and this is either of the first
5377 two operands of a commutative operation, try swapping them.
5378 If THAT fails, try the more expensive, provided it is cheaper
5379 than what is already there. */
5380
5381 if (cheap_arg == XEXP (x, i))
5382 continue;
5383
5384 if (insn == 0 && ! copied)
5385 {
5386 x = copy_rtx (x);
5387 copied = 1;
5388 }
5389
5390 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
5391 for (j = 0;
5392 j < 2 && replacements[j]
5393 && COST (replacements[j]) < COST (XEXP (x, i));
5394 j++)
5395 {
5396 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
5397 break;
5398
5399 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
5400 {
5401 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
5402 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
5403
5404 if (apply_change_group ())
5405 {
5406 /* Swap them back to be invalid so that this loop can
5407 continue and flag them to be swapped back later. */
5408 rtx tem;
5409
5410 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
5411 XEXP (x, 1) = tem;
5412 must_swap = 1;
5413 break;
5414 }
5415 }
5416 }
5417 }
5418
5419 else
5420 {
5421 if (fmt[i] == 'E')
5422 /* Don't try to fold inside of a vector of expressions.
5423 Doing nothing is harmless. */
5424 {;}
5425 }
5426
5427 /* If a commutative operation, place a constant integer as the second
5428 operand unless the first operand is also a constant integer. Otherwise,
5429 place any constant second unless the first operand is also a constant. */
5430
5431 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5432 {
5433 if (must_swap || (const_arg0
5434 && (const_arg1 == 0
5435 || (GET_CODE (const_arg0) == CONST_INT
5436 && GET_CODE (const_arg1) != CONST_INT))))
5437 {
5438 register rtx tem = XEXP (x, 0);
5439
5440 if (insn == 0 && ! copied)
5441 {
5442 x = copy_rtx (x);
5443 copied = 1;
5444 }
5445
5446 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
5447 validate_change (insn, &XEXP (x, 1), tem, 1);
5448 if (apply_change_group ())
5449 {
5450 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
5451 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
5452 }
5453 }
5454 }
5455
5456 /* If X is an arithmetic operation, see if we can simplify it. */
5457
5458 switch (GET_RTX_CLASS (code))
5459 {
5460 case '1':
5461 {
5462 int is_const = 0;
5463
5464 /* We can't simplify extension ops unless we know the
5465 original mode. */
5466 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
5467 && mode_arg0 == VOIDmode)
5468 break;
5469
5470 /* If we had a CONST, strip it off and put it back later if we
5471 fold. */
5472 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
5473 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
5474
5475 new = simplify_unary_operation (code, mode,
5476 const_arg0 ? const_arg0 : folded_arg0,
5477 mode_arg0);
5478 if (new != 0 && is_const)
5479 new = gen_rtx_CONST (mode, new);
5480 }
5481 break;
5482
5483 case '<':
5484 /* See what items are actually being compared and set FOLDED_ARG[01]
5485 to those values and CODE to the actual comparison code. If any are
5486 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
5487 do anything if both operands are already known to be constant. */
5488
5489 if (const_arg0 == 0 || const_arg1 == 0)
5490 {
5491 struct table_elt *p0, *p1;
5492 rtx true = const_true_rtx, false = const0_rtx;
5493 enum machine_mode mode_arg1;
5494
5495 #ifdef FLOAT_STORE_FLAG_VALUE
5496 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5497 {
5498 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5499 mode);
5500 false = CONST0_RTX (mode);
5501 }
5502 #endif
5503
5504 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5505 &mode_arg0, &mode_arg1);
5506 const_arg0 = equiv_constant (folded_arg0);
5507 const_arg1 = equiv_constant (folded_arg1);
5508
5509 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5510 what kinds of things are being compared, so we can't do
5511 anything with this comparison. */
5512
5513 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5514 break;
5515
5516 /* If we do not now have two constants being compared, see
5517 if we can nevertheless deduce some things about the
5518 comparison. */
5519 if (const_arg0 == 0 || const_arg1 == 0)
5520 {
5521 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or
5522 non-explicit constant? These aren't zero, but we
5523 don't know their sign. */
5524 if (const_arg1 == const0_rtx
5525 && (NONZERO_BASE_PLUS_P (folded_arg0)
5526 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5527 come out as 0. */
5528 || GET_CODE (folded_arg0) == SYMBOL_REF
5529 #endif
5530 || GET_CODE (folded_arg0) == LABEL_REF
5531 || GET_CODE (folded_arg0) == CONST))
5532 {
5533 if (code == EQ)
5534 return false;
5535 else if (code == NE)
5536 return true;
5537 }
5538
5539 /* See if the two operands are the same. We don't do this
5540 for IEEE floating-point since we can't assume x == x
5541 since x might be a NaN. */
5542
5543 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5544 || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
5545 && (folded_arg0 == folded_arg1
5546 || (GET_CODE (folded_arg0) == REG
5547 && GET_CODE (folded_arg1) == REG
5548 && (reg_qty[REGNO (folded_arg0)]
5549 == reg_qty[REGNO (folded_arg1)]))
5550 || ((p0 = lookup (folded_arg0,
5551 (safe_hash (folded_arg0, mode_arg0)
5552 % NBUCKETS), mode_arg0))
5553 && (p1 = lookup (folded_arg1,
5554 (safe_hash (folded_arg1, mode_arg0)
5555 % NBUCKETS), mode_arg0))
5556 && p0->first_same_value == p1->first_same_value)))
5557 return ((code == EQ || code == LE || code == GE
5558 || code == LEU || code == GEU)
5559 ? true : false);
5560
5561 /* If FOLDED_ARG0 is a register, see if the comparison we are
5562 doing now is either the same as we did before or the reverse
5563 (we only check the reverse if not floating-point). */
5564 else if (GET_CODE (folded_arg0) == REG)
5565 {
5566 int qty = reg_qty[REGNO (folded_arg0)];
5567
5568 if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5569 && (comparison_dominates_p (qty_comparison_code[qty], code)
5570 || (comparison_dominates_p (qty_comparison_code[qty],
5571 reverse_condition (code))
5572 && ! FLOAT_MODE_P (mode_arg0)))
5573 && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5574 || (const_arg1
5575 && rtx_equal_p (qty_comparison_const[qty],
5576 const_arg1))
5577 || (GET_CODE (folded_arg1) == REG
5578 && (reg_qty[REGNO (folded_arg1)]
5579 == qty_comparison_qty[qty]))))
5580 return (comparison_dominates_p (qty_comparison_code[qty],
5581 code)
5582 ? true : false);
5583 }
5584 }
5585 }
5586
5587 /* If we are comparing against zero, see if the first operand is
5588 equivalent to an IOR with a constant. If so, we may be able to
5589 determine the result of this comparison. */
5590
5591 if (const_arg1 == const0_rtx)
5592 {
5593 rtx y = lookup_as_function (folded_arg0, IOR);
5594 rtx inner_const;
5595
5596 if (y != 0
5597 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5598 && GET_CODE (inner_const) == CONST_INT
5599 && INTVAL (inner_const) != 0)
5600 {
5601 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
5602 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
5603 && (INTVAL (inner_const)
5604 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
5605 rtx true = const_true_rtx, false = const0_rtx;
5606
5607 #ifdef FLOAT_STORE_FLAG_VALUE
5608 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5609 {
5610 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5611 mode);
5612 false = CONST0_RTX (mode);
5613 }
5614 #endif
5615
5616 switch (code)
5617 {
5618 case EQ:
5619 return false;
5620 case NE:
5621 return true;
5622 case LT: case LE:
5623 if (has_sign)
5624 return true;
5625 break;
5626 case GT: case GE:
5627 if (has_sign)
5628 return false;
5629 break;
5630 default:
5631 break;
5632 }
5633 }
5634 }
5635
5636 new = simplify_relational_operation (code, mode_arg0,
5637 const_arg0 ? const_arg0 : folded_arg0,
5638 const_arg1 ? const_arg1 : folded_arg1);
5639 #ifdef FLOAT_STORE_FLAG_VALUE
5640 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5641 new = ((new == const0_rtx) ? CONST0_RTX (mode)
5642 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE, mode));
5643 #endif
5644 break;
5645
5646 case '2':
5647 case 'c':
5648 switch (code)
5649 {
5650 case PLUS:
5651 /* If the second operand is a LABEL_REF, see if the first is a MINUS
5652 with that LABEL_REF as its second operand. If so, the result is
5653 the first operand of that MINUS. This handles switches with an
5654 ADDR_DIFF_VEC table. */
5655 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5656 {
5657 rtx y
5658 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
5659 : lookup_as_function (folded_arg0, MINUS);
5660
5661 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5662 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5663 return XEXP (y, 0);
5664
5665 /* Now try for a CONST of a MINUS like the above. */
5666 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
5667 : lookup_as_function (folded_arg0, CONST))) != 0
5668 && GET_CODE (XEXP (y, 0)) == MINUS
5669 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5670 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg1, 0))
5671 return XEXP (XEXP (y, 0), 0);
5672 }
5673
5674 /* Likewise if the operands are in the other order. */
5675 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
5676 {
5677 rtx y
5678 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
5679 : lookup_as_function (folded_arg1, MINUS);
5680
5681 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5682 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
5683 return XEXP (y, 0);
5684
5685 /* Now try for a CONST of a MINUS like the above. */
5686 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
5687 : lookup_as_function (folded_arg1, CONST))) != 0
5688 && GET_CODE (XEXP (y, 0)) == MINUS
5689 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5690 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg0, 0))
5691 return XEXP (XEXP (y, 0), 0);
5692 }
5693
5694 /* If second operand is a register equivalent to a negative
5695 CONST_INT, see if we can find a register equivalent to the
5696 positive constant. Make a MINUS if so. Don't do this for
5697 a non-negative constant since we might then alternate between
5698 chosing positive and negative constants. Having the positive
5699 constant previously-used is the more common case. Be sure
5700 the resulting constant is non-negative; if const_arg1 were
5701 the smallest negative number this would overflow: depending
5702 on the mode, this would either just be the same value (and
5703 hence not save anything) or be incorrect. */
5704 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
5705 && INTVAL (const_arg1) < 0
5706 && - INTVAL (const_arg1) >= 0
5707 && GET_CODE (folded_arg1) == REG)
5708 {
5709 rtx new_const = GEN_INT (- INTVAL (const_arg1));
5710 struct table_elt *p
5711 = lookup (new_const, safe_hash (new_const, mode) % NBUCKETS,
5712 mode);
5713
5714 if (p)
5715 for (p = p->first_same_value; p; p = p->next_same_value)
5716 if (GET_CODE (p->exp) == REG)
5717 return cse_gen_binary (MINUS, mode, folded_arg0,
5718 canon_reg (p->exp, NULL_RTX));
5719 }
5720 goto from_plus;
5721
5722 case MINUS:
5723 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5724 If so, produce (PLUS Z C2-C). */
5725 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5726 {
5727 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5728 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
5729 return fold_rtx (plus_constant (copy_rtx (y),
5730 -INTVAL (const_arg1)),
5731 NULL_RTX);
5732 }
5733
5734 /* ... fall through ... */
5735
5736 from_plus:
5737 case SMIN: case SMAX: case UMIN: case UMAX:
5738 case IOR: case AND: case XOR:
5739 case MULT: case DIV: case UDIV:
5740 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
5741 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5742 is known to be of similar form, we may be able to replace the
5743 operation with a combined operation. This may eliminate the
5744 intermediate operation if every use is simplified in this way.
5745 Note that the similar optimization done by combine.c only works
5746 if the intermediate operation's result has only one reference. */
5747
5748 if (GET_CODE (folded_arg0) == REG
5749 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5750 {
5751 int is_shift
5752 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5753 rtx y = lookup_as_function (folded_arg0, code);
5754 rtx inner_const;
5755 enum rtx_code associate_code;
5756 rtx new_const;
5757
5758 if (y == 0
5759 || 0 == (inner_const
5760 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5761 || GET_CODE (inner_const) != CONST_INT
5762 /* If we have compiled a statement like
5763 "if (x == (x & mask1))", and now are looking at
5764 "x & mask2", we will have a case where the first operand
5765 of Y is the same as our first operand. Unless we detect
5766 this case, an infinite loop will result. */
5767 || XEXP (y, 0) == folded_arg0)
5768 break;
5769
5770 /* Don't associate these operations if they are a PLUS with the
5771 same constant and it is a power of two. These might be doable
5772 with a pre- or post-increment. Similarly for two subtracts of
5773 identical powers of two with post decrement. */
5774
5775 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
5776 && ((HAVE_PRE_INCREMENT
5777 && exact_log2 (INTVAL (const_arg1)) >= 0)
5778 || (HAVE_POST_INCREMENT
5779 && exact_log2 (INTVAL (const_arg1)) >= 0)
5780 || (HAVE_PRE_DECREMENT
5781 && exact_log2 (- INTVAL (const_arg1)) >= 0)
5782 || (HAVE_POST_DECREMENT
5783 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
5784 break;
5785
5786 /* Compute the code used to compose the constants. For example,
5787 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
5788
5789 associate_code
5790 = (code == MULT || code == DIV || code == UDIV ? MULT
5791 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
5792
5793 new_const = simplify_binary_operation (associate_code, mode,
5794 const_arg1, inner_const);
5795
5796 if (new_const == 0)
5797 break;
5798
5799 /* If we are associating shift operations, don't let this
5800 produce a shift of the size of the object or larger.
5801 This could occur when we follow a sign-extend by a right
5802 shift on a machine that does a sign-extend as a pair
5803 of shifts. */
5804
5805 if (is_shift && GET_CODE (new_const) == CONST_INT
5806 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
5807 {
5808 /* As an exception, we can turn an ASHIFTRT of this
5809 form into a shift of the number of bits - 1. */
5810 if (code == ASHIFTRT)
5811 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
5812 else
5813 break;
5814 }
5815
5816 y = copy_rtx (XEXP (y, 0));
5817
5818 /* If Y contains our first operand (the most common way this
5819 can happen is if Y is a MEM), we would do into an infinite
5820 loop if we tried to fold it. So don't in that case. */
5821
5822 if (! reg_mentioned_p (folded_arg0, y))
5823 y = fold_rtx (y, insn);
5824
5825 return cse_gen_binary (code, mode, y, new_const);
5826 }
5827 break;
5828
5829 default:
5830 break;
5831 }
5832
5833 new = simplify_binary_operation (code, mode,
5834 const_arg0 ? const_arg0 : folded_arg0,
5835 const_arg1 ? const_arg1 : folded_arg1);
5836 break;
5837
5838 case 'o':
5839 /* (lo_sum (high X) X) is simply X. */
5840 if (code == LO_SUM && const_arg0 != 0
5841 && GET_CODE (const_arg0) == HIGH
5842 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
5843 return const_arg1;
5844 break;
5845
5846 case '3':
5847 case 'b':
5848 new = simplify_ternary_operation (code, mode, mode_arg0,
5849 const_arg0 ? const_arg0 : folded_arg0,
5850 const_arg1 ? const_arg1 : folded_arg1,
5851 const_arg2 ? const_arg2 : XEXP (x, 2));
5852 break;
5853
5854 case 'x':
5855 /* Always eliminate CONSTANT_P_RTX at this stage. */
5856 if (code == CONSTANT_P_RTX)
5857 return (const_arg0 ? const1_rtx : const0_rtx);
5858 break;
5859 }
5860
5861 return new ? new : x;
5862 }
5863 \f
5864 /* Return a constant value currently equivalent to X.
5865 Return 0 if we don't know one. */
5866
5867 static rtx
5868 equiv_constant (x)
5869 rtx x;
5870 {
5871 if (GET_CODE (x) == REG
5872 && REGNO_QTY_VALID_P (REGNO (x))
5873 && qty_const[reg_qty[REGNO (x)]])
5874 x = gen_lowpart_if_possible (GET_MODE (x), qty_const[reg_qty[REGNO (x)]]);
5875
5876 if (x == 0 || CONSTANT_P (x))
5877 return x;
5878
5879 /* If X is a MEM, try to fold it outside the context of any insn to see if
5880 it might be equivalent to a constant. That handles the case where it
5881 is a constant-pool reference. Then try to look it up in the hash table
5882 in case it is something whose value we have seen before. */
5883
5884 if (GET_CODE (x) == MEM)
5885 {
5886 struct table_elt *elt;
5887
5888 x = fold_rtx (x, NULL_RTX);
5889 if (CONSTANT_P (x))
5890 return x;
5891
5892 elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
5893 if (elt == 0)
5894 return 0;
5895
5896 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
5897 if (elt->is_const && CONSTANT_P (elt->exp))
5898 return elt->exp;
5899 }
5900
5901 return 0;
5902 }
5903 \f
5904 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
5905 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
5906 least-significant part of X.
5907 MODE specifies how big a part of X to return.
5908
5909 If the requested operation cannot be done, 0 is returned.
5910
5911 This is similar to gen_lowpart in emit-rtl.c. */
5912
5913 rtx
5914 gen_lowpart_if_possible (mode, x)
5915 enum machine_mode mode;
5916 register rtx x;
5917 {
5918 rtx result = gen_lowpart_common (mode, x);
5919
5920 if (result)
5921 return result;
5922 else if (GET_CODE (x) == MEM)
5923 {
5924 /* This is the only other case we handle. */
5925 register int offset = 0;
5926 rtx new;
5927
5928 if (WORDS_BIG_ENDIAN)
5929 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
5930 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
5931 if (BYTES_BIG_ENDIAN)
5932 /* Adjust the address so that the address-after-the-data is
5933 unchanged. */
5934 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
5935 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
5936 new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
5937 if (! memory_address_p (mode, XEXP (new, 0)))
5938 return 0;
5939 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
5940 MEM_COPY_ATTRIBUTES (new, x);
5941 return new;
5942 }
5943 else
5944 return 0;
5945 }
5946 \f
5947 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
5948 branch. It will be zero if not.
5949
5950 In certain cases, this can cause us to add an equivalence. For example,
5951 if we are following the taken case of
5952 if (i == 2)
5953 we can add the fact that `i' and '2' are now equivalent.
5954
5955 In any case, we can record that this comparison was passed. If the same
5956 comparison is seen later, we will know its value. */
5957
5958 static void
5959 record_jump_equiv (insn, taken)
5960 rtx insn;
5961 int taken;
5962 {
5963 int cond_known_true;
5964 rtx op0, op1;
5965 enum machine_mode mode, mode0, mode1;
5966 int reversed_nonequality = 0;
5967 enum rtx_code code;
5968
5969 /* Ensure this is the right kind of insn. */
5970 if (! condjump_p (insn) || simplejump_p (insn))
5971 return;
5972
5973 /* See if this jump condition is known true or false. */
5974 if (taken)
5975 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
5976 else
5977 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
5978
5979 /* Get the type of comparison being done and the operands being compared.
5980 If we had to reverse a non-equality condition, record that fact so we
5981 know that it isn't valid for floating-point. */
5982 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
5983 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
5984 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
5985
5986 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
5987 if (! cond_known_true)
5988 {
5989 reversed_nonequality = (code != EQ && code != NE);
5990 code = reverse_condition (code);
5991 }
5992
5993 /* The mode is the mode of the non-constant. */
5994 mode = mode0;
5995 if (mode1 != VOIDmode)
5996 mode = mode1;
5997
5998 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
5999 }
6000
6001 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
6002 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
6003 Make any useful entries we can with that information. Called from
6004 above function and called recursively. */
6005
6006 static void
6007 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
6008 enum rtx_code code;
6009 enum machine_mode mode;
6010 rtx op0, op1;
6011 int reversed_nonequality;
6012 {
6013 unsigned op0_hash, op1_hash;
6014 int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
6015 struct table_elt *op0_elt, *op1_elt;
6016
6017 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
6018 we know that they are also equal in the smaller mode (this is also
6019 true for all smaller modes whether or not there is a SUBREG, but
6020 is not worth testing for with no SUBREG. */
6021
6022 /* Note that GET_MODE (op0) may not equal MODE. */
6023 if (code == EQ && GET_CODE (op0) == SUBREG
6024 && (GET_MODE_SIZE (GET_MODE (op0))
6025 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
6026 {
6027 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
6028 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
6029
6030 record_jump_cond (code, mode, SUBREG_REG (op0),
6031 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
6032 reversed_nonequality);
6033 }
6034
6035 if (code == EQ && GET_CODE (op1) == SUBREG
6036 && (GET_MODE_SIZE (GET_MODE (op1))
6037 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
6038 {
6039 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
6040 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
6041
6042 record_jump_cond (code, mode, SUBREG_REG (op1),
6043 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
6044 reversed_nonequality);
6045 }
6046
6047 /* Similarly, if this is an NE comparison, and either is a SUBREG
6048 making a smaller mode, we know the whole thing is also NE. */
6049
6050 /* Note that GET_MODE (op0) may not equal MODE;
6051 if we test MODE instead, we can get an infinite recursion
6052 alternating between two modes each wider than MODE. */
6053
6054 if (code == NE && GET_CODE (op0) == SUBREG
6055 && subreg_lowpart_p (op0)
6056 && (GET_MODE_SIZE (GET_MODE (op0))
6057 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
6058 {
6059 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
6060 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
6061
6062 record_jump_cond (code, mode, SUBREG_REG (op0),
6063 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
6064 reversed_nonequality);
6065 }
6066
6067 if (code == NE && GET_CODE (op1) == SUBREG
6068 && subreg_lowpart_p (op1)
6069 && (GET_MODE_SIZE (GET_MODE (op1))
6070 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
6071 {
6072 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
6073 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
6074
6075 record_jump_cond (code, mode, SUBREG_REG (op1),
6076 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
6077 reversed_nonequality);
6078 }
6079
6080 /* Hash both operands. */
6081
6082 do_not_record = 0;
6083 hash_arg_in_memory = 0;
6084 hash_arg_in_struct = 0;
6085 op0_hash = HASH (op0, mode);
6086 op0_in_memory = hash_arg_in_memory;
6087 op0_in_struct = hash_arg_in_struct;
6088
6089 if (do_not_record)
6090 return;
6091
6092 do_not_record = 0;
6093 hash_arg_in_memory = 0;
6094 hash_arg_in_struct = 0;
6095 op1_hash = HASH (op1, mode);
6096 op1_in_memory = hash_arg_in_memory;
6097 op1_in_struct = hash_arg_in_struct;
6098
6099 if (do_not_record)
6100 return;
6101
6102 /* Look up both operands. */
6103 op0_elt = lookup (op0, op0_hash, mode);
6104 op1_elt = lookup (op1, op1_hash, mode);
6105
6106 /* If both operands are already equivalent or if they are not in the
6107 table but are identical, do nothing. */
6108 if ((op0_elt != 0 && op1_elt != 0
6109 && op0_elt->first_same_value == op1_elt->first_same_value)
6110 || op0 == op1 || rtx_equal_p (op0, op1))
6111 return;
6112
6113 /* If we aren't setting two things equal all we can do is save this
6114 comparison. Similarly if this is floating-point. In the latter
6115 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
6116 If we record the equality, we might inadvertently delete code
6117 whose intent was to change -0 to +0. */
6118
6119 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
6120 {
6121 /* If we reversed a floating-point comparison, if OP0 is not a
6122 register, or if OP1 is neither a register or constant, we can't
6123 do anything. */
6124
6125 if (GET_CODE (op1) != REG)
6126 op1 = equiv_constant (op1);
6127
6128 if ((reversed_nonequality && FLOAT_MODE_P (mode))
6129 || GET_CODE (op0) != REG || op1 == 0)
6130 return;
6131
6132 /* Put OP0 in the hash table if it isn't already. This gives it a
6133 new quantity number. */
6134 if (op0_elt == 0)
6135 {
6136 if (insert_regs (op0, NULL_PTR, 0))
6137 {
6138 rehash_using_reg (op0);
6139 op0_hash = HASH (op0, mode);
6140
6141 /* If OP0 is contained in OP1, this changes its hash code
6142 as well. Faster to rehash than to check, except
6143 for the simple case of a constant. */
6144 if (! CONSTANT_P (op1))
6145 op1_hash = HASH (op1,mode);
6146 }
6147
6148 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
6149 op0_elt->in_memory = op0_in_memory;
6150 op0_elt->in_struct = op0_in_struct;
6151 }
6152
6153 qty_comparison_code[reg_qty[REGNO (op0)]] = code;
6154 if (GET_CODE (op1) == REG)
6155 {
6156 /* Look it up again--in case op0 and op1 are the same. */
6157 op1_elt = lookup (op1, op1_hash, mode);
6158
6159 /* Put OP1 in the hash table so it gets a new quantity number. */
6160 if (op1_elt == 0)
6161 {
6162 if (insert_regs (op1, NULL_PTR, 0))
6163 {
6164 rehash_using_reg (op1);
6165 op1_hash = HASH (op1, mode);
6166 }
6167
6168 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
6169 op1_elt->in_memory = op1_in_memory;
6170 op1_elt->in_struct = op1_in_struct;
6171 }
6172
6173 qty_comparison_qty[reg_qty[REGNO (op0)]] = reg_qty[REGNO (op1)];
6174 qty_comparison_const[reg_qty[REGNO (op0)]] = 0;
6175 }
6176 else
6177 {
6178 qty_comparison_qty[reg_qty[REGNO (op0)]] = -1;
6179 qty_comparison_const[reg_qty[REGNO (op0)]] = op1;
6180 }
6181
6182 return;
6183 }
6184
6185 /* If either side is still missing an equivalence, make it now,
6186 then merge the equivalences. */
6187
6188 if (op0_elt == 0)
6189 {
6190 if (insert_regs (op0, NULL_PTR, 0))
6191 {
6192 rehash_using_reg (op0);
6193 op0_hash = HASH (op0, mode);
6194 }
6195
6196 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
6197 op0_elt->in_memory = op0_in_memory;
6198 op0_elt->in_struct = op0_in_struct;
6199 }
6200
6201 if (op1_elt == 0)
6202 {
6203 if (insert_regs (op1, NULL_PTR, 0))
6204 {
6205 rehash_using_reg (op1);
6206 op1_hash = HASH (op1, mode);
6207 }
6208
6209 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
6210 op1_elt->in_memory = op1_in_memory;
6211 op1_elt->in_struct = op1_in_struct;
6212 }
6213
6214 merge_equiv_classes (op0_elt, op1_elt);
6215 last_jump_equiv_class = op0_elt;
6216 }
6217 \f
6218 /* CSE processing for one instruction.
6219 First simplify sources and addresses of all assignments
6220 in the instruction, using previously-computed equivalents values.
6221 Then install the new sources and destinations in the table
6222 of available values.
6223
6224 If LIBCALL_INSN is nonzero, don't record any equivalence made in
6225 the insn. It means that INSN is inside libcall block. In this
6226 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
6227
6228 /* Data on one SET contained in the instruction. */
6229
6230 struct set
6231 {
6232 /* The SET rtx itself. */
6233 rtx rtl;
6234 /* The SET_SRC of the rtx (the original value, if it is changing). */
6235 rtx src;
6236 /* The hash-table element for the SET_SRC of the SET. */
6237 struct table_elt *src_elt;
6238 /* Hash value for the SET_SRC. */
6239 unsigned src_hash;
6240 /* Hash value for the SET_DEST. */
6241 unsigned dest_hash;
6242 /* The SET_DEST, with SUBREG, etc., stripped. */
6243 rtx inner_dest;
6244 /* Place where the pointer to the INNER_DEST was found. */
6245 rtx *inner_dest_loc;
6246 /* Nonzero if the SET_SRC is in memory. */
6247 char src_in_memory;
6248 /* Nonzero if the SET_SRC is in a structure. */
6249 char src_in_struct;
6250 /* Nonzero if the SET_SRC contains something
6251 whose value cannot be predicted and understood. */
6252 char src_volatile;
6253 /* Original machine mode, in case it becomes a CONST_INT. */
6254 enum machine_mode mode;
6255 /* A constant equivalent for SET_SRC, if any. */
6256 rtx src_const;
6257 /* Hash value of constant equivalent for SET_SRC. */
6258 unsigned src_const_hash;
6259 /* Table entry for constant equivalent for SET_SRC, if any. */
6260 struct table_elt *src_const_elt;
6261 };
6262
6263 static void
6264 cse_insn (insn, libcall_insn)
6265 rtx insn;
6266 rtx libcall_insn;
6267 {
6268 register rtx x = PATTERN (insn);
6269 register int i;
6270 rtx tem;
6271 register int n_sets = 0;
6272
6273 #ifdef HAVE_cc0
6274 /* Records what this insn does to set CC0. */
6275 rtx this_insn_cc0 = 0;
6276 enum machine_mode this_insn_cc0_mode = VOIDmode;
6277 #endif
6278
6279 rtx src_eqv = 0;
6280 struct table_elt *src_eqv_elt = 0;
6281 int src_eqv_volatile;
6282 int src_eqv_in_memory;
6283 int src_eqv_in_struct;
6284 unsigned src_eqv_hash;
6285
6286 struct set *sets;
6287
6288 this_insn = insn;
6289
6290 /* Find all the SETs and CLOBBERs in this instruction.
6291 Record all the SETs in the array `set' and count them.
6292 Also determine whether there is a CLOBBER that invalidates
6293 all memory references, or all references at varying addresses. */
6294
6295 if (GET_CODE (insn) == CALL_INSN)
6296 {
6297 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6298 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
6299 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
6300 }
6301
6302 if (GET_CODE (x) == SET)
6303 {
6304 sets = (struct set *) alloca (sizeof (struct set));
6305 sets[0].rtl = x;
6306
6307 /* Ignore SETs that are unconditional jumps.
6308 They never need cse processing, so this does not hurt.
6309 The reason is not efficiency but rather
6310 so that we can test at the end for instructions
6311 that have been simplified to unconditional jumps
6312 and not be misled by unchanged instructions
6313 that were unconditional jumps to begin with. */
6314 if (SET_DEST (x) == pc_rtx
6315 && GET_CODE (SET_SRC (x)) == LABEL_REF)
6316 ;
6317
6318 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
6319 The hard function value register is used only once, to copy to
6320 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
6321 Ensure we invalidate the destination register. On the 80386 no
6322 other code would invalidate it since it is a fixed_reg.
6323 We need not check the return of apply_change_group; see canon_reg. */
6324
6325 else if (GET_CODE (SET_SRC (x)) == CALL)
6326 {
6327 canon_reg (SET_SRC (x), insn);
6328 apply_change_group ();
6329 fold_rtx (SET_SRC (x), insn);
6330 invalidate (SET_DEST (x), VOIDmode);
6331 }
6332 else
6333 n_sets = 1;
6334 }
6335 else if (GET_CODE (x) == PARALLEL)
6336 {
6337 register int lim = XVECLEN (x, 0);
6338
6339 sets = (struct set *) alloca (lim * sizeof (struct set));
6340
6341 /* Find all regs explicitly clobbered in this insn,
6342 and ensure they are not replaced with any other regs
6343 elsewhere in this insn.
6344 When a reg that is clobbered is also used for input,
6345 we should presume that that is for a reason,
6346 and we should not substitute some other register
6347 which is not supposed to be clobbered.
6348 Therefore, this loop cannot be merged into the one below
6349 because a CALL may precede a CLOBBER and refer to the
6350 value clobbered. We must not let a canonicalization do
6351 anything in that case. */
6352 for (i = 0; i < lim; i++)
6353 {
6354 register rtx y = XVECEXP (x, 0, i);
6355 if (GET_CODE (y) == CLOBBER)
6356 {
6357 rtx clobbered = XEXP (y, 0);
6358
6359 if (GET_CODE (clobbered) == REG
6360 || GET_CODE (clobbered) == SUBREG)
6361 invalidate (clobbered, VOIDmode);
6362 else if (GET_CODE (clobbered) == STRICT_LOW_PART
6363 || GET_CODE (clobbered) == ZERO_EXTRACT)
6364 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
6365 }
6366 }
6367
6368 for (i = 0; i < lim; i++)
6369 {
6370 register rtx y = XVECEXP (x, 0, i);
6371 if (GET_CODE (y) == SET)
6372 {
6373 /* As above, we ignore unconditional jumps and call-insns and
6374 ignore the result of apply_change_group. */
6375 if (GET_CODE (SET_SRC (y)) == CALL)
6376 {
6377 canon_reg (SET_SRC (y), insn);
6378 apply_change_group ();
6379 fold_rtx (SET_SRC (y), insn);
6380 invalidate (SET_DEST (y), VOIDmode);
6381 }
6382 else if (SET_DEST (y) == pc_rtx
6383 && GET_CODE (SET_SRC (y)) == LABEL_REF)
6384 ;
6385 else
6386 sets[n_sets++].rtl = y;
6387 }
6388 else if (GET_CODE (y) == CLOBBER)
6389 {
6390 /* If we clobber memory, canon the address.
6391 This does nothing when a register is clobbered
6392 because we have already invalidated the reg. */
6393 if (GET_CODE (XEXP (y, 0)) == MEM)
6394 canon_reg (XEXP (y, 0), NULL_RTX);
6395 }
6396 else if (GET_CODE (y) == USE
6397 && ! (GET_CODE (XEXP (y, 0)) == REG
6398 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
6399 canon_reg (y, NULL_RTX);
6400 else if (GET_CODE (y) == CALL)
6401 {
6402 /* The result of apply_change_group can be ignored; see
6403 canon_reg. */
6404 canon_reg (y, insn);
6405 apply_change_group ();
6406 fold_rtx (y, insn);
6407 }
6408 }
6409 }
6410 else if (GET_CODE (x) == CLOBBER)
6411 {
6412 if (GET_CODE (XEXP (x, 0)) == MEM)
6413 canon_reg (XEXP (x, 0), NULL_RTX);
6414 }
6415
6416 /* Canonicalize a USE of a pseudo register or memory location. */
6417 else if (GET_CODE (x) == USE
6418 && ! (GET_CODE (XEXP (x, 0)) == REG
6419 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
6420 canon_reg (XEXP (x, 0), NULL_RTX);
6421 else if (GET_CODE (x) == CALL)
6422 {
6423 /* The result of apply_change_group can be ignored; see canon_reg. */
6424 canon_reg (x, insn);
6425 apply_change_group ();
6426 fold_rtx (x, insn);
6427 }
6428
6429 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
6430 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
6431 is handled specially for this case, and if it isn't set, then there will
6432 be no equivalence for the destination. */
6433 if (n_sets == 1 && REG_NOTES (insn) != 0
6434 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
6435 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
6436 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
6437 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
6438
6439 /* Canonicalize sources and addresses of destinations.
6440 We do this in a separate pass to avoid problems when a MATCH_DUP is
6441 present in the insn pattern. In that case, we want to ensure that
6442 we don't break the duplicate nature of the pattern. So we will replace
6443 both operands at the same time. Otherwise, we would fail to find an
6444 equivalent substitution in the loop calling validate_change below.
6445
6446 We used to suppress canonicalization of DEST if it appears in SRC,
6447 but we don't do this any more. */
6448
6449 for (i = 0; i < n_sets; i++)
6450 {
6451 rtx dest = SET_DEST (sets[i].rtl);
6452 rtx src = SET_SRC (sets[i].rtl);
6453 rtx new = canon_reg (src, insn);
6454 int insn_code;
6455
6456 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
6457 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
6458 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
6459 || (insn_code = recog_memoized (insn)) < 0
6460 || insn_n_dups[insn_code] > 0)
6461 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
6462 else
6463 SET_SRC (sets[i].rtl) = new;
6464
6465 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
6466 {
6467 validate_change (insn, &XEXP (dest, 1),
6468 canon_reg (XEXP (dest, 1), insn), 1);
6469 validate_change (insn, &XEXP (dest, 2),
6470 canon_reg (XEXP (dest, 2), insn), 1);
6471 }
6472
6473 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
6474 || GET_CODE (dest) == ZERO_EXTRACT
6475 || GET_CODE (dest) == SIGN_EXTRACT)
6476 dest = XEXP (dest, 0);
6477
6478 if (GET_CODE (dest) == MEM)
6479 canon_reg (dest, insn);
6480 }
6481
6482 /* Now that we have done all the replacements, we can apply the change
6483 group and see if they all work. Note that this will cause some
6484 canonicalizations that would have worked individually not to be applied
6485 because some other canonicalization didn't work, but this should not
6486 occur often.
6487
6488 The result of apply_change_group can be ignored; see canon_reg. */
6489
6490 apply_change_group ();
6491
6492 /* Set sets[i].src_elt to the class each source belongs to.
6493 Detect assignments from or to volatile things
6494 and set set[i] to zero so they will be ignored
6495 in the rest of this function.
6496
6497 Nothing in this loop changes the hash table or the register chains. */
6498
6499 for (i = 0; i < n_sets; i++)
6500 {
6501 register rtx src, dest;
6502 register rtx src_folded;
6503 register struct table_elt *elt = 0, *p;
6504 enum machine_mode mode;
6505 rtx src_eqv_here;
6506 rtx src_const = 0;
6507 rtx src_related = 0;
6508 struct table_elt *src_const_elt = 0;
6509 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
6510 int src_related_cost = 10000, src_elt_cost = 10000;
6511 /* Set non-zero if we need to call force_const_mem on with the
6512 contents of src_folded before using it. */
6513 int src_folded_force_flag = 0;
6514
6515 dest = SET_DEST (sets[i].rtl);
6516 src = SET_SRC (sets[i].rtl);
6517
6518 /* If SRC is a constant that has no machine mode,
6519 hash it with the destination's machine mode.
6520 This way we can keep different modes separate. */
6521
6522 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6523 sets[i].mode = mode;
6524
6525 if (src_eqv)
6526 {
6527 enum machine_mode eqvmode = mode;
6528 if (GET_CODE (dest) == STRICT_LOW_PART)
6529 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6530 do_not_record = 0;
6531 hash_arg_in_memory = 0;
6532 hash_arg_in_struct = 0;
6533 src_eqv = fold_rtx (src_eqv, insn);
6534 src_eqv_hash = HASH (src_eqv, eqvmode);
6535
6536 /* Find the equivalence class for the equivalent expression. */
6537
6538 if (!do_not_record)
6539 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
6540
6541 src_eqv_volatile = do_not_record;
6542 src_eqv_in_memory = hash_arg_in_memory;
6543 src_eqv_in_struct = hash_arg_in_struct;
6544 }
6545
6546 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
6547 value of the INNER register, not the destination. So it is not
6548 a valid substitution for the source. But save it for later. */
6549 if (GET_CODE (dest) == STRICT_LOW_PART)
6550 src_eqv_here = 0;
6551 else
6552 src_eqv_here = src_eqv;
6553
6554 /* Simplify and foldable subexpressions in SRC. Then get the fully-
6555 simplified result, which may not necessarily be valid. */
6556 src_folded = fold_rtx (src, insn);
6557
6558 #if 0
6559 /* ??? This caused bad code to be generated for the m68k port with -O2.
6560 Suppose src is (CONST_INT -1), and that after truncation src_folded
6561 is (CONST_INT 3). Suppose src_folded is then used for src_const.
6562 At the end we will add src and src_const to the same equivalence
6563 class. We now have 3 and -1 on the same equivalence class. This
6564 causes later instructions to be mis-optimized. */
6565 /* If storing a constant in a bitfield, pre-truncate the constant
6566 so we will be able to record it later. */
6567 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6568 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6569 {
6570 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6571
6572 if (GET_CODE (src) == CONST_INT
6573 && GET_CODE (width) == CONST_INT
6574 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6575 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6576 src_folded
6577 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
6578 << INTVAL (width)) - 1));
6579 }
6580 #endif
6581
6582 /* Compute SRC's hash code, and also notice if it
6583 should not be recorded at all. In that case,
6584 prevent any further processing of this assignment. */
6585 do_not_record = 0;
6586 hash_arg_in_memory = 0;
6587 hash_arg_in_struct = 0;
6588
6589 sets[i].src = src;
6590 sets[i].src_hash = HASH (src, mode);
6591 sets[i].src_volatile = do_not_record;
6592 sets[i].src_in_memory = hash_arg_in_memory;
6593 sets[i].src_in_struct = hash_arg_in_struct;
6594
6595 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
6596 a pseudo that is set more than once, do not record SRC. Using
6597 SRC as a replacement for anything else will be incorrect in that
6598 situation. Note that this usually occurs only for stack slots,
6599 in which case all the RTL would be referring to SRC, so we don't
6600 lose any optimization opportunities by not having SRC in the
6601 hash table. */
6602
6603 if (GET_CODE (src) == MEM
6604 && find_reg_note (insn, REG_EQUIV, src) != 0
6605 && GET_CODE (dest) == REG
6606 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
6607 && REG_N_SETS (REGNO (dest)) != 1)
6608 sets[i].src_volatile = 1;
6609
6610 #if 0
6611 /* It is no longer clear why we used to do this, but it doesn't
6612 appear to still be needed. So let's try without it since this
6613 code hurts cse'ing widened ops. */
6614 /* If source is a perverse subreg (such as QI treated as an SI),
6615 treat it as volatile. It may do the work of an SI in one context
6616 where the extra bits are not being used, but cannot replace an SI
6617 in general. */
6618 if (GET_CODE (src) == SUBREG
6619 && (GET_MODE_SIZE (GET_MODE (src))
6620 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6621 sets[i].src_volatile = 1;
6622 #endif
6623
6624 /* Locate all possible equivalent forms for SRC. Try to replace
6625 SRC in the insn with each cheaper equivalent.
6626
6627 We have the following types of equivalents: SRC itself, a folded
6628 version, a value given in a REG_EQUAL note, or a value related
6629 to a constant.
6630
6631 Each of these equivalents may be part of an additional class
6632 of equivalents (if more than one is in the table, they must be in
6633 the same class; we check for this).
6634
6635 If the source is volatile, we don't do any table lookups.
6636
6637 We note any constant equivalent for possible later use in a
6638 REG_NOTE. */
6639
6640 if (!sets[i].src_volatile)
6641 elt = lookup (src, sets[i].src_hash, mode);
6642
6643 sets[i].src_elt = elt;
6644
6645 if (elt && src_eqv_here && src_eqv_elt)
6646 {
6647 if (elt->first_same_value != src_eqv_elt->first_same_value)
6648 {
6649 /* The REG_EQUAL is indicating that two formerly distinct
6650 classes are now equivalent. So merge them. */
6651 merge_equiv_classes (elt, src_eqv_elt);
6652 src_eqv_hash = HASH (src_eqv, elt->mode);
6653 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
6654 }
6655
6656 src_eqv_here = 0;
6657 }
6658
6659 else if (src_eqv_elt)
6660 elt = src_eqv_elt;
6661
6662 /* Try to find a constant somewhere and record it in `src_const'.
6663 Record its table element, if any, in `src_const_elt'. Look in
6664 any known equivalences first. (If the constant is not in the
6665 table, also set `sets[i].src_const_hash'). */
6666 if (elt)
6667 for (p = elt->first_same_value; p; p = p->next_same_value)
6668 if (p->is_const)
6669 {
6670 src_const = p->exp;
6671 src_const_elt = elt;
6672 break;
6673 }
6674
6675 if (src_const == 0
6676 && (CONSTANT_P (src_folded)
6677 /* Consider (minus (label_ref L1) (label_ref L2)) as
6678 "constant" here so we will record it. This allows us
6679 to fold switch statements when an ADDR_DIFF_VEC is used. */
6680 || (GET_CODE (src_folded) == MINUS
6681 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6682 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6683 src_const = src_folded, src_const_elt = elt;
6684 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6685 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6686
6687 /* If we don't know if the constant is in the table, get its
6688 hash code and look it up. */
6689 if (src_const && src_const_elt == 0)
6690 {
6691 sets[i].src_const_hash = HASH (src_const, mode);
6692 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
6693 }
6694
6695 sets[i].src_const = src_const;
6696 sets[i].src_const_elt = src_const_elt;
6697
6698 /* If the constant and our source are both in the table, mark them as
6699 equivalent. Otherwise, if a constant is in the table but the source
6700 isn't, set ELT to it. */
6701 if (src_const_elt && elt
6702 && src_const_elt->first_same_value != elt->first_same_value)
6703 merge_equiv_classes (elt, src_const_elt);
6704 else if (src_const_elt && elt == 0)
6705 elt = src_const_elt;
6706
6707 /* See if there is a register linearly related to a constant
6708 equivalent of SRC. */
6709 if (src_const
6710 && (GET_CODE (src_const) == CONST
6711 || (src_const_elt && src_const_elt->related_value != 0)))
6712 {
6713 src_related = use_related_value (src_const, src_const_elt);
6714 if (src_related)
6715 {
6716 struct table_elt *src_related_elt
6717 = lookup (src_related, HASH (src_related, mode), mode);
6718 if (src_related_elt && elt)
6719 {
6720 if (elt->first_same_value
6721 != src_related_elt->first_same_value)
6722 /* This can occur when we previously saw a CONST
6723 involving a SYMBOL_REF and then see the SYMBOL_REF
6724 twice. Merge the involved classes. */
6725 merge_equiv_classes (elt, src_related_elt);
6726
6727 src_related = 0;
6728 src_related_elt = 0;
6729 }
6730 else if (src_related_elt && elt == 0)
6731 elt = src_related_elt;
6732 }
6733 }
6734
6735 /* See if we have a CONST_INT that is already in a register in a
6736 wider mode. */
6737
6738 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6739 && GET_MODE_CLASS (mode) == MODE_INT
6740 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6741 {
6742 enum machine_mode wider_mode;
6743
6744 for (wider_mode = GET_MODE_WIDER_MODE (mode);
6745 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6746 && src_related == 0;
6747 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6748 {
6749 struct table_elt *const_elt
6750 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6751
6752 if (const_elt == 0)
6753 continue;
6754
6755 for (const_elt = const_elt->first_same_value;
6756 const_elt; const_elt = const_elt->next_same_value)
6757 if (GET_CODE (const_elt->exp) == REG)
6758 {
6759 src_related = gen_lowpart_if_possible (mode,
6760 const_elt->exp);
6761 break;
6762 }
6763 }
6764 }
6765
6766 /* Another possibility is that we have an AND with a constant in
6767 a mode narrower than a word. If so, it might have been generated
6768 as part of an "if" which would narrow the AND. If we already
6769 have done the AND in a wider mode, we can use a SUBREG of that
6770 value. */
6771
6772 if (flag_expensive_optimizations && ! src_related
6773 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6774 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6775 {
6776 enum machine_mode tmode;
6777 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
6778
6779 for (tmode = GET_MODE_WIDER_MODE (mode);
6780 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6781 tmode = GET_MODE_WIDER_MODE (tmode))
6782 {
6783 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6784 struct table_elt *larger_elt;
6785
6786 if (inner)
6787 {
6788 PUT_MODE (new_and, tmode);
6789 XEXP (new_and, 0) = inner;
6790 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6791 if (larger_elt == 0)
6792 continue;
6793
6794 for (larger_elt = larger_elt->first_same_value;
6795 larger_elt; larger_elt = larger_elt->next_same_value)
6796 if (GET_CODE (larger_elt->exp) == REG)
6797 {
6798 src_related
6799 = gen_lowpart_if_possible (mode, larger_elt->exp);
6800 break;
6801 }
6802
6803 if (src_related)
6804 break;
6805 }
6806 }
6807 }
6808
6809 #ifdef LOAD_EXTEND_OP
6810 /* See if a MEM has already been loaded with a widening operation;
6811 if it has, we can use a subreg of that. Many CISC machines
6812 also have such operations, but this is only likely to be
6813 beneficial these machines. */
6814
6815 if (flag_expensive_optimizations && src_related == 0
6816 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6817 && GET_MODE_CLASS (mode) == MODE_INT
6818 && GET_CODE (src) == MEM && ! do_not_record
6819 && LOAD_EXTEND_OP (mode) != NIL)
6820 {
6821 enum machine_mode tmode;
6822
6823 /* Set what we are trying to extend and the operation it might
6824 have been extended with. */
6825 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
6826 XEXP (memory_extend_rtx, 0) = src;
6827
6828 for (tmode = GET_MODE_WIDER_MODE (mode);
6829 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6830 tmode = GET_MODE_WIDER_MODE (tmode))
6831 {
6832 struct table_elt *larger_elt;
6833
6834 PUT_MODE (memory_extend_rtx, tmode);
6835 larger_elt = lookup (memory_extend_rtx,
6836 HASH (memory_extend_rtx, tmode), tmode);
6837 if (larger_elt == 0)
6838 continue;
6839
6840 for (larger_elt = larger_elt->first_same_value;
6841 larger_elt; larger_elt = larger_elt->next_same_value)
6842 if (GET_CODE (larger_elt->exp) == REG)
6843 {
6844 src_related = gen_lowpart_if_possible (mode,
6845 larger_elt->exp);
6846 break;
6847 }
6848
6849 if (src_related)
6850 break;
6851 }
6852 }
6853 #endif /* LOAD_EXTEND_OP */
6854
6855 if (src == src_folded)
6856 src_folded = 0;
6857
6858 /* At this point, ELT, if non-zero, points to a class of expressions
6859 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
6860 and SRC_RELATED, if non-zero, each contain additional equivalent
6861 expressions. Prune these latter expressions by deleting expressions
6862 already in the equivalence class.
6863
6864 Check for an equivalent identical to the destination. If found,
6865 this is the preferred equivalent since it will likely lead to
6866 elimination of the insn. Indicate this by placing it in
6867 `src_related'. */
6868
6869 if (elt) elt = elt->first_same_value;
6870 for (p = elt; p; p = p->next_same_value)
6871 {
6872 enum rtx_code code = GET_CODE (p->exp);
6873
6874 /* If the expression is not valid, ignore it. Then we do not
6875 have to check for validity below. In most cases, we can use
6876 `rtx_equal_p', since canonicalization has already been done. */
6877 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
6878 continue;
6879
6880 /* Also skip paradoxical subregs, unless that's what we're
6881 looking for. */
6882 if (code == SUBREG
6883 && (GET_MODE_SIZE (GET_MODE (p->exp))
6884 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
6885 && ! (src != 0
6886 && GET_CODE (src) == SUBREG
6887 && GET_MODE (src) == GET_MODE (p->exp)
6888 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
6889 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
6890 continue;
6891
6892 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
6893 src = 0;
6894 else if (src_folded && GET_CODE (src_folded) == code
6895 && rtx_equal_p (src_folded, p->exp))
6896 src_folded = 0;
6897 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
6898 && rtx_equal_p (src_eqv_here, p->exp))
6899 src_eqv_here = 0;
6900 else if (src_related && GET_CODE (src_related) == code
6901 && rtx_equal_p (src_related, p->exp))
6902 src_related = 0;
6903
6904 /* This is the same as the destination of the insns, we want
6905 to prefer it. Copy it to src_related. The code below will
6906 then give it a negative cost. */
6907 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
6908 src_related = dest;
6909
6910 }
6911
6912 /* Find the cheapest valid equivalent, trying all the available
6913 possibilities. Prefer items not in the hash table to ones
6914 that are when they are equal cost. Note that we can never
6915 worsen an insn as the current contents will also succeed.
6916 If we find an equivalent identical to the destination, use it as best,
6917 since this insn will probably be eliminated in that case. */
6918 if (src)
6919 {
6920 if (rtx_equal_p (src, dest))
6921 src_cost = -1;
6922 else
6923 src_cost = COST (src);
6924 }
6925
6926 if (src_eqv_here)
6927 {
6928 if (rtx_equal_p (src_eqv_here, dest))
6929 src_eqv_cost = -1;
6930 else
6931 src_eqv_cost = COST (src_eqv_here);
6932 }
6933
6934 if (src_folded)
6935 {
6936 if (rtx_equal_p (src_folded, dest))
6937 src_folded_cost = -1;
6938 else
6939 src_folded_cost = COST (src_folded);
6940 }
6941
6942 if (src_related)
6943 {
6944 if (rtx_equal_p (src_related, dest))
6945 src_related_cost = -1;
6946 else
6947 src_related_cost = COST (src_related);
6948 }
6949
6950 /* If this was an indirect jump insn, a known label will really be
6951 cheaper even though it looks more expensive. */
6952 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
6953 src_folded = src_const, src_folded_cost = -1;
6954
6955 /* Terminate loop when replacement made. This must terminate since
6956 the current contents will be tested and will always be valid. */
6957 while (1)
6958 {
6959 rtx trial, old_src;
6960
6961 /* Skip invalid entries. */
6962 while (elt && GET_CODE (elt->exp) != REG
6963 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6964 elt = elt->next_same_value;
6965
6966 /* A paradoxical subreg would be bad here: it'll be the right
6967 size, but later may be adjusted so that the upper bits aren't
6968 what we want. So reject it. */
6969 if (elt != 0
6970 && GET_CODE (elt->exp) == SUBREG
6971 && (GET_MODE_SIZE (GET_MODE (elt->exp))
6972 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
6973 /* It is okay, though, if the rtx we're trying to match
6974 will ignore any of the bits we can't predict. */
6975 && ! (src != 0
6976 && GET_CODE (src) == SUBREG
6977 && GET_MODE (src) == GET_MODE (elt->exp)
6978 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
6979 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
6980 {
6981 elt = elt->next_same_value;
6982 continue;
6983 }
6984
6985 if (elt) src_elt_cost = elt->cost;
6986
6987 /* Find cheapest and skip it for the next time. For items
6988 of equal cost, use this order:
6989 src_folded, src, src_eqv, src_related and hash table entry. */
6990 if (src_folded_cost <= src_cost
6991 && src_folded_cost <= src_eqv_cost
6992 && src_folded_cost <= src_related_cost
6993 && src_folded_cost <= src_elt_cost)
6994 {
6995 trial = src_folded, src_folded_cost = 10000;
6996 if (src_folded_force_flag)
6997 trial = force_const_mem (mode, trial);
6998 }
6999 else if (src_cost <= src_eqv_cost
7000 && src_cost <= src_related_cost
7001 && src_cost <= src_elt_cost)
7002 trial = src, src_cost = 10000;
7003 else if (src_eqv_cost <= src_related_cost
7004 && src_eqv_cost <= src_elt_cost)
7005 trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
7006 else if (src_related_cost <= src_elt_cost)
7007 trial = copy_rtx (src_related), src_related_cost = 10000;
7008 else
7009 {
7010 trial = copy_rtx (elt->exp);
7011 elt = elt->next_same_value;
7012 src_elt_cost = 10000;
7013 }
7014
7015 /* We don't normally have an insn matching (set (pc) (pc)), so
7016 check for this separately here. We will delete such an
7017 insn below.
7018
7019 Tablejump insns contain a USE of the table, so simply replacing
7020 the operand with the constant won't match. This is simply an
7021 unconditional branch, however, and is therefore valid. Just
7022 insert the substitution here and we will delete and re-emit
7023 the insn later. */
7024
7025 /* Keep track of the original SET_SRC so that we can fix notes
7026 on libcall instructions. */
7027 old_src = SET_SRC (sets[i].rtl);
7028
7029 if (n_sets == 1 && dest == pc_rtx
7030 && (trial == pc_rtx
7031 || (GET_CODE (trial) == LABEL_REF
7032 && ! condjump_p (insn))))
7033 {
7034 /* If TRIAL is a label in front of a jump table, we are
7035 really falling through the switch (this is how casesi
7036 insns work), so we must branch around the table. */
7037 if (GET_CODE (trial) == CODE_LABEL
7038 && NEXT_INSN (trial) != 0
7039 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
7040 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
7041 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
7042
7043 trial = gen_rtx_LABEL_REF (Pmode, get_label_after (trial));
7044
7045 SET_SRC (sets[i].rtl) = trial;
7046 cse_jumps_altered = 1;
7047 break;
7048 }
7049
7050 /* Look for a substitution that makes a valid insn. */
7051 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
7052 {
7053 /* If we just made a substitution inside a libcall, then we
7054 need to make the same substitution in any notes attached
7055 to the RETVAL insn. */
7056 if (libcall_insn
7057 && (GET_CODE (old_src) == REG
7058 || GET_CODE (old_src) == SUBREG
7059 || GET_CODE (old_src) == MEM))
7060 replace_rtx (REG_NOTES (libcall_insn), old_src,
7061 canon_reg (SET_SRC (sets[i].rtl), insn));
7062
7063 /* The result of apply_change_group can be ignored; see
7064 canon_reg. */
7065
7066 validate_change (insn, &SET_SRC (sets[i].rtl),
7067 canon_reg (SET_SRC (sets[i].rtl), insn),
7068 1);
7069 apply_change_group ();
7070 break;
7071 }
7072
7073 /* If we previously found constant pool entries for
7074 constants and this is a constant, try making a
7075 pool entry. Put it in src_folded unless we already have done
7076 this since that is where it likely came from. */
7077
7078 else if (constant_pool_entries_cost
7079 && CONSTANT_P (trial)
7080 && ! (GET_CODE (trial) == CONST
7081 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
7082 && (src_folded == 0
7083 || (GET_CODE (src_folded) != MEM
7084 && ! src_folded_force_flag))
7085 && GET_MODE_CLASS (mode) != MODE_CC
7086 && mode != VOIDmode)
7087 {
7088 src_folded_force_flag = 1;
7089 src_folded = trial;
7090 src_folded_cost = constant_pool_entries_cost;
7091 }
7092 }
7093
7094 src = SET_SRC (sets[i].rtl);
7095
7096 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
7097 However, there is an important exception: If both are registers
7098 that are not the head of their equivalence class, replace SET_SRC
7099 with the head of the class. If we do not do this, we will have
7100 both registers live over a portion of the basic block. This way,
7101 their lifetimes will likely abut instead of overlapping. */
7102 if (GET_CODE (dest) == REG
7103 && REGNO_QTY_VALID_P (REGNO (dest))
7104 && qty_mode[reg_qty[REGNO (dest)]] == GET_MODE (dest)
7105 && qty_first_reg[reg_qty[REGNO (dest)]] != REGNO (dest)
7106 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
7107 /* Don't do this if the original insn had a hard reg as
7108 SET_SRC. */
7109 && (GET_CODE (sets[i].src) != REG
7110 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
7111 /* We can't call canon_reg here because it won't do anything if
7112 SRC is a hard register. */
7113 {
7114 int first = qty_first_reg[reg_qty[REGNO (src)]];
7115 rtx new_src
7116 = (first >= FIRST_PSEUDO_REGISTER
7117 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
7118
7119 /* We must use validate-change even for this, because this
7120 might be a special no-op instruction, suitable only to
7121 tag notes onto. */
7122 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
7123 {
7124 src = new_src;
7125 /* If we had a constant that is cheaper than what we are now
7126 setting SRC to, use that constant. We ignored it when we
7127 thought we could make this into a no-op. */
7128 if (src_const && COST (src_const) < COST (src)
7129 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const,
7130 0))
7131 src = src_const;
7132 }
7133 }
7134
7135 /* If we made a change, recompute SRC values. */
7136 if (src != sets[i].src)
7137 {
7138 do_not_record = 0;
7139 hash_arg_in_memory = 0;
7140 hash_arg_in_struct = 0;
7141 sets[i].src = src;
7142 sets[i].src_hash = HASH (src, mode);
7143 sets[i].src_volatile = do_not_record;
7144 sets[i].src_in_memory = hash_arg_in_memory;
7145 sets[i].src_in_struct = hash_arg_in_struct;
7146 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
7147 }
7148
7149 /* If this is a single SET, we are setting a register, and we have an
7150 equivalent constant, we want to add a REG_NOTE. We don't want
7151 to write a REG_EQUAL note for a constant pseudo since verifying that
7152 that pseudo hasn't been eliminated is a pain. Such a note also
7153 won't help anything. */
7154 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
7155 && GET_CODE (src_const) != REG)
7156 {
7157 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7158
7159 /* Record the actual constant value in a REG_EQUAL note, making
7160 a new one if one does not already exist. */
7161 if (tem)
7162 XEXP (tem, 0) = src_const;
7163 else
7164 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
7165 src_const, REG_NOTES (insn));
7166
7167 /* If storing a constant value in a register that
7168 previously held the constant value 0,
7169 record this fact with a REG_WAS_0 note on this insn.
7170
7171 Note that the *register* is required to have previously held 0,
7172 not just any register in the quantity and we must point to the
7173 insn that set that register to zero.
7174
7175 Rather than track each register individually, we just see if
7176 the last set for this quantity was for this register. */
7177
7178 if (REGNO_QTY_VALID_P (REGNO (dest))
7179 && qty_const[reg_qty[REGNO (dest)]] == const0_rtx)
7180 {
7181 /* See if we previously had a REG_WAS_0 note. */
7182 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7183 rtx const_insn = qty_const_insn[reg_qty[REGNO (dest)]];
7184
7185 if ((tem = single_set (const_insn)) != 0
7186 && rtx_equal_p (SET_DEST (tem), dest))
7187 {
7188 if (note)
7189 XEXP (note, 0) = const_insn;
7190 else
7191 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_WAS_0,
7192 const_insn,
7193 REG_NOTES (insn));
7194 }
7195 }
7196 }
7197
7198 /* Now deal with the destination. */
7199 do_not_record = 0;
7200 sets[i].inner_dest_loc = &SET_DEST (sets[0].rtl);
7201
7202 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
7203 to the MEM or REG within it. */
7204 while (GET_CODE (dest) == SIGN_EXTRACT
7205 || GET_CODE (dest) == ZERO_EXTRACT
7206 || GET_CODE (dest) == SUBREG
7207 || GET_CODE (dest) == STRICT_LOW_PART)
7208 {
7209 sets[i].inner_dest_loc = &XEXP (dest, 0);
7210 dest = XEXP (dest, 0);
7211 }
7212
7213 sets[i].inner_dest = dest;
7214
7215 if (GET_CODE (dest) == MEM)
7216 {
7217 #ifdef PUSH_ROUNDING
7218 /* Stack pushes invalidate the stack pointer. */
7219 rtx addr = XEXP (dest, 0);
7220 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7221 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7222 && XEXP (addr, 0) == stack_pointer_rtx)
7223 invalidate (stack_pointer_rtx, Pmode);
7224 #endif
7225 dest = fold_rtx (dest, insn);
7226 }
7227
7228 /* Compute the hash code of the destination now,
7229 before the effects of this instruction are recorded,
7230 since the register values used in the address computation
7231 are those before this instruction. */
7232 sets[i].dest_hash = HASH (dest, mode);
7233
7234 /* Don't enter a bit-field in the hash table
7235 because the value in it after the store
7236 may not equal what was stored, due to truncation. */
7237
7238 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
7239 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
7240 {
7241 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
7242
7243 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
7244 && GET_CODE (width) == CONST_INT
7245 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
7246 && ! (INTVAL (src_const)
7247 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
7248 /* Exception: if the value is constant,
7249 and it won't be truncated, record it. */
7250 ;
7251 else
7252 {
7253 /* This is chosen so that the destination will be invalidated
7254 but no new value will be recorded.
7255 We must invalidate because sometimes constant
7256 values can be recorded for bitfields. */
7257 sets[i].src_elt = 0;
7258 sets[i].src_volatile = 1;
7259 src_eqv = 0;
7260 src_eqv_elt = 0;
7261 }
7262 }
7263
7264 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
7265 the insn. */
7266 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
7267 {
7268 PUT_CODE (insn, NOTE);
7269 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7270 NOTE_SOURCE_FILE (insn) = 0;
7271 cse_jumps_altered = 1;
7272 /* One less use of the label this insn used to jump to. */
7273 if (JUMP_LABEL (insn) != 0)
7274 --LABEL_NUSES (JUMP_LABEL (insn));
7275 /* No more processing for this set. */
7276 sets[i].rtl = 0;
7277 }
7278
7279 /* If this SET is now setting PC to a label, we know it used to
7280 be a conditional or computed branch. So we see if we can follow
7281 it. If it was a computed branch, delete it and re-emit. */
7282 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
7283 {
7284 rtx p;
7285
7286 /* If this is not in the format for a simple branch and
7287 we are the only SET in it, re-emit it. */
7288 if (! simplejump_p (insn) && n_sets == 1)
7289 {
7290 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
7291 JUMP_LABEL (new) = XEXP (src, 0);
7292 LABEL_NUSES (XEXP (src, 0))++;
7293 delete_insn (insn);
7294 insn = new;
7295 }
7296 else
7297 /* Otherwise, force rerecognition, since it probably had
7298 a different pattern before.
7299 This shouldn't really be necessary, since whatever
7300 changed the source value above should have done this.
7301 Until the right place is found, might as well do this here. */
7302 INSN_CODE (insn) = -1;
7303
7304 /* Now that we've converted this jump to an unconditional jump,
7305 there is dead code after it. Delete the dead code until we
7306 reach a BARRIER, the end of the function, or a label. Do
7307 not delete NOTEs except for NOTE_INSN_DELETED since later
7308 phases assume these notes are retained. */
7309
7310 p = insn;
7311
7312 while (NEXT_INSN (p) != 0
7313 && GET_CODE (NEXT_INSN (p)) != BARRIER
7314 && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
7315 {
7316 if (GET_CODE (NEXT_INSN (p)) != NOTE
7317 || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
7318 delete_insn (NEXT_INSN (p));
7319 else
7320 p = NEXT_INSN (p);
7321 }
7322
7323 /* If we don't have a BARRIER immediately after INSN, put one there.
7324 Much code assumes that there are no NOTEs between a JUMP_INSN and
7325 BARRIER. */
7326
7327 if (NEXT_INSN (insn) == 0
7328 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
7329 emit_barrier_before (NEXT_INSN (insn));
7330
7331 /* We might have two BARRIERs separated by notes. Delete the second
7332 one if so. */
7333
7334 if (p != insn && NEXT_INSN (p) != 0
7335 && GET_CODE (NEXT_INSN (p)) == BARRIER)
7336 delete_insn (NEXT_INSN (p));
7337
7338 cse_jumps_altered = 1;
7339 sets[i].rtl = 0;
7340 }
7341
7342 /* If destination is volatile, invalidate it and then do no further
7343 processing for this assignment. */
7344
7345 else if (do_not_record)
7346 {
7347 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7348 || GET_CODE (dest) == MEM)
7349 invalidate (dest, VOIDmode);
7350 else if (GET_CODE (dest) == STRICT_LOW_PART
7351 || GET_CODE (dest) == ZERO_EXTRACT)
7352 invalidate (XEXP (dest, 0), GET_MODE (dest));
7353 sets[i].rtl = 0;
7354 }
7355
7356 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
7357 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
7358
7359 #ifdef HAVE_cc0
7360 /* If setting CC0, record what it was set to, or a constant, if it
7361 is equivalent to a constant. If it is being set to a floating-point
7362 value, make a COMPARE with the appropriate constant of 0. If we
7363 don't do this, later code can interpret this as a test against
7364 const0_rtx, which can cause problems if we try to put it into an
7365 insn as a floating-point operand. */
7366 if (dest == cc0_rtx)
7367 {
7368 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
7369 this_insn_cc0_mode = mode;
7370 if (FLOAT_MODE_P (mode))
7371 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
7372 CONST0_RTX (mode));
7373 }
7374 #endif
7375 }
7376
7377 /* Now enter all non-volatile source expressions in the hash table
7378 if they are not already present.
7379 Record their equivalence classes in src_elt.
7380 This way we can insert the corresponding destinations into
7381 the same classes even if the actual sources are no longer in them
7382 (having been invalidated). */
7383
7384 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
7385 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
7386 {
7387 register struct table_elt *elt;
7388 register struct table_elt *classp = sets[0].src_elt;
7389 rtx dest = SET_DEST (sets[0].rtl);
7390 enum machine_mode eqvmode = GET_MODE (dest);
7391
7392 if (GET_CODE (dest) == STRICT_LOW_PART)
7393 {
7394 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
7395 classp = 0;
7396 }
7397 if (insert_regs (src_eqv, classp, 0))
7398 {
7399 rehash_using_reg (src_eqv);
7400 src_eqv_hash = HASH (src_eqv, eqvmode);
7401 }
7402 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7403 elt->in_memory = src_eqv_in_memory;
7404 elt->in_struct = src_eqv_in_struct;
7405 src_eqv_elt = elt;
7406
7407 /* Check to see if src_eqv_elt is the same as a set source which
7408 does not yet have an elt, and if so set the elt of the set source
7409 to src_eqv_elt. */
7410 for (i = 0; i < n_sets; i++)
7411 if (sets[i].rtl && sets[i].src_elt == 0
7412 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
7413 sets[i].src_elt = src_eqv_elt;
7414 }
7415
7416 for (i = 0; i < n_sets; i++)
7417 if (sets[i].rtl && ! sets[i].src_volatile
7418 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
7419 {
7420 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
7421 {
7422 /* REG_EQUAL in setting a STRICT_LOW_PART
7423 gives an equivalent for the entire destination register,
7424 not just for the subreg being stored in now.
7425 This is a more interesting equivalence, so we arrange later
7426 to treat the entire reg as the destination. */
7427 sets[i].src_elt = src_eqv_elt;
7428 sets[i].src_hash = src_eqv_hash;
7429 }
7430 else
7431 {
7432 /* Insert source and constant equivalent into hash table, if not
7433 already present. */
7434 register struct table_elt *classp = src_eqv_elt;
7435 register rtx src = sets[i].src;
7436 register rtx dest = SET_DEST (sets[i].rtl);
7437 enum machine_mode mode
7438 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
7439
7440 if (sets[i].src_elt == 0)
7441 {
7442 register struct table_elt *elt;
7443
7444 /* Note that these insert_regs calls cannot remove
7445 any of the src_elt's, because they would have failed to
7446 match if not still valid. */
7447 if (insert_regs (src, classp, 0))
7448 {
7449 rehash_using_reg (src);
7450 sets[i].src_hash = HASH (src, mode);
7451 }
7452 elt = insert (src, classp, sets[i].src_hash, mode);
7453 elt->in_memory = sets[i].src_in_memory;
7454 elt->in_struct = sets[i].src_in_struct;
7455 sets[i].src_elt = classp = elt;
7456 }
7457
7458 if (sets[i].src_const && sets[i].src_const_elt == 0
7459 && src != sets[i].src_const
7460 && ! rtx_equal_p (sets[i].src_const, src))
7461 sets[i].src_elt = insert (sets[i].src_const, classp,
7462 sets[i].src_const_hash, mode);
7463 }
7464 }
7465 else if (sets[i].src_elt == 0)
7466 /* If we did not insert the source into the hash table (e.g., it was
7467 volatile), note the equivalence class for the REG_EQUAL value, if any,
7468 so that the destination goes into that class. */
7469 sets[i].src_elt = src_eqv_elt;
7470
7471 invalidate_from_clobbers (x);
7472
7473 /* Some registers are invalidated by subroutine calls. Memory is
7474 invalidated by non-constant calls. */
7475
7476 if (GET_CODE (insn) == CALL_INSN)
7477 {
7478 if (! CONST_CALL_P (insn))
7479 invalidate_memory ();
7480 invalidate_for_call ();
7481 }
7482
7483 /* Now invalidate everything set by this instruction.
7484 If a SUBREG or other funny destination is being set,
7485 sets[i].rtl is still nonzero, so here we invalidate the reg
7486 a part of which is being set. */
7487
7488 for (i = 0; i < n_sets; i++)
7489 if (sets[i].rtl)
7490 {
7491 /* We can't use the inner dest, because the mode associated with
7492 a ZERO_EXTRACT is significant. */
7493 register rtx dest = SET_DEST (sets[i].rtl);
7494
7495 /* Needed for registers to remove the register from its
7496 previous quantity's chain.
7497 Needed for memory if this is a nonvarying address, unless
7498 we have just done an invalidate_memory that covers even those. */
7499 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7500 || GET_CODE (dest) == MEM)
7501 invalidate (dest, VOIDmode);
7502 else if (GET_CODE (dest) == STRICT_LOW_PART
7503 || GET_CODE (dest) == ZERO_EXTRACT)
7504 invalidate (XEXP (dest, 0), GET_MODE (dest));
7505 }
7506
7507 /* Make sure registers mentioned in destinations
7508 are safe for use in an expression to be inserted.
7509 This removes from the hash table
7510 any invalid entry that refers to one of these registers.
7511
7512 We don't care about the return value from mention_regs because
7513 we are going to hash the SET_DEST values unconditionally. */
7514
7515 for (i = 0; i < n_sets; i++)
7516 {
7517 if (sets[i].rtl)
7518 {
7519 rtx x = SET_DEST (sets[i].rtl);
7520
7521 if (GET_CODE (x) != REG)
7522 mention_regs (x);
7523 else
7524 {
7525 /* We used to rely on all references to a register becoming
7526 inaccessible when a register changes to a new quantity,
7527 since that changes the hash code. However, that is not
7528 safe, since after NBUCKETS new quantities we get a
7529 hash 'collision' of a register with its own invalid
7530 entries. And since SUBREGs have been changed not to
7531 change their hash code with the hash code of the register,
7532 it wouldn't work any longer at all. So we have to check
7533 for any invalid references lying around now.
7534 This code is similar to the REG case in mention_regs,
7535 but it knows that reg_tick has been incremented, and
7536 it leaves reg_in_table as -1 . */
7537 register int regno = REGNO (x);
7538 register int endregno
7539 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
7540 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
7541 int i;
7542
7543 for (i = regno; i < endregno; i++)
7544 {
7545 if (reg_in_table[i] >= 0)
7546 {
7547 remove_invalid_refs (i);
7548 reg_in_table[i] = -1;
7549 }
7550 }
7551 }
7552 }
7553 }
7554
7555 /* We may have just removed some of the src_elt's from the hash table.
7556 So replace each one with the current head of the same class. */
7557
7558 for (i = 0; i < n_sets; i++)
7559 if (sets[i].rtl)
7560 {
7561 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
7562 /* If elt was removed, find current head of same class,
7563 or 0 if nothing remains of that class. */
7564 {
7565 register struct table_elt *elt = sets[i].src_elt;
7566
7567 while (elt && elt->prev_same_value)
7568 elt = elt->prev_same_value;
7569
7570 while (elt && elt->first_same_value == 0)
7571 elt = elt->next_same_value;
7572 sets[i].src_elt = elt ? elt->first_same_value : 0;
7573 }
7574 }
7575
7576 /* Now insert the destinations into their equivalence classes. */
7577
7578 for (i = 0; i < n_sets; i++)
7579 if (sets[i].rtl)
7580 {
7581 register rtx dest = SET_DEST (sets[i].rtl);
7582 rtx inner_dest = sets[i].inner_dest;
7583 register struct table_elt *elt;
7584
7585 /* Don't record value if we are not supposed to risk allocating
7586 floating-point values in registers that might be wider than
7587 memory. */
7588 if ((flag_float_store
7589 && GET_CODE (dest) == MEM
7590 && FLOAT_MODE_P (GET_MODE (dest)))
7591 /* Don't record BLKmode values, because we don't know the
7592 size of it, and can't be sure that other BLKmode values
7593 have the same or smaller size. */
7594 || GET_MODE (dest) == BLKmode
7595 /* Don't record values of destinations set inside a libcall block
7596 since we might delete the libcall. Things should have been set
7597 up so we won't want to reuse such a value, but we play it safe
7598 here. */
7599 || libcall_insn
7600 /* If we didn't put a REG_EQUAL value or a source into the hash
7601 table, there is no point is recording DEST. */
7602 || sets[i].src_elt == 0
7603 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
7604 or SIGN_EXTEND, don't record DEST since it can cause
7605 some tracking to be wrong.
7606
7607 ??? Think about this more later. */
7608 || (GET_CODE (dest) == SUBREG
7609 && (GET_MODE_SIZE (GET_MODE (dest))
7610 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7611 && (GET_CODE (sets[i].src) == SIGN_EXTEND
7612 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
7613 continue;
7614
7615 /* STRICT_LOW_PART isn't part of the value BEING set,
7616 and neither is the SUBREG inside it.
7617 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
7618 if (GET_CODE (dest) == STRICT_LOW_PART)
7619 dest = SUBREG_REG (XEXP (dest, 0));
7620
7621 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
7622 /* Registers must also be inserted into chains for quantities. */
7623 if (insert_regs (dest, sets[i].src_elt, 1))
7624 {
7625 /* If `insert_regs' changes something, the hash code must be
7626 recalculated. */
7627 rehash_using_reg (dest);
7628 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
7629 }
7630
7631 if (GET_CODE (inner_dest) == MEM
7632 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
7633 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
7634 that (MEM (ADDRESSOF (X))) is equivalent to Y.
7635 Consider the case in which the address of the MEM is
7636 passed to a function, which alters the MEM. Then, if we
7637 later use Y instead of the MEM we'll miss the update. */
7638 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
7639 else
7640 elt = insert (dest, sets[i].src_elt,
7641 sets[i].dest_hash, GET_MODE (dest));
7642
7643 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
7644 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
7645 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
7646 0))));
7647
7648 if (elt->in_memory)
7649 {
7650 /* This implicitly assumes a whole struct
7651 need not have MEM_IN_STRUCT_P.
7652 But a whole struct is *supposed* to have MEM_IN_STRUCT_P. */
7653 elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
7654 || sets[i].inner_dest != SET_DEST (sets[i].rtl));
7655 }
7656
7657 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
7658 narrower than M2, and both M1 and M2 are the same number of words,
7659 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
7660 make that equivalence as well.
7661
7662 However, BAR may have equivalences for which gen_lowpart_if_possible
7663 will produce a simpler value than gen_lowpart_if_possible applied to
7664 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
7665 BAR's equivalences. If we don't get a simplified form, make
7666 the SUBREG. It will not be used in an equivalence, but will
7667 cause two similar assignments to be detected.
7668
7669 Note the loop below will find SUBREG_REG (DEST) since we have
7670 already entered SRC and DEST of the SET in the table. */
7671
7672 if (GET_CODE (dest) == SUBREG
7673 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
7674 / UNITS_PER_WORD)
7675 == (GET_MODE_SIZE (GET_MODE (dest)) - 1)/ UNITS_PER_WORD)
7676 && (GET_MODE_SIZE (GET_MODE (dest))
7677 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7678 && sets[i].src_elt != 0)
7679 {
7680 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
7681 struct table_elt *elt, *classp = 0;
7682
7683 for (elt = sets[i].src_elt->first_same_value; elt;
7684 elt = elt->next_same_value)
7685 {
7686 rtx new_src = 0;
7687 unsigned src_hash;
7688 struct table_elt *src_elt;
7689
7690 /* Ignore invalid entries. */
7691 if (GET_CODE (elt->exp) != REG
7692 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7693 continue;
7694
7695 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
7696 if (new_src == 0)
7697 new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
7698
7699 src_hash = HASH (new_src, new_mode);
7700 src_elt = lookup (new_src, src_hash, new_mode);
7701
7702 /* Put the new source in the hash table is if isn't
7703 already. */
7704 if (src_elt == 0)
7705 {
7706 if (insert_regs (new_src, classp, 0))
7707 {
7708 rehash_using_reg (new_src);
7709 src_hash = HASH (new_src, new_mode);
7710 }
7711 src_elt = insert (new_src, classp, src_hash, new_mode);
7712 src_elt->in_memory = elt->in_memory;
7713 src_elt->in_struct = elt->in_struct;
7714 }
7715 else if (classp && classp != src_elt->first_same_value)
7716 /* Show that two things that we've seen before are
7717 actually the same. */
7718 merge_equiv_classes (src_elt, classp);
7719
7720 classp = src_elt->first_same_value;
7721 /* Ignore invalid entries. */
7722 while (classp
7723 && GET_CODE (classp->exp) != REG
7724 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
7725 classp = classp->next_same_value;
7726 }
7727 }
7728 }
7729
7730 /* Special handling for (set REG0 REG1)
7731 where REG0 is the "cheapest", cheaper than REG1.
7732 After cse, REG1 will probably not be used in the sequel,
7733 so (if easily done) change this insn to (set REG1 REG0) and
7734 replace REG1 with REG0 in the previous insn that computed their value.
7735 Then REG1 will become a dead store and won't cloud the situation
7736 for later optimizations.
7737
7738 Do not make this change if REG1 is a hard register, because it will
7739 then be used in the sequel and we may be changing a two-operand insn
7740 into a three-operand insn.
7741
7742 Also do not do this if we are operating on a copy of INSN. */
7743
7744 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
7745 && NEXT_INSN (PREV_INSN (insn)) == insn
7746 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
7747 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
7748 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
7749 && (qty_first_reg[reg_qty[REGNO (SET_SRC (sets[0].rtl))]]
7750 == REGNO (SET_DEST (sets[0].rtl))))
7751 {
7752 rtx prev = PREV_INSN (insn);
7753 while (prev && GET_CODE (prev) == NOTE)
7754 prev = PREV_INSN (prev);
7755
7756 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
7757 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
7758 {
7759 rtx dest = SET_DEST (sets[0].rtl);
7760 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
7761
7762 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
7763 validate_change (insn, & SET_DEST (sets[0].rtl),
7764 SET_SRC (sets[0].rtl), 1);
7765 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
7766 apply_change_group ();
7767
7768 /* If REG1 was equivalent to a constant, REG0 is not. */
7769 if (note)
7770 PUT_REG_NOTE_KIND (note, REG_EQUAL);
7771
7772 /* If there was a REG_WAS_0 note on PREV, remove it. Move
7773 any REG_WAS_0 note on INSN to PREV. */
7774 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
7775 if (note)
7776 remove_note (prev, note);
7777
7778 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7779 if (note)
7780 {
7781 remove_note (insn, note);
7782 XEXP (note, 1) = REG_NOTES (prev);
7783 REG_NOTES (prev) = note;
7784 }
7785
7786 /* If INSN has a REG_EQUAL note, and this note mentions REG0,
7787 then we must delete it, because the value in REG0 has changed. */
7788 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7789 if (note && reg_mentioned_p (dest, XEXP (note, 0)))
7790 remove_note (insn, note);
7791 }
7792 }
7793
7794 /* If this is a conditional jump insn, record any known equivalences due to
7795 the condition being tested. */
7796
7797 last_jump_equiv_class = 0;
7798 if (GET_CODE (insn) == JUMP_INSN
7799 && n_sets == 1 && GET_CODE (x) == SET
7800 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
7801 record_jump_equiv (insn, 0);
7802
7803 #ifdef HAVE_cc0
7804 /* If the previous insn set CC0 and this insn no longer references CC0,
7805 delete the previous insn. Here we use the fact that nothing expects CC0
7806 to be valid over an insn, which is true until the final pass. */
7807 if (prev_insn && GET_CODE (prev_insn) == INSN
7808 && (tem = single_set (prev_insn)) != 0
7809 && SET_DEST (tem) == cc0_rtx
7810 && ! reg_mentioned_p (cc0_rtx, x))
7811 {
7812 PUT_CODE (prev_insn, NOTE);
7813 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
7814 NOTE_SOURCE_FILE (prev_insn) = 0;
7815 }
7816
7817 prev_insn_cc0 = this_insn_cc0;
7818 prev_insn_cc0_mode = this_insn_cc0_mode;
7819 #endif
7820
7821 prev_insn = insn;
7822 }
7823 \f
7824 /* Remove from the ahsh table all expressions that reference memory. */
7825 static void
7826 invalidate_memory ()
7827 {
7828 register int i;
7829 register struct table_elt *p, *next;
7830
7831 for (i = 0; i < NBUCKETS; i++)
7832 for (p = table[i]; p; p = next)
7833 {
7834 next = p->next_same_hash;
7835 if (p->in_memory)
7836 remove_from_table (p, i);
7837 }
7838 }
7839
7840 /* XXX ??? The name of this function bears little resemblance to
7841 what this function actually does. FIXME. */
7842 static int
7843 note_mem_written (addr)
7844 register rtx addr;
7845 {
7846 /* Pushing or popping the stack invalidates just the stack pointer. */
7847 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7848 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7849 && GET_CODE (XEXP (addr, 0)) == REG
7850 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7851 {
7852 if (reg_tick[STACK_POINTER_REGNUM] >= 0)
7853 reg_tick[STACK_POINTER_REGNUM]++;
7854
7855 /* This should be *very* rare. */
7856 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
7857 invalidate (stack_pointer_rtx, VOIDmode);
7858 return 1;
7859 }
7860 return 0;
7861 }
7862
7863 /* Perform invalidation on the basis of everything about an insn
7864 except for invalidating the actual places that are SET in it.
7865 This includes the places CLOBBERed, and anything that might
7866 alias with something that is SET or CLOBBERed.
7867
7868 X is the pattern of the insn. */
7869
7870 static void
7871 invalidate_from_clobbers (x)
7872 rtx x;
7873 {
7874 if (GET_CODE (x) == CLOBBER)
7875 {
7876 rtx ref = XEXP (x, 0);
7877 if (ref)
7878 {
7879 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7880 || GET_CODE (ref) == MEM)
7881 invalidate (ref, VOIDmode);
7882 else if (GET_CODE (ref) == STRICT_LOW_PART
7883 || GET_CODE (ref) == ZERO_EXTRACT)
7884 invalidate (XEXP (ref, 0), GET_MODE (ref));
7885 }
7886 }
7887 else if (GET_CODE (x) == PARALLEL)
7888 {
7889 register int i;
7890 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
7891 {
7892 register rtx y = XVECEXP (x, 0, i);
7893 if (GET_CODE (y) == CLOBBER)
7894 {
7895 rtx ref = XEXP (y, 0);
7896 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7897 || GET_CODE (ref) == MEM)
7898 invalidate (ref, VOIDmode);
7899 else if (GET_CODE (ref) == STRICT_LOW_PART
7900 || GET_CODE (ref) == ZERO_EXTRACT)
7901 invalidate (XEXP (ref, 0), GET_MODE (ref));
7902 }
7903 }
7904 }
7905 }
7906 \f
7907 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
7908 and replace any registers in them with either an equivalent constant
7909 or the canonical form of the register. If we are inside an address,
7910 only do this if the address remains valid.
7911
7912 OBJECT is 0 except when within a MEM in which case it is the MEM.
7913
7914 Return the replacement for X. */
7915
7916 static rtx
7917 cse_process_notes (x, object)
7918 rtx x;
7919 rtx object;
7920 {
7921 enum rtx_code code = GET_CODE (x);
7922 char *fmt = GET_RTX_FORMAT (code);
7923 int i;
7924
7925 switch (code)
7926 {
7927 case CONST_INT:
7928 case CONST:
7929 case SYMBOL_REF:
7930 case LABEL_REF:
7931 case CONST_DOUBLE:
7932 case PC:
7933 case CC0:
7934 case LO_SUM:
7935 return x;
7936
7937 case MEM:
7938 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
7939 return x;
7940
7941 case EXPR_LIST:
7942 case INSN_LIST:
7943 if (REG_NOTE_KIND (x) == REG_EQUAL)
7944 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
7945 if (XEXP (x, 1))
7946 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
7947 return x;
7948
7949 case SIGN_EXTEND:
7950 case ZERO_EXTEND:
7951 case SUBREG:
7952 {
7953 rtx new = cse_process_notes (XEXP (x, 0), object);
7954 /* We don't substitute VOIDmode constants into these rtx,
7955 since they would impede folding. */
7956 if (GET_MODE (new) != VOIDmode)
7957 validate_change (object, &XEXP (x, 0), new, 0);
7958 return x;
7959 }
7960
7961 case REG:
7962 i = reg_qty[REGNO (x)];
7963
7964 /* Return a constant or a constant register. */
7965 if (REGNO_QTY_VALID_P (REGNO (x))
7966 && qty_const[i] != 0
7967 && (CONSTANT_P (qty_const[i])
7968 || GET_CODE (qty_const[i]) == REG))
7969 {
7970 rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
7971 if (new)
7972 return new;
7973 }
7974
7975 /* Otherwise, canonicalize this register. */
7976 return canon_reg (x, NULL_RTX);
7977
7978 default:
7979 break;
7980 }
7981
7982 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7983 if (fmt[i] == 'e')
7984 validate_change (object, &XEXP (x, i),
7985 cse_process_notes (XEXP (x, i), object), 0);
7986
7987 return x;
7988 }
7989 \f
7990 /* Find common subexpressions between the end test of a loop and the beginning
7991 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
7992
7993 Often we have a loop where an expression in the exit test is used
7994 in the body of the loop. For example "while (*p) *q++ = *p++;".
7995 Because of the way we duplicate the loop exit test in front of the loop,
7996 however, we don't detect that common subexpression. This will be caught
7997 when global cse is implemented, but this is a quite common case.
7998
7999 This function handles the most common cases of these common expressions.
8000 It is called after we have processed the basic block ending with the
8001 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
8002 jumps to a label used only once. */
8003
8004 static void
8005 cse_around_loop (loop_start)
8006 rtx loop_start;
8007 {
8008 rtx insn;
8009 int i;
8010 struct table_elt *p;
8011
8012 /* If the jump at the end of the loop doesn't go to the start, we don't
8013 do anything. */
8014 for (insn = PREV_INSN (loop_start);
8015 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
8016 insn = PREV_INSN (insn))
8017 ;
8018
8019 if (insn == 0
8020 || GET_CODE (insn) != NOTE
8021 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
8022 return;
8023
8024 /* If the last insn of the loop (the end test) was an NE comparison,
8025 we will interpret it as an EQ comparison, since we fell through
8026 the loop. Any equivalences resulting from that comparison are
8027 therefore not valid and must be invalidated. */
8028 if (last_jump_equiv_class)
8029 for (p = last_jump_equiv_class->first_same_value; p;
8030 p = p->next_same_value)
8031 {
8032 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
8033 || (GET_CODE (p->exp) == SUBREG
8034 && GET_CODE (SUBREG_REG (p->exp)) == REG))
8035 invalidate (p->exp, VOIDmode);
8036 else if (GET_CODE (p->exp) == STRICT_LOW_PART
8037 || GET_CODE (p->exp) == ZERO_EXTRACT)
8038 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
8039 }
8040
8041 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
8042 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
8043
8044 The only thing we do with SET_DEST is invalidate entries, so we
8045 can safely process each SET in order. It is slightly less efficient
8046 to do so, but we only want to handle the most common cases.
8047
8048 The gen_move_insn call in cse_set_around_loop may create new pseudos.
8049 These pseudos won't have valid entries in any of the tables indexed
8050 by register number, such as reg_qty. We avoid out-of-range array
8051 accesses by not processing any instructions created after cse started. */
8052
8053 for (insn = NEXT_INSN (loop_start);
8054 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
8055 && INSN_UID (insn) < max_insn_uid
8056 && ! (GET_CODE (insn) == NOTE
8057 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
8058 insn = NEXT_INSN (insn))
8059 {
8060 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
8061 && (GET_CODE (PATTERN (insn)) == SET
8062 || GET_CODE (PATTERN (insn)) == CLOBBER))
8063 cse_set_around_loop (PATTERN (insn), insn, loop_start);
8064 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
8065 && GET_CODE (PATTERN (insn)) == PARALLEL)
8066 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
8067 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
8068 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
8069 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
8070 loop_start);
8071 }
8072 }
8073 \f
8074 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
8075 since they are done elsewhere. This function is called via note_stores. */
8076
8077 static void
8078 invalidate_skipped_set (dest, set)
8079 rtx set;
8080 rtx dest;
8081 {
8082 enum rtx_code code = GET_CODE (dest);
8083
8084 if (code == MEM
8085 && ! note_mem_written (dest) /* If this is not a stack push ... */
8086 /* There are times when an address can appear varying and be a PLUS
8087 during this scan when it would be a fixed address were we to know
8088 the proper equivalences. So invalidate all memory if there is
8089 a BLKmode or nonscalar memory reference or a reference to a
8090 variable address. */
8091 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
8092 || cse_rtx_varies_p (XEXP (dest, 0))))
8093 {
8094 invalidate_memory ();
8095 return;
8096 }
8097
8098 if (GET_CODE (set) == CLOBBER
8099 #ifdef HAVE_cc0
8100 || dest == cc0_rtx
8101 #endif
8102 || dest == pc_rtx)
8103 return;
8104
8105 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
8106 invalidate (XEXP (dest, 0), GET_MODE (dest));
8107 else if (code == REG || code == SUBREG || code == MEM)
8108 invalidate (dest, VOIDmode);
8109 }
8110
8111 /* Invalidate all insns from START up to the end of the function or the
8112 next label. This called when we wish to CSE around a block that is
8113 conditionally executed. */
8114
8115 static void
8116 invalidate_skipped_block (start)
8117 rtx start;
8118 {
8119 rtx insn;
8120
8121 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
8122 insn = NEXT_INSN (insn))
8123 {
8124 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
8125 continue;
8126
8127 if (GET_CODE (insn) == CALL_INSN)
8128 {
8129 if (! CONST_CALL_P (insn))
8130 invalidate_memory ();
8131 invalidate_for_call ();
8132 }
8133
8134 invalidate_from_clobbers (PATTERN (insn));
8135 note_stores (PATTERN (insn), invalidate_skipped_set);
8136 }
8137 }
8138 \f
8139 /* Used for communication between the following two routines; contains a
8140 value to be checked for modification. */
8141
8142 static rtx cse_check_loop_start_value;
8143
8144 /* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
8145 indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0. */
8146
8147 static void
8148 cse_check_loop_start (x, set)
8149 rtx x;
8150 rtx set ATTRIBUTE_UNUSED;
8151 {
8152 if (cse_check_loop_start_value == 0
8153 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
8154 return;
8155
8156 if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
8157 || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
8158 cse_check_loop_start_value = 0;
8159 }
8160
8161 /* X is a SET or CLOBBER contained in INSN that was found near the start of
8162 a loop that starts with the label at LOOP_START.
8163
8164 If X is a SET, we see if its SET_SRC is currently in our hash table.
8165 If so, we see if it has a value equal to some register used only in the
8166 loop exit code (as marked by jump.c).
8167
8168 If those two conditions are true, we search backwards from the start of
8169 the loop to see if that same value was loaded into a register that still
8170 retains its value at the start of the loop.
8171
8172 If so, we insert an insn after the load to copy the destination of that
8173 load into the equivalent register and (try to) replace our SET_SRC with that
8174 register.
8175
8176 In any event, we invalidate whatever this SET or CLOBBER modifies. */
8177
8178 static void
8179 cse_set_around_loop (x, insn, loop_start)
8180 rtx x;
8181 rtx insn;
8182 rtx loop_start;
8183 {
8184 struct table_elt *src_elt;
8185
8186 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
8187 are setting PC or CC0 or whose SET_SRC is already a register. */
8188 if (GET_CODE (x) == SET
8189 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
8190 && GET_CODE (SET_SRC (x)) != REG)
8191 {
8192 src_elt = lookup (SET_SRC (x),
8193 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
8194 GET_MODE (SET_DEST (x)));
8195
8196 if (src_elt)
8197 for (src_elt = src_elt->first_same_value; src_elt;
8198 src_elt = src_elt->next_same_value)
8199 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
8200 && COST (src_elt->exp) < COST (SET_SRC (x)))
8201 {
8202 rtx p, set;
8203
8204 /* Look for an insn in front of LOOP_START that sets
8205 something in the desired mode to SET_SRC (x) before we hit
8206 a label or CALL_INSN. */
8207
8208 for (p = prev_nonnote_insn (loop_start);
8209 p && GET_CODE (p) != CALL_INSN
8210 && GET_CODE (p) != CODE_LABEL;
8211 p = prev_nonnote_insn (p))
8212 if ((set = single_set (p)) != 0
8213 && GET_CODE (SET_DEST (set)) == REG
8214 && GET_MODE (SET_DEST (set)) == src_elt->mode
8215 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
8216 {
8217 /* We now have to ensure that nothing between P
8218 and LOOP_START modified anything referenced in
8219 SET_SRC (x). We know that nothing within the loop
8220 can modify it, or we would have invalidated it in
8221 the hash table. */
8222 rtx q;
8223
8224 cse_check_loop_start_value = SET_SRC (x);
8225 for (q = p; q != loop_start; q = NEXT_INSN (q))
8226 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
8227 note_stores (PATTERN (q), cse_check_loop_start);
8228
8229 /* If nothing was changed and we can replace our
8230 SET_SRC, add an insn after P to copy its destination
8231 to what we will be replacing SET_SRC with. */
8232 if (cse_check_loop_start_value
8233 && validate_change (insn, &SET_SRC (x),
8234 src_elt->exp, 0))
8235 {
8236 /* If this creates new pseudos, this is unsafe,
8237 because the regno of new pseudo is unsuitable
8238 to index into reg_qty when cse_insn processes
8239 the new insn. Therefore, if a new pseudo was
8240 created, discard this optimization. */
8241 int nregs = max_reg_num ();
8242 rtx move
8243 = gen_move_insn (src_elt->exp, SET_DEST (set));
8244 if (nregs != max_reg_num ())
8245 {
8246 if (! validate_change (insn, &SET_SRC (x),
8247 SET_SRC (set), 0))
8248 abort ();
8249 }
8250 else
8251 emit_insn_after (move, p);
8252 }
8253 break;
8254 }
8255 }
8256 }
8257
8258 /* Now invalidate anything modified by X. */
8259 note_mem_written (SET_DEST (x));
8260
8261 /* See comment on similar code in cse_insn for explanation of these tests. */
8262 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
8263 || GET_CODE (SET_DEST (x)) == MEM)
8264 invalidate (SET_DEST (x), VOIDmode);
8265 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
8266 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
8267 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
8268 }
8269 \f
8270 /* Find the end of INSN's basic block and return its range,
8271 the total number of SETs in all the insns of the block, the last insn of the
8272 block, and the branch path.
8273
8274 The branch path indicates which branches should be followed. If a non-zero
8275 path size is specified, the block should be rescanned and a different set
8276 of branches will be taken. The branch path is only used if
8277 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
8278
8279 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
8280 used to describe the block. It is filled in with the information about
8281 the current block. The incoming structure's branch path, if any, is used
8282 to construct the output branch path. */
8283
8284 void
8285 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
8286 rtx insn;
8287 struct cse_basic_block_data *data;
8288 int follow_jumps;
8289 int after_loop;
8290 int skip_blocks;
8291 {
8292 rtx p = insn, q;
8293 int nsets = 0;
8294 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
8295 rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
8296 int path_size = data->path_size;
8297 int path_entry = 0;
8298 int i;
8299
8300 /* Update the previous branch path, if any. If the last branch was
8301 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
8302 shorten the path by one and look at the previous branch. We know that
8303 at least one branch must have been taken if PATH_SIZE is non-zero. */
8304 while (path_size > 0)
8305 {
8306 if (data->path[path_size - 1].status != NOT_TAKEN)
8307 {
8308 data->path[path_size - 1].status = NOT_TAKEN;
8309 break;
8310 }
8311 else
8312 path_size--;
8313 }
8314
8315 /* Scan to end of this basic block. */
8316 while (p && GET_CODE (p) != CODE_LABEL)
8317 {
8318 /* Don't cse out the end of a loop. This makes a difference
8319 only for the unusual loops that always execute at least once;
8320 all other loops have labels there so we will stop in any case.
8321 Cse'ing out the end of the loop is dangerous because it
8322 might cause an invariant expression inside the loop
8323 to be reused after the end of the loop. This would make it
8324 hard to move the expression out of the loop in loop.c,
8325 especially if it is one of several equivalent expressions
8326 and loop.c would like to eliminate it.
8327
8328 If we are running after loop.c has finished, we can ignore
8329 the NOTE_INSN_LOOP_END. */
8330
8331 if (! after_loop && GET_CODE (p) == NOTE
8332 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
8333 break;
8334
8335 /* Don't cse over a call to setjmp; on some machines (eg vax)
8336 the regs restored by the longjmp come from
8337 a later time than the setjmp. */
8338 if (GET_CODE (p) == NOTE
8339 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
8340 break;
8341
8342 /* A PARALLEL can have lots of SETs in it,
8343 especially if it is really an ASM_OPERANDS. */
8344 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
8345 && GET_CODE (PATTERN (p)) == PARALLEL)
8346 nsets += XVECLEN (PATTERN (p), 0);
8347 else if (GET_CODE (p) != NOTE)
8348 nsets += 1;
8349
8350 /* Ignore insns made by CSE; they cannot affect the boundaries of
8351 the basic block. */
8352
8353 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
8354 high_cuid = INSN_CUID (p);
8355 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
8356 low_cuid = INSN_CUID (p);
8357
8358 /* See if this insn is in our branch path. If it is and we are to
8359 take it, do so. */
8360 if (path_entry < path_size && data->path[path_entry].branch == p)
8361 {
8362 if (data->path[path_entry].status != NOT_TAKEN)
8363 p = JUMP_LABEL (p);
8364
8365 /* Point to next entry in path, if any. */
8366 path_entry++;
8367 }
8368
8369 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
8370 was specified, we haven't reached our maximum path length, there are
8371 insns following the target of the jump, this is the only use of the
8372 jump label, and the target label is preceded by a BARRIER.
8373
8374 Alternatively, we can follow the jump if it branches around a
8375 block of code and there are no other branches into the block.
8376 In this case invalidate_skipped_block will be called to invalidate any
8377 registers set in the block when following the jump. */
8378
8379 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
8380 && GET_CODE (p) == JUMP_INSN
8381 && GET_CODE (PATTERN (p)) == SET
8382 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
8383 && JUMP_LABEL (p) != 0
8384 && LABEL_NUSES (JUMP_LABEL (p)) == 1
8385 && NEXT_INSN (JUMP_LABEL (p)) != 0)
8386 {
8387 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
8388 if ((GET_CODE (q) != NOTE
8389 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
8390 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
8391 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
8392 break;
8393
8394 /* If we ran into a BARRIER, this code is an extension of the
8395 basic block when the branch is taken. */
8396 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
8397 {
8398 /* Don't allow ourself to keep walking around an
8399 always-executed loop. */
8400 if (next_real_insn (q) == next)
8401 {
8402 p = NEXT_INSN (p);
8403 continue;
8404 }
8405
8406 /* Similarly, don't put a branch in our path more than once. */
8407 for (i = 0; i < path_entry; i++)
8408 if (data->path[i].branch == p)
8409 break;
8410
8411 if (i != path_entry)
8412 break;
8413
8414 data->path[path_entry].branch = p;
8415 data->path[path_entry++].status = TAKEN;
8416
8417 /* This branch now ends our path. It was possible that we
8418 didn't see this branch the last time around (when the
8419 insn in front of the target was a JUMP_INSN that was
8420 turned into a no-op). */
8421 path_size = path_entry;
8422
8423 p = JUMP_LABEL (p);
8424 /* Mark block so we won't scan it again later. */
8425 PUT_MODE (NEXT_INSN (p), QImode);
8426 }
8427 /* Detect a branch around a block of code. */
8428 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
8429 {
8430 register rtx tmp;
8431
8432 if (next_real_insn (q) == next)
8433 {
8434 p = NEXT_INSN (p);
8435 continue;
8436 }
8437
8438 for (i = 0; i < path_entry; i++)
8439 if (data->path[i].branch == p)
8440 break;
8441
8442 if (i != path_entry)
8443 break;
8444
8445 /* This is no_labels_between_p (p, q) with an added check for
8446 reaching the end of a function (in case Q precedes P). */
8447 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
8448 if (GET_CODE (tmp) == CODE_LABEL)
8449 break;
8450
8451 if (tmp == q)
8452 {
8453 data->path[path_entry].branch = p;
8454 data->path[path_entry++].status = AROUND;
8455
8456 path_size = path_entry;
8457
8458 p = JUMP_LABEL (p);
8459 /* Mark block so we won't scan it again later. */
8460 PUT_MODE (NEXT_INSN (p), QImode);
8461 }
8462 }
8463 }
8464 p = NEXT_INSN (p);
8465 }
8466
8467 data->low_cuid = low_cuid;
8468 data->high_cuid = high_cuid;
8469 data->nsets = nsets;
8470 data->last = p;
8471
8472 /* If all jumps in the path are not taken, set our path length to zero
8473 so a rescan won't be done. */
8474 for (i = path_size - 1; i >= 0; i--)
8475 if (data->path[i].status != NOT_TAKEN)
8476 break;
8477
8478 if (i == -1)
8479 data->path_size = 0;
8480 else
8481 data->path_size = path_size;
8482
8483 /* End the current branch path. */
8484 data->path[path_size].branch = 0;
8485 }
8486 \f
8487 /* Perform cse on the instructions of a function.
8488 F is the first instruction.
8489 NREGS is one plus the highest pseudo-reg number used in the instruction.
8490
8491 AFTER_LOOP is 1 if this is the cse call done after loop optimization
8492 (only if -frerun-cse-after-loop).
8493
8494 Returns 1 if jump_optimize should be redone due to simplifications
8495 in conditional jump instructions. */
8496
8497 int
8498 cse_main (f, nregs, after_loop, file)
8499 rtx f;
8500 int nregs;
8501 int after_loop;
8502 FILE *file;
8503 {
8504 struct cse_basic_block_data val;
8505 register rtx insn = f;
8506 register int i;
8507
8508 cse_jumps_altered = 0;
8509 recorded_label_ref = 0;
8510 constant_pool_entries_cost = 0;
8511 val.path_size = 0;
8512
8513 init_recog ();
8514 init_alias_analysis ();
8515
8516 max_reg = nregs;
8517
8518 max_insn_uid = get_max_uid ();
8519
8520 all_minus_one = (int *) alloca (nregs * sizeof (int));
8521 consec_ints = (int *) alloca (nregs * sizeof (int));
8522
8523 for (i = 0; i < nregs; i++)
8524 {
8525 all_minus_one[i] = -1;
8526 consec_ints[i] = i;
8527 }
8528
8529 reg_next_eqv = (int *) alloca (nregs * sizeof (int));
8530 reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
8531 reg_qty = (int *) alloca (nregs * sizeof (int));
8532 reg_in_table = (int *) alloca (nregs * sizeof (int));
8533 reg_tick = (int *) alloca (nregs * sizeof (int));
8534
8535 #ifdef LOAD_EXTEND_OP
8536
8537 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
8538 and change the code and mode as appropriate. */
8539 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
8540 #endif
8541
8542 /* Discard all the free elements of the previous function
8543 since they are allocated in the temporarily obstack. */
8544 bzero ((char *) table, sizeof table);
8545 free_element_chain = 0;
8546 n_elements_made = 0;
8547
8548 /* Find the largest uid. */
8549
8550 max_uid = get_max_uid ();
8551 uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
8552 bzero ((char *) uid_cuid, (max_uid + 1) * sizeof (int));
8553
8554 /* Compute the mapping from uids to cuids.
8555 CUIDs are numbers assigned to insns, like uids,
8556 except that cuids increase monotonically through the code.
8557 Don't assign cuids to line-number NOTEs, so that the distance in cuids
8558 between two insns is not affected by -g. */
8559
8560 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
8561 {
8562 if (GET_CODE (insn) != NOTE
8563 || NOTE_LINE_NUMBER (insn) < 0)
8564 INSN_CUID (insn) = ++i;
8565 else
8566 /* Give a line number note the same cuid as preceding insn. */
8567 INSN_CUID (insn) = i;
8568 }
8569
8570 /* Initialize which registers are clobbered by calls. */
8571
8572 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
8573
8574 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8575 if ((call_used_regs[i]
8576 /* Used to check !fixed_regs[i] here, but that isn't safe;
8577 fixed regs are still call-clobbered, and sched can get
8578 confused if they can "live across calls".
8579
8580 The frame pointer is always preserved across calls. The arg
8581 pointer is if it is fixed. The stack pointer usually is, unless
8582 RETURN_POPS_ARGS, in which case an explicit CLOBBER
8583 will be present. If we are generating PIC code, the PIC offset
8584 table register is preserved across calls. */
8585
8586 && i != STACK_POINTER_REGNUM
8587 && i != FRAME_POINTER_REGNUM
8588 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
8589 && i != HARD_FRAME_POINTER_REGNUM
8590 #endif
8591 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
8592 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
8593 #endif
8594 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
8595 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
8596 #endif
8597 )
8598 || global_regs[i])
8599 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
8600
8601 /* Loop over basic blocks.
8602 Compute the maximum number of qty's needed for each basic block
8603 (which is 2 for each SET). */
8604 insn = f;
8605 while (insn)
8606 {
8607 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
8608 flag_cse_skip_blocks);
8609
8610 /* If this basic block was already processed or has no sets, skip it. */
8611 if (val.nsets == 0 || GET_MODE (insn) == QImode)
8612 {
8613 PUT_MODE (insn, VOIDmode);
8614 insn = (val.last ? NEXT_INSN (val.last) : 0);
8615 val.path_size = 0;
8616 continue;
8617 }
8618
8619 cse_basic_block_start = val.low_cuid;
8620 cse_basic_block_end = val.high_cuid;
8621 max_qty = val.nsets * 2;
8622
8623 if (file)
8624 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
8625 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
8626 val.nsets);
8627
8628 /* Make MAX_QTY bigger to give us room to optimize
8629 past the end of this basic block, if that should prove useful. */
8630 if (max_qty < 500)
8631 max_qty = 500;
8632
8633 max_qty += max_reg;
8634
8635 /* If this basic block is being extended by following certain jumps,
8636 (see `cse_end_of_basic_block'), we reprocess the code from the start.
8637 Otherwise, we start after this basic block. */
8638 if (val.path_size > 0)
8639 cse_basic_block (insn, val.last, val.path, 0);
8640 else
8641 {
8642 int old_cse_jumps_altered = cse_jumps_altered;
8643 rtx temp;
8644
8645 /* When cse changes a conditional jump to an unconditional
8646 jump, we want to reprocess the block, since it will give
8647 us a new branch path to investigate. */
8648 cse_jumps_altered = 0;
8649 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
8650 if (cse_jumps_altered == 0
8651 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8652 insn = temp;
8653
8654 cse_jumps_altered |= old_cse_jumps_altered;
8655 }
8656
8657 #ifdef USE_C_ALLOCA
8658 alloca (0);
8659 #endif
8660 }
8661
8662 /* Tell refers_to_mem_p that qty_const info is not available. */
8663 qty_const = 0;
8664
8665 if (max_elements_made < n_elements_made)
8666 max_elements_made = n_elements_made;
8667
8668 return cse_jumps_altered || recorded_label_ref;
8669 }
8670
8671 /* Process a single basic block. FROM and TO and the limits of the basic
8672 block. NEXT_BRANCH points to the branch path when following jumps or
8673 a null path when not following jumps.
8674
8675 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
8676 loop. This is true when we are being called for the last time on a
8677 block and this CSE pass is before loop.c. */
8678
8679 static rtx
8680 cse_basic_block (from, to, next_branch, around_loop)
8681 register rtx from, to;
8682 struct branch_path *next_branch;
8683 int around_loop;
8684 {
8685 register rtx insn;
8686 int to_usage = 0;
8687 rtx libcall_insn = NULL_RTX;
8688 int num_insns = 0;
8689
8690 /* Each of these arrays is undefined before max_reg, so only allocate
8691 the space actually needed and adjust the start below. */
8692
8693 qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8694 qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8695 qty_mode= (enum machine_mode *) alloca ((max_qty - max_reg) * sizeof (enum machine_mode));
8696 qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8697 qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8698 qty_comparison_code
8699 = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
8700 qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8701 qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8702
8703 qty_first_reg -= max_reg;
8704 qty_last_reg -= max_reg;
8705 qty_mode -= max_reg;
8706 qty_const -= max_reg;
8707 qty_const_insn -= max_reg;
8708 qty_comparison_code -= max_reg;
8709 qty_comparison_qty -= max_reg;
8710 qty_comparison_const -= max_reg;
8711
8712 new_basic_block ();
8713
8714 /* TO might be a label. If so, protect it from being deleted. */
8715 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8716 ++LABEL_NUSES (to);
8717
8718 for (insn = from; insn != to; insn = NEXT_INSN (insn))
8719 {
8720 register enum rtx_code code = GET_CODE (insn);
8721 int i;
8722 struct table_elt *p;
8723
8724 /* If we have processed 1,000 insns, flush the hash table to
8725 avoid extreme quadratic behavior. We must not include NOTEs
8726 in the count since there may be more or them when generating
8727 debugging information. If we clear the table at different
8728 times, code generated with -g -O might be different than code
8729 generated with -O but not -g.
8730
8731 ??? This is a real kludge and needs to be done some other way.
8732 Perhaps for 2.9. */
8733 if (code != NOTE && num_insns++ > 1000)
8734 {
8735 for (i = 0; i < NBUCKETS; i++)
8736 for (p = table[i]; p; p = table[i])
8737 {
8738 /* Note that invalidate can remove elements
8739 after P in the current hash chain. */
8740 if (GET_CODE (p->exp) == REG)
8741 invalidate (p->exp, p->mode);
8742 else
8743 remove_from_table (p, i);
8744 }
8745
8746 num_insns = 0;
8747 }
8748
8749 /* See if this is a branch that is part of the path. If so, and it is
8750 to be taken, do so. */
8751 if (next_branch->branch == insn)
8752 {
8753 enum taken status = next_branch++->status;
8754 if (status != NOT_TAKEN)
8755 {
8756 if (status == TAKEN)
8757 record_jump_equiv (insn, 1);
8758 else
8759 invalidate_skipped_block (NEXT_INSN (insn));
8760
8761 /* Set the last insn as the jump insn; it doesn't affect cc0.
8762 Then follow this branch. */
8763 #ifdef HAVE_cc0
8764 prev_insn_cc0 = 0;
8765 #endif
8766 prev_insn = insn;
8767 insn = JUMP_LABEL (insn);
8768 continue;
8769 }
8770 }
8771
8772 if (GET_MODE (insn) == QImode)
8773 PUT_MODE (insn, VOIDmode);
8774
8775 if (GET_RTX_CLASS (code) == 'i')
8776 {
8777 rtx p;
8778
8779 /* Process notes first so we have all notes in canonical forms when
8780 looking for duplicate operations. */
8781
8782 if (REG_NOTES (insn))
8783 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
8784
8785 /* Track when we are inside in LIBCALL block. Inside such a block,
8786 we do not want to record destinations. The last insn of a
8787 LIBCALL block is not considered to be part of the block, since
8788 its destination is the result of the block and hence should be
8789 recorded. */
8790
8791 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
8792 libcall_insn = XEXP (p, 0);
8793 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8794 libcall_insn = NULL_RTX;
8795
8796 cse_insn (insn, libcall_insn);
8797 }
8798
8799 /* If INSN is now an unconditional jump, skip to the end of our
8800 basic block by pretending that we just did the last insn in the
8801 basic block. If we are jumping to the end of our block, show
8802 that we can have one usage of TO. */
8803
8804 if (simplejump_p (insn))
8805 {
8806 if (to == 0)
8807 return 0;
8808
8809 if (JUMP_LABEL (insn) == to)
8810 to_usage = 1;
8811
8812 /* Maybe TO was deleted because the jump is unconditional.
8813 If so, there is nothing left in this basic block. */
8814 /* ??? Perhaps it would be smarter to set TO
8815 to whatever follows this insn,
8816 and pretend the basic block had always ended here. */
8817 if (INSN_DELETED_P (to))
8818 break;
8819
8820 insn = PREV_INSN (to);
8821 }
8822
8823 /* See if it is ok to keep on going past the label
8824 which used to end our basic block. Remember that we incremented
8825 the count of that label, so we decrement it here. If we made
8826 a jump unconditional, TO_USAGE will be one; in that case, we don't
8827 want to count the use in that jump. */
8828
8829 if (to != 0 && NEXT_INSN (insn) == to
8830 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
8831 {
8832 struct cse_basic_block_data val;
8833 rtx prev;
8834
8835 insn = NEXT_INSN (to);
8836
8837 if (LABEL_NUSES (to) == 0)
8838 insn = delete_insn (to);
8839
8840 /* If TO was the last insn in the function, we are done. */
8841 if (insn == 0)
8842 return 0;
8843
8844 /* If TO was preceded by a BARRIER we are done with this block
8845 because it has no continuation. */
8846 prev = prev_nonnote_insn (to);
8847 if (prev && GET_CODE (prev) == BARRIER)
8848 return insn;
8849
8850 /* Find the end of the following block. Note that we won't be
8851 following branches in this case. */
8852 to_usage = 0;
8853 val.path_size = 0;
8854 cse_end_of_basic_block (insn, &val, 0, 0, 0);
8855
8856 /* If the tables we allocated have enough space left
8857 to handle all the SETs in the next basic block,
8858 continue through it. Otherwise, return,
8859 and that block will be scanned individually. */
8860 if (val.nsets * 2 + next_qty > max_qty)
8861 break;
8862
8863 cse_basic_block_start = val.low_cuid;
8864 cse_basic_block_end = val.high_cuid;
8865 to = val.last;
8866
8867 /* Prevent TO from being deleted if it is a label. */
8868 if (to != 0 && GET_CODE (to) == CODE_LABEL)
8869 ++LABEL_NUSES (to);
8870
8871 /* Back up so we process the first insn in the extension. */
8872 insn = PREV_INSN (insn);
8873 }
8874 }
8875
8876 if (next_qty > max_qty)
8877 abort ();
8878
8879 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
8880 the previous insn is the only insn that branches to the head of a loop,
8881 we can cse into the loop. Don't do this if we changed the jump
8882 structure of a loop unless we aren't going to be following jumps. */
8883
8884 if ((cse_jumps_altered == 0
8885 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8886 && around_loop && to != 0
8887 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
8888 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
8889 && JUMP_LABEL (PREV_INSN (to)) != 0
8890 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
8891 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
8892
8893 return to ? NEXT_INSN (to) : 0;
8894 }
8895 \f
8896 /* Count the number of times registers are used (not set) in X.
8897 COUNTS is an array in which we accumulate the count, INCR is how much
8898 we count each register usage.
8899
8900 Don't count a usage of DEST, which is the SET_DEST of a SET which
8901 contains X in its SET_SRC. This is because such a SET does not
8902 modify the liveness of DEST. */
8903
8904 static void
8905 count_reg_usage (x, counts, dest, incr)
8906 rtx x;
8907 int *counts;
8908 rtx dest;
8909 int incr;
8910 {
8911 enum rtx_code code;
8912 char *fmt;
8913 int i, j;
8914
8915 if (x == 0)
8916 return;
8917
8918 switch (code = GET_CODE (x))
8919 {
8920 case REG:
8921 if (x != dest)
8922 counts[REGNO (x)] += incr;
8923 return;
8924
8925 case PC:
8926 case CC0:
8927 case CONST:
8928 case CONST_INT:
8929 case CONST_DOUBLE:
8930 case SYMBOL_REF:
8931 case LABEL_REF:
8932 return;
8933
8934 case CLOBBER:
8935 /* If we are clobbering a MEM, mark any registers inside the address
8936 as being used. */
8937 if (GET_CODE (XEXP (x, 0)) == MEM)
8938 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
8939 return;
8940
8941 case SET:
8942 /* Unless we are setting a REG, count everything in SET_DEST. */
8943 if (GET_CODE (SET_DEST (x)) != REG)
8944 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
8945
8946 /* If SRC has side-effects, then we can't delete this insn, so the
8947 usage of SET_DEST inside SRC counts.
8948
8949 ??? Strictly-speaking, we might be preserving this insn
8950 because some other SET has side-effects, but that's hard
8951 to do and can't happen now. */
8952 count_reg_usage (SET_SRC (x), counts,
8953 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
8954 incr);
8955 return;
8956
8957 case CALL_INSN:
8958 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
8959
8960 /* ... falls through ... */
8961 case INSN:
8962 case JUMP_INSN:
8963 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
8964
8965 /* Things used in a REG_EQUAL note aren't dead since loop may try to
8966 use them. */
8967
8968 count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
8969 return;
8970
8971 case EXPR_LIST:
8972 case INSN_LIST:
8973 if (REG_NOTE_KIND (x) == REG_EQUAL
8974 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE))
8975 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
8976 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
8977 return;
8978
8979 default:
8980 break;
8981 }
8982
8983 fmt = GET_RTX_FORMAT (code);
8984 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8985 {
8986 if (fmt[i] == 'e')
8987 count_reg_usage (XEXP (x, i), counts, dest, incr);
8988 else if (fmt[i] == 'E')
8989 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8990 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
8991 }
8992 }
8993 \f
8994 /* Scan all the insns and delete any that are dead; i.e., they store a register
8995 that is never used or they copy a register to itself.
8996
8997 This is used to remove insns made obviously dead by cse, loop or other
8998 optimizations. It improves the heuristics in loop since it won't try to
8999 move dead invariants out of loops or make givs for dead quantities. The
9000 remaining passes of the compilation are also sped up. */
9001
9002 void
9003 delete_trivially_dead_insns (insns, nreg)
9004 rtx insns;
9005 int nreg;
9006 {
9007 int *counts = (int *) alloca (nreg * sizeof (int));
9008 rtx insn, prev;
9009 #ifdef HAVE_cc0
9010 rtx tem;
9011 #endif
9012 int i;
9013 int in_libcall = 0, dead_libcall = 0;
9014
9015 /* First count the number of times each register is used. */
9016 bzero ((char *) counts, sizeof (int) * nreg);
9017 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
9018 count_reg_usage (insn, counts, NULL_RTX, 1);
9019
9020 /* Go from the last insn to the first and delete insns that only set unused
9021 registers or copy a register to itself. As we delete an insn, remove
9022 usage counts for registers it uses. */
9023 for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
9024 {
9025 int live_insn = 0;
9026 rtx note;
9027
9028 prev = prev_real_insn (insn);
9029
9030 /* Don't delete any insns that are part of a libcall block unless
9031 we can delete the whole libcall block.
9032
9033 Flow or loop might get confused if we did that. Remember
9034 that we are scanning backwards. */
9035 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
9036 {
9037 in_libcall = 1;
9038 live_insn = 1;
9039 dead_libcall = 0;
9040
9041 /* See if there's a REG_EQUAL note on this insn and try to
9042 replace the source with the REG_EQUAL expression.
9043
9044 We assume that insns with REG_RETVALs can only be reg->reg
9045 copies at this point. */
9046 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
9047 if (note)
9048 {
9049 rtx set = single_set (insn);
9050 if (set
9051 && validate_change (insn, &SET_SRC (set), XEXP (note, 0), 0))
9052 {
9053 remove_note (insn,
9054 find_reg_note (insn, REG_RETVAL, NULL_RTX));
9055 dead_libcall = 1;
9056 }
9057 }
9058 }
9059 else if (in_libcall)
9060 live_insn = ! dead_libcall;
9061 else if (GET_CODE (PATTERN (insn)) == SET)
9062 {
9063 if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
9064 && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
9065 ;
9066
9067 #ifdef HAVE_cc0
9068 else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
9069 && ! side_effects_p (SET_SRC (PATTERN (insn)))
9070 && ((tem = next_nonnote_insn (insn)) == 0
9071 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
9072 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
9073 ;
9074 #endif
9075 else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
9076 || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
9077 || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
9078 || side_effects_p (SET_SRC (PATTERN (insn))))
9079 live_insn = 1;
9080 }
9081 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
9082 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
9083 {
9084 rtx elt = XVECEXP (PATTERN (insn), 0, i);
9085
9086 if (GET_CODE (elt) == SET)
9087 {
9088 if (GET_CODE (SET_DEST (elt)) == REG
9089 && SET_DEST (elt) == SET_SRC (elt))
9090 ;
9091
9092 #ifdef HAVE_cc0
9093 else if (GET_CODE (SET_DEST (elt)) == CC0
9094 && ! side_effects_p (SET_SRC (elt))
9095 && ((tem = next_nonnote_insn (insn)) == 0
9096 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
9097 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
9098 ;
9099 #endif
9100 else if (GET_CODE (SET_DEST (elt)) != REG
9101 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
9102 || counts[REGNO (SET_DEST (elt))] != 0
9103 || side_effects_p (SET_SRC (elt)))
9104 live_insn = 1;
9105 }
9106 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
9107 live_insn = 1;
9108 }
9109 else
9110 live_insn = 1;
9111
9112 /* If this is a dead insn, delete it and show registers in it aren't
9113 being used. */
9114
9115 if (! live_insn)
9116 {
9117 count_reg_usage (insn, counts, NULL_RTX, -1);
9118 delete_insn (insn);
9119 }
9120
9121 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
9122 {
9123 in_libcall = 0;
9124 dead_libcall = 0;
9125 }
9126 }
9127 }