/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 88, 89, 92-7, 1998, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"
#include <setjmp.h>

#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "splay-tree.h"

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge; so, at each label, we forget all that is
   known and start fresh.  This can be described as processing each
   basic block separately.  Note, however, that these are not quite
   the same as the basic blocks found by a later pass and used for
   data flow analysis and register packing.  We do not need to start fresh
   after a conditional jump instruction if there is no label there.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and several vectors together
   with "quantity numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

   Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

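   For example, after `(set (reg 102) (reg 101))' is scanned, reg 102
   is given reg 101's quantity number, so the two registers are known
   to hold the same value; a later `(set (reg 101) (mem ...))' then
   gives reg 101 a fresh quantity number while leaving what is known
   about reg 102 intact.
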
   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_...'
   variables should be referenced with an index below `max_reg'.

   We also maintain a bidirectional chain of registers for each
   quantity number.  `qty_first_reg', `qty_last_reg',
   `reg_next_eqv' and `reg_prev_eqv' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, then REG expressions
   for both registers, in that quantity's `qty_mode', must be in the
   hash table and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity of one of the
   registers does not have the same mode as those expressions.

   Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate element of qty_const.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate element
   of qty_const.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

   Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and `qty_first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

   1. If the value changing is in memory, except in special cases
   ANYTHING referring to memory could be changed.  That is because
   nobody knows where a pointer does not point.
   The function `invalidate_memory' removes what is necessary.

   The special cases are when the address is constant or is
   a constant plus a fixed register such as the frame pointer
   or a static chain pointer.  When such addresses are stored in,
   we can tell exactly which other such addresses must be invalidated
   due to overlap.  `invalidate' does this.
   All expressions that refer to non-constant
   memory addresses are also invalidated.  `invalidate_memory' does this.

   2. If the value changing is a register, all expressions
   containing references to that register, and only those,
   must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match causes the existing entries to be ignored if anyone tries
   to match them.

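   For example, suppose (plus (reg 101) (const_int 4)) is entered in
   the table while reg_tick[101] == 3; reg_in_table[101] is then set
   to 3.  A later store into (reg 101) bumps reg_tick[101] to 4, so
   the stale table entry no longer matches; only when a new expression
   mentioning (reg 101) is about to be entered does mention_regs scan
   the table and remove the stale references.
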
   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

   Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */

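/* As a minimal illustration (hypothetical user code, not part of this
   file): within one extended basic block, the first computation of
   `a + b' below is entered in the hash table, and the second is
   replaced by the register already known to hold that value.  */
#if 0
int
cse_example (int a, int b)
{
  int x = a + b;	/* (plus a b) is recorded in the table.  */
  int y = a + b;	/* Equivalent expression; becomes a copy of x.  */
  return x - y;
}
#endif
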
/* One plus largest register number used in this function.  */

static int max_reg;

/* One plus largest instruction UID used in this function at time of
   cse_main call.  */

static int max_insn_uid;

/* Length of vectors indexed by quantity number.
   We know in advance we will not need a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Indexed by quantity number, gives the first (or last) register
   in the chain of registers that currently contain this quantity.  */

static int *qty_first_reg;
static int *qty_last_reg;

/* Indexed by quantity number, gives the mode of the quantity.  */

static enum machine_mode *qty_mode;

/* Indexed by quantity number, gives the rtx of the constant value of the
   quantity, or zero if it does not have a known value.
   A sum of the frame pointer (or arg pointer) plus a constant
   can also be entered here.  */

static rtx *qty_const;

/* Indexed by qty number, gives the insn that stored the constant value
   recorded in `qty_const'.  */

static rtx *qty_const_insn;

/* The next three variables are used to track when a comparison between a
   quantity and some constant or register has been seen.  In that case, we
   know the results of the comparison in case we see it again.  These variables
   record a comparison that is known to be true.  */

/* Indexed by qty number, gives the rtx code of a comparison with a known
   result involving this quantity.  If none, it is UNKNOWN.  */
static enum rtx_code *qty_comparison_code;

/* Indexed by qty number, gives the constant being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a constant, it is zero.  */

static rtx *qty_comparison_const;

/* Indexed by qty number, gives the quantity being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a register, it is -1.  */

static int *qty_comparison_qty;

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;

/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_next_eqv[N] is undefined.  */

static int *reg_next_eqv;
static int *reg_prev_eqv;

struct cse_reg_info {
  union {
    /* The number of times the register has been altered in the current
       basic block.  */
    int reg_tick;

    /* The next cse_reg_info structure in the free list.  */
    struct cse_reg_info *next;
  } variant;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The quantity number of the register's current contents.  */
  int reg_qty;
};

/* A free list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_free_list;

/* A mapping from registers to cse_reg_info data structures.  */
static splay_tree cse_reg_info_tree;

/* The last lookup we did into the cse_reg_info_tree.  This allows us
   to cache repeated lookups.  */
static int cached_regno;
static struct cse_reg_info *cached_cse_reg_info;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* A HARD_REG_SET containing all the hard registers that are invalidated
   by a CALL_INSN.  */

static HARD_REG_SET regs_invalidated_by_call;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we put a LABEL_REF into the hash table.  Since we may have put
   it into an INSN without a REG_LABEL, we have to rerun jump after CSE
   to put in the note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for a load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* canon_hash stores 1 in hash_arg_in_struct
   if it notices a reference to memory that's part of a structure.  */

static int hash_arg_in_struct;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `in_struct' field is nonzero for elements that
   involve any reference to memory inside a structure or array.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */


struct table_elt
{
  rtx exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  enum machine_mode mode;
  char in_memory;
  char in_struct;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define NBUCKETS 31

/* Compute hash code of X in mode M.  Special-case the case where X is
   a pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M) \
 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) % NBUCKETS \
  : canon_hash (X, M) % NBUCKETS)

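/* Note that a pseudo register's bucket depends only on its quantity
   number, so registers currently known to be equivalent hash into the
   same bucket; everything else goes through canon_hash, which also
   hashes register references by quantity number (and may set
   `do_not_record' for certain hard registers).  */
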
/* Determine whether register number N is considered a fixed register for CSE.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed,
   but not if it is an overlapping register.  */
#ifdef OVERLAPPING_REGNO_P
#define FIXED_REGNO_P(N)  \
  (((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
    || fixed_regs[N] || global_regs[N])   \
   && ! OVERLAPPING_REGNO_P ((N)))
#else
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])
#endif

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
   || ((N) < FIRST_PSEUDO_REGISTER \
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

/* A register is cheap if it is a user variable assigned to the register
   or if its register number always corresponds to a cheap register.  */

#define CHEAP_REG(N) \
  ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER) \
   || CHEAP_REGNO (REGNO (N)))

#define COST(X) \
  (GET_CODE (X) == REG \
   ? (CHEAP_REG (X) ? 0 \
      : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1 \
      : 2) \
   : notreg_cost (X))
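
/* For example, COST yields 0 for the frame pointer or another cheap
   register, 1 for an ordinary pseudo register, and 2 for a non-fixed
   hard register, so sorting an equivalence class by cost places
   registers ahead of equivalent non-constant expressions.  */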

/* Get the info associated with register N.  */

#define GET_CSE_REG_INFO(N) \
  (((N) == cached_regno && cached_cse_reg_info) \
   ? cached_cse_reg_info : get_cse_reg_info ((N)))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->variant.reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)

/* Get the quantity number for REG.  */

#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)

/* Determine if the quantity number for register X represents a valid index
   into the `qty_...' variables.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (N))

#ifdef ADDRESS_COST
/* The ADDRESS_COST macro does not deal with ADDRESSOF nodes.  But,
   during CSE, such nodes are present.  Using an ADDRESSOF node which
   refers to the address of a REG is a good thing because we can then
   turn (MEM (ADDRESSOF (REG))) into just plain REG.  */
#define CSE_ADDRESS_COST(RTX) \
  ((GET_CODE (RTX) == ADDRESSOF && REG_P (XEXP ((RTX), 0))) \
   ? -1 : ADDRESS_COST (RTX))
#endif

static struct table_elt *table[NBUCKETS];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;

/* Define maximum length of a branch path.  */

#define PATHLENGTH 10

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data {
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path {
    /* The branch insn.  */
    rtx branch;
    /* Whether it should be taken or not.  AROUND is the same as TAKEN
       except that it is used when the destination label is not preceded
       by a BARRIER.  */
    enum taken {TAKEN, NOT_TAKEN, AROUND} status;
  } path[PATHLENGTH];
};

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

#define FIXED_BASE_PLUS_P(X) \
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
   || (X) == arg_pointer_rtx \
   || (X) == virtual_stack_vars_rtx \
   || (X) == virtual_incoming_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx \
	   || XEXP (X, 0) == hard_frame_pointer_rtx \
	   || XEXP (X, 0) == arg_pointer_rtx \
	   || XEXP (X, 0) == virtual_stack_vars_rtx \
	   || XEXP (X, 0) == virtual_incoming_args_rtx)) \
   || GET_CODE (X) == ADDRESSOF)

/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P; however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused.  */

#define NONZERO_BASE_PLUS_P(X) \
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
   || (X) == virtual_stack_vars_rtx \
   || (X) == virtual_incoming_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx \
	   || XEXP (X, 0) == hard_frame_pointer_rtx \
	   || XEXP (X, 0) == arg_pointer_rtx \
	   || XEXP (X, 0) == virtual_stack_vars_rtx \
	   || XEXP (X, 0) == virtual_incoming_args_rtx)) \
   || (X) == stack_pointer_rtx \
   || (X) == virtual_stack_dynamic_rtx \
   || (X) == virtual_outgoing_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx \
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx \
	   || XEXP (X, 0) == virtual_outgoing_args_rtx)) \
   || GET_CODE (X) == ADDRESSOF)

static int notreg_cost PROTO((rtx));
static void new_basic_block PROTO((void));
static void make_new_qty PROTO((int));
static void make_regs_eqv PROTO((int, int));
static void delete_reg_equiv PROTO((int));
static int mention_regs PROTO((rtx));
static int insert_regs PROTO((rtx, struct table_elt *, int));
static void free_element PROTO((struct table_elt *));
static void remove_from_table PROTO((struct table_elt *, unsigned));
static struct table_elt *get_element PROTO((void));
static struct table_elt *lookup PROTO((rtx, unsigned, enum machine_mode)),
  *lookup_for_remove PROTO((rtx, unsigned, enum machine_mode));
static rtx lookup_as_function PROTO((rtx, enum rtx_code));
static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
				       enum machine_mode));
static void merge_equiv_classes PROTO((struct table_elt *,
				       struct table_elt *));
static void invalidate PROTO((rtx, enum machine_mode));
static int cse_rtx_varies_p PROTO((rtx));
static void remove_invalid_refs PROTO((int));
static void remove_invalid_subreg_refs PROTO((int, int, enum machine_mode));
static void rehash_using_reg PROTO((rtx));
static void invalidate_memory PROTO((void));
static void invalidate_for_call PROTO((void));
static rtx use_related_value PROTO((rtx, struct table_elt *));
static unsigned canon_hash PROTO((rtx, enum machine_mode));
static unsigned safe_hash PROTO((rtx, enum machine_mode));
static int exp_equiv_p PROTO((rtx, rtx, int, int));
static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
						     HOST_WIDE_INT *,
						     HOST_WIDE_INT *));
static int refers_to_p PROTO((rtx, rtx));
static rtx canon_reg PROTO((rtx, rtx));
static void find_best_addr PROTO((rtx, rtx *));
static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
						 enum machine_mode *,
						 enum machine_mode *));
static rtx cse_gen_binary PROTO((enum rtx_code, enum machine_mode,
				 rtx, rtx));
static rtx simplify_plus_minus PROTO((enum rtx_code, enum machine_mode,
				      rtx, rtx));
static rtx fold_rtx PROTO((rtx, rtx));
static rtx equiv_constant PROTO((rtx));
static void record_jump_equiv PROTO((rtx, int));
static void record_jump_cond PROTO((enum rtx_code, enum machine_mode,
				    rtx, rtx, int));
static void cse_insn PROTO((rtx, rtx));
static int note_mem_written PROTO((rtx));
static void invalidate_from_clobbers PROTO((rtx));
static rtx cse_process_notes PROTO((rtx, rtx));
static void cse_around_loop PROTO((rtx));
static void invalidate_skipped_set PROTO((rtx, rtx));
static void invalidate_skipped_block PROTO((rtx));
static void cse_check_loop_start PROTO((rtx, rtx));
static void cse_set_around_loop PROTO((rtx, rtx, rtx));
static rtx cse_basic_block PROTO((rtx, rtx, struct branch_path *, int));
static void count_reg_usage PROTO((rtx, int *, rtx, int));
extern void dump_class PROTO((struct table_elt *));
static void check_fold_consts PROTO((PTR));
static struct cse_reg_info *get_cse_reg_info PROTO((int));
static void free_cse_reg_info PROTO((splay_tree_value));
static void flush_hash_table PROTO((void));

extern int rtx_equal_function_value_matters;
\f
/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (classp)
     struct table_elt *classp;
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}
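
/* Since dump_class has external linkage, it is most convenient to use
   from a debugger; e.g. from gdb, `call dump_class (elt)' while
   stopped in one of the routines below prints ELT's whole
   equivalence chain.  */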

/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (x)
     rtx x;
{
  return ((GET_CODE (x) == SUBREG
	   && GET_CODE (SUBREG_REG (x)) == REG
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? (CHEAP_REG (SUBREG_REG (x)) ? 0
	     : (REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER ? 1
		: 2))
	  : rtx_cost (x, SET) * 2);
}

/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */

#define COSTS_N_INSNS(N) ((N) * 4 - 2)
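
/* Thus COSTS_N_INSNS (1) == 2, matching the default operation cost of
   2 assigned in rtx_cost below, and each additional insn adds 4.  */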

int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code ATTRIBUTE_UNUSED;
{
  register int i, j;
  register enum rtx_code code;
  register char *fmt;
  register int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
	 we have no way of validating the resulting insn.  So assign
	 anything containing an ASM_OPERANDS a very high cost.  */
      total = 1000;
      break;
    default:
      total = 2;
    }

  switch (code)
    {
    case REG:
      return ! CHEAP_REG (x);

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      return 2;
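
      /* The machine description's RTX_COSTS and CONST_COSTS macros
	 expand to `case' labels within this switch, so although they
	 appear after a `return' they are reachable.  */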
#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
#ifdef CONST_COSTS
      CONST_COSTS (x, code, outer_code);
#endif

    default:
#ifdef DEFAULT_RTX_COSTS
      DEFAULT_RTX_COSTS (x, code, outer_code);
#endif
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
\f
static struct cse_reg_info *
get_cse_reg_info (regno)
     int regno;
{
  struct cse_reg_info *cri;
  splay_tree_node n;

  /* See if we already have this entry.  */
  n = splay_tree_lookup (cse_reg_info_tree,
			 (splay_tree_key) regno);
  if (n)
    cri = (struct cse_reg_info *) (n->value);
  else
    {
      /* Get a new cse_reg_info structure.  */
      if (cse_reg_info_free_list)
	{
	  cri = cse_reg_info_free_list;
	  cse_reg_info_free_list = cri->variant.next;
	}
      else
	cri = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));

      /* Initialize it.  */
      cri->variant.reg_tick = 0;
      cri->reg_in_table = -1;
      cri->reg_qty = regno;

      splay_tree_insert (cse_reg_info_tree,
			 (splay_tree_key) regno,
			 (splay_tree_value) cri);
    }

  /* Cache this lookup; we tend to be looking up information about the
     same register several times in a row.  */
  cached_regno = regno;
  cached_cse_reg_info = cri;

  return cri;
}

static void
free_cse_reg_info (v)
     splay_tree_value v;
{
  struct cse_reg_info *cri = (struct cse_reg_info *) v;

  cri->variant.next = cse_reg_info_free_list;
  cse_reg_info_free_list = cri;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  register int i;

  next_qty = max_reg;

  if (cse_reg_info_tree)
    {
      splay_tree_delete (cse_reg_info_tree);
      cached_cse_reg_info = 0;
    }

  cse_reg_info_tree = splay_tree_new (splay_tree_compare_ints, 0,
				      free_cse_reg_info);

  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *this, *next;
      for (this = table[i]; this; this = next)
	{
	  next = this->next_same_hash;
	  free_element (this);
	}
    }

  bzero ((char *) table, sizeof table);

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity not in any register before
   and initialize that quantity.  */

static void
make_new_qty (reg)
     register int reg;
{
  register int q;

  if (next_qty >= max_qty)
    abort ();

  q = REG_QTY (reg) = next_qty++;
  qty_first_reg[q] = reg;
  qty_last_reg[q] = reg;
  qty_const[q] = qty_const_insn[q] = 0;
  qty_comparison_code[q] = UNKNOWN;

  reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     register int new, old;
{
  register int lastr, firstr;
  register int q = REG_QTY (old);

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  REG_QTY (new) = q;
  firstr = qty_first_reg[q];
  lastr = qty_last_reg[q];

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_prev_eqv[firstr] = new;
      reg_next_eqv[new] = firstr;
      reg_prev_eqv[new] = -1;
      qty_first_reg[q] = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_prev_eqv[lastr];
      reg_next_eqv[new] = reg_next_eqv[lastr];
      if (reg_next_eqv[lastr] >= 0)
	reg_prev_eqv[reg_next_eqv[lastr]] = new;
      else
	qty_last_reg[q] = new;
      reg_next_eqv[lastr] = new;
      reg_prev_eqv[new] = lastr;
    }
}
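
/* Roughly, then, if pseudo 102 is copied from pseudo 101 and quantity
   Q's chain was just (101), the chain becomes (102 101) when 102's
   last use lies beyond the current basic block and after 101's, and
   (101 102) otherwise; qty_first_reg[Q] then names the preferred
   replacement.  (A sketch of the rules above, not an exhaustive case
   analysis.)  */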

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     register int reg;
{
  register int q = REG_QTY (reg);
  register int p, n;

  /* If invalid, do nothing.  */
  if (q == reg)
    return;

  p = reg_prev_eqv[reg];
  n = reg_next_eqv[reg];

  if (n != -1)
    reg_prev_eqv[n] = p;
  else
    qty_last_reg[q] = p;
  if (p != -1)
    reg_next_eqv[p] = n;
  else
    qty_first_reg[q] = n;

  REG_QTY (reg) = reg;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      register int regno = REGNO (x);
      register int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If reg_tick has been incremented more than once since
	     reg_in_table was last set, that means that the entire
	     register has been set before, so discard anything memorized
	     for the entire register, including all SUBREG expressions.  */
	  if (REG_IN_TABLE (i) != REG_TICK (i) - 1)
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_WORD (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      if (REGNO_QTY_VALID_P (regno)
	  && qty_mode[REG_QTY (regno)] != GET_MODE (x))
	return 0;

      if (modified || ! REGNO_QTY_VALID_P (regno))
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  make_new_qty (regno);
	  qty_mode[REG_QTY (regno)] = GET_MODE (x);
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      int regno = REGNO (SUBREG_REG (x));

      insert_regs (SUBREG_REG (x), NULL_PTR, 0);
      /* Mention_regs checks if REG_TICK is exactly one larger than
	 REG_IN_TABLE to find out if there was only a single preceding
	 invalidation - for the SUBREG - or another one, which would be
	 for the full register.  Since we don't invalidate the SUBREG
	 here first, we might have to bump up REG_TICK so that mention_regs
	 will do the right thing.  */
      if (REG_IN_TABLE (regno) >= 0
	  && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	REG_TICK (regno)++;
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}
\f
/* Look in or update the hash table.  */

/* Put the element ELT on the list of free elements.  */

static void
free_element (elt)
     struct table_elt *elt;
{
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Return an element that is free for use.  */

static struct table_elt *
get_element ()
{
  struct table_elt *elt = free_element_chain;
  if (elt)
    {
      free_element_chain = elt->next_same_hash;
      return elt;
    }
  n_elements_made++;
  return (struct table_elt *) oballoc (sizeof (struct table_elt));
}

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next) next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	register struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next) next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < NBUCKETS; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;
      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  free_element (elt);
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}
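
/* Illustrative sketch only (`x' and `mode' stand for a caller's rtx
   and its mode): callers typically clear `do_not_record', compute the
   hash with the HASH macro, and check `do_not_record' afterward,
   since HASH may go through canon_hash.  */
#if 0
  unsigned hash;
  struct table_elt *elt = 0;

  do_not_record = 0;
  hash = HASH (x, mode);
  if (! do_not_record)
    elt = lookup (x, hash, mode);
#endif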

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);
      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
					 GET_MODE (x));
  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS, word_mode);
    }

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    {
      if (GET_CODE (p->exp) == code
	  /* Make sure this is a valid entry in the table.  */
	  && exp_equiv_p (p->exp, p->exp, 1, 0))
	return p->exp;
    }

  return 0;
}
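
/* Illustrative sketch only (`x' stands for a caller's rtx): this asks
   whether X is known to be equal to some PLUS expression recorded
   earlier, e.g. so its operands can be examined.  */
#if 0
  rtx sum = lookup_as_function (x, PLUS);
  if (sum != 0)
    /* XEXP (sum, 0) and XEXP (sum, 1) are the known addends.  */;
#endif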

/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X,Y)   ((X)->cost < (Y)->cost)

static struct table_elt *
insert (x, classp, hash, mode)
     register rtx x;
     register struct table_elt *classp;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table.  */
  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
      int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* If X is a label, show we recorded it.  */
  if (GET_CODE (x) == LABEL_REF
      || (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF))
    recorded_label_ref = 1;

  /* Put an element for X into the right hash bucket.  */

  elt = get_element ();
  elt->exp = x;
  elt->cost = COST (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x)
		   /* GNU C++ takes advantage of this for `this'
		      (and other const values).  */
		   || (RTX_UNCHANGING_P (x)
		       && GET_CODE (x) == REG
		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
		   || FIXED_BASE_PLUS_P (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  register struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  register struct table_elt *p, *next;
	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next);
	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;
	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to a constant,
     update `qty_const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && GET_CODE (classp->exp) == REG
      && GET_CODE (x) != REG)
    {
      qty_const[REG_QTY (REGNO (classp->exp))]
	= gen_lowpart_if_possible (qty_mode[REG_QTY (REGNO (classp->exp))], x);
      qty_const_insn[REG_QTY (REGNO (classp->exp))] = this_insn;
    }

  else if (GET_CODE (x) == REG && classp && ! qty_const[REG_QTY (REGNO (x))]
	   && ! elt->is_const)
    {
      register struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const && GET_CODE (p->exp) != REG)
	    {
	      qty_const[REG_QTY (REGNO (x))]
		= gen_lowpart_if_possible (GET_MODE (x), p->exp);
	      qty_const_insn[REG_QTY (REGNO (x))] = this_insn;
	      break;
	    }
	}
    }

  else if (GET_CODE (x) == REG && qty_const[REG_QTY (REGNO (x))]
	   && GET_MODE (x) == qty_mode[REG_QTY (REGNO (x))])
    qty_const_insn[REG_QTY (REGNO (x))] = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = safe_hash (subexp, mode) % NBUCKETS;
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL_PTR, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}
\f
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (class1, class2)
     struct table_elt *class1, *class2;
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
	{
	  hash_arg_in_memory = 0;
	  hash_arg_in_struct = 0;
	  hash = HASH (exp, mode);

	  if (GET_CODE (exp) == REG)
	    delete_reg_equiv (REGNO (exp));

	  remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0))
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	  new->in_struct = hash_arg_in_struct;
	}
    }
}
1670 \f
1671
1672 /* Flush the entire hash table. */
1673
1674 static void
1675 flush_hash_table ()
1676 {
1677 int i;
1678 struct table_elt *p;
1679
1680 for (i = 0; i < NBUCKETS; i++)
1681 for (p = table[i]; p; p = table[i])
1682 {
1683 /* Note that invalidate can remove elements
1684 after P in the current hash chain. */
1685 if (GET_CODE (p->exp) == REG)
1686 invalidate (p->exp, p->mode);
1687 else
1688 remove_from_table (p, i);
1689 }
1690 }
1691
1692
1693 /* Remove from the hash table, or mark as invalid,
1694 all expressions whose values could be altered by storing in X.
1695 X is a register, a subreg, or a memory reference with nonvarying address
1696 (because, when a memory reference with a varying address is stored in,
1697 all memory references are removed by invalidate_memory
1698 so specific invalidation is superfluous).
1699 FULL_MODE, if not VOIDmode, indicates that this much should be invalidated
1700 instead of just the amount indicated by the mode of X. This is only used
1701 for bitfield stores into memory.
1702
1703 A nonvarying address may be just a register or just
1704 a symbol reference, or it may be either of those plus
1705 a numeric offset. */
1706
1707 static void
1708 invalidate (x, full_mode)
1709 rtx x;
1710 enum machine_mode full_mode;
1711 {
1712 register int i;
1713 register struct table_elt *p;
1714
1715 /* If X is a register, dependencies on its contents
1716 are recorded through the qty number mechanism.
1717 Just change the qty number of the register,
1718 mark it as invalid for expressions that refer to it,
1719 and remove it itself. */
1720
1721 if (GET_CODE (x) == REG)
1722 {
1723 register int regno = REGNO (x);
1724 register unsigned hash = HASH (x, GET_MODE (x));
1725
1726 /* Remove REGNO from any quantity list it might be on and indicate
1727 that its value might have changed. If it is a pseudo, remove its
1728 entry from the hash table.
1729
1730 For a hard register, we do the first two actions above for any
1731 additional hard registers corresponding to X. Then, if any of these
1732 registers are in the table, we must remove any REG entries that
1733 overlap these registers. */
1734
1735 delete_reg_equiv (regno);
1736 REG_TICK (regno)++;
1737
1738 if (regno >= FIRST_PSEUDO_REGISTER)
1739 {
1740 /* Because a register can be referenced in more than one mode,
1741 we might have to remove more than one table entry. */
1742
1743 struct table_elt *elt;
1744
1745 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1746 remove_from_table (elt, hash);
1747 }
1748 else
1749 {
1750 HOST_WIDE_INT in_table
1751 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1752 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1753 int tregno, tendregno;
1754 register struct table_elt *p, *next;
1755
1756 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1757
1758 for (i = regno + 1; i < endregno; i++)
1759 {
1760 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
1761 CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
1762 delete_reg_equiv (i);
1763 REG_TICK (i)++;
1764 }
1765
1766 if (in_table)
1767 for (hash = 0; hash < NBUCKETS; hash++)
1768 for (p = table[hash]; p; p = next)
1769 {
1770 next = p->next_same_hash;
1771
1772 if (GET_CODE (p->exp) != REG
1773 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1774 continue;
1775
1776 tregno = REGNO (p->exp);
1777 tendregno
1778 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1779 if (tendregno > regno && tregno < endregno)
1780 remove_from_table (p, hash);
1781 }
1782 }
1783
1784 return;
1785 }
1786
1787 if (GET_CODE (x) == SUBREG)
1788 {
1789 if (GET_CODE (SUBREG_REG (x)) != REG)
1790 abort ();
1791 invalidate (SUBREG_REG (x), VOIDmode);
1792 return;
1793 }
1794
1795 /* If X is a parallel, invalidate all of its elements. */
1796
1797 if (GET_CODE (x) == PARALLEL)
1798 {
1799 for (i = XVECLEN (x, 0) - 1; i >= 0 ; --i)
1800 invalidate (XVECEXP (x, 0, i), VOIDmode);
1801 return;
1802 }
1803
1804 /* If X is an expr_list, this is part of a disjoint return value;
1805 extract the location in question ignoring the offset. */
1806
1807 if (GET_CODE (x) == EXPR_LIST)
1808 {
1809 invalidate (XEXP (x, 0), VOIDmode);
1810 return;
1811 }
1812
1813 /* X is not a register; it must be a memory reference with
1814 a nonvarying address. Remove all hash table elements
1815 that refer to overlapping pieces of memory. */
1816
1817 if (GET_CODE (x) != MEM)
1818 abort ();
1819
1820 if (full_mode == VOIDmode)
1821 full_mode = GET_MODE (x);
1822
1823 for (i = 0; i < NBUCKETS; i++)
1824 {
1825 register struct table_elt *next;
1826 for (p = table[i]; p; p = next)
1827 {
1828 next = p->next_same_hash;
1829 /* Invalidate ASM_OPERANDS which reference memory (this is easier
1830 than checking all the aliases). */
1831 if (p->in_memory
1832 && (GET_CODE (p->exp) != MEM
1833 || true_dependence (x, full_mode, p->exp, cse_rtx_varies_p)))
1834 remove_from_table (p, i);
1835 }
1836 }
1837 }
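/* The removal test just above is the standard overlap check for the
   half-open hard register ranges [REGNO, ENDREGNO) and
   [TREGNO, TENDREGNO).  A standalone sketch (function name invented):
   ranges (4,6) and (5,7) overlap, while (4,6) and (6,8) do not.  */

static int
hard_reg_ranges_overlap (regno, endregno, tregno, tendregno)
     int regno, endregno, tregno, tendregno;
{
  return tendregno > regno && tregno < endregno;
}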
1838
1839 /* Remove all expressions that refer to register REGNO,
1840 since they are already invalid, and we are about to
1841 mark that register valid again and don't want the old
1842 expressions to reappear as valid. */
1843
1844 static void
1845 remove_invalid_refs (regno)
1846 int regno;
1847 {
1848 register int i;
1849 register struct table_elt *p, *next;
1850
1851 for (i = 0; i < NBUCKETS; i++)
1852 for (p = table[i]; p; p = next)
1853 {
1854 next = p->next_same_hash;
1855 if (GET_CODE (p->exp) != REG
1856 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1857 remove_from_table (p, i);
1858 }
1859 }
1860
1861 /* Likewise, but invoked for a store to a SUBREG of register REGNO covering word WORD with mode MODE; references through non-overlapping subregs of REGNO are kept. */
1862 static void
1863 remove_invalid_subreg_refs (regno, word, mode)
1864 int regno;
1865 int word;
1866 enum machine_mode mode;
1867 {
1868 register int i;
1869 register struct table_elt *p, *next;
1870 int end = word + (GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD;
1871
1872 for (i = 0; i < NBUCKETS; i++)
1873 for (p = table[i]; p; p = next)
1874 {
1875 rtx exp;
1876 next = p->next_same_hash;
1877
1878 exp = p->exp;
1879 if (GET_CODE (p->exp) != REG
1880 && (GET_CODE (exp) != SUBREG
1881 || GET_CODE (SUBREG_REG (exp)) != REG
1882 || REGNO (SUBREG_REG (exp)) != regno
1883 || (((SUBREG_WORD (exp)
1884 + (GET_MODE_SIZE (GET_MODE (exp)) - 1) / UNITS_PER_WORD)
1885 >= word)
1886 && SUBREG_WORD (exp) <= end))
1887 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1888 remove_from_table (p, i);
1889 }
1890 }
1891 \f
1892 /* Recompute the hash codes of any valid entries in the hash table that
1893 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1894
1895 This is called when we make a jump equivalence. */
1896
1897 static void
1898 rehash_using_reg (x)
1899 rtx x;
1900 {
1901 unsigned int i;
1902 struct table_elt *p, *next;
1903 unsigned hash;
1904
1905 if (GET_CODE (x) == SUBREG)
1906 x = SUBREG_REG (x);
1907
1908 /* If X is not a register or if the register is known not to be in any
1909 valid entries in the table, we have no work to do. */
1910
1911 if (GET_CODE (x) != REG
1912 || REG_IN_TABLE (REGNO (x)) < 0
1913 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
1914 return;
1915
1916 /* Scan all hash chains looking for valid entries that mention X.
1917 If we find one and it is in the wrong hash chain, move it. We can skip
1918 objects that are registers, since they are handled specially. */
1919
1920 for (i = 0; i < NBUCKETS; i++)
1921 for (p = table[i]; p; p = next)
1922 {
1923 next = p->next_same_hash;
1924 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
1925 && exp_equiv_p (p->exp, p->exp, 1, 0)
1926 && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
1927 {
1928 if (p->next_same_hash)
1929 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1930
1931 if (p->prev_same_hash)
1932 p->prev_same_hash->next_same_hash = p->next_same_hash;
1933 else
1934 table[i] = p->next_same_hash;
1935
1936 p->next_same_hash = table[hash];
1937 p->prev_same_hash = 0;
1938 if (table[hash])
1939 table[hash]->prev_same_hash = p;
1940 table[hash] = p;
1941 }
1942 }
1943 }
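/* A standalone sketch of the chain surgery performed above (struct and
   names invented for illustration): unlink P from the doubly-linked
   chain rooted at OLD_HEAD, then push it on the front of the chain
   rooted at NEW_HEAD.  */

struct chain_elt { struct chain_elt *next, *prev; };

static void
move_to_chain (p, old_head, new_head)
     struct chain_elt *p;
     struct chain_elt **old_head, **new_head;
{
  /* Unlink P from its current chain.  */
  if (p->next)
    p->next->prev = p->prev;
  if (p->prev)
    p->prev->next = p->next;
  else
    *old_head = p->next;

  /* Push P on the front of the new chain.  */
  p->next = *new_head;
  p->prev = 0;
  if (*new_head)
    (*new_head)->prev = p;
  *new_head = p;
}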
1944 \f
1945 /* Remove from the hash table any expression that is a call-clobbered
1946 register. Also update their TICK values. */
1947
1948 static void
1949 invalidate_for_call ()
1950 {
1951 int regno, endregno;
1952 int i;
1953 unsigned hash;
1954 struct table_elt *p, *next;
1955 int in_table = 0;
1956
1957 /* Go through all the hard registers. For each that is clobbered in
1958 a CALL_INSN, remove the register from quantity chains and update
1959 reg_tick if defined. Also see if any of these registers is currently
1960 in the table. */
1961
1962 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1963 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1964 {
1965 delete_reg_equiv (regno);
1966 if (REG_TICK (regno) >= 0)
1967 REG_TICK (regno)++;
1968
1969 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
1970 }
1971
1972 /* In the case where we have no call-clobbered hard registers in the
1973 table, we are done. Otherwise, scan the table and remove any
1974 entry that overlaps a call-clobbered register. */
1975
1976 if (in_table)
1977 for (hash = 0; hash < NBUCKETS; hash++)
1978 for (p = table[hash]; p; p = next)
1979 {
1980 next = p->next_same_hash;
1981
1982 if (p->in_memory)
1983 {
1984 remove_from_table (p, hash);
1985 continue;
1986 }
1987
1988 if (GET_CODE (p->exp) != REG
1989 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1990 continue;
1991
1992 regno = REGNO (p->exp);
1993 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1994
1995 for (i = regno; i < endregno; i++)
1996 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1997 {
1998 remove_from_table (p, hash);
1999 break;
2000 }
2001 }
2002 }
2003 \f
2004 /* Given an expression X of type CONST,
2005 and ELT which is its table entry (or 0 if it
2006 is not in the hash table),
2007 return an alternate expression for X as a register plus integer.
2008 If none can be found, return 0. */
2009
2010 static rtx
2011 use_related_value (x, elt)
2012 rtx x;
2013 struct table_elt *elt;
2014 {
2015 register struct table_elt *relt = 0;
2016 register struct table_elt *p, *q;
2017 HOST_WIDE_INT offset;
2018
2019 /* First, is there anything related known?
2020 If we have a table element, we can tell from that.
2021 Otherwise, must look it up. */
2022
2023 if (elt != 0 && elt->related_value != 0)
2024 relt = elt;
2025 else if (elt == 0 && GET_CODE (x) == CONST)
2026 {
2027 rtx subexp = get_related_value (x);
2028 if (subexp != 0)
2029 relt = lookup (subexp,
2030 safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
2031 GET_MODE (subexp));
2032 }
2033
2034 if (relt == 0)
2035 return 0;
2036
2037 /* Search all related table entries for one that has an
2038 equivalent register. */
2039
2040 p = relt;
2041 while (1)
2042 {
2043 /* This loop is strange in that it is executed in two different cases.
2044 The first is when X is already in the table. Then it is searching
2045 the RELATED_VALUE list of X's class (RELT). The second case is when
2046 X is not in the table. Then RELT points to a class for the related
2047 value.
2048
2049 Ensure that, whatever case we are in, we ignore classes that have
2050 the same value as X. */
2051
2052 if (rtx_equal_p (x, p->exp))
2053 q = 0;
2054 else
2055 for (q = p->first_same_value; q; q = q->next_same_value)
2056 if (GET_CODE (q->exp) == REG)
2057 break;
2058
2059 if (q)
2060 break;
2061
2062 p = p->related_value;
2063
2064 /* We went all the way around, so there is nothing to be found.
2065 Alternatively, perhaps RELT was in the table for some other reason
2066 and it has no related values recorded. */
2067 if (p == relt || p == 0)
2068 break;
2069 }
2070
2071 if (q == 0)
2072 return 0;
2073
2074 offset = (get_integer_term (x) - get_integer_term (p->exp));
2075 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2076 return plus_constant (q->exp, offset);
2077 }
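/* For example (register numbers illustrative): if X is
   (const (plus (symbol_ref "s") (const_int 8))) and the table knows
   that (const (plus (symbol_ref "s") (const_int 4))) lives in
   (reg 65), then OFFSET is 8 - 4 and the result is
   (plus (reg 65) (const_int 4)).  */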
2078 \f
2079 /* Hash an rtx. We are careful to make sure the value is never negative.
2080 Equivalent registers hash identically.
2081 MODE is used in hashing for CONST_INTs only;
2082 otherwise the mode of X is used.
2083
2084 Store 1 in do_not_record if any subexpression is volatile.
2085
2086 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2087 which does not have the RTX_UNCHANGING_P bit set.
2088 In this case, also store 1 in hash_arg_in_struct
2089 if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.
2090
2091 Note that cse_insn knows that the hash code of a MEM expression
2092 is just (int) MEM plus the hash code of the address. */
2093
2094 static unsigned
2095 canon_hash (x, mode)
2096 rtx x;
2097 enum machine_mode mode;
2098 {
2099 register int i, j;
2100 register unsigned hash = 0;
2101 register enum rtx_code code;
2102 register char *fmt;
2103
2104 /* repeat is used to turn tail-recursion into iteration. */
2105 repeat:
2106 if (x == 0)
2107 return hash;
2108
2109 code = GET_CODE (x);
2110 switch (code)
2111 {
2112 case REG:
2113 {
2114 register int regno = REGNO (x);
2115
2116 /* On some machines, we can't record any non-fixed hard register,
2117 because extending its life will cause reload problems. We
2118 consider ap, fp, and sp to be fixed for this purpose.
2119
2120 We also consider CCmode registers to be fixed for this purpose;
2121 failure to do so leads to failure to simplify 0<100 type of
2122 conditionals.
2123
2124 On all machines, we can't record any global registers. */
2125
2126 if (regno < FIRST_PSEUDO_REGISTER
2127 && (global_regs[regno]
2128 || (SMALL_REGISTER_CLASSES
2129 && ! fixed_regs[regno]
2130 && regno != FRAME_POINTER_REGNUM
2131 && regno != HARD_FRAME_POINTER_REGNUM
2132 && regno != ARG_POINTER_REGNUM
2133 && regno != STACK_POINTER_REGNUM
2134 && GET_MODE_CLASS (GET_MODE (x)) != MODE_CC)))
2135 {
2136 do_not_record = 1;
2137 return 0;
2138 }
2139 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2140 return hash;
2141 }
2142
2143 /* We handle SUBREG of a REG specially because the underlying
2144 reg changes its hash value with every value change; we don't
2145 want to have to forget unrelated subregs when one subreg changes. */
2146 case SUBREG:
2147 {
2148 if (GET_CODE (SUBREG_REG (x)) == REG)
2149 {
2150 hash += (((unsigned) SUBREG << 7)
2151 + REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
2152 return hash;
2153 }
2154 break;
2155 }
2156
2157 case CONST_INT:
2158 {
2159 unsigned HOST_WIDE_INT tem = INTVAL (x);
2160 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2161 return hash;
2162 }
2163
2164 case CONST_DOUBLE:
2165 /* This is like the general case, except that it only counts
2166 the integers representing the constant. */
2167 hash += (unsigned) code + (unsigned) GET_MODE (x);
2168 if (GET_MODE (x) != VOIDmode)
2169 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
2170 {
2171 unsigned tem = XINT (x, i);
2172 hash += tem;
2173 }
2174 else
2175 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2176 + (unsigned) CONST_DOUBLE_HIGH (x));
2177 return hash;
2178
2179 /* Assume there is only one rtx object for any given label. */
2180 case LABEL_REF:
2181 hash
2182 += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2183 return hash;
2184
2185 case SYMBOL_REF:
2186 hash
2187 += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2188 return hash;
2189
2190 case MEM:
2191 if (MEM_VOLATILE_P (x))
2192 {
2193 do_not_record = 1;
2194 return 0;
2195 }
2196 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2197 {
2198 hash_arg_in_memory = 1;
2199 if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
2200 }
2201 /* Now that we have already found this special case,
2202 might as well speed it up as much as possible. */
2203 hash += (unsigned) MEM;
2204 x = XEXP (x, 0);
2205 goto repeat;
2206
2207 case PRE_DEC:
2208 case PRE_INC:
2209 case POST_DEC:
2210 case POST_INC:
2211 case PC:
2212 case CC0:
2213 case CALL:
2214 case UNSPEC_VOLATILE:
2215 do_not_record = 1;
2216 return 0;
2217
2218 case ASM_OPERANDS:
2219 if (MEM_VOLATILE_P (x))
2220 {
2221 do_not_record = 1;
2222 return 0;
2223 }
2224 break;
2225
2226 default:
2227 break;
2228 }
2229
2230 i = GET_RTX_LENGTH (code) - 1;
2231 hash += (unsigned) code + (unsigned) GET_MODE (x);
2232 fmt = GET_RTX_FORMAT (code);
2233 for (; i >= 0; i--)
2234 {
2235 if (fmt[i] == 'e')
2236 {
2237 rtx tem = XEXP (x, i);
2238
2239 /* If we are about to do the last recursive call
2240 needed at this level, change it into iteration.
2241 This function is called enough to be worth it. */
2242 if (i == 0)
2243 {
2244 x = tem;
2245 goto repeat;
2246 }
2247 hash += canon_hash (tem, 0);
2248 }
2249 else if (fmt[i] == 'E')
2250 for (j = 0; j < XVECLEN (x, i); j++)
2251 hash += canon_hash (XVECEXP (x, i, j), 0);
2252 else if (fmt[i] == 's')
2253 {
2254 register unsigned char *p = (unsigned char *) XSTR (x, i);
2255 if (p)
2256 while (*p)
2257 hash += *p++;
2258 }
2259 else if (fmt[i] == 'i')
2260 {
2261 register unsigned tem = XINT (x, i);
2262 hash += tem;
2263 }
2264 else if (fmt[i] == '0')
2265 /* unused */;
2266 else
2267 abort ();
2268 }
2269 return hash;
2270 }
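/* A standalone sketch of the hashing scheme above, on an invented
   two-child tree type: mix in the node's code, fold in each child,
   and turn the final recursive call into iteration exactly as the
   `goto repeat' above does.  */

struct tree_node
{
  unsigned code;
  int nkids;			/* 0, 1 or 2 */
  struct tree_node *kid[2];
};

static unsigned
sketch_hash (t)
     struct tree_node *t;
{
  unsigned hash = 0;
  int i;

 repeat:
  if (t == 0)
    return hash;

  hash += t->code;
  for (i = t->nkids - 1; i >= 0; i--)
    {
      if (i == 0)
	{
	  t = t->kid[0];	/* last child: iterate, don't recurse */
	  goto repeat;
	}
      hash += sketch_hash (t->kid[i]);
    }
  return hash;
}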
2271
2272 /* Like canon_hash but with no side effects. */
2273
2274 static unsigned
2275 safe_hash (x, mode)
2276 rtx x;
2277 enum machine_mode mode;
2278 {
2279 int save_do_not_record = do_not_record;
2280 int save_hash_arg_in_memory = hash_arg_in_memory;
2281 int save_hash_arg_in_struct = hash_arg_in_struct;
2282 unsigned hash = canon_hash (x, mode);
2283 hash_arg_in_memory = save_hash_arg_in_memory;
2284 hash_arg_in_struct = save_hash_arg_in_struct;
2285 do_not_record = save_do_not_record;
2286 return hash;
2287 }
2288 \f
2289 /* Return 1 iff X and Y would canonicalize into the same thing,
2290 without actually constructing the canonicalization of either one.
2291 If VALIDATE is nonzero,
2292 we assume X is an expression being processed from the rtl
2293 and Y was found in the hash table. We check register refs
2294 in Y for being marked as valid.
2295
2296 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2297 that is known to be in the register. Ordinarily, we don't allow them
2298 to match, because letting them match would cause unpredictable results
2299 in all the places that search a hash table chain for an equivalent
2300 for a given value. A possible equivalent that has different structure
2301 has its hash code computed from different data. Whether the hash code
2302 is the same as that of the given value is pure luck. */
2303
2304 static int
2305 exp_equiv_p (x, y, validate, equal_values)
2306 rtx x, y;
2307 int validate;
2308 int equal_values;
2309 {
2310 register int i, j;
2311 register enum rtx_code code;
2312 register char *fmt;
2313
2314 /* Note: it is incorrect to assume an expression is equivalent to itself
2315 if VALIDATE is nonzero. */
2316 if (x == y && !validate)
2317 return 1;
2318 if (x == 0 || y == 0)
2319 return x == y;
2320
2321 code = GET_CODE (x);
2322 if (code != GET_CODE (y))
2323 {
2324 if (!equal_values)
2325 return 0;
2326
2327 /* If X is a constant and Y is a register or vice versa, they may be
2328 equivalent. We only have to validate if Y is a register. */
2329 if (CONSTANT_P (x) && GET_CODE (y) == REG
2330 && REGNO_QTY_VALID_P (REGNO (y))
2331 && GET_MODE (y) == qty_mode[REG_QTY (REGNO (y))]
2332 && rtx_equal_p (x, qty_const[REG_QTY (REGNO (y))])
2333 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2334 return 1;
2335
2336 if (CONSTANT_P (y) && code == REG
2337 && REGNO_QTY_VALID_P (REGNO (x))
2338 && GET_MODE (x) == qty_mode[REG_QTY (REGNO (x))]
2339 && rtx_equal_p (y, qty_const[REG_QTY (REGNO (x))]))
2340 return 1;
2341
2342 return 0;
2343 }
2344
2345 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2346 if (GET_MODE (x) != GET_MODE (y))
2347 return 0;
2348
2349 switch (code)
2350 {
2351 case PC:
2352 case CC0:
2353 return x == y;
2354
2355 case CONST_INT:
2356 return INTVAL (x) == INTVAL (y);
2357
2358 case LABEL_REF:
2359 return XEXP (x, 0) == XEXP (y, 0);
2360
2361 case SYMBOL_REF:
2362 return XSTR (x, 0) == XSTR (y, 0);
2363
2364 case REG:
2365 {
2366 int regno = REGNO (y);
2367 int endregno
2368 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2369 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2370 int i;
2371
2372 /* If the quantities are not the same, the expressions are not
2373 equivalent. If they are and we are not to validate, they
2374 are equivalent. Otherwise, ensure all regs are up-to-date. */
2375
2376 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2377 return 0;
2378
2379 if (! validate)
2380 return 1;
2381
2382 for (i = regno; i < endregno; i++)
2383 if (REG_IN_TABLE (i) != REG_TICK (i))
2384 return 0;
2385
2386 return 1;
2387 }
2388
2389 /* For commutative operations, check both orders. */
2390 case PLUS:
2391 case MULT:
2392 case AND:
2393 case IOR:
2394 case XOR:
2395 case NE:
2396 case EQ:
2397 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2398 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2399 validate, equal_values))
2400 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2401 validate, equal_values)
2402 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2403 validate, equal_values)));
2404
2405 default:
2406 break;
2407 }
2408
2409 /* Compare the elements. If any pair of corresponding elements
2410 fails to match, return 0 for the whole thing. */
2411
2412 fmt = GET_RTX_FORMAT (code);
2413 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2414 {
2415 switch (fmt[i])
2416 {
2417 case 'e':
2418 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2419 return 0;
2420 break;
2421
2422 case 'E':
2423 if (XVECLEN (x, i) != XVECLEN (y, i))
2424 return 0;
2425 for (j = 0; j < XVECLEN (x, i); j++)
2426 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2427 validate, equal_values))
2428 return 0;
2429 break;
2430
2431 case 's':
2432 if (strcmp (XSTR (x, i), XSTR (y, i)))
2433 return 0;
2434 break;
2435
2436 case 'i':
2437 if (XINT (x, i) != XINT (y, i))
2438 return 0;
2439 break;
2440
2441 case 'w':
2442 if (XWINT (x, i) != XWINT (y, i))
2443 return 0;
2444 break;
2445
2446 case '0':
2447 break;
2448
2449 default:
2450 abort ();
2451 }
2452 }
2453
2454 return 1;
2455 }
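/* For example, under the commutative rule above, (plus (reg 65)
   (reg 66)) is equivalent to (plus (reg 66) (reg 65)); and with
   EQUAL_VALUES nonzero, a REG matches a constant recorded for its
   quantity in qty_const (register numbers illustrative).  */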
2456 \f
2457 /* Return 1 iff any subexpression of X matches Y.
2458 Here we do not require that X or Y be valid (for registers referred to)
2459 for being in the hash table. */
2460
2461 static int
2462 refers_to_p (x, y)
2463 rtx x, y;
2464 {
2465 register int i;
2466 register enum rtx_code code;
2467 register char *fmt;
2468
2469 repeat:
2470 if (x == y)
2471 return 1;
2472 if (x == 0 || y == 0)
2473 return 0;
2474
2475 code = GET_CODE (x);
2476 /* If X as a whole has the same code as Y, they may match.
2477 If so, return 1. */
2478 if (code == GET_CODE (y))
2479 {
2480 if (exp_equiv_p (x, y, 0, 1))
2481 return 1;
2482 }
2483
2484 /* X does not match, so try its subexpressions. */
2485
2486 fmt = GET_RTX_FORMAT (code);
2487 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2488 if (fmt[i] == 'e')
2489 {
2490 if (i == 0)
2491 {
2492 x = XEXP (x, 0);
2493 goto repeat;
2494 }
2495 else
2496 if (refers_to_p (XEXP (x, i), y))
2497 return 1;
2498 }
2499 else if (fmt[i] == 'E')
2500 {
2501 int j;
2502 for (j = 0; j < XVECLEN (x, i); j++)
2503 if (refers_to_p (XVECEXP (x, i, j), y))
2504 return 1;
2505 }
2506
2507 return 0;
2508 }
2509 \f
2510 /* Given ADDR and SIZE (a memory address, and the size of the memory reference),
2511 set PBASE, PSTART, and PEND which correspond to the base of the address,
2512 the starting offset, and ending offset respectively.
2513
2514 ADDR is known to be a nonvarying address. */
2515
2516 /* ??? Despite what the comments say, this function is in fact frequently
2517 passed varying addresses. This does not appear to cause any problems. */
2518
2519 static void
2520 set_nonvarying_address_components (addr, size, pbase, pstart, pend)
2521 rtx addr;
2522 int size;
2523 rtx *pbase;
2524 HOST_WIDE_INT *pstart, *pend;
2525 {
2526 rtx base;
2527 HOST_WIDE_INT start, end;
2528
2529 base = addr;
2530 start = 0;
2531 end = 0;
2532
2533 if (flag_pic && GET_CODE (base) == PLUS
2534 && XEXP (base, 0) == pic_offset_table_rtx)
2535 base = XEXP (base, 1);
2536
2537 /* Registers with nonvarying addresses usually have constant equivalents;
2538 but the frame pointer register is also possible. */
2539 if (GET_CODE (base) == REG
2540 && qty_const != 0
2541 && REGNO_QTY_VALID_P (REGNO (base))
2542 && qty_mode[REG_QTY (REGNO (base))] == GET_MODE (base)
2543 && qty_const[REG_QTY (REGNO (base))] != 0)
2544 base = qty_const[REG_QTY (REGNO (base))];
2545 else if (GET_CODE (base) == PLUS
2546 && GET_CODE (XEXP (base, 1)) == CONST_INT
2547 && GET_CODE (XEXP (base, 0)) == REG
2548 && qty_const != 0
2549 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2550 && (qty_mode[REG_QTY (REGNO (XEXP (base, 0)))]
2551 == GET_MODE (XEXP (base, 0)))
2552 && qty_const[REG_QTY (REGNO (XEXP (base, 0)))])
2553 {
2554 start = INTVAL (XEXP (base, 1));
2555 base = qty_const[REG_QTY (REGNO (XEXP (base, 0)))];
2556 }
2557 /* This can happen as the result of virtual register instantiation,
2558 if the initial offset is too large to be a valid address. */
2559 else if (GET_CODE (base) == PLUS
2560 && GET_CODE (XEXP (base, 0)) == REG
2561 && GET_CODE (XEXP (base, 1)) == REG
2562 && qty_const != 0
2563 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2564 && (qty_mode[REG_QTY (REGNO (XEXP (base, 0)))]
2565 == GET_MODE (XEXP (base, 0)))
2566 && qty_const[REG_QTY (REGNO (XEXP (base, 0)))]
2567 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 1)))
2568 && (qty_mode[REG_QTY (REGNO (XEXP (base, 1)))]
2569 == GET_MODE (XEXP (base, 1)))
2570 && qty_const[REG_QTY (REGNO (XEXP (base, 1)))])
2571 {
2572 rtx tem = qty_const[REG_QTY (REGNO (XEXP (base, 1)))];
2573 base = qty_const[REG_QTY (REGNO (XEXP (base, 0)))];
2574
2575 /* One of the two values must be a constant. */
2576 if (GET_CODE (base) != CONST_INT)
2577 {
2578 if (GET_CODE (tem) != CONST_INT)
2579 abort ();
2580 start = INTVAL (tem);
2581 }
2582 else
2583 {
2584 start = INTVAL (base);
2585 base = tem;
2586 }
2587 }
2588
2589 /* Handle everything that we can find inside an address that has been
2590 viewed as constant. */
2591
2592 while (1)
2593 {
2594 /* If no part of this switch does a "continue", the code outside
2595 will exit this loop. */
2596
2597 switch (GET_CODE (base))
2598 {
2599 case LO_SUM:
2600 /* By definition, operand1 of a LO_SUM is the associated constant
2601 address. Use the associated constant address as the base
2602 instead. */
2603 base = XEXP (base, 1);
2604 continue;
2605
2606 case CONST:
2607 /* Strip off CONST. */
2608 base = XEXP (base, 0);
2609 continue;
2610
2611 case PLUS:
2612 if (GET_CODE (XEXP (base, 1)) == CONST_INT)
2613 {
2614 start += INTVAL (XEXP (base, 1));
2615 base = XEXP (base, 0);
2616 continue;
2617 }
2618 break;
2619
2620 case AND:
2621 /* Handle the case of an AND which is the negative of a power of
2622 two. This is used to represent unaligned memory operations. */
2623 if (GET_CODE (XEXP (base, 1)) == CONST_INT
2624 && exact_log2 (- INTVAL (XEXP (base, 1))) > 0)
2625 {
2626 set_nonvarying_address_components (XEXP (base, 0), size,
2627 pbase, pstart, pend);
2628
2629 /* Assume the worst misalignment. START is affected, but not
2630 END, so compensate by adjusting SIZE. Don't lose any
2631 constant we already had. */
2632
2633 size = *pend - *pstart - INTVAL (XEXP (base, 1)) - 1;
2634 start += *pstart + INTVAL (XEXP (base, 1)) + 1;
2635 end += *pend;
2636 base = *pbase;
2637 }
2638 break;
2639
2640 default:
2641 break;
2642 }
2643
2644 break;
2645 }
2646
2647 if (GET_CODE (base) == CONST_INT)
2648 {
2649 start += INTVAL (base);
2650 base = const0_rtx;
2651 }
2652
2653 end = start + size;
2654
2655 /* Set the return values. */
2656 *pbase = base;
2657 *pstart = start;
2658 *pend = end;
2659 }
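/* Worked example (addresses invented for illustration): for
   ADDR = (plus (symbol_ref "a") (const_int 12)) and SIZE = 4, the
   loop strips the PLUS, giving *PBASE = (symbol_ref "a"),
   *PSTART = 12, *PEND = 16.  A bare (const_int 40) folds into the
   offset instead: *PBASE = const0_rtx, *PSTART = 40, *PEND = 44.  */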
2660
2661 /* Return 1 if X has a value that can vary even between two
2662 executions of the program. 0 means X can be compared reliably
2663 against certain constants or near-constants. */
2664
2665 static int
2666 cse_rtx_varies_p (x)
2667 register rtx x;
2668 {
2669 /* We need not check for X and the equivalence class being of the same
2670 mode because if X is equivalent to a constant in some mode, it
2671 doesn't vary in any mode. */
2672
2673 if (GET_CODE (x) == REG
2674 && REGNO_QTY_VALID_P (REGNO (x))
2675 && GET_MODE (x) == qty_mode[REG_QTY (REGNO (x))]
2676 && qty_const[REG_QTY (REGNO (x))] != 0)
2677 return 0;
2678
2679 if (GET_CODE (x) == PLUS
2680 && GET_CODE (XEXP (x, 1)) == CONST_INT
2681 && GET_CODE (XEXP (x, 0)) == REG
2682 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2683 && (GET_MODE (XEXP (x, 0))
2684 == qty_mode[REG_QTY (REGNO (XEXP (x, 0)))])
2685 && qty_const[REG_QTY (REGNO (XEXP (x, 0)))])
2686 return 0;
2687
2688 /* This can happen as the result of virtual register instantiation, if
2689 the initial constant is too large to be a valid address. This gives
2690 us a three-instruction sequence: load the large offset into a register,
2691 load fp minus a constant into a register, then a MEM which is the
2692 sum of the two `constant' registers. */
2693 if (GET_CODE (x) == PLUS
2694 && GET_CODE (XEXP (x, 0)) == REG
2695 && GET_CODE (XEXP (x, 1)) == REG
2696 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2697 && (GET_MODE (XEXP (x, 0))
2698 == qty_mode[REG_QTY (REGNO (XEXP (x, 0)))])
2699 && qty_const[REG_QTY (REGNO (XEXP (x, 0)))]
2700 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1)))
2701 && (GET_MODE (XEXP (x, 1))
2702 == qty_mode[REG_QTY (REGNO (XEXP (x, 1)))])
2703 && qty_const[REG_QTY (REGNO (XEXP (x, 1)))])
2704 return 0;
2705
2706 return rtx_varies_p (x);
2707 }
2708 \f
2709 /* Canonicalize an expression:
2710 replace each register reference inside it
2711 with the "oldest" equivalent register.
2712
2713 If INSN is non-zero and we are replacing a pseudo with a hard register
2714 or vice versa, validate_change is used to ensure that INSN remains valid
2715 after we make our substitution. The calls are made with IN_GROUP non-zero
2716 so apply_change_group must be called upon the outermost return from this
2717 function (unless INSN is zero). The result of apply_change_group can
2718 generally be discarded since the changes we are making are optional. */
2719
2720 static rtx
2721 canon_reg (x, insn)
2722 rtx x;
2723 rtx insn;
2724 {
2725 register int i;
2726 register enum rtx_code code;
2727 register char *fmt;
2728
2729 if (x == 0)
2730 return x;
2731
2732 code = GET_CODE (x);
2733 switch (code)
2734 {
2735 case PC:
2736 case CC0:
2737 case CONST:
2738 case CONST_INT:
2739 case CONST_DOUBLE:
2740 case SYMBOL_REF:
2741 case LABEL_REF:
2742 case ADDR_VEC:
2743 case ADDR_DIFF_VEC:
2744 return x;
2745
2746 case REG:
2747 {
2748 register int first;
2749
2750 /* Never replace a hard reg, because hard regs can appear
2751 in more than one machine mode, and we must preserve the mode
2752 of each occurrence. Also, some hard regs appear in
2753 MEMs that are shared and mustn't be altered. Don't try to
2754 replace any reg that maps to a reg of class NO_REGS. */
2755 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2756 || ! REGNO_QTY_VALID_P (REGNO (x)))
2757 return x;
2758
2759 first = qty_first_reg[REG_QTY (REGNO (x))];
2760 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2761 : REGNO_REG_CLASS (first) == NO_REGS ? x
2762 : gen_rtx_REG (qty_mode[REG_QTY (REGNO (x))], first));
2763 }
2764
2765 default:
2766 break;
2767 }
2768
2769 fmt = GET_RTX_FORMAT (code);
2770 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2771 {
2772 register int j;
2773
2774 if (fmt[i] == 'e')
2775 {
2776 rtx new = canon_reg (XEXP (x, i), insn);
2777 int insn_code;
2778
2779 /* If replacing pseudo with hard reg or vice versa, ensure the
2780 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2781 if (insn != 0 && new != 0
2782 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2783 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2784 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2785 || (insn_code = recog_memoized (insn)) < 0
2786 || insn_n_dups[insn_code] > 0))
2787 validate_change (insn, &XEXP (x, i), new, 1);
2788 else
2789 XEXP (x, i) = new;
2790 }
2791 else if (fmt[i] == 'E')
2792 for (j = 0; j < XVECLEN (x, i); j++)
2793 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2794 }
2795
2796 return x;
2797 }
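/* For example (register numbers illustrative): if (reg 80) and
   (reg 65) share a quantity and reg 65 is the oldest, canon_reg
   rewrites (plus (reg 80) (const_int 4)) as
   (plus (reg 65) (const_int 4)), queuing the change on INSN via
   validate_change when hard/pseudo status differs.  */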
2798 \f
2799 /* LOC is a location within INSN that is an operand address (the contents of
2800 a MEM). Find the best equivalent address to use that is valid for this
2801 insn.
2802
2803 On most CISC machines, complicated address modes are costly, and rtx_cost
2804 is a good approximation for that cost. However, most RISC machines have
2805 only a few (usually only one) memory reference formats. If an address is
2806 valid at all, it is often just as cheap as any other address. Hence, for
2807 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2808 costs of various addresses. For two addresses of equal cost, choose the one
2809 with the highest `rtx_cost' value as that has the potential of eliminating
2810 the most insns. For equal costs, we choose the first in the equivalence
2811 class. Note that we ignore the fact that pseudo registers are cheaper
2812 than hard registers here because we would also prefer the pseudo registers.
2813 */
2814
2815 static void
2816 find_best_addr (insn, loc)
2817 rtx insn;
2818 rtx *loc;
2819 {
2820 struct table_elt *elt;
2821 rtx addr = *loc;
2822 #ifdef ADDRESS_COST
2823 struct table_elt *p;
2824 int found_better = 1;
2825 #endif
2826 int save_do_not_record = do_not_record;
2827 int save_hash_arg_in_memory = hash_arg_in_memory;
2828 int save_hash_arg_in_struct = hash_arg_in_struct;
2829 int addr_volatile;
2830 int regno;
2831 unsigned hash;
2832
2833 /* Do not try to replace constant addresses or addresses of local and
2834 argument slots. These MEM expressions are made only once and inserted
2835 in many instructions, as well as being used to control symbol table
2836 output. It is not safe to clobber them.
2837
2838 There are some uncommon cases where the address is already in a register
2839 for some reason, but we cannot take advantage of that because we have
2840 no easy way to unshare the MEM. In addition, looking up all stack
2841 addresses is costly. */
2842 if ((GET_CODE (addr) == PLUS
2843 && GET_CODE (XEXP (addr, 0)) == REG
2844 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2845 && (regno = REGNO (XEXP (addr, 0)),
2846 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2847 || regno == ARG_POINTER_REGNUM))
2848 || (GET_CODE (addr) == REG
2849 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2850 || regno == HARD_FRAME_POINTER_REGNUM
2851 || regno == ARG_POINTER_REGNUM))
2852 || GET_CODE (addr) == ADDRESSOF
2853 || CONSTANT_ADDRESS_P (addr))
2854 return;
2855
2856 /* If this address is not simply a register, try to fold it. This will
2857 sometimes simplify the expression. Many simplifications
2858 will not be valid, but some, usually applying the associative rule, will
2859 be valid and produce better code. */
2860 if (GET_CODE (addr) != REG)
2861 {
2862 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2863
2864 if (1
2865 #ifdef ADDRESS_COST
2866 && (CSE_ADDRESS_COST (folded) < CSE_ADDRESS_COST (addr)
2867 || (CSE_ADDRESS_COST (folded) == CSE_ADDRESS_COST (addr)
2868 && rtx_cost (folded, MEM) > rtx_cost (addr, MEM)))
2869 #else
2870 && rtx_cost (folded, MEM) < rtx_cost (addr, MEM)
2871 #endif
2872 && validate_change (insn, loc, folded, 0))
2873 addr = folded;
2874 }
2875
2876 /* If this address is not in the hash table, we can't look for equivalences
2877 of the whole address. Also, ignore if volatile. */
2878
2879 do_not_record = 0;
2880 hash = HASH (addr, Pmode);
2881 addr_volatile = do_not_record;
2882 do_not_record = save_do_not_record;
2883 hash_arg_in_memory = save_hash_arg_in_memory;
2884 hash_arg_in_struct = save_hash_arg_in_struct;
2885
2886 if (addr_volatile)
2887 return;
2888
2889 elt = lookup (addr, hash, Pmode);
2890
2891 #ifndef ADDRESS_COST
2892 if (elt)
2893 {
2894 int our_cost = elt->cost;
2895
2896 /* Find the lowest cost below ours that works. */
2897 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2898 if (elt->cost < our_cost
2899 && (GET_CODE (elt->exp) == REG
2900 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2901 && validate_change (insn, loc,
2902 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2903 return;
2904 }
2905 #else
2906
2907 if (elt)
2908 {
2909 /* We need to find the best (under the criteria documented above) entry
2910 in the class that is valid. We use the `flag' field to indicate
2911 choices that were invalid and iterate until we can't find a better
2912 one that hasn't already been tried. */
2913
2914 for (p = elt->first_same_value; p; p = p->next_same_value)
2915 p->flag = 0;
2916
2917 while (found_better)
2918 {
2919 int best_addr_cost = CSE_ADDRESS_COST (*loc);
2920 int best_rtx_cost = (elt->cost + 1) >> 1;
2921 struct table_elt *best_elt = elt;
2922
2923 found_better = 0;
2924 for (p = elt->first_same_value; p; p = p->next_same_value)
2925 if (! p->flag)
2926 {
2927 if ((GET_CODE (p->exp) == REG
2928 || exp_equiv_p (p->exp, p->exp, 1, 0))
2929 && (CSE_ADDRESS_COST (p->exp) < best_addr_cost
2930 || (CSE_ADDRESS_COST (p->exp) == best_addr_cost
2931 && (p->cost + 1) >> 1 > best_rtx_cost)))
2932 {
2933 found_better = 1;
2934 best_addr_cost = CSE_ADDRESS_COST (p->exp);
2935 best_rtx_cost = (p->cost + 1) >> 1;
2936 best_elt = p;
2937 }
2938 }
2939
2940 if (found_better)
2941 {
2942 if (validate_change (insn, loc,
2943 canon_reg (copy_rtx (best_elt->exp),
2944 NULL_RTX), 0))
2945 return;
2946 else
2947 best_elt->flag = 1;
2948 }
2949 }
2950 }
2951
2952 /* If the address is a binary operation with the first operand a register
2953 and the second a constant, do the same as above, but looking for
2954 equivalences of the register. Then try to simplify before checking for
2955 the best address to use. This catches a few cases: the first is when we
2956 have REG+const and the register is known to equal another REG+const. We can often merge
2957 the constants and eliminate one insn and one register. It may also be
2958 that a machine has a cheap REG+REG+const. Finally, this improves the
2959 code on the Alpha for unaligned byte stores. */
2960
2961 if (flag_expensive_optimizations
2962 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2963 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2964 && GET_CODE (XEXP (*loc, 0)) == REG
2965 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2966 {
2967 rtx c = XEXP (*loc, 1);
2968
2969 do_not_record = 0;
2970 hash = HASH (XEXP (*loc, 0), Pmode);
2971 do_not_record = save_do_not_record;
2972 hash_arg_in_memory = save_hash_arg_in_memory;
2973 hash_arg_in_struct = save_hash_arg_in_struct;
2974
2975 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2976 if (elt == 0)
2977 return;
2978
2979 /* We need to find the best (under the criteria documented above) entry
2980 in the class that is valid. We use the `flag' field to indicate
2981 choices that were invalid and iterate until we can't find a better
2982 one that hasn't already been tried. */
2983
2984 for (p = elt->first_same_value; p; p = p->next_same_value)
2985 p->flag = 0;
2986
2987 while (found_better)
2988 {
2989 int best_addr_cost = CSE_ADDRESS_COST (*loc);
2990 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2991 struct table_elt *best_elt = elt;
2992 rtx best_rtx = *loc;
2993 int count;
2994
2995 /* This is in the worst case an O(n^2) algorithm, so limit our search
2996 to the first 32 elements on the list. This avoids trouble
2997 compiling code with very long basic blocks that can easily
2998 call cse_gen_binary so many times that we run out of memory. */
2999
3000 found_better = 0;
3001 for (p = elt->first_same_value, count = 0;
3002 p && count < 32;
3003 p = p->next_same_value, count++)
3004 if (! p->flag
3005 && (GET_CODE (p->exp) == REG
3006 || exp_equiv_p (p->exp, p->exp, 1, 0)))
3007 {
3008 rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);
3009
3010 if ((CSE_ADDRESS_COST (new) < best_addr_cost
3011 || (CSE_ADDRESS_COST (new) == best_addr_cost
3012 && (COST (new) + 1) >> 1 > best_rtx_cost)))
3013 {
3014 found_better = 1;
3015 best_addr_cost = CSE_ADDRESS_COST (new);
3016 best_rtx_cost = (COST (new) + 1) >> 1;
3017 best_elt = p;
3018 best_rtx = new;
3019 }
3020 }
3021
3022 if (found_better)
3023 {
3024 if (validate_change (insn, loc,
3025 canon_reg (copy_rtx (best_rtx),
3026 NULL_RTX), 0))
3027 return;
3028 else
3029 best_elt->flag = 1;
3030 }
3031 }
3032 }
3033 #endif
3034 }
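/* A standalone sketch (function name invented) of the preference test
   used in both loops above: a candidate wins on strictly lower
   ADDRESS_COST, or on equal ADDRESS_COST with a higher halved
   rtx_cost, since replacing the more expensive expression can
   eliminate more insns.  */

static int
better_address_p (addr_cost, half_rtx_cost, best_addr_cost, best_half_rtx_cost)
     int addr_cost, half_rtx_cost, best_addr_cost, best_half_rtx_cost;
{
  return (addr_cost < best_addr_cost
	  || (addr_cost == best_addr_cost
	      && half_rtx_cost > best_half_rtx_cost));
}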
3035 \f
3036 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3037 operation (EQ, NE, GT, etc.), follow it back through the hash table to
3038 find what values are actually being compared.
3039
3040 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3041 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3042 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3043 compared to produce cc0.
3044
3045 The return value is the comparison operator: either CODE itself or the
3046 code corresponding to the inverse of the comparison. */
3047
3048 static enum rtx_code
3049 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
3050 enum rtx_code code;
3051 rtx *parg1, *parg2;
3052 enum machine_mode *pmode1, *pmode2;
3053 {
3054 rtx arg1, arg2;
3055
3056 arg1 = *parg1, arg2 = *parg2;
3057
3058 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3059
3060 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3061 {
3062 /* Set non-zero when we find something of interest. */
3063 rtx x = 0;
3064 int reverse_code = 0;
3065 struct table_elt *p = 0;
3066
3067 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3068 On machines with CC0, this is the only case that can occur, since
3069 fold_rtx will return the COMPARE or item being compared with zero
3070 when given CC0. */
3071
3072 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3073 x = arg1;
3074
3075 /* If ARG1 is a comparison operator and CODE is testing for
3076 STORE_FLAG_VALUE, get the inner arguments. */
3077
3078 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
3079 {
3080 if (code == NE
3081 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3082 && code == LT && STORE_FLAG_VALUE == -1)
3083 #ifdef FLOAT_STORE_FLAG_VALUE
3084 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3085 && FLOAT_STORE_FLAG_VALUE < 0)
3086 #endif
3087 )
3088 x = arg1;
3089 else if (code == EQ
3090 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3091 && code == GE && STORE_FLAG_VALUE == -1)
3092 #ifdef FLOAT_STORE_FLAG_VALUE
3093 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3094 && FLOAT_STORE_FLAG_VALUE < 0)
3095 #endif
3096 )
3097 x = arg1, reverse_code = 1;
3098 }
3099
3100 /* ??? We could also check for
3101
3102 (ne (and (eq (...) (const_int 1))) (const_int 0))
3103
3104 and related forms, but let's wait until we see them occurring. */
3105
3106 if (x == 0)
3107 /* Look up ARG1 in the hash table and see if it has an equivalence
3108 that lets us see what is being compared. */
3109 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
3110 GET_MODE (arg1));
3111 if (p) p = p->first_same_value;
3112
3113 for (; p; p = p->next_same_value)
3114 {
3115 enum machine_mode inner_mode = GET_MODE (p->exp);
3116
3117 /* If the entry isn't valid, skip it. */
3118 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3119 continue;
3120
3121 if (GET_CODE (p->exp) == COMPARE
3122 /* Another possibility is that this machine has a compare insn
3123 that includes the comparison code. In that case, ARG1 would
3124 be equivalent to a comparison operation that would set ARG1 to
3125 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3126 ORIG_CODE is the actual comparison being done; if it is an EQ,
3127 we must reverse ORIG_CODE. On machines with a negative value
3128 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3129 || ((code == NE
3130 || (code == LT
3131 && GET_MODE_CLASS (inner_mode) == MODE_INT
3132 && (GET_MODE_BITSIZE (inner_mode)
3133 <= HOST_BITS_PER_WIDE_INT)
3134 && (STORE_FLAG_VALUE
3135 & ((HOST_WIDE_INT) 1
3136 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3137 #ifdef FLOAT_STORE_FLAG_VALUE
3138 || (code == LT
3139 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3140 && FLOAT_STORE_FLAG_VALUE < 0)
3141 #endif
3142 )
3143 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3144 {
3145 x = p->exp;
3146 break;
3147 }
3148 else if ((code == EQ
3149 || (code == GE
3150 && GET_MODE_CLASS (inner_mode) == MODE_INT
3151 && (GET_MODE_BITSIZE (inner_mode)
3152 <= HOST_BITS_PER_WIDE_INT)
3153 && (STORE_FLAG_VALUE
3154 & ((HOST_WIDE_INT) 1
3155 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3156 #ifdef FLOAT_STORE_FLAG_VALUE
3157 || (code == GE
3158 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3159 && FLOAT_STORE_FLAG_VALUE < 0)
3160 #endif
3161 )
3162 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3163 {
3164 reverse_code = 1;
3165 x = p->exp;
3166 break;
3167 }
3168
3169 /* If this is fp + constant, the equivalent is a better operand since
3170 it may let us predict the value of the comparison. */
3171 else if (NONZERO_BASE_PLUS_P (p->exp))
3172 {
3173 arg1 = p->exp;
3174 continue;
3175 }
3176 }
3177
3178 /* If we didn't find a useful equivalence for ARG1, we are done.
3179 Otherwise, set up for the next iteration. */
3180 if (x == 0)
3181 break;
3182
3183 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3184 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3185 code = GET_CODE (x);
3186
3187 if (reverse_code)
3188 code = reverse_condition (code);
3189 }
3190
3191 /* Return our results. Return the modes from before fold_rtx
3192 because fold_rtx might produce const_int, and then it's too late. */
3193 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3194 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3195
3196 return code;
3197 }
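/* For example (register numbers illustrative): called with CODE = NE,
   *PARG1 = (reg 70) and *PARG2 = (const_int 0), where the table shows
   (reg 70) equivalent to (compare (reg 65) (reg 66)), the loop above
   rewrites the arguments to (reg 65) and (reg 66) and returns NE.  */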
3198 \f
3199 /* Try to simplify a unary operation CODE whose output mode is to be
3200 MODE with input operand OP whose mode was originally OP_MODE.
3201 Return zero if no simplification can be made. */
3202
3203 rtx
3204 simplify_unary_operation (code, mode, op, op_mode)
3205 enum rtx_code code;
3206 enum machine_mode mode;
3207 rtx op;
3208 enum machine_mode op_mode;
3209 {
3210 register int width = GET_MODE_BITSIZE (mode);
3211
3212 /* The order of these tests is critical so that, for example, we don't
3213 check the wrong mode (input vs. output) for a conversion operation,
3214 such as FIX. At some point, this should be simplified. */
3215
3216 #if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC)
3217
3218 if (code == FLOAT && GET_MODE (op) == VOIDmode
3219 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3220 {
3221 HOST_WIDE_INT hv, lv;
3222 REAL_VALUE_TYPE d;
3223
3224 if (GET_CODE (op) == CONST_INT)
3225 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
3226 else
3227 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
3228
3229 #ifdef REAL_ARITHMETIC
3230 REAL_VALUE_FROM_INT (d, lv, hv, mode);
3231 #else
3232 if (hv < 0)
3233 {
3234 d = (double) (~ hv);
3235 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3236 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3237 d += (double) (unsigned HOST_WIDE_INT) (~ lv);
3238 d = (- d - 1.0);
3239 }
3240 else
3241 {
3242 d = (double) hv;
3243 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3244 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3245 d += (double) (unsigned HOST_WIDE_INT) lv;
3246 }
3247 #endif /* REAL_ARITHMETIC */
3248 d = real_value_truncate (mode, d);
3249 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3250 }
3251 else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
3252 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3253 {
3254 HOST_WIDE_INT hv, lv;
3255 REAL_VALUE_TYPE d;
3256
3257 if (GET_CODE (op) == CONST_INT)
3258 lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
3259 else
3260 lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
3261
3262 if (op_mode == VOIDmode)
3263 {
3264 /* We don't know how to interpret negative-looking numbers in
3265 this case, so don't try to fold those. */
3266 if (hv < 0)
3267 return 0;
3268 }
3269 else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
3270 ;
3271 else
3272 hv = 0, lv &= GET_MODE_MASK (op_mode);
3273
3274 #ifdef REAL_ARITHMETIC
3275 REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv, mode);
3276 #else
3277
3278 d = (double) (unsigned HOST_WIDE_INT) hv;
3279 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3280 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3281 d += (double) (unsigned HOST_WIDE_INT) lv;
3282 #endif /* REAL_ARITHMETIC */
3283 d = real_value_truncate (mode, d);
3284 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3285 }
3286 #endif
3287
3288 if (GET_CODE (op) == CONST_INT
3289 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3290 {
3291 register HOST_WIDE_INT arg0 = INTVAL (op);
3292 register HOST_WIDE_INT val;
3293
3294 switch (code)
3295 {
3296 case NOT:
3297 val = ~ arg0;
3298 break;
3299
3300 case NEG:
3301 val = - arg0;
3302 break;
3303
3304 case ABS:
3305 val = (arg0 >= 0 ? arg0 : - arg0);
3306 break;
3307
3308 case FFS:
3309 /* Don't use ffs here. Instead, get low order bit and then its
3310 number. If arg0 is zero, this will return 0, as desired. */
3311 arg0 &= GET_MODE_MASK (mode);
3312 val = exact_log2 (arg0 & (- arg0)) + 1;
3313 break;
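/* For example, arg0 == 12 (binary 1100): arg0 & -arg0 isolates the
   lowest set bit, 4; exact_log2 (4) is 2; adding 1 gives 3, the
   1-based position that FFS is defined to return.  */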
3314
3315 case TRUNCATE:
3316 val = arg0;
3317 break;
3318
3319 case ZERO_EXTEND:
3320 if (op_mode == VOIDmode)
3321 op_mode = mode;
3322 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3323 {
3324 /* If we were really extending the mode,
3325 we would have to distinguish between zero-extension
3326 and sign-extension. */
3327 if (width != GET_MODE_BITSIZE (op_mode))
3328 abort ();
3329 val = arg0;
3330 }
3331 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3332 val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3333 else
3334 return 0;
3335 break;
3336
3337 case SIGN_EXTEND:
3338 if (op_mode == VOIDmode)
3339 op_mode = mode;
3340 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3341 {
3342 /* If we were really extending the mode,
3343 we would have to distinguish between zero-extension
3344 and sign-extension. */
3345 if (width != GET_MODE_BITSIZE (op_mode))
3346 abort ();
3347 val = arg0;
3348 }
3349 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3350 {
3351 val
3352 = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3353 if (val
3354 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
3355 val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3356 }
3357 else
3358 return 0;
3359 break;
3360
3361 case SQRT:
3362 return 0;
3363
3364 default:
3365 abort ();
3366 }
3367
3368 val = trunc_int_for_mode (val, mode);
3369
3370 return GEN_INT (val);
3371 }
3372
3373 /* We can do some operations on integer CONST_DOUBLEs. Also allow
3374 for a DImode operation on a CONST_INT. */
3375 else if (GET_MODE (op) == VOIDmode && width <= HOST_BITS_PER_INT * 2
3376 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3377 {
3378 HOST_WIDE_INT l1, h1, lv, hv;
3379
3380 if (GET_CODE (op) == CONST_DOUBLE)
3381 l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
3382 else
3383 l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;
3384
3385 switch (code)
3386 {
3387 case NOT:
3388 lv = ~ l1;
3389 hv = ~ h1;
3390 break;
3391
3392 case NEG:
3393 neg_double (l1, h1, &lv, &hv);
3394 break;
3395
3396 case ABS:
3397 if (h1 < 0)
3398 neg_double (l1, h1, &lv, &hv);
3399 else
3400 lv = l1, hv = h1;
3401 break;
3402
3403 case FFS:
3404 hv = 0;
3405 if (l1 == 0)
3406 lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
3407 else
3408 lv = exact_log2 (l1 & (-l1)) + 1;
3409 break;
3410
3411 case TRUNCATE:
3412 /* This is just a change-of-mode, so do nothing. */
3413 lv = l1, hv = h1;
3414 break;
3415
3416 case ZERO_EXTEND:
3417 if (op_mode == VOIDmode
3418 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3419 return 0;
3420
3421 hv = 0;
3422 lv = l1 & GET_MODE_MASK (op_mode);
3423 break;
3424
3425 case SIGN_EXTEND:
3426 if (op_mode == VOIDmode
3427 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3428 return 0;
3429 else
3430 {
3431 lv = l1 & GET_MODE_MASK (op_mode);
3432 if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
3433 && (lv & ((HOST_WIDE_INT) 1
3434 << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
3435 lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3436
3437 hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
3438 }
3439 break;
3440
3441 case SQRT:
3442 return 0;
3443
3444 default:
3445 return 0;
3446 }
3447
3448 return immed_double_const (lv, hv, mode);
3449 }
3450
3451 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3452 else if (GET_CODE (op) == CONST_DOUBLE
3453 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3454 {
3455 REAL_VALUE_TYPE d;
3456 jmp_buf handler;
3457 rtx x;
3458
3459 if (setjmp (handler))
3460 /* There used to be a warning here, but that is inadvisable.
3461 People may want to cause traps, and the natural way
3462 to do it should not get a warning. */
3463 return 0;
3464
3465 set_float_handler (handler);
3466
3467 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3468
3469 switch (code)
3470 {
3471 case NEG:
3472 d = REAL_VALUE_NEGATE (d);
3473 break;
3474
3475 case ABS:
3476 if (REAL_VALUE_NEGATIVE (d))
3477 d = REAL_VALUE_NEGATE (d);
3478 break;
3479
3480 case FLOAT_TRUNCATE:
3481 d = real_value_truncate (mode, d);
3482 break;
3483
3484 case FLOAT_EXTEND:
3485 /* All this does is change the mode. */
3486 break;
3487
3488 case FIX:
3489 d = REAL_VALUE_RNDZINT (d);
3490 break;
3491
3492 case UNSIGNED_FIX:
3493 d = REAL_VALUE_UNSIGNED_RNDZINT (d);
3494 break;
3495
3496 case SQRT:
3497 return 0;
3498
3499 default:
3500 abort ();
3501 }
3502
3503 x = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3504 set_float_handler (NULL_PTR);
3505 return x;
3506 }
3507
3508 else if (GET_CODE (op) == CONST_DOUBLE
3509 && GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
3510 && GET_MODE_CLASS (mode) == MODE_INT
3511 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3512 {
3513 REAL_VALUE_TYPE d;
3514 jmp_buf handler;
3515 HOST_WIDE_INT val;
3516
3517 if (setjmp (handler))
3518 return 0;
3519
3520 set_float_handler (handler);
3521
3522 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3523
3524 switch (code)
3525 {
3526 case FIX:
3527 val = REAL_VALUE_FIX (d);
3528 break;
3529
3530 case UNSIGNED_FIX:
3531 val = REAL_VALUE_UNSIGNED_FIX (d);
3532 break;
3533
3534 default:
3535 abort ();
3536 }
3537
3538 set_float_handler (NULL_PTR);
3539
3540 val = trunc_int_for_mode (val, mode);
3541
3542 return GEN_INT (val);
3543 }
3544 #endif
3545 /* This was formerly used only for non-IEEE float.
3546 eggert@twinsun.com says it is safe for IEEE also. */
3547 else
3548 {
3549 /* There are some simplifications we can do even if the operands
3550 aren't constant. */
3551 switch (code)
3552 {
3553 case NEG:
3554 case NOT:
3555 /* (not (not X)) == X, similarly for NEG. */
3556 if (GET_CODE (op) == code)
3557 return XEXP (op, 0);
3558 break;
3559
3560 case SIGN_EXTEND:
3561 /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
3562 becomes just the MINUS if its mode is MODE. This allows
3563 folding switch statements on machines using casesi (such as
3564 the Vax). */
3565 if (GET_CODE (op) == TRUNCATE
3566 && GET_MODE (XEXP (op, 0)) == mode
3567 && GET_CODE (XEXP (op, 0)) == MINUS
3568 && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
3569 && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
3570 return XEXP (op, 0);
3571
3572 #ifdef POINTERS_EXTEND_UNSIGNED
3573 if (! POINTERS_EXTEND_UNSIGNED
3574 && mode == Pmode && GET_MODE (op) == ptr_mode
3575 && CONSTANT_P (op))
3576 return convert_memory_address (Pmode, op);
3577 #endif
3578 break;
3579
3580 #ifdef POINTERS_EXTEND_UNSIGNED
3581 case ZERO_EXTEND:
3582 if (POINTERS_EXTEND_UNSIGNED
3583 && mode == Pmode && GET_MODE (op) == ptr_mode
3584 && CONSTANT_P (op))
3585 return convert_memory_address (Pmode, op);
3586 break;
3587 #endif
3588
3589 default:
3590 break;
3591 }
3592
3593 return 0;
3594 }
3595 }
3596 \f
3597 /* Simplify a binary operation CODE with result mode MODE, operating on OP0
3598 and OP1. Return 0 if no simplification is possible.
3599
3600 Don't use this for relational operations such as EQ or LT.
3601 Use simplify_relational_operation instead. */
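/* Illustrative example (hypothetical operands): simplify_binary_operation
(PLUS, SImode, GEN_INT (2), GEN_INT (3)) folds to (const_int 5), while
a pair with no known folding, e.g. two distinct registers, yields 0.  */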
3602
3603 rtx
3604 simplify_binary_operation (code, mode, op0, op1)
3605 enum rtx_code code;
3606 enum machine_mode mode;
3607 rtx op0, op1;
3608 {
3609 register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3610 HOST_WIDE_INT val;
3611 int width = GET_MODE_BITSIZE (mode);
3612 rtx tem;
3613
3614 /* Relational operations don't work here. We must know the mode
3615 of the operands in order to do the comparison correctly.
3616 Assuming a full word can give incorrect results.
3617 Consider comparing 128 with -128 in QImode, where both are the bit pattern 0x80.  */
3618
3619 if (GET_RTX_CLASS (code) == '<')
3620 abort ();
3621
3622 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3623 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3624 && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3625 && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3626 {
3627 REAL_VALUE_TYPE f0, f1, value;
3628 jmp_buf handler;
3629
3630 if (setjmp (handler))
3631 return 0;
3632
3633 set_float_handler (handler);
3634
3635 REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3636 REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
3637 f0 = real_value_truncate (mode, f0);
3638 f1 = real_value_truncate (mode, f1);
3639
3640 #ifdef REAL_ARITHMETIC
3641 #ifndef REAL_INFINITY
3642 if (code == DIV && REAL_VALUES_EQUAL (f1, dconst0))
3643 return 0;
3644 #endif
3645 REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
3646 #else
3647 switch (code)
3648 {
3649 case PLUS:
3650 value = f0 + f1;
3651 break;
3652 case MINUS:
3653 value = f0 - f1;
3654 break;
3655 case MULT:
3656 value = f0 * f1;
3657 break;
3658 case DIV:
3659 #ifndef REAL_INFINITY
3660 if (f1 == 0)
3661 return 0;
3662 #endif
3663 value = f0 / f1;
3664 break;
3665 case SMIN:
3666 value = MIN (f0, f1);
3667 break;
3668 case SMAX:
3669 value = MAX (f0, f1);
3670 break;
3671 default:
3672 abort ();
3673 }
3674 #endif
3675
3676 value = real_value_truncate (mode, value);
3677 set_float_handler (NULL_PTR);
3678 return CONST_DOUBLE_FROM_REAL_VALUE (value, mode);
3679 }
3680 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3681
3682 /* We can fold some multi-word operations. */
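/* E.g., with a 32-bit HOST_WIDE_INT and a 64-bit MODE, each constant is
held as a low/high word pair: (plus (const_int -1) (const_int 1)) is
computed by add_double on the pairs (-1,-1) and (1,0), giving (0,0),
i.e. (const_int 0).  */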
3683 if (GET_MODE_CLASS (mode) == MODE_INT
3684 && width == HOST_BITS_PER_WIDE_INT * 2
3685 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
3686 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
3687 {
3688 HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
3689
3690 if (GET_CODE (op0) == CONST_DOUBLE)
3691 l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3692 else
3693 l1 = INTVAL (op0), h1 = l1 < 0 ? -1 : 0;
3694
3695 if (GET_CODE (op1) == CONST_DOUBLE)
3696 l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3697 else
3698 l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3699
3700 switch (code)
3701 {
3702 case MINUS:
3703 /* A - B == A + (-B). */
3704 neg_double (l2, h2, &lv, &hv);
3705 l2 = lv, h2 = hv;
3706
3707 /* ... fall through ... */
3708
3709 case PLUS:
3710 add_double (l1, h1, l2, h2, &lv, &hv);
3711 break;
3712
3713 case MULT:
3714 mul_double (l1, h1, l2, h2, &lv, &hv);
3715 break;
3716
3717 case DIV: case MOD: case UDIV: case UMOD:
3718 /* We'd need to include tree.h to do this and it doesn't seem worth
3719 it. */
3720 return 0;
3721
3722 case AND:
3723 lv = l1 & l2, hv = h1 & h2;
3724 break;
3725
3726 case IOR:
3727 lv = l1 | l2, hv = h1 | h2;
3728 break;
3729
3730 case XOR:
3731 lv = l1 ^ l2, hv = h1 ^ h2;
3732 break;
3733
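/* In the double-word min/max cases below, the high words carry the sign,
so they are compared signed (unsigned for UMIN/UMAX), while the low
words are always compared unsigned, since they carry no sign bit.  */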
3734 case SMIN:
3735 if (h1 < h2
3736 || (h1 == h2
3737 && ((unsigned HOST_WIDE_INT) l1
3738 < (unsigned HOST_WIDE_INT) l2)))
3739 lv = l1, hv = h1;
3740 else
3741 lv = l2, hv = h2;
3742 break;
3743
3744 case SMAX:
3745 if (h1 > h2
3746 || (h1 == h2
3747 && ((unsigned HOST_WIDE_INT) l1
3748 > (unsigned HOST_WIDE_INT) l2)))
3749 lv = l1, hv = h1;
3750 else
3751 lv = l2, hv = h2;
3752 break;
3753
3754 case UMIN:
3755 if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3756 || (h1 == h2
3757 && ((unsigned HOST_WIDE_INT) l1
3758 < (unsigned HOST_WIDE_INT) l2)))
3759 lv = l1, hv = h1;
3760 else
3761 lv = l2, hv = h2;
3762 break;
3763
3764 case UMAX:
3765 if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3766 || (h1 == h2
3767 && ((unsigned HOST_WIDE_INT) l1
3768 > (unsigned HOST_WIDE_INT) l2)))
3769 lv = l1, hv = h1;
3770 else
3771 lv = l2, hv = h2;
3772 break;
3773
3774 case LSHIFTRT: case ASHIFTRT:
3775 case ASHIFT:
3776 case ROTATE: case ROTATERT:
3777 #ifdef SHIFT_COUNT_TRUNCATED
3778 if (SHIFT_COUNT_TRUNCATED)
3779 l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
3780 #endif
3781
3782 if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3783 return 0;
3784
3785 if (code == LSHIFTRT || code == ASHIFTRT)
3786 rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3787 code == ASHIFTRT);
3788 else if (code == ASHIFT)
3789 lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
3790 else if (code == ROTATE)
3791 lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3792 else /* code == ROTATERT */
3793 rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3794 break;
3795
3796 default:
3797 return 0;
3798 }
3799
3800 return immed_double_const (lv, hv, mode);
3801 }
3802
3803 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
3804 || width > HOST_BITS_PER_WIDE_INT || width == 0)
3805 {
3806 /* Even if we can't compute a constant result,
3807 there are some cases worth simplifying. */
3808
3809 switch (code)
3810 {
3811 case PLUS:
3812 /* In IEEE floating point, x+0 is not the same as x. Similarly
3813 for the other optimizations below. */
3814 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3815 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3816 break;
3817
3818 if (op1 == CONST0_RTX (mode))
3819 return op0;
3820
3821 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3822 if (GET_CODE (op0) == NEG)
3823 return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
3824 else if (GET_CODE (op1) == NEG)
3825 return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
3826
3827 /* Handle both-operands-constant cases. We can only add
3828 CONST_INTs to constants since the sum of relocatable symbols
3829 can't be handled by most assemblers. Don't add CONST_INT
3830 to CONST_INT since overflow won't be computed properly if wider
3831 than HOST_BITS_PER_WIDE_INT. */
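/* For instance, (plus (symbol_ref "x") (const_int 4)) becomes
(const (plus (symbol_ref "x") (const_int 4))) via plus_constant.  */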
3832
3833 if (CONSTANT_P (op0) && GET_MODE (op0) != VOIDmode
3834 && GET_CODE (op1) == CONST_INT)
3835 return plus_constant (op0, INTVAL (op1));
3836 else if (CONSTANT_P (op1) && GET_MODE (op1) != VOIDmode
3837 && GET_CODE (op0) == CONST_INT)
3838 return plus_constant (op1, INTVAL (op0));
3839
3840 /* See if this is something like X * C + X or vice versa or
3841 if the multiplication is written as a shift.  If so, we can
3842 distribute and make a new multiply, shift, or maybe just
3843 have X * (C + 1).  But don't make a real multiply if we
3844 didn't have one before.  */
3845
3846 if (! FLOAT_MODE_P (mode))
3847 {
3848 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3849 rtx lhs = op0, rhs = op1;
3850 int had_mult = 0;
3851
3852 if (GET_CODE (lhs) == NEG)
3853 coeff0 = -1, lhs = XEXP (lhs, 0);
3854 else if (GET_CODE (lhs) == MULT
3855 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3856 {
3857 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3858 had_mult = 1;
3859 }
3860 else if (GET_CODE (lhs) == ASHIFT
3861 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3862 && INTVAL (XEXP (lhs, 1)) >= 0
3863 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3864 {
3865 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3866 lhs = XEXP (lhs, 0);
3867 }
3868
3869 if (GET_CODE (rhs) == NEG)
3870 coeff1 = -1, rhs = XEXP (rhs, 0);
3871 else if (GET_CODE (rhs) == MULT
3872 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3873 {
3874 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3875 had_mult = 1;
3876 }
3877 else if (GET_CODE (rhs) == ASHIFT
3878 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3879 && INTVAL (XEXP (rhs, 1)) >= 0
3880 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3881 {
3882 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3883 rhs = XEXP (rhs, 0);
3884 }
3885
3886 if (rtx_equal_p (lhs, rhs))
3887 {
3888 tem = cse_gen_binary (MULT, mode, lhs,
3889 GEN_INT (coeff0 + coeff1));
3890 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3891 }
3892 }
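/* E.g., (plus (mult X 4) X) folds to (mult X 5) here, whereas
(plus (ashift X 2) X) is left alone: it would also give (mult X 5),
but no real multiply was present before.  */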
3893
3894 /* If one of the operands is a PLUS or a MINUS, see if we can
3895 simplify this by the associative law.
3896 Don't use the associative law for floating point.
3897 The inaccuracy makes it nonassociative,
3898 and subtle programs can break if operations are associated. */
3899
3900 if (INTEGRAL_MODE_P (mode)
3901 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3902 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3903 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3904 return tem;
3905 break;
3906
3907 case COMPARE:
3908 #ifdef HAVE_cc0
3909 /* Convert (compare FOO (const_int 0)) to FOO.  We only do this
3910 when using cc0; otherwise we want to leave it as a COMPARE
3911 so we can distinguish it from a register-register copy.
3912
3913 In IEEE floating point, x-0 is not the same as x. */
3914
3915 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3916 || ! FLOAT_MODE_P (mode) || flag_fast_math)
3917 && op1 == CONST0_RTX (mode))
3918 return op0;
3919 #else
3920 /* Do nothing here. */
3921 #endif
3922 break;
3923
3924 case MINUS:
3925 /* None of these optimizations can be done for IEEE
3926 floating point. */
3927 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3928 && FLOAT_MODE_P (mode) && ! flag_fast_math)
3929 break;
3930
3931 /* We can't assume x-x is 0 even with non-IEEE floating point,
3932 but since it is zero except in very strange circumstances, we
3933 will treat it as zero with -ffast-math. */
3934 if (rtx_equal_p (op0, op1)
3935 && ! side_effects_p (op0)
3936 && (! FLOAT_MODE_P (mode) || flag_fast_math))
3937 return CONST0_RTX (mode);
3938
3939 /* Change subtraction from zero into negation. */
3940 if (op0 == CONST0_RTX (mode))
3941 return gen_rtx_NEG (mode, op1);
3942
3943 /* (-1 - a) is ~a. */
3944 if (op0 == constm1_rtx)
3945 return gen_rtx_NOT (mode, op1);
3946
3947 /* Subtracting 0 has no effect. */
3948 if (op1 == CONST0_RTX (mode))
3949 return op0;
3950
3951 /* See if this is something like X * C - X or vice versa or
3952 if the multiplication is written as a shift. If so, we can
3953 distribute and make a new multiply, shift, or maybe just
3954 have X (if C is 2 in the example above).  But don't make
3955 a real multiply if we didn't have one before.  */
3956
3957 if (! FLOAT_MODE_P (mode))
3958 {
3959 HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3960 rtx lhs = op0, rhs = op1;
3961 int had_mult = 0;
3962
3963 if (GET_CODE (lhs) == NEG)
3964 coeff0 = -1, lhs = XEXP (lhs, 0);
3965 else if (GET_CODE (lhs) == MULT
3966 && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3967 {
3968 coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3969 had_mult = 1;
3970 }
3971 else if (GET_CODE (lhs) == ASHIFT
3972 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3973 && INTVAL (XEXP (lhs, 1)) >= 0
3974 && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3975 {
3976 coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3977 lhs = XEXP (lhs, 0);
3978 }
3979
3980 if (GET_CODE (rhs) == NEG)
3981 coeff1 = - 1, rhs = XEXP (rhs, 0);
3982 else if (GET_CODE (rhs) == MULT
3983 && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3984 {
3985 coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3986 had_mult = 1;
3987 }
3988 else if (GET_CODE (rhs) == ASHIFT
3989 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3990 && INTVAL (XEXP (rhs, 1)) >= 0
3991 && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3992 {
3993 coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3994 rhs = XEXP (rhs, 0);
3995 }
3996
3997 if (rtx_equal_p (lhs, rhs))
3998 {
3999 tem = cse_gen_binary (MULT, mode, lhs,
4000 GEN_INT (coeff0 - coeff1));
4001 return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
4002 }
4003 }
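/* E.g., (minus (mult X 3) X) distributes to X * 2, which may itself
fold further, e.g. to (ashift X 1).  */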
4004
4005 /* (a - (-b)) -> (a + b). */
4006 if (GET_CODE (op1) == NEG)
4007 return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
4008
4009 /* If one of the operands is a PLUS or a MINUS, see if we can
4010 simplify this by the associative law.
4011 Don't use the associative law for floating point.
4012 The inaccuracy makes it nonassociative,
4013 and subtle programs can break if operations are associated. */
4014
4015 if (INTEGRAL_MODE_P (mode)
4016 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
4017 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
4018 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
4019 return tem;
4020
4021 /* Don't let a relocatable value get a negative coeff. */
4022 if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
4023 return plus_constant (op0, - INTVAL (op1));
4024
4025 /* (x - (x & y)) -> (x & ~y) */
4026 if (GET_CODE (op1) == AND)
4027 {
4028 if (rtx_equal_p (op0, XEXP (op1, 0)))
4029 return cse_gen_binary (AND, mode, op0, gen_rtx_NOT (mode, XEXP (op1, 1)));
4030 if (rtx_equal_p (op0, XEXP (op1, 1)))
4031 return cse_gen_binary (AND, mode, op0, gen_rtx_NOT (mode, XEXP (op1, 0)));
4032 }
4033 break;
4034
4035 case MULT:
4036 if (op1 == constm1_rtx)
4037 {
4038 tem = simplify_unary_operation (NEG, mode, op0, mode);
4039
4040 return tem ? tem : gen_rtx_NEG (mode, op0);
4041 }
4042
4043 /* In IEEE floating point, x*0 is not always 0. */
4044 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4045 || ! FLOAT_MODE_P (mode) || flag_fast_math)
4046 && op1 == CONST0_RTX (mode)
4047 && ! side_effects_p (op0))
4048 return op1;
4049
4050 /* In IEEE floating point, x*1 is not equivalent to x for NaNs.
4051 However, ANSI says we can drop signals,
4052 so we can do this anyway. */
4053 if (op1 == CONST1_RTX (mode))
4054 return op0;
4055
4056 /* Convert multiply by constant power of two into shift unless
4057 we are still generating RTL. This test is a kludge. */
4058 if (GET_CODE (op1) == CONST_INT
4059 && (val = exact_log2 (INTVAL (op1))) >= 0
4060 /* If the mode is larger than the host word size, and the
4061 uppermost bit is set, then this isn't a power of two due
4062 to implicit sign extension. */
4063 && (width <= HOST_BITS_PER_WIDE_INT
4064 || val != HOST_BITS_PER_WIDE_INT - 1)
4065 && ! rtx_equal_function_value_matters)
4066 return gen_rtx_ASHIFT (mode, op0, GEN_INT (val));
4067
4068 if (GET_CODE (op1) == CONST_DOUBLE
4069 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
4070 {
4071 REAL_VALUE_TYPE d;
4072 jmp_buf handler;
4073 int op1is2, op1ism1;
4074
4075 if (setjmp (handler))
4076 return 0;
4077
4078 set_float_handler (handler);
4079 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
4080 op1is2 = REAL_VALUES_EQUAL (d, dconst2);
4081 op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
4082 set_float_handler (NULL_PTR);
4083
4084 /* x*2 is x+x and x*(-1) is -x */
4085 if (op1is2 && GET_MODE (op0) == mode)
4086 return gen_rtx_PLUS (mode, op0, copy_rtx (op0));
4087
4088 else if (op1ism1 && GET_MODE (op0) == mode)
4089 return gen_rtx_NEG (mode, op0);
4090 }
4091 break;
4092
4093 case IOR:
4094 if (op1 == const0_rtx)
4095 return op0;
4096 if (GET_CODE (op1) == CONST_INT
4097 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4098 return op1;
4099 if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4100 return op0;
4101 /* A | (~A) -> -1 */
4102 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
4103 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
4104 && ! side_effects_p (op0)
4105 && GET_MODE_CLASS (mode) != MODE_CC)
4106 return constm1_rtx;
4107 break;
4108
4109 case XOR:
4110 if (op1 == const0_rtx)
4111 return op0;
4112 if (GET_CODE (op1) == CONST_INT
4113 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4114 return gen_rtx_NOT (mode, op0);
4115 if (op0 == op1 && ! side_effects_p (op0)
4116 && GET_MODE_CLASS (mode) != MODE_CC)
4117 return const0_rtx;
4118 break;
4119
4120 case AND:
4121 if (op1 == const0_rtx && ! side_effects_p (op0))
4122 return const0_rtx;
4123 if (GET_CODE (op1) == CONST_INT
4124 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4125 return op0;
4126 if (op0 == op1 && ! side_effects_p (op0)
4127 && GET_MODE_CLASS (mode) != MODE_CC)
4128 return op0;
4129 /* A & (~A) -> 0 */
4130 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
4131 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
4132 && ! side_effects_p (op0)
4133 && GET_MODE_CLASS (mode) != MODE_CC)
4134 return const0_rtx;
4135 break;
4136
4137 case UDIV:
4138 /* Convert divide by power of two into shift (divide by 1 handled
4139 below). */
4140 if (GET_CODE (op1) == CONST_INT
4141 && (arg1 = exact_log2 (INTVAL (op1))) > 0)
4142 return gen_rtx_LSHIFTRT (mode, op0, GEN_INT (arg1));
4143
4144 /* ... fall through ... */
4145
4146 case DIV:
4147 if (op1 == CONST1_RTX (mode))
4148 return op0;
4149
4150 /* In IEEE floating point, 0/x is not always 0. */
4151 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4152 || ! FLOAT_MODE_P (mode) || flag_fast_math)
4153 && op0 == CONST0_RTX (mode)
4154 && ! side_effects_p (op1))
4155 return op0;
4156
4157 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4158 /* Change division by a constant into multiplication. Only do
4159 this with -ffast-math until an expert says it is safe in
4160 general. */
4161 else if (GET_CODE (op1) == CONST_DOUBLE
4162 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
4163 && op1 != CONST0_RTX (mode)
4164 && flag_fast_math)
4165 {
4166 REAL_VALUE_TYPE d;
4167 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
4168
4169 if (! REAL_VALUES_EQUAL (d, dconst0))
4170 {
4171 #if defined (REAL_ARITHMETIC)
4172 REAL_ARITHMETIC (d, rtx_to_tree_code (DIV), dconst1, d);
4173 return gen_rtx_MULT (mode, op0,
4174 CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
4175 #else
4176 return gen_rtx_MULT (mode, op0,
4177 CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
4178 #endif
4179 }
4180 }
4181 #endif
4182 break;
4183
4184 case UMOD:
4185 /* Handle modulus by power of two (mod with 1 handled below). */
4186 if (GET_CODE (op1) == CONST_INT
4187 && exact_log2 (INTVAL (op1)) > 0)
4188 return gen_rtx_AND (mode, op0, GEN_INT (INTVAL (op1) - 1));
4189
4190 /* ... fall through ... */
4191
4192 case MOD:
4193 if ((op0 == const0_rtx || op1 == const1_rtx)
4194 && ! side_effects_p (op0) && ! side_effects_p (op1))
4195 return const0_rtx;
4196 break;
4197
4198 case ROTATERT:
4199 case ROTATE:
4200 /* Rotating ~0 always results in ~0. */
4201 if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
4202 && INTVAL (op0) == GET_MODE_MASK (mode)
4203 && ! side_effects_p (op1))
4204 return op0;
4205
4206 /* ... fall through ... */
4207
4208 case ASHIFT:
4209 case ASHIFTRT:
4210 case LSHIFTRT:
4211 if (op1 == const0_rtx)
4212 return op0;
4213 if (op0 == const0_rtx && ! side_effects_p (op1))
4214 return op0;
4215 break;
4216
4217 case SMIN:
4218 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
4219 && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width - 1)
4220 && ! side_effects_p (op0))
4221 return op1;
4222 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4223 return op0;
4224 break;
4225
4226 case SMAX:
4227 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
4228 && (INTVAL (op1)
4229 == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
4230 && ! side_effects_p (op0))
4231 return op1;
4232 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4233 return op0;
4234 break;
4235
4236 case UMIN:
4237 if (op1 == const0_rtx && ! side_effects_p (op0))
4238 return op1;
4239 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4240 return op0;
4241 break;
4242
4243 case UMAX:
4244 if (op1 == constm1_rtx && ! side_effects_p (op0))
4245 return op1;
4246 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4247 return op0;
4248 break;
4249
4250 default:
4251 abort ();
4252 }
4253
4254 return 0;
4255 }
4256
4257 /* Get the integer argument values in two forms:
4258 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
4259
4260 arg0 = INTVAL (op0);
4261 arg1 = INTVAL (op1);
4262
4263 if (width < HOST_BITS_PER_WIDE_INT)
4264 {
4265 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
4266 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
4267
4268 arg0s = arg0;
4269 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4270 arg0s |= ((HOST_WIDE_INT) (-1) << width);
4271
4272 arg1s = arg1;
4273 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4274 arg1s |= ((HOST_WIDE_INT) (-1) << width);
4275 }
4276 else
4277 {
4278 arg0s = arg0;
4279 arg1s = arg1;
4280 }
4281
4282 /* Compute the value of the arithmetic. */
4283
4284 switch (code)
4285 {
4286 case PLUS:
4287 val = arg0s + arg1s;
4288 break;
4289
4290 case MINUS:
4291 val = arg0s - arg1s;
4292 break;
4293
4294 case MULT:
4295 val = arg0s * arg1s;
4296 break;
4297
4298 case DIV:
4299 if (arg1s == 0)
4300 return 0;
4301 val = arg0s / arg1s;
4302 break;
4303
4304 case MOD:
4305 if (arg1s == 0)
4306 return 0;
4307 val = arg0s % arg1s;
4308 break;
4309
4310 case UDIV:
4311 if (arg1 == 0)
4312 return 0;
4313 val = (unsigned HOST_WIDE_INT) arg0 / arg1;
4314 break;
4315
4316 case UMOD:
4317 if (arg1 == 0)
4318 return 0;
4319 val = (unsigned HOST_WIDE_INT) arg0 % arg1;
4320 break;
4321
4322 case AND:
4323 val = arg0 & arg1;
4324 break;
4325
4326 case IOR:
4327 val = arg0 | arg1;
4328 break;
4329
4330 case XOR:
4331 val = arg0 ^ arg1;
4332 break;
4333
4334 case LSHIFTRT:
4335 /* If shift count is undefined, don't fold it; let the machine do
4336 what it wants. But truncate it if the machine will do that. */
4337 if (arg1 < 0)
4338 return 0;
4339
4340 #ifdef SHIFT_COUNT_TRUNCATED
4341 if (SHIFT_COUNT_TRUNCATED)
4342 arg1 %= width;
4343 #endif
4344
4345 val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
4346 break;
4347
4348 case ASHIFT:
4349 if (arg1 < 0)
4350 return 0;
4351
4352 #ifdef SHIFT_COUNT_TRUNCATED
4353 if (SHIFT_COUNT_TRUNCATED)
4354 arg1 %= width;
4355 #endif
4356
4357 val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
4358 break;
4359
4360 case ASHIFTRT:
4361 if (arg1 < 0)
4362 return 0;
4363
4364 #ifdef SHIFT_COUNT_TRUNCATED
4365 if (SHIFT_COUNT_TRUNCATED)
4366 arg1 %= width;
4367 #endif
4368
4369 val = arg0s >> arg1;
4370
4371 /* The bootstrap compiler may not have sign-extended the right shift.
4372 Manually extend the sign to ensure the bootstrap cc matches gcc.  */
4373 if (arg0s < 0 && arg1 > 0)
4374 val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
4375
4376 break;
4377
4378 case ROTATERT:
4379 if (arg1 < 0)
4380 return 0;
4381
4382 arg1 %= width;
4383 val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
4384 | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
4385 break;
4386
4387 case ROTATE:
4388 if (arg1 < 0)
4389 return 0;
4390
4391 arg1 %= width;
4392 val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
4393 | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
4394 break;
4395
4396 case COMPARE:
4397 /* Do nothing here. */
4398 return 0;
4399
4400 case SMIN:
4401 val = arg0s <= arg1s ? arg0s : arg1s;
4402 break;
4403
4404 case UMIN:
4405 val = ((unsigned HOST_WIDE_INT) arg0
4406 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4407 break;
4408
4409 case SMAX:
4410 val = arg0s > arg1s ? arg0s : arg1s;
4411 break;
4412
4413 case UMAX:
4414 val = ((unsigned HOST_WIDE_INT) arg0
4415 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4416 break;
4417
4418 default:
4419 abort ();
4420 }
4421
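/* E.g., 0x7f + 1 in QImode gives val == 0x80; trunc_int_for_mode
sign-extends that to -128, the canonical CONST_INT for QImode.  */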
4422 val = trunc_int_for_mode (val, mode);
4423
4424 return GEN_INT (val);
4425 }
4426 \f
4427 /* Simplify a PLUS or MINUS, at least one of whose operands may be another
4428 PLUS or MINUS.
4429
4430 Rather than testing for specific cases, we do this by a brute-force method
4431 and do all possible simplifications until no more changes occur. Then
4432 we rebuild the operation. */
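/* Illustrative walk-through (hypothetical pseudo-reg A):
((A + 5) - (A - 3)) expands to the operand list A, 5, -A, 3; the A and
-A cancel via simplify_binary_operation, the constants combine, and
the result is (const_int 8).  */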
4433
4434 static rtx
4435 simplify_plus_minus (code, mode, op0, op1)
4436 enum rtx_code code;
4437 enum machine_mode mode;
4438 rtx op0, op1;
4439 {
4440 rtx ops[8];
4441 int negs[8];
4442 rtx result, tem;
4443 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
4444 int first = 1, negate = 0, changed;
4445 int i, j;
4446
4447 bzero ((char *) ops, sizeof ops);
4448
4449 /* Set up the two operands and then expand them until nothing has been
4450 changed. If we run out of room in our array, give up; this should
4451 almost never happen. */
4452
4453 ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);
4454
4455 changed = 1;
4456 while (changed)
4457 {
4458 changed = 0;
4459
4460 for (i = 0; i < n_ops; i++)
4461 switch (GET_CODE (ops[i]))
4462 {
4463 case PLUS:
4464 case MINUS:
4465 if (n_ops == 7)
4466 return 0;
4467
4468 ops[n_ops] = XEXP (ops[i], 1);
4469 negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
4470 ops[i] = XEXP (ops[i], 0);
4471 input_ops++;
4472 changed = 1;
4473 break;
4474
4475 case NEG:
4476 ops[i] = XEXP (ops[i], 0);
4477 negs[i] = ! negs[i];
4478 changed = 1;
4479 break;
4480
4481 case CONST:
4482 ops[i] = XEXP (ops[i], 0);
4483 input_consts++;
4484 changed = 1;
4485 break;
4486
4487 case NOT:
4488 /* ~a -> (-a - 1) */
4489 if (n_ops != 7)
4490 {
4491 ops[n_ops] = constm1_rtx;
4492 negs[n_ops++] = negs[i];
4493 ops[i] = XEXP (ops[i], 0);
4494 negs[i] = ! negs[i];
4495 changed = 1;
4496 }
4497 break;
4498
4499 case CONST_INT:
4500 if (negs[i])
4501 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
4502 break;
4503
4504 default:
4505 break;
4506 }
4507 }
4508
4509 /* If we only have two operands, we can't do anything. */
4510 if (n_ops <= 2)
4511 return 0;
4512
4513 /* Now simplify each pair of operands until nothing changes. The first
4514 time through just simplify constants against each other. */
4515
4516 changed = 1;
4517 while (changed)
4518 {
4519 changed = first;
4520
4521 for (i = 0; i < n_ops - 1; i++)
4522 for (j = i + 1; j < n_ops; j++)
4523 if (ops[i] != 0 && ops[j] != 0
4524 && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
4525 {
4526 rtx lhs = ops[i], rhs = ops[j];
4527 enum rtx_code ncode = PLUS;
4528
4529 if (negs[i] && ! negs[j])
4530 lhs = ops[j], rhs = ops[i], ncode = MINUS;
4531 else if (! negs[i] && negs[j])
4532 ncode = MINUS;
4533
4534 tem = simplify_binary_operation (ncode, mode, lhs, rhs);
4535 if (tem)
4536 {
4537 ops[i] = tem, ops[j] = 0;
4538 negs[i] = negs[i] && negs[j];
4539 if (GET_CODE (tem) == NEG)
4540 ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];
4541
4542 if (GET_CODE (ops[i]) == CONST_INT && negs[i])
4543 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
4544 changed = 1;
4545 }
4546 }
4547
4548 first = 0;
4549 }
4550
4551 /* Pack all the operands to the lower-numbered entries and give up if
4552 we didn't reduce the number of operands we had. Make sure we
4553 count a CONST as two operands. If we have the same number of
4554 operands, but have made more CONSTs than we had, this is also
4555 an improvement, so accept it. */
4556
4557 for (i = 0, j = 0; j < n_ops; j++)
4558 if (ops[j] != 0)
4559 {
4560 ops[i] = ops[j], negs[i++] = negs[j];
4561 if (GET_CODE (ops[j]) == CONST)
4562 n_consts++;
4563 }
4564
4565 if (i + n_consts > input_ops
4566 || (i + n_consts == input_ops && n_consts <= input_consts))
4567 return 0;
4568
4569 n_ops = i;
4570
4571 /* If we have a CONST_INT, put it last. */
4572 for (i = 0; i < n_ops - 1; i++)
4573 if (GET_CODE (ops[i]) == CONST_INT)
4574 {
4575 tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i] , ops[i] = tem;
4576 j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
4577 }
4578
4579 /* Put a non-negated operand first. If there aren't any, make all
4580 operands positive and negate the whole thing later. */
4581 for (i = 0; i < n_ops && negs[i]; i++)
4582 ;
4583
4584 if (i == n_ops)
4585 {
4586 for (i = 0; i < n_ops; i++)
4587 negs[i] = 0;
4588 negate = 1;
4589 }
4590 else if (i != 0)
4591 {
4592 tem = ops[0], ops[0] = ops[i], ops[i] = tem;
4593 j = negs[0], negs[0] = negs[i], negs[i] = j;
4594 }
4595
4596 /* Now make the result by performing the requested operations. */
4597 result = ops[0];
4598 for (i = 1; i < n_ops; i++)
4599 result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);
4600
4601 return negate ? gen_rtx_NEG (mode, result) : result;
4602 }
4603 \f
4604 /* Make a binary operation by properly ordering the operands and
4605 seeing if the expression folds. */
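/* E.g., cse_gen_binary (PLUS, SImode, const1_rtx, R), with R a pseudo
register, first swaps the operands so the constant comes second, then
returns plus_constant (R, 1), i.e. (plus R (const_int 1)).  */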
4606
4607 static rtx
4608 cse_gen_binary (code, mode, op0, op1)
4609 enum rtx_code code;
4610 enum machine_mode mode;
4611 rtx op0, op1;
4612 {
4613 rtx tem;
4614
4615 /* Put complex operands first and constants second if commutative. */
4616 if (GET_RTX_CLASS (code) == 'c'
4617 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4618 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4619 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4620 || (GET_CODE (op0) == SUBREG
4621 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4622 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4623 tem = op0, op0 = op1, op1 = tem;
4624
4625 /* If this simplifies, do it. */
4626 tem = simplify_binary_operation (code, mode, op0, op1);
4627
4628 if (tem)
4629 return tem;
4630
4631 /* Handle addition and subtraction of CONST_INT specially. Otherwise,
4632 just form the operation. */
4633
4634 if (code == PLUS && GET_CODE (op1) == CONST_INT
4635 && GET_MODE (op0) != VOIDmode)
4636 return plus_constant (op0, INTVAL (op1));
4637 else if (code == MINUS && GET_CODE (op1) == CONST_INT
4638 && GET_MODE (op0) != VOIDmode)
4639 return plus_constant (op0, - INTVAL (op1));
4640 else
4641 return gen_rtx_fmt_ee (code, mode, op0, op1);
4642 }
4643 \f
4644 struct cfc_args
4645 {
4646 /* Input */
4647 rtx op0, op1;
4648 /* Output */
4649 int equal, op0lt, op1lt;
4650 };
4651
4652 static void
4653 check_fold_consts (data)
4654 PTR data;
4655 {
4656 struct cfc_args * args = (struct cfc_args *) data;
4657 REAL_VALUE_TYPE d0, d1;
4658
4659 REAL_VALUE_FROM_CONST_DOUBLE (d0, args->op0);
4660 REAL_VALUE_FROM_CONST_DOUBLE (d1, args->op1);
4661 args->equal = REAL_VALUES_EQUAL (d0, d1);
4662 args->op0lt = REAL_VALUES_LESS (d0, d1);
4663 args->op1lt = REAL_VALUES_LESS (d1, d0);
4664 }
4665
4666 /* Like simplify_binary_operation except used for relational operators.
4667 MODE is the mode of the operands, not that of the result. If MODE
4668 is VOIDmode, both operands must also be VOIDmode and we compare the
4669 operands in "infinite precision".
4670
4671 If no simplification is possible, this function returns zero. Otherwise,
4672 it returns either const_true_rtx or const0_rtx. */
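/* E.g., comparing (const_int 4) with (const_int 7) in SImode: GT yields
const0_rtx and LE yields const_true_rtx.  Comparing two distinct
registers about which nothing is known yields 0.  */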
4673
4674 rtx
4675 simplify_relational_operation (code, mode, op0, op1)
4676 enum rtx_code code;
4677 enum machine_mode mode;
4678 rtx op0, op1;
4679 {
4680 int equal, op0lt, op0ltu, op1lt, op1ltu;
4681 rtx tem;
4682
4683 /* If op0 is a compare, extract the comparison arguments from it. */
4684 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4685 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4686
4687 /* We can't simplify MODE_CC values since we don't know what the
4688 actual comparison is. */
4689 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC
4690 #ifdef HAVE_cc0
4691 || op0 == cc0_rtx
4692 #endif
4693 )
4694 return 0;
4695
4696 /* For integer comparisons of A and B maybe we can simplify A - B and can
4697 then simplify a comparison of that with zero. If A and B are both either
4698 a register or a CONST_INT, this can't help; testing for these cases will
4699 prevent infinite recursion here and speed things up.
4700
4701 If CODE is an unsigned comparison, then we can never do this optimization,
4702 because it gives an incorrect result if the subtraction wraps around zero.
4703 ANSI C defines unsigned operations such that they never overflow, and
4704 thus such cases cannot be ignored.  */
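/* E.g., with 32-bit operands, 0xffffffff GTU 1 is true, but the
difference 0xfffffffe is negative viewed as a signed value, so testing
(A - B) GT 0 would wrongly yield false.  */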
4705
4706 if (INTEGRAL_MODE_P (mode) && op1 != const0_rtx
4707 && ! ((GET_CODE (op0) == REG || GET_CODE (op0) == CONST_INT)
4708 && (GET_CODE (op1) == REG || GET_CODE (op1) == CONST_INT))
4709 && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
4710 && code != GTU && code != GEU && code != LTU && code != LEU)
4711 return simplify_relational_operation (signed_condition (code),
4712 mode, tem, const0_rtx);
4713
4714 /* For non-IEEE floating-point, if the two operands are equal, we know the
4715 result. */
4716 if (rtx_equal_p (op0, op1)
4717 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4718 || ! FLOAT_MODE_P (GET_MODE (op0)) || flag_fast_math))
4719 equal = 1, op0lt = 0, op0ltu = 0, op1lt = 0, op1ltu = 0;
4720
4721 /* If the operands are floating-point constants, see if we can fold
4722 the result. */
4723 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4724 else if (GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
4725 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4726 {
4727 struct cfc_args args;
4728
4729 /* Set up the input for check_fold_consts ().  */
4730 args.op0 = op0;
4731 args.op1 = op1;
4732
4733 if (do_float_handler (check_fold_consts, (PTR) &args) == 0)
4734 /* We got an exception from check_fold_consts ().  */
4735 return 0;
4736
4737 /* Receive output from check_fold_consts ().  */
4738 equal = args.equal;
4739 op0lt = op0ltu = args.op0lt;
4740 op1lt = op1ltu = args.op1lt;
4741 }
4742 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4743
4744 /* Otherwise, see if the operands are both integers. */
4745 else if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
4746 && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
4747 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
4748 {
4749 int width = GET_MODE_BITSIZE (mode);
4750 HOST_WIDE_INT l0s, h0s, l1s, h1s;
4751 unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
4752
4753 /* Get the two words comprising each integer constant. */
4754 if (GET_CODE (op0) == CONST_DOUBLE)
4755 {
4756 l0u = l0s = CONST_DOUBLE_LOW (op0);
4757 h0u = h0s = CONST_DOUBLE_HIGH (op0);
4758 }
4759 else
4760 {
4761 l0u = l0s = INTVAL (op0);
4762 h0u = h0s = l0s < 0 ? -1 : 0;
4763 }
4764
4765 if (GET_CODE (op1) == CONST_DOUBLE)
4766 {
4767 l1u = l1s = CONST_DOUBLE_LOW (op1);
4768 h1u = h1s = CONST_DOUBLE_HIGH (op1);
4769 }
4770 else
4771 {
4772 l1u = l1s = INTVAL (op1);
4773 h1u = h1s = l1s < 0 ? -1 : 0;
4774 }
4775
4776 /* If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT,
4777 we have to sign- or zero-extend the values.  */
4778 if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
4779 h0u = h1u = 0, h0s = l0s < 0 ? -1 : 0, h1s = l1s < 0 ? -1 : 0;
4780
4781 if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
4782 {
4783 l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
4784 l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
4785
4786 if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4787 l0s |= ((HOST_WIDE_INT) (-1) << width);
4788
4789 if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4790 l1s |= ((HOST_WIDE_INT) (-1) << width);
4791 }
4792
4793 equal = (h0u == h1u && l0u == l1u);
4794 op0lt = (h0s < h1s || (h0s == h1s && l0s < l1s));
4795 op1lt = (h1s < h0s || (h1s == h0s && l1s < l0s));
4796 op0ltu = (h0u < h1u || (h0u == h1u && l0u < l1u));
4797 op1ltu = (h1u < h0u || (h1u == h0u && l1u < l0u));
4798 }
4799
4800 /* Otherwise, there are some code-specific tests we can make. */
4801 else
4802 {
4803 switch (code)
4804 {
4805 case EQ:
4806 /* References to the frame plus a constant or labels cannot
4807 be zero, but a SYMBOL_REF can due to #pragma weak. */
4808 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4809 || GET_CODE (op0) == LABEL_REF)
4810 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4811 /* On some machines, the ap reg can be 0 sometimes. */
4812 && op0 != arg_pointer_rtx
4813 #endif
4814 )
4815 return const0_rtx;
4816 break;
4817
4818 case NE:
4819 if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4820 || GET_CODE (op0) == LABEL_REF)
4821 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4822 && op0 != arg_pointer_rtx
4823 #endif
4824 )
4825 return const_true_rtx;
4826 break;
4827
4828 case GEU:
4829 /* Unsigned values are never negative. */
4830 if (op1 == const0_rtx)
4831 return const_true_rtx;
4832 break;
4833
4834 case LTU:
4835 if (op1 == const0_rtx)
4836 return const0_rtx;
4837 break;
4838
4839 case LEU:
4840 /* Unsigned values are never greater than the largest
4841 unsigned value. */
4842 if (GET_CODE (op1) == CONST_INT
4843 && INTVAL (op1) == GET_MODE_MASK (mode)
4844 && INTEGRAL_MODE_P (mode))
4845 return const_true_rtx;
4846 break;
4847
4848 case GTU:
4849 if (GET_CODE (op1) == CONST_INT
4850 && INTVAL (op1) == GET_MODE_MASK (mode)
4851 && INTEGRAL_MODE_P (mode))
4852 return const0_rtx;
4853 break;
4854
4855 default:
4856 break;
4857 }
4858
4859 return 0;
4860 }
4861
4862 /* If we reach here, EQUAL, OP0LT, OP0LTU, OP1LT, and OP1LTU are set
4863 as appropriate. */
4864 switch (code)
4865 {
4866 case EQ:
4867 return equal ? const_true_rtx : const0_rtx;
4868 case NE:
4869 return ! equal ? const_true_rtx : const0_rtx;
4870 case LT:
4871 return op0lt ? const_true_rtx : const0_rtx;
4872 case GT:
4873 return op1lt ? const_true_rtx : const0_rtx;
4874 case LTU:
4875 return op0ltu ? const_true_rtx : const0_rtx;
4876 case GTU:
4877 return op1ltu ? const_true_rtx : const0_rtx;
4878 case LE:
4879 return equal || op0lt ? const_true_rtx : const0_rtx;
4880 case GE:
4881 return equal || op1lt ? const_true_rtx : const0_rtx;
4882 case LEU:
4883 return equal || op0ltu ? const_true_rtx : const0_rtx;
4884 case GEU:
4885 return equal || op1ltu ? const_true_rtx : const0_rtx;
4886 default:
4887 abort ();
4888 }
4889 }
4890 \f
4891 /* Simplify CODE, an operation with result mode MODE and three operands,
4892 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4893 a constant.  Return 0 if no simplification is possible.  */
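/* E.g., assuming ! BITS_BIG_ENDIAN, extracting 8 bits at position 4
from (const_int 0x76543210) with ZERO_EXTRACT computes
(0x76543210 >> 4) & 0xff, giving (const_int 0x21).  */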
4894
4895 rtx
4896 simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4897 enum rtx_code code;
4898 enum machine_mode mode, op0_mode;
4899 rtx op0, op1, op2;
4900 {
4901 int width = GET_MODE_BITSIZE (mode);
4902
4903 /* VOIDmode means "infinite" precision. */
4904 if (width == 0)
4905 width = HOST_BITS_PER_WIDE_INT;
4906
4907 switch (code)
4908 {
4909 case SIGN_EXTRACT:
4910 case ZERO_EXTRACT:
4911 if (GET_CODE (op0) == CONST_INT
4912 && GET_CODE (op1) == CONST_INT
4913 && GET_CODE (op2) == CONST_INT
4914 && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
4915 && width <= HOST_BITS_PER_WIDE_INT)
4916 {
4917 /* Extracting a bit-field from a constant.  */
4918 HOST_WIDE_INT val = INTVAL (op0);
4919
4920 if (BITS_BIG_ENDIAN)
4921 val >>= (GET_MODE_BITSIZE (op0_mode)
4922 - INTVAL (op2) - INTVAL (op1));
4923 else
4924 val >>= INTVAL (op2);
4925
4926 if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
4927 {
4928 /* First zero-extend. */
4929 val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
4930 /* If desired, propagate sign bit. */
4931 if (code == SIGN_EXTRACT
4932 && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4933 val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
4934 }
4935
4936 /* Clear the bits that don't belong in our mode,
4937 unless they and our sign bit are all one.
4938 So we get either a reasonable negative value or a reasonable
4939 unsigned value for this mode. */
4940 if (width < HOST_BITS_PER_WIDE_INT
4941 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4942 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4943 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4944
4945 return GEN_INT (val);
4946 }
4947 break;
4948
4949 case IF_THEN_ELSE:
4950 if (GET_CODE (op0) == CONST_INT)
4951 return op0 != const0_rtx ? op1 : op2;
4952
4953 /* Convert a == b ? b : a to "a". */
4954 if (GET_CODE (op0) == NE && ! side_effects_p (op0)
4955 && rtx_equal_p (XEXP (op0, 0), op1)
4956 && rtx_equal_p (XEXP (op0, 1), op2))
4957 return op1;
4958 else if (GET_CODE (op0) == EQ && ! side_effects_p (op0)
4959 && rtx_equal_p (XEXP (op0, 1), op1)
4960 && rtx_equal_p (XEXP (op0, 0), op2))
4961 return op2;
4962 else if (GET_RTX_CLASS (GET_CODE (op0)) == '<' && ! side_effects_p (op0))
4963 {
4964 rtx temp;
4965 temp = simplify_relational_operation (GET_CODE (op0), op0_mode,
4966 XEXP (op0, 0), XEXP (op0, 1));
4967 /* See if any simplifications were possible. */
4968 if (temp == const0_rtx)
4969 return op2;
4970 else if (temp == const1_rtx)
4971 return op1;
4972 }
4973 break;
4974
4975 default:
4976 abort ();
4977 }
4978
4979 return 0;
4980 }
4981 \f
4982 /* If X is a nontrivial arithmetic operation on an argument
4983 for which a constant value can be determined, return
4984 the result of operating on that value, as a constant.
4985 Otherwise, return X, possibly with one or more operands
4986 modified by recursive calls to this function.
4987
4988 If X is a register whose contents are known, we do NOT
4989 return those contents here. equiv_constant is called to
4990 perform that task.
4991
4992 INSN is the insn that we may be modifying. If it is 0, make a copy
4993 of X before modifying it. */
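/* E.g., if (reg 100) is in a quantity whose constant equivalent is
(const_int 3), folding (plus (reg 100) (const_int 2)) can substitute
the constant and simplify to (const_int 5).  */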
4994
4995 static rtx
4996 fold_rtx (x, insn)
4997 rtx x;
4998 rtx insn;
4999 {
5000 register enum rtx_code code;
5001 register enum machine_mode mode;
5002 register char *fmt;
5003 register int i;
5004 rtx new = 0;
5005 int copied = 0;
5006 int must_swap = 0;
5007
5008 /* Folded equivalents of first two operands of X. */
5009 rtx folded_arg0;
5010 rtx folded_arg1;
5011
5012 /* Constant equivalents of first three operands of X;
5013 0 when no such equivalent is known. */
5014 rtx const_arg0;
5015 rtx const_arg1;
5016 rtx const_arg2;
5017
5018 /* The mode of the first operand of X. We need this for sign and zero
5019 extends. */
5020 enum machine_mode mode_arg0;
5021
5022 if (x == 0)
5023 return x;
5024
5025 mode = GET_MODE (x);
5026 code = GET_CODE (x);
5027 switch (code)
5028 {
5029 case CONST:
5030 case CONST_INT:
5031 case CONST_DOUBLE:
5032 case SYMBOL_REF:
5033 case LABEL_REF:
5034 case REG:
5035 /* No use simplifying an EXPR_LIST,
5036 since EXPR_LISTs are used only for lists of args
5037 in a function call's REG_EQUAL note. */
5038 case EXPR_LIST:
5039 /* Changing anything inside an ADDRESSOF is incorrect; we don't
5040 want to (e.g.,) make (addressof (const_int 0)) just because
5041 the location is known to be zero. */
5042 case ADDRESSOF:
5043 return x;
5044
5045 #ifdef HAVE_cc0
5046 case CC0:
5047 return prev_insn_cc0;
5048 #endif
5049
5050 case PC:
5051 /* If the next insn is a CODE_LABEL followed by a jump table,
5052 PC's value is a LABEL_REF pointing to that label. That
5053 lets us fold switch statements on the Vax. */
5054 if (insn && GET_CODE (insn) == JUMP_INSN)
5055 {
5056 rtx next = next_nonnote_insn (insn);
5057
5058 if (next && GET_CODE (next) == CODE_LABEL
5059 && NEXT_INSN (next) != 0
5060 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
5061 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
5062 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
5063 return gen_rtx_LABEL_REF (Pmode, next);
5064 }
5065 break;
5066
5067 case SUBREG:
5068 /* See if we previously assigned a constant value to this SUBREG. */
5069 if ((new = lookup_as_function (x, CONST_INT)) != 0
5070 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
5071 return new;
5072
5073 /* If this is a paradoxical SUBREG, we have no idea what value the
5074 extra bits would have. However, if the operand is equivalent
5075 to a SUBREG whose operand is the same as our mode, and all the
5076 modes are within a word, we can just use the inner operand
5077 because these SUBREGs just say how to treat the register.
5078
5079 Similarly if we find an integer constant. */
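/* E.g., (subreg:SI (reg:QI R) 0) is paradoxical.  If R is known
equivalent to (subreg:QI (reg:SI R2) 0), the inner (reg:SI R2) can be
used directly; a known VOIDmode constant equivalent can likewise be
returned.  */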
5080
5081 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
5082 {
5083 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
5084 struct table_elt *elt;
5085
5086 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
5087 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
5088 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
5089 imode)) != 0)
5090 for (elt = elt->first_same_value;
5091 elt; elt = elt->next_same_value)
5092 {
5093 if (CONSTANT_P (elt->exp)
5094 && GET_MODE (elt->exp) == VOIDmode)
5095 return elt->exp;
5096
5097 if (GET_CODE (elt->exp) == SUBREG
5098 && GET_MODE (SUBREG_REG (elt->exp)) == mode
5099 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
5100 return copy_rtx (SUBREG_REG (elt->exp));
5101 }
5102
5103 return x;
5104 }
5105
5106 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
5107 We might be able to if the SUBREG is extracting a single word in an
5108 integral mode or extracting the low part. */
5109
5110 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
5111 const_arg0 = equiv_constant (folded_arg0);
5112 if (const_arg0)
5113 folded_arg0 = const_arg0;
5114
5115 if (folded_arg0 != SUBREG_REG (x))
5116 {
5117 new = 0;
5118
5119 if (GET_MODE_CLASS (mode) == MODE_INT
5120 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5121 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
5122 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
5123 GET_MODE (SUBREG_REG (x)));
5124 if (new == 0 && subreg_lowpart_p (x))
5125 new = gen_lowpart_if_possible (mode, folded_arg0);
5126 if (new)
5127 return new;
5128 }
5129
5130 /* If this is a narrowing SUBREG and our operand is a REG, see if
5131 we can find an equivalence for REG that is an arithmetic operation
5132 in a wider mode where both operands are paradoxical SUBREGs
5133 from objects of our result mode. In that case, we couldn't report
5134 an equivalent value for that operation, since we don't know what the
5135 extra bits will be. But we can find an equivalence for this SUBREG
5136 by folding that operation in the narrow mode.  This allows us to
5137 fold arithmetic in narrow modes when the machine only supports
5138 word-sized arithmetic.
5139
5140 Also look for a case where we have a SUBREG whose operand is the
5141 same as our result. If both modes are smaller than a word, we
5142 are simply interpreting a register in different modes and we
5143 can use the inner value. */
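/* E.g., on a word-arithmetic machine, if R is recorded as
(plus:SI (subreg:SI (reg:QI A) 0) (subreg:SI (reg:QI B) 0)) and A and B
have known constant values, then (subreg:QI (reg:SI R) 0) folds to the
QImode sum of those constants.  */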
5144
5145 if (GET_CODE (folded_arg0) == REG
5146 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
5147 && subreg_lowpart_p (x))
5148 {
5149 struct table_elt *elt;
5150
5151 /* We can use HASH here since we know that canon_hash won't be
5152 called. */
5153 elt = lookup (folded_arg0,
5154 HASH (folded_arg0, GET_MODE (folded_arg0)),
5155 GET_MODE (folded_arg0));
5156
5157 if (elt)
5158 elt = elt->first_same_value;
5159
5160 for (; elt; elt = elt->next_same_value)
5161 {
5162 enum rtx_code eltcode = GET_CODE (elt->exp);
5163
5164 /* Just check for unary and binary operations. */
5165 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
5166 && GET_CODE (elt->exp) != SIGN_EXTEND
5167 && GET_CODE (elt->exp) != ZERO_EXTEND
5168 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
5169 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
5170 {
5171 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
5172
5173 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
5174 op0 = fold_rtx (op0, NULL_RTX);
5175
5176 op0 = equiv_constant (op0);
5177 if (op0)
5178 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
5179 op0, mode);
5180 }
5181 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
5182 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
5183 && eltcode != DIV && eltcode != MOD
5184 && eltcode != UDIV && eltcode != UMOD
5185 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
5186 && eltcode != ROTATE && eltcode != ROTATERT
5187 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
5188 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
5189 == mode))
5190 || CONSTANT_P (XEXP (elt->exp, 0)))
5191 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
5192 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
5193 == mode))
5194 || CONSTANT_P (XEXP (elt->exp, 1))))
5195 {
5196 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
5197 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
5198
5199 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
5200 op0 = fold_rtx (op0, NULL_RTX);
5201
5202 if (op0)
5203 op0 = equiv_constant (op0);
5204
5205 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
5206 op1 = fold_rtx (op1, NULL_RTX);
5207
5208 if (op1)
5209 op1 = equiv_constant (op1);
5210
5211 /* If we are looking for the low SImode part of
5212 (ashift:DI c (const_int 32)), it doesn't work
5213 to compute that in SImode, because a 32-bit shift
5214 in SImode is unpredictable. We know the value is 0. */
5215 if (op0 && op1
5216 && GET_CODE (elt->exp) == ASHIFT
5217 && GET_CODE (op1) == CONST_INT
5218 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
5219 {
5220 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
5221
5222 /* If the count fits in the inner mode's width,
5223 but exceeds the outer mode's width,
5224 the value will get truncated to 0
5225 by the subreg. */
5226 new = const0_rtx;
5227 else
5228 /* If the count exceeds even the inner mode's width,
5229 don't fold this expression. */
5230 new = 0;
5231 }
5232 else if (op0 && op1)
5233 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
5234 op0, op1);
5235 }
5236
5237 else if (GET_CODE (elt->exp) == SUBREG
5238 && GET_MODE (SUBREG_REG (elt->exp)) == mode
5239 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
5240 <= UNITS_PER_WORD)
5241 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
5242 new = copy_rtx (SUBREG_REG (elt->exp));
5243
5244 if (new)
5245 return new;
5246 }
5247 }
5248
5249 return x;
5250
5251 case NOT:
5252 case NEG:
5253 /* If we have (NOT Y), see if Y is known to be (NOT Z).
5254 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
5255 new = lookup_as_function (XEXP (x, 0), code);
5256 if (new)
5257 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
5258 break;
5259
5260 case MEM:
5261 /* If we are not actually processing an insn, don't try to find the
5262 best address. Not only don't we care, but we could modify the
5263 MEM in an invalid way since we have no insn to validate against. */
5264 if (insn != 0)
5265 find_best_addr (insn, &XEXP (x, 0));
5266
5267 {
5268 /* Even if we don't fold in the insn itself,
5269 we can safely do so here, in hopes of getting a constant. */
5270 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
5271 rtx base = 0;
5272 HOST_WIDE_INT offset = 0;
5273
5274 if (GET_CODE (addr) == REG
5275 && REGNO_QTY_VALID_P (REGNO (addr))
5276 && GET_MODE (addr) == qty_mode[REG_QTY (REGNO (addr))]
5277 && qty_const[REG_QTY (REGNO (addr))] != 0)
5278 addr = qty_const[REG_QTY (REGNO (addr))];
5279
5280 /* If the address is constant, split it into a base and an integer offset.  */
5281 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
5282 base = addr;
5283 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
5284 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
5285 {
5286 base = XEXP (XEXP (addr, 0), 0);
5287 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
5288 }
5289 else if (GET_CODE (addr) == LO_SUM
5290 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
5291 base = XEXP (addr, 1);
5292 else if (GET_CODE (addr) == ADDRESSOF)
5293 return change_address (x, VOIDmode, addr);
5294
5295 /* If this is a constant pool reference, we can fold it into its
5296 constant to allow better value tracking. */
5297 if (base && GET_CODE (base) == SYMBOL_REF
5298 && CONSTANT_POOL_ADDRESS_P (base))
5299 {
5300 rtx constant = get_pool_constant (base);
5301 enum machine_mode const_mode = get_pool_mode (base);
5302 rtx new;
5303
5304 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
5305 constant_pool_entries_cost = COST (constant);
5306
5307 /* If we are loading the full constant, we have an equivalence. */
5308 if (offset == 0 && mode == const_mode)
5309 return constant;
5310
5311 /* If this actually isn't a constant (weird!), we can't do
5312 anything. Otherwise, handle the two most common cases:
5313 extracting a word from a multi-word constant, and extracting
5314 the low-order bits. Other cases don't seem common enough to
5315 worry about. */
5316 if (! CONSTANT_P (constant))
5317 return x;
5318
5319 if (GET_MODE_CLASS (mode) == MODE_INT
5320 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5321 && offset % UNITS_PER_WORD == 0
5322 && (new = operand_subword (constant,
5323 offset / UNITS_PER_WORD,
5324 0, const_mode)) != 0)
5325 return new;
5326
5327 if (((BYTES_BIG_ENDIAN
5328 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
5329 || (! BYTES_BIG_ENDIAN && offset == 0))
5330 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
5331 return new;
5332 }
5333
5334 /* If this is a reference to a label at a known position in a jump
5335 table, we also know its value. */
5336 if (base && GET_CODE (base) == LABEL_REF)
5337 {
5338 rtx label = XEXP (base, 0);
5339 rtx table_insn = NEXT_INSN (label);
5340
5341 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5342 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
5343 {
5344 rtx table = PATTERN (table_insn);
5345
5346 if (offset >= 0
5347 && (offset / GET_MODE_SIZE (GET_MODE (table))
5348 < XVECLEN (table, 0)))
5349 return XVECEXP (table, 0,
5350 offset / GET_MODE_SIZE (GET_MODE (table)));
5351 }
5352 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5353 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
5354 {
5355 rtx table = PATTERN (table_insn);
5356
5357 if (offset >= 0
5358 && (offset / GET_MODE_SIZE (GET_MODE (table))
5359 < XVECLEN (table, 1)))
5360 {
5361 offset /= GET_MODE_SIZE (GET_MODE (table));
5362 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
5363 XEXP (table, 0));
5364
5365 if (GET_MODE (table) != Pmode)
5366 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
5367
5368 /* Indicate this is a constant. This isn't a
5369 valid form of CONST, but it will only be used
5370 to fold the next insns and then discarded, so
5371 it should be safe.
5372
5373 Note this expression must be explicitly discarded,
5374 by cse_insn, else it may end up in a REG_EQUAL note
5375 and "escape" to cause problems elsewhere. */
5376 return gen_rtx_CONST (GET_MODE (new), new);
5377 }
5378 }
5379 }
5380
5381 return x;
5382 }
5383
5384 case ASM_OPERANDS:
5385 for (i = XVECLEN (x, 3) - 1; i >= 0; i--)
5386 validate_change (insn, &XVECEXP (x, 3, i),
5387 fold_rtx (XVECEXP (x, 3, i), insn), 0);
5388 break;
5389
5390 default:
5391 break;
5392 }
5393
5394 const_arg0 = 0;
5395 const_arg1 = 0;
5396 const_arg2 = 0;
5397 mode_arg0 = VOIDmode;
5398
5399 /* Try folding our operands.
5400 Then see which ones have constant values known. */
5401
5402 fmt = GET_RTX_FORMAT (code);
5403 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5404 if (fmt[i] == 'e')
5405 {
5406 rtx arg = XEXP (x, i);
5407 rtx folded_arg = arg, const_arg = 0;
5408 enum machine_mode mode_arg = GET_MODE (arg);
5409 rtx cheap_arg, expensive_arg;
5410 rtx replacements[2];
5411 int j;
5412
5413 /* Most arguments are cheap, so handle them specially. */
5414 switch (GET_CODE (arg))
5415 {
5416 case REG:
5417 /* This is the same as calling equiv_constant; it is duplicated
5418 here for speed. */
5419 if (REGNO_QTY_VALID_P (REGNO (arg))
5420 && qty_const[REG_QTY (REGNO (arg))] != 0
5421 && GET_CODE (qty_const[REG_QTY (REGNO (arg))]) != REG
5422 && GET_CODE (qty_const[REG_QTY (REGNO (arg))]) != PLUS)
5423 const_arg
5424 = gen_lowpart_if_possible (GET_MODE (arg),
5425 qty_const[REG_QTY (REGNO (arg))]);
5426 break;
5427
5428 case CONST:
5429 case CONST_INT:
5430 case SYMBOL_REF:
5431 case LABEL_REF:
5432 case CONST_DOUBLE:
5433 const_arg = arg;
5434 break;
5435
5436 #ifdef HAVE_cc0
5437 case CC0:
5438 folded_arg = prev_insn_cc0;
5439 mode_arg = prev_insn_cc0_mode;
5440 const_arg = equiv_constant (folded_arg);
5441 break;
5442 #endif
5443
5444 default:
5445 folded_arg = fold_rtx (arg, insn);
5446 const_arg = equiv_constant (folded_arg);
5447 }
5448
5449 /* For the first three operands, see if the operand
5450 is constant or equivalent to a constant. */
5451 switch (i)
5452 {
5453 case 0:
5454 folded_arg0 = folded_arg;
5455 const_arg0 = const_arg;
5456 mode_arg0 = mode_arg;
5457 break;
5458 case 1:
5459 folded_arg1 = folded_arg;
5460 const_arg1 = const_arg;
5461 break;
5462 case 2:
5463 const_arg2 = const_arg;
5464 break;
5465 }
5466
5467 /* Pick the least expensive of the folded argument and an
5468 equivalent constant argument. */
5469 if (const_arg == 0 || const_arg == folded_arg
5470 || COST (const_arg) > COST (folded_arg))
5471 cheap_arg = folded_arg, expensive_arg = const_arg;
5472 else
5473 cheap_arg = const_arg, expensive_arg = folded_arg;
5474
5475 /* Try to replace the operand with the cheapest of the two
5476 possibilities. If it doesn't work and this is either of the first
5477 two operands of a commutative operation, try swapping them.
5478 If THAT fails, try the more expensive, provided it is cheaper
5479 than what is already there. */
5480
5481 if (cheap_arg == XEXP (x, i))
5482 continue;
5483
5484 if (insn == 0 && ! copied)
5485 {
5486 x = copy_rtx (x);
5487 copied = 1;
5488 }
5489
5490 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
5491 for (j = 0;
5492 j < 2 && replacements[j]
5493 && COST (replacements[j]) < COST (XEXP (x, i));
5494 j++)
5495 {
5496 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
5497 break;
5498
5499 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
5500 {
5501 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
5502 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
5503
5504 if (apply_change_group ())
5505 {
5506 /* Swap them back to be invalid so that this loop can
5507 continue and flag them to be swapped back later. */
5508 rtx tem;
5509
5510 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
5511 XEXP (x, 1) = tem;
5512 must_swap = 1;
5513 break;
5514 }
5515 }
5516 }
5517 }
5518
5519 else
5520 {
5521 if (fmt[i] == 'E')
5522 /* Don't try to fold inside of a vector of expressions.
5523 Doing nothing is harmless. */
5524 {;}
5525 }
5526
5527 /* If a commutative operation, place a constant integer as the second
5528 operand unless the first operand is also a constant integer. Otherwise,
5529 place any constant second unless the first operand is also a constant. */
5530
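  /* For example (register number invented), (plus:SI (const_int 3)
     (reg:SI 65)) is canonicalized as (plus:SI (reg:SI 65) (const_int 3)),
     so later lookups need only consider one operand order.  */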
5531 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5532 {
5533 if (must_swap || (const_arg0
5534 && (const_arg1 == 0
5535 || (GET_CODE (const_arg0) == CONST_INT
5536 && GET_CODE (const_arg1) != CONST_INT))))
5537 {
5538 register rtx tem = XEXP (x, 0);
5539
5540 if (insn == 0 && ! copied)
5541 {
5542 x = copy_rtx (x);
5543 copied = 1;
5544 }
5545
5546 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
5547 validate_change (insn, &XEXP (x, 1), tem, 1);
5548 if (apply_change_group ())
5549 {
5550 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
5551 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
5552 }
5553 }
5554 }
5555
5556 /* If X is an arithmetic operation, see if we can simplify it. */
5557
5558 switch (GET_RTX_CLASS (code))
5559 {
5560 case '1':
5561 {
5562 int is_const = 0;
5563
5564 /* We can't simplify extension ops unless we know the
5565 original mode. */
5566 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
5567 && mode_arg0 == VOIDmode)
5568 break;
5569
5570 /* If we had a CONST, strip it off and put it back later if we
5571 fold. */
5572 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
5573 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
5574
5575 new = simplify_unary_operation (code, mode,
5576 const_arg0 ? const_arg0 : folded_arg0,
5577 mode_arg0);
5578 if (new != 0 && is_const)
5579 new = gen_rtx_CONST (mode, new);
5580 }
5581 break;
5582
5583 case '<':
5584 /* See what items are actually being compared and set FOLDED_ARG[01]
5585 to those values and CODE to the actual comparison code. If any are
5586 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
5587 do anything if both operands are already known to be constant. */
5588
5589 if (const_arg0 == 0 || const_arg1 == 0)
5590 {
5591 struct table_elt *p0, *p1;
5592 rtx true = const_true_rtx, false = const0_rtx;
5593 enum machine_mode mode_arg1;
5594
5595 #ifdef FLOAT_STORE_FLAG_VALUE
5596 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5597 {
5598 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5599 mode);
5600 false = CONST0_RTX (mode);
5601 }
5602 #endif
5603
5604 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5605 &mode_arg0, &mode_arg1);
5606 const_arg0 = equiv_constant (folded_arg0);
5607 const_arg1 = equiv_constant (folded_arg1);
5608
5609 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5610 what kinds of things are being compared, so we can't do
5611 anything with this comparison. */
5612
5613 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5614 break;
5615
5616 /* If we do not now have two constants being compared, see
5617 if we can nevertheless deduce some things about the
5618 comparison. */
5619 if (const_arg0 == 0 || const_arg1 == 0)
5620 {
5621 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or
5622 non-explicit constant? These aren't zero, but we
5623 don't know their sign. */
5624 if (const_arg1 == const0_rtx
5625 && (NONZERO_BASE_PLUS_P (folded_arg0)
5626 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5627 come out as 0. */
5628 || GET_CODE (folded_arg0) == SYMBOL_REF
5629 #endif
5630 || GET_CODE (folded_arg0) == LABEL_REF
5631 || GET_CODE (folded_arg0) == CONST))
5632 {
5633 if (code == EQ)
5634 return false;
5635 else if (code == NE)
5636 return true;
5637 }
5638
5639 /* See if the two operands are the same. We don't do this
5640 for IEEE floating-point since we can't assume x == x
5641 since x might be a NaN. */
5642
5643 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5644 || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
5645 && (folded_arg0 == folded_arg1
5646 || (GET_CODE (folded_arg0) == REG
5647 && GET_CODE (folded_arg1) == REG
5648 && (REG_QTY (REGNO (folded_arg0))
5649 == REG_QTY (REGNO (folded_arg1))))
5650 || ((p0 = lookup (folded_arg0,
5651 (safe_hash (folded_arg0, mode_arg0)
5652 % NBUCKETS), mode_arg0))
5653 && (p1 = lookup (folded_arg1,
5654 (safe_hash (folded_arg1, mode_arg0)
5655 % NBUCKETS), mode_arg0))
5656 && p0->first_same_value == p1->first_same_value)))
5657 return ((code == EQ || code == LE || code == GE
5658 || code == LEU || code == GEU)
5659 ? true : false);
5660
5661 /* If FOLDED_ARG0 is a register, see if the comparison we are
5662 doing now is either the same as we did before or the reverse
5663 (we only check the reverse if not floating-point). */
5664 else if (GET_CODE (folded_arg0) == REG)
5665 {
5666 int qty = REG_QTY (REGNO (folded_arg0));
5667
5668 if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5669 && (comparison_dominates_p (qty_comparison_code[qty], code)
5670 || (comparison_dominates_p (qty_comparison_code[qty],
5671 reverse_condition (code))
5672 && ! FLOAT_MODE_P (mode_arg0)))
5673 && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5674 || (const_arg1
5675 && rtx_equal_p (qty_comparison_const[qty],
5676 const_arg1))
5677 || (GET_CODE (folded_arg1) == REG
5678 && (REG_QTY (REGNO (folded_arg1))
5679 == qty_comparison_qty[qty]))))
5680 return (comparison_dominates_p (qty_comparison_code[qty],
5681 code)
5682 ? true : false);
5683 }
5684 }
5685 }
5686
5687 /* If we are comparing against zero, see if the first operand is
5688 equivalent to an IOR with a constant. If so, we may be able to
5689 determine the result of this comparison. */
5690
5691 if (const_arg1 == const0_rtx)
5692 {
5693 rtx y = lookup_as_function (folded_arg0, IOR);
5694 rtx inner_const;
5695
5696 if (y != 0
5697 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5698 && GET_CODE (inner_const) == CONST_INT
5699 && INTVAL (inner_const) != 0)
5700 {
5701 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
5702 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
5703 && (INTVAL (inner_const)
5704 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
5705 rtx true = const_true_rtx, false = const0_rtx;
5706
5707 #ifdef FLOAT_STORE_FLAG_VALUE
5708 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5709 {
5710 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5711 mode);
5712 false = CONST0_RTX (mode);
5713 }
5714 #endif
5715
5716 switch (code)
5717 {
5718 case EQ:
5719 return false;
5720 case NE:
5721 return true;
5722 case LT: case LE:
5723 if (has_sign)
5724 return true;
5725 break;
5726 case GT: case GE:
5727 if (has_sign)
5728 return false;
5729 break;
5730 default:
5731 break;
5732 }
5733 }
5734 }
5735
5736 new = simplify_relational_operation (code, mode_arg0,
5737 const_arg0 ? const_arg0 : folded_arg0,
5738 const_arg1 ? const_arg1 : folded_arg1);
5739 #ifdef FLOAT_STORE_FLAG_VALUE
5740 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5741 new = ((new == const0_rtx) ? CONST0_RTX (mode)
5742 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE, mode));
5743 #endif
5744 break;
5745
5746 case '2':
5747 case 'c':
5748 switch (code)
5749 {
5750 case PLUS:
5751 /* If the second operand is a LABEL_REF, see if the first is a MINUS
5752 with that LABEL_REF as its second operand. If so, the result is
5753 the first operand of that MINUS. This handles switches with an
5754 ADDR_DIFF_VEC table. */
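	  /* As a hypothetical sketch: if folded_arg0 is known to be
	     (minus:SI (reg:SI 70) (label_ref L4)) and const_arg1 is
	     (label_ref L4), the whole PLUS folds to (reg:SI 70).  The
	     register and label numbers are invented.  */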
5755 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5756 {
5757 rtx y
5758 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
5759 : lookup_as_function (folded_arg0, MINUS);
5760
5761 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5762 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5763 return XEXP (y, 0);
5764
5765 /* Now try for a CONST of a MINUS like the above. */
5766 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
5767 : lookup_as_function (folded_arg0, CONST))) != 0
5768 && GET_CODE (XEXP (y, 0)) == MINUS
5769 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5770 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg1, 0))
5771 return XEXP (XEXP (y, 0), 0);
5772 }
5773
5774 /* Likewise if the operands are in the other order. */
5775 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
5776 {
5777 rtx y
5778 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
5779 : lookup_as_function (folded_arg1, MINUS);
5780
5781 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5782 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
5783 return XEXP (y, 0);
5784
5785 /* Now try for a CONST of a MINUS like the above. */
5786 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
5787 : lookup_as_function (folded_arg1, CONST))) != 0
5788 && GET_CODE (XEXP (y, 0)) == MINUS
5789 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5790 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg0, 0))
5791 return XEXP (XEXP (y, 0), 0);
5792 }
5793
5794 /* If second operand is a register equivalent to a negative
5795 CONST_INT, see if we can find a register equivalent to the
5796 positive constant. Make a MINUS if so. Don't do this for
5797 a non-negative constant since we might then alternate between
5798 choosing positive and negative constants. Having the positive
5799 constant previously-used is the more common case. Be sure
5800 the resulting constant is non-negative; if const_arg1 were
5801 the smallest negative number this would overflow: depending
5802 on the mode, this would either just be the same value (and
5803 hence not save anything) or be incorrect. */
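	  /* E.g. (register numbers invented), if (reg:SI 66) is equivalent
	     to (const_int -4) and (reg:SI 67) is equivalent to (const_int 4),
	     then (plus:SI (reg:SI 65) (reg:SI 66)) can become
	     (minus:SI (reg:SI 65) (reg:SI 67)).  */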
5804 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
5805 && INTVAL (const_arg1) < 0
5806 && - INTVAL (const_arg1) >= 0
5807 && GET_CODE (folded_arg1) == REG)
5808 {
5809 rtx new_const = GEN_INT (- INTVAL (const_arg1));
5810 struct table_elt *p
5811 = lookup (new_const, safe_hash (new_const, mode) % NBUCKETS,
5812 mode);
5813
5814 if (p)
5815 for (p = p->first_same_value; p; p = p->next_same_value)
5816 if (GET_CODE (p->exp) == REG)
5817 return cse_gen_binary (MINUS, mode, folded_arg0,
5818 canon_reg (p->exp, NULL_RTX));
5819 }
5820 goto from_plus;
5821
5822 case MINUS:
5823 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5824 If so, produce (PLUS Z C2-C). */
5825 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5826 {
5827 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5828 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
5829 return fold_rtx (plus_constant (copy_rtx (y),
5830 -INTVAL (const_arg1)),
5831 NULL_RTX);
5832 }
5833
5834 /* ... fall through ... */
5835
5836 from_plus:
5837 case SMIN: case SMAX: case UMIN: case UMAX:
5838 case IOR: case AND: case XOR:
5839 case MULT: case DIV: case UDIV:
5840 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
5841 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5842 is known to be of similar form, we may be able to replace the
5843 operation with a combined operation. This may eliminate the
5844 intermediate operation if every use is simplified in this way.
5845 Note that the similar optimization done by combine.c only works
5846 if the intermediate operation's result has only one reference. */
5847
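	  /* A hypothetical example: if (reg:SI 65) is known to be
	     (ashift:SI (reg:SI 64) (const_int 2)), then
	     (ashift:SI (reg:SI 65) (const_int 3)) folds to
	     (ashift:SI (reg:SI 64) (const_int 5)), the shift counts
	     composing with PLUS.  */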
5848 if (GET_CODE (folded_arg0) == REG
5849 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5850 {
5851 int is_shift
5852 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5853 rtx y = lookup_as_function (folded_arg0, code);
5854 rtx inner_const;
5855 enum rtx_code associate_code;
5856 rtx new_const;
5857
5858 if (y == 0
5859 || 0 == (inner_const
5860 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5861 || GET_CODE (inner_const) != CONST_INT
5862 /* If we have compiled a statement like
5863 "if (x == (x & mask1))", and now are looking at
5864 "x & mask2", we will have a case where the first operand
5865 of Y is the same as our first operand. Unless we detect
5866 this case, an infinite loop will result. */
5867 || XEXP (y, 0) == folded_arg0)
5868 break;
5869
5870 /* Don't associate these operations if they are a PLUS with the
5871 same constant and it is a power of two. These might be doable
5872 with a pre- or post-increment. Similarly for two subtracts of
5873 identical powers of two with post decrement. */
5874
5875 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
5876 && ((HAVE_PRE_INCREMENT
5877 && exact_log2 (INTVAL (const_arg1)) >= 0)
5878 || (HAVE_POST_INCREMENT
5879 && exact_log2 (INTVAL (const_arg1)) >= 0)
5880 || (HAVE_PRE_DECREMENT
5881 && exact_log2 (- INTVAL (const_arg1)) >= 0)
5882 || (HAVE_POST_DECREMENT
5883 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
5884 break;
5885
5886 /* Compute the code used to compose the constants. For example,
5887 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
5888
5889 associate_code
5890 = (code == MULT || code == DIV || code == UDIV ? MULT
5891 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
5892
5893 new_const = simplify_binary_operation (associate_code, mode,
5894 const_arg1, inner_const);
5895
5896 if (new_const == 0)
5897 break;
5898
5899 /* If we are associating shift operations, don't let this
5900 produce a shift of the size of the object or larger.
5901 This could occur when we follow a sign-extend by a right
5902 shift on a machine that does a sign-extend as a pair
5903 of shifts. */
5904
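	      /* For instance, on a 32-bit target, composing ASHIFTRT
		 counts of 24 and 8 would yield a count of 32; we use 31
		 instead, which leaves just copies of the sign bit.  */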
5905 if (is_shift && GET_CODE (new_const) == CONST_INT
5906 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
5907 {
5908 /* As an exception, we can turn an ASHIFTRT of this
5909 form into a shift of the number of bits - 1. */
5910 if (code == ASHIFTRT)
5911 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
5912 else
5913 break;
5914 }
5915
5916 y = copy_rtx (XEXP (y, 0));
5917
5918 /* If Y contains our first operand (the most common way this
5919 can happen is if Y is a MEM), we would go into an infinite
5920 loop if we tried to fold it. So don't in that case. */
5921
5922 if (! reg_mentioned_p (folded_arg0, y))
5923 y = fold_rtx (y, insn);
5924
5925 return cse_gen_binary (code, mode, y, new_const);
5926 }
5927 break;
5928
5929 default:
5930 break;
5931 }
5932
5933 new = simplify_binary_operation (code, mode,
5934 const_arg0 ? const_arg0 : folded_arg0,
5935 const_arg1 ? const_arg1 : folded_arg1);
5936 break;
5937
5938 case 'o':
5939 /* (lo_sum (high X) X) is simply X. */
5940 if (code == LO_SUM && const_arg0 != 0
5941 && GET_CODE (const_arg0) == HIGH
5942 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
5943 return const_arg1;
5944 break;
5945
5946 case '3':
5947 case 'b':
5948 new = simplify_ternary_operation (code, mode, mode_arg0,
5949 const_arg0 ? const_arg0 : folded_arg0,
5950 const_arg1 ? const_arg1 : folded_arg1,
5951 const_arg2 ? const_arg2 : XEXP (x, 2));
5952 break;
5953
5954 case 'x':
5955 /* Always eliminate CONSTANT_P_RTX at this stage. */
5956 if (code == CONSTANT_P_RTX)
5957 return (const_arg0 ? const1_rtx : const0_rtx);
5958 break;
5959 }
5960
5961 return new ? new : x;
5962 }
5963 \f
5964 /* Return a constant value currently equivalent to X.
5965 Return 0 if we don't know one. */
5966
5967 static rtx
5968 equiv_constant (x)
5969 rtx x;
5970 {
5971 if (GET_CODE (x) == REG
5972 && REGNO_QTY_VALID_P (REGNO (x))
5973 && qty_const[REG_QTY (REGNO (x))])
5974 x = gen_lowpart_if_possible (GET_MODE (x), qty_const[REG_QTY (REGNO (x))]);
5975
5976 if (x == 0 || CONSTANT_P (x))
5977 return x;
5978
5979 /* If X is a MEM, try to fold it outside the context of any insn to see if
5980 it might be equivalent to a constant. That handles the case where it
5981 is a constant-pool reference. Then try to look it up in the hash table
5982 in case it is something whose value we have seen before. */
5983
5984 if (GET_CODE (x) == MEM)
5985 {
5986 struct table_elt *elt;
5987
5988 x = fold_rtx (x, NULL_RTX);
5989 if (CONSTANT_P (x))
5990 return x;
5991
5992 elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
5993 if (elt == 0)
5994 return 0;
5995
5996 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
5997 if (elt->is_const && CONSTANT_P (elt->exp))
5998 return elt->exp;
5999 }
6000
6001 return 0;
6002 }
6003 \f
6004 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
6005 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
6006 least-significant part of X.
6007 MODE specifies how big a part of X to return.
6008
6009 If the requested operation cannot be done, 0 is returned.
6010
6011 This is similar to gen_lowpart in emit-rtl.c. */
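/* A hypothetical use: the QImode low part of (mem:SI (reg:SI 100)) is
   (mem:QI (reg:SI 100)) on a little-endian machine, or the same MEM
   displaced by 3 bytes on a big-endian one, provided the adjusted
   address is valid.  */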
6012
6013 rtx
6014 gen_lowpart_if_possible (mode, x)
6015 enum machine_mode mode;
6016 register rtx x;
6017 {
6018 rtx result = gen_lowpart_common (mode, x);
6019
6020 if (result)
6021 return result;
6022 else if (GET_CODE (x) == MEM)
6023 {
6024 /* This is the only other case we handle. */
6025 register int offset = 0;
6026 rtx new;
6027
6028 if (WORDS_BIG_ENDIAN)
6029 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
6030 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
6031 if (BYTES_BIG_ENDIAN)
6032 /* Adjust the address so that the address-after-the-data is
6033 unchanged. */
6034 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
6035 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
6036 new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
6037 if (! memory_address_p (mode, XEXP (new, 0)))
6038 return 0;
6039 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
6040 MEM_COPY_ATTRIBUTES (new, x);
6041 return new;
6042 }
6043 else
6044 return 0;
6045 }
6046 \f
6047 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
6048 branch. It will be zero if not.
6049
6050 In certain cases, this can cause us to add an equivalence. For example,
6051 if we are following the taken case of
6052 if (i == 2)
6053 we can add the fact that `i' and `2' are now equivalent.
6054
6055 In any case, we can record that this comparison was passed. If the same
6056 comparison is seen later, we will know its value. */
6057
6058 static void
6059 record_jump_equiv (insn, taken)
6060 rtx insn;
6061 int taken;
6062 {
6063 int cond_known_true;
6064 rtx op0, op1;
6065 enum machine_mode mode, mode0, mode1;
6066 int reversed_nonequality = 0;
6067 enum rtx_code code;
6068
6069 /* Ensure this is the right kind of insn. */
6070 if (! condjump_p (insn) || simplejump_p (insn))
6071 return;
6072
6073 /* See if this jump condition is known true or false. */
6074 if (taken)
6075 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
6076 else
6077 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
6078
6079 /* Get the type of comparison being done and the operands being compared.
6080 If we had to reverse a non-equality condition, record that fact so we
6081 know that it isn't valid for floating-point. */
6082 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
6083 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
6084 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
6085
6086 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
6087 if (! cond_known_true)
6088 {
6089 reversed_nonequality = (code != EQ && code != NE);
6090 code = reverse_condition (code);
6091 }
6092
6093 /* The mode is the mode of the non-constant. */
6094 mode = mode0;
6095 if (mode1 != VOIDmode)
6096 mode = mode1;
6097
6098 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
6099 }
6100
6101 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
6102 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
6103 Make any useful entries we can with that information. Called from
6104 the function above, and also called recursively. */
6105
6106 static void
6107 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
6108 enum rtx_code code;
6109 enum machine_mode mode;
6110 rtx op0, op1;
6111 int reversed_nonequality;
6112 {
6113 unsigned op0_hash, op1_hash;
6114 int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
6115 struct table_elt *op0_elt, *op1_elt;
6116
6117 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
6118 we know that they are also equal in the smaller mode (this is also
6119 true for all smaller modes whether or not there is a SUBREG, but
6120 is not worth testing for with no SUBREG). */
6121
6122 /* Note that GET_MODE (op0) may not equal MODE. */
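  /* E.g. (register numbers invented), if (subreg:SI (reg:HI 65) 0) is
     known equal to (reg:SI 66), we record that (reg:HI 65) equals the
     HImode low part of (reg:SI 66) as well.  */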
6123 if (code == EQ && GET_CODE (op0) == SUBREG
6124 && (GET_MODE_SIZE (GET_MODE (op0))
6125 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
6126 {
6127 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
6128 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
6129
6130 record_jump_cond (code, mode, SUBREG_REG (op0),
6131 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
6132 reversed_nonequality);
6133 }
6134
6135 if (code == EQ && GET_CODE (op1) == SUBREG
6136 && (GET_MODE_SIZE (GET_MODE (op1))
6137 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
6138 {
6139 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
6140 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
6141
6142 record_jump_cond (code, mode, SUBREG_REG (op1),
6143 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
6144 reversed_nonequality);
6145 }
6146
6147 /* Similarly, if this is an NE comparison, and either is a SUBREG
6148 making a smaller mode, we know the whole thing is also NE. */
6149
6150 /* Note that GET_MODE (op0) may not equal MODE;
6151 if we test MODE instead, we can get an infinite recursion
6152 alternating between two modes each wider than MODE. */
6153
6154 if (code == NE && GET_CODE (op0) == SUBREG
6155 && subreg_lowpart_p (op0)
6156 && (GET_MODE_SIZE (GET_MODE (op0))
6157 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
6158 {
6159 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
6160 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
6161
6162 record_jump_cond (code, mode, SUBREG_REG (op0),
6163 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
6164 reversed_nonequality);
6165 }
6166
6167 if (code == NE && GET_CODE (op1) == SUBREG
6168 && subreg_lowpart_p (op1)
6169 && (GET_MODE_SIZE (GET_MODE (op1))
6170 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
6171 {
6172 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
6173 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
6174
6175 record_jump_cond (code, mode, SUBREG_REG (op1),
6176 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
6177 reversed_nonequality);
6178 }
6179
6180 /* Hash both operands. */
6181
6182 do_not_record = 0;
6183 hash_arg_in_memory = 0;
6184 hash_arg_in_struct = 0;
6185 op0_hash = HASH (op0, mode);
6186 op0_in_memory = hash_arg_in_memory;
6187 op0_in_struct = hash_arg_in_struct;
6188
6189 if (do_not_record)
6190 return;
6191
6192 do_not_record = 0;
6193 hash_arg_in_memory = 0;
6194 hash_arg_in_struct = 0;
6195 op1_hash = HASH (op1, mode);
6196 op1_in_memory = hash_arg_in_memory;
6197 op1_in_struct = hash_arg_in_struct;
6198
6199 if (do_not_record)
6200 return;
6201
6202 /* Look up both operands. */
6203 op0_elt = lookup (op0, op0_hash, mode);
6204 op1_elt = lookup (op1, op1_hash, mode);
6205
6206 /* If both operands are already equivalent or if they are not in the
6207 table but are identical, do nothing. */
6208 if ((op0_elt != 0 && op1_elt != 0
6209 && op0_elt->first_same_value == op1_elt->first_same_value)
6210 || op0 == op1 || rtx_equal_p (op0, op1))
6211 return;
6212
6213 /* If we aren't setting two things equal, all we can do is save this
6214 comparison. Similarly if this is floating-point. In the latter
6215 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
6216 If we record the equality, we might inadvertently delete code
6217 whose intent was to change -0 to +0. */
6218
6219 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
6220 {
6221 /* If we reversed a floating-point comparison, if OP0 is not a
6222 register, or if OP1 is neither a register nor a constant, we can't
6223 do anything. */
6224
6225 if (GET_CODE (op1) != REG)
6226 op1 = equiv_constant (op1);
6227
6228 if ((reversed_nonequality && FLOAT_MODE_P (mode))
6229 || GET_CODE (op0) != REG || op1 == 0)
6230 return;
6231
6232 /* Put OP0 in the hash table if it isn't already. This gives it a
6233 new quantity number. */
6234 if (op0_elt == 0)
6235 {
6236 if (insert_regs (op0, NULL_PTR, 0))
6237 {
6238 rehash_using_reg (op0);
6239 op0_hash = HASH (op0, mode);
6240
6241 /* If OP0 is contained in OP1, this changes its hash code
6242 as well. Faster to rehash than to check, except
6243 for the simple case of a constant. */
6244 if (! CONSTANT_P (op1))
6245 op1_hash = HASH (op1, mode);
6246 }
6247
6248 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
6249 op0_elt->in_memory = op0_in_memory;
6250 op0_elt->in_struct = op0_in_struct;
6251 }
6252
6253 qty_comparison_code[REG_QTY (REGNO (op0))] = code;
6254 if (GET_CODE (op1) == REG)
6255 {
6256 /* Look it up again--in case op0 and op1 are the same. */
6257 op1_elt = lookup (op1, op1_hash, mode);
6258
6259 /* Put OP1 in the hash table so it gets a new quantity number. */
6260 if (op1_elt == 0)
6261 {
6262 if (insert_regs (op1, NULL_PTR, 0))
6263 {
6264 rehash_using_reg (op1);
6265 op1_hash = HASH (op1, mode);
6266 }
6267
6268 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
6269 op1_elt->in_memory = op1_in_memory;
6270 op1_elt->in_struct = op1_in_struct;
6271 }
6272
6273 qty_comparison_qty[REG_QTY (REGNO (op0))] = REG_QTY (REGNO (op1));
6274 qty_comparison_const[REG_QTY (REGNO (op0))] = 0;
6275 }
6276 else
6277 {
6278 qty_comparison_qty[REG_QTY (REGNO (op0))] = -1;
6279 qty_comparison_const[REG_QTY (REGNO (op0))] = op1;
6280 }
6281
6282 return;
6283 }
6284
6285 /* If either side is still missing an equivalence, make it now,
6286 then merge the equivalences. */
6287
6288 if (op0_elt == 0)
6289 {
6290 if (insert_regs (op0, NULL_PTR, 0))
6291 {
6292 rehash_using_reg (op0);
6293 op0_hash = HASH (op0, mode);
6294 }
6295
6296 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
6297 op0_elt->in_memory = op0_in_memory;
6298 op0_elt->in_struct = op0_in_struct;
6299 }
6300
6301 if (op1_elt == 0)
6302 {
6303 if (insert_regs (op1, NULL_PTR, 0))
6304 {
6305 rehash_using_reg (op1);
6306 op1_hash = HASH (op1, mode);
6307 }
6308
6309 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
6310 op1_elt->in_memory = op1_in_memory;
6311 op1_elt->in_struct = op1_in_struct;
6312 }
6313
6314 merge_equiv_classes (op0_elt, op1_elt);
6315 last_jump_equiv_class = op0_elt;
6316 }
6317 \f
6318 /* CSE processing for one instruction.
6319 First simplify sources and addresses of all assignments
6320 in the instruction, using previously-computed equivalent values.
6321 Then install the new sources and destinations in the table
6322 of available values.
6323
6324 If LIBCALL_INSN is nonzero, don't record any equivalence made in
6325 the insn. It means that INSN is inside libcall block. In this
6326 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
6327
6328 /* Data on one SET contained in the instruction. */
6329
6330 struct set
6331 {
6332 /* The SET rtx itself. */
6333 rtx rtl;
6334 /* The SET_SRC of the rtx (the original value, if it is changing). */
6335 rtx src;
6336 /* The hash-table element for the SET_SRC of the SET. */
6337 struct table_elt *src_elt;
6338 /* Hash value for the SET_SRC. */
6339 unsigned src_hash;
6340 /* Hash value for the SET_DEST. */
6341 unsigned dest_hash;
6342 /* The SET_DEST, with SUBREG, etc., stripped. */
6343 rtx inner_dest;
6344 /* Place where the pointer to the INNER_DEST was found. */
6345 rtx *inner_dest_loc;
6346 /* Nonzero if the SET_SRC is in memory. */
6347 char src_in_memory;
6348 /* Nonzero if the SET_SRC is in a structure. */
6349 char src_in_struct;
6350 /* Nonzero if the SET_SRC contains something
6351 whose value cannot be predicted and understood. */
6352 char src_volatile;
6353 /* Original machine mode, in case it becomes a CONST_INT. */
6354 enum machine_mode mode;
6355 /* A constant equivalent for SET_SRC, if any. */
6356 rtx src_const;
6357 /* Hash value of constant equivalent for SET_SRC. */
6358 unsigned src_const_hash;
6359 /* Table entry for constant equivalent for SET_SRC, if any. */
6360 struct table_elt *src_const_elt;
6361 };
6362
6363 static void
6364 cse_insn (insn, libcall_insn)
6365 rtx insn;
6366 rtx libcall_insn;
6367 {
6368 register rtx x = PATTERN (insn);
6369 register int i;
6370 rtx tem;
6371 register int n_sets = 0;
6372
6373 #ifdef HAVE_cc0
6374 /* Records what this insn does to set CC0. */
6375 rtx this_insn_cc0 = 0;
6376 enum machine_mode this_insn_cc0_mode = VOIDmode;
6377 #endif
6378
6379 rtx src_eqv = 0;
6380 struct table_elt *src_eqv_elt = 0;
6381 int src_eqv_volatile;
6382 int src_eqv_in_memory;
6383 int src_eqv_in_struct;
6384 unsigned src_eqv_hash;
6385
6386 struct set *sets;
6387
6388 this_insn = insn;
6389
6390 /* Find all the SETs and CLOBBERs in this instruction.
6391 Record all the SETs in the array `set' and count them.
6392 Also determine whether there is a CLOBBER that invalidates
6393 all memory references, or all references at varying addresses. */
6394
6395 if (GET_CODE (insn) == CALL_INSN)
6396 {
6397 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6398 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
6399 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
6400 }
6401
6402 if (GET_CODE (x) == SET)
6403 {
6404 sets = (struct set *) alloca (sizeof (struct set));
6405 sets[0].rtl = x;
6406
6407 /* Ignore SETs that are unconditional jumps.
6408 They never need cse processing, so this does not hurt.
6409 The reason is not efficiency but rather
6410 so that we can test at the end for instructions
6411 that have been simplified to unconditional jumps
6412 and not be misled by unchanged instructions
6413 that were unconditional jumps to begin with. */
6414 if (SET_DEST (x) == pc_rtx
6415 && GET_CODE (SET_SRC (x)) == LABEL_REF)
6416 ;
6417
6418 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
6419 The hard function value register is used only once, to copy to
6420 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
6421 Ensure we invalidate the destination register. On the 80386 no
6422 other code would invalidate it since it is a fixed_reg.
6423 We need not check the return of apply_change_group; see canon_reg. */
6424
6425 else if (GET_CODE (SET_SRC (x)) == CALL)
6426 {
6427 canon_reg (SET_SRC (x), insn);
6428 apply_change_group ();
6429 fold_rtx (SET_SRC (x), insn);
6430 invalidate (SET_DEST (x), VOIDmode);
6431 }
6432 else
6433 n_sets = 1;
6434 }
6435 else if (GET_CODE (x) == PARALLEL)
6436 {
6437 register int lim = XVECLEN (x, 0);
6438
6439 sets = (struct set *) alloca (lim * sizeof (struct set));
6440
6441 /* Find all regs explicitly clobbered in this insn,
6442 and ensure they are not replaced with any other regs
6443 elsewhere in this insn.
6444 When a reg that is clobbered is also used for input,
6445 we should presume that that is for a reason,
6446 and we should not substitute some other register
6447 which is not supposed to be clobbered.
6448 Therefore, this loop cannot be merged into the one below
6449 because a CALL may precede a CLOBBER and refer to the
6450 value clobbered. We must not let a canonicalization do
6451 anything in that case. */
6452 for (i = 0; i < lim; i++)
6453 {
6454 register rtx y = XVECEXP (x, 0, i);
6455 if (GET_CODE (y) == CLOBBER)
6456 {
6457 rtx clobbered = XEXP (y, 0);
6458
6459 if (GET_CODE (clobbered) == REG
6460 || GET_CODE (clobbered) == SUBREG)
6461 invalidate (clobbered, VOIDmode);
6462 else if (GET_CODE (clobbered) == STRICT_LOW_PART
6463 || GET_CODE (clobbered) == ZERO_EXTRACT)
6464 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
6465 }
6466 }
6467
6468 for (i = 0; i < lim; i++)
6469 {
6470 register rtx y = XVECEXP (x, 0, i);
6471 if (GET_CODE (y) == SET)
6472 {
6473 /* As above, we ignore unconditional jumps and call-insns and
6474 ignore the result of apply_change_group. */
6475 if (GET_CODE (SET_SRC (y)) == CALL)
6476 {
6477 canon_reg (SET_SRC (y), insn);
6478 apply_change_group ();
6479 fold_rtx (SET_SRC (y), insn);
6480 invalidate (SET_DEST (y), VOIDmode);
6481 }
6482 else if (SET_DEST (y) == pc_rtx
6483 && GET_CODE (SET_SRC (y)) == LABEL_REF)
6484 ;
6485 else
6486 sets[n_sets++].rtl = y;
6487 }
6488 else if (GET_CODE (y) == CLOBBER)
6489 {
6490 /* If we clobber memory, canon the address.
6491 This does nothing when a register is clobbered
6492 because we have already invalidated the reg. */
6493 if (GET_CODE (XEXP (y, 0)) == MEM)
6494 canon_reg (XEXP (y, 0), NULL_RTX);
6495 }
6496 else if (GET_CODE (y) == USE
6497 && ! (GET_CODE (XEXP (y, 0)) == REG
6498 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
6499 canon_reg (y, NULL_RTX);
6500 else if (GET_CODE (y) == CALL)
6501 {
6502 /* The result of apply_change_group can be ignored; see
6503 canon_reg. */
6504 canon_reg (y, insn);
6505 apply_change_group ();
6506 fold_rtx (y, insn);
6507 }
6508 }
6509 }
6510 else if (GET_CODE (x) == CLOBBER)
6511 {
6512 if (GET_CODE (XEXP (x, 0)) == MEM)
6513 canon_reg (XEXP (x, 0), NULL_RTX);
6514 }
6515
6516 /* Canonicalize a USE of a pseudo register or memory location. */
6517 else if (GET_CODE (x) == USE
6518 && ! (GET_CODE (XEXP (x, 0)) == REG
6519 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
6520 canon_reg (XEXP (x, 0), NULL_RTX);
6521 else if (GET_CODE (x) == CALL)
6522 {
6523 /* The result of apply_change_group can be ignored; see canon_reg. */
6524 canon_reg (x, insn);
6525 apply_change_group ();
6526 fold_rtx (x, insn);
6527 }
6528
6529 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
6530 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
6531 is handled specially for this case, and if it isn't set, then there will
6532 be no equivalence for the destination. */
6533 if (n_sets == 1 && REG_NOTES (insn) != 0
6534 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
6535 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
6536 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
6537 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
6538
6539 /* Canonicalize sources and addresses of destinations.
6540 We do this in a separate pass to avoid problems when a MATCH_DUP is
6541 present in the insn pattern. In that case, we want to ensure that
6542 we don't break the duplicate nature of the pattern. So we will replace
6543 both operands at the same time. Otherwise, we would fail to find an
6544 equivalent substitution in the loop calling validate_change below.
6545
6546 We used to suppress canonicalization of DEST if it appears in SRC,
6547 but we don't do this any more. */
6548
6549 for (i = 0; i < n_sets; i++)
6550 {
6551 rtx dest = SET_DEST (sets[i].rtl);
6552 rtx src = SET_SRC (sets[i].rtl);
6553 rtx new = canon_reg (src, insn);
6554 int insn_code;
6555
6556 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
6557 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
6558 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
6559 || (insn_code = recog_memoized (insn)) < 0
6560 || insn_n_dups[insn_code] > 0)
6561 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
6562 else
6563 SET_SRC (sets[i].rtl) = new;
6564
6565 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
6566 {
6567 validate_change (insn, &XEXP (dest, 1),
6568 canon_reg (XEXP (dest, 1), insn), 1);
6569 validate_change (insn, &XEXP (dest, 2),
6570 canon_reg (XEXP (dest, 2), insn), 1);
6571 }
6572
6573 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
6574 || GET_CODE (dest) == ZERO_EXTRACT
6575 || GET_CODE (dest) == SIGN_EXTRACT)
6576 dest = XEXP (dest, 0);
6577
6578 if (GET_CODE (dest) == MEM)
6579 canon_reg (dest, insn);
6580 }
6581
6582 /* Now that we have done all the replacements, we can apply the change
6583 group and see if they all work. Note that this will cause some
6584 canonicalizations that would have worked individually not to be applied
6585 because some other canonicalization didn't work, but this should not
6586 occur often.
6587
6588 The result of apply_change_group can be ignored; see canon_reg. */
6589
6590 apply_change_group ();
6591
6592 /* Set sets[i].src_elt to the class each source belongs to.
6593 Detect assignments from or to volatile things
6594 and set set[i] to zero so they will be ignored
6595 in the rest of this function.
6596
6597 Nothing in this loop changes the hash table or the register chains. */
6598
6599 for (i = 0; i < n_sets; i++)
6600 {
6601 register rtx src, dest;
6602 register rtx src_folded;
6603 register struct table_elt *elt = 0, *p;
6604 enum machine_mode mode;
6605 rtx src_eqv_here;
6606 rtx src_const = 0;
6607 rtx src_related = 0;
6608 struct table_elt *src_const_elt = 0;
6609 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
6610 int src_related_cost = 10000, src_elt_cost = 10000;
6611 /* Set non-zero if we need to call force_const_mem on the
6612 contents of src_folded before using it. */
6613 int src_folded_force_flag = 0;
6614
6615 dest = SET_DEST (sets[i].rtl);
6616 src = SET_SRC (sets[i].rtl);
6617
6618 /* If SRC is a constant that has no machine mode,
6619 hash it with the destination's machine mode.
6620 This way we can keep different modes separate. */
6621
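      /* E.g., (const_int 5) being stored into an SImode register is
	 hashed in SImode, while the same constant stored into a DImode
	 register is hashed in DImode, so the two uses never appear
	 spuriously equivalent.  */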
6622 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6623 sets[i].mode = mode;
6624
6625 if (src_eqv)
6626 {
6627 enum machine_mode eqvmode = mode;
6628 if (GET_CODE (dest) == STRICT_LOW_PART)
6629 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6630 do_not_record = 0;
6631 hash_arg_in_memory = 0;
6632 hash_arg_in_struct = 0;
6633 src_eqv = fold_rtx (src_eqv, insn);
6634 src_eqv_hash = HASH (src_eqv, eqvmode);
6635
6636 /* Find the equivalence class for the equivalent expression. */
6637
6638 if (!do_not_record)
6639 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
6640
6641 src_eqv_volatile = do_not_record;
6642 src_eqv_in_memory = hash_arg_in_memory;
6643 src_eqv_in_struct = hash_arg_in_struct;
6644 }
6645
6646 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
6647 value of the INNER register, not the destination. So it is not
6648 a valid substitution for the source. But save it for later. */
6649 if (GET_CODE (dest) == STRICT_LOW_PART)
6650 src_eqv_here = 0;
6651 else
6652 src_eqv_here = src_eqv;
6653
6654 /* Simplify any foldable subexpressions in SRC. Then get the fully-
6655 simplified result, which may not necessarily be valid. */
6656 src_folded = fold_rtx (src, insn);
6657
6658 #if 0
6659 /* ??? This caused bad code to be generated for the m68k port with -O2.
6660 Suppose src is (CONST_INT -1), and that after truncation src_folded
6661 is (CONST_INT 3). Suppose src_folded is then used for src_const.
6662 At the end we will add src and src_const to the same equivalence
6663 class. We now have 3 and -1 on the same equivalence class. This
6664 causes later instructions to be mis-optimized. */
6665 /* If storing a constant in a bitfield, pre-truncate the constant
6666 so we will be able to record it later. */
6667 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6668 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6669 {
6670 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6671
6672 if (GET_CODE (src) == CONST_INT
6673 && GET_CODE (width) == CONST_INT
6674 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6675 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6676 src_folded
6677 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
6678 << INTVAL (width)) - 1));
6679 }
6680 #endif
6681
6682 /* Compute SRC's hash code, and also notice if it
6683 should not be recorded at all. In that case,
6684 prevent any further processing of this assignment. */
6685 do_not_record = 0;
6686 hash_arg_in_memory = 0;
6687 hash_arg_in_struct = 0;
6688
6689 sets[i].src = src;
6690 sets[i].src_hash = HASH (src, mode);
6691 sets[i].src_volatile = do_not_record;
6692 sets[i].src_in_memory = hash_arg_in_memory;
6693 sets[i].src_in_struct = hash_arg_in_struct;
6694
6695 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
6696 a pseudo that is set more than once, do not record SRC. Using
6697 SRC as a replacement for anything else will be incorrect in that
6698 situation. Note that this usually occurs only for stack slots,
6699 in which case all the RTL would be referring to SRC, so we don't
6700 lose any optimization opportunities by not having SRC in the
6701 hash table. */
6702
6703 if (GET_CODE (src) == MEM
6704 && find_reg_note (insn, REG_EQUIV, src) != 0
6705 && GET_CODE (dest) == REG
6706 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
6707 && REG_N_SETS (REGNO (dest)) != 1)
6708 sets[i].src_volatile = 1;
6709
6710 #if 0
6711 /* It is no longer clear why we used to do this, but it doesn't
6712 appear to still be needed. So let's try without it since this
6713 code hurts cse'ing widened ops. */
6714 /* If source is a perverse subreg (such as QI treated as an SI),
6715 treat it as volatile. It may do the work of an SI in one context
6716 where the extra bits are not being used, but cannot replace an SI
6717 in general. */
6718 if (GET_CODE (src) == SUBREG
6719 && (GET_MODE_SIZE (GET_MODE (src))
6720 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6721 sets[i].src_volatile = 1;
6722 #endif
6723
6724 /* Locate all possible equivalent forms for SRC. Try to replace
6725 SRC in the insn with each cheaper equivalent.
6726
6727 We have the following types of equivalents: SRC itself, a folded
6728 version, a value given in a REG_EQUAL note, or a value related
6729 to a constant.
6730
6731 Each of these equivalents may be part of an additional class
6732 of equivalents (if more than one is in the table, they must be in
6733 the same class; we check for this).
6734
6735 If the source is volatile, we don't do any table lookups.
6736
6737 We note any constant equivalent for possible later use in a
6738 REG_NOTE. */
6739
6740 if (!sets[i].src_volatile)
6741 elt = lookup (src, sets[i].src_hash, mode);
6742
6743 sets[i].src_elt = elt;
6744
6745 if (elt && src_eqv_here && src_eqv_elt)
6746 {
6747 if (elt->first_same_value != src_eqv_elt->first_same_value)
6748 {
6749 /* The REG_EQUAL is indicating that two formerly distinct
6750 classes are now equivalent. So merge them. */
6751 merge_equiv_classes (elt, src_eqv_elt);
6752 src_eqv_hash = HASH (src_eqv, elt->mode);
6753 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
6754 }
6755
6756 src_eqv_here = 0;
6757 }
6758
6759 else if (src_eqv_elt)
6760 elt = src_eqv_elt;
6761
6762 /* Try to find a constant somewhere and record it in `src_const'.
6763 Record its table element, if any, in `src_const_elt'. Look in
6764 any known equivalences first. (If the constant is not in the
6765 table, also set `sets[i].src_const_hash'). */
6766 if (elt)
6767 for (p = elt->first_same_value; p; p = p->next_same_value)
6768 if (p->is_const)
6769 {
6770 src_const = p->exp;
6771 src_const_elt = elt;
6772 break;
6773 }
6774
6775 if (src_const == 0
6776 && (CONSTANT_P (src_folded)
6777 /* Consider (minus (label_ref L1) (label_ref L2)) as
6778 "constant" here so we will record it. This allows us
6779 to fold switch statements when an ADDR_DIFF_VEC is used. */
6780 || (GET_CODE (src_folded) == MINUS
6781 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6782 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6783 src_const = src_folded, src_const_elt = elt;
6784 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6785 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6786
6787 /* If we don't know if the constant is in the table, get its
6788 hash code and look it up. */
6789 if (src_const && src_const_elt == 0)
6790 {
6791 sets[i].src_const_hash = HASH (src_const, mode);
6792 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
6793 }
6794
6795 sets[i].src_const = src_const;
6796 sets[i].src_const_elt = src_const_elt;
6797
6798 /* If the constant and our source are both in the table, mark them as
6799 equivalent. Otherwise, if a constant is in the table but the source
6800 isn't, set ELT to it. */
6801 if (src_const_elt && elt
6802 && src_const_elt->first_same_value != elt->first_same_value)
6803 merge_equiv_classes (elt, src_const_elt);
6804 else if (src_const_elt && elt == 0)
6805 elt = src_const_elt;
6806
6807 /* See if there is a register linearly related to a constant
6808 equivalent of SRC. */
6809 if (src_const
6810 && (GET_CODE (src_const) == CONST
6811 || (src_const_elt && src_const_elt->related_value != 0)))
6812 {
6813 src_related = use_related_value (src_const, src_const_elt);
6814 if (src_related)
6815 {
6816 struct table_elt *src_related_elt
6817 = lookup (src_related, HASH (src_related, mode), mode);
6818 if (src_related_elt && elt)
6819 {
6820 if (elt->first_same_value
6821 != src_related_elt->first_same_value)
6822 /* This can occur when we previously saw a CONST
6823 involving a SYMBOL_REF and then see the SYMBOL_REF
6824 twice. Merge the involved classes. */
6825 merge_equiv_classes (elt, src_related_elt);
6826
6827 src_related = 0;
6828 src_related_elt = 0;
6829 }
6830 else if (src_related_elt && elt == 0)
6831 elt = src_related_elt;
6832 }
6833 }
6834
6835 /* See if we have a CONST_INT that is already in a register in a
6836 wider mode. */
6837
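      /* A hypothetical case: if (reg:SI 70) is already known to hold
	 (const_int 37) and this insn needs (const_int 37) in QImode,
	 the low part of (reg:SI 70) can be used instead of the
	 immediate.  */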
6838 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6839 && GET_MODE_CLASS (mode) == MODE_INT
6840 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6841 {
6842 enum machine_mode wider_mode;
6843
6844 for (wider_mode = GET_MODE_WIDER_MODE (mode);
6845 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6846 && src_related == 0;
6847 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6848 {
6849 struct table_elt *const_elt
6850 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6851
6852 if (const_elt == 0)
6853 continue;
6854
6855 for (const_elt = const_elt->first_same_value;
6856 const_elt; const_elt = const_elt->next_same_value)
6857 if (GET_CODE (const_elt->exp) == REG)
6858 {
6859 src_related = gen_lowpart_if_possible (mode,
6860 const_elt->exp);
6861 break;
6862 }
6863 }
6864 }
6865
6866 /* Another possibility is that we have an AND with a constant in
6867 a mode narrower than a word. If so, it might have been generated
6868 as part of an "if" which would narrow the AND. If we already
6869 have done the AND in a wider mode, we can use a SUBREG of that
6870 value. */
6871
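      /* E.g. (register numbers invented), for
	 (and:HI (reg:HI 70) (const_int 15)), if
	 (and:SI (reg:SI 70) (const_int 15)) was already computed into
	 (reg:SI 71), the HImode low part of (reg:SI 71) can be used
	 instead of redoing the AND.  */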
6872 if (flag_expensive_optimizations && ! src_related
6873 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6874 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6875 {
6876 enum machine_mode tmode;
6877 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
6878
6879 for (tmode = GET_MODE_WIDER_MODE (mode);
6880 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6881 tmode = GET_MODE_WIDER_MODE (tmode))
6882 {
6883 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6884 struct table_elt *larger_elt;
6885
6886 if (inner)
6887 {
6888 PUT_MODE (new_and, tmode);
6889 XEXP (new_and, 0) = inner;
6890 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6891 if (larger_elt == 0)
6892 continue;
6893
6894 for (larger_elt = larger_elt->first_same_value;
6895 larger_elt; larger_elt = larger_elt->next_same_value)
6896 if (GET_CODE (larger_elt->exp) == REG)
6897 {
6898 src_related
6899 = gen_lowpart_if_possible (mode, larger_elt->exp);
6900 break;
6901 }
6902
6903 if (src_related)
6904 break;
6905 }
6906 }
6907 }
6908
6909 #ifdef LOAD_EXTEND_OP
6910 /* See if a MEM has already been loaded with a widening operation;
6911 if it has, we can use a subreg of that. Many CISC machines
6912 also have such operations, but this is only likely to be
6913 beneficial on these machines. */
6914
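      /* A hypothetical case, on a machine that zero-extends QImode
	 loads: if (reg:SI 72) was loaded as
	 (zero_extend:SI (mem:QI addr)) and this insn loads the same
	 (mem:QI addr) in QImode, the QImode low part of (reg:SI 72)
	 already holds the needed value.  */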
6915 if (flag_expensive_optimizations && src_related == 0
6916 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6917 && GET_MODE_CLASS (mode) == MODE_INT
6918 && GET_CODE (src) == MEM && ! do_not_record
6919 && LOAD_EXTEND_OP (mode) != NIL)
6920 {
6921 enum machine_mode tmode;
6922
6923 /* Set what we are trying to extend and the operation it might
6924 have been extended with. */
6925 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
6926 XEXP (memory_extend_rtx, 0) = src;
6927
6928 for (tmode = GET_MODE_WIDER_MODE (mode);
6929 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6930 tmode = GET_MODE_WIDER_MODE (tmode))
6931 {
6932 struct table_elt *larger_elt;
6933
6934 PUT_MODE (memory_extend_rtx, tmode);
6935 larger_elt = lookup (memory_extend_rtx,
6936 HASH (memory_extend_rtx, tmode), tmode);
6937 if (larger_elt == 0)
6938 continue;
6939
6940 for (larger_elt = larger_elt->first_same_value;
6941 larger_elt; larger_elt = larger_elt->next_same_value)
6942 if (GET_CODE (larger_elt->exp) == REG)
6943 {
6944 src_related = gen_lowpart_if_possible (mode,
6945 larger_elt->exp);
6946 break;
6947 }
6948
6949 if (src_related)
6950 break;
6951 }
6952 }
6953 #endif /* LOAD_EXTEND_OP */
6954
6955 if (src == src_folded)
6956 src_folded = 0;
6957
6958 /* At this point, ELT, if non-zero, points to a class of expressions
6959 equivalent to the source of this SET, while SRC, SRC_EQV, SRC_FOLDED,
6960 and SRC_RELATED, if non-zero, each contain additional equivalent
6961 expressions. Prune these latter expressions by deleting expressions
6962 already in the equivalence class.
6963
6964 Check for an equivalent identical to the destination. If found,
6965 this is the preferred equivalent since it will likely lead to
6966 elimination of the insn. Indicate this by placing it in
6967 `src_related'. */
6968
6969 if (elt) elt = elt->first_same_value;
6970 for (p = elt; p; p = p->next_same_value)
6971 {
6972 enum rtx_code code = GET_CODE (p->exp);
6973
6974 /* If the expression is not valid, ignore it. Then we do not
6975 have to check for validity below. In most cases, we can use
6976 `rtx_equal_p', since canonicalization has already been done. */
6977 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
6978 continue;
6979
6980 /* Also skip paradoxical subregs, unless that's what we're
6981 looking for. */
6982 if (code == SUBREG
6983 && (GET_MODE_SIZE (GET_MODE (p->exp))
6984 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
6985 && ! (src != 0
6986 && GET_CODE (src) == SUBREG
6987 && GET_MODE (src) == GET_MODE (p->exp)
6988 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
6989 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
6990 continue;
6991
6992 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
6993 src = 0;
6994 else if (src_folded && GET_CODE (src_folded) == code
6995 && rtx_equal_p (src_folded, p->exp))
6996 src_folded = 0;
6997 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
6998 && rtx_equal_p (src_eqv_here, p->exp))
6999 src_eqv_here = 0;
7000 else if (src_related && GET_CODE (src_related) == code
7001 && rtx_equal_p (src_related, p->exp))
7002 src_related = 0;
7003
7004 /* If this is the same as the destination of the insn, we want
7005 to prefer it. Copy it to src_related. The code below will
7006 then give it a negative cost. */
7007 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
7008 src_related = dest;
7009
7010 }
7011
7012 /* Find the cheapest valid equivalent, trying all the available
7013 possibilities. Prefer items not in the hash table to ones
7014 that are when they are equal cost. Note that we can never
7015 worsen an insn as the current contents will also succeed.
7016 If we find an equivalent identical to the destination, use it as best,
7017 since this insn will probably be eliminated in that case. */
7018 if (src)
7019 {
7020 if (rtx_equal_p (src, dest))
7021 src_cost = -1;
7022 else
7023 src_cost = COST (src);
7024 }
7025
7026 if (src_eqv_here)
7027 {
7028 if (rtx_equal_p (src_eqv_here, dest))
7029 src_eqv_cost = -1;
7030 else
7031 src_eqv_cost = COST (src_eqv_here);
7032 }
7033
7034 if (src_folded)
7035 {
7036 if (rtx_equal_p (src_folded, dest))
7037 src_folded_cost = -1;
7038 else
7039 src_folded_cost = COST (src_folded);
7040 }
7041
7042 if (src_related)
7043 {
7044 if (rtx_equal_p (src_related, dest))
7045 src_related_cost = -1;
7046 else
7047 src_related_cost = COST (src_related);
7048 }
7049
7050 /* If this was an indirect jump insn, a known label will really be
7051 cheaper even though it looks more expensive. */
7052 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
7053 src_folded = src_const, src_folded_cost = -1;
7054
7055 /* Terminate loop when replacement made. This must terminate since
7056 the current contents will be tested and will always be valid. */
7057 while (1)
7058 {
7059 rtx trial, old_src;
7060
7061 /* Skip invalid entries. */
7062 while (elt && GET_CODE (elt->exp) != REG
7063 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7064 elt = elt->next_same_value;
7065
7066 /* A paradoxical subreg would be bad here: it'll be the right
7067 size, but later may be adjusted so that the upper bits aren't
7068 what we want. So reject it. */
7069 if (elt != 0
7070 && GET_CODE (elt->exp) == SUBREG
7071 && (GET_MODE_SIZE (GET_MODE (elt->exp))
7072 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
7073 /* It is okay, though, if the rtx we're trying to match
7074 will ignore any of the bits we can't predict. */
7075 && ! (src != 0
7076 && GET_CODE (src) == SUBREG
7077 && GET_MODE (src) == GET_MODE (elt->exp)
7078 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
7079 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
7080 {
7081 elt = elt->next_same_value;
7082 continue;
7083 }
7084
7085 if (elt) src_elt_cost = elt->cost;
7086
7087 /* Find cheapest and skip it for the next time. For items
7088 of equal cost, use this order:
7089 src_folded, src, src_eqv, src_related and hash table entry. */
7090 if (src_folded_cost <= src_cost
7091 && src_folded_cost <= src_eqv_cost
7092 && src_folded_cost <= src_related_cost
7093 && src_folded_cost <= src_elt_cost)
7094 {
7095 trial = src_folded, src_folded_cost = 10000;
7096 if (src_folded_force_flag)
7097 trial = force_const_mem (mode, trial);
7098 }
7099 else if (src_cost <= src_eqv_cost
7100 && src_cost <= src_related_cost
7101 && src_cost <= src_elt_cost)
7102 trial = src, src_cost = 10000;
7103 else if (src_eqv_cost <= src_related_cost
7104 && src_eqv_cost <= src_elt_cost)
7105 trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
7106 else if (src_related_cost <= src_elt_cost)
7107 trial = copy_rtx (src_related), src_related_cost = 10000;
7108 else
7109 {
7110 trial = copy_rtx (elt->exp);
7111 elt = elt->next_same_value;
7112 src_elt_cost = 10000;
7113 }
7114
7115 /* We don't normally have an insn matching (set (pc) (pc)), so
7116 check for this separately here. We will delete such an
7117 insn below.
7118
7119 Tablejump insns contain a USE of the table, so simply replacing
7120 the operand with the constant won't match. This is simply an
7121 unconditional branch, however, and is therefore valid. Just
7122 insert the substitution here and we will delete and re-emit
7123 the insn later. */
7124
7125 /* Keep track of the original SET_SRC so that we can fix notes
7126 on libcall instructions. */
7127 old_src = SET_SRC (sets[i].rtl);
7128
7129 if (n_sets == 1 && dest == pc_rtx
7130 && (trial == pc_rtx
7131 || (GET_CODE (trial) == LABEL_REF
7132 && ! condjump_p (insn))))
7133 {
7134 /* If TRIAL is a label in front of a jump table, we are
7135 really falling through the switch (this is how casesi
7136 insns work), so we must branch around the table. */
7137 if (GET_CODE (trial) == CODE_LABEL
7138 && NEXT_INSN (trial) != 0
7139 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
7140 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
7141 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
7142
7143 trial = gen_rtx_LABEL_REF (Pmode, get_label_after (trial));
7144
7145 SET_SRC (sets[i].rtl) = trial;
7146 cse_jumps_altered = 1;
7147 break;
7148 }
7149
7150 /* Look for a substitution that makes a valid insn. */
7151 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
7152 {
7153 /* If we just made a substitution inside a libcall, then we
7154 need to make the same substitution in any notes attached
7155 to the RETVAL insn. */
7156 if (libcall_insn
7157 && (GET_CODE (old_src) == REG
7158 || GET_CODE (old_src) == SUBREG
7159 || GET_CODE (old_src) == MEM))
7160 replace_rtx (REG_NOTES (libcall_insn), old_src,
7161 canon_reg (SET_SRC (sets[i].rtl), insn));
7162
7163 /* The result of apply_change_group can be ignored; see
7164 canon_reg. */
7165
7166 validate_change (insn, &SET_SRC (sets[i].rtl),
7167 canon_reg (SET_SRC (sets[i].rtl), insn),
7168 1);
7169 apply_change_group ();
7170 break;
7171 }
7172
7173 /* If we previously found constant pool entries for
7174 constants and this is a constant, try making a
7175 pool entry. Put it in src_folded unless we have already done
7176 so, since that is where it likely came from. */
7177
7178 else if (constant_pool_entries_cost
7179 && CONSTANT_P (trial)
7180 && ! (GET_CODE (trial) == CONST
7181 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
7182 && (src_folded == 0
7183 || (GET_CODE (src_folded) != MEM
7184 && ! src_folded_force_flag))
7185 && GET_MODE_CLASS (mode) != MODE_CC
7186 && mode != VOIDmode)
7187 {
7188 src_folded_force_flag = 1;
7189 src_folded = trial;
7190 src_folded_cost = constant_pool_entries_cost;
7191 }
7192 }
7193
7194 src = SET_SRC (sets[i].rtl);
7195
7196 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
7197 However, there is an important exception: If both are registers
7198 that are not the head of their equivalence class, replace SET_SRC
7199 with the head of the class. If we do not do this, we will have
7200 both registers live over a portion of the basic block. This way,
7201 their lifetimes will likely abut instead of overlapping. */
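/* Hypothetical example: if regs 66 and 70 share a quantity whose
   head is reg 66, the no-op (set (reg 70) (reg 70)) becomes
   (set (reg 70) (reg 66)), making explicit that reg 70 is copied
   from the class head from here on. */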
7202 if (GET_CODE (dest) == REG
7203 && REGNO_QTY_VALID_P (REGNO (dest))
7204 && qty_mode[REG_QTY (REGNO (dest))] == GET_MODE (dest)
7205 && qty_first_reg[REG_QTY (REGNO (dest))] != REGNO (dest)
7206 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
7207 /* Don't do this if the original insn had a hard reg as
7208 SET_SRC. */
7209 && (GET_CODE (sets[i].src) != REG
7210 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
7211 /* We can't call canon_reg here because it won't do anything if
7212 SRC is a hard register. */
7213 {
7214 int first = qty_first_reg[REG_QTY (REGNO (src))];
7215 rtx new_src
7216 = (first >= FIRST_PSEUDO_REGISTER
7217 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
7218
7219 /* We must use validate_change even for this, because this
7220 might be a special no-op instruction, suitable only to
7221 tag notes onto. */
7222 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
7223 {
7224 src = new_src;
7225 /* If we had a constant that is cheaper than what we are now
7226 setting SRC to, use that constant. We ignored it when we
7227 thought we could make this into a no-op. */
7228 if (src_const && COST (src_const) < COST (src)
7229 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const,
7230 0))
7231 src = src_const;
7232 }
7233 }
7234
7235 /* If we made a change, recompute SRC values. */
7236 if (src != sets[i].src)
7237 {
7238 do_not_record = 0;
7239 hash_arg_in_memory = 0;
7240 hash_arg_in_struct = 0;
7241 sets[i].src = src;
7242 sets[i].src_hash = HASH (src, mode);
7243 sets[i].src_volatile = do_not_record;
7244 sets[i].src_in_memory = hash_arg_in_memory;
7245 sets[i].src_in_struct = hash_arg_in_struct;
7246 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
7247 }
7248
7249 /* If this is a single SET, we are setting a register, and we have an
7250 equivalent constant, we want to add a REG_EQUAL note. We don't want
7251 to write a REG_EQUAL note for a constant pseudo since verifying that
7252 that pseudo hasn't been eliminated is a pain. Such a note also
7253 won't help anything.
7254
7255 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
7256 which can be created for a reference to a compile time computable
7257 entry in a jump table. */
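/* E.g., for a hypothetical (set (reg 80) (reg 71)) where reg 71 is
   known to hold (const_int 42), the code below attaches
   (expr_list:REG_EQUAL (const_int 42)) to the insn so that later
   passes can use the constant even after reg 71 dies. */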
7258
7259 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
7260 && GET_CODE (src_const) != REG
7261 && ! (GET_CODE (src_const) == CONST
7262 && GET_CODE (XEXP (src_const, 0)) == MINUS
7263 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
7264 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
7265 {
7266 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7267
7268 /* Make sure that the rtx is not shared with any other insn. */
7269 src_const = copy_rtx (src_const);
7270
7271 /* Record the actual constant value in a REG_EQUAL note, making
7272 a new one if one does not already exist. */
7273 if (tem)
7274 XEXP (tem, 0) = src_const;
7275 else
7276 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
7277 src_const, REG_NOTES (insn));
7278
7279 /* If storing a constant value in a register that
7280 previously held the constant value 0,
7281 record this fact with a REG_WAS_0 note on this insn.
7282
7283 Note that the *register* is required to have previously held 0,
7284 not just any register in the quantity, and we must point to the
7285 insn that set that register to zero.
7286
7287 Rather than track each register individually, we just see if
7288 the last set for this quantity was for this register. */
7289
7290 if (REGNO_QTY_VALID_P (REGNO (dest))
7291 && qty_const[REG_QTY (REGNO (dest))] == const0_rtx)
7292 {
7293 /* See if we previously had a REG_WAS_0 note. */
7294 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7295 rtx const_insn = qty_const_insn[REG_QTY (REGNO (dest))];
7296
7297 if ((tem = single_set (const_insn)) != 0
7298 && rtx_equal_p (SET_DEST (tem), dest))
7299 {
7300 if (note)
7301 XEXP (note, 0) = const_insn;
7302 else
7303 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_WAS_0,
7304 const_insn,
7305 REG_NOTES (insn));
7306 }
7307 }
7308 }
7309
7310 /* Now deal with the destination. */
7311 do_not_record = 0;
7312 sets[i].inner_dest_loc = &SET_DEST (sets[0].rtl);
7313
7314 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
7315 to the MEM or REG within it. */
7316 while (GET_CODE (dest) == SIGN_EXTRACT
7317 || GET_CODE (dest) == ZERO_EXTRACT
7318 || GET_CODE (dest) == SUBREG
7319 || GET_CODE (dest) == STRICT_LOW_PART)
7320 {
7321 sets[i].inner_dest_loc = &XEXP (dest, 0);
7322 dest = XEXP (dest, 0);
7323 }
7324
7325 sets[i].inner_dest = dest;
7326
7327 if (GET_CODE (dest) == MEM)
7328 {
7329 #ifdef PUSH_ROUNDING
7330 /* Stack pushes invalidate the stack pointer. */
7331 rtx addr = XEXP (dest, 0);
7332 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7333 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7334 && XEXP (addr, 0) == stack_pointer_rtx)
7335 invalidate (stack_pointer_rtx, Pmode);
7336 #endif
7337 dest = fold_rtx (dest, insn);
7338 }
7339
7340 /* Compute the hash code of the destination now,
7341 before the effects of this instruction are recorded,
7342 since the register values used in the address computation
7343 are those before this instruction. */
7344 sets[i].dest_hash = HASH (dest, mode);
7345
7346 /* Don't enter a bit-field in the hash table
7347 because the value in it after the store
7348 may not equal what was stored, due to truncation. */
7349
7350 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
7351 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
7352 {
7353 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
7354
7355 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
7356 && GET_CODE (width) == CONST_INT
7357 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
7358 && ! (INTVAL (src_const)
7359 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
7360 /* Exception: if the value is constant,
7361 and it won't be truncated, record it. */
7362 ;
7363 else
7364 {
7365 /* This is chosen so that the destination will be invalidated
7366 but no new value will be recorded.
7367 We must invalidate because sometimes constant
7368 values can be recorded for bitfields. */
7369 sets[i].src_elt = 0;
7370 sets[i].src_volatile = 1;
7371 src_eqv = 0;
7372 src_eqv_elt = 0;
7373 }
7374 }
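/* Illustration of the truncation hazard, with a hypothetical 2-bit
   field: storing (const_int 5) leaves the field holding 1, so no
   equivalence is recorded and the destination is invalidated;
   storing (const_int 3) loses no bits, so the exception above
   applies and the value may be recorded. */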
7375
7376 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
7377 the insn. */
7378 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
7379 {
7380 PUT_CODE (insn, NOTE);
7381 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7382 NOTE_SOURCE_FILE (insn) = 0;
7383 cse_jumps_altered = 1;
7384 /* One less use of the label this insn used to jump to. */
7385 if (JUMP_LABEL (insn) != 0)
7386 --LABEL_NUSES (JUMP_LABEL (insn));
7387 /* No more processing for this set. */
7388 sets[i].rtl = 0;
7389 }
7390
7391 /* If this SET is now setting PC to a label, we know it used to
7392 be a conditional or computed branch. So we see if we can follow
7393 it. If it was a computed branch, delete it and re-emit. */
7394 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
7395 {
7396 rtx p;
7397
7398 /* If this is not in the format for a simple branch and
7399 we are the only SET in it, re-emit it. */
7400 if (! simplejump_p (insn) && n_sets == 1)
7401 {
7402 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
7403 JUMP_LABEL (new) = XEXP (src, 0);
7404 LABEL_NUSES (XEXP (src, 0))++;
7405 delete_insn (insn);
7406 insn = new;
7407 }
7408 else
7409 /* Otherwise, force rerecognition, since it probably had
7410 a different pattern before.
7411 This shouldn't really be necessary, since whatever
7412 changed the source value above should have done this.
7413 Until the right place is found, might as well do this here. */
7414 INSN_CODE (insn) = -1;
7415
7416 /* Now that we've converted this jump to an unconditional jump,
7417 there is dead code after it. Delete the dead code until we
7418 reach a BARRIER, the end of the function, or a label. Do
7419 not delete NOTEs except for NOTE_INSN_DELETED since later
7420 phases assume these notes are retained. */
7421
7422 p = insn;
7423
7424 while (NEXT_INSN (p) != 0
7425 && GET_CODE (NEXT_INSN (p)) != BARRIER
7426 && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
7427 {
7428 /* Note, we must update P with the return value from
7429 delete_insn, otherwise we could get an infinite loop
7430 if NEXT_INSN (p) had INSN_DELETED_P set. */
7431 if (GET_CODE (NEXT_INSN (p)) != NOTE
7432 || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
7433 p = PREV_INSN (delete_insn (NEXT_INSN (p)));
7434 else
7435 p = NEXT_INSN (p);
7436 }
7437
7438 /* If we don't have a BARRIER immediately after INSN, put one there.
7439 Much code assumes that there are no NOTEs between a JUMP_INSN and
7440 a BARRIER. */
7441
7442 if (NEXT_INSN (insn) == 0
7443 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
7444 emit_barrier_before (NEXT_INSN (insn));
7445
7446 /* We might have two BARRIERs separated by notes. Delete the second
7447 one if so. */
7448
7449 if (p != insn && NEXT_INSN (p) != 0
7450 && GET_CODE (NEXT_INSN (p)) == BARRIER)
7451 delete_insn (NEXT_INSN (p));
7452
7453 cse_jumps_altered = 1;
7454 sets[i].rtl = 0;
7455 }
7456
7457 /* If destination is volatile, invalidate it and then do no further
7458 processing for this assignment. */
7459
7460 else if (do_not_record)
7461 {
7462 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7463 || GET_CODE (dest) == MEM)
7464 invalidate (dest, VOIDmode);
7465 else if (GET_CODE (dest) == STRICT_LOW_PART
7466 || GET_CODE (dest) == ZERO_EXTRACT)
7467 invalidate (XEXP (dest, 0), GET_MODE (dest));
7468 sets[i].rtl = 0;
7469 }
7470
7471 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
7472 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
7473
7474 #ifdef HAVE_cc0
7475 /* If setting CC0, record what it was set to, or a constant, if it
7476 is equivalent to a constant. If it is being set to a floating-point
7477 value, make a COMPARE with the appropriate constant of 0. If we
7478 don't do this, later code can interpret this as a test against
7479 const0_rtx, which can cause problems if we try to put it into an
7480 insn as a floating-point operand. */
7481 if (dest == cc0_rtx)
7482 {
7483 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
7484 this_insn_cc0_mode = mode;
7485 if (FLOAT_MODE_P (mode))
7486 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
7487 CONST0_RTX (mode));
7488 }
7489 #endif
7490 }
7491
7492 /* Now enter all non-volatile source expressions in the hash table
7493 if they are not already present.
7494 Record their equivalence classes in src_elt.
7495 This way we can insert the corresponding destinations into
7496 the same classes even if the actual sources are no longer in them
7497 (having been invalidated). */
7498
7499 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
7500 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
7501 {
7502 register struct table_elt *elt;
7503 register struct table_elt *classp = sets[0].src_elt;
7504 rtx dest = SET_DEST (sets[0].rtl);
7505 enum machine_mode eqvmode = GET_MODE (dest);
7506
7507 if (GET_CODE (dest) == STRICT_LOW_PART)
7508 {
7509 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
7510 classp = 0;
7511 }
7512 if (insert_regs (src_eqv, classp, 0))
7513 {
7514 rehash_using_reg (src_eqv);
7515 src_eqv_hash = HASH (src_eqv, eqvmode);
7516 }
7517 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7518 elt->in_memory = src_eqv_in_memory;
7519 elt->in_struct = src_eqv_in_struct;
7520 src_eqv_elt = elt;
7521
7522 /* Check to see if src_eqv_elt is the same as a set source which
7523 does not yet have an elt, and if so set the elt of the set source
7524 to src_eqv_elt. */
7525 for (i = 0; i < n_sets; i++)
7526 if (sets[i].rtl && sets[i].src_elt == 0
7527 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
7528 sets[i].src_elt = src_eqv_elt;
7529 }
7530
7531 for (i = 0; i < n_sets; i++)
7532 if (sets[i].rtl && ! sets[i].src_volatile
7533 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
7534 {
7535 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
7536 {
7537 /* REG_EQUAL in setting a STRICT_LOW_PART
7538 gives an equivalent for the entire destination register,
7539 not just for the subreg being stored in now.
7540 This is a more interesting equivalence, so we arrange later
7541 to treat the entire reg as the destination. */
7542 sets[i].src_elt = src_eqv_elt;
7543 sets[i].src_hash = src_eqv_hash;
7544 }
7545 else
7546 {
7547 /* Insert source and constant equivalent into hash table, if not
7548 already present. */
7549 register struct table_elt *classp = src_eqv_elt;
7550 register rtx src = sets[i].src;
7551 register rtx dest = SET_DEST (sets[i].rtl);
7552 enum machine_mode mode
7553 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
7554
7555 /* Don't put a hard register source into the table if this is
7556 the last insn of a libcall. */
7557 if (sets[i].src_elt == 0
7558 && (GET_CODE (src) != REG
7559 || REGNO (src) >= FIRST_PSEUDO_REGISTER
7560 || ! find_reg_note (insn, REG_RETVAL, NULL_RTX)))
7561 {
7562 register struct table_elt *elt;
7563
7564 /* Note that these insert_regs calls cannot remove
7565 any of the src_elt's, because they would have failed to
7566 match if not still valid. */
7567 if (insert_regs (src, classp, 0))
7568 {
7569 rehash_using_reg (src);
7570 sets[i].src_hash = HASH (src, mode);
7571 }
7572 elt = insert (src, classp, sets[i].src_hash, mode);
7573 elt->in_memory = sets[i].src_in_memory;
7574 elt->in_struct = sets[i].src_in_struct;
7575 sets[i].src_elt = classp = elt;
7576 }
7577
7578 if (sets[i].src_const && sets[i].src_const_elt == 0
7579 && src != sets[i].src_const
7580 && ! rtx_equal_p (sets[i].src_const, src))
7581 sets[i].src_elt = insert (sets[i].src_const, classp,
7582 sets[i].src_const_hash, mode);
7583 }
7584 }
7585 else if (sets[i].src_elt == 0)
7586 /* If we did not insert the source into the hash table (e.g., it was
7587 volatile), note the equivalence class for the REG_EQUAL value, if any,
7588 so that the destination goes into that class. */
7589 sets[i].src_elt = src_eqv_elt;
7590
7591 invalidate_from_clobbers (x);
7592
7593 /* Some registers are invalidated by subroutine calls. Memory is
7594 invalidated by non-constant calls. */
7595
7596 if (GET_CODE (insn) == CALL_INSN)
7597 {
7598 if (! CONST_CALL_P (insn))
7599 invalidate_memory ();
7600 invalidate_for_call ();
7601 }
7602
7603 /* Now invalidate everything set by this instruction.
7604 If a SUBREG or other funny destination is being set,
7605 sets[i].rtl is still nonzero, so here we invalidate the reg
7606 a part of which is being set. */
7607
7608 for (i = 0; i < n_sets; i++)
7609 if (sets[i].rtl)
7610 {
7611 /* We can't use the inner dest, because the mode associated with
7612 a ZERO_EXTRACT is significant. */
7613 register rtx dest = SET_DEST (sets[i].rtl);
7614
7615 /* Needed for registers to remove the register from its
7616 previous quantity's chain.
7617 Needed for memory if this is a nonvarying address, unless
7618 we have just done an invalidate_memory that covers even those. */
7619 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7620 || GET_CODE (dest) == MEM)
7621 invalidate (dest, VOIDmode);
7622 else if (GET_CODE (dest) == STRICT_LOW_PART
7623 || GET_CODE (dest) == ZERO_EXTRACT)
7624 invalidate (XEXP (dest, 0), GET_MODE (dest));
7625 }
7626
7627 /* A volatile ASM invalidates everything. */
7628 if (GET_CODE (insn) == INSN
7629 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
7630 && MEM_VOLATILE_P (PATTERN (insn)))
7631 flush_hash_table ();
7632
7633 /* Make sure registers mentioned in destinations
7634 are safe for use in an expression to be inserted.
7635 This removes from the hash table
7636 any invalid entry that refers to one of these registers.
7637
7638 We don't care about the return value from mention_regs because
7639 we are going to hash the SET_DEST values unconditionally. */
7640
7641 for (i = 0; i < n_sets; i++)
7642 {
7643 if (sets[i].rtl)
7644 {
7645 rtx x = SET_DEST (sets[i].rtl);
7646
7647 if (GET_CODE (x) != REG)
7648 mention_regs (x);
7649 else
7650 {
7651 /* We used to rely on all references to a register becoming
7652 inaccessible when a register changes to a new quantity,
7653 since that changes the hash code. However, that is not
7654 safe, since after NBUCKETS new quantities we get a
7655 hash 'collision' of a register with its own invalid
7656 entries. And since SUBREGs have been changed so that
7657 their hash code no longer varies with the hash code of
7658 the register, that approach would no longer work at all.
7659 So we have to check for any invalid references lying around now.
7660 This code is similar to the REG case in mention_regs,
7661 but it knows that reg_tick has been incremented, and
7662 it leaves reg_in_table as -1. */
7663 register int regno = REGNO (x);
7664 register int endregno
7665 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
7666 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
7667 int i;
7668
7669 for (i = regno; i < endregno; i++)
7670 {
7671 if (REG_IN_TABLE (i) >= 0)
7672 {
7673 remove_invalid_refs (i);
7674 REG_IN_TABLE (i) = -1;
7675 }
7676 }
7677 }
7678 }
7679 }
7680
7681 /* We may have just removed some of the src_elt's from the hash table.
7682 So replace each one with the current head of the same class. */
7683
7684 for (i = 0; i < n_sets; i++)
7685 if (sets[i].rtl)
7686 {
7687 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
7688 /* If elt was removed, find current head of same class,
7689 or 0 if nothing remains of that class. */
7690 {
7691 register struct table_elt *elt = sets[i].src_elt;
7692
7693 while (elt && elt->prev_same_value)
7694 elt = elt->prev_same_value;
7695
7696 while (elt && elt->first_same_value == 0)
7697 elt = elt->next_same_value;
7698 sets[i].src_elt = elt ? elt->first_same_value : 0;
7699 }
7700 }
7701
7702 /* Now insert the destinations into their equivalence classes. */
7703
7704 for (i = 0; i < n_sets; i++)
7705 if (sets[i].rtl)
7706 {
7707 register rtx dest = SET_DEST (sets[i].rtl);
7708 rtx inner_dest = sets[i].inner_dest;
7709 register struct table_elt *elt;
7710
7711 /* Don't record value if we are not supposed to risk allocating
7712 floating-point values in registers that might be wider than
7713 memory. */
7714 if ((flag_float_store
7715 && GET_CODE (dest) == MEM
7716 && FLOAT_MODE_P (GET_MODE (dest)))
7717 /* Don't record BLKmode values, because we don't know the
7718 size of it, and can't be sure that other BLKmode values
7719 have the same or smaller size. */
7720 || GET_MODE (dest) == BLKmode
7721 /* Don't record values of destinations set inside a libcall block
7722 since we might delete the libcall. Things should have been set
7723 up so we won't want to reuse such a value, but we play it safe
7724 here. */
7725 || libcall_insn
7726 /* If we didn't put a REG_EQUAL value or a source into the hash
7727 table, there is no point in recording DEST. */
7728 || sets[i].src_elt == 0
7729 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
7730 or SIGN_EXTEND, don't record DEST since it can cause
7731 some tracking to be wrong.
7732
7733 ??? Think about this more later. */
7734 || (GET_CODE (dest) == SUBREG
7735 && (GET_MODE_SIZE (GET_MODE (dest))
7736 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7737 && (GET_CODE (sets[i].src) == SIGN_EXTEND
7738 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
7739 continue;
7740
7741 /* STRICT_LOW_PART isn't part of the value BEING set,
7742 and neither is the SUBREG inside it.
7743 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
7744 if (GET_CODE (dest) == STRICT_LOW_PART)
7745 dest = SUBREG_REG (XEXP (dest, 0));
7746
7747 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
7748 /* Registers must also be inserted into chains for quantities. */
7749 if (insert_regs (dest, sets[i].src_elt, 1))
7750 {
7751 /* If `insert_regs' changes something, the hash code must be
7752 recalculated. */
7753 rehash_using_reg (dest);
7754 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
7755 }
7756
7757 if (GET_CODE (inner_dest) == MEM
7758 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
7759 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
7760 that (MEM (ADDRESSOF (X))) is equivalent to Y.
7761 Consider the case in which the address of the MEM is
7762 passed to a function, which alters the MEM. Then, if we
7763 later use Y instead of the MEM we'll miss the update. */
7764 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
7765 else
7766 elt = insert (dest, sets[i].src_elt,
7767 sets[i].dest_hash, GET_MODE (dest));
7768
7769 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
7770 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
7771 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
7772 0))));
7773
7774 if (elt->in_memory)
7775 {
7776 /* This implicitly assumes a whole struct
7777 need not have MEM_IN_STRUCT_P.
7778 But a whole struct is *supposed* to have MEM_IN_STRUCT_P. */
7779 elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
7780 || sets[i].inner_dest != SET_DEST (sets[i].rtl));
7781 }
7782
7783 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
7784 narrower than M2, and both M1 and M2 are the same number of words,
7785 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
7786 make that equivalence as well.
7787
7788 However, BAR may have equivalences for which gen_lowpart_if_possible
7789 will produce a simpler value than gen_lowpart_if_possible applied to
7790 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
7791 BAR's equivalences. If we don't get a simplified form, make
7792 the SUBREG. It will not be used in an equivalence, but will
7793 cause two similar assignments to be detected.
7794
7795 Note the loop below will find SUBREG_REG (DEST) since we have
7796 already entered SRC and DEST of the SET in the table. */
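/* Hypothetical instance: for (set (subreg:SI (reg:HI 80) 0) (reg:SI 75))
   on a target where both modes fit in one word, NEW_MODE below is
   HImode, and the loop enters the low part of each equivalent of
   (reg:SI 75), e.g. (subreg:HI (reg:SI 75) 0), into the class of
   (reg:HI 80). */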
7797
7798 if (GET_CODE (dest) == SUBREG
7799 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
7800 / UNITS_PER_WORD)
7801 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
7802 && (GET_MODE_SIZE (GET_MODE (dest))
7803 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7804 && sets[i].src_elt != 0)
7805 {
7806 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
7807 struct table_elt *elt, *classp = 0;
7808
7809 for (elt = sets[i].src_elt->first_same_value; elt;
7810 elt = elt->next_same_value)
7811 {
7812 rtx new_src = 0;
7813 unsigned src_hash;
7814 struct table_elt *src_elt;
7815
7816 /* Ignore invalid entries. */
7817 if (GET_CODE (elt->exp) != REG
7818 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7819 continue;
7820
7821 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
7822 if (new_src == 0)
7823 new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
7824
7825 src_hash = HASH (new_src, new_mode);
7826 src_elt = lookup (new_src, src_hash, new_mode);
7827
7828 /* Put the new source in the hash table if it isn't
7829 there already. */
7830 if (src_elt == 0)
7831 {
7832 if (insert_regs (new_src, classp, 0))
7833 {
7834 rehash_using_reg (new_src);
7835 src_hash = HASH (new_src, new_mode);
7836 }
7837 src_elt = insert (new_src, classp, src_hash, new_mode);
7838 src_elt->in_memory = elt->in_memory;
7839 src_elt->in_struct = elt->in_struct;
7840 }
7841 else if (classp && classp != src_elt->first_same_value)
7842 /* Show that two things that we've seen before are
7843 actually the same. */
7844 merge_equiv_classes (src_elt, classp);
7845
7846 classp = src_elt->first_same_value;
7847 /* Ignore invalid entries. */
7848 while (classp
7849 && GET_CODE (classp->exp) != REG
7850 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
7851 classp = classp->next_same_value;
7852 }
7853 }
7854 }
7855
7856 /* Special handling for (set REG0 REG1)
7857 where REG0 is the "cheapest", cheaper than REG1.
7858 After cse, REG1 will probably not be used in the sequel,
7859 so (if easily done) change this insn to (set REG1 REG0) and
7860 replace REG1 with REG0 in the previous insn that computed their value.
7861 Then REG1 will become a dead store and won't cloud the situation
7862 for later optimizations.
7863
7864 Do not make this change if REG1 is a hard register, because it will
7865 then be used in the sequel and we may be changing a two-operand insn
7866 into a three-operand insn.
7867
7868 Also do not do this if we are operating on a copy of INSN.
7869
7870 Also don't do this if INSN ends a libcall; this would cause an unrelated
7871 register to be set in the middle of a libcall, and we then get bad code
7872 if the libcall is deleted. */
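/* Hypothetical sketch of the transformation, where reg 66 heads the
   class:
   (set (reg 71) (plus (reg 68) (const_int 1)))
   (set (reg 66) (reg 71))
   becomes
   (set (reg 66) (plus (reg 68) (const_int 1)))
   (set (reg 71) (reg 66))
   and the second insn is now likely a dead store. */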
7873
7874 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
7875 && NEXT_INSN (PREV_INSN (insn)) == insn
7876 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
7877 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
7878 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
7879 && (qty_first_reg[REG_QTY (REGNO (SET_SRC (sets[0].rtl)))]
7880 == REGNO (SET_DEST (sets[0].rtl)))
7881 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
7882 {
7883 rtx prev = PREV_INSN (insn);
7884 while (prev && GET_CODE (prev) == NOTE)
7885 prev = PREV_INSN (prev);
7886
7887 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
7888 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
7889 {
7890 rtx dest = SET_DEST (sets[0].rtl);
7891 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
7892
7893 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
7894 validate_change (insn, & SET_DEST (sets[0].rtl),
7895 SET_SRC (sets[0].rtl), 1);
7896 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
7897 apply_change_group ();
7898
7899 /* If REG1 was equivalent to a constant, REG0 is not. */
7900 if (note)
7901 PUT_REG_NOTE_KIND (note, REG_EQUAL);
7902
7903 /* If there was a REG_WAS_0 note on PREV, remove it. Move
7904 any REG_WAS_0 note on INSN to PREV. */
7905 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
7906 if (note)
7907 remove_note (prev, note);
7908
7909 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7910 if (note)
7911 {
7912 remove_note (insn, note);
7913 XEXP (note, 1) = REG_NOTES (prev);
7914 REG_NOTES (prev) = note;
7915 }
7916
7917 /* If INSN has a REG_EQUAL note, and this note mentions REG0,
7918 then we must delete it, because the value in REG0 has changed. */
7919 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7920 if (note && reg_mentioned_p (dest, XEXP (note, 0)))
7921 remove_note (insn, note);
7922 }
7923 }
7924
7925 /* If this is a conditional jump insn, record any known equivalences due to
7926 the condition being tested. */
7927
7928 last_jump_equiv_class = 0;
7929 if (GET_CODE (insn) == JUMP_INSN
7930 && n_sets == 1 && GET_CODE (x) == SET
7931 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
7932 record_jump_equiv (insn, 0);
7933
7934 #ifdef HAVE_cc0
7935 /* If the previous insn set CC0 and this insn no longer references CC0,
7936 delete the previous insn. Here we use the fact that nothing expects CC0
7937 to be valid over an insn, which is true until the final pass. */
7938 if (prev_insn && GET_CODE (prev_insn) == INSN
7939 && (tem = single_set (prev_insn)) != 0
7940 && SET_DEST (tem) == cc0_rtx
7941 && ! reg_mentioned_p (cc0_rtx, x))
7942 {
7943 PUT_CODE (prev_insn, NOTE);
7944 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
7945 NOTE_SOURCE_FILE (prev_insn) = 0;
7946 }
7947
7948 prev_insn_cc0 = this_insn_cc0;
7949 prev_insn_cc0_mode = this_insn_cc0_mode;
7950 #endif
7951
7952 prev_insn = insn;
7953 }
7954 \f
7955 /* Remove from the hash table all expressions that reference memory. */
7956 static void
7957 invalidate_memory ()
7958 {
7959 register int i;
7960 register struct table_elt *p, *next;
7961
7962 for (i = 0; i < NBUCKETS; i++)
7963 for (p = table[i]; p; p = next)
7964 {
7965 next = p->next_same_hash;
7966 if (p->in_memory)
7967 remove_from_table (p, i);
7968 }
7969 }
7970
7971 /* XXX ??? The name of this function bears little resemblance to
7972 what this function actually does. FIXME. */
7973 static int
7974 note_mem_written (addr)
7975 register rtx addr;
7976 {
7977 /* Pushing or popping the stack invalidates just the stack pointer. */
7978 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7979 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7980 && GET_CODE (XEXP (addr, 0)) == REG
7981 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7982 {
7983 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
7984 REG_TICK (STACK_POINTER_REGNUM)++;
7985
7986 /* This should be *very* rare. */
7987 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
7988 invalidate (stack_pointer_rtx, VOIDmode);
7989 return 1;
7990 }
7991 return 0;
7992 }
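/* E.g., handed the hypothetical push address (pre_dec (reg:SI sp)),
   the code above bumps the stack pointer's REG_TICK so stale hash
   entries mentioning it die, removes sp from the table in the rare
   case it is there, and returns 1; any other address returns 0. */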
7993
7994 /* Perform invalidation on the basis of everything about an insn
7995 except for invalidating the actual places that are SET in it.
7996 This includes the places CLOBBERed, and anything that might
7997 alias with something that is SET or CLOBBERed.
7998
7999 X is the pattern of the insn. */
8000
8001 static void
8002 invalidate_from_clobbers (x)
8003 rtx x;
8004 {
8005 if (GET_CODE (x) == CLOBBER)
8006 {
8007 rtx ref = XEXP (x, 0);
8008 if (ref)
8009 {
8010 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
8011 || GET_CODE (ref) == MEM)
8012 invalidate (ref, VOIDmode);
8013 else if (GET_CODE (ref) == STRICT_LOW_PART
8014 || GET_CODE (ref) == ZERO_EXTRACT)
8015 invalidate (XEXP (ref, 0), GET_MODE (ref));
8016 }
8017 }
8018 else if (GET_CODE (x) == PARALLEL)
8019 {
8020 register int i;
8021 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
8022 {
8023 register rtx y = XVECEXP (x, 0, i);
8024 if (GET_CODE (y) == CLOBBER)
8025 {
8026 rtx ref = XEXP (y, 0);
8027 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
8028 || GET_CODE (ref) == MEM)
8029 invalidate (ref, VOIDmode);
8030 else if (GET_CODE (ref) == STRICT_LOW_PART
8031 || GET_CODE (ref) == ZERO_EXTRACT)
8032 invalidate (XEXP (ref, 0), GET_MODE (ref));
8033 }
8034 }
8035 }
8036 }
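/* For example, a hypothetical pattern
   (parallel [(set (reg 80) (reg 75))
   (clobber (reg:SI 1))])
   causes hard reg 1 to be invalidated here, while the SET itself is
   handled separately by the caller. */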
8037 \f
8038 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
8039 and replace any registers in them with either an equivalent constant
8040 or the canonical form of the register. If we are inside an address,
8041 only do this if the address remains valid.
8042
8043 OBJECT is 0 except when within a MEM in which case it is the MEM.
8044
8045 Return the replacement for X. */
8046
8047 static rtx
8048 cse_process_notes (x, object)
8049 rtx x;
8050 rtx object;
8051 {
8052 enum rtx_code code = GET_CODE (x);
8053 char *fmt = GET_RTX_FORMAT (code);
8054 int i;
8055
8056 switch (code)
8057 {
8058 case CONST_INT:
8059 case CONST:
8060 case SYMBOL_REF:
8061 case LABEL_REF:
8062 case CONST_DOUBLE:
8063 case PC:
8064 case CC0:
8065 case LO_SUM:
8066 return x;
8067
8068 case MEM:
8069 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
8070 return x;
8071
8072 case EXPR_LIST:
8073 case INSN_LIST:
8074 if (REG_NOTE_KIND (x) == REG_EQUAL)
8075 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
8076 if (XEXP (x, 1))
8077 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
8078 return x;
8079
8080 case SIGN_EXTEND:
8081 case ZERO_EXTEND:
8082 case SUBREG:
8083 {
8084 rtx new = cse_process_notes (XEXP (x, 0), object);
8085 /* We don't substitute VOIDmode constants into these rtx,
8086 since they would impede folding. */
8087 if (GET_MODE (new) != VOIDmode)
8088 validate_change (object, &XEXP (x, 0), new, 0);
8089 return x;
8090 }
8091
8092 case REG:
8093 i = REG_QTY (REGNO (x));
8094
8095 /* Return a constant or a constant register. */
8096 if (REGNO_QTY_VALID_P (REGNO (x))
8097 && qty_const[i] != 0
8098 && (CONSTANT_P (qty_const[i])
8099 || GET_CODE (qty_const[i]) == REG))
8100 {
8101 rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
8102 if (new)
8103 return new;
8104 }
8105
8106 /* Otherwise, canonicalize this register. */
8107 return canon_reg (x, NULL_RTX);
8108
8109 default:
8110 break;
8111 }
8112
8113 for (i = 0; i < GET_RTX_LENGTH (code); i++)
8114 if (fmt[i] == 'e')
8115 validate_change (object, &XEXP (x, i),
8116 cse_process_notes (XEXP (x, i), object), 0);
8117
8118 return x;
8119 }
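/* As a hypothetical example, a note
   (expr_list:REG_EQUAL (plus (reg 70) (const_int 8)))
   where reg 70's quantity is known to hold (reg 66) is rewritten as
   (plus (reg 66) (const_int 8)), or with a constant substituted when
   one is known and the result can still be folded. */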
8120 \f
8121 /* Find common subexpressions between the end test of a loop and the beginning
8122 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
8123
8124 Often we have a loop where an expression in the exit test is used
8125 in the body of the loop. For example "while (*p) *q++ = *p++;".
8126 Because of the way we duplicate the loop exit test in front of the loop,
8127 however, we don't detect that common subexpression. This will be caught
8128 when global cse is implemented, but this is quite a common case.
8129
8130 This function handles the most common cases of these common expressions.
8131 It is called after we have processed the basic block ending with the
8132 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
8133 jumps to a label used only once. */
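/* Rough sketch for the example above: the duplicated exit test in
   front of the loop loads *p into some register; if the body's load
   of *p is still equivalent to that register when the loop is
   entered, cse_set_around_loop below can reuse it, assuming no
   intervening store through the pointer. */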
8134
8135 static void
8136 cse_around_loop (loop_start)
8137 rtx loop_start;
8138 {
8139 rtx insn;
8140 int i;
8141 struct table_elt *p;
8142
8143 /* If the jump at the end of the loop doesn't go to the start, we don't
8144 do anything. */
8145 for (insn = PREV_INSN (loop_start);
8146 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
8147 insn = PREV_INSN (insn))
8148 ;
8149
8150 if (insn == 0
8151 || GET_CODE (insn) != NOTE
8152 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
8153 return;
8154
8155 /* If the last insn of the loop (the end test) was an NE comparison,
8156 we will interpret it as an EQ comparison, since we fell through
8157 the loop. Any equivalences resulting from that comparison are
8158 therefore not valid and must be invalidated. */
8159 if (last_jump_equiv_class)
8160 for (p = last_jump_equiv_class->first_same_value; p;
8161 p = p->next_same_value)
8162 {
8163 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
8164 || (GET_CODE (p->exp) == SUBREG
8165 && GET_CODE (SUBREG_REG (p->exp)) == REG))
8166 invalidate (p->exp, VOIDmode);
8167 else if (GET_CODE (p->exp) == STRICT_LOW_PART
8168 || GET_CODE (p->exp) == ZERO_EXTRACT)
8169 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
8170 }
8171
8172 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
8173 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
8174
8175 The only thing we do with SET_DEST is invalidate entries, so we
8176 can safely process each SET in order. It is slightly less efficient
8177 to do so, but we only want to handle the most common cases.
8178
8179 The gen_move_insn call in cse_set_around_loop may create new pseudos.
8180 These pseudos won't have valid entries in any of the tables indexed
8181 by register number, such as reg_qty. We avoid out-of-range array
8182 accesses by not processing any instructions created after cse started. */
8183
8184 for (insn = NEXT_INSN (loop_start);
8185 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
8186 && INSN_UID (insn) < max_insn_uid
8187 && ! (GET_CODE (insn) == NOTE
8188 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
8189 insn = NEXT_INSN (insn))
8190 {
8191 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
8192 && (GET_CODE (PATTERN (insn)) == SET
8193 || GET_CODE (PATTERN (insn)) == CLOBBER))
8194 cse_set_around_loop (PATTERN (insn), insn, loop_start);
8195 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
8196 && GET_CODE (PATTERN (insn)) == PARALLEL)
8197 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
8198 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
8199 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
8200 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
8201 loop_start);
8202 }
8203 }
8204 \f
8205 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
8206 since they are done elsewhere. This function is called via note_stores. */
8207
8208 static void
8209 invalidate_skipped_set (dest, set)
8210 rtx dest;
8211 rtx set;
8212 {
8213 enum rtx_code code = GET_CODE (dest);
8214
8215 if (code == MEM
8216 && ! note_mem_written (dest) /* If this is not a stack push ... */
8217 /* There are times when an address can appear varying and be a PLUS
8218 during this scan when it would be a fixed address were we to know
8219 the proper equivalences. So invalidate all memory if there is
8220 a BLKmode or nonscalar memory reference or a reference to a
8221 variable address. */
8222 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
8223 || cse_rtx_varies_p (XEXP (dest, 0))))
8224 {
8225 invalidate_memory ();
8226 return;
8227 }
8228
8229 if (GET_CODE (set) == CLOBBER
8230 #ifdef HAVE_cc0
8231 || dest == cc0_rtx
8232 #endif
8233 || dest == pc_rtx)
8234 return;
8235
8236 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
8237 invalidate (XEXP (dest, 0), GET_MODE (dest));
8238 else if (code == REG || code == SUBREG || code == MEM)
8239 invalidate (dest, VOIDmode);
8240 }
8241
8242 /* Invalidate all insns from START up to the end of the function or the
8243 next label. This is called when we wish to CSE around a block that is
8244 conditionally executed. */
8245
8246 static void
8247 invalidate_skipped_block (start)
8248 rtx start;
8249 {
8250 rtx insn;
8251
8252 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
8253 insn = NEXT_INSN (insn))
8254 {
8255 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
8256 continue;
8257
8258 if (GET_CODE (insn) == CALL_INSN)
8259 {
8260 if (! CONST_CALL_P (insn))
8261 invalidate_memory ();
8262 invalidate_for_call ();
8263 }
8264
8265 invalidate_from_clobbers (PATTERN (insn));
8266 note_stores (PATTERN (insn), invalidate_skipped_set);
8267 }
8268 }
8269 \f
8270 /* Used for communication between the following two routines; contains a
8271 value to be checked for modification. */
8272
8273 static rtx cse_check_loop_start_value;
8274
8275 /* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
8276 indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0. */
8277
8278 static void
8279 cse_check_loop_start (x, set)
8280 rtx x;
8281 rtx set ATTRIBUTE_UNUSED;
8282 {
8283 if (cse_check_loop_start_value == 0
8284 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
8285 return;
8286
8287 if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
8288 || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
8289 cse_check_loop_start_value = 0;
8290 }
8291
8292 /* X is a SET or CLOBBER contained in INSN that was found near the start of
8293 a loop that starts with the label at LOOP_START.
8294
8295 If X is a SET, we see if its SET_SRC is currently in our hash table.
8296 If so, we see if it has a value equal to some register used only in the
8297 loop exit code (as marked by jump.c).
8298
8299 If those two conditions are true, we search backwards from the start of
8300 the loop to see if that same value was loaded into a register that still
8301 retains its value at the start of the loop.
8302
8303 If so, we insert an insn after the load to copy the destination of that
8304 load into the equivalent register and (try to) replace our SET_SRC with that
8305 register.
8306
8307 In any event, we invalidate whatever this SET or CLOBBER modifies. */
8308
8309 static void
8310 cse_set_around_loop (x, insn, loop_start)
8311 rtx x;
8312 rtx insn;
8313 rtx loop_start;
8314 {
8315 struct table_elt *src_elt;
8316
8317 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
8318 are setting PC or CC0 or whose SET_SRC is already a register. */
8319 if (GET_CODE (x) == SET
8320 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
8321 && GET_CODE (SET_SRC (x)) != REG)
8322 {
8323 src_elt = lookup (SET_SRC (x),
8324 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
8325 GET_MODE (SET_DEST (x)));
8326
8327 if (src_elt)
8328 for (src_elt = src_elt->first_same_value; src_elt;
8329 src_elt = src_elt->next_same_value)
8330 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
8331 && COST (src_elt->exp) < COST (SET_SRC (x)))
8332 {
8333 rtx p, set;
8334
8335 /* Look for an insn in front of LOOP_START that sets
8336 something in the desired mode to SET_SRC (x) before we hit
8337 a label or CALL_INSN. */
8338
8339 for (p = prev_nonnote_insn (loop_start);
8340 p && GET_CODE (p) != CALL_INSN
8341 && GET_CODE (p) != CODE_LABEL;
8342 p = prev_nonnote_insn (p))
8343 if ((set = single_set (p)) != 0
8344 && GET_CODE (SET_DEST (set)) == REG
8345 && GET_MODE (SET_DEST (set)) == src_elt->mode
8346 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
8347 {
8348 /* We now have to ensure that nothing between P
8349 and LOOP_START modified anything referenced in
8350 SET_SRC (x). We know that nothing within the loop
8351 can modify it, or we would have invalidated it in
8352 the hash table. */
8353 rtx q;
8354
8355 cse_check_loop_start_value = SET_SRC (x);
8356 for (q = p; q != loop_start; q = NEXT_INSN (q))
8357 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
8358 note_stores (PATTERN (q), cse_check_loop_start);
8359
8360 /* If nothing was changed and we can replace our
8361 SET_SRC, add an insn after P to copy its destination
8362 to what we will be replacing SET_SRC with. */
8363 if (cse_check_loop_start_value
8364 && validate_change (insn, &SET_SRC (x),
8365 src_elt->exp, 0))
8366 {
8367 /* If this creates new pseudos, this is unsafe,
8368 because the regno of new pseudo is unsuitable
8369 to index into reg_qty when cse_insn processes
8370 the new insn. Therefore, if a new pseudo was
8371 created, discard this optimization. */
8372 int nregs = max_reg_num ();
8373 rtx move
8374 = gen_move_insn (src_elt->exp, SET_DEST (set));
8375 if (nregs != max_reg_num ())
8376 {
8377 if (! validate_change (insn, &SET_SRC (x),
8378 SET_SRC (set), 0))
8379 abort ();
8380 }
8381 else
8382 emit_insn_after (move, p);
8383 }
8384 break;
8385 }
8386 }
8387 }
8388
8389 /* Now invalidate anything modified by X. */
8390 note_mem_written (SET_DEST (x));
8391
8392 /* See comment on similar code in cse_insn for explanation of these tests. */
8393 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
8394 || GET_CODE (SET_DEST (x)) == MEM)
8395 invalidate (SET_DEST (x), VOIDmode);
8396 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
8397 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
8398 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
8399 }
8400 \f
8401 /* Find the end of INSN's basic block and return its range,
8402 the total number of SETs in all the insns of the block, the last insn of the
8403 block, and the branch path.
8404
8405 The branch path indicates which branches should be followed. If a non-zero
8406 path size is specified, the block should be rescanned and a different set
8407 of branches will be taken. The branch path is only used if
8408 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
8409
8410 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
8411 used to describe the block. It is filled in with the information about
8412 the current block. The incoming structure's branch path, if any, is used
8413 to construct the output branch path. */
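/* For example, on a hypothetical first scan the path may record one
   conditional branch as TAKEN; the loop below then flips that last
   entry to NOT_TAKEN on the rescan, so repeated calls enumerate the
   distinct extended-basic-block paths one at a time. */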
8414
8415 void
8416 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
8417 rtx insn;
8418 struct cse_basic_block_data *data;
8419 int follow_jumps;
8420 int after_loop;
8421 int skip_blocks;
8422 {
8423 rtx p = insn, q;
8424 int nsets = 0;
8425 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
8426 rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
8427 int path_size = data->path_size;
8428 int path_entry = 0;
8429 int i;
8430
8431 /* Update the previous branch path, if any. If the last branch was
8432 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
8433 shorten the path by one and look at the previous branch. We know that
8434 at least one branch must have been taken if PATH_SIZE is non-zero. */
8435 while (path_size > 0)
8436 {
8437 if (data->path[path_size - 1].status != NOT_TAKEN)
8438 {
8439 data->path[path_size - 1].status = NOT_TAKEN;
8440 break;
8441 }
8442 else
8443 path_size--;
8444 }
8445
8446 /* Scan to end of this basic block. */
8447 while (p && GET_CODE (p) != CODE_LABEL)
8448 {
8449 /* Don't cse out the end of a loop. This makes a difference
8450 only for the unusual loops that always execute at least once;
8451 all other loops have labels there so we will stop in any case.
8452 Cse'ing out the end of the loop is dangerous because it
8453 might cause an invariant expression inside the loop
8454 to be reused after the end of the loop. This would make it
8455 hard to move the expression out of the loop in loop.c,
8456 especially if it is one of several equivalent expressions
8457 and loop.c would like to eliminate it.
8458
8459 If we are running after loop.c has finished, we can ignore
8460 the NOTE_INSN_LOOP_END. */
8461
8462 if (! after_loop && GET_CODE (p) == NOTE
8463 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
8464 break;
8465
8466 /* Don't cse over a call to setjmp; on some machines (eg vax)
8467 the regs restored by the longjmp come from
8468 a later time than the setjmp. */
8469 if (GET_CODE (p) == NOTE
8470 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
8471 break;
8472
8473 /* A PARALLEL can have lots of SETs in it,
8474 especially if it is really an ASM_OPERANDS. */
8475 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
8476 && GET_CODE (PATTERN (p)) == PARALLEL)
8477 nsets += XVECLEN (PATTERN (p), 0);
8478 else if (GET_CODE (p) != NOTE)
8479 nsets += 1;
8480
8481 /* Ignore insns made by CSE; they cannot affect the boundaries of
8482 the basic block. */
8483
8484 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
8485 high_cuid = INSN_CUID (p);
8486 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
8487 low_cuid = INSN_CUID (p);
8488
8489 /* See if this insn is in our branch path. If it is and we are to
8490 take it, do so. */
8491 if (path_entry < path_size && data->path[path_entry].branch == p)
8492 {
8493 if (data->path[path_entry].status != NOT_TAKEN)
8494 p = JUMP_LABEL (p);
8495
8496 /* Point to next entry in path, if any. */
8497 path_entry++;
8498 }
8499
8500 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
8501 was specified, we haven't reached our maximum path length, there are
8502 insns following the target of the jump, this is the only use of the
8503 jump label, and the target label is preceded by a BARRIER.
8504
8505 Alternatively, we can follow the jump if it branches around a
8506 block of code and there are no other branches into the block.
8507 In this case invalidate_skipped_block will be called to invalidate any
8508 registers set in the block when following the jump. */
8509
8510 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
8511 && GET_CODE (p) == JUMP_INSN
8512 && GET_CODE (PATTERN (p)) == SET
8513 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
8514 && JUMP_LABEL (p) != 0
8515 && LABEL_NUSES (JUMP_LABEL (p)) == 1
8516 && NEXT_INSN (JUMP_LABEL (p)) != 0)
8517 {
8518 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
8519 if ((GET_CODE (q) != NOTE
8520 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
8521 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
8522 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
8523 break;
8524
8525 /* If we ran into a BARRIER, this code is an extension of the
8526 basic block when the branch is taken. */
8527 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
8528 {
8529 /* Don't allow ourselves to keep walking around an
8530 always-executed loop. */
8531 if (next_real_insn (q) == next)
8532 {
8533 p = NEXT_INSN (p);
8534 continue;
8535 }
8536
8537 /* Similarly, don't put a branch in our path more than once. */
8538 for (i = 0; i < path_entry; i++)
8539 if (data->path[i].branch == p)
8540 break;
8541
8542 if (i != path_entry)
8543 break;
8544
8545 data->path[path_entry].branch = p;
8546 data->path[path_entry++].status = TAKEN;
8547
8548 /* This branch now ends our path. It was possible that we
8549 didn't see this branch the last time around (when the
8550 insn in front of the target was a JUMP_INSN that was
8551 turned into a no-op). */
8552 path_size = path_entry;
8553
8554 p = JUMP_LABEL (p);
8555 /* Mark block so we won't scan it again later. */
8556 PUT_MODE (NEXT_INSN (p), QImode);
8557 }
8558 /* Detect a branch around a block of code. */
8559 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
8560 {
8561 register rtx tmp;
8562
8563 if (next_real_insn (q) == next)
8564 {
8565 p = NEXT_INSN (p);
8566 continue;
8567 }
8568
8569 for (i = 0; i < path_entry; i++)
8570 if (data->path[i].branch == p)
8571 break;
8572
8573 if (i != path_entry)
8574 break;
8575
8576 /* This is no_labels_between_p (p, q) with an added check for
8577 reaching the end of a function (in case Q precedes P). */
8578 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
8579 if (GET_CODE (tmp) == CODE_LABEL)
8580 break;
8581
8582 if (tmp == q)
8583 {
8584 data->path[path_entry].branch = p;
8585 data->path[path_entry++].status = AROUND;
8586
8587 path_size = path_entry;
8588
8589 p = JUMP_LABEL (p);
8590 /* Mark block so we won't scan it again later. */
8591 PUT_MODE (NEXT_INSN (p), QImode);
8592 }
8593 }
8594 }
8595 p = NEXT_INSN (p);
8596 }
8597
8598 data->low_cuid = low_cuid;
8599 data->high_cuid = high_cuid;
8600 data->nsets = nsets;
8601 data->last = p;
8602
8603 /* If none of the jumps in the path were taken, set our path length to
8604 zero so a rescan won't be done. */
8605 for (i = path_size - 1; i >= 0; i--)
8606 if (data->path[i].status != NOT_TAKEN)
8607 break;
8608
8609 if (i == -1)
8610 data->path_size = 0;
8611 else
8612 data->path_size = path_size;
8613
8614 /* End the current branch path. */
8615 data->path[path_size].branch = 0;
8616 }
8617 \f
8618 /* Perform cse on the instructions of a function.
8619 F is the first instruction.
8620 NREGS is one plus the highest pseudo-reg number used in the instruction.
8621
8622 AFTER_LOOP is 1 if this is the cse call done after loop optimization
8623 (only if -frerun-cse-after-loop).
8624
8625 Returns 1 if jump_optimize should be redone due to simplifications
8626 in conditional jump instructions. */
8627
8628 int
8629 cse_main (f, nregs, after_loop, file)
8630 rtx f;
8631 int nregs;
8632 int after_loop;
8633 FILE *file;
8634 {
8635 struct cse_basic_block_data val;
8636 register rtx insn = f;
8637 register int i;
8638
8639 cse_jumps_altered = 0;
8640 recorded_label_ref = 0;
8641 constant_pool_entries_cost = 0;
8642 val.path_size = 0;
8643
8644 init_recog ();
8645 init_alias_analysis ();
8646
8647 max_reg = nregs;
8648
8649 max_insn_uid = get_max_uid ();
8650
8651 reg_next_eqv = (int *) alloca (nregs * sizeof (int));
8652 reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
8653
8654 #ifdef LOAD_EXTEND_OP
8655
8656 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
8657 and change the code and mode as appropriate. */
8658 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
8659 #endif
8660
8661 /* Discard all the free elements of the previous function
8662 since they are allocated in the temporary obstack. */
8663 bzero ((char *) table, sizeof table);
8664 free_element_chain = 0;
8665 n_elements_made = 0;
8666
8667 /* Find the largest uid. */
8668
8669 max_uid = get_max_uid ();
8670 uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
8671 bzero ((char *) uid_cuid, (max_uid + 1) * sizeof (int));
8672
8673 /* Compute the mapping from uids to cuids.
8674 CUIDs are numbers assigned to insns, like uids,
8675 except that cuids increase monotonically through the code.
8676 Don't assign cuids to line-number NOTEs, so that the distance in cuids
8677 between two insns is not affected by -g. */
8678
8679 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
8680 {
8681 if (GET_CODE (insn) != NOTE
8682 || NOTE_LINE_NUMBER (insn) < 0)
8683 INSN_CUID (insn) = ++i;
8684 else
8685 /* Give a line number note the same cuid as preceding insn. */
8686 INSN_CUID (insn) = i;
8687 }
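/* E.g., for hypothetical insns with uids 4, 9 and 7 in stream order,
   the cuids assigned are 1, 2 and 3; a line-number note between them
   receives the cuid of the preceding insn, so cuid distances are the
   same with and without -g. */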
8688
8689 /* Initialize which registers are clobbered by calls. */
8690
8691 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
8692
8693 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8694 if ((call_used_regs[i]
8695 /* Used to check !fixed_regs[i] here, but that isn't safe;
8696 fixed regs are still call-clobbered, and sched can get
8697 confused if they can "live across calls".
8698
8699 The frame pointer is always preserved across calls. The arg
8700 pointer is if it is fixed. The stack pointer usually is, unless
8701 RETURN_POPS_ARGS, in which case an explicit CLOBBER
8702 will be present. If we are generating PIC code, the PIC offset
8703 table register is preserved across calls. */
8704
8705 && i != STACK_POINTER_REGNUM
8706 && i != FRAME_POINTER_REGNUM
8707 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
8708 && i != HARD_FRAME_POINTER_REGNUM
8709 #endif
8710 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
8711 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
8712 #endif
8713 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
8714 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
8715 #endif
8716 )
8717 || global_regs[i])
8718 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
8719
8720 /* Loop over basic blocks.
8721 Compute the maximum number of qty's needed for each basic block
8722 (which is 2 for each SET). */
8723 insn = f;
8724 while (insn)
8725 {
8726 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
8727 flag_cse_skip_blocks);
8728
8729 /* If this basic block was already processed or has no sets, skip it. */
8730 if (val.nsets == 0 || GET_MODE (insn) == QImode)
8731 {
8732 PUT_MODE (insn, VOIDmode);
8733 insn = (val.last ? NEXT_INSN (val.last) : 0);
8734 val.path_size = 0;
8735 continue;
8736 }
8737
8738 cse_basic_block_start = val.low_cuid;
8739 cse_basic_block_end = val.high_cuid;
8740 max_qty = val.nsets * 2;
8741
8742 if (file)
8743 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
8744 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
8745 val.nsets);
8746
8747 /* Make MAX_QTY bigger to give us room to optimize
8748 past the end of this basic block, if that should prove useful. */
8749 if (max_qty < 500)
8750 max_qty = 500;
8751
8752 max_qty += max_reg;
8753

      /* If this basic block is being extended by following certain jumps,
         (see `cse_end_of_basic_block'), we reprocess the code from the start.
         Otherwise, we start after this basic block.  */
      if (val.path_size > 0)
        cse_basic_block (insn, val.last, val.path, 0);
      else
        {
          int old_cse_jumps_altered = cse_jumps_altered;
          rtx temp;

          /* When cse changes a conditional jump to an unconditional
             jump, we want to reprocess the block, since it will give
             us a new branch path to investigate.  */
          cse_jumps_altered = 0;
          temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
          if (cse_jumps_altered == 0
              || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
            insn = temp;

          cse_jumps_altered |= old_cse_jumps_altered;
        }

#ifdef USE_C_ALLOCA
      alloca (0);
#endif
    }

  /* Tell refers_to_mem_p that qty_const info is not available.  */
  qty_const = 0;

  if (max_elements_made < n_elements_made)
    max_elements_made = n_elements_made;

  return cse_jumps_altered || recorded_label_ref;
}

/* Process a single basic block.  FROM and TO are the limits of the basic
   block.  NEXT_BRANCH points to the branch path when following jumps or
   a null path when not following jumps.

   AROUND_LOOP is non-zero if we are to try to cse around to the start of a
   loop.  This is true when we are being called for the last time on a
   block and this CSE pass is before loop.c.  */

static rtx
cse_basic_block (from, to, next_branch, around_loop)
     register rtx from, to;
     struct branch_path *next_branch;
     int around_loop;
{
  register rtx insn;
  int to_usage = 0;
  rtx libcall_insn = NULL_RTX;
  int num_insns = 0;

  /* Each of these arrays is undefined before max_reg, so only allocate
     the space actually needed and adjust the start below.  */

  qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
  qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
  qty_mode = (enum machine_mode *) alloca ((max_qty - max_reg)
                                           * sizeof (enum machine_mode));
  qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
  qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
  qty_comparison_code
    = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
  qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
  qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));

  qty_first_reg -= max_reg;
  qty_last_reg -= max_reg;
  qty_mode -= max_reg;
  qty_const -= max_reg;
  qty_const_insn -= max_reg;
  qty_comparison_code -= max_reg;
  qty_comparison_qty -= max_reg;
  qty_comparison_const -= max_reg;
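
  /* Illustrative sketch (not part of the sources): the adjustments above
     use the classic offset-pointer idiom.  To index an array by values
     in [LO, HI) without allocating the unused [0, LO) prefix:

        int *base = (int *) alloca ((hi - lo) * sizeof (int));
        int *arr = base - lo;
        arr[lo] = 42;   ...arr[lo] aliases base[0]; arr[0..lo-1] is invalid.

     Here LO is max_reg and HI is max_qty, since real quantity numbers are
     always >= max_reg.  Strictly speaking `base - lo' is undefined in ISO C
     when it points outside the allocated block, but it is a common and
     practically portable trick.  */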

  new_basic_block ();

  /* TO might be a label.  If so, protect it from being deleted.  */
  if (to != 0 && GET_CODE (to) == CODE_LABEL)
    ++LABEL_NUSES (to);

  for (insn = from; insn != to; insn = NEXT_INSN (insn))
    {
      register enum rtx_code code = GET_CODE (insn);

      /* If we have processed 1,000 insns, flush the hash table to
         avoid extreme quadratic behavior.  We must not include NOTEs
         in the count since there may be more of them when generating
         debugging information.  If we clear the table at different
         times, code generated with -g -O might be different than code
         generated with -O but not -g.

         ??? This is a real kludge and needs to be done some other way.
         Perhaps for 2.9.  */
      if (code != NOTE && num_insns++ > 1000)
        {
          flush_hash_table ();
          num_insns = 0;
        }

      /* See if this is a branch that is part of the path.  If so, and it is
         to be taken, do so.  */
      if (next_branch->branch == insn)
        {
          enum taken status = next_branch++->status;
          if (status != NOT_TAKEN)
            {
              if (status == TAKEN)
                record_jump_equiv (insn, 1);
              else
                invalidate_skipped_block (NEXT_INSN (insn));

              /* Set the last insn as the jump insn; it doesn't affect cc0.
                 Then follow this branch.  */
#ifdef HAVE_cc0
              prev_insn_cc0 = 0;
#endif
              prev_insn = insn;
              insn = JUMP_LABEL (insn);
              continue;
            }
        }

      if (GET_MODE (insn) == QImode)
        PUT_MODE (insn, VOIDmode);

      if (GET_RTX_CLASS (code) == 'i')
        {
          rtx p;

          /* Process notes first so we have all notes in canonical forms when
             looking for duplicate operations.  */

          if (REG_NOTES (insn))
            REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);

          /* Track when we are inside a LIBCALL block.  Inside such a block,
             we do not want to record destinations.  The last insn of a
             LIBCALL block is not considered to be part of the block, since
             its destination is the result of the block and hence should be
             recorded.  */

          if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
            libcall_insn = XEXP (p, 0);
          else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
            libcall_insn = NULL_RTX;
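
          /* Illustrative shape of a LIBCALL block (simplified, insn
             numbers invented):

                (insn 10 ... REG_LIBCALL note pointing at insn 12)
                (insn 11 ...)
                (insn 12 (set (reg 69) ...) ... REG_RETVAL note pointing
                         back at insn 10)

             From insn 10 up to but not including insn 12, LIBCALL_INSN
             is non-null and destinations are not recorded; it is cleared
             at insn 12, so (reg 69), the result of the whole block, is
             recorded normally.  */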

          cse_insn (insn, libcall_insn);
        }

      /* If INSN is now an unconditional jump, skip to the end of our
         basic block by pretending that we just did the last insn in the
         basic block.  If we are jumping to the end of our block, show
         that we can have one usage of TO.  */

      if (simplejump_p (insn))
        {
          if (to == 0)
            return 0;

          if (JUMP_LABEL (insn) == to)
            to_usage = 1;

          /* Maybe TO was deleted because the jump is unconditional.
             If so, there is nothing left in this basic block.  */
          /* ??? Perhaps it would be smarter to set TO
             to whatever follows this insn,
             and pretend the basic block had always ended here.  */
          if (INSN_DELETED_P (to))
            break;

          insn = PREV_INSN (to);
        }

      /* See if it is ok to keep on going past the label
         which used to end our basic block.  Remember that we incremented
         the count of that label, so we decrement it here.  If we made
         a jump unconditional, TO_USAGE will be one; in that case, we don't
         want to count the use in that jump.  */

      if (to != 0 && NEXT_INSN (insn) == to
          && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
        {
          struct cse_basic_block_data val;
          rtx prev;

          insn = NEXT_INSN (to);

          if (LABEL_NUSES (to) == 0)
            insn = delete_insn (to);

          /* If TO was the last insn in the function, we are done.  */
          if (insn == 0)
            return 0;

          /* If TO was preceded by a BARRIER we are done with this block
             because it has no continuation.  */
          prev = prev_nonnote_insn (to);
          if (prev && GET_CODE (prev) == BARRIER)
            return insn;

          /* Find the end of the following block.  Note that we won't be
             following branches in this case.  */
          to_usage = 0;
          val.path_size = 0;
          cse_end_of_basic_block (insn, &val, 0, 0, 0);

          /* If the tables we allocated have enough space left
             to handle all the SETs in the next basic block,
             continue through it.  Otherwise, return,
             and that block will be scanned individually.  */
          if (val.nsets * 2 + next_qty > max_qty)
            break;

          cse_basic_block_start = val.low_cuid;
          cse_basic_block_end = val.high_cuid;
          to = val.last;

          /* Prevent TO from being deleted if it is a label.  */
          if (to != 0 && GET_CODE (to) == CODE_LABEL)
            ++LABEL_NUSES (to);

          /* Back up so we process the first insn in the extension.  */
          insn = PREV_INSN (insn);
        }
    }

  if (next_qty > max_qty)
    abort ();

  /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
     the previous insn is the only insn that branches to the head of a loop,
     we can cse into the loop.  Don't do this if we changed the jump
     structure of a loop unless we aren't going to be following jumps.  */

  if ((cse_jumps_altered == 0
       || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
      && around_loop && to != 0
      && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
      && GET_CODE (PREV_INSN (to)) == JUMP_INSN
      && JUMP_LABEL (PREV_INSN (to)) != 0
      && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
    cse_around_loop (JUMP_LABEL (PREV_INSN (to)));

  return to ? NEXT_INSN (to) : 0;
}
\f
/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count, INCR is how much
   we count each register usage.

   Don't count a usage of DEST, which is the SET_DEST of a SET which
   contains X in its SET_SRC.  This is because such a SET does not
   modify the liveness of DEST.  */

static void
count_reg_usage (x, counts, dest, incr)
     rtx x;
     int *counts;
     rtx dest;
     int incr;
{
  enum rtx_code code;
  char *fmt;
  int i, j;

  if (x == 0)
    return;

  switch (code = GET_CODE (x))
    {
    case REG:
      if (x != dest)
        counts[REGNO (x)] += incr;
      return;

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
         as being used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
        count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.  */
      if (GET_CODE (SET_DEST (x)) != REG)
        count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);

      /* If SRC has side-effects, then we can't delete this insn, so the
         usage of SET_DEST inside SRC counts.

         ??? Strictly-speaking, we might be preserving this insn
         because some other SET has side-effects, but that's hard
         to do and can't happen now.  */
      count_reg_usage (SET_SRC (x), counts,
                       side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
                       incr);
      return;

    case CALL_INSN:
      count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);

      /* ... falls through ...  */
    case INSN:
    case JUMP_INSN:
      count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);

      /* Things used in a REG_EQUAL note aren't dead since loop may try to
         use them.  */

      count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
      return;

    case EXPR_LIST:
    case INSN_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL
          || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE))
        count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        count_reg_usage (XEXP (x, i), counts, dest, incr);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
    }
}
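
/* A minimal standalone sketch (not part of GCC) of the counting scheme
   above, using an invented three-node expression type instead of rtx.
   It shows the one subtlety: uses of DEST are skipped, so an insn like
   (set (reg 5) (plus (reg 5) (const_int 1))) does not keep reg 5 alive
   by itself and can be deleted if nothing else uses reg 5.  */
#if 0
enum ekind { E_REG, E_PLUS, E_CONST };
struct expr { enum ekind kind; int regno; struct expr *a, *b; };

static void
count_uses (x, counts, dest, incr)
     struct expr *x;
     int *counts;
     struct expr *dest;
     int incr;
{
  if (x == 0)
    return;
  switch (x->kind)
    {
    case E_REG:
      if (x != dest)            /* Skip uses of the destination itself.  */
        counts[x->regno] += incr;
      return;
    case E_CONST:
      return;
    case E_PLUS:
      count_uses (x->a, counts, dest, incr);
      count_uses (x->b, counts, dest, incr);
      return;
    }
}
#endif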
\f
/* Scan all the insns and delete any that are dead; i.e., they store a register
   that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in loop since it won't try to
   move dead invariants out of loops or make givs for dead quantities.  The
   remaining passes of the compilation are also sped up.  */

void
delete_trivially_dead_insns (insns, nreg)
     rtx insns;
     int nreg;
{
  int *counts = (int *) alloca (nreg * sizeof (int));
  rtx insn, prev;
#ifdef HAVE_cc0
  rtx tem;
#endif
  int i;
  int in_libcall = 0, dead_libcall = 0;

  /* First count the number of times each register is used.  */
  bzero ((char *) counts, sizeof (int) * nreg);
  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
    count_reg_usage (insn, counts, NULL_RTX, 1);

  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.  */
  for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
    {
      int live_insn = 0;
      rtx note;

      prev = prev_real_insn (insn);

      /* Don't delete any insns that are part of a libcall block unless
         we can delete the whole libcall block.

         Flow or loop might get confused if we did that.  Remember
         that we are scanning backwards.  */
      if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
        {
          in_libcall = 1;
          live_insn = 1;
          dead_libcall = 0;

          /* See if there's a REG_EQUAL note on this insn and try to
             replace the source with the REG_EQUAL expression.

             We assume that insns with REG_RETVALs can only be reg->reg
             copies at this point.  */
          note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
          if (note)
            {
              rtx set = single_set (insn);
              if (set
                  && validate_change (insn, &SET_SRC (set), XEXP (note, 0), 0))
                {
                  remove_note (insn,
                               find_reg_note (insn, REG_RETVAL, NULL_RTX));
                  dead_libcall = 1;
                }
            }
        }
      else if (in_libcall)
        live_insn = ! dead_libcall;
      else if (GET_CODE (PATTERN (insn)) == SET)
        {
          if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
              && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
            ;

#ifdef HAVE_cc0
          else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
                   && ! side_effects_p (SET_SRC (PATTERN (insn)))
                   && ((tem = next_nonnote_insn (insn)) == 0
                       || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
                       || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
            ;
#endif
          else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
                   || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
                   || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
                   || side_effects_p (SET_SRC (PATTERN (insn))))
            live_insn = 1;
        }
      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
        for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
          {
            rtx elt = XVECEXP (PATTERN (insn), 0, i);

            if (GET_CODE (elt) == SET)
              {
                if (GET_CODE (SET_DEST (elt)) == REG
                    && SET_DEST (elt) == SET_SRC (elt))
                  ;

#ifdef HAVE_cc0
                else if (GET_CODE (SET_DEST (elt)) == CC0
                         && ! side_effects_p (SET_SRC (elt))
                         && ((tem = next_nonnote_insn (insn)) == 0
                             || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
                             || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
                  ;
#endif
                else if (GET_CODE (SET_DEST (elt)) != REG
                         || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
                         || counts[REGNO (SET_DEST (elt))] != 0
                         || side_effects_p (SET_SRC (elt)))
                  live_insn = 1;
              }
            else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
              live_insn = 1;
          }
      else
        live_insn = 1;

      /* If this is a dead insn, delete it and show registers in it aren't
         being used.  */

      if (! live_insn)
        {
          count_reg_usage (insn, counts, NULL_RTX, -1);
          delete_insn (insn);
        }

      if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
        {
          in_libcall = 0;
          dead_libcall = 0;
        }
    }
}
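
/* A minimal standalone sketch (not part of GCC) of the deletion loop
   above, over an invented doubly-linked list of "insns" each setting one
   register and using up to two.  It shows the essential backward pass:
   an insn whose destination count is zero is unlinked and the counts of
   its operands are decremented, which can expose further dead insns
   earlier in the list.  Side effects, libcall blocks and updating the
   list head are deliberately omitted.  */
#if 0
struct fake_insn
{
  int dest;                     /* Register set by this insn.  */
  int nuses;                    /* Number of registers used.  */
  int use[2];                   /* The registers used.  */
  struct fake_insn *prev, *next;
};

static void
delete_dead (last, counts)
     struct fake_insn *last;
     int *counts;
{
  struct fake_insn *insn, *prev;

  for (insn = last; insn; insn = prev)
    {
      int i;

      prev = insn->prev;
      if (counts[insn->dest] != 0)
        continue;               /* Result is used; insn stays.  */

      /* Dead: give back the uses it contributed, then unlink it.  */
      for (i = 0; i < insn->nuses; i++)
        counts[insn->use[i]]--;
      if (insn->prev)
        insn->prev->next = insn->next;
      if (insn->next)
        insn->next->prev = insn->prev;
    }
}
#endif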