1 /* Common subexpression elimination for GNU compiler.
2    Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 /* stdio.h must precede rtl.h for FFS. */
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27
28 #include "rtl.h"
29 #include "tm_p.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "flags.h"
34 #include "real.h"
35 #include "insn-config.h"
36 #include "recog.h"
37 #include "function.h"
38 #include "expr.h"
39 #include "toplev.h"
40 #include "output.h"
41 #include "ggc.h"
42 #include "timevar.h"
43 #include "except.h"
44 #include "target.h"
45 #include "params.h"
46
47 /* The basic idea of common subexpression elimination is to go
48 through the code, keeping a record of expressions that would
49 have the same value at the current scan point, and replacing
50 expressions encountered with the cheapest equivalent expression.
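
   As an illustrative sketch (register numbers invented), given

       (set (reg 101) (plus:SI (reg 100) (const_int 4)))
       (set (reg 102) (plus:SI (reg 100) (const_int 4)))

   the second PLUS is found in the hash table when it is reached, and
   the insn is rewritten to use the cheaper equivalent:

       (set (reg 102) (reg 101))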
51
52 It is too complicated to keep track of the different possibilities
53 when control paths merge in this code; so, at each label, we forget all
54 that is known and start fresh. This can be described as processing each
55 extended basic block separately. We have a separate pass to perform
56 global CSE.
57
58 Note CSE can turn a conditional or computed jump into a nop or
59 an unconditional jump. When this occurs we arrange to run the jump
60 optimizer after CSE to delete the unreachable code.
61
62 We use two data structures to record the equivalent expressions:
63 a hash table for most expressions, and a vector of "quantity
64 numbers" to record equivalent (pseudo) registers.
65
66 The use of the special data structure for registers is desirable
67    because it is faster.  It is possible because register references
68 contain a fairly small number, the register number, taken from
69 a contiguously allocated series, and two register references are
70 identical if they have the same number. General expressions
71 do not have any such thing, so the only way to retrieve the
72 information recorded on an expression other than a register
73 is to keep it in a hash table.
74
75 Registers and "quantity numbers":
76
77 At the start of each basic block, all of the (hardware and pseudo)
78 registers used in the function are given distinct quantity
79 numbers to indicate their contents. During scan, when the code
80 copies one register into another, we copy the quantity number.
81 When a register is loaded in any other way, we allocate a new
82 quantity number to describe the value generated by this operation.
83 `reg_qty' records what quantity a register is currently thought
84 of as containing.
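
   For example (a sketch with invented register numbers): after scanning

       (set (reg 105) (reg 103))

   the quantity number is copied, so reg_qty[105] == reg_qty[103], and
   both registers are linked on that quantity's register chain.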
85
86 All real quantity numbers are greater than or equal to `max_reg'.
87 If register N has not been assigned a quantity, reg_qty[N] will equal N.
88
89 Quantity numbers below `max_reg' do not exist and none of the `qty_table'
90 entries should be referenced with an index below `max_reg'.
91
92 We also maintain a bidirectional chain of registers for each
93 quantity number. The `qty_table` members `first_reg' and `last_reg',
94 and `reg_eqv_table' members `next' and `prev' hold these chains.
95
96 The first register in a chain is the one whose lifespan is least local.
97 Among equals, it is the one that was seen first.
98 We replace any equivalent register with that one.
99
100    If two registers have the same quantity number, then REG expressions
101    with the qty_table `mode' are in the hash table for both registers
102    and are in the same class.
103
104    The converse is not true.  Since hard registers may be referenced in
105    any mode, two REG expressions might be equivalent in the hash table
106    but not have the same quantity number if the quantity of one of the
107    registers does not have the same mode as those expressions.
108
109 Constants and quantity numbers
110
111 When a quantity has a known constant value, that value is stored
112 in the appropriate qty_table `const_rtx'. This is in addition to
113 putting the constant in the hash table as is usual for non-regs.
114
115 Whether a reg or a constant is preferred is determined by the configuration
116 macro CONST_COSTS and will often depend on the constant value. In any
117    event, expressions containing constants can be simplified by fold_rtx.
118
119 When a quantity has a known nearly constant value (such as an address
120 of a stack slot), that value is stored in the appropriate qty_table
121 `const_rtx'.
122
123 Integer constants don't have a machine mode. However, cse
124 determines the intended machine mode from the destination
125 of the instruction that moves the constant. The machine mode
126 is recorded in the hash table along with the actual RTL
127 constant expression so that different modes are kept separate.
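
   For example, in the (invented) insn

       (set (reg:SI 104) (const_int 4))

   the constant 4 is recorded with mode SImode, taken from the
   destination, so it is kept separate from the same constant used in
   DImode.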
128
129 Other expressions:
130
131 To record known equivalences among expressions in general
132 we use a hash table called `table'. It has a fixed number of buckets
133 that contain chains of `struct table_elt' elements for expressions.
134 These chains connect the elements whose expressions have the same
135 hash codes.
136
137 Other chains through the same elements connect the elements which
138 currently have equivalent values.
139
140 Register references in an expression are canonicalized before hashing
141 the expression. This is done using `reg_qty' and qty_table `first_reg'.
142 The hash code of a register reference is computed using the quantity
143 number, not the register number.
144
145 When the value of an expression changes, it is necessary to remove from the
146 hash table not just that expression but all expressions whose values
147 could be different as a result.
148
149 1. If the value changing is in memory, except in special cases
150 ANYTHING referring to memory could be changed. That is because
151 nobody knows where a pointer does not point.
152 The function `invalidate_memory' removes what is necessary.
153
154 The special cases are when the address is constant or is
155 a constant plus a fixed register such as the frame pointer
156 or a static chain pointer. When such addresses are stored in,
157 we can tell exactly which other such addresses must be invalidated
158 due to overlap. `invalidate' does this.
159 All expressions that refer to non-constant
160 memory addresses are also invalidated. `invalidate_memory' does this.
161
162 2. If the value changing is a register, all expressions
163 containing references to that register, and only those,
164 must be removed.
165
166 Because searching the entire hash table for expressions that contain
167 a register is very slow, we try to figure out when it isn't necessary.
168 Precisely, this is necessary only when expressions have been
169 entered in the hash table using this register, and then the value has
170 changed, and then another expression wants to be added to refer to
171 the register's new value. This sequence of circumstances is rare
172 within any one basic block.
173
174 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
175 reg_tick[i] is incremented whenever a value is stored in register i.
176 reg_in_table[i] holds -1 if no references to register i have been
177 entered in the table; otherwise, it contains the value reg_tick[i] had
178 when the references were entered. If we want to enter a reference
179 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
180    Until we want to enter a new entry, the mere fact that the two vectors
181    don't match causes the existing entries to be ignored if anyone tries to match them.
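
   As a concrete sketch (values invented): if reg_in_table[5] == 2 but
   reg_tick[5] == 4, register 5 has been stored into twice since its
   references were entered; before entering a new expression mentioning
   register 5 we must scan the table and remove the stale references.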
182
183 Registers themselves are entered in the hash table as well as in
184 the equivalent-register chains. However, the vectors `reg_tick'
185 and `reg_in_table' do not apply to expressions which are simple
186 register references. These expressions are removed from the table
187 immediately when they become invalid, and this can be done even if
188 we do not immediately search for all the expressions that refer to
189 the register.
190
191 A CLOBBER rtx in an instruction invalidates its operand for further
192 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
193 invalidates everything that resides in memory.
194
195 Related expressions:
196
197 Constant expressions that differ only by an additive integer
198 are called related. When a constant expression is put in
199 the table, the related expression with no constant term
200 is also entered. These are made to point at each other
201 so that it is possible to find out if there exists any
202 register equivalent to an expression related to a given expression. */
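
/* As an illustrative example of related expressions: when
   (const (plus (symbol_ref "x") (const_int 8))) is entered, the
   integer-free subexpression (symbol_ref "x") is entered as well, and
   the two table elements are linked through their `related_value'
   fields.  */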
203
204 /* One plus largest register number used in this function. */
205
206 static int max_reg;
207
208 /* One plus largest instruction UID used in this function at time of
209 cse_main call. */
210
211 static int max_insn_uid;
212
213 /* Length of qty_table vector. We know in advance we will not need
214 a quantity number this big. */
215
216 static int max_qty;
217
218 /* Next quantity number to be allocated.
219 This is 1 + the largest number needed so far. */
220
221 static int next_qty;
222
223 /* Per-qty information tracking.
224
225 `first_reg' and `last_reg' track the head and tail of the
226 chain of registers which currently contain this quantity.
227
228 `mode' contains the machine mode of this quantity.
229
230 `const_rtx' holds the rtx of the constant value of this
231    quantity, if known.  A sum of the frame/arg pointer
232 and a constant can also be entered here. When this holds
233 a known value, `const_insn' is the insn which stored the
234 constant value.
235
236 `comparison_{code,const,qty}' are used to track when a
237    comparison between a quantity and some constant or register has
238    been seen.  In such a case, we know the result of the comparison
239 in case we see it again. These members record a comparison that
240 is known to be true. `comparison_code' holds the rtx code of such
241 a comparison, else it is set to UNKNOWN and the other two
242 comparison members are undefined. `comparison_const' holds
243 the constant being compared against, or zero if the comparison
244 is not against a constant. `comparison_qty' holds the quantity
245 being compared against when the result is known. If the comparison
246 is not with a register, `comparison_qty' is -1. */
247
248 struct qty_table_elem
249 {
250 rtx const_rtx;
251 rtx const_insn;
252 rtx comparison_const;
253 int comparison_qty;
254 unsigned int first_reg, last_reg;
255 /* The sizes of these fields should match the sizes of the
256 code and mode fields of struct rtx_def (see rtl.h). */
257 ENUM_BITFIELD(rtx_code) comparison_code : 16;
258 ENUM_BITFIELD(machine_mode) mode : 8;
259 };
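
/* For example (a sketch): along a path where a branch on
   (gt:SI (reg 100) (const_int 0)) is known taken, the quantity for
   register 100 can record comparison_code == GT and
   comparison_const == const0_rtx, so a later identical test can be
   resolved without redoing the comparison.  */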
260
261 /* The table of all qtys, indexed by qty number. */
262 static struct qty_table_elem *qty_table;
263
264 #ifdef HAVE_cc0
265 /* For machines that have a CC0, we do not record its value in the hash
266 table since its use is guaranteed to be the insn immediately following
267 its definition and any other insn is presumed to invalidate it.
268
269 Instead, we store below the value last assigned to CC0. If it should
270 happen to be a constant, it is stored in preference to the actual
271 assigned value. In case it is a constant, we store the mode in which
272 the constant should be interpreted. */
273
274 static rtx prev_insn_cc0;
275 static enum machine_mode prev_insn_cc0_mode;
276
277 /* Previous actual insn. 0 if at first insn of basic block. */
278
279 static rtx prev_insn;
280 #endif
281
282 /* Insn being scanned. */
283
284 static rtx this_insn;
285
286 /* Indexed by register number, this gives the number of the next (or
287 previous) register in the chain of registers sharing the same
288 value.
289
290 Or -1 if this register is at the end of the chain.
291
292 If reg_qty[N] == N, reg_eqv_table[N].next is undefined. */
293
294 /* Per-register equivalence chain. */
295 struct reg_eqv_elem
296 {
297 int next, prev;
298 };
299
300 /* The table of all register equivalence chains. */
301 static struct reg_eqv_elem *reg_eqv_table;
302
303 struct cse_reg_info
304 {
305 /* Next in hash chain. */
306 struct cse_reg_info *hash_next;
307
308 /* The next cse_reg_info structure in the free or used list. */
309 struct cse_reg_info *next;
310
311 /* Search key */
312 unsigned int regno;
313
314 /* The quantity number of the register's current contents. */
315 int reg_qty;
316
317 /* The number of times the register has been altered in the current
318 basic block. */
319 int reg_tick;
320
321 /* The REG_TICK value at which rtx's containing this register are
322 valid in the hash table. If this does not equal the current
323 reg_tick value, such expressions existing in the hash table are
324 invalid. */
325 int reg_in_table;
326
327 /* The SUBREG that was set when REG_TICK was last incremented. Set
328 to -1 if the last store was to the whole register, not a subreg. */
329 unsigned int subreg_ticked;
330 };
331
332 /* A free list of cse_reg_info entries. */
333 static struct cse_reg_info *cse_reg_info_free_list;
334
335 /* A used list of cse_reg_info entries. */
336 static struct cse_reg_info *cse_reg_info_used_list;
337 static struct cse_reg_info *cse_reg_info_used_list_end;
338
339 /* A mapping from registers to cse_reg_info data structures. */
340 #define REGHASH_SHIFT 7
341 #define REGHASH_SIZE (1 << REGHASH_SHIFT)
342 #define REGHASH_MASK (REGHASH_SIZE - 1)
343 static struct cse_reg_info *reg_hash[REGHASH_SIZE];
344
345 #define REGHASH_FN(REGNO) \
346 (((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
347
348 /* The last lookup we did into the reg_hash table.  This allows us
349 to cache repeated lookups. */
350 static unsigned int cached_regno;
351 static struct cse_reg_info *cached_cse_reg_info;
352
353 /* A HARD_REG_SET containing all the hard registers for which there is
354 currently a REG expression in the hash table. Note the difference
355 from the above variables, which indicate if the REG is mentioned in some
356 expression in the table. */
357
358 static HARD_REG_SET hard_regs_in_table;
359
360 /* CUID of insn that starts the basic block currently being cse-processed. */
361
362 static int cse_basic_block_start;
363
364 /* CUID of insn that ends the basic block currently being cse-processed. */
365
366 static int cse_basic_block_end;
367
368 /* Vector mapping INSN_UIDs to cuids.
369    The cuids are like uids but always increase monotonically.
370 We use them to see whether a reg is used outside a given basic block. */
371
372 static int *uid_cuid;
373
374 /* Highest UID in UID_CUID. */
375 static int max_uid;
376
377 /* Get the cuid of an insn. */
378
379 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
380
381 /* Nonzero if this pass has made changes, and therefore it's
382 worthwhile to run the garbage collector. */
383
384 static int cse_altered;
385
386 /* Nonzero if cse has altered conditional jump insns
387 in such a way that jump optimization should be redone. */
388
389 static int cse_jumps_altered;
390
391 /* Nonzero if we have put a LABEL_REF into the hash table for an INSN
392    without a REG_LABEL; if so, we must rerun jump after CSE to add the note.  */
393 static int recorded_label_ref;
394
395 /* canon_hash stores 1 in do_not_record
396 if it notices a reference to CC0, PC, or some other volatile
397 subexpression. */
398
399 static int do_not_record;
400
401 #ifdef LOAD_EXTEND_OP
402
403 /* Scratch rtl used when looking for load-extended copy of a MEM. */
404 static rtx memory_extend_rtx;
405 #endif
406
407 /* canon_hash stores 1 in hash_arg_in_memory
408 if it notices a reference to memory within the expression being hashed. */
409
410 static int hash_arg_in_memory;
411
412 /* The hash table contains buckets which are chains of `struct table_elt's,
413 each recording one expression's information.
414 That expression is in the `exp' field.
415
416 The canon_exp field contains a canonical (from the point of view of
417 alias analysis) version of the `exp' field.
418
419 Those elements with the same hash code are chained in both directions
420 through the `next_same_hash' and `prev_same_hash' fields.
421
422 Each set of expressions with equivalent values
423    is on a two-way chain through the `next_same_value'
424 and `prev_same_value' fields, and all point with
425 the `first_same_value' field at the first element in
426 that chain. The chain is in order of increasing cost.
427 Each element's cost value is in its `cost' field.
428
429 The `in_memory' field is nonzero for elements that
430 involve any reference to memory. These elements are removed
431 whenever a write is done to an unidentified location in memory.
432 To be safe, we assume that a memory address is unidentified unless
433 the address is either a symbol constant or a constant plus
434 the frame pointer or argument pointer.
435
436 The `related_value' field is used to connect related expressions
437 (that differ by adding an integer).
438 The related expressions are chained in a circular fashion.
439 `related_value' is zero for expressions for which this
440 chain is not useful.
441
442 The `cost' field stores the cost of this element's expression.
443 The `regcost' field stores the value returned by approx_reg_cost for
444 this element's expression.
445
446 The `is_const' flag is set if the element is a constant (including
447 a fixed address).
448
449 The `flag' field is used as a temporary during some search routines.
450
451 The `mode' field is usually the same as GET_MODE (`exp'), but
452 if `exp' is a CONST_INT and has no machine mode then the `mode'
453 field is the mode it was being used as. Each constant is
454 recorded separately for each mode it is used with. */
455
456 struct table_elt
457 {
458 rtx exp;
459 rtx canon_exp;
460 struct table_elt *next_same_hash;
461 struct table_elt *prev_same_hash;
462 struct table_elt *next_same_value;
463 struct table_elt *prev_same_value;
464 struct table_elt *first_same_value;
465 struct table_elt *related_value;
466 int cost;
467 int regcost;
468 /* The size of this field should match the size
469 of the mode field of struct rtx_def (see rtl.h). */
470 ENUM_BITFIELD(machine_mode) mode : 8;
471 char in_memory;
472 char is_const;
473 char flag;
474 };
475
476 /* We don't want a lot of buckets, because we rarely have very many
477    things stored in the hash table, and extra buckets slow down the
478    frequently executed loops that walk the entire table.  */
479 #define HASH_SHIFT 5
480 #define HASH_SIZE (1 << HASH_SHIFT)
481 #define HASH_MASK (HASH_SIZE - 1)
482
483 /* Compute hash code of X in mode M.  Special-case the case where X is a pseudo
484 register (hard registers may require `do_not_record' to be set). */
485
486 #define HASH(X, M) \
487 ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
488 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
489 : canon_hash (X, M)) & HASH_MASK)
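
/* For instance, a pseudo such as (reg:SI 104) is hashed from
   REG_QTY (104) rather than from the register number, so all registers
   currently holding the same quantity hash alike.  (Invented register
   number, for illustration.)  */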
490
491 /* Determine whether register number N is considered a fixed register for the
492 purpose of approximating register costs.
493 It is desirable to replace other regs with fixed regs, to reduce need for
494 non-fixed hard regs.
495 A reg wins if it is either the frame pointer or designated as fixed. */
496 #define FIXED_REGNO_P(N) \
497 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
498 || fixed_regs[N] || global_regs[N])
499
500 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
501 hard registers and pointers into the frame are the cheapest with a cost
502    of 0.  Next come pseudos with a cost of 1 and other hard registers with
503 a cost of 2. Aside from these special cases, call `rtx_cost'. */
504
505 #define CHEAP_REGNO(N) \
506 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
507 || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
508 || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
509 || ((N) < FIRST_PSEUDO_REGISTER \
510 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
511
512 #define COST(X) (GET_CODE (X) == REG ? 0 : notreg_cost (X, SET))
513 #define COST_IN(X,OUTER) (GET_CODE (X) == REG ? 0 : notreg_cost (X, OUTER))
514
515 /* Get the info associated with register N. */
516
517 #define GET_CSE_REG_INFO(N) \
518 (((N) == cached_regno && cached_cse_reg_info) \
519 ? cached_cse_reg_info : get_cse_reg_info ((N)))
520
521 /* Get the number of times this register has been updated in this
522 basic block. */
523
524 #define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)
525
526 /* Get the point at which REG was recorded in the table. */
527
528 #define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)
529
530 /* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
531 SUBREG). */
532
533 #define SUBREG_TICKED(N) ((GET_CSE_REG_INFO (N))->subreg_ticked)
534
535 /* Get the quantity number for REG. */
536
537 #define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)
538
539 /* Determine if the quantity number for register N represents a valid index
540 into the qty_table. */
541
542 #define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (int) (N))
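
/* E.g. at the start of a basic block every register N has
   REG_QTY (N) == N, so REGNO_QTY_VALID_P (N) is false until
   make_new_qty or make_regs_eqv assigns a real quantity, which is
   always >= max_reg.  */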
543
544 static struct table_elt *table[HASH_SIZE];
545
546 /* Chain of `struct table_elt's made so far for this function
547 but currently removed from the table. */
548
549 static struct table_elt *free_element_chain;
550
551 /* Number of `struct table_elt' structures made so far for this function. */
552
553 static int n_elements_made;
554
555 /* Maximum value `n_elements_made' has had so far in this compilation
556 for functions previously processed. */
557
558 static int max_elements_made;
559
560 /* Surviving equivalence class when two equivalence classes are merged
561 by recording the effects of a jump in the last insn. Zero if the
562 last insn was not a conditional jump. */
563
564 static struct table_elt *last_jump_equiv_class;
565
566 /* Set to the cost of a constant pool reference if one was found for a
567 symbolic constant. If this was found, it means we should try to
568 convert constants into constant pool entries if they don't fit in
569 the insn. */
570
571 static int constant_pool_entries_cost;
572 static int constant_pool_entries_regcost;
573
574 /* This data describes a block that will be processed by cse_basic_block. */
575
576 struct cse_basic_block_data
577 {
578 /* Lowest CUID value of insns in block. */
579 int low_cuid;
580 /* Highest CUID value of insns in block. */
581 int high_cuid;
582 /* Total number of SETs in block. */
583 int nsets;
584 /* Last insn in the block. */
585 rtx last;
586 /* Size of current branch path, if any. */
587 int path_size;
588 /* Current branch path, indicating which branches will be taken. */
589 struct branch_path
590 {
591 /* The branch insn. */
592 rtx branch;
593 /* Whether it should be taken or not. AROUND is the same as taken
594 except that it is used when the destination label is not preceded
595 by a BARRIER. */
596 enum taken {TAKEN, NOT_TAKEN, AROUND} status;
597 } *path;
598 };
599
600 static bool fixed_base_plus_p (rtx x);
601 static int notreg_cost (rtx, enum rtx_code);
602 static int approx_reg_cost_1 (rtx *, void *);
603 static int approx_reg_cost (rtx);
604 static int preferrable (int, int, int, int);
605 static void new_basic_block (void);
606 static void make_new_qty (unsigned int, enum machine_mode);
607 static void make_regs_eqv (unsigned int, unsigned int);
608 static void delete_reg_equiv (unsigned int);
609 static int mention_regs (rtx);
610 static int insert_regs (rtx, struct table_elt *, int);
611 static void remove_from_table (struct table_elt *, unsigned);
612 static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
613 static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
614 static rtx lookup_as_function (rtx, enum rtx_code);
615 static struct table_elt *insert (rtx, struct table_elt *, unsigned,
616 enum machine_mode);
617 static void merge_equiv_classes (struct table_elt *, struct table_elt *);
618 static void invalidate (rtx, enum machine_mode);
619 static int cse_rtx_varies_p (rtx, int);
620 static void remove_invalid_refs (unsigned int);
621 static void remove_invalid_subreg_refs (unsigned int, unsigned int,
622 enum machine_mode);
623 static void rehash_using_reg (rtx);
624 static void invalidate_memory (void);
625 static void invalidate_for_call (void);
626 static rtx use_related_value (rtx, struct table_elt *);
627 static unsigned canon_hash (rtx, enum machine_mode);
628 static unsigned canon_hash_string (const char *);
629 static unsigned safe_hash (rtx, enum machine_mode);
630 static int exp_equiv_p (rtx, rtx, int, int);
631 static rtx canon_reg (rtx, rtx);
632 static void find_best_addr (rtx, rtx *, enum machine_mode);
633 static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
634 enum machine_mode *,
635 enum machine_mode *);
636 static rtx fold_rtx (rtx, rtx);
637 static rtx equiv_constant (rtx);
638 static void record_jump_equiv (rtx, int);
639 static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
640 int);
641 static void cse_insn (rtx, rtx);
642 static int addr_affects_sp_p (rtx);
643 static void invalidate_from_clobbers (rtx);
644 static rtx cse_process_notes (rtx, rtx);
645 static void cse_around_loop (rtx);
646 static void invalidate_skipped_set (rtx, rtx, void *);
647 static void invalidate_skipped_block (rtx);
648 static void cse_check_loop_start (rtx, rtx, void *);
649 static void cse_set_around_loop (rtx, rtx, rtx);
650 static rtx cse_basic_block (rtx, rtx, struct branch_path *, int);
651 static void count_reg_usage (rtx, int *, int);
652 static int check_for_label_ref (rtx *, void *);
653 extern void dump_class (struct table_elt*);
654 static struct cse_reg_info * get_cse_reg_info (unsigned int);
655 static int check_dependence (rtx *, void *);
656
657 static void flush_hash_table (void);
658 static bool insn_live_p (rtx, int *);
659 static bool set_live_p (rtx, rtx, int *);
660 static bool dead_libcall_p (rtx, int *);
661 static int cse_change_cc_mode (rtx *, void *);
662 static void cse_change_cc_mode_insns (rtx, rtx, rtx);
663 static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);
664 \f
665 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
666 virtual regs here because the simplify_*_operation routines are called
667 by integrate.c, which is called before virtual register instantiation. */
668
669 static bool
670 fixed_base_plus_p (rtx x)
671 {
672 switch (GET_CODE (x))
673 {
674 case REG:
675 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
676 return true;
677 if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
678 return true;
679 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
680 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
681 return true;
682 return false;
683
684 case PLUS:
685 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
686 return false;
687 return fixed_base_plus_p (XEXP (x, 0));
688
689 case ADDRESSOF:
690 return true;
691
692 default:
693 return false;
694 }
695 }
696
697 /* Dump the expressions in the equivalence class indicated by CLASSP.
698 This function is used only for debugging. */
699 void
700 dump_class (struct table_elt *classp)
701 {
702 struct table_elt *elt;
703
704 fprintf (stderr, "Equivalence chain for ");
705 print_rtl (stderr, classp->exp);
706 fprintf (stderr, ": \n");
707
708 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
709 {
710 print_rtl (stderr, elt->exp);
711 fprintf (stderr, "\n");
712 }
713 }
714
715 /* Subroutine of approx_reg_cost; called through for_each_rtx. */
716
717 static int
718 approx_reg_cost_1 (rtx *xp, void *data)
719 {
720 rtx x = *xp;
721 int *cost_p = data;
722
723 if (x && GET_CODE (x) == REG)
724 {
725 unsigned int regno = REGNO (x);
726
727 if (! CHEAP_REGNO (regno))
728 {
729 if (regno < FIRST_PSEUDO_REGISTER)
730 {
731 if (SMALL_REGISTER_CLASSES)
732 return 1;
733 *cost_p += 2;
734 }
735 else
736 *cost_p += 1;
737 }
738 }
739
740 return 0;
741 }
742
743 /* Return an estimate of the cost of the registers used in an rtx.
744 This is mostly the number of different REG expressions in the rtx;
745    however, for exceptions such as fixed registers, we use a cost of
746 0. If any other hard register reference occurs, return MAX_COST. */
747
748 static int
749 approx_reg_cost (rtx x)
750 {
751 int cost = 0;
752
753 if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
754 return MAX_COST;
755
756 return cost;
757 }
758
759 /* Return a negative value if an rtx A, whose costs are given by COST_A
760 and REGCOST_A, is more desirable than an rtx B.
761 Return a positive value if A is less desirable, or 0 if the two are
762 equally good. */
763 static int
764 preferrable (int cost_a, int regcost_a, int cost_b, int regcost_b)
765 {
766 /* First, get rid of cases involving expressions that are entirely
767 unwanted. */
768 if (cost_a != cost_b)
769 {
770 if (cost_a == MAX_COST)
771 return 1;
772 if (cost_b == MAX_COST)
773 return -1;
774 }
775
776 /* Avoid extending lifetimes of hardregs. */
777 if (regcost_a != regcost_b)
778 {
779 if (regcost_a == MAX_COST)
780 return 1;
781 if (regcost_b == MAX_COST)
782 return -1;
783 }
784
785 /* Normal operation costs take precedence. */
786 if (cost_a != cost_b)
787 return cost_a - cost_b;
788 /* Only if these are identical consider effects on register pressure. */
789 if (regcost_a != regcost_b)
790 return regcost_a - regcost_b;
791 return 0;
792 }
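
/* For example, preferrable (4, MAX_COST, 4, 2) returns 1: the rtx
   costs tie, but A would tie up a hard register (regcost MAX_COST),
   so B is the better choice.  (Invented argument values, for
   illustration.)  */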
793
794 /* Internal function, to compute cost when X is not a register; called
795 from COST macro to keep it simple. */
796
797 static int
798 notreg_cost (rtx x, enum rtx_code outer)
799 {
800 return ((GET_CODE (x) == SUBREG
801 && GET_CODE (SUBREG_REG (x)) == REG
802 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
803 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
804 && (GET_MODE_SIZE (GET_MODE (x))
805 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
806 && subreg_lowpart_p (x)
807 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
808 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
809 ? 0
810 : rtx_cost (x, outer) * 2);
811 }
812
813 /* Return an estimate of the cost of computing rtx X.
814 One use is in cse, to decide which expression to keep in the hash table.
815 Another is in rtl generation, to pick the cheapest way to multiply.
816 Other uses like the latter are expected in the future. */
817
818 int
819 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
820 {
821 int i, j;
822 enum rtx_code code;
823 const char *fmt;
824 int total;
825
826 if (x == 0)
827 return 0;
828
829 /* Compute the default costs of certain things.
830 Note that targetm.rtx_costs can override the defaults. */
831
832 code = GET_CODE (x);
833 switch (code)
834 {
835 case MULT:
836 total = COSTS_N_INSNS (5);
837 break;
838 case DIV:
839 case UDIV:
840 case MOD:
841 case UMOD:
842 total = COSTS_N_INSNS (7);
843 break;
844 case USE:
845 /* Used in loop.c and combine.c as a marker. */
846 total = 0;
847 break;
848 default:
849 total = COSTS_N_INSNS (1);
850 }
851
852 switch (code)
853 {
854 case REG:
855 return 0;
856
857 case SUBREG:
858 /* If we can't tie these modes, make this expensive. The larger
859 the mode, the more expensive it is. */
860 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
861 return COSTS_N_INSNS (2
862 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
863 break;
864
865 default:
866 if ((*targetm.rtx_costs) (x, code, outer_code, &total))
867 return total;
868 break;
869 }
870
871 /* Sum the costs of the sub-rtx's, plus cost of this operation,
872 which is already in total. */
873
874 fmt = GET_RTX_FORMAT (code);
875 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
876 if (fmt[i] == 'e')
877 total += rtx_cost (XEXP (x, i), code);
878 else if (fmt[i] == 'E')
879 for (j = 0; j < XVECLEN (x, i); j++)
880 total += rtx_cost (XVECEXP (x, i, j), code);
881
882 return total;
883 }
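
/* As a worked example (assuming the target's rtx_costs hook does not
   override the defaults): rtx_cost of (plus (reg X) (mult (reg Y) (reg Z)))
   is COSTS_N_INSNS (1) for the PLUS plus COSTS_N_INSNS (5) for the
   inner MULT; the REG operands contribute 0.  */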
884 \f
885 /* Return cost of address expression X.
886    Expect that X is a properly formed address reference.  */
887
888 int
889 address_cost (rtx x, enum machine_mode mode)
890 {
891 /* The address_cost target hook does not deal with ADDRESSOF nodes. But,
892 during CSE, such nodes are present. Using an ADDRESSOF node which
893 refers to the address of a REG is a good thing because we can then
894    turn (MEM (ADDRESSOF (REG))) into just plain REG.  */
895
896 if (GET_CODE (x) == ADDRESSOF && REG_P (XEXP ((x), 0)))
897 return -1;
898
899   /* We may be asked for the cost of various unusual addresses, such as the
900      operands of a push instruction.  It is not worthwhile to complicate the
901      target hook to handle such cases.  */
902
903 if (!memory_address_p (mode, x))
904 return 1000;
905
906 return (*targetm.address_cost) (x);
907 }
908
909 /* If the target doesn't override, compute the cost as with arithmetic. */
910
911 int
912 default_address_cost (rtx x)
913 {
914 return rtx_cost (x, MEM);
915 }
916 \f
917 static struct cse_reg_info *
918 get_cse_reg_info (unsigned int regno)
919 {
920 struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
921 struct cse_reg_info *p;
922
923 for (p = *hash_head; p != NULL; p = p->hash_next)
924 if (p->regno == regno)
925 break;
926
927 if (p == NULL)
928 {
929 /* Get a new cse_reg_info structure. */
930 if (cse_reg_info_free_list)
931 {
932 p = cse_reg_info_free_list;
933 cse_reg_info_free_list = p->next;
934 }
935 else
936 p = xmalloc (sizeof (struct cse_reg_info));
937
938 /* Insert into hash table. */
939 p->hash_next = *hash_head;
940 *hash_head = p;
941
942 /* Initialize it. */
943 p->reg_tick = 1;
944 p->reg_in_table = -1;
945 p->subreg_ticked = -1;
946 p->reg_qty = regno;
947 p->regno = regno;
948 p->next = cse_reg_info_used_list;
949 cse_reg_info_used_list = p;
950 if (!cse_reg_info_used_list_end)
951 cse_reg_info_used_list_end = p;
952 }
953
954 /* Cache this lookup; we tend to be looking up information about the
955 same register several times in a row. */
956 cached_regno = regno;
957 cached_cse_reg_info = p;
958
959 return p;
960 }
961
962 /* Clear the hash table and initialize each register with its own quantity,
963 for a new basic block. */
964
965 static void
966 new_basic_block (void)
967 {
968 int i;
969
970 next_qty = max_reg;
971
972 /* Clear out hash table state for this pass. */
973
974 memset (reg_hash, 0, sizeof reg_hash);
975
976 if (cse_reg_info_used_list)
977 {
978 cse_reg_info_used_list_end->next = cse_reg_info_free_list;
979 cse_reg_info_free_list = cse_reg_info_used_list;
980 cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
981 }
982 cached_cse_reg_info = 0;
983
984 CLEAR_HARD_REG_SET (hard_regs_in_table);
985
986 /* The per-quantity values used to be initialized here, but it is
987 much faster to initialize each as it is made in `make_new_qty'. */
988
989 for (i = 0; i < HASH_SIZE; i++)
990 {
991 struct table_elt *first;
992
993 first = table[i];
994 if (first != NULL)
995 {
996 struct table_elt *last = first;
997
998 table[i] = NULL;
999
1000 while (last->next_same_hash != NULL)
1001 last = last->next_same_hash;
1002
1003      /* Now relink this entire hash chain into
1004 the free element list. */
1005
1006 last->next_same_hash = free_element_chain;
1007 free_element_chain = first;
1008 }
1009 }
1010
1011 #ifdef HAVE_cc0
1012 prev_insn = 0;
1013 prev_insn_cc0 = 0;
1014 #endif
1015 }
1016
1017 /* Say that register REG contains a new quantity in mode MODE, one not
1018    held in any register before, and initialize that quantity.  */
1019
1020 static void
1021 make_new_qty (unsigned int reg, enum machine_mode mode)
1022 {
1023 int q;
1024 struct qty_table_elem *ent;
1025 struct reg_eqv_elem *eqv;
1026
1027 if (next_qty >= max_qty)
1028 abort ();
1029
1030 q = REG_QTY (reg) = next_qty++;
1031 ent = &qty_table[q];
1032 ent->first_reg = reg;
1033 ent->last_reg = reg;
1034 ent->mode = mode;
1035 ent->const_rtx = ent->const_insn = NULL_RTX;
1036 ent->comparison_code = UNKNOWN;
1037
1038 eqv = &reg_eqv_table[reg];
1039 eqv->next = eqv->prev = -1;
1040 }
1041
1042 /* Make reg NEW equivalent to reg OLD.
1043 OLD is not changing; NEW is. */
1044
1045 static void
1046 make_regs_eqv (unsigned int new, unsigned int old)
1047 {
1048 unsigned int lastr, firstr;
1049 int q = REG_QTY (old);
1050 struct qty_table_elem *ent;
1051
1052 ent = &qty_table[q];
1053
1054 /* Nothing should become eqv until it has a "non-invalid" qty number. */
1055 if (! REGNO_QTY_VALID_P (old))
1056 abort ();
1057
1058 REG_QTY (new) = q;
1059 firstr = ent->first_reg;
1060 lastr = ent->last_reg;
1061
1062 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
1063 hard regs. Among pseudos, if NEW will live longer than any other reg
1064 of the same qty, and that is beyond the current basic block,
1065 make it the new canonical replacement for this qty. */
1066 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
1067 /* Certain fixed registers might be of the class NO_REGS. This means
1068 that not only can they not be allocated by the compiler, but
1069 they cannot be used in substitutions or canonicalizations
1070 either. */
1071 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
1072 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
1073 || (new >= FIRST_PSEUDO_REGISTER
1074 && (firstr < FIRST_PSEUDO_REGISTER
1075 || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
1076 || (uid_cuid[REGNO_FIRST_UID (new)]
1077 < cse_basic_block_start))
1078 && (uid_cuid[REGNO_LAST_UID (new)]
1079 > uid_cuid[REGNO_LAST_UID (firstr)]))))))
1080 {
1081 reg_eqv_table[firstr].prev = new;
1082 reg_eqv_table[new].next = firstr;
1083 reg_eqv_table[new].prev = -1;
1084 ent->first_reg = new;
1085 }
1086 else
1087 {
1088 /* If NEW is a hard reg (known to be non-fixed), insert at end.
1089 Otherwise, insert before any non-fixed hard regs that are at the
1090 end. Registers of class NO_REGS cannot be used as an
1091 equivalent for anything. */
1092 while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
1093 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
1094 && new >= FIRST_PSEUDO_REGISTER)
1095 lastr = reg_eqv_table[lastr].prev;
1096 reg_eqv_table[new].next = reg_eqv_table[lastr].next;
1097 if (reg_eqv_table[lastr].next >= 0)
1098 reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
1099 else
1100 qty_table[q].last_reg = new;
1101 reg_eqv_table[lastr].next = new;
1102 reg_eqv_table[new].prev = lastr;
1103 }
1104 }
1105
1106 /* Remove REG from its equivalence class. */
1107
1108 static void
1109 delete_reg_equiv (unsigned int reg)
1110 {
1111 struct qty_table_elem *ent;
1112 int q = REG_QTY (reg);
1113 int p, n;
1114
1115 /* If invalid, do nothing. */
1116 if (q == (int) reg)
1117 return;
1118
1119 ent = &qty_table[q];
1120
1121 p = reg_eqv_table[reg].prev;
1122 n = reg_eqv_table[reg].next;
1123
1124 if (n != -1)
1125 reg_eqv_table[n].prev = p;
1126 else
1127 ent->last_reg = p;
1128 if (p != -1)
1129 reg_eqv_table[p].next = n;
1130 else
1131 ent->first_reg = n;
1132
1133 REG_QTY (reg) = reg;
1134 }
1135
1136 /* Remove any invalid expressions from the hash table
1137 that refer to any of the registers contained in expression X.
1138
1139 Make sure that newly inserted references to those registers
1140 as subexpressions will be considered valid.
1141
1142 mention_regs is not called when a register itself
1143 is being stored in the table.
1144
1145 Return 1 if we have done something that may have changed the hash code
1146 of X. */
1147
1148 static int
1149 mention_regs (rtx x)
1150 {
1151 enum rtx_code code;
1152 int i, j;
1153 const char *fmt;
1154 int changed = 0;
1155
1156 if (x == 0)
1157 return 0;
1158
1159 code = GET_CODE (x);
1160 if (code == REG)
1161 {
1162 unsigned int regno = REGNO (x);
1163 unsigned int endregno
1164 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1165 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
1166 unsigned int i;
1167
1168 for (i = regno; i < endregno; i++)
1169 {
1170 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1171 remove_invalid_refs (i);
1172
1173 REG_IN_TABLE (i) = REG_TICK (i);
1174 SUBREG_TICKED (i) = -1;
1175 }
1176
1177 return 0;
1178 }
1179
1180 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1181 pseudo if they don't use overlapping words. We handle only pseudos
1182 here for simplicity. */
1183 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1184 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1185 {
1186 unsigned int i = REGNO (SUBREG_REG (x));
1187
1188 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1189 {
1190 /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
1191 the last store to this register really stored into this
1192 subreg, then remove the memory of this subreg.
1193 Otherwise, remove any memory of the entire register and
1194 all its subregs from the table. */
1195 if (REG_TICK (i) - REG_IN_TABLE (i) > 1
1196 || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
1197 remove_invalid_refs (i);
1198 else
1199 remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1200 }
1201
1202 REG_IN_TABLE (i) = REG_TICK (i);
1203 SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
1204 return 0;
1205 }
1206
1207 /* If X is a comparison or a COMPARE and either operand is a register
1208 that does not have a quantity, give it one. This is so that a later
1209 call to record_jump_equiv won't cause X to be assigned a different
1210 hash code and not found in the table after that call.
1211
1212 It is not necessary to do this here, since rehash_using_reg can
1213 fix up the table later, but doing this here eliminates the need to
1214 call that expensive function in the most common case where the only
1215 use of the register is in the comparison. */
1216
1217 if (code == COMPARE || GET_RTX_CLASS (code) == '<')
1218 {
1219 if (GET_CODE (XEXP (x, 0)) == REG
1220 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1221 if (insert_regs (XEXP (x, 0), NULL, 0))
1222 {
1223 rehash_using_reg (XEXP (x, 0));
1224 changed = 1;
1225 }
1226
1227 if (GET_CODE (XEXP (x, 1)) == REG
1228 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1229 if (insert_regs (XEXP (x, 1), NULL, 0))
1230 {
1231 rehash_using_reg (XEXP (x, 1));
1232 changed = 1;
1233 }
1234 }
1235
1236 fmt = GET_RTX_FORMAT (code);
1237 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1238 if (fmt[i] == 'e')
1239 changed |= mention_regs (XEXP (x, i));
1240 else if (fmt[i] == 'E')
1241 for (j = 0; j < XVECLEN (x, i); j++)
1242 changed |= mention_regs (XVECEXP (x, i, j));
1243
1244 return changed;
1245 }
1246
1247 /* Update the register quantities for inserting X into the hash table
1248 with a value equivalent to CLASSP.
1249 (If the class does not contain a REG, it is irrelevant.)
1250 If MODIFIED is nonzero, X is a destination; it is being modified.
1251 Note that delete_reg_equiv should be called on a register
1252 before insert_regs is done on that register with MODIFIED != 0.
1253
1254 Nonzero value means that elements of reg_qty have changed
1255 so X's hash code may be different. */
1256
1257 static int
1258 insert_regs (rtx x, struct table_elt *classp, int modified)
1259 {
1260 if (GET_CODE (x) == REG)
1261 {
1262 unsigned int regno = REGNO (x);
1263 int qty_valid;
1264
1265 /* If REGNO is in the equivalence table already but is of the
1266 wrong mode for that equivalence, don't do anything here. */
1267
1268 qty_valid = REGNO_QTY_VALID_P (regno);
1269 if (qty_valid)
1270 {
1271 struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1272
1273 if (ent->mode != GET_MODE (x))
1274 return 0;
1275 }
1276
1277 if (modified || ! qty_valid)
1278 {
1279 if (classp)
1280 for (classp = classp->first_same_value;
1281 classp != 0;
1282 classp = classp->next_same_value)
1283 if (GET_CODE (classp->exp) == REG
1284 && GET_MODE (classp->exp) == GET_MODE (x))
1285 {
1286 make_regs_eqv (regno, REGNO (classp->exp));
1287 return 1;
1288 }
1289
1290 /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1291 than REG_IN_TABLE to find out if there was only a single preceding
1292 invalidation - for the SUBREG - or another one, which would be
1293 for the full register. However, if we find here that REG_TICK
1294 indicates that the register is invalid, it means that it has
1295 been invalidated in a separate operation. The SUBREG might be used
1296 now (then this is a recursive call), or we might use the full REG
1297 now and a SUBREG of it later. So bump up REG_TICK so that
1298 mention_regs will do the right thing. */
1299 if (! modified
1300 && REG_IN_TABLE (regno) >= 0
1301 && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1302 REG_TICK (regno)++;
1303 make_new_qty (regno, GET_MODE (x));
1304 return 1;
1305 }
1306
1307 return 0;
1308 }
1309
1310 /* If X is a SUBREG, we will likely be inserting the inner register in the
1311 table. If that register doesn't have an assigned quantity number at
1312 this point but does later, the insertion that we will be doing now will
1313 not be accessible because its hash code will have changed. So assign
1314 a quantity number now. */
1315
1316 else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1317 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1318 {
1319 insert_regs (SUBREG_REG (x), NULL, 0);
1320 mention_regs (x);
1321 return 1;
1322 }
1323 else
1324 return mention_regs (x);
1325 }
1326 \f
1327 /* Look in or update the hash table. */
1328
1329 /* Remove table element ELT from use in the table.
1330 HASH is its hash code, made using the HASH macro.
1331 It's an argument because often that is known in advance
1332 and we save much time not recomputing it. */
1333
1334 static void
1335 remove_from_table (struct table_elt *elt, unsigned int hash)
1336 {
1337 if (elt == 0)
1338 return;
1339
1340 /* Mark this element as removed. See cse_insn. */
1341 elt->first_same_value = 0;
1342
1343 /* Remove the table element from its equivalence class. */
1344
1345 {
1346 struct table_elt *prev = elt->prev_same_value;
1347 struct table_elt *next = elt->next_same_value;
1348
1349 if (next)
1350 next->prev_same_value = prev;
1351
1352 if (prev)
1353 prev->next_same_value = next;
1354 else
1355 {
1356 struct table_elt *newfirst = next;
1357 while (next)
1358 {
1359 next->first_same_value = newfirst;
1360 next = next->next_same_value;
1361 }
1362 }
1363 }
1364
1365 /* Remove the table element from its hash bucket. */
1366
1367 {
1368 struct table_elt *prev = elt->prev_same_hash;
1369 struct table_elt *next = elt->next_same_hash;
1370
1371 if (next)
1372 next->prev_same_hash = prev;
1373
1374 if (prev)
1375 prev->next_same_hash = next;
1376 else if (table[hash] == elt)
1377 table[hash] = next;
1378 else
1379 {
1380 /* This entry is not in the proper hash bucket. This can happen
1381 when two classes were merged by `merge_equiv_classes'. Search
1382 for the hash bucket that it heads. This happens only very
1383 rarely, so the cost is acceptable. */
1384 for (hash = 0; hash < HASH_SIZE; hash++)
1385 if (table[hash] == elt)
1386 table[hash] = next;
1387 }
1388 }
1389
1390 /* Remove the table element from its related-value circular chain. */
1391
1392 if (elt->related_value != 0 && elt->related_value != elt)
1393 {
1394 struct table_elt *p = elt->related_value;
1395
1396 while (p->related_value != elt)
1397 p = p->related_value;
1398 p->related_value = elt->related_value;
1399 if (p->related_value == p)
1400 p->related_value = 0;
1401 }
1402
1403 /* Now add it to the free element chain. */
1404 elt->next_same_hash = free_element_chain;
1405 free_element_chain = elt;
1406 }
1407
1408 /* Look up X in the hash table and return its table element,
1409 or 0 if X is not in the table.
1410
1411 MODE is the machine-mode of X, or if X is an integer constant
1412 with VOIDmode then MODE is the mode with which X will be used.
1413
1414 Here we are satisfied to find an expression whose tree structure
1415 looks like X. */
1416
1417 static struct table_elt *
1418 lookup (rtx x, unsigned int hash, enum machine_mode mode)
1419 {
1420 struct table_elt *p;
1421
1422 for (p = table[hash]; p; p = p->next_same_hash)
1423 if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1424 || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1425 return p;
1426
1427 return 0;
1428 }
1429
1430 /* Like `lookup' but don't care whether the table element uses invalid regs.
1431 Also ignore discrepancies in the machine mode of a register. */
1432
1433 static struct table_elt *
1434 lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
1435 {
1436 struct table_elt *p;
1437
1438 if (GET_CODE (x) == REG)
1439 {
1440 unsigned int regno = REGNO (x);
1441
1442 /* Don't check the machine mode when comparing registers;
1443 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1444 for (p = table[hash]; p; p = p->next_same_hash)
1445 if (GET_CODE (p->exp) == REG
1446 && REGNO (p->exp) == regno)
1447 return p;
1448 }
1449 else
1450 {
1451 for (p = table[hash]; p; p = p->next_same_hash)
1452 if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1453 return p;
1454 }
1455
1456 return 0;
1457 }
1458
1459 /* Look for an expression equivalent to X and with code CODE.
1460 If one is found, return that expression. */
1461
1462 static rtx
1463 lookup_as_function (rtx x, enum rtx_code code)
1464 {
1465 struct table_elt *p
1466 = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));
1467
1468 /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1469 long as we are narrowing. So if we looked in vain for a mode narrower
1470 than word_mode before, look for word_mode now. */
1471 if (p == 0 && code == CONST_INT
1472 && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1473 {
1474 x = copy_rtx (x);
1475 PUT_MODE (x, word_mode);
1476 p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
1477 }
1478
1479 if (p == 0)
1480 return 0;
1481
1482 for (p = p->first_same_value; p; p = p->next_same_value)
1483 if (GET_CODE (p->exp) == code
1484 /* Make sure this is a valid entry in the table. */
1485 && exp_equiv_p (p->exp, p->exp, 1, 0))
1486 return p->exp;
1487
1488 return 0;
1489 }
1490
1491 /* Insert X in the hash table, assuming HASH is its hash code
1492 and CLASSP is an element of the class it should go in
1493 (or 0 if a new class should be made).
1494 It is inserted at the proper position to keep the class in
1495 the order cheapest first.
1496
1497 MODE is the machine-mode of X, or if X is an integer constant
1498 with VOIDmode then MODE is the mode with which X will be used.
1499
1500 For elements of equal cheapness, the most recent one
1501 goes in front, except that the first element in the list
1502 remains first unless a cheaper element is added. The order of
1503 pseudo-registers does not matter, as canon_reg will be called to
1504 find the cheapest when a register is retrieved from the table.
1505
1506 The in_memory field in the hash table element is set to 0.
1507 The caller must set it nonzero if appropriate.
1508
1509 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1510 and if insert_regs returns a nonzero value
1511 you must then recompute its hash code before calling here.
1512
1513 If necessary, update table showing constant values of quantities. */
1514
1515 #define CHEAPER(X, Y) \
1516 (preferrable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
1517
1518 static struct table_elt *
1519 insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
1520 {
1521 struct table_elt *elt;
1522
1523 /* If X is a register and we haven't made a quantity for it,
1524 something is wrong. */
1525 if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1526 abort ();
1527
1528 /* If X is a hard register, show it is being put in the table. */
1529 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1530 {
1531 unsigned int regno = REGNO (x);
1532 unsigned int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1533 unsigned int i;
1534
1535 for (i = regno; i < endregno; i++)
1536 SET_HARD_REG_BIT (hard_regs_in_table, i);
1537 }
1538
1539 /* Put an element for X into the right hash bucket. */
1540
1541 elt = free_element_chain;
1542 if (elt)
1543 free_element_chain = elt->next_same_hash;
1544 else
1545 {
1546 n_elements_made++;
1547 elt = xmalloc (sizeof (struct table_elt));
1548 }
1549
1550 elt->exp = x;
1551 elt->canon_exp = NULL_RTX;
1552 elt->cost = COST (x);
1553 elt->regcost = approx_reg_cost (x);
1554 elt->next_same_value = 0;
1555 elt->prev_same_value = 0;
1556 elt->next_same_hash = table[hash];
1557 elt->prev_same_hash = 0;
1558 elt->related_value = 0;
1559 elt->in_memory = 0;
1560 elt->mode = mode;
1561 elt->is_const = (CONSTANT_P (x)
1562 /* GNU C++ takes advantage of this for `this'
1563 (and other const values). */
1564 || (GET_CODE (x) == REG
1565 && RTX_UNCHANGING_P (x)
1566 && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1567 || fixed_base_plus_p (x));
1568
1569 if (table[hash])
1570 table[hash]->prev_same_hash = elt;
1571 table[hash] = elt;
1572
1573 /* Put it into the proper value-class. */
1574 if (classp)
1575 {
1576 classp = classp->first_same_value;
1577 if (CHEAPER (elt, classp))
1578 /* Insert at the head of the class. */
1579 {
1580 struct table_elt *p;
1581 elt->next_same_value = classp;
1582 classp->prev_same_value = elt;
1583 elt->first_same_value = elt;
1584
1585 for (p = classp; p; p = p->next_same_value)
1586 p->first_same_value = elt;
1587 }
1588 else
1589 {
1590 /* Insert not at head of the class. */
1591 /* Put it after the last element cheaper than X. */
1592 struct table_elt *p, *next;
1593
1594 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1595 p = next);
1596
1597 /* Put it after P and before NEXT. */
1598 elt->next_same_value = next;
1599 if (next)
1600 next->prev_same_value = elt;
1601
1602 elt->prev_same_value = p;
1603 p->next_same_value = elt;
1604 elt->first_same_value = classp;
1605 }
1606 }
1607 else
1608 elt->first_same_value = elt;
1609
1610 /* If this is a constant being set equivalent to a register or a register
1611 being set equivalent to a constant, note the constant equivalence.
1612
1613 If this is a constant, it cannot be equivalent to a different constant,
1614 and a constant is the only thing that can be cheaper than a register. So
1615 we know the register is the head of the class (before the constant was
1616 inserted).
1617
1618 If this is a register that is not already known equivalent to a
1619 constant, we must check the entire class.
1620
1621    If this is a register that is already known equivalent to a constant,
1622 update the qtys `const_insn' to show that `this_insn' is the latest
1623 insn making that quantity equivalent to the constant. */
1624
1625 if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1626 && GET_CODE (x) != REG)
1627 {
1628 int exp_q = REG_QTY (REGNO (classp->exp));
1629 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1630
1631 exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
1632 exp_ent->const_insn = this_insn;
1633 }
1634
1635 else if (GET_CODE (x) == REG
1636 && classp
1637 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1638 && ! elt->is_const)
1639 {
1640 struct table_elt *p;
1641
1642 for (p = classp; p != 0; p = p->next_same_value)
1643 {
1644 if (p->is_const && GET_CODE (p->exp) != REG)
1645 {
1646 int x_q = REG_QTY (REGNO (x));
1647 struct qty_table_elem *x_ent = &qty_table[x_q];
1648
1649 x_ent->const_rtx
1650 = gen_lowpart_if_possible (GET_MODE (x), p->exp);
1651 x_ent->const_insn = this_insn;
1652 break;
1653 }
1654 }
1655 }
1656
1657 else if (GET_CODE (x) == REG
1658 && qty_table[REG_QTY (REGNO (x))].const_rtx
1659 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1660 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1661
1662 /* If this is a constant with symbolic value,
1663 and it has a term with an explicit integer value,
1664 link it up with related expressions. */
1665 if (GET_CODE (x) == CONST)
1666 {
1667 rtx subexp = get_related_value (x);
1668 unsigned subhash;
1669 struct table_elt *subelt, *subelt_prev;
1670
1671 if (subexp != 0)
1672 {
1673 /* Get the integer-free subexpression in the hash table. */
1674 subhash = safe_hash (subexp, mode) & HASH_MASK;
1675 subelt = lookup (subexp, subhash, mode);
1676 if (subelt == 0)
1677 subelt = insert (subexp, NULL, subhash, mode);
1678 /* Initialize SUBELT's circular chain if it has none. */
1679 if (subelt->related_value == 0)
1680 subelt->related_value = subelt;
1681 /* Find the element in the circular chain that precedes SUBELT. */
1682 subelt_prev = subelt;
1683 while (subelt_prev->related_value != subelt)
1684 subelt_prev = subelt_prev->related_value;
1685 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1686 This way the element that follows SUBELT is the oldest one. */
1687 elt->related_value = subelt_prev->related_value;
1688 subelt_prev->related_value = elt;
1689 }
1690 }
1691
1692 return elt;
1693 }
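/* Illustration (editor's example; the symbol "s" is hypothetical): after
   inserting (const (plus (symbol_ref "s") (const_int 4))) and then
   (const (plus (symbol_ref "s") (const_int 8))), the related_value
   chain is the circular list

       (symbol_ref "s") -> s+4 -> s+8 -> (symbol_ref "s")

   since each new element is spliced in just before the base entry,
   the element following the base is always the oldest.  */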
1694 \f
1695 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1696 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1697 the two classes equivalent.
1698
1699 CLASS1 will be the surviving class; CLASS2 should not be used after this
1700 call.
1701
1702 Any invalid entries in CLASS2 will not be copied. */
1703
1704 static void
1705 merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1706 {
1707 struct table_elt *elt, *next, *new;
1708
1709 /* Ensure we start with the head of the classes. */
1710 class1 = class1->first_same_value;
1711 class2 = class2->first_same_value;
1712
1713 /* If they were already equal, forget it. */
1714 if (class1 == class2)
1715 return;
1716
1717 for (elt = class2; elt; elt = next)
1718 {
1719 unsigned int hash;
1720 rtx exp = elt->exp;
1721 enum machine_mode mode = elt->mode;
1722
1723 next = elt->next_same_value;
1724
1725 /* Remove old entry, make a new one in CLASS1's class.
1726 Don't do this for invalid entries as we cannot find their
1727 hash code (it also isn't necessary). */
1728 if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1729 {
1730 hash_arg_in_memory = 0;
1731 hash = HASH (exp, mode);
1732
1733 if (GET_CODE (exp) == REG)
1734 delete_reg_equiv (REGNO (exp));
1735
1736 remove_from_table (elt, hash);
1737
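	  /* If insert_regs reports that register quantities changed
	     (an assumption about its nonzero return), table entries
	     mentioning EXP's registers may now hash differently, so
	     move them and recompute EXP's hash before re-inserting.  */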
1738 if (insert_regs (exp, class1, 0))
1739 {
1740 rehash_using_reg (exp);
1741 hash = HASH (exp, mode);
1742 }
1743 new = insert (exp, class1, hash, mode);
1744 new->in_memory = hash_arg_in_memory;
1745 }
1746 }
1747 }
1748 \f
1749 /* Flush the entire hash table. */
1750
1751 static void
1752 flush_hash_table (void)
1753 {
1754 int i;
1755 struct table_elt *p;
1756
1757 for (i = 0; i < HASH_SIZE; i++)
1758 for (p = table[i]; p; p = table[i])
1759 {
1760 /* Note that invalidate can remove elements
1761 after P in the current hash chain. */
1762 if (GET_CODE (p->exp) == REG)
1763 invalidate (p->exp, p->mode);
1764 else
1765 remove_from_table (p, i);
1766 }
1767 }
1768 \f
1769 /* Function called for each rtx to check whether a true dependence exists.  */
1770 struct check_dependence_data
1771 {
1772 enum machine_mode mode;
1773 rtx exp;
1774 rtx addr;
1775 };
1776
1777 static int
1778 check_dependence (rtx *x, void *data)
1779 {
1780 struct check_dependence_data *d = (struct check_dependence_data *) data;
1781 if (*x && GET_CODE (*x) == MEM)
1782 return canon_true_dependence (d->exp, d->mode, d->addr, *x,
1783 cse_rtx_varies_p);
1784 else
1785 return 0;
1786 }
1787 \f
1788 /* Remove from the hash table, or mark as invalid, all expressions whose
1789 values could be altered by storing in X. X is a register, a subreg, or
1790 a memory reference with nonvarying address (because, when a memory
1791 reference with a varying address is stored in, all memory references are
1792 removed by invalidate_memory so specific invalidation is superfluous).
1793 FULL_MODE, if not VOIDmode, indicates that this much should be
1794 invalidated instead of just the amount indicated by the mode of X. This
1795 is only used for bitfield stores into memory.
1796
1797 A nonvarying address may be just a register or just a symbol reference,
1798 or it may be either of those plus a numeric offset. */
1799
1800 static void
1801 invalidate (rtx x, enum machine_mode full_mode)
1802 {
1803 int i;
1804 struct table_elt *p;
1805 rtx addr;
1806
1807 switch (GET_CODE (x))
1808 {
1809 case REG:
1810 {
1811 /* If X is a register, dependencies on its contents are recorded
1812 through the qty number mechanism. Just change the qty number of
1813 the register, mark it as invalid for expressions that refer to it,
1814 and remove it itself. */
1815 unsigned int regno = REGNO (x);
1816 unsigned int hash = HASH (x, GET_MODE (x));
1817
1818 /* Remove REGNO from any quantity list it might be on and indicate
1819 that its value might have changed. If it is a pseudo, remove its
1820 entry from the hash table.
1821
1822 For a hard register, we do the first two actions above for any
1823 additional hard registers corresponding to X. Then, if any of these
1824 registers are in the table, we must remove any REG entries that
1825 overlap these registers. */
1826
1827 delete_reg_equiv (regno);
1828 REG_TICK (regno)++;
1829 SUBREG_TICKED (regno) = -1;
1830
1831 if (regno >= FIRST_PSEUDO_REGISTER)
1832 {
1833 /* Because a register can be referenced in more than one mode,
1834 we might have to remove more than one table entry. */
1835 struct table_elt *elt;
1836
1837 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1838 remove_from_table (elt, hash);
1839 }
1840 else
1841 {
1842 HOST_WIDE_INT in_table
1843 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1844 unsigned int endregno
1845 = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1846 unsigned int tregno, tendregno, rn;
1847 struct table_elt *p, *next;
1848
1849 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1850
1851 for (rn = regno + 1; rn < endregno; rn++)
1852 {
1853 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1854 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1855 delete_reg_equiv (rn);
1856 REG_TICK (rn)++;
1857 SUBREG_TICKED (rn) = -1;
1858 }
1859
1860 if (in_table)
1861 for (hash = 0; hash < HASH_SIZE; hash++)
1862 for (p = table[hash]; p; p = next)
1863 {
1864 next = p->next_same_hash;
1865
1866 if (GET_CODE (p->exp) != REG
1867 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1868 continue;
1869
1870 tregno = REGNO (p->exp);
1871 tendregno
1872 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1873 if (tendregno > regno && tregno < endregno)
1874 remove_from_table (p, hash);
1875 }
1876 }
1877 }
1878 return;
1879
1880 case SUBREG:
1881 invalidate (SUBREG_REG (x), VOIDmode);
1882 return;
1883
1884 case PARALLEL:
1885 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1886 invalidate (XVECEXP (x, 0, i), VOIDmode);
1887 return;
1888
1889 case EXPR_LIST:
1890 /* This is part of a disjoint return value; extract the location in
1891 question ignoring the offset. */
1892 invalidate (XEXP (x, 0), VOIDmode);
1893 return;
1894
1895 case MEM:
1896 addr = canon_rtx (get_addr (XEXP (x, 0)));
1897 /* Calculate the canonical version of X here so that
1898 true_dependence doesn't generate new RTL for X on each call. */
1899 x = canon_rtx (x);
1900
1901 /* Remove all hash table elements that refer to overlapping pieces of
1902 memory. */
1903 if (full_mode == VOIDmode)
1904 full_mode = GET_MODE (x);
1905
1906 for (i = 0; i < HASH_SIZE; i++)
1907 {
1908 struct table_elt *next;
1909
1910 for (p = table[i]; p; p = next)
1911 {
1912 next = p->next_same_hash;
1913 if (p->in_memory)
1914 {
1915 struct check_dependence_data d;
1916
1917 /* Just canonicalize the expression once;
1918 otherwise each time we call invalidate
1919 true_dependence will canonicalize the
1920 expression again. */
1921 if (!p->canon_exp)
1922 p->canon_exp = canon_rtx (p->exp);
1923 d.exp = x;
1924 d.addr = addr;
1925 d.mode = full_mode;
1926 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1927 remove_from_table (p, i);
1928 }
1929 }
1930 }
1931 return;
1932
1933 default:
1934 abort ();
1935 }
1936 }
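/* For illustration (assuming a target where DImode spans two hard
   registers): invalidating (reg:DI 0) bumps the ticks and deletes the
   quantities of hard regs 0 and 1, and the scan above also removes
   any overlapping table entry such as one for (reg:SI 1).  */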
1937 \f
1938 /* Remove all expressions that refer to register REGNO,
1939 since they are already invalid, and we are about to
1940 mark that register valid again and don't want the old
1941 expressions to reappear as valid. */
1942
1943 static void
1944 remove_invalid_refs (unsigned int regno)
1945 {
1946 unsigned int i;
1947 struct table_elt *p, *next;
1948
1949 for (i = 0; i < HASH_SIZE; i++)
1950 for (p = table[i]; p; p = next)
1951 {
1952 next = p->next_same_hash;
1953 if (GET_CODE (p->exp) != REG
1954 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1955 remove_from_table (p, i);
1956 }
1957 }
1958
1959 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1960 and mode MODE. */
1961 static void
1962 remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
1963 enum machine_mode mode)
1964 {
1965 unsigned int i;
1966 struct table_elt *p, *next;
1967 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
1968
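  /* Remove each entry that mentions REGNO, except plain REGs (those
     are handled through the quantity mechanism) and SUBREGs of REGNO
     whose bytes do not overlap [OFFSET, END]; a non-overlapping
     subreg reads none of the stored bytes and stays valid.  */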
1969 for (i = 0; i < HASH_SIZE; i++)
1970 for (p = table[i]; p; p = next)
1971 {
1972 rtx exp = p->exp;
1973 next = p->next_same_hash;
1974
1975 if (GET_CODE (exp) != REG
1976 && (GET_CODE (exp) != SUBREG
1977 || GET_CODE (SUBREG_REG (exp)) != REG
1978 || REGNO (SUBREG_REG (exp)) != regno
1979 || (((SUBREG_BYTE (exp)
1980 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
1981 && SUBREG_BYTE (exp) <= end))
1982 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1983 remove_from_table (p, i);
1984 }
1985 }
1986 \f
1987 /* Recompute the hash codes of any valid entries in the hash table that
1988 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1989
1990 This is called when we make a jump equivalence. */
1991
1992 static void
1993 rehash_using_reg (rtx x)
1994 {
1995 unsigned int i;
1996 struct table_elt *p, *next;
1997 unsigned hash;
1998
1999 if (GET_CODE (x) == SUBREG)
2000 x = SUBREG_REG (x);
2001
2002 /* If X is not a register or if the register is known not to be in any
2003 valid entries in the table, we have no work to do. */
2004
2005 if (GET_CODE (x) != REG
2006 || REG_IN_TABLE (REGNO (x)) < 0
2007 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2008 return;
2009
2010 /* Scan all hash chains looking for valid entries that mention X.
2011 If we find one and it is in the wrong hash chain, move it. We can skip
2012 objects that are registers, since they are handled specially. */
2013
2014 for (i = 0; i < HASH_SIZE; i++)
2015 for (p = table[i]; p; p = next)
2016 {
2017 next = p->next_same_hash;
2018 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
2019 && exp_equiv_p (p->exp, p->exp, 1, 0)
2020 && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
2021 {
2022 if (p->next_same_hash)
2023 p->next_same_hash->prev_same_hash = p->prev_same_hash;
2024
2025 if (p->prev_same_hash)
2026 p->prev_same_hash->next_same_hash = p->next_same_hash;
2027 else
2028 table[i] = p->next_same_hash;
2029
2030 p->next_same_hash = table[hash];
2031 p->prev_same_hash = 0;
2032 if (table[hash])
2033 table[hash]->prev_same_hash = p;
2034 table[hash] = p;
2035 }
2036 }
2037 }
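/* Why rehashing is needed: canon_hash hashes a REG through its
   quantity number (REG_QTY), so once a jump equivalence merges
   quantities, an expression mentioning X can hash onto a different
   chain than the one it currently sits on.  */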
2038 \f
2039 /* Remove from the hash table any expression that is a call-clobbered
2040 register. Also update their TICK values. */
2041
2042 static void
2043 invalidate_for_call (void)
2044 {
2045 unsigned int regno, endregno;
2046 unsigned int i;
2047 unsigned hash;
2048 struct table_elt *p, *next;
2049 int in_table = 0;
2050
2051 /* Go through all the hard registers. For each that is clobbered in
2052 a CALL_INSN, remove the register from quantity chains and update
2053 reg_tick if defined. Also see if any of these registers is currently
2054 in the table. */
2055
2056 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2057 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2058 {
2059 delete_reg_equiv (regno);
2060 if (REG_TICK (regno) >= 0)
2061 {
2062 REG_TICK (regno)++;
2063 SUBREG_TICKED (regno) = -1;
2064 }
2065
2066 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2067 }
2068
2069 /* In the case where we have no call-clobbered hard registers in the
2070 table, we are done. Otherwise, scan the table and remove any
2071 entry that overlaps a call-clobbered register. */
2072
2073 if (in_table)
2074 for (hash = 0; hash < HASH_SIZE; hash++)
2075 for (p = table[hash]; p; p = next)
2076 {
2077 next = p->next_same_hash;
2078
2079 if (GET_CODE (p->exp) != REG
2080 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2081 continue;
2082
2083 regno = REGNO (p->exp);
2084 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
2085
2086 for (i = regno; i < endregno; i++)
2087 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2088 {
2089 remove_from_table (p, hash);
2090 break;
2091 }
2092 }
2093 }
2094 \f
2095 /* Given an expression X of type CONST,
2096 and ELT which is its table entry (or 0 if it
2097 is not in the hash table),
2098 return an alternate expression for X as a register plus integer.
2099 If none can be found, return 0. */
2100
2101 static rtx
2102 use_related_value (rtx x, struct table_elt *elt)
2103 {
2104 struct table_elt *relt = 0;
2105 struct table_elt *p, *q;
2106 HOST_WIDE_INT offset;
2107
2108 /* First, is there anything related known?
2109 If we have a table element, we can tell from that.
2110 Otherwise, must look it up. */
2111
2112 if (elt != 0 && elt->related_value != 0)
2113 relt = elt;
2114 else if (elt == 0 && GET_CODE (x) == CONST)
2115 {
2116 rtx subexp = get_related_value (x);
2117 if (subexp != 0)
2118 relt = lookup (subexp,
2119 safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2120 GET_MODE (subexp));
2121 }
2122
2123 if (relt == 0)
2124 return 0;
2125
2126 /* Search all related table entries for one that has an
2127 equivalent register. */
2128
2129 p = relt;
2130 while (1)
2131 {
2132 /* This loop is strange in that it is executed in two different cases.
2133 The first is when X is already in the table. Then it is searching
2134 the RELATED_VALUE list of X's class (RELT). The second case is when
2135 X is not in the table. Then RELT points to a class for the related
2136 value.
2137
2138    Ensure that, whatever case we are in, we ignore classes that have
2139 the same value as X. */
2140
2141 if (rtx_equal_p (x, p->exp))
2142 q = 0;
2143 else
2144 for (q = p->first_same_value; q; q = q->next_same_value)
2145 if (GET_CODE (q->exp) == REG)
2146 break;
2147
2148 if (q)
2149 break;
2150
2151 p = p->related_value;
2152
2153 /* We went all the way around, so there is nothing to be found.
2154 Alternatively, perhaps RELT was in the table for some other reason
2155 and it has no related values recorded. */
2156 if (p == relt || p == 0)
2157 break;
2158 }
2159
2160 if (q == 0)
2161 return 0;
2162
2163 offset = (get_integer_term (x) - get_integer_term (p->exp));
2164 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
2165 return plus_constant (q->exp, offset);
2166 }
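/* Worked example (hypothetical RTL): if X is
   (const (plus (symbol_ref "s") (const_int 8))) and the related entry
   (const (plus (symbol_ref "s") (const_int 4))) has (reg 100) in its
   class, the offset is 8 - 4 = 4 and the result is
   (plus (reg 100) (const_int 4)).  */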
2167 \f
2168 /* Hash a string. Just add its bytes up. */
2169 static inline unsigned
2170 canon_hash_string (const char *ps)
2171 {
2172 unsigned hash = 0;
2173 const unsigned char *p = (const unsigned char *) ps;
2174
2175 if (p)
2176 while (*p)
2177 hash += *p++;
2178
2179 return hash;
2180 }
2181
2182 /* Hash an rtx. We are careful to make sure the value is never negative.
2183 Equivalent registers hash identically.
2184 MODE is used in hashing for CONST_INTs only;
2185 otherwise the mode of X is used.
2186
2187 Store 1 in do_not_record if any subexpression is volatile.
2188
2189 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2190 which does not have the RTX_UNCHANGING_P bit set.
2191
2192 Note that cse_insn knows that the hash code of a MEM expression
2193 is just (int) MEM plus the hash code of the address. */
2194
2195 static unsigned
2196 canon_hash (rtx x, enum machine_mode mode)
2197 {
2198 int i, j;
2199 unsigned hash = 0;
2200 enum rtx_code code;
2201 const char *fmt;
2202
2203 /* repeat is used to turn tail-recursion into iteration. */
2204 repeat:
2205 if (x == 0)
2206 return hash;
2207
2208 code = GET_CODE (x);
2209 switch (code)
2210 {
2211 case REG:
2212 {
2213 unsigned int regno = REGNO (x);
2214 bool record;
2215
2216 /* On some machines, we can't record any non-fixed hard register,
2217 because extending its life will cause reload problems. We
2218 consider ap, fp, sp, gp to be fixed for this purpose.
2219
2220 We also consider CCmode registers to be fixed for this purpose;
2221 failure to do so leads to failure to simplify 0<100 type of
2222 conditionals.
2223
2224 On all machines, we can't record any global registers.
2225 Nor should we record any register that is in a small
2226 class, as defined by CLASS_LIKELY_SPILLED_P. */
2227
2228 if (regno >= FIRST_PSEUDO_REGISTER)
2229 record = true;
2230 else if (x == frame_pointer_rtx
2231 || x == hard_frame_pointer_rtx
2232 || x == arg_pointer_rtx
2233 || x == stack_pointer_rtx
2234 || x == pic_offset_table_rtx)
2235 record = true;
2236 else if (global_regs[regno])
2237 record = false;
2238 else if (fixed_regs[regno])
2239 record = true;
2240 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2241 record = true;
2242 else if (SMALL_REGISTER_CLASSES)
2243 record = false;
2244 else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2245 record = false;
2246 else
2247 record = true;
2248
2249 if (!record)
2250 {
2251 do_not_record = 1;
2252 return 0;
2253 }
2254
2255 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2256 return hash;
2257 }
2258
2259 /* We handle SUBREG of a REG specially because the underlying
2260 reg changes its hash value with every value change; we don't
2261 want to have to forget unrelated subregs when one subreg changes. */
2262 case SUBREG:
2263 {
2264 if (GET_CODE (SUBREG_REG (x)) == REG)
2265 {
2266 hash += (((unsigned) SUBREG << 7)
2267 + REGNO (SUBREG_REG (x))
2268 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2269 return hash;
2270 }
2271 break;
2272 }
2273
2274 case CONST_INT:
2275 {
2276 unsigned HOST_WIDE_INT tem = INTVAL (x);
2277 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2278 return hash;
2279 }
2280
2281 case CONST_DOUBLE:
2282 /* This is like the general case, except that it only counts
2283 the integers representing the constant. */
2284 hash += (unsigned) code + (unsigned) GET_MODE (x);
2285 if (GET_MODE (x) != VOIDmode)
2286 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2287 else
2288 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2289 + (unsigned) CONST_DOUBLE_HIGH (x));
2290 return hash;
2291
2292 case CONST_VECTOR:
2293 {
2294 int units;
2295 rtx elt;
2296
2297 units = CONST_VECTOR_NUNITS (x);
2298
2299 for (i = 0; i < units; ++i)
2300 {
2301 elt = CONST_VECTOR_ELT (x, i);
2302 hash += canon_hash (elt, GET_MODE (elt));
2303 }
2304
2305 return hash;
2306 }
2307
2308 /* Assume there is only one rtx object for any given label. */
2309 case LABEL_REF:
2310 hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2311 return hash;
2312
2313 case SYMBOL_REF:
2314 hash += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2315 return hash;
2316
2317 case MEM:
2318 /* We don't record if marked volatile or if BLKmode since we don't
2319 know the size of the move. */
2320 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2321 {
2322 do_not_record = 1;
2323 return 0;
2324 }
2325 if (! RTX_UNCHANGING_P (x) || fixed_base_plus_p (XEXP (x, 0)))
2326 hash_arg_in_memory = 1;
2327
2328 /* Now that we have already found this special case,
2329 might as well speed it up as much as possible. */
2330 hash += (unsigned) MEM;
2331 x = XEXP (x, 0);
2332 goto repeat;
2333
2334 case USE:
2335 /* A USE that mentions non-volatile memory needs special
2336 handling since the MEM may be BLKmode which normally
2337 prevents an entry from being made. Pure calls are
2338 marked by a USE which mentions BLKmode memory. */
2339 if (GET_CODE (XEXP (x, 0)) == MEM
2340 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2341 {
2342 hash += (unsigned) USE;
2343 x = XEXP (x, 0);
2344
2345 if (! RTX_UNCHANGING_P (x) || fixed_base_plus_p (XEXP (x, 0)))
2346 hash_arg_in_memory = 1;
2347
2348 /* Now that we have already found this special case,
2349 might as well speed it up as much as possible. */
2350 hash += (unsigned) MEM;
2351 x = XEXP (x, 0);
2352 goto repeat;
2353 }
2354 break;
2355
2356 case PRE_DEC:
2357 case PRE_INC:
2358 case POST_DEC:
2359 case POST_INC:
2360 case PRE_MODIFY:
2361 case POST_MODIFY:
2362 case PC:
2363 case CC0:
2364 case CALL:
2365 case UNSPEC_VOLATILE:
2366 do_not_record = 1;
2367 return 0;
2368
2369 case ASM_OPERANDS:
2370 if (MEM_VOLATILE_P (x))
2371 {
2372 do_not_record = 1;
2373 return 0;
2374 }
2375 else
2376 {
2377 /* We don't want to take the filename and line into account. */
2378 hash += (unsigned) code + (unsigned) GET_MODE (x)
2379 + canon_hash_string (ASM_OPERANDS_TEMPLATE (x))
2380 + canon_hash_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2381 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2382
2383 if (ASM_OPERANDS_INPUT_LENGTH (x))
2384 {
2385 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2386 {
2387 hash += (canon_hash (ASM_OPERANDS_INPUT (x, i),
2388 GET_MODE (ASM_OPERANDS_INPUT (x, i)))
2389 + canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT
2390 (x, i)));
2391 }
2392
2393 hash += canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2394 x = ASM_OPERANDS_INPUT (x, 0);
2395 mode = GET_MODE (x);
2396 goto repeat;
2397 }
2398
2399 return hash;
2400 }
2401 break;
2402
2403 default:
2404 break;
2405 }
2406
2407 i = GET_RTX_LENGTH (code) - 1;
2408 hash += (unsigned) code + (unsigned) GET_MODE (x);
2409 fmt = GET_RTX_FORMAT (code);
2410 for (; i >= 0; i--)
2411 {
2412 if (fmt[i] == 'e')
2413 {
2414 rtx tem = XEXP (x, i);
2415
2416 /* If we are about to do the last recursive call
2417 needed at this level, change it into iteration.
2418 This function is called enough to be worth it. */
2419 if (i == 0)
2420 {
2421 x = tem;
2422 goto repeat;
2423 }
2424 hash += canon_hash (tem, 0);
2425 }
2426 else if (fmt[i] == 'E')
2427 for (j = 0; j < XVECLEN (x, i); j++)
2428 hash += canon_hash (XVECEXP (x, i, j), 0);
2429 else if (fmt[i] == 's')
2430 hash += canon_hash_string (XSTR (x, i));
2431 else if (fmt[i] == 'i')
2432 {
2433 unsigned tem = XINT (x, i);
2434 hash += tem;
2435 }
2436 else if (fmt[i] == '0' || fmt[i] == 't')
2437 /* Unused. */
2438 ;
2439 else
2440 abort ();
2441 }
2442 return hash;
2443 }
2444
2445 /* Like canon_hash but with no side effects. */
2446
2447 static unsigned
2448 safe_hash (rtx x, enum machine_mode mode)
2449 {
2450 int save_do_not_record = do_not_record;
2451 int save_hash_arg_in_memory = hash_arg_in_memory;
2452 unsigned hash = canon_hash (x, mode);
2453 hash_arg_in_memory = save_hash_arg_in_memory;
2454 do_not_record = save_do_not_record;
2455 return hash;
2456 }
2457 \f
2458 /* Return 1 iff X and Y would canonicalize into the same thing,
2459 without actually constructing the canonicalization of either one.
2460 If VALIDATE is nonzero,
2461 we assume X is an expression being processed from the rtl
2462 and Y was found in the hash table. We check register refs
2463 in Y for being marked as valid.
2464
2465 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2466 that is known to be in the register. Ordinarily, we don't allow them
2467 to match, because letting them match would cause unpredictable results
2468 in all the places that search a hash table chain for an equivalent
2469 for a given value. A possible equivalent that has different structure
2470 has its hash code computed from different data. Whether the hash code
2471 is the same as that of the given value is pure luck. */
2472
2473 static int
2474 exp_equiv_p (rtx x, rtx y, int validate, int equal_values)
2475 {
2476 int i, j;
2477 enum rtx_code code;
2478 const char *fmt;
2479
2480 /* Note: it is incorrect to assume an expression is equivalent to itself
2481 if VALIDATE is nonzero. */
2482 if (x == y && !validate)
2483 return 1;
2484 if (x == 0 || y == 0)
2485 return x == y;
2486
2487 code = GET_CODE (x);
2488 if (code != GET_CODE (y))
2489 {
2490 if (!equal_values)
2491 return 0;
2492
2493 /* If X is a constant and Y is a register or vice versa, they may be
2494 equivalent. We only have to validate if Y is a register. */
2495 if (CONSTANT_P (x) && GET_CODE (y) == REG
2496 && REGNO_QTY_VALID_P (REGNO (y)))
2497 {
2498 int y_q = REG_QTY (REGNO (y));
2499 struct qty_table_elem *y_ent = &qty_table[y_q];
2500
2501 if (GET_MODE (y) == y_ent->mode
2502 && rtx_equal_p (x, y_ent->const_rtx)
2503 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2504 return 1;
2505 }
2506
2507 if (CONSTANT_P (y) && code == REG
2508 && REGNO_QTY_VALID_P (REGNO (x)))
2509 {
2510 int x_q = REG_QTY (REGNO (x));
2511 struct qty_table_elem *x_ent = &qty_table[x_q];
2512
2513 if (GET_MODE (x) == x_ent->mode
2514 && rtx_equal_p (y, x_ent->const_rtx))
2515 return 1;
2516 }
2517
2518 return 0;
2519 }
2520
2521 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2522 if (GET_MODE (x) != GET_MODE (y))
2523 return 0;
2524
2525 switch (code)
2526 {
2527 case PC:
2528 case CC0:
2529 case CONST_INT:
2530 return x == y;
2531
2532 case LABEL_REF:
2533 return XEXP (x, 0) == XEXP (y, 0);
2534
2535 case SYMBOL_REF:
2536 return XSTR (x, 0) == XSTR (y, 0);
2537
2538 case REG:
2539 {
2540 unsigned int regno = REGNO (y);
2541 unsigned int endregno
2542 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2543 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2544 unsigned int i;
2545
2546 /* If the quantities are not the same, the expressions are not
2547    equivalent.  If they are and we are not to validate, they
2548 are equivalent. Otherwise, ensure all regs are up-to-date. */
2549
2550 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2551 return 0;
2552
2553 if (! validate)
2554 return 1;
2555
2556 for (i = regno; i < endregno; i++)
2557 if (REG_IN_TABLE (i) != REG_TICK (i))
2558 return 0;
2559
2560 return 1;
2561 }
2562
2563 /* For commutative operations, check both orders. */
2564 case PLUS:
2565 case MULT:
2566 case AND:
2567 case IOR:
2568 case XOR:
2569 case NE:
2570 case EQ:
2571 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2572 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2573 validate, equal_values))
2574 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2575 validate, equal_values)
2576 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2577 validate, equal_values)));
2578
2579 case ASM_OPERANDS:
2580 /* We don't use the generic code below because we want to
2581 disregard filename and line numbers. */
2582
2583 /* A volatile asm isn't equivalent to any other. */
2584 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2585 return 0;
2586
2587 if (GET_MODE (x) != GET_MODE (y)
2588 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2589 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2590 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2591 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2592 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2593 return 0;
2594
2595 if (ASM_OPERANDS_INPUT_LENGTH (x))
2596 {
2597 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2598 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2599 ASM_OPERANDS_INPUT (y, i),
2600 validate, equal_values)
2601 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2602 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2603 return 0;
2604 }
2605
2606 return 1;
2607
2608 default:
2609 break;
2610 }
2611
2612 /* Compare the elements. If any pair of corresponding elements
2613    fail to match, return 0 for the whole thing.  */
2614
2615 fmt = GET_RTX_FORMAT (code);
2616 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2617 {
2618 switch (fmt[i])
2619 {
2620 case 'e':
2621 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2622 return 0;
2623 break;
2624
2625 case 'E':
2626 if (XVECLEN (x, i) != XVECLEN (y, i))
2627 return 0;
2628 for (j = 0; j < XVECLEN (x, i); j++)
2629 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2630 validate, equal_values))
2631 return 0;
2632 break;
2633
2634 case 's':
2635 if (strcmp (XSTR (x, i), XSTR (y, i)))
2636 return 0;
2637 break;
2638
2639 case 'i':
2640 if (XINT (x, i) != XINT (y, i))
2641 return 0;
2642 break;
2643
2644 case 'w':
2645 if (XWINT (x, i) != XWINT (y, i))
2646 return 0;
2647 break;
2648
2649 case '0':
2650 case 't':
2651 break;
2652
2653 default:
2654 abort ();
2655 }
2656 }
2657
2658 return 1;
2659 }
2660 \f
2661 /* Return 1 if X has a value that can vary even between two
2662 executions of the program. 0 means X can be compared reliably
2663 against certain constants or near-constants. */
2664
2665 static int
2666 cse_rtx_varies_p (rtx x, int from_alias)
2667 {
2668 /* We need not check for X and the equivalence class being of the same
2669 mode because if X is equivalent to a constant in some mode, it
2670 doesn't vary in any mode. */
2671
2672 if (GET_CODE (x) == REG
2673 && REGNO_QTY_VALID_P (REGNO (x)))
2674 {
2675 int x_q = REG_QTY (REGNO (x));
2676 struct qty_table_elem *x_ent = &qty_table[x_q];
2677
2678 if (GET_MODE (x) == x_ent->mode
2679 && x_ent->const_rtx != NULL_RTX)
2680 return 0;
2681 }
2682
2683 if (GET_CODE (x) == PLUS
2684 && GET_CODE (XEXP (x, 1)) == CONST_INT
2685 && GET_CODE (XEXP (x, 0)) == REG
2686 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2687 {
2688 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2689 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2690
2691 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2692 && x0_ent->const_rtx != NULL_RTX)
2693 return 0;
2694 }
2695
2696 /* This can happen as the result of virtual register instantiation, if
2697 the initial constant is too large to be a valid address. This gives
2698 us a three instruction sequence, load large offset into a register,
2699 load fp minus a constant into a register, then a MEM which is the
2700 sum of the two `constant' registers. */
2701 if (GET_CODE (x) == PLUS
2702 && GET_CODE (XEXP (x, 0)) == REG
2703 && GET_CODE (XEXP (x, 1)) == REG
2704 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2705 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2706 {
2707 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2708 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2709 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2710 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2711
2712 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2713 && x0_ent->const_rtx != NULL_RTX
2714 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2715 && x1_ent->const_rtx != NULL_RTX)
2716 return 0;
2717 }
2718
2719 return rtx_varies_p (x, from_alias);
2720 }
2721 \f
2722 /* Canonicalize an expression:
2723 replace each register reference inside it
2724 with the "oldest" equivalent register.
2725
2726 If INSN is nonzero and we are replacing a pseudo with a hard register
2727 or vice versa, validate_change is used to ensure that INSN remains valid
2728 after we make our substitution. The calls are made with IN_GROUP nonzero
2729 so apply_change_group must be called upon the outermost return from this
2730 function (unless INSN is zero). The result of apply_change_group can
2731 generally be discarded since the changes we are making are optional. */
2732
2733 static rtx
2734 canon_reg (rtx x, rtx insn)
2735 {
2736 int i;
2737 enum rtx_code code;
2738 const char *fmt;
2739
2740 if (x == 0)
2741 return x;
2742
2743 code = GET_CODE (x);
2744 switch (code)
2745 {
2746 case PC:
2747 case CC0:
2748 case CONST:
2749 case CONST_INT:
2750 case CONST_DOUBLE:
2751 case CONST_VECTOR:
2752 case SYMBOL_REF:
2753 case LABEL_REF:
2754 case ADDR_VEC:
2755 case ADDR_DIFF_VEC:
2756 return x;
2757
2758 case REG:
2759 {
2760 int first;
2761 int q;
2762 struct qty_table_elem *ent;
2763
2764 /* Never replace a hard reg, because hard regs can appear
2765 in more than one machine mode, and we must preserve the mode
2766 of each occurrence. Also, some hard regs appear in
2767 MEMs that are shared and mustn't be altered. Don't try to
2768 replace any reg that maps to a reg of class NO_REGS. */
2769 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2770 || ! REGNO_QTY_VALID_P (REGNO (x)))
2771 return x;
2772
2773 q = REG_QTY (REGNO (x));
2774 ent = &qty_table[q];
2775 first = ent->first_reg;
2776 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2777 : REGNO_REG_CLASS (first) == NO_REGS ? x
2778 : gen_rtx_REG (ent->mode, first));
2779 }
2780
2781 default:
2782 break;
2783 }
2784
2785 fmt = GET_RTX_FORMAT (code);
2786 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2787 {
2788 int j;
2789
2790 if (fmt[i] == 'e')
2791 {
2792 rtx new = canon_reg (XEXP (x, i), insn);
2793 int insn_code;
2794
2795 /* If replacing pseudo with hard reg or vice versa, ensure the
2796 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2797 if (insn != 0 && new != 0
2798 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2799 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2800 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2801 || (insn_code = recog_memoized (insn)) < 0
2802 || insn_data[insn_code].n_dups > 0))
2803 validate_change (insn, &XEXP (x, i), new, 1);
2804 else
2805 XEXP (x, i) = new;
2806 }
2807 else if (fmt[i] == 'E')
2808 for (j = 0; j < XVECLEN (x, i); j++)
2809 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2810 }
2811
2812 return x;
2813 }
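/* For example (hypothetical pseudos): if (reg 105) belongs to a
   quantity whose oldest member is (reg 102), canon_reg rewrites
   (plus:SI (reg 105) (const_int 4)) as (plus:SI (reg 102)
   (const_int 4)); validate_change is involved only when a pseudo and
   a hard register are interchanged or the insn has MATCH_DUPs.  */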
2814 \f
2815 /* LOC is a location within INSN that is an operand address (the contents of
2816 a MEM). Find the best equivalent address to use that is valid for this
2817 insn.
2818
2819 On most CISC machines, complicated address modes are costly, and rtx_cost
2820 is a good approximation for that cost. However, most RISC machines have
2821 only a few (usually only one) memory reference formats. If an address is
2822 valid at all, it is often just as cheap as any other address. Hence, for
2823 RISC machines, we use `address_cost' to compare the costs of various
2824 addresses. For two addresses of equal cost, choose the one with the
2825 highest `rtx_cost' value as that has the potential of eliminating the
2826 most insns. For equal costs, we choose the first in the equivalence
2827 class. Note that we ignore the fact that pseudo registers are cheaper than
2828 hard registers here because we would also prefer the pseudo registers. */
2829
2830 static void
2831 find_best_addr (rtx insn, rtx *loc, enum machine_mode mode)
2832 {
2833 struct table_elt *elt;
2834 rtx addr = *loc;
2835 struct table_elt *p;
2836 int found_better = 1;
2837 int save_do_not_record = do_not_record;
2838 int save_hash_arg_in_memory = hash_arg_in_memory;
2839 int addr_volatile;
2840 int regno;
2841 unsigned hash;
2842
2843 /* Do not try to replace constant addresses or addresses of local and
2844 argument slots. These MEM expressions are made only once and inserted
2845 in many instructions, as well as being used to control symbol table
2846 output. It is not safe to clobber them.
2847
2848 There are some uncommon cases where the address is already in a register
2849 for some reason, but we cannot take advantage of that because we have
2850 no easy way to unshare the MEM. In addition, looking up all stack
2851 addresses is costly. */
2852 if ((GET_CODE (addr) == PLUS
2853 && GET_CODE (XEXP (addr, 0)) == REG
2854 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2855 && (regno = REGNO (XEXP (addr, 0)),
2856 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2857 || regno == ARG_POINTER_REGNUM))
2858 || (GET_CODE (addr) == REG
2859 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2860 || regno == HARD_FRAME_POINTER_REGNUM
2861 || regno == ARG_POINTER_REGNUM))
2862 || GET_CODE (addr) == ADDRESSOF
2863 || CONSTANT_ADDRESS_P (addr))
2864 return;
2865
2866 /* If this address is not simply a register, try to fold it. This will
2867 sometimes simplify the expression. Many simplifications
2868 will not be valid, but some, usually applying the associative rule, will
2869 be valid and produce better code. */
2870 if (GET_CODE (addr) != REG)
2871 {
2872 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2873 int addr_folded_cost = address_cost (folded, mode);
2874 int addr_cost = address_cost (addr, mode);
2875
2876 if ((addr_folded_cost < addr_cost
2877 || (addr_folded_cost == addr_cost
2878 /* ??? The rtx_cost comparison is left over from an older
2879 version of this code. It is probably no longer helpful. */
2880 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2881 || approx_reg_cost (folded) < approx_reg_cost (addr))))
2882 && validate_change (insn, loc, folded, 0))
2883 addr = folded;
2884 }
2885
2886 /* If this address is not in the hash table, we can't look for equivalences
2887 of the whole address. Also, ignore if volatile. */
2888
2889 do_not_record = 0;
2890 hash = HASH (addr, Pmode);
2891 addr_volatile = do_not_record;
2892 do_not_record = save_do_not_record;
2893 hash_arg_in_memory = save_hash_arg_in_memory;
2894
2895 if (addr_volatile)
2896 return;
2897
2898 elt = lookup (addr, hash, Pmode);
2899
2900 if (elt)
2901 {
2902 /* We need to find the best (under the criteria documented above) entry
2903 in the class that is valid. We use the `flag' field to indicate
2904 choices that were invalid and iterate until we can't find a better
2905 one that hasn't already been tried. */
2906
2907 for (p = elt->first_same_value; p; p = p->next_same_value)
2908 p->flag = 0;
2909
2910 while (found_better)
2911 {
2912 int best_addr_cost = address_cost (*loc, mode);
2913 int best_rtx_cost = (elt->cost + 1) >> 1;
2914 int exp_cost;
2915 struct table_elt *best_elt = elt;
2916
2917 found_better = 0;
2918 for (p = elt->first_same_value; p; p = p->next_same_value)
2919 if (! p->flag)
2920 {
2921 if ((GET_CODE (p->exp) == REG
2922 || exp_equiv_p (p->exp, p->exp, 1, 0))
2923 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2924 || (exp_cost == best_addr_cost
2925 && ((p->cost + 1) >> 1) > best_rtx_cost)))
2926 {
2927 found_better = 1;
2928 best_addr_cost = exp_cost;
2929 best_rtx_cost = (p->cost + 1) >> 1;
2930 best_elt = p;
2931 }
2932 }
2933
2934 if (found_better)
2935 {
2936 if (validate_change (insn, loc,
2937 canon_reg (copy_rtx (best_elt->exp),
2938 NULL_RTX), 0))
2939 return;
2940 else
2941 best_elt->flag = 1;
2942 }
2943 }
2944 }
2945
2946 /* If the address is a binary operation with the first operand a register
2947 and the second a constant, do the same as above, but looking for
2948 equivalences of the register. Then try to simplify before checking for
2949 the best address to use. This catches a few cases: First is when we
2950 have REG+const and the register is another REG+const. We can often merge
2951 the constants and eliminate one insn and one register. It may also be
2952 that a machine has a cheap REG+REG+const. Finally, this improves the
2953 code on the Alpha for unaligned byte stores. */
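/* For instance (hypothetical registers): if *LOC is
   (plus (reg 100) (const_int 8)) and (reg 100) is known equivalent to
   (plus (reg 101) (const_int 4)), simplify_gen_binary below can
   produce (plus (reg 101) (const_int 12)), often a cheaper address.  */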
2954
2955 if (flag_expensive_optimizations
2956 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2957 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2958 && GET_CODE (XEXP (*loc, 0)) == REG)
2959 {
2960 rtx op1 = XEXP (*loc, 1);
2961
2962 do_not_record = 0;
2963 hash = HASH (XEXP (*loc, 0), Pmode);
2964 do_not_record = save_do_not_record;
2965 hash_arg_in_memory = save_hash_arg_in_memory;
2966
2967 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2968 if (elt == 0)
2969 return;
2970
2971 /* We need to find the best (under the criteria documented above) entry
2972 in the class that is valid. We use the `flag' field to indicate
2973 choices that were invalid and iterate until we can't find a better
2974 one that hasn't already been tried. */
2975
2976 for (p = elt->first_same_value; p; p = p->next_same_value)
2977 p->flag = 0;
2978
2979 while (found_better)
2980 {
2981 int best_addr_cost = address_cost (*loc, mode);
2982 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2983 struct table_elt *best_elt = elt;
2984 rtx best_rtx = *loc;
2985 int count;
2986
2987          /* This is, in the worst case, an O(n^2) algorithm, so limit our search
2988 to the first 32 elements on the list. This avoids trouble
2989 compiling code with very long basic blocks that can easily
2990 call simplify_gen_binary so many times that we run out of
2991 memory. */
2992
2993 found_better = 0;
2994 for (p = elt->first_same_value, count = 0;
2995 p && count < 32;
2996 p = p->next_same_value, count++)
2997 if (! p->flag
2998 && (GET_CODE (p->exp) == REG
2999 || exp_equiv_p (p->exp, p->exp, 1, 0)))
3000 {
3001 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3002 p->exp, op1);
3003 int new_cost;
3004 new_cost = address_cost (new, mode);
3005
3006 if (new_cost < best_addr_cost
3007 || (new_cost == best_addr_cost
3008 && (COST (new) + 1) >> 1 > best_rtx_cost))
3009 {
3010 found_better = 1;
3011 best_addr_cost = new_cost;
3012 best_rtx_cost = (COST (new) + 1) >> 1;
3013 best_elt = p;
3014 best_rtx = new;
3015 }
3016 }
3017
3018 if (found_better)
3019 {
3020 if (validate_change (insn, loc,
3021 canon_reg (copy_rtx (best_rtx),
3022 NULL_RTX), 0))
3023 return;
3024 else
3025 best_elt->flag = 1;
3026 }
3027 }
3028 }
3029 }
3030 \f
3031 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3032    operation (EQ, NE, GT, etc.), follow it back through the hash table to
3033    find what values are actually being compared.
3034
3035 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3036 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3037 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3038 compared to produce cc0.
3039
3040    The return value is the code of the comparison actually being done,
3041    or the code corresponding to the inverse of that comparison.  */
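/* Worked example (hypothetical registers): if CODE is NE, *PARG1 is
   (reg 100), *PARG2 is (const_int 0), and (reg 100) is known
   equivalent to (lt (reg 101) (reg 102)), this returns LT with
   *PARG1 = (reg 101) and *PARG2 = (reg 102).  */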
3042
3043 static enum rtx_code
3044 find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
3045 enum machine_mode *pmode1, enum machine_mode *pmode2)
3046 {
3047 rtx arg1, arg2;
3048
3049 arg1 = *parg1, arg2 = *parg2;
3050
3051 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3052
3053 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3054 {
3055 /* Set nonzero when we find something of interest. */
3056 rtx x = 0;
3057 int reverse_code = 0;
3058 struct table_elt *p = 0;
3059
3060 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3061 On machines with CC0, this is the only case that can occur, since
3062 fold_rtx will return the COMPARE or item being compared with zero
3063 when given CC0. */
3064
3065 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3066 x = arg1;
3067
3068 /* If ARG1 is a comparison operator and CODE is testing for
3069 STORE_FLAG_VALUE, get the inner arguments. */
3070
3071 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
3072 {
3073 #ifdef FLOAT_STORE_FLAG_VALUE
3074 REAL_VALUE_TYPE fsfv;
3075 #endif
3076
3077 if (code == NE
3078 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3079 && code == LT && STORE_FLAG_VALUE == -1)
3080 #ifdef FLOAT_STORE_FLAG_VALUE
3081 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3082 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3083 REAL_VALUE_NEGATIVE (fsfv)))
3084 #endif
3085 )
3086 x = arg1;
3087 else if (code == EQ
3088 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3089 && code == GE && STORE_FLAG_VALUE == -1)
3090 #ifdef FLOAT_STORE_FLAG_VALUE
3091 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3092 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3093 REAL_VALUE_NEGATIVE (fsfv)))
3094 #endif
3095 )
3096 x = arg1, reverse_code = 1;
3097 }
3098
3099 /* ??? We could also check for
3100
3101 (ne (and (eq (...) (const_int 1))) (const_int 0))
3102
3103 and related forms, but let's wait until we see them occurring. */
3104
3105 if (x == 0)
3106 /* Look up ARG1 in the hash table and see if it has an equivalence
3107 that lets us see what is being compared. */
3108 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
3109 GET_MODE (arg1));
3110 if (p)
3111 {
3112 p = p->first_same_value;
3113
3114 /* If what we compare is already known to be constant, that is as
3115 good as it gets.
3116             We need to break the loop in this case; otherwise we could loop
3117             forever: a reg that is known to be a constant may be the same
3118             as a comparison of a reg against zero that appears later in
3119             the insn stream, which in turn is constant and the same as
3120             the comparison of the first reg against zero, and so on
3121             forever...  */
3122 if (p->is_const)
3123 break;
3124 }
3125
3126 for (; p; p = p->next_same_value)
3127 {
3128 enum machine_mode inner_mode = GET_MODE (p->exp);
3129 #ifdef FLOAT_STORE_FLAG_VALUE
3130 REAL_VALUE_TYPE fsfv;
3131 #endif
3132
3133 /* If the entry isn't valid, skip it. */
3134 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3135 continue;
3136
3137 if (GET_CODE (p->exp) == COMPARE
3138 /* Another possibility is that this machine has a compare insn
3139 that includes the comparison code. In that case, ARG1 would
3140 be equivalent to a comparison operation that would set ARG1 to
3141 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3142 ORIG_CODE is the actual comparison being done; if it is an EQ,
3143             we must reverse ORIG_CODE.  On machines with a negative value
3144 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3145 || ((code == NE
3146 || (code == LT
3147 && GET_MODE_CLASS (inner_mode) == MODE_INT
3148 && (GET_MODE_BITSIZE (inner_mode)
3149 <= HOST_BITS_PER_WIDE_INT)
3150 && (STORE_FLAG_VALUE
3151 & ((HOST_WIDE_INT) 1
3152 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3153 #ifdef FLOAT_STORE_FLAG_VALUE
3154 || (code == LT
3155 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3156 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3157 REAL_VALUE_NEGATIVE (fsfv)))
3158 #endif
3159 )
3160 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3161 {
3162 x = p->exp;
3163 break;
3164 }
3165 else if ((code == EQ
3166 || (code == GE
3167 && GET_MODE_CLASS (inner_mode) == MODE_INT
3168 && (GET_MODE_BITSIZE (inner_mode)
3169 <= HOST_BITS_PER_WIDE_INT)
3170 && (STORE_FLAG_VALUE
3171 & ((HOST_WIDE_INT) 1
3172 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3173 #ifdef FLOAT_STORE_FLAG_VALUE
3174 || (code == GE
3175 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3176 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3177 REAL_VALUE_NEGATIVE (fsfv)))
3178 #endif
3179 )
3180 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3181 {
3182 reverse_code = 1;
3183 x = p->exp;
3184 break;
3185 }
3186
3187          /* If this is a non-trapping address, e.g. fp + constant, the
3188 equivalent is a better operand since it may let us predict
3189 the value of the comparison. */
3190 else if (!rtx_addr_can_trap_p (p->exp))
3191 {
3192 arg1 = p->exp;
3193 continue;
3194 }
3195 }
3196
3197 /* If we didn't find a useful equivalence for ARG1, we are done.
3198 Otherwise, set up for the next iteration. */
3199 if (x == 0)
3200 break;
3201
3202       /* If we need to reverse the comparison, make sure that is
3203 possible -- we can't necessarily infer the value of GE from LT
3204 with floating-point operands. */
3205 if (reverse_code)
3206 {
3207 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3208 if (reversed == UNKNOWN)
3209 break;
3210 else
3211 code = reversed;
3212 }
3213 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3214 code = GET_CODE (x);
3215 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3216 }
3217
3218 /* Return our results. Return the modes from before fold_rtx
3219 because fold_rtx might produce const_int, and then it's too late. */
3220 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3221 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3222
3223 return code;
3224 }
3225 \f
3226 /* If X is a nontrivial arithmetic operation on an argument
3227 for which a constant value can be determined, return
3228 the result of operating on that value, as a constant.
3229 Otherwise, return X, possibly with one or more operands
3230 modified by recursive calls to this function.
3231
3232 If X is a register whose contents are known, we do NOT
3233 return those contents here. equiv_constant is called to
3234 perform that task.
3235
3236 INSN is the insn that we may be modifying. If it is 0, make a copy
3237 of X before modifying it. */
3238
3239 static rtx
3240 fold_rtx (rtx x, rtx insn)
3241 {
3242 enum rtx_code code;
3243 enum machine_mode mode;
3244 const char *fmt;
3245 int i;
3246 rtx new = 0;
3247 int copied = 0;
3248 int must_swap = 0;
3249
3250 /* Folded equivalents of first two operands of X. */
3251 rtx folded_arg0;
3252 rtx folded_arg1;
3253
3254 /* Constant equivalents of first three operands of X;
3255 0 when no such equivalent is known. */
3256 rtx const_arg0;
3257 rtx const_arg1;
3258 rtx const_arg2;
3259
3260 /* The mode of the first operand of X. We need this for sign and zero
3261 extends. */
3262 enum machine_mode mode_arg0;
3263
3264 if (x == 0)
3265 return x;
3266
3267 mode = GET_MODE (x);
3268 code = GET_CODE (x);
3269 switch (code)
3270 {
3271 case CONST:
3272 case CONST_INT:
3273 case CONST_DOUBLE:
3274 case CONST_VECTOR:
3275 case SYMBOL_REF:
3276 case LABEL_REF:
3277 case REG:
3278 /* No use simplifying an EXPR_LIST
3279 since they are used only for lists of args
3280 in a function call's REG_EQUAL note. */
3281 case EXPR_LIST:
3282 /* Changing anything inside an ADDRESSOF is incorrect; we don't
3283 want to (e.g.,) make (addressof (const_int 0)) just because
3284 the location is known to be zero. */
3285 case ADDRESSOF:
3286 return x;
3287
3288 #ifdef HAVE_cc0
3289 case CC0:
3290 return prev_insn_cc0;
3291 #endif
3292
3293 case PC:
3294 /* If the next insn is a CODE_LABEL followed by a jump table,
3295 PC's value is a LABEL_REF pointing to that label. That
3296 lets us fold switch statements on the VAX. */
3297 {
3298 rtx next;
3299 if (insn && tablejump_p (insn, &next, NULL))
3300 return gen_rtx_LABEL_REF (Pmode, next);
3301 }
3302 break;
3303
3304 case SUBREG:
3305 /* See if we previously assigned a constant value to this SUBREG. */
3306 if ((new = lookup_as_function (x, CONST_INT)) != 0
3307 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3308 return new;
3309
3310 /* If this is a paradoxical SUBREG, we have no idea what value the
3311 extra bits would have. However, if the operand is equivalent
3312 to a SUBREG whose operand is the same as our mode, and all the
3313 modes are within a word, we can just use the inner operand
3314 because these SUBREGs just say how to treat the register.
3315
3316 Similarly if we find an integer constant. */
3317
3318 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3319 {
3320 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3321 struct table_elt *elt;
3322
3323 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3324 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3325 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3326 imode)) != 0)
3327 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3328 {
3329 if (CONSTANT_P (elt->exp)
3330 && GET_MODE (elt->exp) == VOIDmode)
3331 return elt->exp;
3332
3333 if (GET_CODE (elt->exp) == SUBREG
3334 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3335 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3336 return copy_rtx (SUBREG_REG (elt->exp));
3337 }
3338
3339 return x;
3340 }
3341
3342 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3343 We might be able to if the SUBREG is extracting a single word in an
3344 integral mode or extracting the low part. */
3345
3346 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3347 const_arg0 = equiv_constant (folded_arg0);
3348 if (const_arg0)
3349 folded_arg0 = const_arg0;
3350
3351 if (folded_arg0 != SUBREG_REG (x))
3352 {
3353 new = simplify_subreg (mode, folded_arg0,
3354 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3355 if (new)
3356 return new;
3357 }
3358
3359 if (GET_CODE (folded_arg0) == REG
3360 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0)))
3361 {
3362 struct table_elt *elt;
3363
3364 /* We can use HASH here since we know that canon_hash won't be
3365 called. */
3366 elt = lookup (folded_arg0,
3367 HASH (folded_arg0, GET_MODE (folded_arg0)),
3368 GET_MODE (folded_arg0));
3369
3370 if (elt)
3371 elt = elt->first_same_value;
3372
3373 if (subreg_lowpart_p (x))
3374 /* If this is a narrowing SUBREG and our operand is a REG, see
3375 if we can find an equivalence for REG that is an arithmetic
3376 operation in a wider mode where both operands are paradoxical
3377 SUBREGs from objects of our result mode. In that case, we
3378             couldn't report an equivalent value for that operation, since we
3379 don't know what the extra bits will be. But we can find an
3380 equivalence for this SUBREG by folding that operation in the
3381 narrow mode. This allows us to fold arithmetic in narrow modes
3382 when the machine only supports word-sized arithmetic.
3383
3384 Also look for a case where we have a SUBREG whose operand
3385 is the same as our result. If both modes are smaller
3386 than a word, we are simply interpreting a register in
3387 different modes and we can use the inner value. */
3388
3389 for (; elt; elt = elt->next_same_value)
3390 {
3391 enum rtx_code eltcode = GET_CODE (elt->exp);
3392
3393 /* Just check for unary and binary operations. */
3394 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
3395 && GET_CODE (elt->exp) != SIGN_EXTEND
3396 && GET_CODE (elt->exp) != ZERO_EXTEND
3397 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3398 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3399 && (GET_MODE_CLASS (mode)
3400 == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3401 {
3402 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3403
3404 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3405 op0 = fold_rtx (op0, NULL_RTX);
3406
3407 op0 = equiv_constant (op0);
3408 if (op0)
3409 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3410 op0, mode);
3411 }
3412 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
3413 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
3414 && eltcode != DIV && eltcode != MOD
3415 && eltcode != UDIV && eltcode != UMOD
3416 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3417 && eltcode != ROTATE && eltcode != ROTATERT
3418 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3419 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3420 == mode))
3421 || CONSTANT_P (XEXP (elt->exp, 0)))
3422 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3423 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3424 == mode))
3425 || CONSTANT_P (XEXP (elt->exp, 1))))
3426 {
3427 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3428 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3429
3430 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3431 op0 = fold_rtx (op0, NULL_RTX);
3432
3433 if (op0)
3434 op0 = equiv_constant (op0);
3435
3436 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3437 op1 = fold_rtx (op1, NULL_RTX);
3438
3439 if (op1)
3440 op1 = equiv_constant (op1);
3441
3442 /* If we are looking for the low SImode part of
3443 (ashift:DI c (const_int 32)), it doesn't work
3444 to compute that in SImode, because a 32-bit shift
3445 in SImode is unpredictable. We know the value is 0. */
3446 if (op0 && op1
3447 && GET_CODE (elt->exp) == ASHIFT
3448 && GET_CODE (op1) == CONST_INT
3449 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3450 {
3451 if (INTVAL (op1)
3452 < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3453 /* If the count fits in the inner mode's width,
3454 but exceeds the outer mode's width,
3455 the value will get truncated to 0
3456 by the subreg. */
3457 new = CONST0_RTX (mode);
3458 else
3459 /* If the count exceeds even the inner mode's width,
3460 don't fold this expression. */
3461 new = 0;
3462 }
3463 else if (op0 && op1)
3464 new = simplify_binary_operation (GET_CODE (elt->exp), mode, op0, op1);
3465 }
3466
3467 else if (GET_CODE (elt->exp) == SUBREG
3468 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3469 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3470 <= UNITS_PER_WORD)
3471 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3472 new = copy_rtx (SUBREG_REG (elt->exp));
3473
3474 if (new)
3475 return new;
3476 }
3477 else
3478 /* A SUBREG resulting from a zero extension may fold to zero if
3479 it extracts higher bits than the ZERO_EXTEND's source bits.
3480 FIXME: if combine tried to, er, combine these instructions,
3481 this transformation may be moved to simplify_subreg. */
3482 for (; elt; elt = elt->next_same_value)
3483 {
3484 if (GET_CODE (elt->exp) == ZERO_EXTEND
3485 && subreg_lsb (x)
3486 >= GET_MODE_BITSIZE (GET_MODE (XEXP (elt->exp, 0))))
3487 return CONST0_RTX (mode);
3488 }
3489 }
3490
3491 return x;
3492
3493 case NOT:
3494 case NEG:
3495 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3496 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3497 new = lookup_as_function (XEXP (x, 0), code);
3498 if (new)
3499 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3500 break;
3501
3502 case MEM:
3503 /* If we are not actually processing an insn, don't try to find the
3504 best address. Not only don't we care, but we could modify the
3505 MEM in an invalid way since we have no insn to validate against. */
3506 if (insn != 0)
3507 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3508
3509 {
3510 /* Even if we don't fold in the insn itself,
3511 we can safely do so here, in hopes of getting a constant. */
3512 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3513 rtx base = 0;
3514 HOST_WIDE_INT offset = 0;
3515
3516 if (GET_CODE (addr) == REG
3517 && REGNO_QTY_VALID_P (REGNO (addr)))
3518 {
3519 int addr_q = REG_QTY (REGNO (addr));
3520 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3521
3522 if (GET_MODE (addr) == addr_ent->mode
3523 && addr_ent->const_rtx != NULL_RTX)
3524 addr = addr_ent->const_rtx;
3525 }
3526
3527 /* If address is constant, split it into a base and integer offset. */
3528 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3529 base = addr;
3530 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3531 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3532 {
3533 base = XEXP (XEXP (addr, 0), 0);
3534 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3535 }
3536 else if (GET_CODE (addr) == LO_SUM
3537 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3538 base = XEXP (addr, 1);
3539 else if (GET_CODE (addr) == ADDRESSOF)
3540 return change_address (x, VOIDmode, addr);
3541
3542 /* If this is a constant pool reference, we can fold it into its
3543 constant to allow better value tracking. */
3544 if (base && GET_CODE (base) == SYMBOL_REF
3545 && CONSTANT_POOL_ADDRESS_P (base))
3546 {
3547 rtx constant = get_pool_constant (base);
3548 enum machine_mode const_mode = get_pool_mode (base);
3549 rtx new;
3550
3551 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3552 {
3553 constant_pool_entries_cost = COST (constant);
3554 constant_pool_entries_regcost = approx_reg_cost (constant);
3555 }
3556
3557 /* If we are loading the full constant, we have an equivalence. */
3558 if (offset == 0 && mode == const_mode)
3559 return constant;
3560
3561 /* If this actually isn't a constant (weird!), we can't do
3562 anything. Otherwise, handle the two most common cases:
3563 extracting a word from a multi-word constant, and extracting
3564 the low-order bits. Other cases don't seem common enough to
3565 worry about. */
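     /* E.g., with 4-byte words, an SImode load at offset 4 from a
        DImode pool constant is folded by operand_subword to word 1 of
        that constant (the offset here is illustrative).  */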
3566 if (! CONSTANT_P (constant))
3567 return x;
3568
3569 if (GET_MODE_CLASS (mode) == MODE_INT
3570 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3571 && offset % UNITS_PER_WORD == 0
3572 && (new = operand_subword (constant,
3573 offset / UNITS_PER_WORD,
3574 0, const_mode)) != 0)
3575 return new;
3576
3577 if (((BYTES_BIG_ENDIAN
3578 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3579 || (! BYTES_BIG_ENDIAN && offset == 0))
3580 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
3581 return new;
3582 }
3583
3584 /* If this is a reference to a label at a known position in a jump
3585 table, we also know its value. */
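     /* Sketch (numbers illustrative): for an address
        (const (plus (label_ref L) (const_int 8))) and 4-byte ADDR_VEC
        entries, the load folds to element 2 of the vector.  */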
3586 if (base && GET_CODE (base) == LABEL_REF)
3587 {
3588 rtx label = XEXP (base, 0);
3589 rtx table_insn = NEXT_INSN (label);
3590
3591 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3592 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3593 {
3594 rtx table = PATTERN (table_insn);
3595
3596 if (offset >= 0
3597 && (offset / GET_MODE_SIZE (GET_MODE (table))
3598 < XVECLEN (table, 0)))
3599 return XVECEXP (table, 0,
3600 offset / GET_MODE_SIZE (GET_MODE (table)));
3601 }
3602 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3603 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3604 {
3605 rtx table = PATTERN (table_insn);
3606
3607 if (offset >= 0
3608 && (offset / GET_MODE_SIZE (GET_MODE (table))
3609 < XVECLEN (table, 1)))
3610 {
3611 offset /= GET_MODE_SIZE (GET_MODE (table));
3612 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3613 XEXP (table, 0));
3614
3615 if (GET_MODE (table) != Pmode)
3616 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3617
3618 /* Indicate this is a constant. This isn't a
3619 valid form of CONST, but it will only be used
3620 to fold the next insns and then discarded, so
3621 it should be safe.
3622
3623 Note this expression must be explicitly discarded,
3624 by cse_insn, else it may end up in a REG_EQUAL note
3625 and "escape" to cause problems elsewhere. */
3626 return gen_rtx_CONST (GET_MODE (new), new);
3627 }
3628 }
3629 }
3630
3631 return x;
3632 }
3633
3634 #ifdef NO_FUNCTION_CSE
3635 case CALL:
3636 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3637 return x;
3638 break;
3639 #endif
3640
3641 case ASM_OPERANDS:
3642 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3643 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3644 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3645 break;
3646
3647 default:
3648 break;
3649 }
3650
3651 const_arg0 = 0;
3652 const_arg1 = 0;
3653 const_arg2 = 0;
3654 mode_arg0 = VOIDmode;
3655
3656 /* Try folding our operands.
3657 Then see which ones have constant values known. */
3658
3659 fmt = GET_RTX_FORMAT (code);
3660 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3661 if (fmt[i] == 'e')
3662 {
3663 rtx arg = XEXP (x, i);
3664 rtx folded_arg = arg, const_arg = 0;
3665 enum machine_mode mode_arg = GET_MODE (arg);
3666 rtx cheap_arg, expensive_arg;
3667 rtx replacements[2];
3668 int j;
3669 int old_cost = COST_IN (XEXP (x, i), code);
3670
3671 /* Most arguments are cheap, so handle them specially. */
3672 switch (GET_CODE (arg))
3673 {
3674 case REG:
3675 /* This is the same as calling equiv_constant; it is duplicated
3676 here for speed. */
3677 if (REGNO_QTY_VALID_P (REGNO (arg)))
3678 {
3679 int arg_q = REG_QTY (REGNO (arg));
3680 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3681
3682 if (arg_ent->const_rtx != NULL_RTX
3683 && GET_CODE (arg_ent->const_rtx) != REG
3684 && GET_CODE (arg_ent->const_rtx) != PLUS)
3685 const_arg
3686 = gen_lowpart_if_possible (GET_MODE (arg),
3687 arg_ent->const_rtx);
3688 }
3689 break;
3690
3691 case CONST:
3692 case CONST_INT:
3693 case SYMBOL_REF:
3694 case LABEL_REF:
3695 case CONST_DOUBLE:
3696 case CONST_VECTOR:
3697 const_arg = arg;
3698 break;
3699
3700 #ifdef HAVE_cc0
3701 case CC0:
3702 folded_arg = prev_insn_cc0;
3703 mode_arg = prev_insn_cc0_mode;
3704 const_arg = equiv_constant (folded_arg);
3705 break;
3706 #endif
3707
3708 default:
3709 folded_arg = fold_rtx (arg, insn);
3710 const_arg = equiv_constant (folded_arg);
3711 }
3712
3713 /* For the first three operands, see if the operand
3714 is constant or equivalent to a constant. */
3715 switch (i)
3716 {
3717 case 0:
3718 folded_arg0 = folded_arg;
3719 const_arg0 = const_arg;
3720 mode_arg0 = mode_arg;
3721 break;
3722 case 1:
3723 folded_arg1 = folded_arg;
3724 const_arg1 = const_arg;
3725 break;
3726 case 2:
3727 const_arg2 = const_arg;
3728 break;
3729 }
3730
3731 /* Pick the least expensive of the folded argument and an
3732 equivalent constant argument. */
3733 if (const_arg == 0 || const_arg == folded_arg
3734 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3735 cheap_arg = folded_arg, expensive_arg = const_arg;
3736 else
3737 cheap_arg = const_arg, expensive_arg = folded_arg;
3738
3739 /* Try to replace the operand with the cheapest of the two
3740 possibilities. If it doesn't work and this is either of the first
3741 two operands of a commutative operation, try swapping them.
3742 If THAT fails, try the more expensive, provided it is cheaper
3743 than what is already there. */
3744
3745 if (cheap_arg == XEXP (x, i))
3746 continue;
3747
3748 if (insn == 0 && ! copied)
3749 {
3750 x = copy_rtx (x);
3751 copied = 1;
3752 }
3753
3754 /* Order the replacements from cheapest to most expensive. */
3755 replacements[0] = cheap_arg;
3756 replacements[1] = expensive_arg;
3757
3758 for (j = 0; j < 2 && replacements[j]; j++)
3759 {
3760 int new_cost = COST_IN (replacements[j], code);
3761
3762 /* Stop if what existed before was cheaper. Prefer constants
3763 in the case of a tie. */
3764 if (new_cost > old_cost
3765 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3766 break;
3767
3768 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3769 break;
3770
3771 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c'
3772 || code == LTGT || code == UNEQ || code == ORDERED
3773 || code == UNORDERED)
3774 {
3775 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3776 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3777
3778 if (apply_change_group ())
3779 {
3780 /* Swap them back to be invalid so that this loop can
3781 continue and flag them to be swapped back later. */
3782 rtx tem;
3783
3784 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3785 XEXP (x, 1) = tem;
3786 must_swap = 1;
3787 break;
3788 }
3789 }
3790 }
3791 }
3792
3793 else
3794 {
3795 if (fmt[i] == 'E')
3796 /* Don't try to fold inside of a vector of expressions.
3797 Doing nothing is harmless. */
3798 {;}
3799 }
3800
3801 /* If a commutative operation, place a constant integer as the second
3802 operand unless the first operand is also a constant integer. Otherwise,
3803 place any constant second unless the first operand is also a constant. */
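     /* E.g., (plus (const_int 4) (reg 65)) is rewritten here as
        (plus (reg 65) (const_int 4)), the canonical operand order
        (register number illustrative).  */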
3804
3805 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c'
3806 || code == LTGT || code == UNEQ || code == ORDERED
3807 || code == UNORDERED)
3808 {
3809 if (must_swap
3810 || swap_commutative_operands_p (const_arg0 ? const_arg0
3811 : XEXP (x, 0),
3812 const_arg1 ? const_arg1
3813 : XEXP (x, 1)))
3814 {
3815 rtx tem = XEXP (x, 0);
3816
3817 if (insn == 0 && ! copied)
3818 {
3819 x = copy_rtx (x);
3820 copied = 1;
3821 }
3822
3823 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3824 validate_change (insn, &XEXP (x, 1), tem, 1);
3825 if (apply_change_group ())
3826 {
3827 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3828 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3829 }
3830 }
3831 }
3832
3833 /* If X is an arithmetic operation, see if we can simplify it. */
3834
3835 switch (GET_RTX_CLASS (code))
3836 {
3837 case '1':
3838 {
3839 int is_const = 0;
3840
3841 /* We can't simplify extension ops unless we know the
3842 original mode. */
3843 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3844 && mode_arg0 == VOIDmode)
3845 break;
3846
3847 /* If we had a CONST, strip it off and put it back later if we
3848 fold. */
3849 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3850 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3851
3852 new = simplify_unary_operation (code, mode,
3853 const_arg0 ? const_arg0 : folded_arg0,
3854 mode_arg0);
3855 if (new != 0 && is_const)
3856 new = gen_rtx_CONST (mode, new);
3857 }
3858 break;
3859
3860 case '<':
3861 /* See what items are actually being compared and set FOLDED_ARG[01]
3862 to those values and CODE to the actual comparison code. If any are
3863 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3864 do anything if both operands are already known to be constant. */
3865
3866 if (const_arg0 == 0 || const_arg1 == 0)
3867 {
3868 struct table_elt *p0, *p1;
3869 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3870 enum machine_mode mode_arg1;
3871
3872 #ifdef FLOAT_STORE_FLAG_VALUE
3873 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3874 {
3875 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3876 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3877 false_rtx = CONST0_RTX (mode);
3878 }
3879 #endif
3880
3881 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3882 &mode_arg0, &mode_arg1);
3883 const_arg0 = equiv_constant (folded_arg0);
3884 const_arg1 = equiv_constant (folded_arg1);
3885
3886 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3887 what kinds of things are being compared, so we can't do
3888 anything with this comparison. */
3889
3890 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3891 break;
3892
3893 /* If we do not now have two constants being compared, see
3894 if we can nevertheless deduce some things about the
3895 comparison. */
3896 if (const_arg0 == 0 || const_arg1 == 0)
3897 {
3898 /* Some addresses are known to be nonzero. We don't know
3899 their sign, but equality comparisons are known. */
3900 if (const_arg1 == const0_rtx
3901 && nonzero_address_p (folded_arg0))
3902 {
3903 if (code == EQ)
3904 return false_rtx;
3905 else if (code == NE)
3906 return true_rtx;
3907 }
3908
3909 /* See if the two operands are the same. */
3910
3911 if (folded_arg0 == folded_arg1
3912 || (GET_CODE (folded_arg0) == REG
3913 && GET_CODE (folded_arg1) == REG
3914 && (REG_QTY (REGNO (folded_arg0))
3915 == REG_QTY (REGNO (folded_arg1))))
3916 || ((p0 = lookup (folded_arg0,
3917 (safe_hash (folded_arg0, mode_arg0)
3918 & HASH_MASK), mode_arg0))
3919 && (p1 = lookup (folded_arg1,
3920 (safe_hash (folded_arg1, mode_arg0)
3921 & HASH_MASK), mode_arg0))
3922 && p0->first_same_value == p1->first_same_value))
3923 {
3924 /* Sadly two equal NaNs are not equivalent. */
3925 if (!HONOR_NANS (mode_arg0))
3926 return ((code == EQ || code == LE || code == GE
3927 || code == LEU || code == GEU || code == UNEQ
3928 || code == UNLE || code == UNGE
3929 || code == ORDERED)
3930 ? true_rtx : false_rtx);
3931 /* Take care for the FP compares we can resolve. */
3932 if (code == UNEQ || code == UNLE || code == UNGE)
3933 return true_rtx;
3934 if (code == LTGT || code == LT || code == GT)
3935 return false_rtx;
3936 }
3937
3938 /* If FOLDED_ARG0 is a register, see if the comparison we are
3939 doing now is either the same as we did before or the reverse
3940 (we only check the reverse if not floating-point). */
3941 else if (GET_CODE (folded_arg0) == REG)
3942 {
3943 int qty = REG_QTY (REGNO (folded_arg0));
3944
3945 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3946 {
3947 struct qty_table_elem *ent = &qty_table[qty];
3948
3949 if ((comparison_dominates_p (ent->comparison_code, code)
3950 || (! FLOAT_MODE_P (mode_arg0)
3951 && comparison_dominates_p (ent->comparison_code,
3952 reverse_condition (code))))
3953 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3954 || (const_arg1
3955 && rtx_equal_p (ent->comparison_const,
3956 const_arg1))
3957 || (GET_CODE (folded_arg1) == REG
3958 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3959 return (comparison_dominates_p (ent->comparison_code, code)
3960 ? true_rtx : false_rtx);
3961 }
3962 }
3963 }
3964 }
3965
3966 /* If we are comparing against zero, see if the first operand is
3967 equivalent to an IOR with a constant. If so, we may be able to
3968 determine the result of this comparison. */
3969
3970 if (const_arg1 == const0_rtx)
3971 {
3972 rtx y = lookup_as_function (folded_arg0, IOR);
3973 rtx inner_const;
3974
3975 if (y != 0
3976 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3977 && GET_CODE (inner_const) == CONST_INT
3978 && INTVAL (inner_const) != 0)
3979 {
3980 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
3981 int has_sign = (HOST_BITS_PER_WIDE_INT > sign_bitnum
3982 && (INTVAL (inner_const)
3983 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
3984 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3985
3986 #ifdef FLOAT_STORE_FLAG_VALUE
3987 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3988 {
3989 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3990 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3991 false_rtx = CONST0_RTX (mode);
3992 }
3993 #endif
3994
3995 switch (code)
3996 {
3997 case EQ:
3998 return false_rtx;
3999 case NE:
4000 return true_rtx;
4001 case LT: case LE:
4002 if (has_sign)
4003 return true_rtx;
4004 break;
4005 case GT: case GE:
4006 if (has_sign)
4007 return false_rtx;
4008 break;
4009 default:
4010 break;
4011 }
4012 }
4013 }
4014
4015 new = simplify_relational_operation (code,
4016 (mode_arg0 != VOIDmode
4017 ? mode_arg0
4018 : (GET_MODE (const_arg0
4019 ? const_arg0
4020 : folded_arg0)
4021 != VOIDmode)
4022 ? GET_MODE (const_arg0
4023 ? const_arg0
4024 : folded_arg0)
4025 : GET_MODE (const_arg1
4026 ? const_arg1
4027 : folded_arg1)),
4028 const_arg0 ? const_arg0 : folded_arg0,
4029 const_arg1 ? const_arg1 : folded_arg1);
4030 #ifdef FLOAT_STORE_FLAG_VALUE
4031 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
4032 {
4033 if (new == const0_rtx)
4034 new = CONST0_RTX (mode);
4035 else
4036 new = (CONST_DOUBLE_FROM_REAL_VALUE
4037 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4038 }
4039 #endif
4040 break;
4041
4042 case '2':
4043 case 'c':
4044 switch (code)
4045 {
4046 case PLUS:
4047 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4048 with that LABEL_REF as its second operand. If so, the result is
4049 the first operand of that MINUS. This handles switches with an
4050 ADDR_DIFF_VEC table. */
4051 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4052 {
4053 rtx y
4054 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
4055 : lookup_as_function (folded_arg0, MINUS);
4056
4057 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4058 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4059 return XEXP (y, 0);
4060
4061 /* Now try for a CONST of a MINUS like the above. */
4062 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4063 : lookup_as_function (folded_arg0, CONST))) != 0
4064 && GET_CODE (XEXP (y, 0)) == MINUS
4065 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4066 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4067 return XEXP (XEXP (y, 0), 0);
4068 }
4069
4070 /* Likewise if the operands are in the other order. */
4071 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4072 {
4073 rtx y
4074 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
4075 : lookup_as_function (folded_arg1, MINUS);
4076
4077 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4078 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4079 return XEXP (y, 0);
4080
4081 /* Now try for a CONST of a MINUS like the above. */
4082 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4083 : lookup_as_function (folded_arg1, CONST))) != 0
4084 && GET_CODE (XEXP (y, 0)) == MINUS
4085 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4086 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4087 return XEXP (XEXP (y, 0), 0);
4088 }
4089
4090 /* If second operand is a register equivalent to a negative
4091 CONST_INT, see if we can find a register equivalent to the
4092 positive constant. Make a MINUS if so. Don't do this for
4093 a non-negative constant since we might then alternate between
4094 choosing positive and negative constants. Having the positive
4095 constant previously-used is the more common case. Be sure
4096 the resulting constant is non-negative; if const_arg1 were
4097 the smallest negative number this would overflow: depending
4098 on the mode, this would either just be the same value (and
4099 hence not save anything) or be incorrect. */
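     /* Sketch (registers hypothetical): for
        (plus (reg 65) (const_int -4)), if some (reg 66) is known to
        hold 4, we return (minus (reg 65) (reg 66)) instead.  */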
4100 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4101 && INTVAL (const_arg1) < 0
4102 /* This used to test
4103
4104 -INTVAL (const_arg1) >= 0
4105
4106 But the Sun V5.0 compilers mis-compiled that test. So
4107 instead we test for the problematic value in a more direct
4108 manner and hope the Sun compilers get it correct. */
4109 && INTVAL (const_arg1) !=
4110 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4111 && GET_CODE (folded_arg1) == REG)
4112 {
4113 rtx new_const = GEN_INT (-INTVAL (const_arg1));
4114 struct table_elt *p
4115 = lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
4116 mode);
4117
4118 if (p)
4119 for (p = p->first_same_value; p; p = p->next_same_value)
4120 if (GET_CODE (p->exp) == REG)
4121 return simplify_gen_binary (MINUS, mode, folded_arg0,
4122 canon_reg (p->exp, NULL_RTX));
4123 }
4124 goto from_plus;
4125
4126 case MINUS:
4127 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4128 If so, produce (PLUS Z C2-C). */
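     /* E.g., if Y == (plus Z (const_int 10)) and C == 3, this
        produces (plus Z (const_int 7)).  */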
4129 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4130 {
4131 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4132 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4133 return fold_rtx (plus_constant (copy_rtx (y),
4134 -INTVAL (const_arg1)),
4135 NULL_RTX);
4136 }
4137
4138 /* Fall through. */
4139
4140 from_plus:
4141 case SMIN: case SMAX: case UMIN: case UMAX:
4142 case IOR: case AND: case XOR:
4143 case MULT:
4144 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4145 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4146 is known to be of similar form, we may be able to replace the
4147 operation with a combined operation. This may eliminate the
4148 intermediate operation if every use is simplified in this way.
4149 Note that the similar optimization done by combine.c only works
4150 if the intermediate operation's result has only one reference. */
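     /* Sketch (X, R hypothetical): if R is known equal to
        (ashift X (const_int 2)), then (ashift R (const_int 3)) is
        rewritten as (ashift X (const_int 5)).  */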
4151
4152 if (GET_CODE (folded_arg0) == REG
4153 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4154 {
4155 int is_shift
4156 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4157 rtx y = lookup_as_function (folded_arg0, code);
4158 rtx inner_const;
4159 enum rtx_code associate_code;
4160 rtx new_const;
4161
4162 if (y == 0
4163 || 0 == (inner_const
4164 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4165 || GET_CODE (inner_const) != CONST_INT
4166 /* If we have compiled a statement like
4167 "if (x == (x & mask1))", and now are looking at
4168 "x & mask2", we will have a case where the first operand
4169 of Y is the same as our first operand. Unless we detect
4170 this case, an infinite loop will result. */
4171 || XEXP (y, 0) == folded_arg0)
4172 break;
4173
4174 /* Don't associate these operations if they are a PLUS with the
4175 same constant and it is a power of two. These might be doable
4176 with a pre- or post-increment. Similarly for two subtracts of
4177 identical powers of two with post decrement. */
4178
4179 if (code == PLUS && const_arg1 == inner_const
4180 && ((HAVE_PRE_INCREMENT
4181 && exact_log2 (INTVAL (const_arg1)) >= 0)
4182 || (HAVE_POST_INCREMENT
4183 && exact_log2 (INTVAL (const_arg1)) >= 0)
4184 || (HAVE_PRE_DECREMENT
4185 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4186 || (HAVE_POST_DECREMENT
4187 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4188 break;
4189
4190 /* Compute the code used to compose the constants. For example,
4191 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
4192
4193 associate_code = (is_shift || code == MINUS ? PLUS : code);
4194
4195 new_const = simplify_binary_operation (associate_code, mode,
4196 const_arg1, inner_const);
4197
4198 if (new_const == 0)
4199 break;
4200
4201 /* If we are associating shift operations, don't let this
4202 produce a shift of the size of the object or larger.
4203 This could occur when we follow a sign-extend by a right
4204 shift on a machine that does a sign-extend as a pair
4205 of shifts. */
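     /* E.g., (ashiftrt R (const_int 8)) where R is known equal to
        (ashiftrt T (const_int 24)) -- the tail of a shift-pair
        sign-extension -- would combine to a count of 32 in SImode;
        the ASHIFTRT case below clamps that to 31 instead
        (R and T hypothetical).  */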
4206
4207 if (is_shift && GET_CODE (new_const) == CONST_INT
4208 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4209 {
4210 /* As an exception, we can turn an ASHIFTRT of this
4211 form into a shift of the number of bits - 1. */
4212 if (code == ASHIFTRT)
4213 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4214 else
4215 break;
4216 }
4217
4218 y = copy_rtx (XEXP (y, 0));
4219
4220 /* If Y contains our first operand (the most common way this
4221 can happen is if Y is a MEM), we would go into an infinite
4222 loop if we tried to fold it. So don't in that case. */
4223
4224 if (! reg_mentioned_p (folded_arg0, y))
4225 y = fold_rtx (y, insn);
4226
4227 return simplify_gen_binary (code, mode, y, new_const);
4228 }
4229 break;
4230
4231 case DIV: case UDIV:
4232 /* ??? The associative optimization performed immediately above is
4233 also possible for DIV and UDIV using associate_code of MULT.
4234 However, we would need extra code to verify that the
4235 multiplication does not overflow, that is, there is no overflow
4236 in the calculation of new_const. */
4237 break;
4238
4239 default:
4240 break;
4241 }
4242
4243 new = simplify_binary_operation (code, mode,
4244 const_arg0 ? const_arg0 : folded_arg0,
4245 const_arg1 ? const_arg1 : folded_arg1);
4246 break;
4247
4248 case 'o':
4249 /* (lo_sum (high X) X) is simply X. */
4250 if (code == LO_SUM && const_arg0 != 0
4251 && GET_CODE (const_arg0) == HIGH
4252 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4253 return const_arg1;
4254 break;
4255
4256 case '3':
4257 case 'b':
4258 new = simplify_ternary_operation (code, mode, mode_arg0,
4259 const_arg0 ? const_arg0 : folded_arg0,
4260 const_arg1 ? const_arg1 : folded_arg1,
4261 const_arg2 ? const_arg2 : XEXP (x, 2));
4262 break;
4263
4264 case 'x':
4265 /* Eliminate CONSTANT_P_RTX if it's constant. */
4266 if (code == CONSTANT_P_RTX)
4267 {
4268 if (const_arg0)
4269 return const1_rtx;
4270 if (optimize == 0 || !flag_gcse)
4271 return const0_rtx;
4272 }
4273 break;
4274 }
4275
4276 return new ? new : x;
4277 }
4278 \f
4279 /* Return a constant value currently equivalent to X.
4280 Return 0 if we don't know one. */
4281
4282 static rtx
4283 equiv_constant (rtx x)
4284 {
4285 if (GET_CODE (x) == REG
4286 && REGNO_QTY_VALID_P (REGNO (x)))
4287 {
4288 int x_q = REG_QTY (REGNO (x));
4289 struct qty_table_elem *x_ent = &qty_table[x_q];
4290
4291 if (x_ent->const_rtx)
4292 x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
4293 }
4294
4295 if (x == 0 || CONSTANT_P (x))
4296 return x;
4297
4298 /* If X is a MEM, try to fold it outside the context of any insn to see if
4299 it might be equivalent to a constant. That handles the case where it
4300 is a constant-pool reference. Then try to look it up in the hash table
4301 in case it is something whose value we have seen before. */
4302
4303 if (GET_CODE (x) == MEM)
4304 {
4305 struct table_elt *elt;
4306
4307 x = fold_rtx (x, NULL_RTX);
4308 if (CONSTANT_P (x))
4309 return x;
4310
4311 elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4312 if (elt == 0)
4313 return 0;
4314
4315 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4316 if (elt->is_const && CONSTANT_P (elt->exp))
4317 return elt->exp;
4318 }
4319
4320 return 0;
4321 }
4322 \f
4323 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4324 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4325 least-significant part of X.
4326 MODE specifies how big a part of X to return.
4327
4328 If the requested operation cannot be done, 0 is returned.
4329
4330 This is similar to gen_lowpart in emit-rtl.c. */
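 /* For instance (pseudo-RTL, register number illustrative):
    gen_lowpart_if_possible (QImode, (reg:SI 65)) yields
    (subreg:QI (reg:SI 65) 0) on a little-endian target, while for a
    MEM the address is adjusted to refer to the low-order bytes.  */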
4331
4332 rtx
4333 gen_lowpart_if_possible (enum machine_mode mode, rtx x)
4334 {
4335 rtx result = gen_lowpart_common (mode, x);
4336
4337 if (result)
4338 return result;
4339 else if (GET_CODE (x) == MEM)
4340 {
4341 /* This is the only other case we handle. */
4342 int offset = 0;
4343 rtx new;
4344
4345 if (WORDS_BIG_ENDIAN)
4346 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4347 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4348 if (BYTES_BIG_ENDIAN)
4349 /* Adjust the address so that the address-after-the-data is
4350 unchanged. */
4351 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4352 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4353
4354 new = adjust_address_nv (x, mode, offset);
4355 if (! memory_address_p (mode, XEXP (new, 0)))
4356 return 0;
4357
4358 return new;
4359 }
4360 else
4361 return 0;
4362 }
4363 \f
4364 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4365 branch. It will be zero if not.
4366
4367 In certain cases, this can cause us to add an equivalence. For example,
4368 if we are following the taken case of
4369 if (i == 2)
4370 we can add the fact that `i' and `2' are now equivalent.
4371
4372 In any case, we can record that this comparison was passed. If the same
4373 comparison is seen later, we will know its value. */
4374
4375 static void
4376 record_jump_equiv (rtx insn, int taken)
4377 {
4378 int cond_known_true;
4379 rtx op0, op1;
4380 rtx set;
4381 enum machine_mode mode, mode0, mode1;
4382 int reversed_nonequality = 0;
4383 enum rtx_code code;
4384
4385 /* Ensure this is the right kind of insn. */
4386 if (! any_condjump_p (insn))
4387 return;
4388 set = pc_set (insn);
4389
4390 /* See if this jump condition is known true or false. */
4391 if (taken)
4392 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4393 else
4394 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4395
4396 /* Get the type of comparison being done and the operands being compared.
4397 If we had to reverse a non-equality condition, record that fact so we
4398 know that it isn't valid for floating-point. */
4399 code = GET_CODE (XEXP (SET_SRC (set), 0));
4400 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4401 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4402
4403 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4404 if (! cond_known_true)
4405 {
4406 code = reversed_comparison_code_parts (code, op0, op1, insn);
4407
4408 /* Don't remember if we can't find the inverse. */
4409 if (code == UNKNOWN)
4410 return;
4411 }
4412
4413 /* The mode is the mode of the non-constant. */
4414 mode = mode0;
4415 if (mode1 != VOIDmode)
4416 mode = mode1;
4417
4418 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4419 }
4420
4421 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4422 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4423 Make any useful entries we can with that information. Called from
4424 above function and called recursively. */
4425
4426 static void
4427 record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
4428 rtx op1, int reversed_nonequality)
4429 {
4430 unsigned op0_hash, op1_hash;
4431 int op0_in_memory, op1_in_memory;
4432 struct table_elt *op0_elt, *op1_elt;
4433
4434 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4435 we know that they are also equal in the smaller mode (this is also
4436 true for all smaller modes whether or not there is a SUBREG, but
4437 is not worth testing for with no SUBREG). */
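 /* E.g., learning (eq (subreg:DI (reg:SI 65) 0) (reg:DI 66)) lets us
    also record that (reg:SI 65) equals the SImode lowpart of
    (reg:DI 66) (register numbers hypothetical).  */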
4438
4439 /* Note that GET_MODE (op0) may not equal MODE. */
4440 if (code == EQ && GET_CODE (op0) == SUBREG
4441 && (GET_MODE_SIZE (GET_MODE (op0))
4442 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4443 {
4444 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4445 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4446
4447 record_jump_cond (code, mode, SUBREG_REG (op0),
4448 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4449 reversed_nonequality);
4450 }
4451
4452 if (code == EQ && GET_CODE (op1) == SUBREG
4453 && (GET_MODE_SIZE (GET_MODE (op1))
4454 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4455 {
4456 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4457 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4458
4459 record_jump_cond (code, mode, SUBREG_REG (op1),
4460 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4461 reversed_nonequality);
4462 }
4463
4464 /* Similarly, if this is an NE comparison, and either is a SUBREG
4465 making a smaller mode, we know the whole thing is also NE. */
4466
4467 /* Note that GET_MODE (op0) may not equal MODE;
4468 if we test MODE instead, we can get an infinite recursion
4469 alternating between two modes each wider than MODE. */
4470
4471 if (code == NE && GET_CODE (op0) == SUBREG
4472 && subreg_lowpart_p (op0)
4473 && (GET_MODE_SIZE (GET_MODE (op0))
4474 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4475 {
4476 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4477 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4478
4479 record_jump_cond (code, mode, SUBREG_REG (op0),
4480 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4481 reversed_nonequality);
4482 }
4483
4484 if (code == NE && GET_CODE (op1) == SUBREG
4485 && subreg_lowpart_p (op1)
4486 && (GET_MODE_SIZE (GET_MODE (op1))
4487 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4488 {
4489 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4490 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4491
4492 record_jump_cond (code, mode, SUBREG_REG (op1),
4493 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4494 reversed_nonequality);
4495 }
4496
4497 /* Hash both operands. */
4498
4499 do_not_record = 0;
4500 hash_arg_in_memory = 0;
4501 op0_hash = HASH (op0, mode);
4502 op0_in_memory = hash_arg_in_memory;
4503
4504 if (do_not_record)
4505 return;
4506
4507 do_not_record = 0;
4508 hash_arg_in_memory = 0;
4509 op1_hash = HASH (op1, mode);
4510 op1_in_memory = hash_arg_in_memory;
4511
4512 if (do_not_record)
4513 return;
4514
4515 /* Look up both operands. */
4516 op0_elt = lookup (op0, op0_hash, mode);
4517 op1_elt = lookup (op1, op1_hash, mode);
4518
4519 /* If both operands are already equivalent or if they are not in the
4520 table but are identical, do nothing. */
4521 if ((op0_elt != 0 && op1_elt != 0
4522 && op0_elt->first_same_value == op1_elt->first_same_value)
4523 || op0 == op1 || rtx_equal_p (op0, op1))
4524 return;
4525
4526 /* If we aren't setting two things equal all we can do is save this
4527 comparison. Similarly if this is floating-point. In the latter
4528 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4529 If we record the equality, we might inadvertently delete code
4530 whose intent was to change -0 to +0. */
4531
4532 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4533 {
4534 struct qty_table_elem *ent;
4535 int qty;
4536
4537 /* If we reversed a floating-point comparison, if OP0 is not a
4538 register, or if OP1 is neither a register nor a constant, we can't
4539 do anything. */
4540
4541 if (GET_CODE (op1) != REG)
4542 op1 = equiv_constant (op1);
4543
4544 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4545 || GET_CODE (op0) != REG || op1 == 0)
4546 return;
4547
4548 /* Put OP0 in the hash table if it isn't already. This gives it a
4549 new quantity number. */
4550 if (op0_elt == 0)
4551 {
4552 if (insert_regs (op0, NULL, 0))
4553 {
4554 rehash_using_reg (op0);
4555 op0_hash = HASH (op0, mode);
4556
4557 /* If OP0 is contained in OP1, this changes its hash code
4558 as well. Faster to rehash than to check, except
4559 for the simple case of a constant. */
4560 if (! CONSTANT_P (op1))
4561 op1_hash = HASH (op1, mode);
4562 }
4563
4564 op0_elt = insert (op0, NULL, op0_hash, mode);
4565 op0_elt->in_memory = op0_in_memory;
4566 }
4567
4568 qty = REG_QTY (REGNO (op0));
4569 ent = &qty_table[qty];
4570
4571 ent->comparison_code = code;
4572 if (GET_CODE (op1) == REG)
4573 {
4574 /* Look it up again--in case op0 and op1 are the same. */
4575 op1_elt = lookup (op1, op1_hash, mode);
4576
4577 /* Put OP1 in the hash table so it gets a new quantity number. */
4578 if (op1_elt == 0)
4579 {
4580 if (insert_regs (op1, NULL, 0))
4581 {
4582 rehash_using_reg (op1);
4583 op1_hash = HASH (op1, mode);
4584 }
4585
4586 op1_elt = insert (op1, NULL, op1_hash, mode);
4587 op1_elt->in_memory = op1_in_memory;
4588 }
4589
4590 ent->comparison_const = NULL_RTX;
4591 ent->comparison_qty = REG_QTY (REGNO (op1));
4592 }
4593 else
4594 {
4595 ent->comparison_const = op1;
4596 ent->comparison_qty = -1;
4597 }
4598
4599 return;
4600 }
4601
4602 /* If either side is still missing an equivalence, make it now,
4603 then merge the equivalences. */
4604
4605 if (op0_elt == 0)
4606 {
4607 if (insert_regs (op0, NULL, 0))
4608 {
4609 rehash_using_reg (op0);
4610 op0_hash = HASH (op0, mode);
4611 }
4612
4613 op0_elt = insert (op0, NULL, op0_hash, mode);
4614 op0_elt->in_memory = op0_in_memory;
4615 }
4616
4617 if (op1_elt == 0)
4618 {
4619 if (insert_regs (op1, NULL, 0))
4620 {
4621 rehash_using_reg (op1);
4622 op1_hash = HASH (op1, mode);
4623 }
4624
4625 op1_elt = insert (op1, NULL, op1_hash, mode);
4626 op1_elt->in_memory = op1_in_memory;
4627 }
4628
4629 merge_equiv_classes (op0_elt, op1_elt);
4630 last_jump_equiv_class = op0_elt;
4631 }
4632 \f
4633 /* CSE processing for one instruction.
4634 First simplify sources and addresses of all assignments
4635 in the instruction, using previously-computed equivalent values.
4636 Then install the new sources and destinations in the table
4637 of available values.
4638
4639 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4640 the insn. It means that INSN is inside a libcall block. In this
4641 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4642
4643 /* Data on one SET contained in the instruction. */
4644
4645 struct set
4646 {
4647 /* The SET rtx itself. */
4648 rtx rtl;
4649 /* The SET_SRC of the rtx (the original value, if it is changing). */
4650 rtx src;
4651 /* The hash-table element for the SET_SRC of the SET. */
4652 struct table_elt *src_elt;
4653 /* Hash value for the SET_SRC. */
4654 unsigned src_hash;
4655 /* Hash value for the SET_DEST. */
4656 unsigned dest_hash;
4657 /* The SET_DEST, with SUBREG, etc., stripped. */
4658 rtx inner_dest;
4659 /* Nonzero if the SET_SRC is in memory. */
4660 char src_in_memory;
4661 /* Nonzero if the SET_SRC contains something
4662 whose value cannot be predicted and understood. */
4663 char src_volatile;
4664 /* Original machine mode, in case it becomes a CONST_INT.
4665 The size of this field should match the size of the mode
4666 field of struct rtx_def (see rtl.h). */
4667 ENUM_BITFIELD(machine_mode) mode : 8;
4668 /* A constant equivalent for SET_SRC, if any. */
4669 rtx src_const;
4670 /* Original SET_SRC value used for libcall notes. */
4671 rtx orig_src;
4672 /* Hash value of constant equivalent for SET_SRC. */
4673 unsigned src_const_hash;
4674 /* Table entry for constant equivalent for SET_SRC, if any. */
4675 struct table_elt *src_const_elt;
4676 };
4677
4678 static void
4679 cse_insn (rtx insn, rtx libcall_insn)
4680 {
4681 rtx x = PATTERN (insn);
4682 int i;
4683 rtx tem;
4684 int n_sets = 0;
4685
4686 #ifdef HAVE_cc0
4687 /* Records what this insn does to set CC0. */
4688 rtx this_insn_cc0 = 0;
4689 enum machine_mode this_insn_cc0_mode = VOIDmode;
4690 #endif
4691
4692 rtx src_eqv = 0;
4693 struct table_elt *src_eqv_elt = 0;
4694 int src_eqv_volatile = 0;
4695 int src_eqv_in_memory = 0;
4696 unsigned src_eqv_hash = 0;
4697
4698 struct set *sets = (struct set *) 0;
4699
4700 this_insn = insn;
4701
4702 /* Find all the SETs and CLOBBERs in this instruction.
4703 Record all the SETs in the array `sets' and count them.
4704 Also determine whether there is a CLOBBER that invalidates
4705 all memory references, or all references at varying addresses. */
4706
4707 if (GET_CODE (insn) == CALL_INSN)
4708 {
4709 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4710 {
4711 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4712 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4713 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4714 }
4715 }
4716
4717 if (GET_CODE (x) == SET)
4718 {
4719 sets = alloca (sizeof (struct set));
4720 sets[0].rtl = x;
4721
4722 /* Ignore SETs that are unconditional jumps.
4723 They never need cse processing, so this does not hurt.
4724 The reason is not efficiency but rather
4725 so that we can test at the end for instructions
4726 that have been simplified to unconditional jumps
4727 and not be misled by unchanged instructions
4728 that were unconditional jumps to begin with. */
4729 if (SET_DEST (x) == pc_rtx
4730 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4731 ;
4732
4733 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4734 The hard function value register is used only once, to copy to
4735 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4736 Ensure we invalidate the destination register. On the 80386 no
4737 other code would invalidate it since it is a fixed_reg.
4738 We need not check the return of apply_change_group; see canon_reg. */
4739
4740 else if (GET_CODE (SET_SRC (x)) == CALL)
4741 {
4742 canon_reg (SET_SRC (x), insn);
4743 apply_change_group ();
4744 fold_rtx (SET_SRC (x), insn);
4745 invalidate (SET_DEST (x), VOIDmode);
4746 }
4747 else
4748 n_sets = 1;
4749 }
4750 else if (GET_CODE (x) == PARALLEL)
4751 {
4752 int lim = XVECLEN (x, 0);
4753
4754 sets = alloca (lim * sizeof (struct set));
4755
4756 /* Find all regs explicitly clobbered in this insn,
4757 and ensure they are not replaced with any other regs
4758 elsewhere in this insn.
4759 When a reg that is clobbered is also used for input,
4760 we should presume that that is for a reason,
4761 and we should not substitute some other register
4762 which is not supposed to be clobbered.
4763 Therefore, this loop cannot be merged into the one below
4764 because a CALL may precede a CLOBBER and refer to the
4765 value clobbered. We must not let a canonicalization do
4766 anything in that case. */
4767 for (i = 0; i < lim; i++)
4768 {
4769 rtx y = XVECEXP (x, 0, i);
4770 if (GET_CODE (y) == CLOBBER)
4771 {
4772 rtx clobbered = XEXP (y, 0);
4773
4774 if (GET_CODE (clobbered) == REG
4775 || GET_CODE (clobbered) == SUBREG)
4776 invalidate (clobbered, VOIDmode);
4777 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4778 || GET_CODE (clobbered) == ZERO_EXTRACT)
4779 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4780 }
4781 }
4782
4783 for (i = 0; i < lim; i++)
4784 {
4785 rtx y = XVECEXP (x, 0, i);
4786 if (GET_CODE (y) == SET)
4787 {
4788 /* As above, we ignore unconditional jumps and call-insns and
4789 ignore the result of apply_change_group. */
4790 if (GET_CODE (SET_SRC (y)) == CALL)
4791 {
4792 canon_reg (SET_SRC (y), insn);
4793 apply_change_group ();
4794 fold_rtx (SET_SRC (y), insn);
4795 invalidate (SET_DEST (y), VOIDmode);
4796 }
4797 else if (SET_DEST (y) == pc_rtx
4798 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4799 ;
4800 else
4801 sets[n_sets++].rtl = y;
4802 }
4803 else if (GET_CODE (y) == CLOBBER)
4804 {
4805 /* If we clobber memory, canon the address.
4806 This does nothing when a register is clobbered
4807 because we have already invalidated the reg. */
4808 if (GET_CODE (XEXP (y, 0)) == MEM)
4809 canon_reg (XEXP (y, 0), NULL_RTX);
4810 }
4811 else if (GET_CODE (y) == USE
4812 && ! (GET_CODE (XEXP (y, 0)) == REG
4813 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4814 canon_reg (y, NULL_RTX);
4815 else if (GET_CODE (y) == CALL)
4816 {
4817 /* The result of apply_change_group can be ignored; see
4818 canon_reg. */
4819 canon_reg (y, insn);
4820 apply_change_group ();
4821 fold_rtx (y, insn);
4822 }
4823 }
4824 }
4825 else if (GET_CODE (x) == CLOBBER)
4826 {
4827 if (GET_CODE (XEXP (x, 0)) == MEM)
4828 canon_reg (XEXP (x, 0), NULL_RTX);
4829 }
4830
4831 /* Canonicalize a USE of a pseudo register or memory location. */
4832 else if (GET_CODE (x) == USE
4833 && ! (GET_CODE (XEXP (x, 0)) == REG
4834 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4835 canon_reg (XEXP (x, 0), NULL_RTX);
4836 else if (GET_CODE (x) == CALL)
4837 {
4838 /* The result of apply_change_group can be ignored; see canon_reg. */
4839 canon_reg (x, insn);
4840 apply_change_group ();
4841 fold_rtx (x, insn);
4842 }
4843
4844 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4845 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4846 is handled specially for this case, and if it isn't set, then there will
4847 be no equivalence for the destination. */
4848 if (n_sets == 1 && REG_NOTES (insn) != 0
4849 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4850 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4851 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4852 {
4853 src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4854 XEXP (tem, 0) = src_eqv;
4855 }
4856
4857 /* Canonicalize sources and addresses of destinations.
4858 We do this in a separate pass to avoid problems when a MATCH_DUP is
4859 present in the insn pattern. In that case, we want to ensure that
4860 we don't break the duplicate nature of the pattern. So we will replace
4861 both operands at the same time. Otherwise, we would fail to find an
4862 equivalent substitution in the loop calling validate_change below.
4863
4864 We used to suppress canonicalization of DEST if it appears in SRC,
4865 but we don't do this any more. */
4866
4867 for (i = 0; i < n_sets; i++)
4868 {
4869 rtx dest = SET_DEST (sets[i].rtl);
4870 rtx src = SET_SRC (sets[i].rtl);
4871 rtx new = canon_reg (src, insn);
4872 int insn_code;
4873
4874 sets[i].orig_src = src;
4875 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4876 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4877 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4878 || (insn_code = recog_memoized (insn)) < 0
4879 || insn_data[insn_code].n_dups > 0)
4880 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4881 else
4882 SET_SRC (sets[i].rtl) = new;
4883
4884 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4885 {
4886 validate_change (insn, &XEXP (dest, 1),
4887 canon_reg (XEXP (dest, 1), insn), 1);
4888 validate_change (insn, &XEXP (dest, 2),
4889 canon_reg (XEXP (dest, 2), insn), 1);
4890 }
4891
4892 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4893 || GET_CODE (dest) == ZERO_EXTRACT
4894 || GET_CODE (dest) == SIGN_EXTRACT)
4895 dest = XEXP (dest, 0);
4896
4897 if (GET_CODE (dest) == MEM)
4898 canon_reg (dest, insn);
4899 }
4900
4901 /* Now that we have done all the replacements, we can apply the change
4902 group and see if they all work. Note that this will cause some
4903 canonicalizations that would have worked individually not to be applied
4904 because some other canonicalization didn't work, but this should not
4905 occur often.
4906
4907 The result of apply_change_group can be ignored; see canon_reg. */
4908
4909 apply_change_group ();
4910
4911 /* Set sets[i].src_elt to the class each source belongs to.
4912 Detect assignments from or to volatile things
4913 and set sets[i] to zero so they will be ignored
4914 in the rest of this function.
4915
4916 Nothing in this loop changes the hash table or the register chains. */
4917
4918 for (i = 0; i < n_sets; i++)
4919 {
4920 rtx src, dest;
4921 rtx src_folded;
4922 struct table_elt *elt = 0, *p;
4923 enum machine_mode mode;
4924 rtx src_eqv_here;
4925 rtx src_const = 0;
4926 rtx src_related = 0;
4927 struct table_elt *src_const_elt = 0;
4928 int src_cost = MAX_COST;
4929 int src_eqv_cost = MAX_COST;
4930 int src_folded_cost = MAX_COST;
4931 int src_related_cost = MAX_COST;
4932 int src_elt_cost = MAX_COST;
4933 int src_regcost = MAX_COST;
4934 int src_eqv_regcost = MAX_COST;
4935 int src_folded_regcost = MAX_COST;
4936 int src_related_regcost = MAX_COST;
4937 int src_elt_regcost = MAX_COST;
4938 /* Set nonzero if we need to call force_const_mem on the
4939 contents of src_folded before using it. */
4940 int src_folded_force_flag = 0;
4941
4942 dest = SET_DEST (sets[i].rtl);
4943 src = SET_SRC (sets[i].rtl);
4944
4945 /* If SRC is a constant that has no machine mode,
4946 hash it with the destination's machine mode.
4947 This way we can keep different modes separate. */
4948
4949 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4950 sets[i].mode = mode;
4951
4952 if (src_eqv)
4953 {
4954 enum machine_mode eqvmode = mode;
4955 if (GET_CODE (dest) == STRICT_LOW_PART)
4956 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4957 do_not_record = 0;
4958 hash_arg_in_memory = 0;
4959 src_eqv_hash = HASH (src_eqv, eqvmode);
4960
4961 /* Find the equivalence class for the equivalent expression. */
4962
4963 if (!do_not_record)
4964 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4965
4966 src_eqv_volatile = do_not_record;
4967 src_eqv_in_memory = hash_arg_in_memory;
4968 }
4969
4970 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4971 value of the INNER register, not the destination. So it is not
4972 a valid substitution for the source. But save it for later. */
4973 if (GET_CODE (dest) == STRICT_LOW_PART)
4974 src_eqv_here = 0;
4975 else
4976 src_eqv_here = src_eqv;
4977
4978 /* Simplify any foldable subexpressions in SRC. Then get the fully-
4979 simplified result, which may not necessarily be valid. */
4980 src_folded = fold_rtx (src, insn);
4981
4982 #if 0
4983 /* ??? This caused bad code to be generated for the m68k port with -O2.
4984 Suppose src is (CONST_INT -1), and that after truncation src_folded
4985 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4986 At the end we will add src and src_const to the same equivalence
4987 class. We now have 3 and -1 on the same equivalence class. This
4988 causes later instructions to be mis-optimized. */
4989 /* If storing a constant in a bitfield, pre-truncate the constant
4990 so we will be able to record it later. */
4991 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
4992 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
4993 {
4994 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4995
4996 if (GET_CODE (src) == CONST_INT
4997 && GET_CODE (width) == CONST_INT
4998 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4999 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5000 src_folded
5001 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5002 << INTVAL (width)) - 1));
5003 }
5004 #endif
5005
5006 /* Compute SRC's hash code, and also notice if it
5007 should not be recorded at all. In that case,
5008 prevent any further processing of this assignment. */
5009 do_not_record = 0;
5010 hash_arg_in_memory = 0;
5011
5012 sets[i].src = src;
5013 sets[i].src_hash = HASH (src, mode);
5014 sets[i].src_volatile = do_not_record;
5015 sets[i].src_in_memory = hash_arg_in_memory;
5016
5017 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5018 a pseudo, do not record SRC. Using SRC as a replacement for
5019 anything else will be incorrect in that situation. Note that
5020 this usually occurs only for stack slots, in which case all the
5021 RTL would be referring to SRC, so we don't lose any optimization
5022 opportunities by not having SRC in the hash table. */
5023
5024 if (GET_CODE (src) == MEM
5025 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5026 && GET_CODE (dest) == REG
5027 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5028 sets[i].src_volatile = 1;
5029
5030 #if 0
5031 /* It is no longer clear why we used to do this, but it doesn't
5032 appear to still be needed. So let's try without it since this
5033 code hurts cse'ing widened ops. */
5034 /* If source is a perverse subreg (such as QI treated as an SI),
5035 treat it as volatile. It may do the work of an SI in one context
5036 where the extra bits are not being used, but cannot replace an SI
5037 in general. */
5038 if (GET_CODE (src) == SUBREG
5039 && (GET_MODE_SIZE (GET_MODE (src))
5040 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5041 sets[i].src_volatile = 1;
5042 #endif
5043
5044 /* Locate all possible equivalent forms for SRC. Try to replace
5045 SRC in the insn with each cheaper equivalent.
5046
5047 We have the following types of equivalents: SRC itself, a folded
5048 version, a value given in a REG_EQUAL note, or a value related
5049 to a constant.
5050
5051 Each of these equivalents may be part of an additional class
5052 of equivalents (if more than one is in the table, they must be in
5053 the same class; we check for this).
5054
5055 If the source is volatile, we don't do any table lookups.
5056
5057 We note any constant equivalent for possible later use in a
5058 REG_NOTE. */
5059
5060 if (!sets[i].src_volatile)
5061 elt = lookup (src, sets[i].src_hash, mode);
5062
5063 sets[i].src_elt = elt;
5064
5065 if (elt && src_eqv_here && src_eqv_elt)
5066 {
5067 if (elt->first_same_value != src_eqv_elt->first_same_value)
5068 {
5069 /* The REG_EQUAL is indicating that two formerly distinct
5070 classes are now equivalent. So merge them. */
5071 merge_equiv_classes (elt, src_eqv_elt);
5072 src_eqv_hash = HASH (src_eqv, elt->mode);
5073 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5074 }
5075
5076 src_eqv_here = 0;
5077 }
5078
5079 else if (src_eqv_elt)
5080 elt = src_eqv_elt;
5081
5082 /* Try to find a constant somewhere and record it in `src_const'.
5083 Record its table element, if any, in `src_const_elt'. Look in
5084 any known equivalences first. (If the constant is not in the
5085 table, also set `sets[i].src_const_hash'). */
5086 if (elt)
5087 for (p = elt->first_same_value; p; p = p->next_same_value)
5088 if (p->is_const)
5089 {
5090 src_const = p->exp;
5091 src_const_elt = elt;
5092 break;
5093 }
5094
5095 if (src_const == 0
5096 && (CONSTANT_P (src_folded)
5097 /* Consider (minus (label_ref L1) (label_ref L2)) as
5098 "constant" here so we will record it. This allows us
5099 to fold switch statements when an ADDR_DIFF_VEC is used. */
5100 || (GET_CODE (src_folded) == MINUS
5101 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5102 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5103 src_const = src_folded, src_const_elt = elt;
5104 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5105 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5106
5107 /* If we don't know if the constant is in the table, get its
5108 hash code and look it up. */
5109 if (src_const && src_const_elt == 0)
5110 {
5111 sets[i].src_const_hash = HASH (src_const, mode);
5112 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5113 }
5114
5115 sets[i].src_const = src_const;
5116 sets[i].src_const_elt = src_const_elt;
5117
5118 /* If the constant and our source are both in the table, mark them as
5119 equivalent. Otherwise, if a constant is in the table but the source
5120 isn't, set ELT to it. */
5121 if (src_const_elt && elt
5122 && src_const_elt->first_same_value != elt->first_same_value)
5123 merge_equiv_classes (elt, src_const_elt);
5124 else if (src_const_elt && elt == 0)
5125 elt = src_const_elt;
5126
5127 /* See if there is a register linearly related to a constant
5128 equivalent of SRC. */
5129 if (src_const
5130 && (GET_CODE (src_const) == CONST
5131 || (src_const_elt && src_const_elt->related_value != 0)))
5132 {
5133 src_related = use_related_value (src_const, src_const_elt);
5134 if (src_related)
5135 {
5136 struct table_elt *src_related_elt
5137 = lookup (src_related, HASH (src_related, mode), mode);
5138 if (src_related_elt && elt)
5139 {
5140 if (elt->first_same_value
5141 != src_related_elt->first_same_value)
5142 /* This can occur when we previously saw a CONST
5143 involving a SYMBOL_REF and then see the SYMBOL_REF
5144 twice. Merge the involved classes. */
5145 merge_equiv_classes (elt, src_related_elt);
5146
5147 src_related = 0;
5148 src_related_elt = 0;
5149 }
5150 else if (src_related_elt && elt == 0)
5151 elt = src_related_elt;
5152 }
5153 }
5154
5155 /* See if we have a CONST_INT that is already in a register in a
5156 wider mode. */
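     /* Sketch (R hypothetical): if (reg:DI R) is already known to
        hold (const_int 5), an SImode use of 5 can become
        (subreg:SI (reg:DI R) 0), often cheaper than rematerializing
        the constant.  */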
5157
5158 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5159 && GET_MODE_CLASS (mode) == MODE_INT
5160 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5161 {
5162 enum machine_mode wider_mode;
5163
5164 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5165 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5166 && src_related == 0;
5167 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5168 {
5169 struct table_elt *const_elt
5170 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5171
5172 if (const_elt == 0)
5173 continue;
5174
5175 for (const_elt = const_elt->first_same_value;
5176 const_elt; const_elt = const_elt->next_same_value)
5177 if (GET_CODE (const_elt->exp) == REG)
5178 {
5179 src_related = gen_lowpart_if_possible (mode,
5180 const_elt->exp);
5181 break;
5182 }
5183 }
5184 }
5185
5186 /* Another possibility is that we have an AND with a constant in
5187 a mode narrower than a word. If so, it might have been generated
5188 as part of an "if" which would narrow the AND. If we already
5189 have done the AND in a wider mode, we can use a SUBREG of that
5190 value. */
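     /* Sketch (W, R hypothetical): for
        (and:QI (subreg:QI (reg:SI W) 0) (const_int 15)), if
        (and:SI (reg:SI W) (const_int 15)) was already computed into
        (reg:SI R), the QImode AND can become
        (subreg:QI (reg:SI R) 0).  */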
5191
5192 if (flag_expensive_optimizations && ! src_related
5193 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5194 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5195 {
5196 enum machine_mode tmode;
5197 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5198
5199 for (tmode = GET_MODE_WIDER_MODE (mode);
5200 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5201 tmode = GET_MODE_WIDER_MODE (tmode))
5202 {
5203 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
5204 struct table_elt *larger_elt;
5205
5206 if (inner)
5207 {
5208 PUT_MODE (new_and, tmode);
5209 XEXP (new_and, 0) = inner;
5210 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5211 if (larger_elt == 0)
5212 continue;
5213
5214 for (larger_elt = larger_elt->first_same_value;
5215 larger_elt; larger_elt = larger_elt->next_same_value)
5216 if (GET_CODE (larger_elt->exp) == REG)
5217 {
5218 src_related
5219 = gen_lowpart_if_possible (mode, larger_elt->exp);
5220 break;
5221 }
5222
5223 if (src_related)
5224 break;
5225 }
5226 }
5227 }
5228
5229 #ifdef LOAD_EXTEND_OP
5230 /* See if a MEM has already been loaded with a widening operation;
5231 if it has, we can use a subreg of that. Many CISC machines
5232 also have such operations, but this is only likely to be
5233 beneficial on these machines. */
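     /* E.g., after (set (reg:SI R) (zero_extend:SI (mem:QI M))) on a
        target whose LOAD_EXTEND_OP is ZERO_EXTEND, a later QImode
        read of M can reuse (subreg:QI (reg:SI R) 0) rather than load
        again (R and M hypothetical).  */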
5234
5235 if (flag_expensive_optimizations && src_related == 0
5236 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5237 && GET_MODE_CLASS (mode) == MODE_INT
5238 && GET_CODE (src) == MEM && ! do_not_record
5239 && LOAD_EXTEND_OP (mode) != NIL)
5240 {
5241 enum machine_mode tmode;
5242
5243 /* Set what we are trying to extend and the operation it might
5244 have been extended with. */
5245 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5246 XEXP (memory_extend_rtx, 0) = src;
5247
5248 for (tmode = GET_MODE_WIDER_MODE (mode);
5249 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5250 tmode = GET_MODE_WIDER_MODE (tmode))
5251 {
5252 struct table_elt *larger_elt;
5253
5254 PUT_MODE (memory_extend_rtx, tmode);
5255 larger_elt = lookup (memory_extend_rtx,
5256 HASH (memory_extend_rtx, tmode), tmode);
5257 if (larger_elt == 0)
5258 continue;
5259
5260 for (larger_elt = larger_elt->first_same_value;
5261 larger_elt; larger_elt = larger_elt->next_same_value)
5262 if (GET_CODE (larger_elt->exp) == REG)
5263 {
5264 src_related = gen_lowpart_if_possible (mode,
5265 larger_elt->exp);
5266 break;
5267 }
5268
5269 if (src_related)
5270 break;
5271 }
5272 }
5273 #endif /* LOAD_EXTEND_OP */
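/* For instance, on a target where LOAD_EXTEND_OP (QImode) is
   ZERO_EXTEND, an earlier (set (reg:SI 100)
   (zero_extend:SI (mem:QI A))) records the extended value; if we
   later see (set (reg:QI 101) (mem:QI A)), the loop above finds
   (reg:SI 100) and SRC_RELATED becomes its QImode lowpart, avoiding
   a second load.  (A and the register numbers are illustrative.)  */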
5274
5275 if (src == src_folded)
5276 src_folded = 0;
5277
5278 /* At this point, ELT, if nonzero, points to a class of expressions
5279 equivalent to the source of this SET; SRC, SRC_EQV, SRC_FOLDED,
5280 and SRC_RELATED, if nonzero, each contain additional equivalent
5281 expressions. Prune these latter expressions by deleting expressions
5282 already in the equivalence class.
5283
5284 Check for an equivalent identical to the destination. If found,
5285 this is the preferred equivalent since it will likely lead to
5286 elimination of the insn. Indicate this by placing it in
5287 `src_related'. */
5288
5289 if (elt)
5290 elt = elt->first_same_value;
5291 for (p = elt; p; p = p->next_same_value)
5292 {
5293 enum rtx_code code = GET_CODE (p->exp);
5294
5295 /* If the expression is not valid, ignore it. Then we do not
5296 have to check for validity below. In most cases, we can use
5297 `rtx_equal_p', since canonicalization has already been done. */
5298 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5299 continue;
5300
5301 /* Also skip paradoxical subregs, unless that's what we're
5302 looking for. */
5303 if (code == SUBREG
5304 && (GET_MODE_SIZE (GET_MODE (p->exp))
5305 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5306 && ! (src != 0
5307 && GET_CODE (src) == SUBREG
5308 && GET_MODE (src) == GET_MODE (p->exp)
5309 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5310 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5311 continue;
5312
5313 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5314 src = 0;
5315 else if (src_folded && GET_CODE (src_folded) == code
5316 && rtx_equal_p (src_folded, p->exp))
5317 src_folded = 0;
5318 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5319 && rtx_equal_p (src_eqv_here, p->exp))
5320 src_eqv_here = 0;
5321 else if (src_related && GET_CODE (src_related) == code
5322 && rtx_equal_p (src_related, p->exp))
5323 src_related = 0;
5324
5325 /* If this is the same as the destination of the insn, we want
5326 to prefer it. Copy it to src_related; the code below will
5327 then give it a negative cost. */
5328 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5329 src_related = dest;
5330 }
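/* Concrete case of the preference above (registers invented): if
   this insn is (set (reg:SI 100) (expr)) and (reg:SI 100) already
   appears in EXPR's equivalence class, copying it to SRC_RELATED
   lets the cost comparison below select it, turning the insn into
   the no-op (set (reg:SI 100) (reg:SI 100)) that later passes can
   delete.  */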
5331
5332 /* Find the cheapest valid equivalent, trying all the available
5333 possibilities. Prefer items not in the hash table to ones
5334 that are when they are equal cost. Note that we can never
5335 worsen an insn as the current contents will also succeed.
5336 If we find an equivalent identical to the destination, use it as best,
5337 since this insn will probably be eliminated in that case. */
5338 if (src)
5339 {
5340 if (rtx_equal_p (src, dest))
5341 src_cost = src_regcost = -1;
5342 else
5343 {
5344 src_cost = COST (src);
5345 src_regcost = approx_reg_cost (src);
5346 }
5347 }
5348
5349 if (src_eqv_here)
5350 {
5351 if (rtx_equal_p (src_eqv_here, dest))
5352 src_eqv_cost = src_eqv_regcost = -1;
5353 else
5354 {
5355 src_eqv_cost = COST (src_eqv_here);
5356 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5357 }
5358 }
5359
5360 if (src_folded)
5361 {
5362 if (rtx_equal_p (src_folded, dest))
5363 src_folded_cost = src_folded_regcost = -1;
5364 else
5365 {
5366 src_folded_cost = COST (src_folded);
5367 src_folded_regcost = approx_reg_cost (src_folded);
5368 }
5369 }
5370
5371 if (src_related)
5372 {
5373 if (rtx_equal_p (src_related, dest))
5374 src_related_cost = src_related_regcost = -1;
5375 else
5376 {
5377 src_related_cost = COST (src_related);
5378 src_related_regcost = approx_reg_cost (src_related);
5379 }
5380 }
5381
5382 /* If this was an indirect jump insn, a known label will really be
5383 cheaper even though it looks more expensive. */
5384 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5385 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5386
5387 /* Terminate the loop when a replacement is made. It must terminate
5388 since the current contents will eventually be tried and are always valid. */
5389 while (1)
5390 {
5391 rtx trial;
5392
5393 /* Skip invalid entries. */
5394 while (elt && GET_CODE (elt->exp) != REG
5395 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5396 elt = elt->next_same_value;
5397
5398 /* A paradoxical subreg would be bad here: it'll be the right
5399 size, but later may be adjusted so that the upper bits aren't
5400 what we want. So reject it. */
5401 if (elt != 0
5402 && GET_CODE (elt->exp) == SUBREG
5403 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5404 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5405 /* It is okay, though, if the rtx we're trying to match
5406 will ignore any of the bits we can't predict. */
5407 && ! (src != 0
5408 && GET_CODE (src) == SUBREG
5409 && GET_MODE (src) == GET_MODE (elt->exp)
5410 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5411 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5412 {
5413 elt = elt->next_same_value;
5414 continue;
5415 }
5416
5417 if (elt)
5418 {
5419 src_elt_cost = elt->cost;
5420 src_elt_regcost = elt->regcost;
5421 }
5422
5423 /* Find the cheapest candidate and disqualify it for the next
5424 iteration. For items of equal cost, use this order:
5425 src_folded, src, src_eqv, src_related and the hash table entry. */
5426 if (src_folded
5427 && preferrable (src_folded_cost, src_folded_regcost,
5428 src_cost, src_regcost) <= 0
5429 && preferrable (src_folded_cost, src_folded_regcost,
5430 src_eqv_cost, src_eqv_regcost) <= 0
5431 && preferrable (src_folded_cost, src_folded_regcost,
5432 src_related_cost, src_related_regcost) <= 0
5433 && preferrable (src_folded_cost, src_folded_regcost,
5434 src_elt_cost, src_elt_regcost) <= 0)
5435 {
5436 trial = src_folded, src_folded_cost = MAX_COST;
5437 if (src_folded_force_flag)
5438 {
5439 rtx forced = force_const_mem (mode, trial);
5440 if (forced)
5441 trial = forced;
5442 }
5443 }
5444 else if (src
5445 && preferrable (src_cost, src_regcost,
5446 src_eqv_cost, src_eqv_regcost) <= 0
5447 && preferrable (src_cost, src_regcost,
5448 src_related_cost, src_related_regcost) <= 0
5449 && preferrable (src_cost, src_regcost,
5450 src_elt_cost, src_elt_regcost) <= 0)
5451 trial = src, src_cost = MAX_COST;
5452 else if (src_eqv_here
5453 && preferrable (src_eqv_cost, src_eqv_regcost,
5454 src_related_cost, src_related_regcost) <= 0
5455 && preferrable (src_eqv_cost, src_eqv_regcost,
5456 src_elt_cost, src_elt_regcost) <= 0)
5457 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5458 else if (src_related
5459 && preferrable (src_related_cost, src_related_regcost,
5460 src_elt_cost, src_elt_regcost) <= 0)
5461 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5462 else
5463 {
5464 trial = copy_rtx (elt->exp);
5465 elt = elt->next_same_value;
5466 src_elt_cost = MAX_COST;
5467 }
5468
5469 /* We don't normally have an insn matching (set (pc) (pc)), so
5470 check for this separately here. We will delete such an
5471 insn below.
5472
5473 For other cases such as a table jump or conditional jump
5474 where we know the ultimate target, go ahead and replace the
5475 operand. While that may not make a valid insn, we will
5476 reemit the jump below (and also insert any necessary
5477 barriers). */
5478 if (n_sets == 1 && dest == pc_rtx
5479 && (trial == pc_rtx
5480 || (GET_CODE (trial) == LABEL_REF
5481 && ! condjump_p (insn))))
5482 {
5483 SET_SRC (sets[i].rtl) = trial;
5484 cse_jumps_altered = 1;
5485 break;
5486 }
5487
5488 /* Look for a substitution that makes a valid insn. */
5489 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5490 {
5491 rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
5492
5493 /* If we just made a substitution inside a libcall, then we
5494 need to make the same substitution in any notes attached
5495 to the RETVAL insn. */
5496 if (libcall_insn
5497 && (GET_CODE (sets[i].orig_src) == REG
5498 || GET_CODE (sets[i].orig_src) == SUBREG
5499 || GET_CODE (sets[i].orig_src) == MEM))
5500 simplify_replace_rtx (REG_NOTES (libcall_insn),
5501 sets[i].orig_src, copy_rtx (new));
5502
5503 /* The result of apply_change_group can be ignored; see
5504 canon_reg. */
5505
5506 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5507 apply_change_group ();
5508 break;
5509 }
5510
5511 /* If we previously found constant pool entries for
5512 constants and this is a constant, try making a
5513 pool entry. Put it in src_folded unless we have already done
5514 so, since that is where it likely came from. */
5515
5516 else if (constant_pool_entries_cost
5517 && CONSTANT_P (trial)
5518 /* Reject cases that will abort in decode_rtx_const.
5519 On the Alpha, when simplifying a switch, we get
5520 (const (truncate (minus (label_ref) (label_ref)))). */
5521 && ! (GET_CODE (trial) == CONST
5522 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5523 /* Likewise on IA-64, except without the truncate. */
5524 && ! (GET_CODE (trial) == CONST
5525 && GET_CODE (XEXP (trial, 0)) == MINUS
5526 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5527 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5528 && (src_folded == 0
5529 || (GET_CODE (src_folded) != MEM
5530 && ! src_folded_force_flag))
5531 && GET_MODE_CLASS (mode) != MODE_CC
5532 && mode != VOIDmode)
5533 {
5534 src_folded_force_flag = 1;
5535 src_folded = trial;
5536 src_folded_cost = constant_pool_entries_cost;
5537 src_folded_regcost = constant_pool_entries_regcost;
5538 }
5539 }
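/* To illustrate the constant-pool path just above: if TRIAL is an
   expensive constant such as a CONST_DOUBLE and earlier constants in
   this function were spilled to memory, we stash it in SRC_FOLDED
   with SRC_FOLDED_FORCE_FLAG set; when that candidate is later
   chosen, force_const_mem rewrites it as a (mem (symbol_ref ...))
   referencing a constant-pool entry.  */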
5540
5541 src = SET_SRC (sets[i].rtl);
5542
5543 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5544 However, there is an important exception: If both are registers
5545 that are not the head of their equivalence class, replace SET_SRC
5546 with the head of the class. If we do not do this, we will have
5547 both registers live over a portion of the basic block. This way,
5548 their lifetimes will likely abut instead of overlapping. */
5549 if (GET_CODE (dest) == REG
5550 && REGNO_QTY_VALID_P (REGNO (dest)))
5551 {
5552 int dest_q = REG_QTY (REGNO (dest));
5553 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5554
5555 if (dest_ent->mode == GET_MODE (dest)
5556 && dest_ent->first_reg != REGNO (dest)
5557 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5558 /* Don't do this if the original insn had a hard reg as
5559 SET_SRC or SET_DEST. */
5560 && (GET_CODE (sets[i].src) != REG
5561 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5562 && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5563 /* We can't call canon_reg here because it won't do anything if
5564 SRC is a hard register. */
5565 {
5566 int src_q = REG_QTY (REGNO (src));
5567 struct qty_table_elem *src_ent = &qty_table[src_q];
5568 int first = src_ent->first_reg;
5569 rtx new_src
5570 = (first >= FIRST_PSEUDO_REGISTER
5571 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5572
5573 /* We must use validate-change even for this, because this
5574 might be a special no-op instruction, suitable only to
5575 tag notes onto. */
5576 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5577 {
5578 src = new_src;
5579 /* If we had a constant that is cheaper than what we are now
5580 setting SRC to, use that constant. We ignored it when we
5581 thought we could make this into a no-op. */
5582 if (src_const && COST (src_const) < COST (src)
5583 && validate_change (insn, &SET_SRC (sets[i].rtl),
5584 src_const, 0))
5585 src = src_const;
5586 }
5587 }
5588 }
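/* For example (register numbers invented): if substitution has
   turned this insn into the no-op (set (reg 105) (reg 105)) while
   the equivalence class is headed by (reg 100), we emit
   (set (reg 105) (reg 100)) instead; reg 100 can then die here and
   reg 105 begin, so the two lifetimes abut instead of overlapping. */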
5589
5590 /* If we made a change, recompute SRC values. */
5591 if (src != sets[i].src)
5592 {
5593 cse_altered = 1;
5594 do_not_record = 0;
5595 hash_arg_in_memory = 0;
5596 sets[i].src = src;
5597 sets[i].src_hash = HASH (src, mode);
5598 sets[i].src_volatile = do_not_record;
5599 sets[i].src_in_memory = hash_arg_in_memory;
5600 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5601 }
5602
5603 /* If this is a single SET, we are setting a register, and we have an
5604 equivalent constant, we want to add a REG_NOTE. We don't want
5605 to write a REG_EQUAL note for a constant pseudo since verifying that
5606 that pseudo hasn't been eliminated is a pain. Such a note also
5607 won't help anything.
5608
5609 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5610 which can be created for a reference to a compile time computable
5611 entry in a jump table. */
5612
5613 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5614 && GET_CODE (src_const) != REG
5615 && ! (GET_CODE (src_const) == CONST
5616 && GET_CODE (XEXP (src_const, 0)) == MINUS
5617 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5618 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5619 {
5620 /* We only want a REG_EQUAL note if src_const != src. */
5621 if (! rtx_equal_p (src, src_const))
5622 {
5623 /* Make sure that the rtx is not shared. */
5624 src_const = copy_rtx (src_const);
5625
5626 /* Record the actual constant value in a REG_EQUAL note,
5627 making a new one if one does not already exist. */
5628 set_unique_reg_note (insn, REG_EQUAL, src_const);
5629 }
5630 }
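/* As a sketch (values and registers illustrative): for
   (set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 102))) where the
   operands are known constants summing to 12, this attaches
   REG_EQUAL (const_int 12) to the insn so that later passes can use
   the constant without repeating the analysis.  */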
5631
5632 /* Now deal with the destination. */
5633 do_not_record = 0;
5634
5635 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5636 to the MEM or REG within it. */
5637 while (GET_CODE (dest) == SIGN_EXTRACT
5638 || GET_CODE (dest) == ZERO_EXTRACT
5639 || GET_CODE (dest) == SUBREG
5640 || GET_CODE (dest) == STRICT_LOW_PART)
5641 dest = XEXP (dest, 0);
5642
5643 sets[i].inner_dest = dest;
5644
5645 if (GET_CODE (dest) == MEM)
5646 {
5647 #ifdef PUSH_ROUNDING
5648 /* Stack pushes invalidate the stack pointer. */
5649 rtx addr = XEXP (dest, 0);
5650 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
5651 && XEXP (addr, 0) == stack_pointer_rtx)
5652 invalidate (stack_pointer_rtx, Pmode);
5653 #endif
5654 dest = fold_rtx (dest, insn);
5655 }
5656
5657 /* Compute the hash code of the destination now,
5658 before the effects of this instruction are recorded,
5659 since the register values used in the address computation
5660 are those before this instruction. */
5661 sets[i].dest_hash = HASH (dest, mode);
5662
5663 /* Don't enter a bit-field in the hash table
5664 because the value in it after the store
5665 may not equal what was stored, due to truncation. */
5666
5667 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5668 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5669 {
5670 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5671
5672 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5673 && GET_CODE (width) == CONST_INT
5674 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5675 && ! (INTVAL (src_const)
5676 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5677 /* Exception: if the value is constant,
5678 and it won't be truncated, record it. */
5679 ;
5680 else
5681 {
5682 /* This is chosen so that the destination will be invalidated
5683 but no new value will be recorded.
5684 We must invalidate because sometimes constant
5685 values can be recorded for bitfields. */
5686 sets[i].src_elt = 0;
5687 sets[i].src_volatile = 1;
5688 src_eqv = 0;
5689 src_eqv_elt = 0;
5690 }
5691 }
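/* Worked instance of the mask test above, for a field of width 8:
   storing (const_int 0x13f) gives 0x13f & (-1 << 8) == 0x100,
   nonzero, so the value would be truncated to 0x3f and is not
   recorded; storing (const_int 0x3f) gives 0, so the constant
   survives the store and may safely be recorded.  */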
5692
5693 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5694 the insn. */
5695 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5696 {
5697 /* One less use of the label this insn used to jump to. */
5698 delete_insn (insn);
5699 cse_jumps_altered = 1;
5700 /* No more processing for this set. */
5701 sets[i].rtl = 0;
5702 }
5703
5704 /* If this SET is now setting PC to a label, we know it used to
5705 be a conditional or computed branch. */
5706 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5707 {
5708 /* Now emit a BARRIER after the unconditional jump. */
5709 if (NEXT_INSN (insn) == 0
5710 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5711 emit_barrier_after (insn);
5712
5713 /* We reemit the jump in as many cases as possible just in
5714 case the form of an unconditional jump is significantly
5715 different from that of a computed or conditional jump.
5716
5717 If this insn has multiple sets, then reemitting the
5718 jump is nontrivial. So instead we just force rerecognition
5719 and hope for the best. */
5720 if (n_sets == 1)
5721 {
5722 rtx new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5723
5724 JUMP_LABEL (new) = XEXP (src, 0);
5725 LABEL_NUSES (XEXP (src, 0))++;
5726 delete_insn (insn);
5727 insn = new;
5728
5729 /* Now emit a BARRIER after the unconditional jump. */
5730 if (NEXT_INSN (insn) == 0
5731 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5732 emit_barrier_after (insn);
5733 }
5734 else
5735 INSN_CODE (insn) = -1;
5736
5737 never_reached_warning (insn, NULL);
5738
5739 /* Do not bother deleting any unreachable code,
5740 let jump/flow do that. */
5741
5742 cse_jumps_altered = 1;
5743 sets[i].rtl = 0;
5744 }
5745
5746 /* If destination is volatile, invalidate it and then do no further
5747 processing for this assignment. */
5748
5749 else if (do_not_record)
5750 {
5751 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5752 invalidate (dest, VOIDmode);
5753 else if (GET_CODE (dest) == MEM)
5754 {
5755 /* Outgoing arguments for a libcall don't
5756 affect any recorded expressions. */
5757 if (! libcall_insn || insn == libcall_insn)
5758 invalidate (dest, VOIDmode);
5759 }
5760 else if (GET_CODE (dest) == STRICT_LOW_PART
5761 || GET_CODE (dest) == ZERO_EXTRACT)
5762 invalidate (XEXP (dest, 0), GET_MODE (dest));
5763 sets[i].rtl = 0;
5764 }
5765
5766 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5767 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5768
5769 #ifdef HAVE_cc0
5770 /* If setting CC0, record what it was set to, or a constant, if it
5771 is equivalent to a constant. If it is being set to a floating-point
5772 value, make a COMPARE with the appropriate constant of 0. If we
5773 don't do this, later code can interpret this as a test against
5774 const0_rtx, which can cause problems if we try to put it into an
5775 insn as a floating-point operand. */
5776 if (dest == cc0_rtx)
5777 {
5778 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5779 this_insn_cc0_mode = mode;
5780 if (FLOAT_MODE_P (mode))
5781 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5782 CONST0_RTX (mode));
5783 }
5784 #endif
5785 }
5786
5787 /* Now enter all non-volatile source expressions in the hash table
5788 if they are not already present.
5789 Record their equivalence classes in src_elt.
5790 This way we can insert the corresponding destinations into
5791 the same classes even if the actual sources are no longer in them
5792 (having been invalidated). */
5793
5794 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5795 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5796 {
5797 struct table_elt *elt;
5798 struct table_elt *classp = sets[0].src_elt;
5799 rtx dest = SET_DEST (sets[0].rtl);
5800 enum machine_mode eqvmode = GET_MODE (dest);
5801
5802 if (GET_CODE (dest) == STRICT_LOW_PART)
5803 {
5804 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5805 classp = 0;
5806 }
5807 if (insert_regs (src_eqv, classp, 0))
5808 {
5809 rehash_using_reg (src_eqv);
5810 src_eqv_hash = HASH (src_eqv, eqvmode);
5811 }
5812 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5813 elt->in_memory = src_eqv_in_memory;
5814 src_eqv_elt = elt;
5815
5816 /* Check to see if src_eqv_elt is the same as a set source which
5817 does not yet have an elt, and if so set the elt of the set source
5818 to src_eqv_elt. */
5819 for (i = 0; i < n_sets; i++)
5820 if (sets[i].rtl && sets[i].src_elt == 0
5821 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5822 sets[i].src_elt = src_eqv_elt;
5823 }
5824
5825 for (i = 0; i < n_sets; i++)
5826 if (sets[i].rtl && ! sets[i].src_volatile
5827 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5828 {
5829 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5830 {
5831 /* REG_EQUAL in setting a STRICT_LOW_PART
5832 gives an equivalent for the entire destination register,
5833 not just for the subreg being stored in now.
5834 This is a more interesting equivalence, so we arrange later
5835 to treat the entire reg as the destination. */
5836 sets[i].src_elt = src_eqv_elt;
5837 sets[i].src_hash = src_eqv_hash;
5838 }
5839 else
5840 {
5841 /* Insert source and constant equivalent into hash table, if not
5842 already present. */
5843 struct table_elt *classp = src_eqv_elt;
5844 rtx src = sets[i].src;
5845 rtx dest = SET_DEST (sets[i].rtl);
5846 enum machine_mode mode
5847 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5848
5849 /* It's possible that we have a source value known to be
5850 constant but don't have a REG_EQUAL note on the insn.
5851 Lack of a note will mean src_eqv_elt will be NULL. This
5852 can happen where we've generated a SUBREG to access a
5853 CONST_INT that is already in a register in a wider mode.
5854 Ensure that the source expression is put in the proper
5855 constant class. */
5856 if (!classp)
5857 classp = sets[i].src_const_elt;
5858
5859 if (sets[i].src_elt == 0)
5860 {
5861 /* Don't put a hard register source into the table if this is
5862 the last insn of a libcall. In this case, we only need
5863 to put src_eqv_elt in src_elt. */
5864 if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5865 {
5866 struct table_elt *elt;
5867
5868 /* Note that these insert_regs calls cannot remove
5869 any of the src_elt's, because they would have failed to
5870 match if not still valid. */
5871 if (insert_regs (src, classp, 0))
5872 {
5873 rehash_using_reg (src);
5874 sets[i].src_hash = HASH (src, mode);
5875 }
5876 elt = insert (src, classp, sets[i].src_hash, mode);
5877 elt->in_memory = sets[i].src_in_memory;
5878 sets[i].src_elt = classp = elt;
5879 }
5880 else
5881 sets[i].src_elt = classp;
5882 }
5883 if (sets[i].src_const && sets[i].src_const_elt == 0
5884 && src != sets[i].src_const
5885 && ! rtx_equal_p (sets[i].src_const, src))
5886 sets[i].src_elt = insert (sets[i].src_const, classp,
5887 sets[i].src_const_hash, mode);
5888 }
5889 }
5890 else if (sets[i].src_elt == 0)
5891 /* If we did not insert the source into the hash table (e.g., it was
5892 volatile), note the equivalence class for the REG_EQUAL value, if any,
5893 so that the destination goes into that class. */
5894 sets[i].src_elt = src_eqv_elt;
5895
5896 invalidate_from_clobbers (x);
5897
5898 /* Some registers are invalidated by subroutine calls. Memory is
5899 invalidated by non-constant calls. */
5900
5901 if (GET_CODE (insn) == CALL_INSN)
5902 {
5903 if (! CONST_OR_PURE_CALL_P (insn))
5904 invalidate_memory ();
5905 invalidate_for_call ();
5906 }
5907
5908 /* Now invalidate everything set by this instruction.
5909 If a SUBREG or other funny destination is being set,
5910 sets[i].rtl is still nonzero, so here we invalidate the reg
5911 a part of which is being set. */
5912
5913 for (i = 0; i < n_sets; i++)
5914 if (sets[i].rtl)
5915 {
5916 /* We can't use the inner dest, because the mode associated with
5917 a ZERO_EXTRACT is significant. */
5918 rtx dest = SET_DEST (sets[i].rtl);
5919
5920 /* Needed for registers to remove the register from its
5921 previous quantity's chain.
5922 Needed for memory if this is a nonvarying address, unless
5923 we have just done an invalidate_memory that covers even those. */
5924 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5925 invalidate (dest, VOIDmode);
5926 else if (GET_CODE (dest) == MEM)
5927 {
5928 /* Outgoing arguments for a libcall don't
5929 affect any recorded expressions. */
5930 if (! libcall_insn || insn == libcall_insn)
5931 invalidate (dest, VOIDmode);
5932 }
5933 else if (GET_CODE (dest) == STRICT_LOW_PART
5934 || GET_CODE (dest) == ZERO_EXTRACT)
5935 invalidate (XEXP (dest, 0), GET_MODE (dest));
5936 }
5937
5938 /* A volatile ASM invalidates everything. */
5939 if (GET_CODE (insn) == INSN
5940 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5941 && MEM_VOLATILE_P (PATTERN (insn)))
5942 flush_hash_table ();
5943
5944 /* Make sure registers mentioned in destinations
5945 are safe for use in an expression to be inserted.
5946 This removes from the hash table
5947 any invalid entry that refers to one of these registers.
5948
5949 We don't care about the return value from mention_regs because
5950 we are going to hash the SET_DEST values unconditionally. */
5951
5952 for (i = 0; i < n_sets; i++)
5953 {
5954 if (sets[i].rtl)
5955 {
5956 rtx x = SET_DEST (sets[i].rtl);
5957
5958 if (GET_CODE (x) != REG)
5959 mention_regs (x);
5960 else
5961 {
5962 /* We used to rely on all references to a register becoming
5963 inaccessible when a register changes to a new quantity,
5964 since that changes the hash code. However, that is not
5965 safe, since after HASH_SIZE new quantities we get a
5966 hash 'collision' of a register with its own invalid
5967 entries. And since SUBREGs have been changed not to
5968 change their hash code with the hash code of the register,
5969 it wouldn't work any longer at all. So we have to check
5970 for any invalid references lying around now.
5971 This code is similar to the REG case in mention_regs,
5972 but it knows that reg_tick has been incremented, and
5973 it leaves reg_in_table as -1. */
5974 unsigned int regno = REGNO (x);
5975 unsigned int endregno
5976 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
5977 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
5978 unsigned int i;
5979
5980 for (i = regno; i < endregno; i++)
5981 {
5982 if (REG_IN_TABLE (i) >= 0)
5983 {
5984 remove_invalid_refs (i);
5985 REG_IN_TABLE (i) = -1;
5986 }
5987 }
5988 }
5989 }
5990 }
5991
5992 /* We may have just removed some of the src_elt's from the hash table.
5993 So replace each one with the current head of the same class. */
5994
5995 for (i = 0; i < n_sets; i++)
5996 if (sets[i].rtl)
5997 {
5998 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5999 /* If elt was removed, find current head of same class,
6000 or 0 if nothing remains of that class. */
6001 {
6002 struct table_elt *elt = sets[i].src_elt;
6003
6004 while (elt && elt->prev_same_value)
6005 elt = elt->prev_same_value;
6006
6007 while (elt && elt->first_same_value == 0)
6008 elt = elt->next_same_value;
6009 sets[i].src_elt = elt ? elt->first_same_value : 0;
6010 }
6011 }
6012
6013 /* Now insert the destinations into their equivalence classes. */
6014
6015 for (i = 0; i < n_sets; i++)
6016 if (sets[i].rtl)
6017 {
6018 rtx dest = SET_DEST (sets[i].rtl);
6019 rtx inner_dest = sets[i].inner_dest;
6020 struct table_elt *elt;
6021
6022 /* Don't record value if we are not supposed to risk allocating
6023 floating-point values in registers that might be wider than
6024 memory. */
6025 if ((flag_float_store
6026 && GET_CODE (dest) == MEM
6027 && FLOAT_MODE_P (GET_MODE (dest)))
6028 /* Don't record BLKmode values, because we don't know their
6029 size, and can't be sure that other BLKmode values
6030 have the same or smaller size. */
6031 || GET_MODE (dest) == BLKmode
6032 /* Don't record values of destinations set inside a libcall block
6033 since we might delete the libcall. Things should have been set
6034 up so we won't want to reuse such a value, but we play it safe
6035 here. */
6036 || libcall_insn
6037 /* If we didn't put a REG_EQUAL value or a source into the hash
6038 table, there is no point in recording DEST. */
6039 || sets[i].src_elt == 0
6040 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6041 or SIGN_EXTEND, don't record DEST since it can cause
6042 some tracking to be wrong.
6043
6044 ??? Think about this more later. */
6045 || (GET_CODE (dest) == SUBREG
6046 && (GET_MODE_SIZE (GET_MODE (dest))
6047 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6048 && (GET_CODE (sets[i].src) == SIGN_EXTEND
6049 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6050 continue;
6051
6052 /* STRICT_LOW_PART isn't part of the value BEING set,
6053 and neither is the SUBREG inside it.
6054 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6055 if (GET_CODE (dest) == STRICT_LOW_PART)
6056 dest = SUBREG_REG (XEXP (dest, 0));
6057
6058 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6059 /* Registers must also be inserted into chains for quantities. */
6060 if (insert_regs (dest, sets[i].src_elt, 1))
6061 {
6062 /* If `insert_regs' changes something, the hash code must be
6063 recalculated. */
6064 rehash_using_reg (dest);
6065 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6066 }
6067
6068 if (GET_CODE (inner_dest) == MEM
6069 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
6070 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
6071 that (MEM (ADDRESSOF (X))) is equivalent to Y.
6072 Consider the case in which the address of the MEM is
6073 passed to a function, which alters the MEM. Then, if we
6074 later use Y instead of the MEM we'll miss the update. */
6075 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
6076 else
6077 elt = insert (dest, sets[i].src_elt,
6078 sets[i].dest_hash, GET_MODE (dest));
6079
6080 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
6081 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
6082 || fixed_base_plus_p (XEXP (sets[i].inner_dest,
6083 0))));
6084
6085 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6086 narrower than M2, and both M1 and M2 are the same number of words,
6087 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6088 make that equivalence as well.
6089
6090 However, BAR may have equivalences for which gen_lowpart_if_possible
6091 will produce a simpler value than it would produce when applied to
6092 BAR itself (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6093 BAR's equivalences. If we don't get a simplified form, make
6094 the SUBREG. It will not be used in an equivalence, but will
6095 cause two similar assignments to be detected.
6096
6097 Note the loop below will find SUBREG_REG (DEST) since we have
6098 already entered SRC and DEST of the SET in the table. */
6099
6100 if (GET_CODE (dest) == SUBREG
6101 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6102 / UNITS_PER_WORD)
6103 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6104 && (GET_MODE_SIZE (GET_MODE (dest))
6105 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6106 && sets[i].src_elt != 0)
6107 {
6108 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6109 struct table_elt *elt, *classp = 0;
6110
6111 for (elt = sets[i].src_elt->first_same_value; elt;
6112 elt = elt->next_same_value)
6113 {
6114 rtx new_src = 0;
6115 unsigned src_hash;
6116 struct table_elt *src_elt;
6117 int byte = 0;
6118
6119 /* Ignore invalid entries. */
6120 if (GET_CODE (elt->exp) != REG
6121 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6122 continue;
6123
6124 /* We may have already been playing subreg games. If the
6125 mode is already correct for the destination, use it. */
6126 if (GET_MODE (elt->exp) == new_mode)
6127 new_src = elt->exp;
6128 else
6129 {
6130 /* Calculate big endian correction for the SUBREG_BYTE.
6131 We have already checked that M1 (GET_MODE (dest))
6132 is not narrower than M2 (new_mode). */
6133 if (BYTES_BIG_ENDIAN)
6134 byte = (GET_MODE_SIZE (GET_MODE (dest))
6135 - GET_MODE_SIZE (new_mode));
6136
6137 new_src = simplify_gen_subreg (new_mode, elt->exp,
6138 GET_MODE (dest), byte);
6139 }
6140
6141 /* The call to simplify_gen_subreg fails if the value
6142 is VOIDmode and we can't do any simplification, e.g.
6143 for EXPR_LISTs denoting function call results.
6144 It is invalid to construct a SUBREG with a VOIDmode
6145 SUBREG_REG, hence a zero new_src means we can't do
6146 this substitution. */
6147 if (! new_src)
6148 continue;
6149
6150 src_hash = HASH (new_src, new_mode);
6151 src_elt = lookup (new_src, src_hash, new_mode);
6152
6153 /* Put the new source in the hash table if it isn't
6154 there already. */
6155 if (src_elt == 0)
6156 {
6157 if (insert_regs (new_src, classp, 0))
6158 {
6159 rehash_using_reg (new_src);
6160 src_hash = HASH (new_src, new_mode);
6161 }
6162 src_elt = insert (new_src, classp, src_hash, new_mode);
6163 src_elt->in_memory = elt->in_memory;
6164 }
6165 else if (classp && classp != src_elt->first_same_value)
6166 /* Show that two things that we've seen before are
6167 actually the same. */
6168 merge_equiv_classes (src_elt, classp);
6169
6170 classp = src_elt->first_same_value;
6171 /* Ignore invalid entries. */
6172 while (classp
6173 && GET_CODE (classp->exp) != REG
6174 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
6175 classp = classp->next_same_value;
6176 }
6177 }
6178 }
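/* Example of the above on a 32-bit single-word target: given
   (set (subreg:SI (reg:HI 100) 0) (reg:SI 101)), we also record
   (reg:HI 100) as equivalent to the HImode lowpart of (reg:SI 101).
   If BYTES_BIG_ENDIAN, that lowpart lives at SUBREG_BYTE
   4 - 2 == 2, which is exactly the correction computed above.  */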
6179
6180 /* Special handling for (set REG0 REG1) where REG0 is the
6181 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6182 be used in the sequel, so (if easily done) change this insn to
6183 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6184 that computed their value. Then REG1 will become a dead store
6185 and won't cloud the situation for later optimizations.
6186
6187 Do not make this change if REG1 is a hard register, because it will
6188 then be used in the sequel and we may be changing a two-operand insn
6189 into a three-operand insn.
6190
6191 Also do not do this if we are operating on a copy of INSN.
6192
6193 Also don't do this if INSN ends a libcall; this would cause an unrelated
6194 register to be set in the middle of a libcall, and we then get bad code
6195 if the libcall is deleted. */
6196
6197 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6198 && NEXT_INSN (PREV_INSN (insn)) == insn
6199 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6200 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6201 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6202 {
6203 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6204 struct qty_table_elem *src_ent = &qty_table[src_q];
6205
6206 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6207 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6208 {
6209 rtx prev = insn;
6210 /* Scan for the previous nonnote insn, but stop at a basic
6211 block boundary. */
6212 do
6213 {
6214 prev = PREV_INSN (prev);
6215 }
6216 while (prev && GET_CODE (prev) == NOTE
6217 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
6218
6219 /* Do not swap the registers around if the previous instruction
6220 attaches a REG_EQUIV note to REG1.
6221
6222 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6223 from the pseudo that originally shadowed an incoming argument
6224 to another register. Some uses of REG_EQUIV might rely on it
6225 being attached to REG1 rather than REG2.
6226
6227 This section previously turned the REG_EQUIV into a REG_EQUAL
6228 note. We cannot do that because REG_EQUIV may provide an
6229 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
6230
6231 if (prev != 0 && GET_CODE (prev) == INSN
6232 && GET_CODE (PATTERN (prev)) == SET
6233 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6234 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6235 {
6236 rtx dest = SET_DEST (sets[0].rtl);
6237 rtx src = SET_SRC (sets[0].rtl);
6238 rtx note;
6239
6240 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6241 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6242 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6243 apply_change_group ();
6244
6245 /* If INSN has a REG_EQUAL note, and this note mentions
6246 REG0, then we must delete it, because the value in
6247 REG0 has changed. If the note's value is REG1, we must
6248 also delete it because that is now this insn's dest. */
6249 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6250 if (note != 0
6251 && (reg_mentioned_p (dest, XEXP (note, 0))
6252 || rtx_equal_p (src, XEXP (note, 0))))
6253 remove_note (insn, note);
6254 }
6255 }
6256 }
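/* Sketch of the swap, with invented registers:

       (set (reg 105) (expr))     <- prev
       (set (reg 100) (reg 105))  <- insn, reg 100 heads the class

   becomes

       (set (reg 100) (expr))
       (set (reg 105) (reg 100))

   so the store into reg 105 is now dead and can be removed later.  */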
6257
6258 /* If this is a conditional jump insn, record any known equivalences due to
6259 the condition being tested. */
6260
6261 last_jump_equiv_class = 0;
6262 if (GET_CODE (insn) == JUMP_INSN
6263 && n_sets == 1 && GET_CODE (x) == SET
6264 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6265 record_jump_equiv (insn, 0);
6266
6267 #ifdef HAVE_cc0
6268 /* If the previous insn set CC0 and this insn no longer references CC0,
6269 delete the previous insn. Here we use the fact that nothing expects CC0
6270 to be valid over an insn, which is true until the final pass. */
6271 if (prev_insn && GET_CODE (prev_insn) == INSN
6272 && (tem = single_set (prev_insn)) != 0
6273 && SET_DEST (tem) == cc0_rtx
6274 && ! reg_mentioned_p (cc0_rtx, x))
6275 delete_insn (prev_insn);
6276
6277 prev_insn_cc0 = this_insn_cc0;
6278 prev_insn_cc0_mode = this_insn_cc0_mode;
6279 prev_insn = insn;
6280 #endif
6281 }
6282 \f
6283 /* Remove from the hash table all expressions that reference memory. */
6284
6285 static void
6286 invalidate_memory (void)
6287 {
6288 int i;
6289 struct table_elt *p, *next;
6290
6291 for (i = 0; i < HASH_SIZE; i++)
6292 for (p = table[i]; p; p = next)
6293 {
6294 next = p->next_same_hash;
6295 if (p->in_memory)
6296 remove_from_table (p, i);
6297 }
6298 }
6299
6300 /* If ADDR is an address that implicitly affects the stack pointer, return
6301 1 and update the register tables to show the effect. Else, return 0. */
6302
6303 static int
6304 addr_affects_sp_p (rtx addr)
6305 {
6306 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
6307 && GET_CODE (XEXP (addr, 0)) == REG
6308 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6309 {
6310 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6311 {
6312 REG_TICK (STACK_POINTER_REGNUM)++;
6313 /* Is it possible to use a subreg of SP? */
6314 SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6315 }
6316
6317 /* This should be *very* rare. */
6318 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6319 invalidate (stack_pointer_rtx, VOIDmode);
6320
6321 return 1;
6322 }
6323
6324 return 0;
6325 }
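/* For instance, a push whose address is (pre_dec:SI (reg sp)) is in
   the autoincrement class 'a' and implicitly modifies the stack
   pointer; bumping REG_TICK here quietly invalidates stale hash
   entries that mention sp without scanning the whole table.  */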
6326
6327 /* Perform invalidation on the basis of everything about an insn
6328 except for invalidating the actual places that are SET in it.
6329 This includes the places CLOBBERed, and anything that might
6330 alias with something that is SET or CLOBBERed.
6331
6332 X is the pattern of the insn. */
6333
6334 static void
6335 invalidate_from_clobbers (rtx x)
6336 {
6337 if (GET_CODE (x) == CLOBBER)
6338 {
6339 rtx ref = XEXP (x, 0);
6340 if (ref)
6341 {
6342 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6343 || GET_CODE (ref) == MEM)
6344 invalidate (ref, VOIDmode);
6345 else if (GET_CODE (ref) == STRICT_LOW_PART
6346 || GET_CODE (ref) == ZERO_EXTRACT)
6347 invalidate (XEXP (ref, 0), GET_MODE (ref));
6348 }
6349 }
6350 else if (GET_CODE (x) == PARALLEL)
6351 {
6352 int i;
6353 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6354 {
6355 rtx y = XVECEXP (x, 0, i);
6356 if (GET_CODE (y) == CLOBBER)
6357 {
6358 rtx ref = XEXP (y, 0);
6359 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6360 || GET_CODE (ref) == MEM)
6361 invalidate (ref, VOIDmode);
6362 else if (GET_CODE (ref) == STRICT_LOW_PART
6363 || GET_CODE (ref) == ZERO_EXTRACT)
6364 invalidate (XEXP (ref, 0), GET_MODE (ref));
6365 }
6366 }
6367 }
6368 }
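/* E.g. for (parallel [(set (reg 100) ...) (clobber (reg:CC 17))])
   only the CLOBBER is processed here: entries mentioning hard
   register 17 (an invented condition-code register) are removed,
   while the SET itself is invalidated separately by cse_insn.  */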
6369 \f
6370 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6371 and replace any registers in them with either an equivalent constant
6372 or the canonical form of the register. If we are inside an address,
6373 only do this if the address remains valid.
6374
6375 OBJECT is 0 except when within a MEM in which case it is the MEM.
6376
6377 Return the replacement for X. */
6378
6379 static rtx
6380 cse_process_notes (rtx x, rtx object)
6381 {
6382 enum rtx_code code = GET_CODE (x);
6383 const char *fmt = GET_RTX_FORMAT (code);
6384 int i;
6385
6386 switch (code)
6387 {
6388 case CONST_INT:
6389 case CONST:
6390 case SYMBOL_REF:
6391 case LABEL_REF:
6392 case CONST_DOUBLE:
6393 case CONST_VECTOR:
6394 case PC:
6395 case CC0:
6396 case LO_SUM:
6397 return x;
6398
6399 case MEM:
6400 validate_change (x, &XEXP (x, 0),
6401 cse_process_notes (XEXP (x, 0), x), 0);
6402 return x;
6403
6404 case EXPR_LIST:
6405 case INSN_LIST:
6406 if (REG_NOTE_KIND (x) == REG_EQUAL)
6407 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6408 if (XEXP (x, 1))
6409 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6410 return x;
6411
6412 case SIGN_EXTEND:
6413 case ZERO_EXTEND:
6414 case SUBREG:
6415 {
6416 rtx new = cse_process_notes (XEXP (x, 0), object);
6417 /* We don't substitute VOIDmode constants into these rtx,
6418 since they would impede folding. */
6419 if (GET_MODE (new) != VOIDmode)
6420 validate_change (object, &XEXP (x, 0), new, 0);
6421 return x;
6422 }
6423
6424 case REG:
6425 i = REG_QTY (REGNO (x));
6426
6427 /* Return a constant or a constant register. */
6428 if (REGNO_QTY_VALID_P (REGNO (x)))
6429 {
6430 struct qty_table_elem *ent = &qty_table[i];
6431
6432 if (ent->const_rtx != NULL_RTX
6433 && (CONSTANT_P (ent->const_rtx)
6434 || GET_CODE (ent->const_rtx) == REG))
6435 {
6436 rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
6437 if (new)
6438 return new;
6439 }
6440 }
6441
6442 /* Otherwise, canonicalize this register. */
6443 return canon_reg (x, NULL_RTX);
6444
6445 default:
6446 break;
6447 }
6448
6449 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6450 if (fmt[i] == 'e')
6451 validate_change (object, &XEXP (x, i),
6452 cse_process_notes (XEXP (x, i), object), 0);
6453
6454 return x;
6455 }
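/* Illustration (values invented): if a REG_EQUAL note contains
   (plus:SI (reg:SI 105) (const_int 4)) and (reg:SI 105) is known to
   equal (const_int 8), the REG case substitutes the constant,
   leaving (plus:SI (const_int 8) (const_int 4)) for later folding;
   a register with no constant equivalent is instead canonicalized
   through canon_reg.  */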
6456 \f
6457 /* Find common subexpressions between the end test of a loop and the beginning
6458 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
6459
6460 Often we have a loop where an expression in the exit test is used
6461 in the body of the loop. For example "while (*p) *q++ = *p++;".
6462 Because of the way we duplicate the loop exit test in front of the loop,
6463 however, we don't detect that common subexpression. This will be caught
6464 when global cse is implemented, but this is quite a common case.
6465
6466 This function handles the most common cases of these common expressions.
6467 It is called after we have processed the basic block ending with the
6468 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6469 jumps to a label used only once. */
6470
6471 static void
6472 cse_around_loop (rtx loop_start)
6473 {
6474 rtx insn;
6475 int i;
6476 struct table_elt *p;
6477
6478 /* If the jump at the end of the loop doesn't go to the start, we don't
6479 do anything. */
6480 for (insn = PREV_INSN (loop_start);
6481 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6482 insn = PREV_INSN (insn))
6483 ;
6484
6485 if (insn == 0
6486 || GET_CODE (insn) != NOTE
6487 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6488 return;
6489
6490 /* If the last insn of the loop (the end test) was an NE comparison,
6491 we will interpret it as an EQ comparison, since we fell through
6492 the loop. Any equivalences resulting from that comparison are
6493 therefore not valid and must be invalidated. */
6494 if (last_jump_equiv_class)
6495 for (p = last_jump_equiv_class->first_same_value; p;
6496 p = p->next_same_value)
6497 {
6498 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6499 || (GET_CODE (p->exp) == SUBREG
6500 && GET_CODE (SUBREG_REG (p->exp)) == REG))
6501 invalidate (p->exp, VOIDmode);
6502 else if (GET_CODE (p->exp) == STRICT_LOW_PART
6503 || GET_CODE (p->exp) == ZERO_EXTRACT)
6504 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6505 }
6506
6507 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6508 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6509
6510 The only thing we do with SET_DEST is invalidate entries, so we
6511 can safely process each SET in order. It is slightly less efficient
6512 to do so, but we only want to handle the most common cases.
6513
6514 The gen_move_insn call in cse_set_around_loop may create new pseudos.
6515 These pseudos won't have valid entries in any of the tables indexed
6516 by register number, such as reg_qty. We avoid out-of-range array
6517 accesses by not processing any instructions created after cse started. */
6518
6519 for (insn = NEXT_INSN (loop_start);
6520 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6521 && INSN_UID (insn) < max_insn_uid
6522 && ! (GET_CODE (insn) == NOTE
6523 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6524 insn = NEXT_INSN (insn))
6525 {
6526 if (INSN_P (insn)
6527 && (GET_CODE (PATTERN (insn)) == SET
6528 || GET_CODE (PATTERN (insn)) == CLOBBER))
6529 cse_set_around_loop (PATTERN (insn), insn, loop_start);
6530 else if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == PARALLEL)
6531 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6532 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6533 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6534 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6535 loop_start);
6536 }
6537 }
6538 \f
6539 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6540 since they are done elsewhere. This function is called via note_stores. */
6541
6542 static void
6543 invalidate_skipped_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
6544 {
6545 enum rtx_code code = GET_CODE (dest);
6546
6547 if (code == MEM
6548 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6549 /* There are times when an address can appear varying and be a PLUS
6550 during this scan when it would be a fixed address were we to know
6551 the proper equivalences. So invalidate all memory if there is
6552 a BLKmode or nonscalar memory reference or a reference to a
6553 variable address. */
6554 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6555 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6556 {
6557 invalidate_memory ();
6558 return;
6559 }
6560
6561 if (GET_CODE (set) == CLOBBER
6562 || CC0_P (dest)
6563 || dest == pc_rtx)
6564 return;
6565
6566 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6567 invalidate (XEXP (dest, 0), GET_MODE (dest));
6568 else if (code == REG || code == SUBREG || code == MEM)
6569 invalidate (dest, VOIDmode);
6570 }
6571
6572 /* Invalidate all insns from START up to the end of the function or the
6573 next label. This is called when we wish to CSE around a block that is
6574 conditionally executed. */
6575
6576 static void
6577 invalidate_skipped_block (rtx start)
6578 {
6579 rtx insn;
6580
6581 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6582 insn = NEXT_INSN (insn))
6583 {
6584 if (! INSN_P (insn))
6585 continue;
6586
6587 if (GET_CODE (insn) == CALL_INSN)
6588 {
6589 if (! CONST_OR_PURE_CALL_P (insn))
6590 invalidate_memory ();
6591 invalidate_for_call ();
6592 }
6593
6594 invalidate_from_clobbers (PATTERN (insn));
6595 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6596 }
6597 }
6598 \f
6599 /* If modifying X will modify the value in *DATA (which is really an
6600 `rtx *'), indicate that fact by setting the pointed to value to
6601 NULL_RTX. */
6602
6603 static void
6604 cse_check_loop_start (rtx x, rtx set ATTRIBUTE_UNUSED, void *data)
6605 {
6606 rtx *cse_check_loop_start_value = (rtx *) data;
6607
6608 if (*cse_check_loop_start_value == NULL_RTX
6609 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6610 return;
6611
6612 if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6613 || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6614 *cse_check_loop_start_value = NULL_RTX;
6615 }
6616
6617 /* X is a SET or CLOBBER contained in INSN that was found near the start of
6618 a loop that starts with the label at LOOP_START.
6619
6620 If X is a SET, we see if its SET_SRC is currently in our hash table.
6621 If so, we see if it has a value equal to some register used only in the
6622 loop exit code (as marked by jump.c).
6623
6624 If those two conditions are true, we search backwards from the start of
6625 the loop to see if that same value was loaded into a register that still
6626 retains its value at the start of the loop.
6627
6628 If so, we insert an insn after the load to copy the destination of that
6629 load into the equivalent register and (try to) replace our SET_SRC with that
6630 register.
6631
6632 In any event, we invalidate whatever this SET or CLOBBER modifies. */
6633
6634 static void
6635 cse_set_around_loop (rtx x, rtx insn, rtx loop_start)
6636 {
6637 struct table_elt *src_elt;
6638
6639 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6640 are setting PC or CC0 or whose SET_SRC is already a register. */
6641 if (GET_CODE (x) == SET
6642 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6643 && GET_CODE (SET_SRC (x)) != REG)
6644 {
6645 src_elt = lookup (SET_SRC (x),
6646 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6647 GET_MODE (SET_DEST (x)));
6648
6649 if (src_elt)
6650 for (src_elt = src_elt->first_same_value; src_elt;
6651 src_elt = src_elt->next_same_value)
6652 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6653 && COST (src_elt->exp) < COST (SET_SRC (x)))
6654 {
6655 rtx p, set;
6656
6657 /* Look for an insn in front of LOOP_START that sets
6658 something in the desired mode to SET_SRC (x) before we hit
6659 a label or CALL_INSN. */
6660
6661 for (p = prev_nonnote_insn (loop_start);
6662 p && GET_CODE (p) != CALL_INSN
6663 && GET_CODE (p) != CODE_LABEL;
6664 p = prev_nonnote_insn (p))
6665 if ((set = single_set (p)) != 0
6666 && GET_CODE (SET_DEST (set)) == REG
6667 && GET_MODE (SET_DEST (set)) == src_elt->mode
6668 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6669 {
6670 /* We now have to ensure that nothing between P
6671 and LOOP_START modified anything referenced in
6672 SET_SRC (x). We know that nothing within the loop
6673 can modify it, or we would have invalidated it in
6674 the hash table. */
6675 rtx q;
6676 rtx cse_check_loop_start_value = SET_SRC (x);
6677 for (q = p; q != loop_start; q = NEXT_INSN (q))
6678 if (INSN_P (q))
6679 note_stores (PATTERN (q),
6680 cse_check_loop_start,
6681 &cse_check_loop_start_value);
6682
6683 /* If nothing was changed and we can replace our
6684 SET_SRC, add an insn after P to copy its destination
6685 to what we will be replacing SET_SRC with. */
6686 if (cse_check_loop_start_value
6687 && single_set (p)
6688 && !can_throw_internal (insn)
6689 && validate_change (insn, &SET_SRC (x),
6690 src_elt->exp, 0))
6691 {
6692 /* If this creates new pseudos, this is unsafe,
6693 because the regno of new pseudo is unsuitable
6694 to index into reg_qty when cse_insn processes
6695 the new insn. Therefore, if a new pseudo was
6696 created, discard this optimization. */
6697 int nregs = max_reg_num ();
6698 rtx move
6699 = gen_move_insn (src_elt->exp, SET_DEST (set));
6700 if (nregs != max_reg_num ())
6701 {
6702 if (! validate_change (insn, &SET_SRC (x),
6703 SET_SRC (set), 0))
6704 abort ();
6705 }
6706 else
6707 {
6708 if (CONSTANT_P (SET_SRC (set))
6709 && ! find_reg_equal_equiv_note (insn))
6710 set_unique_reg_note (insn, REG_EQUAL,
6711 SET_SRC (set));
6712 if (control_flow_insn_p (p))
6713 /* p can cause a control flow transfer so it
6714 is the last insn of a basic block. We therefore
6715 can't use emit_insn_after. */
6716 emit_insn_before (move, next_nonnote_insn (p));
6717 else
6718 emit_insn_after (move, p);
6719 }
6720 }
6721 break;
6722 }
6723 }
6724 }
6725
6726 /* Deal with the destination of X affecting the stack pointer. */
6727 addr_affects_sp_p (SET_DEST (x));
6728
6729 /* See comment on similar code in cse_insn for explanation of these
6730 tests. */
6731 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6732 || GET_CODE (SET_DEST (x)) == MEM)
6733 invalidate (SET_DEST (x), VOIDmode);
6734 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6735 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6736 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6737 }
6738 \f
6739 /* Find the end of INSN's basic block and return its range,
6740 the total number of SETs in all the insns of the block, the last insn of the
6741 block, and the branch path.
6742
6743 The branch path indicates which branches should be followed. If a nonzero
6744 path size is specified, the block should be rescanned and a different set
6745 of branches will be taken. The branch path is only used if
6746 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
6747
6748 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6749 used to describe the block. It is filled in with the information about
6750 the current block. The incoming structure's branch path, if any, is used
6751 to construct the output branch path. */
6752
6753 void
6754 cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
6755 int follow_jumps, int after_loop, int skip_blocks)
6756 {
6757 rtx p = insn, q;
6758 int nsets = 0;
6759 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6760 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6761 int path_size = data->path_size;
6762 int path_entry = 0;
6763 int i;
6764
6765 /* Update the previous branch path, if any. If the last branch was
6766 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
6767 shorten the path by one and look at the previous branch. We know that
6768 at least one branch must have been taken if PATH_SIZE is nonzero. */
6769 while (path_size > 0)
6770 {
6771 if (data->path[path_size - 1].status != NOT_TAKEN)
6772 {
6773 data->path[path_size - 1].status = NOT_TAKEN;
6774 break;
6775 }
6776 else
6777 path_size--;
6778 }
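/* Worked example of the update above: a previous path
   (TAKEN, TAKEN) becomes (TAKEN, NOT_TAKEN); when the last entry is
   already NOT_TAKEN it is dropped and the previous branch is
   flipped, so the following call yields (NOT_TAKEN), and after that
   the path is empty.  Rescans thus enumerate the branch choices in
   a backtracking order.  */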
6779
6780 /* If the first instruction is marked with QImode, that means we've
6781 already processed this block. Our caller will look at DATA->LAST
6782 to figure out where to go next. We want to return the next block
6783 in the instruction stream, not some branched-to block somewhere
6784 else. We accomplish this by pretending our caller forbade us to
6785 follow jumps or skip blocks. */
6786 if (GET_MODE (insn) == QImode)
6787 follow_jumps = skip_blocks = 0;
6788
6789 /* Scan to end of this basic block. */
6790 while (p && GET_CODE (p) != CODE_LABEL)
6791 {
6792 /* Don't cse out the end of a loop. This makes a difference
6793 only for the unusual loops that always execute at least once;
6794 all other loops have labels there so we will stop in any case.
6795 Cse'ing out the end of the loop is dangerous because it
6796 might cause an invariant expression inside the loop
6797 to be reused after the end of the loop. This would make it
6798 hard to move the expression out of the loop in loop.c,
6799 especially if it is one of several equivalent expressions
6800 and loop.c would like to eliminate it.
6801
6802 If we are running after loop.c has finished, we can ignore
6803 the NOTE_INSN_LOOP_END. */
6804
6805 if (! after_loop && GET_CODE (p) == NOTE
6806 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6807 break;
6808
6809 /* Don't cse over a call to setjmp; on some machines (eg VAX)
6810 the regs restored by the longjmp come from
6811 a later time than the setjmp. */
6812 if (PREV_INSN (p) && GET_CODE (PREV_INSN (p)) == CALL_INSN
6813 && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6814 break;
6815
6816 /* A PARALLEL can have lots of SETs in it,
6817 especially if it is really an ASM_OPERANDS. */
6818 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6819 nsets += XVECLEN (PATTERN (p), 0);
6820 else if (GET_CODE (p) != NOTE)
6821 nsets += 1;
6822
6823 /* Ignore insns made by CSE; they cannot affect the boundaries of
6824 the basic block. */
6825
6826 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6827 high_cuid = INSN_CUID (p);
6828 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6829 low_cuid = INSN_CUID (p);
6830
6831 /* See if this insn is in our branch path. If it is and we are to
6832 take it, do so. */
6833 if (path_entry < path_size && data->path[path_entry].branch == p)
6834 {
6835 if (data->path[path_entry].status != NOT_TAKEN)
6836 p = JUMP_LABEL (p);
6837
6838 /* Point to next entry in path, if any. */
6839 path_entry++;
6840 }
6841
6842 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6843 was specified, we haven't reached our maximum path length, there are
6844 insns following the target of the jump, this is the only use of the
6845 jump label, and the target label is preceded by a BARRIER.
6846
6847 Alternatively, we can follow the jump if it branches around a
6848 block of code and there are no other branches into the block.
6849 In this case invalidate_skipped_block will be called to invalidate any
6850 registers set in the block when following the jump. */
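/* Concretely, the only jumps considered below are simple conditional
   branches whose pattern has the shape (an illustrative sketch; LABEL
   stands for the branch target):

       (jump_insn (set (pc)
                       (if_then_else (cond ...)
                                     (label_ref LABEL)
                                     (pc))))

   where LABEL has exactly one use and is followed by at least one
   insn.  */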
6851
6852 else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
6853 && GET_CODE (p) == JUMP_INSN
6854 && GET_CODE (PATTERN (p)) == SET
6855 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6856 && JUMP_LABEL (p) != 0
6857 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6858 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6859 {
6860 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6861 if ((GET_CODE (q) != NOTE
6862 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6863 || (PREV_INSN (q) && GET_CODE (PREV_INSN (q)) == CALL_INSN
6864 && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6865 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
6866 break;
6867
6868 /* If we ran into a BARRIER, this code is an extension of the
6869 basic block when the branch is taken. */
6870 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
6871 {
6872 /* Don't allow ourselves to keep walking around an
6873 always-executed loop. */
6874 if (next_real_insn (q) == next)
6875 {
6876 p = NEXT_INSN (p);
6877 continue;
6878 }
6879
6880 /* Similarly, don't put a branch in our path more than once. */
6881 for (i = 0; i < path_entry; i++)
6882 if (data->path[i].branch == p)
6883 break;
6884
6885 if (i != path_entry)
6886 break;
6887
6888 data->path[path_entry].branch = p;
6889 data->path[path_entry++].status = TAKEN;
6890
6891 /* This branch now ends our path. It was possible that we
6892 didn't see this branch the last time around (when the
6893 insn in front of the target was a JUMP_INSN that was
6894 turned into a no-op). */
6895 path_size = path_entry;
6896
6897 p = JUMP_LABEL (p);
6898 /* Mark block so we won't scan it again later. */
6899 PUT_MODE (NEXT_INSN (p), QImode);
6900 }
6901 /* Detect a branch around a block of code. */
6902 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
6903 {
6904 rtx tmp;
6905
6906 if (next_real_insn (q) == next)
6907 {
6908 p = NEXT_INSN (p);
6909 continue;
6910 }
6911
6912 for (i = 0; i < path_entry; i++)
6913 if (data->path[i].branch == p)
6914 break;
6915
6916 if (i != path_entry)
6917 break;
6918
6919 /* This is no_labels_between_p (p, q) with an added check for
6920 reaching the end of a function (in case Q precedes P). */
6921 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6922 if (GET_CODE (tmp) == CODE_LABEL)
6923 break;
6924
6925 if (tmp == q)
6926 {
6927 data->path[path_entry].branch = p;
6928 data->path[path_entry++].status = AROUND;
6929
6930 path_size = path_entry;
6931
6932 p = JUMP_LABEL (p);
6933 /* Mark block so we won't scan it again later. */
6934 PUT_MODE (NEXT_INSN (p), QImode);
6935 }
6936 }
6937 }
6938 p = NEXT_INSN (p);
6939 }
6940
6941 data->low_cuid = low_cuid;
6942 data->high_cuid = high_cuid;
6943 data->nsets = nsets;
6944 data->last = p;
6945
6946 /* If none of the jumps in the path were taken, set our path length to
6947 zero so a rescan won't be done. */
6948 for (i = path_size - 1; i >= 0; i--)
6949 if (data->path[i].status != NOT_TAKEN)
6950 break;
6951
6952 if (i == -1)
6953 data->path_size = 0;
6954 else
6955 data->path_size = path_size;
6956
6957 /* End the current branch path. */
6958 data->path[path_size].branch = 0;
6959 }
6960 \f
6961 /* Perform cse on the instructions of a function.
6962 F is the first instruction.
6963 NREGS is one plus the highest pseudo-reg number used in the function.
6964
6965 AFTER_LOOP is 1 if this is the cse call done after loop optimization
6966 (only if -frerun-cse-after-loop).
6967
6968 Returns 1 if jump_optimize should be redone due to simplifications
6969 in conditional jump instructions. */
6970
6971 int
6972 cse_main (rtx f, int nregs, int after_loop, FILE *file)
6973 {
6974 struct cse_basic_block_data val;
6975 rtx insn = f;
6976 int i;
6977
6978 val.path = xmalloc (sizeof (struct branch_path)
6979 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6980
6981 cse_jumps_altered = 0;
6982 recorded_label_ref = 0;
6983 constant_pool_entries_cost = 0;
6984 constant_pool_entries_regcost = 0;
6985 val.path_size = 0;
6986
6987 init_recog ();
6988 init_alias_analysis ();
6989
6990 max_reg = nregs;
6991
6992 max_insn_uid = get_max_uid ();
6993
6994 reg_eqv_table = xmalloc (nregs * sizeof (struct reg_eqv_elem));
6995
6996 #ifdef LOAD_EXTEND_OP
6997
6998 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
6999 and change the code and mode as appropriate. */
7000 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
7001 #endif
7002
7003 /* Reset the counter indicating how many elements have been made
7004 thus far. */
7005 n_elements_made = 0;
7006
7007 /* Find the largest uid. */
7008
7009 max_uid = get_max_uid ();
7010 uid_cuid = xcalloc (max_uid + 1, sizeof (int));
7011
7012 /* Compute the mapping from uids to cuids.
7013 CUIDs are numbers assigned to insns, like uids,
7014 except that cuids increase monotonically through the code.
7015 Don't assign cuids to line-number NOTEs, so that the distance in cuids
7016 between two insns is not affected by -g. */
7017
7018 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
7019 {
7020 if (GET_CODE (insn) != NOTE
7021 || NOTE_LINE_NUMBER (insn) < 0)
7022 INSN_CUID (insn) = ++i;
7023 else
7024 /* Give a line number note the same cuid as preceding insn. */
7025 INSN_CUID (insn) = i;
7026 }
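/* An illustrative mapping (uids invented for the example):

       uid 10  insn                cuid 1
       uid 11  insn                cuid 2
       uid 12  line-number note    cuid 2   (shares the cuid)
       uid 13  insn                cuid 3

   so the cuid distance between the two real insns is the same
   whether or not -g inserted the note.  */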
7027
7028 ggc_push_context ();
7029
7030 /* Loop over basic blocks.
7031 Compute the maximum number of qty's needed for each basic block
7032 (which is 2 for each SET). */
7033 insn = f;
7034 while (insn)
7035 {
7036 cse_altered = 0;
7037 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
7038 flag_cse_skip_blocks);
7039
7040 /* If this basic block was already processed or has no sets, skip it. */
7041 if (val.nsets == 0 || GET_MODE (insn) == QImode)
7042 {
7043 PUT_MODE (insn, VOIDmode);
7044 insn = (val.last ? NEXT_INSN (val.last) : 0);
7045 val.path_size = 0;
7046 continue;
7047 }
7048
7049 cse_basic_block_start = val.low_cuid;
7050 cse_basic_block_end = val.high_cuid;
7051 max_qty = val.nsets * 2;
7052
7053 if (file)
7054 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
7055 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7056 val.nsets);
7057
7058 /* Make MAX_QTY bigger to give us room to optimize
7059 past the end of this basic block, if that should prove useful. */
7060 if (max_qty < 500)
7061 max_qty = 500;
7062
7063 max_qty += max_reg;
7064
7065 /* If this basic block is being extended by following certain jumps,
7066 (see `cse_end_of_basic_block'), we reprocess the code from the start.
7067 Otherwise, we start after this basic block. */
7068 if (val.path_size > 0)
7069 cse_basic_block (insn, val.last, val.path, 0);
7070 else
7071 {
7072 int old_cse_jumps_altered = cse_jumps_altered;
7073 rtx temp;
7074
7075 /* When cse changes a conditional jump to an unconditional
7076 jump, we want to reprocess the block, since it will give
7077 us a new branch path to investigate. */
7078 cse_jumps_altered = 0;
7079 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
7080 if (cse_jumps_altered == 0
7081 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7082 insn = temp;
7083
7084 cse_jumps_altered |= old_cse_jumps_altered;
7085 }
7086
7087 if (cse_altered)
7088 ggc_collect ();
7089
7090 #ifdef USE_C_ALLOCA
7091 alloca (0);
7092 #endif
7093 }
7094
7095 ggc_pop_context ();
7096
7097 if (max_elements_made < n_elements_made)
7098 max_elements_made = n_elements_made;
7099
7100 /* Clean up. */
7101 end_alias_analysis ();
7102 free (uid_cuid);
7103 free (reg_eqv_table);
7104 free (val.path);
7105
7106 return cse_jumps_altered || recorded_label_ref;
7107 }
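/* A caller would drive this pass roughly as follows.  This is a
   hedged sketch only: the real call site lives in the compilation
   driver, and rerun_jump_opt is a hypothetical stand-in for
   rerunning the jump optimizer.

       int rerun = cse_main (get_insns (), max_reg_num (), 0,
                             rtl_dump_file);
       if (rerun)
         rerun_jump_opt ();
*/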
7108
7109 /* Process a single basic block. FROM and TO are the limits of the basic
7110 block. NEXT_BRANCH points to the branch path when following jumps or
7111 a null path when not following jumps.
7112
7113 AROUND_LOOP is nonzero if we are to try to cse around to the start of a
7114 loop. This is true when we are being called for the last time on a
7115 block and this CSE pass is before loop.c. */
7116
7117 static rtx
7118 cse_basic_block (rtx from, rtx to, struct branch_path *next_branch,
7119 int around_loop)
7120 {
7121 rtx insn;
7122 int to_usage = 0;
7123 rtx libcall_insn = NULL_RTX;
7124 int num_insns = 0;
7125 int no_conflict = 0;
7126
7127 /* This array is undefined before max_reg, so only allocate
7128 the space actually needed and adjust the start. */
7129
7130 qty_table = xmalloc ((max_qty - max_reg) * sizeof (struct qty_table_elem));
7131 qty_table -= max_reg;
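/* In other words (illustrative): after the adjustment, qty_table may
   be indexed directly by quantity number, which runs from max_reg up
   to max_qty, so qty_table[max_reg] is element 0 of the allocation.
   Every free below must undo the bias:

       free (qty_table + max_reg);  */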
7132
7133 new_basic_block ();
7134
7135 /* TO might be a label. If so, protect it from being deleted. */
7136 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7137 ++LABEL_NUSES (to);
7138
7139 for (insn = from; insn != to; insn = NEXT_INSN (insn))
7140 {
7141 enum rtx_code code = GET_CODE (insn);
7142
7143 /* If we have processed 1,000 insns, flush the hash table to
7144 avoid extreme quadratic behavior. We must not include NOTEs
7145 in the count since there may be more of them when generating
7146 debugging information. If we clear the table at different
7147 times, code generated with -g -O might be different from code
7148 generated with -O but not -g.
7149
7150 ??? This is a real kludge and needs to be done some other way.
7151 Perhaps for 2.9. */
7152 if (code != NOTE && num_insns++ > 1000)
7153 {
7154 flush_hash_table ();
7155 num_insns = 0;
7156 }
7157
7158 /* See if this is a branch that is part of the path. If so, and it is
7159 to be taken, do so. */
7160 if (next_branch->branch == insn)
7161 {
7162 enum taken status = next_branch++->status;
7163 if (status != NOT_TAKEN)
7164 {
7165 if (status == TAKEN)
7166 record_jump_equiv (insn, 1);
7167 else
7168 invalidate_skipped_block (NEXT_INSN (insn));
7169
7170 /* Set the last insn as the jump insn; it doesn't affect cc0.
7171 Then follow this branch. */
7172 #ifdef HAVE_cc0
7173 prev_insn_cc0 = 0;
7174 prev_insn = insn;
7175 #endif
7176 insn = JUMP_LABEL (insn);
7177 continue;
7178 }
7179 }
7180
7181 if (GET_MODE (insn) == QImode)
7182 PUT_MODE (insn, VOIDmode);
7183
7184 if (GET_RTX_CLASS (code) == 'i')
7185 {
7186 rtx p;
7187
7188 /* Process notes first so we have all notes in canonical forms when
7189 looking for duplicate operations. */
7190
7191 if (REG_NOTES (insn))
7192 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7193
7194 /* Track when we are inside a LIBCALL block. Inside such a block,
7195 we do not want to record destinations. The last insn of a
7196 LIBCALL block is not considered to be part of the block, since
7197 its destination is the result of the block and hence should be
7198 recorded. */
7199
7200 if (REG_NOTES (insn) != 0)
7201 {
7202 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7203 libcall_insn = XEXP (p, 0);
7204 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7205 {
7206 /* Keep libcall_insn for the last SET insn of a no-conflict
7207 block to prevent changing the destination. */
7208 if (! no_conflict)
7209 libcall_insn = 0;
7210 else
7211 no_conflict = -1;
7212 }
7213 else if (find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
7214 no_conflict = 1;
7215 }
7216
7217 cse_insn (insn, libcall_insn);
7218
7219 if (no_conflict == -1)
7220 {
7221 libcall_insn = 0;
7222 no_conflict = 0;
7223 }
7224
7225 /* If we haven't already found an insn where we added a LABEL_REF,
7226 check this one. */
7227 if (GET_CODE (insn) == INSN && ! recorded_label_ref
7228 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
7229 (void *) insn))
7230 recorded_label_ref = 1;
7231 }
7232
7233 /* If INSN is now an unconditional jump, skip to the end of our
7234 basic block by pretending that we just did the last insn in the
7235 basic block. If we are jumping to the end of our block, show
7236 that we can have one usage of TO. */
7237
7238 if (any_uncondjump_p (insn))
7239 {
7240 if (to == 0)
7241 {
7242 free (qty_table + max_reg);
7243 return 0;
7244 }
7245
7246 if (JUMP_LABEL (insn) == to)
7247 to_usage = 1;
7248
7249 /* Maybe TO was deleted because the jump is unconditional.
7250 If so, there is nothing left in this basic block. */
7251 /* ??? Perhaps it would be smarter to set TO
7252 to whatever follows this insn,
7253 and pretend the basic block had always ended here. */
7254 if (INSN_DELETED_P (to))
7255 break;
7256
7257 insn = PREV_INSN (to);
7258 }
7259
7260 /* See if it is ok to keep on going past the label
7261 which used to end our basic block. Remember that we incremented
7262 the count of that label, so we decrement it here. If we made
7263 a jump unconditional, TO_USAGE will be one; in that case, we don't
7264 want to count the use in that jump. */
7265
7266 if (to != 0 && NEXT_INSN (insn) == to
7267 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7268 {
7269 struct cse_basic_block_data val;
7270 rtx prev;
7271
7272 insn = NEXT_INSN (to);
7273
7274 /* If TO was the last insn in the function, we are done. */
7275 if (insn == 0)
7276 {
7277 free (qty_table + max_reg);
7278 return 0;
7279 }
7280
7281 /* If TO was preceded by a BARRIER we are done with this block
7282 because it has no continuation. */
7283 prev = prev_nonnote_insn (to);
7284 if (prev && GET_CODE (prev) == BARRIER)
7285 {
7286 free (qty_table + max_reg);
7287 return insn;
7288 }
7289
7290 /* Find the end of the following block. Note that we won't be
7291 following branches in this case. */
7292 to_usage = 0;
7293 val.path_size = 0;
7294 val.path = xmalloc (sizeof (struct branch_path)
7295 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
7296 cse_end_of_basic_block (insn, &val, 0, 0, 0);
7297 free (val.path);
7298
7299 /* If the tables we allocated have enough space left
7300 to handle all the SETs in the next basic block,
7301 continue through it. Otherwise, return,
7302 and that block will be scanned individually. */
7303 if (val.nsets * 2 + next_qty > max_qty)
7304 break;
7305
7306 cse_basic_block_start = val.low_cuid;
7307 cse_basic_block_end = val.high_cuid;
7308 to = val.last;
7309
7310 /* Prevent TO from being deleted if it is a label. */
7311 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7312 ++LABEL_NUSES (to);
7313
7314 /* Back up so we process the first insn in the extension. */
7315 insn = PREV_INSN (insn);
7316 }
7317 }
7318
7319 if (next_qty > max_qty)
7320 abort ();
7321
7322 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7323 the previous insn is the only insn that branches to the head of a loop,
7324 we can cse into the loop. Don't do this if we changed the jump
7325 structure of a loop unless we aren't going to be following jumps. */
7326
7327 insn = prev_nonnote_insn (to);
7328 if ((cse_jumps_altered == 0
7329 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7330 && around_loop && to != 0
7331 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7332 && GET_CODE (insn) == JUMP_INSN
7333 && JUMP_LABEL (insn) != 0
7334 && LABEL_NUSES (JUMP_LABEL (insn)) == 1)
7335 cse_around_loop (JUMP_LABEL (insn));
7336
7337 free (qty_table + max_reg);
7338
7339 return to ? NEXT_INSN (to) : 0;
7340 }
7341 \f
7342 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7343 there isn't a REG_LABEL note. Return one if so. DATA is the insn. */
7344
7345 static int
7346 check_for_label_ref (rtx *rtl, void *data)
7347 {
7348 rtx insn = (rtx) data;
7349
7350 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7351 we must rerun jump since it needs to place the note. If this is a
7352 LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7353 since no REG_LABEL will be added. */
7354 return (GET_CODE (*rtl) == LABEL_REF
7355 && ! LABEL_REF_NONLOCAL_P (*rtl)
7356 && LABEL_P (XEXP (*rtl, 0))
7357 && INSN_UID (XEXP (*rtl, 0)) != 0
7358 && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7359 }
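/* For instance, an insn that loads a label address, sketched
   (register number and label invented) as

       (insn ... (set (reg 100) (label_ref L)) ...)

   without a matching REG_LABEL note makes this return 1, which in
   turn forces the jump pass to be rerun so the note gets added.  */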
7360 \f
7361 /* Count the number of times registers are used (not set) in X.
7362 COUNTS is an array in which we accumulate the count, INCR is how much
7363 we count each register usage. */
7364
7365 static void
7366 count_reg_usage (rtx x, int *counts, int incr)
7367 {
7368 enum rtx_code code;
7369 rtx note;
7370 const char *fmt;
7371 int i, j;
7372
7373 if (x == 0)
7374 return;
7375
7376 switch (code = GET_CODE (x))
7377 {
7378 case REG:
7379 counts[REGNO (x)] += incr;
7380 return;
7381
7382 case PC:
7383 case CC0:
7384 case CONST:
7385 case CONST_INT:
7386 case CONST_DOUBLE:
7387 case CONST_VECTOR:
7388 case SYMBOL_REF:
7389 case LABEL_REF:
7390 return;
7391
7392 case CLOBBER:
7393 /* If we are clobbering a MEM, mark any registers inside the address
7394 as being used. */
7395 if (GET_CODE (XEXP (x, 0)) == MEM)
7396 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, incr);
7397 return;
7398
7399 case SET:
7400 /* Unless we are setting a REG, count everything in SET_DEST. */
7401 if (GET_CODE (SET_DEST (x)) != REG)
7402 count_reg_usage (SET_DEST (x), counts, incr);
7403 count_reg_usage (SET_SRC (x), counts, incr);
7404 return;
7405
7406 case CALL_INSN:
7407 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, incr);
7408 /* Fall through. */
7409
7410 case INSN:
7411 case JUMP_INSN:
7412 count_reg_usage (PATTERN (x), counts, incr);
7413
7414 /* Things used in a REG_EQUAL note aren't dead since loop may try to
7415 use them. */
7416
7417 note = find_reg_equal_equiv_note (x);
7418 if (note)
7419 {
7420 rtx eqv = XEXP (note, 0);
7421
7422 if (GET_CODE (eqv) == EXPR_LIST)
7423 /* This REG_EQUAL note describes the result of a function call.
7424 Process all the arguments. */
7425 do
7426 {
7427 count_reg_usage (XEXP (eqv, 0), counts, incr);
7428 eqv = XEXP (eqv, 1);
7429 }
7430 while (eqv && GET_CODE (eqv) == EXPR_LIST);
7431 else
7432 count_reg_usage (eqv, counts, incr);
7433 }
7434 return;
7435
7436 case EXPR_LIST:
7437 if (REG_NOTE_KIND (x) == REG_EQUAL
7438 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
7439 /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
7440 involving registers in the address. */
7441 || GET_CODE (XEXP (x, 0)) == CLOBBER)
7442 count_reg_usage (XEXP (x, 0), counts, incr);
7443
7444 count_reg_usage (XEXP (x, 1), counts, incr);
7445 return;
7446
7447 case ASM_OPERANDS:
7448 /* Iterate over just the inputs, not the constraints as well. */
7449 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
7450 count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, incr);
7451 return;
7452
7453 case INSN_LIST:
7454 abort ();
7455
7456 default:
7457 break;
7458 }
7459
7460 fmt = GET_RTX_FORMAT (code);
7461 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7462 {
7463 if (fmt[i] == 'e')
7464 count_reg_usage (XEXP (x, i), counts, incr);
7465 else if (fmt[i] == 'E')
7466 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7467 count_reg_usage (XVECEXP (x, i, j), counts, incr);
7468 }
7469 }
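/* As an illustration (register numbers invented): scanning

       (set (reg 100) (plus (reg 101) (reg 102)))

   with INCR == 1 bumps counts[101] and counts[102] but leaves
   counts[100] alone, since a plain REG destination is a set rather
   than a use.  Had the destination been (mem (reg 103)), counts[103]
   would have been bumped too, because the address is a use.  */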
7470 \f
7471 /* Return true if SET is live. */
7472 static bool
7473 set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0. */
7474 int *counts)
7475 {
7476 #ifdef HAVE_cc0
7477 rtx tem;
7478 #endif
7479
7480 if (set_noop_p (set))
7481 ;
7482
7483 #ifdef HAVE_cc0
7484 else if (GET_CODE (SET_DEST (set)) == CC0
7485 && !side_effects_p (SET_SRC (set))
7486 && ((tem = next_nonnote_insn (insn)) == 0
7487 || !INSN_P (tem)
7488 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7489 return false;
7490 #endif
7491 else if (GET_CODE (SET_DEST (set)) != REG
7492 || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7493 || counts[REGNO (SET_DEST (set))] != 0
7494 || side_effects_p (SET_SRC (set))
7495 /* An ADDRESSOF expression can turn into a use of the
7496 internal arg pointer, so always consider the
7497 internal arg pointer live. If it is truly dead,
7498 flow will delete the initializing insn. */
7499 || (SET_DEST (set) == current_function_internal_arg_pointer))
7500 return true;
7501 return false;
7502 }
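/* Example (illustrative): with counts[130] == 0 the set

       (set (reg 130) (reg 131))

   is reported dead, since reg 130 is an unused pseudo and the source
   has no side effects.  The same set with counts[130] > 0, with a
   hard register destination, or with a source that has side effects
   would be reported live.  */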
7503
7504 /* Return true if INSN is live. */
7505
7506 static bool
7507 insn_live_p (rtx insn, int *counts)
7508 {
7509 int i;
7510 if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
7511 return true;
7512 else if (GET_CODE (PATTERN (insn)) == SET)
7513 return set_live_p (PATTERN (insn), insn, counts);
7514 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7515 {
7516 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7517 {
7518 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7519
7520 if (GET_CODE (elt) == SET)
7521 {
7522 if (set_live_p (elt, insn, counts))
7523 return true;
7524 }
7525 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7526 return true;
7527 }
7528 return false;
7529 }
7530 else
7531 return true;
7532 }
7533
7534 /* Return true if the libcall ending at INSN is dead as a whole. */
7535
7536 static bool
7537 dead_libcall_p (rtx insn, int *counts)
7538 {
7539 rtx note, set, new;
7540
7541 /* See if there's a REG_EQUAL note on this insn and try to
7542 replace the source with the REG_EQUAL expression.
7543
7544 We assume that insns with REG_RETVALs can only be reg->reg
7545 copies at this point. */
7546 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7547 if (!note)
7548 return false;
7549
7550 set = single_set (insn);
7551 if (!set)
7552 return false;
7553
7554 new = simplify_rtx (XEXP (note, 0));
7555 if (!new)
7556 new = XEXP (note, 0);
7557
7558 /* While changing insn, we must update the counts accordingly. */
7559 count_reg_usage (insn, counts, -1);
7560
7561 if (validate_change (insn, &SET_SRC (set), new, 0))
7562 {
7563 count_reg_usage (insn, counts, 1);
7564 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7565 remove_note (insn, note);
7566 return true;
7567 }
7568
7569 if (CONSTANT_P (new))
7570 {
7571 new = force_const_mem (GET_MODE (SET_DEST (set)), new);
7572 if (new && validate_change (insn, &SET_SRC (set), new, 0))
7573 {
7574 count_reg_usage (insn, counts, 1);
7575 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7576 remove_note (insn, note);
7577 return true;
7578 }
7579 }
7580
7581 count_reg_usage (insn, counts, 1);
7582 return false;
7583 }
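/* Sketch of the transformation (values invented): for the final copy
   insn of a libcall block,

       (set (reg 140) (reg 141))   carrying a (REG_EQUAL (const_int 42)) note,

   we try to rewrite the source to (const_int 42), or, failing that,
   to a constant-pool reference when the value is constant, and then
   strip the REG_RETVAL and REG_EQUAL notes so the block's remaining
   insns can be deleted individually.  */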
7584
7585 /* Scan all the insns and delete any that are dead; i.e., they store a register
7586 that is never used or they copy a register to itself.
7587
7588 This is used to remove insns made obviously dead by cse, loop or other
7589 optimizations. It improves the heuristics in loop since it won't try to
7590 move dead invariants out of loops or make givs for dead quantities. The
7591 remaining passes of the compilation are also sped up. */
7592
7593 int
7594 delete_trivially_dead_insns (rtx insns, int nreg)
7595 {
7596 int *counts;
7597 rtx insn, prev;
7598 int in_libcall = 0, dead_libcall = 0;
7599 int ndead = 0, nlastdead, niterations = 0;
7600
7601 timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7602 /* First count the number of times each register is used. */
7603 counts = xcalloc (nreg, sizeof (int));
7604 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7605 count_reg_usage (insn, counts, 1);
7606
7607 do
7608 {
7609 nlastdead = ndead;
7610 niterations++;
7611 /* Go from the last insn to the first and delete insns that only set unused
7612 registers or copy a register to itself. As we delete an insn, remove
7613 usage counts for registers it uses.
7614
7615 The first jump optimization pass may leave a real insn as the last
7616 insn in the function. We must not skip that insn or we may end
7617 up deleting code that is not really dead. */
7618 insn = get_last_insn ();
7619 if (! INSN_P (insn))
7620 insn = prev_real_insn (insn);
7621
7622 for (; insn; insn = prev)
7623 {
7624 int live_insn = 0;
7625
7626 prev = prev_real_insn (insn);
7627
7628 /* Don't delete any insns that are part of a libcall block unless
7629 we can delete the whole libcall block.
7630
7631 Flow or loop might get confused if we did that. Remember
7632 that we are scanning backwards. */
7633 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7634 {
7635 in_libcall = 1;
7636 live_insn = 1;
7637 dead_libcall = dead_libcall_p (insn, counts);
7638 }
7639 else if (in_libcall)
7640 live_insn = ! dead_libcall;
7641 else
7642 live_insn = insn_live_p (insn, counts);
7643
7644 /* If this is a dead insn, delete it and show registers in it aren't
7645 being used. */
7646
7647 if (! live_insn)
7648 {
7649 count_reg_usage (insn, counts, -1);
7650 delete_insn_and_edges (insn);
7651 ndead++;
7652 }
7653
7654 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7655 {
7656 in_libcall = 0;
7657 dead_libcall = 0;
7658 }
7659 }
7660 }
7661 while (ndead != nlastdead);
7662
7663 if (rtl_dump_file && ndead)
7664 fprintf (rtl_dump_file, "Deleted %i trivially dead insns; %i iterations\n",
7665 ndead, niterations);
7666 /* Clean up. */
7667 free (counts);
7668 timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7669 return ndead;
7670 }
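/* For example (register numbers invented): once CSE has replaced
   every use of reg 150, the insn

       (set (reg 150) (plus (reg 151) (const_int 4)))

   has counts[150] == 0 and is deleted; the deletion decrements
   counts[151], which may expose the insn computing reg 151 as dead
   on a later iteration.  That cascading is why we loop until no
   further insns die.  */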
7671
7672 /* This function is called via for_each_rtx. The argument, NEWREG, is
7673 a condition code register with the desired mode. If we are looking
7674 at the same register in a different mode, replace it with
7675 NEWREG. */
7676
7677 static int
7678 cse_change_cc_mode (rtx *loc, void *data)
7679 {
7680 rtx newreg = (rtx) data;
7681
7682 if (*loc
7683 && GET_CODE (*loc) == REG
7684 && REGNO (*loc) == REGNO (newreg)
7685 && GET_MODE (*loc) != GET_MODE (newreg))
7686 {
7687 *loc = newreg;
7688 return -1;
7689 }
7690 return 0;
7691 }
7692
7693 /* Change the mode of any reference to the register REGNO (NEWREG) to
7694 GET_MODE (NEWREG), starting at START. Stop before END. Stop at
7695 any instruction after START which modifies NEWREG. */
7696
7697 static void
7698 cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
7699 {
7700 rtx insn;
7701
7702 for (insn = start; insn != end; insn = NEXT_INSN (insn))
7703 {
7704 if (! INSN_P (insn))
7705 continue;
7706
7707 if (insn != start && reg_set_p (newreg, insn))
7708 return;
7709
7710 for_each_rtx (&PATTERN (insn), cse_change_cc_mode, newreg);
7711 for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, newreg);
7712 }
7713 }
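/* Illustration (mode names and register number are hypothetical):
   if NEWREG is (reg:CCZ 17), a reference such as

       (eq (reg:CC 17) (const_int 0))

   in an insn between START and END is rewritten to use
   (reg:CCZ 17); the walk stops as soon as some insn after START
   sets register 17 again.  */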
7714
7715 /* BB is a basic block which finishes with CC_REG as a condition code
7716 register which is set to CC_SRC. Look through the successors of BB
7717 to find blocks which have a single predecessor (i.e., this one),
7718 and look through those blocks for an assignment to CC_REG which is
7719 equivalent to CC_SRC. CAN_CHANGE_MODE indicates whether we are
7720 permitted to change the mode of CC_SRC to a compatible mode. This
7721 returns VOIDmode if no equivalent assignments were found.
7722 Otherwise it returns the mode which CC_SRC should wind up with.
7723
7724 The main complexity in this function is handling the mode issues.
7725 We may have more than one duplicate which we can eliminate, and we
7726 try to find a mode which will work for multiple duplicates. */
7727
7728 static enum machine_mode
7729 cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
7730 {
7731 bool found_equiv;
7732 enum machine_mode mode;
7733 unsigned int insn_count;
7734 edge e;
7735 rtx insns[2];
7736 enum machine_mode modes[2];
7737 rtx last_insns[2];
7738 unsigned int i;
7739 rtx newreg;
7740
7741 /* We expect to have two successors. Look at both before picking
7742 the final mode for the comparison. If we have more successors
7743 (i.e., some sort of table jump, although that seems unlikely),
7744 then we require all beyond the first two to use the same
7745 mode. */
7746
7747 found_equiv = false;
7748 mode = GET_MODE (cc_src);
7749 insn_count = 0;
7750 for (e = bb->succ; e; e = e->succ_next)
7751 {
7752 rtx insn;
7753 rtx end;
7754
7755 if (e->flags & EDGE_COMPLEX)
7756 continue;
7757
7758 if (! e->dest->pred
7759 || e->dest->pred->pred_next
7760 || e->dest == EXIT_BLOCK_PTR)
7761 continue;
7762
7763 end = NEXT_INSN (BB_END (e->dest));
7764 for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
7765 {
7766 rtx set;
7767
7768 if (! INSN_P (insn))
7769 continue;
7770
7771 /* If CC_SRC is modified, we have to stop looking for
7772 something which uses it. */
7773 if (modified_in_p (cc_src, insn))
7774 break;
7775
7776 /* Check whether INSN sets CC_REG to CC_SRC. */
7777 set = single_set (insn);
7778 if (set
7779 && GET_CODE (SET_DEST (set)) == REG
7780 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7781 {
7782 bool found;
7783 enum machine_mode set_mode;
7784 enum machine_mode comp_mode;
7785
7786 found = false;
7787 set_mode = GET_MODE (SET_SRC (set));
7788 comp_mode = set_mode;
7789 if (rtx_equal_p (cc_src, SET_SRC (set)))
7790 found = true;
7791 else if (GET_CODE (cc_src) == COMPARE
7792 && GET_CODE (SET_SRC (set)) == COMPARE
7793 && mode != set_mode
7794 && rtx_equal_p (XEXP (cc_src, 0),
7795 XEXP (SET_SRC (set), 0))
7796 && rtx_equal_p (XEXP (cc_src, 1),
7797 XEXP (SET_SRC (set), 1)))
7798
7800 comp_mode = (*targetm.cc_modes_compatible) (mode, set_mode);
7801 if (comp_mode != VOIDmode
7802 && (can_change_mode || comp_mode == mode))
7803 found = true;
7804 }
7805
7806 if (found)
7807 {
7808 found_equiv = true;
7809 if (insn_count < ARRAY_SIZE (insns))
7810 {
7811 insns[insn_count] = insn;
7812 modes[insn_count] = set_mode;
7813 last_insns[insn_count] = end;
7814 ++insn_count;
7815
7816 if (mode != comp_mode)
7817 {
7818 if (! can_change_mode)
7819 abort ();
7820 mode = comp_mode;
7821 PUT_MODE (cc_src, mode);
7822 }
7823 }
7824 else
7825 {
7826 if (set_mode != mode)
7827 {
7828 /* We found a matching expression in the
7829 wrong mode, but we don't have room to
7830 store it in the array. Punt. This case
7831 should be rare. */
7832 break;
7833 }
7834 /* INSN sets CC_REG to a value equal to CC_SRC
7835 with the right mode. We can simply delete
7836 it. */
7837 delete_insn (insn);
7838 }
7839
7840 /* We found an instruction to delete. Keep looking,
7841 in the hopes of finding a three-way jump. */
7842 continue;
7843 }
7844
7845 /* We found an instruction which sets the condition
7846 code, so don't look any farther. */
7847 break;
7848 }
7849
7850 /* If INSN sets CC_REG in some other way, don't look any
7851 farther. */
7852 if (reg_set_p (cc_reg, insn))
7853 break;
7854 }
7855
7856 /* If we fell off the bottom of the block, we can keep looking
7857 through successors. We pass CAN_CHANGE_MODE as false because
7858 we aren't prepared to handle compatibility between the
7859 further blocks and this block. */
7860 if (insn == end)
7861 {
7862 enum machine_mode submode;
7863
7864 submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
7865 if (submode != VOIDmode)
7866 {
7867 if (submode != mode)
7868 abort ();
7869 found_equiv = true;
7870 can_change_mode = false;
7871 }
7872 }
7873 }
7874
7875 if (! found_equiv)
7876 return VOIDmode;
7877
7878 /* Now INSN_COUNT is the number of instructions we found which set
7879 CC_REG to a value equivalent to CC_SRC. The instructions are in
7880 INSNS. The modes used by those instructions are in MODES. */
7881
7882 newreg = NULL_RTX;
7883 for (i = 0; i < insn_count; ++i)
7884 {
7885 if (modes[i] != mode)
7886 {
7887 /* We need to change the mode of CC_REG in INSNS[i] and
7888 subsequent instructions. */
7889 if (! newreg)
7890 {
7891 if (GET_MODE (cc_reg) == mode)
7892 newreg = cc_reg;
7893 else
7894 newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7895 }
7896 cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7897 newreg);
7898 }
7899
7900 delete_insn (insns[i]);
7901 }
7902
7903 return mode;
7904 }
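/* A typical win, sketched with hypothetical modes: BB ends with

       (set (reg:CC 17) (compare (reg 160) (reg 161)))

   and its single-predecessor successor recomputes the same COMPARE
   into (reg:CCZ 17).  The sources are equal apart from mode, so if
   targetm.cc_modes_compatible merges CC and CCZ (say, into CCZ), the
   duplicate in the successor is deleted and CC_SRC is retagged with
   the merged mode.  */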
7905
7906 /* If we have a fixed condition code register (or two), walk through
7907 the instructions and try to eliminate duplicate assignments. */
7908
7909 void
7910 cse_condition_code_reg (void)
7911 {
7912 unsigned int cc_regno_1;
7913 unsigned int cc_regno_2;
7914 rtx cc_reg_1;
7915 rtx cc_reg_2;
7916 basic_block bb;
7917
7918 if (! (*targetm.fixed_condition_code_regs) (&cc_regno_1, &cc_regno_2))
7919 return;
7920
7921 cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7922 if (cc_regno_2 != INVALID_REGNUM)
7923 cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7924 else
7925 cc_reg_2 = NULL_RTX;
7926
7927 FOR_EACH_BB (bb)
7928 {
7929 rtx last_insn;
7930 rtx cc_reg;
7931 rtx insn;
7932 rtx cc_src_insn;
7933 rtx cc_src;
7934 enum machine_mode mode;
7935 enum machine_mode orig_mode;
7936
7937 /* Look for blocks which end with a conditional jump based on a
7938 condition code register. Then look for the instruction which
7939 sets the condition code register. Then look through the
7940 successor blocks for instructions which set the condition
7941 code register to the same value. There are other possible
7942 uses of the condition code register, but these are by far the
7943 most common and the ones which we are most likely to be able
7944 to optimize. */
7945
7946 last_insn = BB_END (bb);
7947 if (GET_CODE (last_insn) != JUMP_INSN)
7948 continue;
7949
7950 if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7951 cc_reg = cc_reg_1;
7952 else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7953 cc_reg = cc_reg_2;
7954 else
7955 continue;
7956
7957 cc_src_insn = NULL_RTX;
7958 cc_src = NULL_RTX;
7959 for (insn = PREV_INSN (last_insn);
7960 insn && insn != PREV_INSN (BB_HEAD (bb));
7961 insn = PREV_INSN (insn))
7962 {
7963 rtx set;
7964
7965 if (! INSN_P (insn))
7966 continue;
7967 set = single_set (insn);
7968 if (set
7969 && GET_CODE (SET_DEST (set)) == REG
7970 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7971 {
7972 cc_src_insn = insn;
7973 cc_src = SET_SRC (set);
7974 break;
7975 }
7976 else if (reg_set_p (cc_reg, insn))
7977 break;
7978 }
7979
7980 if (! cc_src_insn)
7981 continue;
7982
7983 if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7984 continue;
7985
7986 /* Now CC_REG is a condition code register used for a
7987 conditional jump at the end of the block, and CC_SRC, in
7988 CC_SRC_INSN, is the value to which that condition code
7989 register is set, and CC_SRC is still meaningful at the end of
7990 the basic block. */
7991
7992 orig_mode = GET_MODE (cc_src);
7993 mode = cse_cc_succs (bb, cc_reg, cc_src, true);
7994 if (mode != VOIDmode)
7995 {
7996 if (mode != GET_MODE (cc_src))
7997 abort ();
7998 if (mode != orig_mode)
7999 cse_change_cc_mode_insns (cc_src_insn, NEXT_INSN (last_insn),
8000 gen_rtx_REG (mode, REGNO (cc_reg)));
8001 }
8002 }
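/* Overall effect, on a two-block sketch (purely illustrative):

       bb1:  (set (reg:CC 17) (compare a b))
             conditional jump testing (reg:CC 17)
       bb2:  (set (reg:CC 17) (compare a b))   <- deleted

   provided bb1 is bb2's only predecessor and neither a, b, nor the
   condition code register changes in between.  */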
8003 }