1 /* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 2011 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "hard-reg-set.h"
29 #include "regs.h"
30 #include "basic-block.h"
31 #include "flags.h"
32 #include "insn-config.h"
33 #include "recog.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "diagnostic-core.h"
37 #include "toplev.h"
38 #include "output.h"
39 #include "ggc.h"
40 #include "timevar.h"
41 #include "except.h"
42 #include "target.h"
43 #include "params.h"
44 #include "rtlhooks-def.h"
45 #include "tree-pass.h"
46 #include "df.h"
47 #include "dbgcnt.h"
48
49 /* The basic idea of common subexpression elimination is to go
50 through the code, keeping a record of expressions that would
51 have the same value at the current scan point, and replacing
52 expressions encountered with the cheapest equivalent expression.
53
54 It is too complicated to keep track of the different possibilities
55 when control paths merge in this code; so, at each label, we forget all
56 that is known and start fresh. This can be described as processing each
57 extended basic block separately. We have a separate pass to perform
58 global CSE.
59
60 Note CSE can turn a conditional or computed jump into a nop or
61 an unconditional jump. When this occurs we arrange to run the jump
62 optimizer after CSE to delete the unreachable code.
63
64 We use two data structures to record the equivalent expressions:
65 a hash table for most expressions, and a vector of "quantity
66 numbers" to record equivalent (pseudo) registers.
67
68 The use of the special data structure for registers is desirable
69 because it is faster. It is possible because register references
70 contain a fairly small number, the register number, taken from
71 a contiguously allocated series, and two register references are
72 identical if they have the same number. General expressions
73 do not have any such thing, so the only way to retrieve the
74 information recorded on an expression other than a register
75 is to keep it in a hash table.
76
77 Registers and "quantity numbers":
78
79 At the start of each basic block, all of the (hardware and pseudo)
80 registers used in the function are given distinct quantity
81 numbers to indicate their contents. During scan, when the code
82 copies one register into another, we copy the quantity number.
83 When a register is loaded in any other way, we allocate a new
84 quantity number to describe the value generated by this operation.
85 `REG_QTY (N)' records what quantity register N is currently thought
86 of as containing.
87
88 All real quantity numbers are greater than or equal to zero.
89 If register N has not been assigned a quantity, `REG_QTY (N)' will
90 equal -N - 1, which is always negative.
91
92 Quantity numbers below zero do not exist and none of the `qty_table'
93 entries should be referenced with a negative index.
94
95 We also maintain a bidirectional chain of registers for each
96 quantity number. The `qty_table' members `first_reg' and `last_reg',
97 and `reg_eqv_table' members `next' and `prev' hold these chains.
98
99 The first register in a chain is the one whose lifespan is least local.
100 Among equals, it is the one that was seen first.
101 We replace any equivalent register with that one.
102
103 If two registers have the same quantity number, it must be true that
104 REG expressions with the qty_table `mode' are in the hash table for both
105 registers and are in the same class.
106
107 The converse is not true. Since hard registers may be referenced in
108 any mode, two REG expressions might be equivalent in the hash table
109 but not have the same quantity number if the quantity of one
110 of the registers does not have the same mode as those expressions.
111
112 Constants and quantity numbers
113
114 When a quantity has a known constant value, that value is stored
115 in the appropriate qty_table `const_rtx'. This is in addition to
116 putting the constant in the hash table as is usual for non-regs.
117
118 Whether a reg or a constant is preferred is determined by the configuration
119 macro CONST_COSTS and will often depend on the constant value. In any
120 event, expressions containing constants can be simplified by fold_rtx.
121
122 When a quantity has a known nearly constant value (such as an address
123 of a stack slot), that value is stored in the appropriate qty_table
124 `const_rtx'.
125
126 Integer constants don't have a machine mode. However, cse
127 determines the intended machine mode from the destination
128 of the instruction that moves the constant. The machine mode
129 is recorded in the hash table along with the actual RTL
130 constant expression so that different modes are kept separate.
131
132 Other expressions:
133
134 To record known equivalences among expressions in general
135 we use a hash table called `table'. It has a fixed number of buckets
136 that contain chains of `struct table_elt' elements for expressions.
137 These chains connect the elements whose expressions have the same
138 hash codes.
139
140 Other chains through the same elements connect the elements which
141 currently have equivalent values.
142
143 Register references in an expression are canonicalized before hashing
144 the expression. This is done using `reg_qty' and qty_table `first_reg'.
145 The hash code of a register reference is computed using the quantity
146 number, not the register number.
147
148 When the value of an expression changes, it is necessary to remove from the
149 hash table not just that expression but all expressions whose values
150 could be different as a result.
151
152 1. If the value changing is in memory, except in special cases
153 ANYTHING referring to memory could be changed. That is because
154 nobody knows where a pointer does not point.
155 The function `invalidate_memory' removes what is necessary.
156
157 The special cases are when the address is constant or is
158 a constant plus a fixed register such as the frame pointer
159 or a static chain pointer. When such addresses are stored in,
160 we can tell exactly which other such addresses must be invalidated
161 due to overlap. `invalidate' does this.
162 All expressions that refer to non-constant
163 memory addresses are also invalidated. `invalidate_memory' does this.
164
165 2. If the value changing is a register, all expressions
166 containing references to that register, and only those,
167 must be removed.
168
169 Because searching the entire hash table for expressions that contain
170 a register is very slow, we try to figure out when it isn't necessary.
171 Precisely, this is necessary only when expressions have been
172 entered in the hash table using this register, and then the value has
173 changed, and then another expression wants to be added to refer to
174 the register's new value. This sequence of circumstances is rare
175 within any one basic block.
176
177 `REG_TICK' and `REG_IN_TABLE', accessors for members of
178 cse_reg_info, are used to detect this case. REG_TICK (i) is
179 incremented whenever a value is stored in register i.
180 REG_IN_TABLE (i) holds -1 if no references to register i have been
181 entered in the table; otherwise, it contains the value REG_TICK (i)
182 had when the references were entered. If we want to enter a
183 reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
184 remove old references. Until we want to enter a new entry, the
185 mere fact that the two vectors don't match causes the entries to be
186 ignored if anyone tries to match them.
187
188 Registers themselves are entered in the hash table as well as in
189 the equivalent-register chains. However, `REG_TICK' and
190 `REG_IN_TABLE' do not apply to expressions which are simple
191 register references. These expressions are removed from the table
192 immediately when they become invalid, and this can be done even if
193 we do not immediately search for all the expressions that refer to
194 the register.
195
196 A CLOBBER rtx in an instruction invalidates its operand for further
197 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
198 invalidates everything that resides in memory.
199
200 Related expressions:
201
202 Constant expressions that differ only by an additive integer
203 are called related. When a constant expression is put in
204 the table, the related expression with no constant term
205 is also entered. These are made to point at each other
206 so that it is possible to find out if there exists any
207 register equivalent to an expression related to a given expression. */
208
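/* As a worked illustration of the quantity mechanism (a sketch with
   made-up register numbers, not taken from real output): suppose 100
   and 101 are pseudos and an extended basic block contains

	(set (reg:SI 101) (reg:SI 100))
	(set (reg:SI 101) (plus:SI (reg:SI 101) (const_int 4)))

   After the first insn, REG_QTY (101) == REG_QTY (100) and the two
   registers sit on the same equivalence chain, so a use of
   (reg:SI 101) before the second insn can be replaced by the chain's
   canonical register, here typically (reg:SI 100).  The second insn
   loads 101 in some other way, so 101 is removed from that chain and
   given a fresh quantity number, while 100 keeps the old one.  */
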
209 /* Length of qty_table vector. We know in advance we will not need
210 a quantity number this big. */
211
212 static int max_qty;
213
214 /* Next quantity number to be allocated.
215 This is 1 + the largest number needed so far. */
216
217 static int next_qty;
218
219 /* Per-qty information tracking.
220
221 `first_reg' and `last_reg' track the head and tail of the
222 chain of registers which currently contain this quantity.
223
224 `mode' contains the machine mode of this quantity.
225
226 `const_rtx' holds the rtx of the constant value of this
227 quantity, if known. A summation of the frame/arg pointer
228 and a constant can also be entered here. When this holds
229 a known value, `const_insn' is the insn which stored the
230 constant value.
231
232 `comparison_{code,const,qty}' are used to track when a
233 comparison between a quantity and some constant or register has
234 been passed. In such a case, we know the results of the comparison
235 in case we see it again. These members record a comparison that
236 is known to be true. `comparison_code' holds the rtx code of such
237 a comparison, else it is set to UNKNOWN and the other two
238 comparison members are undefined. `comparison_const' holds
239 the constant being compared against, or zero if the comparison
240 is not against a constant. `comparison_qty' holds the quantity
241 being compared against when the result is known. If the comparison
242 is not with a register, `comparison_qty' is -1. */
243
244 struct qty_table_elem
245 {
246 rtx const_rtx;
247 rtx const_insn;
248 rtx comparison_const;
249 int comparison_qty;
250 unsigned int first_reg, last_reg;
251 /* The sizes of these fields should match the sizes of the
252 code and mode fields of struct rtx_def (see rtl.h). */
253 ENUM_BITFIELD(rtx_code) comparison_code : 16;
254 ENUM_BITFIELD(machine_mode) mode : 8;
255 };
256
257 /* The table of all qtys, indexed by qty number. */
258 static struct qty_table_elem *qty_table;
259
260 /* Structure used to pass arguments via for_each_rtx to function
261 cse_change_cc_mode. */
262 struct change_cc_mode_args
263 {
264 rtx insn;
265 rtx newreg;
266 };
267
268 #ifdef HAVE_cc0
269 /* For machines that have a CC0, we do not record its value in the hash
270 table since its use is guaranteed to be the insn immediately following
271 its definition and any other insn is presumed to invalidate it.
272
273 Instead, we store below the current and last value assigned to CC0.
274 If it should happen to be a constant, it is stored in preference
275 to the actual assigned value. In case it is a constant, we store
276 the mode in which the constant should be interpreted. */
277
278 static rtx this_insn_cc0, prev_insn_cc0;
279 static enum machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;
280 #endif
281
282 /* Insn being scanned. */
283
284 static rtx this_insn;
285 static bool optimize_this_for_speed_p;
286
287 /* Indexed by register number, this gives the number of the next (or
288 previous) register in the chain of registers sharing the same
289 value.
290
291 Or -1 if this register is at the end of the chain.
292
293 If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined. */
294
295 /* Per-register equivalence chain. */
296 struct reg_eqv_elem
297 {
298 int next, prev;
299 };
300
301 /* The table of all register equivalence chains. */
302 static struct reg_eqv_elem *reg_eqv_table;
303
304 struct cse_reg_info
305 {
306 /* The timestamp at which this register is initialized. */
307 unsigned int timestamp;
308
309 /* The quantity number of the register's current contents. */
310 int reg_qty;
311
312 /* The number of times the register has been altered in the current
313 basic block. */
314 int reg_tick;
315
316 /* The REG_TICK value at which rtx's containing this register are
317 valid in the hash table. If this does not equal the current
318 reg_tick value, such expressions existing in the hash table are
319 invalid. */
320 int reg_in_table;
321
322 /* The SUBREG that was set when REG_TICK was last incremented. Set
323 to -1 if the last store was to the whole register, not a subreg. */
324 unsigned int subreg_ticked;
325 };
326
327 /* A table of cse_reg_info indexed by register numbers. */
328 static struct cse_reg_info *cse_reg_info_table;
329
330 /* The size of the above table. */
331 static unsigned int cse_reg_info_table_size;
332
333 /* The index of the first entry that has not been initialized. */
334 static unsigned int cse_reg_info_table_first_uninitialized;
335
336 /* The timestamp at the beginning of the current run of
337 cse_extended_basic_block; it is incremented at the start of
338 each such run. The timestamp field of a
339 cse_reg_info entry matches the value of this variable if and only
340 if the entry has been initialized during the current run of
341 cse_extended_basic_block. */
342 static unsigned int cse_reg_info_timestamp;
343
344 /* A HARD_REG_SET containing all the hard registers for which there is
345 currently a REG expression in the hash table. Note the difference
346 from the above variables, which indicate if the REG is mentioned in some
347 expression in the table. */
348
349 static HARD_REG_SET hard_regs_in_table;
350
351 /* True if CSE has altered the CFG. */
352 static bool cse_cfg_altered;
353
354 /* True if CSE has altered conditional jump insns in such a way
355 that jump optimization should be redone. */
356 static bool cse_jumps_altered;
357
358 /* True if we have put a LABEL_REF into the hash table for an INSN
359 without a REG_LABEL_OPERAND; if so, we have to rerun jump after CSE
360 to put in the note. */
361 static bool recorded_label_ref;
362
363 /* canon_hash stores 1 in do_not_record
364 if it notices a reference to CC0, PC, or some other volatile
365 subexpression. */
366
367 static int do_not_record;
368
369 /* canon_hash stores 1 in hash_arg_in_memory
370 if it notices a reference to memory within the expression being hashed. */
371
372 static int hash_arg_in_memory;
373
374 /* The hash table contains buckets which are chains of `struct table_elt's,
375 each recording one expression's information.
376 That expression is in the `exp' field.
377
378 The canon_exp field contains a canonical (from the point of view of
379 alias analysis) version of the `exp' field.
380
381 Those elements with the same hash code are chained in both directions
382 through the `next_same_hash' and `prev_same_hash' fields.
383
384 Each set of expressions with equivalent values
385 is on a two-way chain through the `next_same_value'
386 and `prev_same_value' fields, and all point with
387 the `first_same_value' field at the first element in
388 that chain. The chain is in order of increasing cost.
389 Each element's cost value is in its `cost' field.
390
391 The `in_memory' field is nonzero for elements that
392 involve any reference to memory. These elements are removed
393 whenever a write is done to an unidentified location in memory.
394 To be safe, we assume that a memory address is unidentified unless
395 the address is either a symbol constant or a constant plus
396 the frame pointer or argument pointer.
397
398 The `related_value' field is used to connect related expressions
399 (that differ by adding an integer).
400 The related expressions are chained in a circular fashion.
401 `related_value' is zero for expressions for which this
402 chain is not useful.
403
404 The `cost' field stores the cost of this element's expression.
405 The `regcost' field stores the value returned by approx_reg_cost for
406 this element's expression.
407
408 The `is_const' flag is set if the element is a constant (including
409 a fixed address).
410
411 The `flag' field is used as a temporary during some search routines.
412
413 The `mode' field is usually the same as GET_MODE (`exp'), but
414 if `exp' is a CONST_INT and has no machine mode then the `mode'
415 field is the mode it was being used as. Each constant is
416 recorded separately for each mode it is used with. */
417
418 struct table_elt
419 {
420 rtx exp;
421 rtx canon_exp;
422 struct table_elt *next_same_hash;
423 struct table_elt *prev_same_hash;
424 struct table_elt *next_same_value;
425 struct table_elt *prev_same_value;
426 struct table_elt *first_same_value;
427 struct table_elt *related_value;
428 int cost;
429 int regcost;
430 /* The size of this field should match the size
431 of the mode field of struct rtx_def (see rtl.h). */
432 ENUM_BITFIELD(machine_mode) mode : 8;
433 char in_memory;
434 char is_const;
435 char flag;
436 };
437
438 /* We don't want a lot of buckets, because we rarely have very many
439 things stored in the hash table, and a lot of buckets slows
440 down a lot of loops that happen frequently. */
441 #define HASH_SHIFT 5
442 #define HASH_SIZE (1 << HASH_SHIFT)
443 #define HASH_MASK (HASH_SIZE - 1)
444
445 /* Compute hash code of X in mode M. Special-case the case where X is a pseudo
446 register (hard registers may require `do_not_record' to be set). */
447
448 #define HASH(X, M) \
449 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
450 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
451 : canon_hash (X, M)) & HASH_MASK)
452
453 /* Like HASH, but without side-effects. */
454 #define SAFE_HASH(X, M) \
455 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
456 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
457 : safe_hash (X, M)) & HASH_MASK)
458
459 /* Determine whether register number N is considered a fixed register for the
460 purpose of approximating register costs.
461 It is desirable to replace other regs with fixed regs, to reduce need for
462 non-fixed hard regs.
463 A reg wins if it is either the frame pointer or designated as fixed. */
464 #define FIXED_REGNO_P(N) \
465 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
466 || fixed_regs[N] || global_regs[N])
467
468 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
469 hard registers and pointers into the frame are the cheapest with a cost
470 of 0. Next come pseudos with a cost of 1 and other hard registers with
471 a cost of 2. Aside from these special cases, call `rtx_cost'. */
472
473 #define CHEAP_REGNO(N) \
474 (REGNO_PTR_FRAME_P(N) \
475 || (HARD_REGISTER_NUM_P (N) \
476 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
477
478 #define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
479 #define COST_IN(X,OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))
480
481 /* Get the number of times this register has been updated in this
482 basic block. */
483
484 #define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)
485
486 /* Get the point at which REG was recorded in the table. */
487
488 #define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)
489
490 /* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
491 SUBREG). */
492
493 #define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)
494
495 /* Get the quantity number for REG. */
496
497 #define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)
498
499 /* Determine if the quantity number for register X represents a valid index
500 into the qty_table. */
501
502 #define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
503
504 /* Compare table_elt X and Y and return true iff X is cheaper than Y. */
505
506 #define CHEAPER(X, Y) \
507 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
508
509 static struct table_elt *table[HASH_SIZE];
510
511 /* Chain of `struct table_elt's made so far for this function
512 but currently removed from the table. */
513
514 static struct table_elt *free_element_chain;
515
516 /* Set to the cost of a constant pool reference if one was found for a
517 symbolic constant. If this was found, it means we should try to
518 convert constants into constant pool entries if they don't fit in
519 the insn. */
520
521 static int constant_pool_entries_cost;
522 static int constant_pool_entries_regcost;
523
524 /* Trace a path through the CFG. */
525
526 struct branch_path
527 {
528 /* The basic block for this path entry. */
529 basic_block bb;
530 };
531
532 /* This data describes a block that will be processed by
533 cse_extended_basic_block. */
534
535 struct cse_basic_block_data
536 {
537 /* Total number of SETs in block. */
538 int nsets;
539 /* Size of current branch path, if any. */
540 int path_size;
541 /* Current path, indicating which basic_blocks will be processed. */
542 struct branch_path *path;
543 };
544
545
546 /* Pointers to the live in/live out bitmaps for the boundaries of the
547 current EBB. */
548 static bitmap cse_ebb_live_in, cse_ebb_live_out;
549
550 /* A simple bitmap to track which basic blocks have been visited
551 already as part of a previously processed extended basic block. */
552 static sbitmap cse_visited_basic_blocks;
553
554 static bool fixed_base_plus_p (rtx x);
555 static int notreg_cost (rtx, enum rtx_code);
556 static int approx_reg_cost_1 (rtx *, void *);
557 static int approx_reg_cost (rtx);
558 static int preferable (int, int, int, int);
559 static void new_basic_block (void);
560 static void make_new_qty (unsigned int, enum machine_mode);
561 static void make_regs_eqv (unsigned int, unsigned int);
562 static void delete_reg_equiv (unsigned int);
563 static int mention_regs (rtx);
564 static int insert_regs (rtx, struct table_elt *, int);
565 static void remove_from_table (struct table_elt *, unsigned);
566 static void remove_pseudo_from_table (rtx, unsigned);
567 static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
568 static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
569 static rtx lookup_as_function (rtx, enum rtx_code);
570 static struct table_elt *insert_with_costs (rtx, struct table_elt *, unsigned,
571 enum machine_mode, int, int);
572 static struct table_elt *insert (rtx, struct table_elt *, unsigned,
573 enum machine_mode);
574 static void merge_equiv_classes (struct table_elt *, struct table_elt *);
575 static void invalidate (rtx, enum machine_mode);
576 static bool cse_rtx_varies_p (const_rtx, bool);
577 static void remove_invalid_refs (unsigned int);
578 static void remove_invalid_subreg_refs (unsigned int, unsigned int,
579 enum machine_mode);
580 static void rehash_using_reg (rtx);
581 static void invalidate_memory (void);
582 static void invalidate_for_call (void);
583 static rtx use_related_value (rtx, struct table_elt *);
584
585 static inline unsigned canon_hash (rtx, enum machine_mode);
586 static inline unsigned safe_hash (rtx, enum machine_mode);
587 static inline unsigned hash_rtx_string (const char *);
588
589 static rtx canon_reg (rtx, rtx);
590 static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
591 enum machine_mode *,
592 enum machine_mode *);
593 static rtx fold_rtx (rtx, rtx);
594 static rtx equiv_constant (rtx);
595 static void record_jump_equiv (rtx, bool);
596 static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
597 int);
598 static void cse_insn (rtx);
599 static void cse_prescan_path (struct cse_basic_block_data *);
600 static void invalidate_from_clobbers (rtx);
601 static rtx cse_process_notes (rtx, rtx, bool *);
602 static void cse_extended_basic_block (struct cse_basic_block_data *);
603 static void count_reg_usage (rtx, int *, rtx, int);
604 static int check_for_label_ref (rtx *, void *);
605 extern void dump_class (struct table_elt*);
606 static void get_cse_reg_info_1 (unsigned int regno);
607 static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
608 static int check_dependence (rtx *, void *);
609
610 static void flush_hash_table (void);
611 static bool insn_live_p (rtx, int *);
612 static bool set_live_p (rtx, rtx, int *);
613 static int cse_change_cc_mode (rtx *, void *);
614 static void cse_change_cc_mode_insn (rtx, rtx);
615 static void cse_change_cc_mode_insns (rtx, rtx, rtx);
616 static enum machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
617 bool);
618 \f
619
620 #undef RTL_HOOKS_GEN_LOWPART
621 #define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible
622
623 static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
624 \f
625 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
626 virtual regs here because the simplify_*_operation routines are called
627 by integrate.c, which is called before virtual register instantiation. */
628
629 static bool
630 fixed_base_plus_p (rtx x)
631 {
632 switch (GET_CODE (x))
633 {
634 case REG:
635 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
636 return true;
637 if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
638 return true;
639 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
640 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
641 return true;
642 return false;
643
644 case PLUS:
645 if (!CONST_INT_P (XEXP (x, 1)))
646 return false;
647 return fixed_base_plus_p (XEXP (x, 0));
648
649 default:
650 return false;
651 }
652 }
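
/* For illustration (made-up operands): a PLUS of frame_pointer_rtx and
   (const_int 8) satisfies this predicate, as does the frame pointer by
   itself, while (plus:SI (reg:SI 100) (const_int 8)) does not, because
   pseudo 100 is neither a frame/arg pointer nor a virtual register.  */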
653
654 /* Dump the expressions in the equivalence class indicated by CLASSP.
655 This function is used only for debugging. */
656 void
657 dump_class (struct table_elt *classp)
658 {
659 struct table_elt *elt;
660
661 fprintf (stderr, "Equivalence chain for ");
662 print_rtl (stderr, classp->exp);
663 fprintf (stderr, ": \n");
664
665 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
666 {
667 print_rtl (stderr, elt->exp);
668 fprintf (stderr, "\n");
669 }
670 }
671
672 /* Subroutine of approx_reg_cost; called through for_each_rtx. */
673
674 static int
675 approx_reg_cost_1 (rtx *xp, void *data)
676 {
677 rtx x = *xp;
678 int *cost_p = (int *) data;
679
680 if (x && REG_P (x))
681 {
682 unsigned int regno = REGNO (x);
683
684 if (! CHEAP_REGNO (regno))
685 {
686 if (regno < FIRST_PSEUDO_REGISTER)
687 {
688 if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
689 return 1;
690 *cost_p += 2;
691 }
692 else
693 *cost_p += 1;
694 }
695 }
696
697 return 0;
698 }
699
700 /* Return an estimate of the cost of the registers used in an rtx.
701 This is mostly the number of different REG expressions in the rtx;
702 however for some exceptions like fixed registers we use a cost of
703 0. If any other hard register reference occurs, return MAX_COST. */
704
705 static int
706 approx_reg_cost (rtx x)
707 {
708 int cost = 0;
709
710 if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
711 return MAX_COST;
712
713 return cost;
714 }
715
716 /* Return a negative value if an rtx A, whose costs are given by COST_A
717 and REGCOST_A, is more desirable than an rtx B.
718 Return a positive value if A is less desirable, or 0 if the two are
719 equally good. */
720 static int
721 preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
722 {
723 /* First, get rid of cases involving expressions that are entirely
724 unwanted. */
725 if (cost_a != cost_b)
726 {
727 if (cost_a == MAX_COST)
728 return 1;
729 if (cost_b == MAX_COST)
730 return -1;
731 }
732
733 /* Avoid extending lifetimes of hardregs. */
734 if (regcost_a != regcost_b)
735 {
736 if (regcost_a == MAX_COST)
737 return 1;
738 if (regcost_b == MAX_COST)
739 return -1;
740 }
741
742 /* Normal operation costs take precedence. */
743 if (cost_a != cost_b)
744 return cost_a - cost_b;
745 /* Only if these are identical consider effects on register pressure. */
746 if (regcost_a != regcost_b)
747 return regcost_a - regcost_b;
748 return 0;
749 }
750
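/* A few illustrative calls (values made up for this sketch):
   preferable (2, 5, 3, 0) is negative, so the lower rtx cost wins even
   though the first expression has the higher register cost;
   preferable (3, 1, 3, 4) falls through to the register costs and is
   negative; and preferable (2, MAX_COST, 3, 0) is positive: the
   MAX_COST register cost (typically a hard register whose lifetime
   should not be extended) makes the first expression lose despite its
   lower rtx cost.  */
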
751 /* Internal function, to compute cost when X is not a register; called
752 from COST macro to keep it simple. */
753
754 static int
755 notreg_cost (rtx x, enum rtx_code outer)
756 {
757 return ((GET_CODE (x) == SUBREG
758 && REG_P (SUBREG_REG (x))
759 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
760 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
761 && (GET_MODE_SIZE (GET_MODE (x))
762 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
763 && subreg_lowpart_p (x)
764 && TRULY_NOOP_TRUNCATION_MODES_P (GET_MODE (x),
765 GET_MODE (SUBREG_REG (x))))
766 ? 0
767 : rtx_cost (x, outer, optimize_this_for_speed_p) * 2);
768 }
769
770 \f
771 /* Initialize CSE_REG_INFO_TABLE. */
772
773 static void
774 init_cse_reg_info (unsigned int nregs)
775 {
776 /* Do we need to grow the table? */
777 if (nregs > cse_reg_info_table_size)
778 {
779 unsigned int new_size;
780
781 if (cse_reg_info_table_size < 2048)
782 {
783 /* Compute a new size that is a power of 2 and no smaller
784 than the larger of NREGS and 64. */
785 new_size = (cse_reg_info_table_size
786 ? cse_reg_info_table_size : 64);
787
788 while (new_size < nregs)
789 new_size *= 2;
790 }
791 else
792 {
793 /* If we need a big table, allocate just enough to hold
794 NREGS registers. */
795 new_size = nregs;
796 }
797
798 /* Reallocate the table with NEW_SIZE entries. */
799 free (cse_reg_info_table);
800 cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
801 cse_reg_info_table_size = new_size;
802 cse_reg_info_table_first_uninitialized = 0;
803 }
804
805 /* Do we have all of the first NREGS entries initialized? */
806 if (cse_reg_info_table_first_uninitialized < nregs)
807 {
808 unsigned int old_timestamp = cse_reg_info_timestamp - 1;
809 unsigned int i;
810
811 /* Put the old timestamp on newly allocated entries so that they
812 will all be considered out of date. We do not touch those
813 entries beyond the first NREGS entries to be nice to the
814 virtual memory. */
815 for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
816 cse_reg_info_table[i].timestamp = old_timestamp;
817
818 cse_reg_info_table_first_uninitialized = nregs;
819 }
820 }
821
822 /* Given REGNO, initialize the cse_reg_info entry for REGNO. */
823
824 static void
825 get_cse_reg_info_1 (unsigned int regno)
826 {
827 /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
828 entry will be considered to have been initialized. */
829 cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;
830
831 /* Initialize the rest of the entry. */
832 cse_reg_info_table[regno].reg_tick = 1;
833 cse_reg_info_table[regno].reg_in_table = -1;
834 cse_reg_info_table[regno].subreg_ticked = -1;
835 cse_reg_info_table[regno].reg_qty = -regno - 1;
836 }
837
838 /* Find a cse_reg_info entry for REGNO. */
839
840 static inline struct cse_reg_info *
841 get_cse_reg_info (unsigned int regno)
842 {
843 struct cse_reg_info *p = &cse_reg_info_table[regno];
844
845 /* If this entry has not been initialized, go ahead and initialize
846 it. */
847 if (p->timestamp != cse_reg_info_timestamp)
848 get_cse_reg_info_1 (regno);
849
850 return p;
851 }
852
853 /* Clear the hash table and initialize each register with its own quantity,
854 for a new basic block. */
855
856 static void
857 new_basic_block (void)
858 {
859 int i;
860
861 next_qty = 0;
862
863 /* Invalidate cse_reg_info_table. */
864 cse_reg_info_timestamp++;
865
866 /* Clear out hash table state for this pass. */
867 CLEAR_HARD_REG_SET (hard_regs_in_table);
868
869 /* The per-quantity values used to be initialized here, but it is
870 much faster to initialize each as it is made in `make_new_qty'. */
871
872 for (i = 0; i < HASH_SIZE; i++)
873 {
874 struct table_elt *first;
875
876 first = table[i];
877 if (first != NULL)
878 {
879 struct table_elt *last = first;
880
881 table[i] = NULL;
882
883 while (last->next_same_hash != NULL)
884 last = last->next_same_hash;
885
886 /* Now relink this entire hash chain into
887 the free element list. */
888
889 last->next_same_hash = free_element_chain;
890 free_element_chain = first;
891 }
892 }
893
894 #ifdef HAVE_cc0
895 prev_insn_cc0 = 0;
896 #endif
897 }
898
899 /* Say that register REG contains a quantity in mode MODE not in any
900 register before and initialize that quantity. */
901
902 static void
903 make_new_qty (unsigned int reg, enum machine_mode mode)
904 {
905 int q;
906 struct qty_table_elem *ent;
907 struct reg_eqv_elem *eqv;
908
909 gcc_assert (next_qty < max_qty);
910
911 q = REG_QTY (reg) = next_qty++;
912 ent = &qty_table[q];
913 ent->first_reg = reg;
914 ent->last_reg = reg;
915 ent->mode = mode;
916 ent->const_rtx = ent->const_insn = NULL_RTX;
917 ent->comparison_code = UNKNOWN;
918
919 eqv = &reg_eqv_table[reg];
920 eqv->next = eqv->prev = -1;
921 }
922
923 /* Make reg NEW equivalent to reg OLD.
924 OLD is not changing; NEW is. */
925
926 static void
927 make_regs_eqv (unsigned int new_reg, unsigned int old_reg)
928 {
929 unsigned int lastr, firstr;
930 int q = REG_QTY (old_reg);
931 struct qty_table_elem *ent;
932
933 ent = &qty_table[q];
934
935 /* Nothing should become eqv until it has a "non-invalid" qty number. */
936 gcc_assert (REGNO_QTY_VALID_P (old_reg));
937
938 REG_QTY (new_reg) = q;
939 firstr = ent->first_reg;
940 lastr = ent->last_reg;
941
942 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
943 hard regs. Among pseudos, if NEW will live longer than any other reg
944 of the same qty, and that is beyond the current basic block,
945 make it the new canonical replacement for this qty. */
946 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
947 /* Certain fixed registers might be of the class NO_REGS. This means
948 that not only can they not be allocated by the compiler, but
949 they cannot be used in substitutions or canonicalizations
950 either. */
951 && (new_reg >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new_reg) != NO_REGS)
952 && ((new_reg < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new_reg))
953 || (new_reg >= FIRST_PSEUDO_REGISTER
954 && (firstr < FIRST_PSEUDO_REGISTER
955 || (bitmap_bit_p (cse_ebb_live_out, new_reg)
956 && !bitmap_bit_p (cse_ebb_live_out, firstr))
957 || (bitmap_bit_p (cse_ebb_live_in, new_reg)
958 && !bitmap_bit_p (cse_ebb_live_in, firstr))))))
959 {
960 reg_eqv_table[firstr].prev = new_reg;
961 reg_eqv_table[new_reg].next = firstr;
962 reg_eqv_table[new_reg].prev = -1;
963 ent->first_reg = new_reg;
964 }
965 else
966 {
967 /* If NEW is a hard reg (known to be non-fixed), insert at end.
968 Otherwise, insert before any non-fixed hard regs that are at the
969 end. Registers of class NO_REGS cannot be used as an
970 equivalent for anything. */
971 while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
972 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
973 && new_reg >= FIRST_PSEUDO_REGISTER)
974 lastr = reg_eqv_table[lastr].prev;
975 reg_eqv_table[new_reg].next = reg_eqv_table[lastr].next;
976 if (reg_eqv_table[lastr].next >= 0)
977 reg_eqv_table[reg_eqv_table[lastr].next].prev = new_reg;
978 else
979 qty_table[q].last_reg = new_reg;
980 reg_eqv_table[lastr].next = new_reg;
981 reg_eqv_table[new_reg].prev = lastr;
982 }
983 }
984
985 /* Remove REG from its equivalence class. */
986
987 static void
988 delete_reg_equiv (unsigned int reg)
989 {
990 struct qty_table_elem *ent;
991 int q = REG_QTY (reg);
992 int p, n;
993
994 /* If invalid, do nothing. */
995 if (! REGNO_QTY_VALID_P (reg))
996 return;
997
998 ent = &qty_table[q];
999
1000 p = reg_eqv_table[reg].prev;
1001 n = reg_eqv_table[reg].next;
1002
1003 if (n != -1)
1004 reg_eqv_table[n].prev = p;
1005 else
1006 ent->last_reg = p;
1007 if (p != -1)
1008 reg_eqv_table[p].next = n;
1009 else
1010 ent->first_reg = n;
1011
1012 REG_QTY (reg) = -reg - 1;
1013 }
1014
1015 /* Remove any invalid expressions from the hash table
1016 that refer to any of the registers contained in expression X.
1017
1018 Make sure that newly inserted references to those registers
1019 as subexpressions will be considered valid.
1020
1021 mention_regs is not called when a register itself
1022 is being stored in the table.
1023
1024 Return 1 if we have done something that may have changed the hash code
1025 of X. */
1026
1027 static int
1028 mention_regs (rtx x)
1029 {
1030 enum rtx_code code;
1031 int i, j;
1032 const char *fmt;
1033 int changed = 0;
1034
1035 if (x == 0)
1036 return 0;
1037
1038 code = GET_CODE (x);
1039 if (code == REG)
1040 {
1041 unsigned int regno = REGNO (x);
1042 unsigned int endregno = END_REGNO (x);
1043 unsigned int i;
1044
1045 for (i = regno; i < endregno; i++)
1046 {
1047 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1048 remove_invalid_refs (i);
1049
1050 REG_IN_TABLE (i) = REG_TICK (i);
1051 SUBREG_TICKED (i) = -1;
1052 }
1053
1054 return 0;
1055 }
1056
1057 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1058 pseudo if they don't use overlapping words. We handle only pseudos
1059 here for simplicity. */
1060 if (code == SUBREG && REG_P (SUBREG_REG (x))
1061 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1062 {
1063 unsigned int i = REGNO (SUBREG_REG (x));
1064
1065 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1066 {
1067 /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
1068 the last store to this register really stored into this
1069 subreg, then remove the memory of this subreg.
1070 Otherwise, remove any memory of the entire register and
1071 all its subregs from the table. */
1072 if (REG_TICK (i) - REG_IN_TABLE (i) > 1
1073 || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
1074 remove_invalid_refs (i);
1075 else
1076 remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1077 }
1078
1079 REG_IN_TABLE (i) = REG_TICK (i);
1080 SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
1081 return 0;
1082 }
1083
1084 /* If X is a comparison or a COMPARE and either operand is a register
1085 that does not have a quantity, give it one. This is so that a later
1086 call to record_jump_equiv won't cause X to be assigned a different
1087 hash code and not found in the table after that call.
1088
1089 It is not necessary to do this here, since rehash_using_reg can
1090 fix up the table later, but doing this here eliminates the need to
1091 call that expensive function in the most common case where the only
1092 use of the register is in the comparison. */
1093
1094 if (code == COMPARE || COMPARISON_P (x))
1095 {
1096 if (REG_P (XEXP (x, 0))
1097 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1098 if (insert_regs (XEXP (x, 0), NULL, 0))
1099 {
1100 rehash_using_reg (XEXP (x, 0));
1101 changed = 1;
1102 }
1103
1104 if (REG_P (XEXP (x, 1))
1105 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1106 if (insert_regs (XEXP (x, 1), NULL, 0))
1107 {
1108 rehash_using_reg (XEXP (x, 1));
1109 changed = 1;
1110 }
1111 }
1112
1113 fmt = GET_RTX_FORMAT (code);
1114 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1115 if (fmt[i] == 'e')
1116 changed |= mention_regs (XEXP (x, i));
1117 else if (fmt[i] == 'E')
1118 for (j = 0; j < XVECLEN (x, i); j++)
1119 changed |= mention_regs (XVECEXP (x, i, j));
1120
1121 return changed;
1122 }
1123
1124 /* Update the register quantities for inserting X into the hash table
1125 with a value equivalent to CLASSP.
1126 (If the class does not contain a REG, it is irrelevant.)
1127 If MODIFIED is nonzero, X is a destination; it is being modified.
1128 Note that delete_reg_equiv should be called on a register
1129 before insert_regs is done on that register with MODIFIED != 0.
1130
1131 Nonzero value means that elements of reg_qty have changed
1132 so X's hash code may be different. */
1133
1134 static int
1135 insert_regs (rtx x, struct table_elt *classp, int modified)
1136 {
1137 if (REG_P (x))
1138 {
1139 unsigned int regno = REGNO (x);
1140 int qty_valid;
1141
1142 /* If REGNO is in the equivalence table already but is of the
1143 wrong mode for that equivalence, don't do anything here. */
1144
1145 qty_valid = REGNO_QTY_VALID_P (regno);
1146 if (qty_valid)
1147 {
1148 struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1149
1150 if (ent->mode != GET_MODE (x))
1151 return 0;
1152 }
1153
1154 if (modified || ! qty_valid)
1155 {
1156 if (classp)
1157 for (classp = classp->first_same_value;
1158 classp != 0;
1159 classp = classp->next_same_value)
1160 if (REG_P (classp->exp)
1161 && GET_MODE (classp->exp) == GET_MODE (x))
1162 {
1163 unsigned c_regno = REGNO (classp->exp);
1164
1165 gcc_assert (REGNO_QTY_VALID_P (c_regno));
1166
1167 /* Suppose that 5 is hard reg and 100 and 101 are
1168 pseudos. Consider
1169
1170 (set (reg:si 100) (reg:si 5))
1171 (set (reg:si 5) (reg:si 100))
1172 (set (reg:di 101) (reg:di 5))
1173
1174 We would now set REG_QTY (101) = REG_QTY (5), but the
1175 entry for 5 is in SImode. When we use this later in
1176 copy propagation, we get the register in the wrong mode. */
1177 if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
1178 continue;
1179
1180 make_regs_eqv (regno, c_regno);
1181 return 1;
1182 }
1183
1184 /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1185 than REG_IN_TABLE to find out if there was only a single preceding
1186 invalidation - for the SUBREG - or another one, which would be
1187 for the full register. However, if we find here that REG_TICK
1188 indicates that the register is invalid, it means that it has
1189 been invalidated in a separate operation. The SUBREG might be used
1190 now (then this is a recursive call), or we might use the full REG
1191 now and a SUBREG of it later. So bump up REG_TICK so that
1192 mention_regs will do the right thing. */
1193 if (! modified
1194 && REG_IN_TABLE (regno) >= 0
1195 && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1196 REG_TICK (regno)++;
1197 make_new_qty (regno, GET_MODE (x));
1198 return 1;
1199 }
1200
1201 return 0;
1202 }
1203
1204 /* If X is a SUBREG, we will likely be inserting the inner register in the
1205 table. If that register doesn't have an assigned quantity number at
1206 this point but does later, the insertion that we will be doing now will
1207 not be accessible because its hash code will have changed. So assign
1208 a quantity number now. */
1209
1210 else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
1211 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1212 {
1213 insert_regs (SUBREG_REG (x), NULL, 0);
1214 mention_regs (x);
1215 return 1;
1216 }
1217 else
1218 return mention_regs (x);
1219 }
1220 \f
1221
1222 /* Compute upper and lower anchors for CST. Also compute the offset of CST
1223 from these anchors/bases such that *_BASE + *_OFFS = CST. Return false iff
1224 CST is equal to an anchor. */
1225
1226 static bool
1227 compute_const_anchors (rtx cst,
1228 HOST_WIDE_INT *lower_base, HOST_WIDE_INT *lower_offs,
1229 HOST_WIDE_INT *upper_base, HOST_WIDE_INT *upper_offs)
1230 {
1231 HOST_WIDE_INT n = INTVAL (cst);
1232
1233 *lower_base = n & ~(targetm.const_anchor - 1);
1234 if (*lower_base == n)
1235 return false;
1236
1237 *upper_base =
1238 (n + (targetm.const_anchor - 1)) & ~(targetm.const_anchor - 1);
1239 *upper_offs = n - *upper_base;
1240 *lower_offs = n - *lower_base;
1241 return true;
1242 }
1243
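/* A worked example (assuming targetm.const_anchor is 0x8000; the masking
   arithmetic here requires the hook value to be a power of two): for
   CST = (const_int 0x12345) this computes *LOWER_BASE = 0x10000,
   *LOWER_OFFS = 0x2345, *UPPER_BASE = 0x18000 and *UPPER_OFFS = -0x5cbb,
   so that base + offset equals 0x12345 for both pairs.  */
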
1244 /* Insert the equivalence between ANCHOR and (REG + OFF) in mode MODE. */
1245
1246 static void
1247 insert_const_anchor (HOST_WIDE_INT anchor, rtx reg, HOST_WIDE_INT offs,
1248 enum machine_mode mode)
1249 {
1250 struct table_elt *elt;
1251 unsigned hash;
1252 rtx anchor_exp;
1253 rtx exp;
1254
1255 anchor_exp = GEN_INT (anchor);
1256 hash = HASH (anchor_exp, mode);
1257 elt = lookup (anchor_exp, hash, mode);
1258 if (!elt)
1259 elt = insert (anchor_exp, NULL, hash, mode);
1260
1261 exp = plus_constant (reg, offs);
1262 /* REG has just been inserted and the hash codes recomputed. */
1263 mention_regs (exp);
1264 hash = HASH (exp, mode);
1265
1266 /* Use the cost of the register rather than the whole expression. When
1267 looking up constant anchors we will further offset the corresponding
1268 expression therefore it does not make sense to prefer REGs over
1269 reg-immediate additions. Prefer instead the oldest expression. Also
1270 don't prefer pseudos over hard regs so that we derive constants in
1271 argument registers from other argument registers rather than from the
1272 original pseudo that was used to synthesize the constant. */
1273 insert_with_costs (exp, elt, hash, mode, COST (reg), 1);
1274 }
1275
1276 /* The constant CST is equivalent to the register REG. Create
1277 equivalences between the two anchors of CST and the corresponding
1278 register-offset expressions using REG. */
1279
1280 static void
1281 insert_const_anchors (rtx reg, rtx cst, enum machine_mode mode)
1282 {
1283 HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
1284
1285 if (!compute_const_anchors (cst, &lower_base, &lower_offs,
1286 &upper_base, &upper_offs))
1287 return;
1288
1289 /* Ignore anchors of value 0. Constants accessible from zero are
1290 simple. */
1291 if (lower_base != 0)
1292 insert_const_anchor (lower_base, reg, -lower_offs, mode);
1293
1294 if (upper_base != 0)
1295 insert_const_anchor (upper_base, reg, -upper_offs, mode);
1296 }
1297
1298 /* We need to express ANCHOR_ELT->exp + OFFS. Walk the equivalence list of
1299 ANCHOR_ELT and see if offsetting any of the entries by OFFS would create a
1300 valid expression. Return the cheapest and oldest of such expressions. In
1301 *OLD, return how old the resulting expression is compared to the other
1302 equivalent expressions. */
1303
1304 static rtx
1305 find_reg_offset_for_const (struct table_elt *anchor_elt, HOST_WIDE_INT offs,
1306 unsigned *old)
1307 {
1308 struct table_elt *elt;
1309 unsigned idx;
1310 struct table_elt *match_elt;
1311 rtx match;
1312
1313 /* Find the cheapest and *oldest* expression to maximize the chance of
1314 reusing the same pseudo. */
1315
1316 match_elt = NULL;
1317 match = NULL_RTX;
1318 for (elt = anchor_elt->first_same_value, idx = 0;
1319 elt;
1320 elt = elt->next_same_value, idx++)
1321 {
1322 if (match_elt && CHEAPER (match_elt, elt))
1323 return match;
1324
1325 if (REG_P (elt->exp)
1326 || (GET_CODE (elt->exp) == PLUS
1327 && REG_P (XEXP (elt->exp, 0))
1328 && GET_CODE (XEXP (elt->exp, 1)) == CONST_INT))
1329 {
1330 rtx x;
1331
1332 /* Ignore expressions that are no longer valid. */
1333 if (!REG_P (elt->exp) && !exp_equiv_p (elt->exp, elt->exp, 1, false))
1334 continue;
1335
1336 x = plus_constant (elt->exp, offs);
1337 if (REG_P (x)
1338 || (GET_CODE (x) == PLUS
1339 && IN_RANGE (INTVAL (XEXP (x, 1)),
1340 -targetm.const_anchor,
1341 targetm.const_anchor - 1)))
1342 {
1343 match = x;
1344 match_elt = elt;
1345 *old = idx;
1346 }
1347 }
1348 }
1349
1350 return match;
1351 }
1352
1353 /* Try to express the constant SRC_CONST using a register+offset expression
1354 derived from a constant anchor. Return it if successful, or NULL_RTX
1355 otherwise. */
1356
1357 static rtx
1358 try_const_anchors (rtx src_const, enum machine_mode mode)
1359 {
1360 struct table_elt *lower_elt, *upper_elt;
1361 HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
1362 rtx lower_anchor_rtx, upper_anchor_rtx;
1363 rtx lower_exp = NULL_RTX, upper_exp = NULL_RTX;
1364 unsigned lower_old, upper_old;
1365
1366 if (!compute_const_anchors (src_const, &lower_base, &lower_offs,
1367 &upper_base, &upper_offs))
1368 return NULL_RTX;
1369
1370 lower_anchor_rtx = GEN_INT (lower_base);
1371 upper_anchor_rtx = GEN_INT (upper_base);
1372 lower_elt = lookup (lower_anchor_rtx, HASH (lower_anchor_rtx, mode), mode);
1373 upper_elt = lookup (upper_anchor_rtx, HASH (upper_anchor_rtx, mode), mode);
1374
1375 if (lower_elt)
1376 lower_exp = find_reg_offset_for_const (lower_elt, lower_offs, &lower_old);
1377 if (upper_elt)
1378 upper_exp = find_reg_offset_for_const (upper_elt, upper_offs, &upper_old);
1379
1380 if (!lower_exp)
1381 return upper_exp;
1382 if (!upper_exp)
1383 return lower_exp;
1384
1385 /* Return the older expression. */
1386 return (upper_old > lower_old ? upper_exp : lower_exp);
1387 }
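
/* Putting the two halves together (same made-up numbers as above):
   once insert_const_anchors has recorded, for (reg:SI 100) known to
   equal 0x12345, the equivalences

	0x10000 == (plus (reg:SI 100) (const_int -0x2345))
	0x18000 == (plus (reg:SI 100) (const_int 0x5cbb))

   a later attempt to load 0x12340 can be satisfied by try_const_anchors:
   offsetting the lower-anchor expression by 0x2340 yields
   (plus (reg:SI 100) (const_int -5)), i.e. the new constant is reachable
   as reg 100 minus 5 instead of being synthesized from scratch.  */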
1388 \f
1389 /* Look in or update the hash table. */
1390
1391 /* Remove table element ELT from use in the table.
1392 HASH is its hash code, made using the HASH macro.
1393 It's an argument because often that is known in advance
1394 and we save much time not recomputing it. */
1395
1396 static void
1397 remove_from_table (struct table_elt *elt, unsigned int hash)
1398 {
1399 if (elt == 0)
1400 return;
1401
1402 /* Mark this element as removed. See cse_insn. */
1403 elt->first_same_value = 0;
1404
1405 /* Remove the table element from its equivalence class. */
1406
1407 {
1408 struct table_elt *prev = elt->prev_same_value;
1409 struct table_elt *next = elt->next_same_value;
1410
1411 if (next)
1412 next->prev_same_value = prev;
1413
1414 if (prev)
1415 prev->next_same_value = next;
1416 else
1417 {
1418 struct table_elt *newfirst = next;
1419 while (next)
1420 {
1421 next->first_same_value = newfirst;
1422 next = next->next_same_value;
1423 }
1424 }
1425 }
1426
1427 /* Remove the table element from its hash bucket. */
1428
1429 {
1430 struct table_elt *prev = elt->prev_same_hash;
1431 struct table_elt *next = elt->next_same_hash;
1432
1433 if (next)
1434 next->prev_same_hash = prev;
1435
1436 if (prev)
1437 prev->next_same_hash = next;
1438 else if (table[hash] == elt)
1439 table[hash] = next;
1440 else
1441 {
1442 /* This entry is not in the proper hash bucket. This can happen
1443 when two classes were merged by `merge_equiv_classes'. Search
1444 for the hash bucket that it heads. This happens only very
1445 rarely, so the cost is acceptable. */
1446 for (hash = 0; hash < HASH_SIZE; hash++)
1447 if (table[hash] == elt)
1448 table[hash] = next;
1449 }
1450 }
1451
1452 /* Remove the table element from its related-value circular chain. */
1453
1454 if (elt->related_value != 0 && elt->related_value != elt)
1455 {
1456 struct table_elt *p = elt->related_value;
1457
1458 while (p->related_value != elt)
1459 p = p->related_value;
1460 p->related_value = elt->related_value;
1461 if (p->related_value == p)
1462 p->related_value = 0;
1463 }
1464
1465 /* Now add it to the free element chain. */
1466 elt->next_same_hash = free_element_chain;
1467 free_element_chain = elt;
1468 }
1469
1470 /* Same as above, but X is a pseudo-register. */
1471
1472 static void
1473 remove_pseudo_from_table (rtx x, unsigned int hash)
1474 {
1475 struct table_elt *elt;
1476
1477 /* Because a pseudo-register can be referenced in more than one
1478 mode, we might have to remove more than one table entry. */
1479 while ((elt = lookup_for_remove (x, hash, VOIDmode)))
1480 remove_from_table (elt, hash);
1481 }
1482
1483 /* Look up X in the hash table and return its table element,
1484 or 0 if X is not in the table.
1485
1486 MODE is the machine-mode of X, or if X is an integer constant
1487 with VOIDmode then MODE is the mode with which X will be used.
1488
1489 Here we are satisfied to find an expression whose tree structure
1490 looks like X. */
1491
1492 static struct table_elt *
1493 lookup (rtx x, unsigned int hash, enum machine_mode mode)
1494 {
1495 struct table_elt *p;
1496
1497 for (p = table[hash]; p; p = p->next_same_hash)
1498 if (mode == p->mode && ((x == p->exp && REG_P (x))
1499 || exp_equiv_p (x, p->exp, !REG_P (x), false)))
1500 return p;
1501
1502 return 0;
1503 }
1504
1505 /* Like `lookup' but don't care whether the table element uses invalid regs.
1506 Also ignore discrepancies in the machine mode of a register. */
1507
1508 static struct table_elt *
1509 lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
1510 {
1511 struct table_elt *p;
1512
1513 if (REG_P (x))
1514 {
1515 unsigned int regno = REGNO (x);
1516
1517 /* Don't check the machine mode when comparing registers;
1518 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1519 for (p = table[hash]; p; p = p->next_same_hash)
1520 if (REG_P (p->exp)
1521 && REGNO (p->exp) == regno)
1522 return p;
1523 }
1524 else
1525 {
1526 for (p = table[hash]; p; p = p->next_same_hash)
1527 if (mode == p->mode
1528 && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
1529 return p;
1530 }
1531
1532 return 0;
1533 }
1534
1535 /* Look for an expression equivalent to X and with code CODE.
1536 If one is found, return that expression. */
1537
1538 static rtx
1539 lookup_as_function (rtx x, enum rtx_code code)
1540 {
1541 struct table_elt *p
1542 = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));
1543
1544 if (p == 0)
1545 return 0;
1546
1547 for (p = p->first_same_value; p; p = p->next_same_value)
1548 if (GET_CODE (p->exp) == code
1549 /* Make sure this is a valid entry in the table. */
1550 && exp_equiv_p (p->exp, p->exp, 1, false))
1551 return p->exp;
1552
1553 return 0;
1554 }
1555
1556 /* Insert X in the hash table, assuming HASH is its hash code and
1557 CLASSP is an element of the class it should go in (or 0 if a new
1558 class should be made). COST is the cost of X and reg_cost is the
1559 cost of registers in X. It is inserted at the proper position to
1560 keep the class in the order cheapest first.
1561
1562 MODE is the machine-mode of X, or if X is an integer constant
1563 with VOIDmode then MODE is the mode with which X will be used.
1564
1565 For elements of equal cheapness, the most recent one
1566 goes in front, except that the first element in the list
1567 remains first unless a cheaper element is added. The order of
1568 pseudo-registers does not matter, as canon_reg will be called to
1569 find the cheapest when a register is retrieved from the table.
1570
1571 The in_memory field in the hash table element is set to 0.
1572 The caller must set it nonzero if appropriate.
1573
1574 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1575 and if insert_regs returns a nonzero value
1576 you must then recompute its hash code before calling here.
1577
1578 If necessary, update table showing constant values of quantities. */
1579
1580 static struct table_elt *
1581 insert_with_costs (rtx x, struct table_elt *classp, unsigned int hash,
1582 enum machine_mode mode, int cost, int reg_cost)
1583 {
1584 struct table_elt *elt;
1585
1586 /* If X is a register and we haven't made a quantity for it,
1587 something is wrong. */
1588 gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));
1589
1590 /* If X is a hard register, show it is being put in the table. */
1591 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1592 add_to_hard_reg_set (&hard_regs_in_table, GET_MODE (x), REGNO (x));
1593
1594 /* Put an element for X into the right hash bucket. */
1595
1596 elt = free_element_chain;
1597 if (elt)
1598 free_element_chain = elt->next_same_hash;
1599 else
1600 elt = XNEW (struct table_elt);
1601
1602 elt->exp = x;
1603 elt->canon_exp = NULL_RTX;
1604 elt->cost = cost;
1605 elt->regcost = reg_cost;
1606 elt->next_same_value = 0;
1607 elt->prev_same_value = 0;
1608 elt->next_same_hash = table[hash];
1609 elt->prev_same_hash = 0;
1610 elt->related_value = 0;
1611 elt->in_memory = 0;
1612 elt->mode = mode;
1613 elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));
1614
1615 if (table[hash])
1616 table[hash]->prev_same_hash = elt;
1617 table[hash] = elt;
1618
1619 /* Put it into the proper value-class. */
1620 if (classp)
1621 {
1622 classp = classp->first_same_value;
1623 if (CHEAPER (elt, classp))
1624 /* Insert at the head of the class. */
1625 {
1626 struct table_elt *p;
1627 elt->next_same_value = classp;
1628 classp->prev_same_value = elt;
1629 elt->first_same_value = elt;
1630
1631 for (p = classp; p; p = p->next_same_value)
1632 p->first_same_value = elt;
1633 }
1634 else
1635 {
1636 /* Insert not at head of the class. */
1637 /* Put it after the last element cheaper than X. */
1638 struct table_elt *p, *next;
1639
1640 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1641 p = next);
1642
1643 /* Put it after P and before NEXT. */
1644 elt->next_same_value = next;
1645 if (next)
1646 next->prev_same_value = elt;
1647
1648 elt->prev_same_value = p;
1649 p->next_same_value = elt;
1650 elt->first_same_value = classp;
1651 }
1652 }
1653 else
1654 elt->first_same_value = elt;
1655
1656 /* If this is a constant being set equivalent to a register or a register
1657 being set equivalent to a constant, note the constant equivalence.
1658
1659 If this is a constant, it cannot be equivalent to a different constant,
1660 and a constant is the only thing that can be cheaper than a register. So
1661 we know the register is the head of the class (before the constant was
1662 inserted).
1663
1664 If this is a register that is not already known equivalent to a
1665 constant, we must check the entire class.
1666
1667 If this is a register that is already known equivalent to a constant,
1668 update the quantity's `const_insn' to show that `this_insn' is the latest
1669 insn making that quantity equivalent to the constant. */
1670
1671 if (elt->is_const && classp && REG_P (classp->exp)
1672 && !REG_P (x))
1673 {
1674 int exp_q = REG_QTY (REGNO (classp->exp));
1675 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1676
1677 exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
1678 exp_ent->const_insn = this_insn;
1679 }
1680
1681 else if (REG_P (x)
1682 && classp
1683 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1684 && ! elt->is_const)
1685 {
1686 struct table_elt *p;
1687
1688 for (p = classp; p != 0; p = p->next_same_value)
1689 {
1690 if (p->is_const && !REG_P (p->exp))
1691 {
1692 int x_q = REG_QTY (REGNO (x));
1693 struct qty_table_elem *x_ent = &qty_table[x_q];
1694
1695 x_ent->const_rtx
1696 = gen_lowpart (GET_MODE (x), p->exp);
1697 x_ent->const_insn = this_insn;
1698 break;
1699 }
1700 }
1701 }
1702
1703 else if (REG_P (x)
1704 && qty_table[REG_QTY (REGNO (x))].const_rtx
1705 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1706 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1707
1708 /* If this is a constant with symbolic value,
1709 and it has a term with an explicit integer value,
1710 link it up with related expressions. */
1711 if (GET_CODE (x) == CONST)
1712 {
1713 rtx subexp = get_related_value (x);
1714 unsigned subhash;
1715 struct table_elt *subelt, *subelt_prev;
1716
1717 if (subexp != 0)
1718 {
1719 /* Get the integer-free subexpression in the hash table. */
1720 subhash = SAFE_HASH (subexp, mode);
1721 subelt = lookup (subexp, subhash, mode);
1722 if (subelt == 0)
1723 subelt = insert (subexp, NULL, subhash, mode);
1724 /* Initialize SUBELT's circular chain if it has none. */
1725 if (subelt->related_value == 0)
1726 subelt->related_value = subelt;
1727 /* Find the element in the circular chain that precedes SUBELT. */
1728 subelt_prev = subelt;
1729 while (subelt_prev->related_value != subelt)
1730 subelt_prev = subelt_prev->related_value;
1731 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1732 This way the element that follows SUBELT is the oldest one. */
1733 elt->related_value = subelt_prev->related_value;
1734 subelt_prev->related_value = elt;
1735 }
1736 }
1737
1738 return elt;
1739 }
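/* Example of the related-value chain built above (the symbol and offsets are
   hypothetical): after inserting
   (const (plus (symbol_ref "a") (const_int 4))) and later
   (const (plus (symbol_ref "a") (const_int 8))), both elements sit on a
   circular `related_value' list anchored at the entry for the integer-free
   subexpression (symbol_ref "a").  use_related_value below walks this ring
   to rewrite one such constant as another one plus an offset.  */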
1740
1741 /* Wrap insert_with_costs by passing the default costs. */
1742
1743 static struct table_elt *
1744 insert (rtx x, struct table_elt *classp, unsigned int hash,
1745 enum machine_mode mode)
1746 {
1747 return
1748 insert_with_costs (x, classp, hash, mode, COST (x), approx_reg_cost (x));
1749 }
1750
1751 \f
1752 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1753 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1754 the two classes equivalent.
1755
1756 CLASS1 will be the surviving class; CLASS2 should not be used after this
1757 call.
1758
1759 Any invalid entries in CLASS2 will not be copied. */
1760
1761 static void
1762 merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1763 {
1764 struct table_elt *elt, *next, *new_elt;
1765
1766 /* Ensure we start with the head of the classes. */
1767 class1 = class1->first_same_value;
1768 class2 = class2->first_same_value;
1769
1770 /* If they were already equal, forget it. */
1771 if (class1 == class2)
1772 return;
1773
1774 for (elt = class2; elt; elt = next)
1775 {
1776 unsigned int hash;
1777 rtx exp = elt->exp;
1778 enum machine_mode mode = elt->mode;
1779
1780 next = elt->next_same_value;
1781
1782 /* Remove old entry, make a new one in CLASS1's class.
1783 Don't do this for invalid entries as we cannot find their
1784 hash code (it also isn't necessary). */
1785 if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
1786 {
1787 bool need_rehash = false;
1788
1789 hash_arg_in_memory = 0;
1790 hash = HASH (exp, mode);
1791
1792 if (REG_P (exp))
1793 {
1794 need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
1795 delete_reg_equiv (REGNO (exp));
1796 }
1797
1798 if (REG_P (exp) && REGNO (exp) >= FIRST_PSEUDO_REGISTER)
1799 remove_pseudo_from_table (exp, hash);
1800 else
1801 remove_from_table (elt, hash);
1802
1803 if (insert_regs (exp, class1, 0) || need_rehash)
1804 {
1805 rehash_using_reg (exp);
1806 hash = HASH (exp, mode);
1807 }
1808 new_elt = insert (exp, class1, hash, mode);
1809 new_elt->in_memory = hash_arg_in_memory;
1810 }
1811 }
1812 }
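/* Roughly speaking, this merge happens when scanning an insn shows that two
   previously separate classes describe the same value: for instance, when a
   SET reveals that the class headed by (reg 100) and the class containing
   (mem (reg 101)) are equal, every valid entry of the second class is
   re-inserted into the first.  The particular expressions here are only
   illustrative.  */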
1813 \f
1814 /* Flush the entire hash table. */
1815
1816 static void
1817 flush_hash_table (void)
1818 {
1819 int i;
1820 struct table_elt *p;
1821
1822 for (i = 0; i < HASH_SIZE; i++)
1823 for (p = table[i]; p; p = table[i])
1824 {
1825 /* Note that invalidate can remove elements
1826 after P in the current hash chain. */
1827 if (REG_P (p->exp))
1828 invalidate (p->exp, VOIDmode);
1829 else
1830 remove_from_table (p, i);
1831 }
1832 }
1833 \f
1834 /* Function called for each rtx to check whether a true dependence exists. */
1835 struct check_dependence_data
1836 {
1837 enum machine_mode mode;
1838 rtx exp;
1839 rtx addr;
1840 };
1841
1842 static int
1843 check_dependence (rtx *x, void *data)
1844 {
1845 struct check_dependence_data *d = (struct check_dependence_data *) data;
1846 if (*x && MEM_P (*x))
1847 return canon_true_dependence (d->exp, d->mode, d->addr, *x, NULL_RTX,
1848 cse_rtx_varies_p);
1849 else
1850 return 0;
1851 }
1852 \f
1853 /* Remove from the hash table, or mark as invalid, all expressions whose
1854 values could be altered by storing in X. X is a register, a subreg, or
1855 a memory reference with nonvarying address (because, when a memory
1856 reference with a varying address is stored in, all memory references are
1857 removed by invalidate_memory so specific invalidation is superfluous).
1858 FULL_MODE, if not VOIDmode, indicates that this much should be
1859 invalidated instead of just the amount indicated by the mode of X. This
1860 is only used for bitfield stores into memory.
1861
1862 A nonvarying address may be just a register or just a symbol reference,
1863 or it may be either of those plus a numeric offset. */
1864
1865 static void
1866 invalidate (rtx x, enum machine_mode full_mode)
1867 {
1868 int i;
1869 struct table_elt *p;
1870 rtx addr;
1871
1872 switch (GET_CODE (x))
1873 {
1874 case REG:
1875 {
1876 /* If X is a register, dependencies on its contents are recorded
1877 through the qty number mechanism. Just change the qty number of
1878 the register, mark it as invalid for expressions that refer to it,
1879 and remove it itself. */
1880 unsigned int regno = REGNO (x);
1881 unsigned int hash = HASH (x, GET_MODE (x));
1882
1883 /* Remove REGNO from any quantity list it might be on and indicate
1884 that its value might have changed. If it is a pseudo, remove its
1885 entry from the hash table.
1886
1887 For a hard register, we do the first two actions above for any
1888 additional hard registers corresponding to X. Then, if any of these
1889 registers are in the table, we must remove any REG entries that
1890 overlap these registers. */
1891
1892 delete_reg_equiv (regno);
1893 REG_TICK (regno)++;
1894 SUBREG_TICKED (regno) = -1;
1895
1896 if (regno >= FIRST_PSEUDO_REGISTER)
1897 remove_pseudo_from_table (x, hash);
1898 else
1899 {
1900 HOST_WIDE_INT in_table
1901 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1902 unsigned int endregno = END_HARD_REGNO (x);
1903 unsigned int tregno, tendregno, rn;
1904 struct table_elt *p, *next;
1905
1906 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1907
1908 for (rn = regno + 1; rn < endregno; rn++)
1909 {
1910 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1911 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1912 delete_reg_equiv (rn);
1913 REG_TICK (rn)++;
1914 SUBREG_TICKED (rn) = -1;
1915 }
1916
1917 if (in_table)
1918 for (hash = 0; hash < HASH_SIZE; hash++)
1919 for (p = table[hash]; p; p = next)
1920 {
1921 next = p->next_same_hash;
1922
1923 if (!REG_P (p->exp)
1924 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1925 continue;
1926
1927 tregno = REGNO (p->exp);
1928 tendregno = END_HARD_REGNO (p->exp);
1929 if (tendregno > regno && tregno < endregno)
1930 remove_from_table (p, hash);
1931 }
1932 }
1933 }
1934 return;
1935
1936 case SUBREG:
1937 invalidate (SUBREG_REG (x), VOIDmode);
1938 return;
1939
1940 case PARALLEL:
1941 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1942 invalidate (XVECEXP (x, 0, i), VOIDmode);
1943 return;
1944
1945 case EXPR_LIST:
1946 /* This is part of a disjoint return value; extract the location in
1947 question ignoring the offset. */
1948 invalidate (XEXP (x, 0), VOIDmode);
1949 return;
1950
1951 case MEM:
1952 addr = canon_rtx (get_addr (XEXP (x, 0)));
1953 /* Calculate the canonical version of X here so that
1954 true_dependence doesn't generate new RTL for X on each call. */
1955 x = canon_rtx (x);
1956
1957 /* Remove all hash table elements that refer to overlapping pieces of
1958 memory. */
1959 if (full_mode == VOIDmode)
1960 full_mode = GET_MODE (x);
1961
1962 for (i = 0; i < HASH_SIZE; i++)
1963 {
1964 struct table_elt *next;
1965
1966 for (p = table[i]; p; p = next)
1967 {
1968 next = p->next_same_hash;
1969 if (p->in_memory)
1970 {
1971 struct check_dependence_data d;
1972
1973 /* Just canonicalize the expression once;
1974 otherwise each time we call invalidate
1975 true_dependence will canonicalize the
1976 expression again. */
1977 if (!p->canon_exp)
1978 p->canon_exp = canon_rtx (p->exp);
1979 d.exp = x;
1980 d.addr = addr;
1981 d.mode = full_mode;
1982 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1983 remove_from_table (p, i);
1984 }
1985 }
1986 }
1987 return;
1988
1989 default:
1990 gcc_unreachable ();
1991 }
1992 }
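/* For example (hard register numbers are hypothetical and target dependent):
   invalidating a multi-word hard register that occupies hard regs 3 and 4
   bumps REG_TICK for both registers and removes any table entry whose
   expression is a hard REG overlapping either of them, while invalidating a
   store through (mem (reg 100)) only removes entries whose in_memory flag is
   set and that may alias the stored location per canon_true_dependence.  */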
1993 \f
1994 /* Remove all expressions that refer to register REGNO,
1995 since they are already invalid, and we are about to
1996 mark that register valid again and don't want the old
1997 expressions to reappear as valid. */
1998
1999 static void
2000 remove_invalid_refs (unsigned int regno)
2001 {
2002 unsigned int i;
2003 struct table_elt *p, *next;
2004
2005 for (i = 0; i < HASH_SIZE; i++)
2006 for (p = table[i]; p; p = next)
2007 {
2008 next = p->next_same_hash;
2009 if (!REG_P (p->exp)
2010 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
2011 remove_from_table (p, i);
2012 }
2013 }
2014
2015 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
2016 and mode MODE. */
2017 static void
2018 remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
2019 enum machine_mode mode)
2020 {
2021 unsigned int i;
2022 struct table_elt *p, *next;
2023 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
2024
2025 for (i = 0; i < HASH_SIZE; i++)
2026 for (p = table[i]; p; p = next)
2027 {
2028 rtx exp = p->exp;
2029 next = p->next_same_hash;
2030
2031 if (!REG_P (exp)
2032 && (GET_CODE (exp) != SUBREG
2033 || !REG_P (SUBREG_REG (exp))
2034 || REGNO (SUBREG_REG (exp)) != regno
2035 || (((SUBREG_BYTE (exp)
2036 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
2037 && SUBREG_BYTE (exp) <= end))
2038 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
2039 remove_from_table (p, i);
2040 }
2041 }
2042 \f
2043 /* Recompute the hash codes of any valid entries in the hash table that
2044 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2045
2046 This is called when we make a jump equivalence. */
2047
2048 static void
2049 rehash_using_reg (rtx x)
2050 {
2051 unsigned int i;
2052 struct table_elt *p, *next;
2053 unsigned hash;
2054
2055 if (GET_CODE (x) == SUBREG)
2056 x = SUBREG_REG (x);
2057
2058 /* If X is not a register or if the register is known not to be in any
2059 valid entries in the table, we have no work to do. */
2060
2061 if (!REG_P (x)
2062 || REG_IN_TABLE (REGNO (x)) < 0
2063 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2064 return;
2065
2066 /* Scan all hash chains looking for valid entries that mention X.
2067 If we find one and it is in the wrong hash chain, move it. */
2068
2069 for (i = 0; i < HASH_SIZE; i++)
2070 for (p = table[i]; p; p = next)
2071 {
2072 next = p->next_same_hash;
2073 if (reg_mentioned_p (x, p->exp)
2074 && exp_equiv_p (p->exp, p->exp, 1, false)
2075 && i != (hash = SAFE_HASH (p->exp, p->mode)))
2076 {
2077 if (p->next_same_hash)
2078 p->next_same_hash->prev_same_hash = p->prev_same_hash;
2079
2080 if (p->prev_same_hash)
2081 p->prev_same_hash->next_same_hash = p->next_same_hash;
2082 else
2083 table[i] = p->next_same_hash;
2084
2085 p->next_same_hash = table[hash];
2086 p->prev_same_hash = 0;
2087 if (table[hash])
2088 table[hash]->prev_same_hash = p;
2089 table[hash] = p;
2090 }
2091 }
2092 }
2093 \f
2094 /* Remove from the hash table all expressions that are call-clobbered
2095 registers. Also update their TICK values. */
2096
2097 static void
2098 invalidate_for_call (void)
2099 {
2100 unsigned int regno, endregno;
2101 unsigned int i;
2102 unsigned hash;
2103 struct table_elt *p, *next;
2104 int in_table = 0;
2105
2106 /* Go through all the hard registers. For each that is clobbered in
2107 a CALL_INSN, remove the register from quantity chains and update
2108 reg_tick if defined. Also see if any of these registers is currently
2109 in the table. */
2110
2111 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2112 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2113 {
2114 delete_reg_equiv (regno);
2115 if (REG_TICK (regno) >= 0)
2116 {
2117 REG_TICK (regno)++;
2118 SUBREG_TICKED (regno) = -1;
2119 }
2120
2121 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2122 }
2123
2124 /* In the case where we have no call-clobbered hard registers in the
2125 table, we are done. Otherwise, scan the table and remove any
2126 entry that overlaps a call-clobbered register. */
2127
2128 if (in_table)
2129 for (hash = 0; hash < HASH_SIZE; hash++)
2130 for (p = table[hash]; p; p = next)
2131 {
2132 next = p->next_same_hash;
2133
2134 if (!REG_P (p->exp)
2135 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2136 continue;
2137
2138 regno = REGNO (p->exp);
2139 endregno = END_HARD_REGNO (p->exp);
2140
2141 for (i = regno; i < endregno; i++)
2142 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2143 {
2144 remove_from_table (p, hash);
2145 break;
2146 }
2147 }
2148 }
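/* In practical terms (which registers are call-clobbered is target
   dependent, so this is only a sketch): across a call, an entry recording
   that hard register 0 holds some value is removed if register 0 is in
   regs_invalidated_by_call, while entries for call-saved hard registers and
   for pseudo registers survive the call.  */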
2149 \f
2150 /* Given an expression X of type CONST,
2151 and ELT which is its table entry (or 0 if it
2152 is not in the hash table),
2153 return an alternate expression for X as a register plus integer.
2154 If none can be found, return 0. */
2155
2156 static rtx
2157 use_related_value (rtx x, struct table_elt *elt)
2158 {
2159 struct table_elt *relt = 0;
2160 struct table_elt *p, *q;
2161 HOST_WIDE_INT offset;
2162
2163 /* First, is there anything related known?
2164 If we have a table element, we can tell from that.
2165 Otherwise, must look it up. */
2166
2167 if (elt != 0 && elt->related_value != 0)
2168 relt = elt;
2169 else if (elt == 0 && GET_CODE (x) == CONST)
2170 {
2171 rtx subexp = get_related_value (x);
2172 if (subexp != 0)
2173 relt = lookup (subexp,
2174 SAFE_HASH (subexp, GET_MODE (subexp)),
2175 GET_MODE (subexp));
2176 }
2177
2178 if (relt == 0)
2179 return 0;
2180
2181 /* Search all related table entries for one that has an
2182 equivalent register. */
2183
2184 p = relt;
2185 while (1)
2186 {
2187 /* This loop is strange in that it is executed in two different cases.
2188 The first is when X is already in the table. Then it is searching
2189 the RELATED_VALUE list of X's class (RELT). The second case is when
2190 X is not in the table. Then RELT points to a class for the related
2191 value.
2192
2193 Ensure that, whatever case we are in, we ignore classes that have
2194 the same value as X. */
2195
2196 if (rtx_equal_p (x, p->exp))
2197 q = 0;
2198 else
2199 for (q = p->first_same_value; q; q = q->next_same_value)
2200 if (REG_P (q->exp))
2201 break;
2202
2203 if (q)
2204 break;
2205
2206 p = p->related_value;
2207
2208 /* We went all the way around, so there is nothing to be found.
2209 Alternatively, perhaps RELT was in the table for some other reason
2210 and it has no related values recorded. */
2211 if (p == relt || p == 0)
2212 break;
2213 }
2214
2215 if (q == 0)
2216 return 0;
2217
2218 offset = (get_integer_term (x) - get_integer_term (p->exp));
2219 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2220 return plus_constant (q->exp, offset);
2221 }
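/* Worked example (register numbers hypothetical): if X is
   (const (plus (symbol_ref "tbl") (const_int 16))) and the table records
   that (reg 7) holds (const (plus (symbol_ref "tbl") (const_int 4))), the
   offset computed above is 16 - 4 = 12 and the result is
   (plus (reg 7) (const_int 12)).  */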
2222 \f
2223
2224 /* Hash a string. Just add its bytes up. */
2225 static inline unsigned
2226 hash_rtx_string (const char *ps)
2227 {
2228 unsigned hash = 0;
2229 const unsigned char *p = (const unsigned char *) ps;
2230
2231 if (p)
2232 while (*p)
2233 hash += *p++;
2234
2235 return hash;
2236 }
2237
2238 /* Same as hash_rtx, but call CB on each rtx if it is not NULL.
2239 When the callback returns true, we continue with the new rtx. */
2240
2241 unsigned
2242 hash_rtx_cb (const_rtx x, enum machine_mode mode,
2243 int *do_not_record_p, int *hash_arg_in_memory_p,
2244 bool have_reg_qty, hash_rtx_callback_function cb)
2245 {
2246 int i, j;
2247 unsigned hash = 0;
2248 enum rtx_code code;
2249 const char *fmt;
2250 enum machine_mode newmode;
2251 rtx newx;
2252
2253 /* Used to turn recursion into iteration. We can't rely on GCC's
2254 tail-recursion elimination since we need to keep accumulating values
2255 in HASH. */
2256 repeat:
2257 if (x == 0)
2258 return hash;
2259
2260 /* Invoke the callback first. */
2261 if (cb != NULL
2262 && ((*cb) (x, mode, &newx, &newmode)))
2263 {
2264 hash += hash_rtx_cb (newx, newmode, do_not_record_p,
2265 hash_arg_in_memory_p, have_reg_qty, cb);
2266 return hash;
2267 }
2268
2269 code = GET_CODE (x);
2270 switch (code)
2271 {
2272 case REG:
2273 {
2274 unsigned int regno = REGNO (x);
2275
2276 if (do_not_record_p && !reload_completed)
2277 {
2278 /* On some machines, we can't record any non-fixed hard register,
2279 because extending its life will cause reload problems. We
2280 consider ap, fp, sp, gp to be fixed for this purpose.
2281
2282 We also consider CCmode registers to be fixed for this purpose;
2283 failure to do so leads to failure to simplify 0<100 type of
2284 conditionals.
2285
2286 On all machines, we can't record any global registers.
2287 Nor should we record any register that is in a small
2288 class, as defined by TARGET_CLASS_LIKELY_SPILLED_P. */
2289 bool record;
2290
2291 if (regno >= FIRST_PSEUDO_REGISTER)
2292 record = true;
2293 else if (x == frame_pointer_rtx
2294 || x == hard_frame_pointer_rtx
2295 || x == arg_pointer_rtx
2296 || x == stack_pointer_rtx
2297 || x == pic_offset_table_rtx)
2298 record = true;
2299 else if (global_regs[regno])
2300 record = false;
2301 else if (fixed_regs[regno])
2302 record = true;
2303 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2304 record = true;
2305 else if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
2306 record = false;
2307 else if (targetm.class_likely_spilled_p (REGNO_REG_CLASS (regno)))
2308 record = false;
2309 else
2310 record = true;
2311
2312 if (!record)
2313 {
2314 *do_not_record_p = 1;
2315 return 0;
2316 }
2317 }
2318
2319 hash += ((unsigned int) REG << 7);
2320 hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2321 return hash;
2322 }
2323
2324 /* We handle SUBREG of a REG specially because the underlying
2325 reg changes its hash value with every value change; we don't
2326 want to have to forget unrelated subregs when one subreg changes. */
2327 case SUBREG:
2328 {
2329 if (REG_P (SUBREG_REG (x)))
2330 {
2331 hash += (((unsigned int) SUBREG << 7)
2332 + REGNO (SUBREG_REG (x))
2333 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2334 return hash;
2335 }
2336 break;
2337 }
2338
2339 case CONST_INT:
2340 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2341 + (unsigned int) INTVAL (x));
2342 return hash;
2343
2344 case CONST_DOUBLE:
2345 /* This is like the general case, except that it only counts
2346 the integers representing the constant. */
2347 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2348 if (GET_MODE (x) != VOIDmode)
2349 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2350 else
2351 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2352 + (unsigned int) CONST_DOUBLE_HIGH (x));
2353 return hash;
2354
2355 case CONST_FIXED:
2356 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2357 hash += fixed_hash (CONST_FIXED_VALUE (x));
2358 return hash;
2359
2360 case CONST_VECTOR:
2361 {
2362 int units;
2363 rtx elt;
2364
2365 units = CONST_VECTOR_NUNITS (x);
2366
2367 for (i = 0; i < units; ++i)
2368 {
2369 elt = CONST_VECTOR_ELT (x, i);
2370 hash += hash_rtx_cb (elt, GET_MODE (elt),
2371 do_not_record_p, hash_arg_in_memory_p,
2372 have_reg_qty, cb);
2373 }
2374
2375 return hash;
2376 }
2377
2378 /* Assume there is only one rtx object for any given label. */
2379 case LABEL_REF:
2380 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2381 differences and differences between each stage's debugging dumps. */
2382 hash += (((unsigned int) LABEL_REF << 7)
2383 + CODE_LABEL_NUMBER (XEXP (x, 0)));
2384 return hash;
2385
2386 case SYMBOL_REF:
2387 {
2388 /* Don't hash on the symbol's address to avoid bootstrap differences.
2389 Different hash values may cause expressions to be recorded in
2390 different orders and thus different registers to be used in the
2391 final assembler. This also avoids differences in the dump files
2392 between various stages. */
2393 unsigned int h = 0;
2394 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2395
2396 while (*p)
2397 h += (h << 7) + *p++; /* ??? revisit */
2398
2399 hash += ((unsigned int) SYMBOL_REF << 7) + h;
2400 return hash;
2401 }
2402
2403 case MEM:
2404 /* We don't record if marked volatile or if BLKmode since we don't
2405 know the size of the move. */
2406 if (do_not_record_p && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2407 {
2408 *do_not_record_p = 1;
2409 return 0;
2410 }
2411 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2412 *hash_arg_in_memory_p = 1;
2413
2414 /* Now that we have already found this special case,
2415 might as well speed it up as much as possible. */
2416 hash += (unsigned) MEM;
2417 x = XEXP (x, 0);
2418 goto repeat;
2419
2420 case USE:
2421 /* A USE that mentions non-volatile memory needs special
2422 handling since the MEM may be BLKmode which normally
2423 prevents an entry from being made. Pure calls are
2424 marked by a USE which mentions BLKmode memory.
2425 See calls.c:emit_call_1. */
2426 if (MEM_P (XEXP (x, 0))
2427 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2428 {
2429 hash += (unsigned) USE;
2430 x = XEXP (x, 0);
2431
2432 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2433 *hash_arg_in_memory_p = 1;
2434
2435 /* Now that we have already found this special case,
2436 might as well speed it up as much as possible. */
2437 hash += (unsigned) MEM;
2438 x = XEXP (x, 0);
2439 goto repeat;
2440 }
2441 break;
2442
2443 case PRE_DEC:
2444 case PRE_INC:
2445 case POST_DEC:
2446 case POST_INC:
2447 case PRE_MODIFY:
2448 case POST_MODIFY:
2449 case PC:
2450 case CC0:
2451 case CALL:
2452 case UNSPEC_VOLATILE:
2453 if (do_not_record_p) {
2454 *do_not_record_p = 1;
2455 return 0;
2456 }
2457 else
2458 return hash;
2459 break;
2460
2461 case ASM_OPERANDS:
2462 if (do_not_record_p && MEM_VOLATILE_P (x))
2463 {
2464 *do_not_record_p = 1;
2465 return 0;
2466 }
2467 else
2468 {
2469 /* We don't want to take the filename and line into account. */
2470 hash += (unsigned) code + (unsigned) GET_MODE (x)
2471 + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2472 + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2473 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2474
2475 if (ASM_OPERANDS_INPUT_LENGTH (x))
2476 {
2477 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2478 {
2479 hash += (hash_rtx_cb (ASM_OPERANDS_INPUT (x, i),
2480 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2481 do_not_record_p, hash_arg_in_memory_p,
2482 have_reg_qty, cb)
2483 + hash_rtx_string
2484 (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2485 }
2486
2487 hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2488 x = ASM_OPERANDS_INPUT (x, 0);
2489 mode = GET_MODE (x);
2490 goto repeat;
2491 }
2492
2493 return hash;
2494 }
2495 break;
2496
2497 default:
2498 break;
2499 }
2500
2501 i = GET_RTX_LENGTH (code) - 1;
2502 hash += (unsigned) code + (unsigned) GET_MODE (x);
2503 fmt = GET_RTX_FORMAT (code);
2504 for (; i >= 0; i--)
2505 {
2506 switch (fmt[i])
2507 {
2508 case 'e':
2509 /* If we are about to do the last recursive call
2510 needed at this level, change it into iteration.
2511 This function is called enough to be worth it. */
2512 if (i == 0)
2513 {
2514 x = XEXP (x, i);
2515 goto repeat;
2516 }
2517
2518 hash += hash_rtx_cb (XEXP (x, i), VOIDmode, do_not_record_p,
2519 hash_arg_in_memory_p,
2520 have_reg_qty, cb);
2521 break;
2522
2523 case 'E':
2524 for (j = 0; j < XVECLEN (x, i); j++)
2525 hash += hash_rtx_cb (XVECEXP (x, i, j), VOIDmode, do_not_record_p,
2526 hash_arg_in_memory_p,
2527 have_reg_qty, cb);
2528 break;
2529
2530 case 's':
2531 hash += hash_rtx_string (XSTR (x, i));
2532 break;
2533
2534 case 'i':
2535 hash += (unsigned int) XINT (x, i);
2536 break;
2537
2538 case '0': case 't':
2539 /* Unused. */
2540 break;
2541
2542 default:
2543 gcc_unreachable ();
2544 }
2545 }
2546
2547 return hash;
2548 }
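/* A brief illustration of the REG and SUBREG rules above (register numbers
   hypothetical): with HAVE_REG_QTY set, (reg:SI 100) hashes on its quantity
   number, so two registers known to hold the same value hash identically,
   whereas (subreg:QI (reg:SI 100) 0) hashes on the register number and the
   subreg byte, so its hash does not change when the underlying register is
   assigned a new quantity.  */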
2549
2550 /* Hash an rtx. We are careful to make sure the value is never negative.
2551 Equivalent registers hash identically.
2552 MODE is used in hashing for CONST_INTs only;
2553 otherwise the mode of X is used.
2554
2555 Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2556
2557 If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2558 a MEM rtx which is not marked MEM_READONLY_P.
2559
2560 Note that cse_insn knows that the hash code of a MEM expression
2561 is just (int) MEM plus the hash code of the address. */
2562
2563 unsigned
2564 hash_rtx (const_rtx x, enum machine_mode mode, int *do_not_record_p,
2565 int *hash_arg_in_memory_p, bool have_reg_qty)
2566 {
2567 return hash_rtx_cb (x, mode, do_not_record_p,
2568 hash_arg_in_memory_p, have_reg_qty, NULL);
2569 }
2570
2571 /* Hash an rtx X for cse via hash_rtx.
2572 Stores 1 in do_not_record if any subexpression is volatile.
2573 Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2574 is not marked MEM_READONLY_P. */
2575
2576 static inline unsigned
2577 canon_hash (rtx x, enum machine_mode mode)
2578 {
2579 return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2580 }
2581
2582 /* Like canon_hash but with no side effects, i.e. do_not_record
2583 and hash_arg_in_memory are not changed. */
2584
2585 static inline unsigned
2586 safe_hash (rtx x, enum machine_mode mode)
2587 {
2588 int dummy_do_not_record;
2589 return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2590 }
2591 \f
2592 /* Return 1 iff X and Y would canonicalize into the same thing,
2593 without actually constructing the canonicalization of either one.
2594 If VALIDATE is nonzero,
2595 we assume X is an expression being processed from the rtl
2596 and Y was found in the hash table. We check register refs
2597 in Y for being marked as valid.
2598
2599 If FOR_GCSE is true, we compare X and Y for equivalence for GCSE. */
2600
2601 int
2602 exp_equiv_p (const_rtx x, const_rtx y, int validate, bool for_gcse)
2603 {
2604 int i, j;
2605 enum rtx_code code;
2606 const char *fmt;
2607
2608 /* Note: it is incorrect to assume an expression is equivalent to itself
2609 if VALIDATE is nonzero. */
2610 if (x == y && !validate)
2611 return 1;
2612
2613 if (x == 0 || y == 0)
2614 return x == y;
2615
2616 code = GET_CODE (x);
2617 if (code != GET_CODE (y))
2618 return 0;
2619
2620 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2621 if (GET_MODE (x) != GET_MODE (y))
2622 return 0;
2623
2624 /* MEMs referring to different address spaces are not equivalent. */
2625 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2626 return 0;
2627
2628 switch (code)
2629 {
2630 case PC:
2631 case CC0:
2632 case CONST_INT:
2633 case CONST_DOUBLE:
2634 case CONST_FIXED:
2635 return x == y;
2636
2637 case LABEL_REF:
2638 return XEXP (x, 0) == XEXP (y, 0);
2639
2640 case SYMBOL_REF:
2641 return XSTR (x, 0) == XSTR (y, 0);
2642
2643 case REG:
2644 if (for_gcse)
2645 return REGNO (x) == REGNO (y);
2646 else
2647 {
2648 unsigned int regno = REGNO (y);
2649 unsigned int i;
2650 unsigned int endregno = END_REGNO (y);
2651
2652 /* If the quantities are not the same, the expressions are not
2653 equivalent. If they are and we are not to validate, they
2654 are equivalent. Otherwise, ensure all regs are up-to-date. */
2655
2656 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2657 return 0;
2658
2659 if (! validate)
2660 return 1;
2661
2662 for (i = regno; i < endregno; i++)
2663 if (REG_IN_TABLE (i) != REG_TICK (i))
2664 return 0;
2665
2666 return 1;
2667 }
2668
2669 case MEM:
2670 if (for_gcse)
2671 {
2672 /* A volatile mem should not be considered equivalent to any
2673 other. */
2674 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2675 return 0;
2676
2677 /* Can't merge two expressions in different alias sets, since we
2678 can decide that the expression is transparent in a block when
2679 it isn't, due to it being set with the different alias set.
2680
2681 Also, can't merge two expressions with different MEM_ATTRS.
2682 They could e.g. be two different entities allocated into the
2683 same space on the stack (see e.g. PR25130). In that case, the
2684 MEM addresses can be the same, even though the two MEMs are
2685 absolutely not equivalent.
2686
2687 But because really all MEM attributes should be the same for
2688 equivalent MEMs, we just use the invariant that MEMs that have
2689 the same attributes share the same mem_attrs data structure. */
2690 if (MEM_ATTRS (x) != MEM_ATTRS (y))
2691 return 0;
2692 }
2693 break;
2694
2695 /* For commutative operations, check both orders. */
2696 case PLUS:
2697 case MULT:
2698 case AND:
2699 case IOR:
2700 case XOR:
2701 case NE:
2702 case EQ:
2703 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2704 validate, for_gcse)
2705 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2706 validate, for_gcse))
2707 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2708 validate, for_gcse)
2709 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2710 validate, for_gcse)));
2711
2712 case ASM_OPERANDS:
2713 /* We don't use the generic code below because we want to
2714 disregard filename and line numbers. */
2715
2716 /* A volatile asm isn't equivalent to any other. */
2717 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2718 return 0;
2719
2720 if (GET_MODE (x) != GET_MODE (y)
2721 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2722 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2723 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2724 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2725 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2726 return 0;
2727
2728 if (ASM_OPERANDS_INPUT_LENGTH (x))
2729 {
2730 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2731 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2732 ASM_OPERANDS_INPUT (y, i),
2733 validate, for_gcse)
2734 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2735 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2736 return 0;
2737 }
2738
2739 return 1;
2740
2741 default:
2742 break;
2743 }
2744
2745 /* Compare the elements. If any pair of corresponding elements
2746 fail to match, return 0 for the whole thing. */
2747
2748 fmt = GET_RTX_FORMAT (code);
2749 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2750 {
2751 switch (fmt[i])
2752 {
2753 case 'e':
2754 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2755 validate, for_gcse))
2756 return 0;
2757 break;
2758
2759 case 'E':
2760 if (XVECLEN (x, i) != XVECLEN (y, i))
2761 return 0;
2762 for (j = 0; j < XVECLEN (x, i); j++)
2763 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2764 validate, for_gcse))
2765 return 0;
2766 break;
2767
2768 case 's':
2769 if (strcmp (XSTR (x, i), XSTR (y, i)))
2770 return 0;
2771 break;
2772
2773 case 'i':
2774 if (XINT (x, i) != XINT (y, i))
2775 return 0;
2776 break;
2777
2778 case 'w':
2779 if (XWINT (x, i) != XWINT (y, i))
2780 return 0;
2781 break;
2782
2783 case '0':
2784 case 't':
2785 break;
2786
2787 default:
2788 gcc_unreachable ();
2789 }
2790 }
2791
2792 return 1;
2793 }
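/* A small example of the commutative case above (register numbers
   hypothetical): (plus (reg 100) (reg 101)) and (plus (reg 101) (reg 100))
   are considered equivalent because both operand orders are tried, whereas
   (minus (reg 100) (reg 101)) and (minus (reg 101) (reg 100)) are not,
   since MINUS falls through to the generic element-by-element comparison.  */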
2794 \f
2795 /* Return 1 if X has a value that can vary even between two
2796 executions of the program. 0 means X can be compared reliably
2797 against certain constants or near-constants. */
2798
2799 static bool
2800 cse_rtx_varies_p (const_rtx x, bool from_alias)
2801 {
2802 /* We need not check for X and the equivalence class being of the same
2803 mode because if X is equivalent to a constant in some mode, it
2804 doesn't vary in any mode. */
2805
2806 if (REG_P (x)
2807 && REGNO_QTY_VALID_P (REGNO (x)))
2808 {
2809 int x_q = REG_QTY (REGNO (x));
2810 struct qty_table_elem *x_ent = &qty_table[x_q];
2811
2812 if (GET_MODE (x) == x_ent->mode
2813 && x_ent->const_rtx != NULL_RTX)
2814 return 0;
2815 }
2816
2817 if (GET_CODE (x) == PLUS
2818 && CONST_INT_P (XEXP (x, 1))
2819 && REG_P (XEXP (x, 0))
2820 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2821 {
2822 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2823 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2824
2825 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2826 && x0_ent->const_rtx != NULL_RTX)
2827 return 0;
2828 }
2829
2830 /* This can happen as the result of virtual register instantiation, if
2831 the initial constant is too large to be a valid address. This gives
2832 us a three instruction sequence, load large offset into a register,
2833 load fp minus a constant into a register, then a MEM which is the
2834 sum of the two `constant' registers. */
2835 if (GET_CODE (x) == PLUS
2836 && REG_P (XEXP (x, 0))
2837 && REG_P (XEXP (x, 1))
2838 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2839 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2840 {
2841 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2842 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2843 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2844 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2845
2846 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2847 && x0_ent->const_rtx != NULL_RTX
2848 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2849 && x1_ent->const_rtx != NULL_RTX)
2850 return 0;
2851 }
2852
2853 return rtx_varies_p (x, from_alias);
2854 }
2855 \f
2856 /* Subroutine of canon_reg. Pass *XLOC through canon_reg, and validate
2857 the result if necessary. INSN is as for canon_reg. */
2858
2859 static void
2860 validate_canon_reg (rtx *xloc, rtx insn)
2861 {
2862 if (*xloc)
2863 {
2864 rtx new_rtx = canon_reg (*xloc, insn);
2865
2866 /* If replacing pseudo with hard reg or vice versa, ensure the
2867 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2868 gcc_assert (insn && new_rtx);
2869 validate_change (insn, xloc, new_rtx, 1);
2870 }
2871 }
2872
2873 /* Canonicalize an expression:
2874 replace each register reference inside it
2875 with the "oldest" equivalent register.
2876
2877 If INSN is nonzero validate_change is used to ensure that INSN remains valid
2878 after we make our substitution. The calls are made with IN_GROUP nonzero
2879 so apply_change_group must be called upon the outermost return from this
2880 function (unless INSN is zero). The result of apply_change_group can
2881 generally be discarded since the changes we are making are optional. */
2882
2883 static rtx
2884 canon_reg (rtx x, rtx insn)
2885 {
2886 int i;
2887 enum rtx_code code;
2888 const char *fmt;
2889
2890 if (x == 0)
2891 return x;
2892
2893 code = GET_CODE (x);
2894 switch (code)
2895 {
2896 case PC:
2897 case CC0:
2898 case CONST:
2899 case CONST_INT:
2900 case CONST_DOUBLE:
2901 case CONST_FIXED:
2902 case CONST_VECTOR:
2903 case SYMBOL_REF:
2904 case LABEL_REF:
2905 case ADDR_VEC:
2906 case ADDR_DIFF_VEC:
2907 return x;
2908
2909 case REG:
2910 {
2911 int first;
2912 int q;
2913 struct qty_table_elem *ent;
2914
2915 /* Never replace a hard reg, because hard regs can appear
2916 in more than one machine mode, and we must preserve the mode
2917 of each occurrence. Also, some hard regs appear in
2918 MEMs that are shared and mustn't be altered. Don't try to
2919 replace any reg that maps to a reg of class NO_REGS. */
2920 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2921 || ! REGNO_QTY_VALID_P (REGNO (x)))
2922 return x;
2923
2924 q = REG_QTY (REGNO (x));
2925 ent = &qty_table[q];
2926 first = ent->first_reg;
2927 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2928 : REGNO_REG_CLASS (first) == NO_REGS ? x
2929 : gen_rtx_REG (ent->mode, first));
2930 }
2931
2932 default:
2933 break;
2934 }
2935
2936 fmt = GET_RTX_FORMAT (code);
2937 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2938 {
2939 int j;
2940
2941 if (fmt[i] == 'e')
2942 validate_canon_reg (&XEXP (x, i), insn);
2943 else if (fmt[i] == 'E')
2944 for (j = 0; j < XVECLEN (x, i); j++)
2945 validate_canon_reg (&XVECEXP (x, i, j), insn);
2946 }
2947
2948 return x;
2949 }
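/* Sketch of the effect (register numbers hypothetical): if pseudos 105 and
   110 share the same quantity and 105 is the quantity's first_reg, then
   canon_reg rewrites (plus (reg 110) (const_int 4)) into
   (plus (reg 105) (const_int 4)).  When INSN is nonzero the replacement is
   registered with validate_change, so a substitution that would make INSN
   invalid is not committed when apply_change_group is called.  */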
2950 \f
2951 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2952 operation (EQ, NE, GT, etc.), follow it back through the hash table and
2953 find what values are actually being compared.
2954
2955 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2956 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2957 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2958 compared to produce cc0.
2959
2960 The return value is the comparison operator that applies to the updated
2961 *PARG1 and *PARG2; it may be the inverse of the comparison found. */
2962
2963 static enum rtx_code
2964 find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
2965 enum machine_mode *pmode1, enum machine_mode *pmode2)
2966 {
2967 rtx arg1, arg2;
2968
2969 arg1 = *parg1, arg2 = *parg2;
2970
2971 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2972
2973 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2974 {
2975 /* Set nonzero when we find something of interest. */
2976 rtx x = 0;
2977 int reverse_code = 0;
2978 struct table_elt *p = 0;
2979
2980 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2981 On machines with CC0, this is the only case that can occur, since
2982 fold_rtx will return the COMPARE or item being compared with zero
2983 when given CC0. */
2984
2985 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2986 x = arg1;
2987
2988 /* If ARG1 is a comparison operator and CODE is testing for
2989 STORE_FLAG_VALUE, get the inner arguments. */
2990
2991 else if (COMPARISON_P (arg1))
2992 {
2993 #ifdef FLOAT_STORE_FLAG_VALUE
2994 REAL_VALUE_TYPE fsfv;
2995 #endif
2996
2997 if (code == NE
2998 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2999 && code == LT && STORE_FLAG_VALUE == -1)
3000 #ifdef FLOAT_STORE_FLAG_VALUE
3001 || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
3002 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3003 REAL_VALUE_NEGATIVE (fsfv)))
3004 #endif
3005 )
3006 x = arg1;
3007 else if (code == EQ
3008 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3009 && code == GE && STORE_FLAG_VALUE == -1)
3010 #ifdef FLOAT_STORE_FLAG_VALUE
3011 || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
3012 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3013 REAL_VALUE_NEGATIVE (fsfv)))
3014 #endif
3015 )
3016 x = arg1, reverse_code = 1;
3017 }
3018
3019 /* ??? We could also check for
3020
3021 (ne (and (eq (...) (const_int 1))) (const_int 0))
3022
3023 and related forms, but let's wait until we see them occurring. */
3024
3025 if (x == 0)
3026 /* Look up ARG1 in the hash table and see if it has an equivalence
3027 that lets us see what is being compared. */
3028 p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
3029 if (p)
3030 {
3031 p = p->first_same_value;
3032
3033 /* If what we compare is already known to be constant, that is as
3034 good as it gets.
3035 We need to break the loop in this case, because otherwise we
3036 can have an infinite loop when looking at a reg that is known
3037 to be a constant which is the same as a comparison of a reg
3038 against zero which appears later in the insn stream, which in
3039 turn is constant and the same as the comparison of the first reg
3040 against zero... */
3041 if (p->is_const)
3042 break;
3043 }
3044
3045 for (; p; p = p->next_same_value)
3046 {
3047 enum machine_mode inner_mode = GET_MODE (p->exp);
3048 #ifdef FLOAT_STORE_FLAG_VALUE
3049 REAL_VALUE_TYPE fsfv;
3050 #endif
3051
3052 /* If the entry isn't valid, skip it. */
3053 if (! exp_equiv_p (p->exp, p->exp, 1, false))
3054 continue;
3055
3056 if (GET_CODE (p->exp) == COMPARE
3057 /* Another possibility is that this machine has a compare insn
3058 that includes the comparison code. In that case, ARG1 would
3059 be equivalent to a comparison operation that would set ARG1 to
3060 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3061 ORIG_CODE is the actual comparison being done; if it is an EQ,
3062 we must reverse ORIG_CODE. On machines with a negative value
3063 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3064 || ((code == NE
3065 || (code == LT
3066 && val_signbit_known_set_p (inner_mode,
3067 STORE_FLAG_VALUE))
3068 #ifdef FLOAT_STORE_FLAG_VALUE
3069 || (code == LT
3070 && SCALAR_FLOAT_MODE_P (inner_mode)
3071 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3072 REAL_VALUE_NEGATIVE (fsfv)))
3073 #endif
3074 )
3075 && COMPARISON_P (p->exp)))
3076 {
3077 x = p->exp;
3078 break;
3079 }
3080 else if ((code == EQ
3081 || (code == GE
3082 && val_signbit_known_set_p (inner_mode,
3083 STORE_FLAG_VALUE))
3084 #ifdef FLOAT_STORE_FLAG_VALUE
3085 || (code == GE
3086 && SCALAR_FLOAT_MODE_P (inner_mode)
3087 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3088 REAL_VALUE_NEGATIVE (fsfv)))
3089 #endif
3090 )
3091 && COMPARISON_P (p->exp))
3092 {
3093 reverse_code = 1;
3094 x = p->exp;
3095 break;
3096 }
3097
3098 /* If this is a non-trapping address, e.g. fp + constant, the
3099 equivalent is a better operand since it may let us predict
3100 the value of the comparison. */
3101 else if (!rtx_addr_can_trap_p (p->exp))
3102 {
3103 arg1 = p->exp;
3104 continue;
3105 }
3106 }
3107
3108 /* If we didn't find a useful equivalence for ARG1, we are done.
3109 Otherwise, set up for the next iteration. */
3110 if (x == 0)
3111 break;
3112
3113 /* If we need to reverse the comparison, make sure that that is
3114 possible -- we can't necessarily infer the value of GE from LT
3115 with floating-point operands. */
3116 if (reverse_code)
3117 {
3118 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3119 if (reversed == UNKNOWN)
3120 break;
3121 else
3122 code = reversed;
3123 }
3124 else if (COMPARISON_P (x))
3125 code = GET_CODE (x);
3126 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3127 }
3128
3129 /* Return our results. Return the modes from before fold_rtx
3130 because fold_rtx might produce const_int, and then it's too late. */
3131 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3132 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3133
3134 return code;
3135 }
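/* Illustrative walk (pseudo register numbers hypothetical): given a stream
   such as

     (set (reg 99) (gt:SI (reg 100) (reg 101)))
     ... later, compare (reg 99) against (const_int 0) with code NE ...

   the loop above finds that (reg 99) is equivalent to the GT expression, so
   it returns GT with *PARG1 and *PARG2 set to (reg 100) and (reg 101); had
   the original code been EQ, the code corresponding to the reverse of GT
   would be returned instead.  */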
3136 \f
3137 /* If X is a nontrivial arithmetic operation on an argument for which
3138 a constant value can be determined, return the result of operating
3139 on that value, as a constant. Otherwise, return X, possibly with
3140 one or more operands changed to a forward-propagated constant.
3141
3142 If X is a register whose contents are known, we do NOT return
3143 those contents here; equiv_constant is called to perform that task.
3144 For SUBREGs and MEMs, we do that both here and in equiv_constant.
3145
3146 INSN is the insn that we may be modifying. If it is 0, make a copy
3147 of X before modifying it. */
3148
3149 static rtx
3150 fold_rtx (rtx x, rtx insn)
3151 {
3152 enum rtx_code code;
3153 enum machine_mode mode;
3154 const char *fmt;
3155 int i;
3156 rtx new_rtx = 0;
3157 int changed = 0;
3158
3159 /* Operands of X. */
3160 rtx folded_arg0;
3161 rtx folded_arg1;
3162
3163 /* Constant equivalents of first three operands of X;
3164 0 when no such equivalent is known. */
3165 rtx const_arg0;
3166 rtx const_arg1;
3167 rtx const_arg2;
3168
3169 /* The mode of the first operand of X. We need this for sign and zero
3170 extends. */
3171 enum machine_mode mode_arg0;
3172
3173 if (x == 0)
3174 return x;
3175
3176 /* Try to perform some initial simplifications on X. */
3177 code = GET_CODE (x);
3178 switch (code)
3179 {
3180 case MEM:
3181 case SUBREG:
3182 if ((new_rtx = equiv_constant (x)) != NULL_RTX)
3183 return new_rtx;
3184 return x;
3185
3186 case CONST:
3187 case CONST_INT:
3188 case CONST_DOUBLE:
3189 case CONST_FIXED:
3190 case CONST_VECTOR:
3191 case SYMBOL_REF:
3192 case LABEL_REF:
3193 case REG:
3194 case PC:
3195 /* No use simplifying an EXPR_LIST
3196 since they are used only for lists of args
3197 in a function call's REG_EQUAL note. */
3198 case EXPR_LIST:
3199 return x;
3200
3201 #ifdef HAVE_cc0
3202 case CC0:
3203 return prev_insn_cc0;
3204 #endif
3205
3206 case ASM_OPERANDS:
3207 if (insn)
3208 {
3209 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3210 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3211 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3212 }
3213 return x;
3214
3215 #ifdef NO_FUNCTION_CSE
3216 case CALL:
3217 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3218 return x;
3219 break;
3220 #endif
3221
3222 /* Anything else goes through the loop below. */
3223 default:
3224 break;
3225 }
3226
3227 mode = GET_MODE (x);
3228 const_arg0 = 0;
3229 const_arg1 = 0;
3230 const_arg2 = 0;
3231 mode_arg0 = VOIDmode;
3232
3233 /* Try folding our operands.
3234 Then see which ones have constant values known. */
3235
3236 fmt = GET_RTX_FORMAT (code);
3237 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3238 if (fmt[i] == 'e')
3239 {
3240 rtx folded_arg = XEXP (x, i), const_arg;
3241 enum machine_mode mode_arg = GET_MODE (folded_arg);
3242
3243 switch (GET_CODE (folded_arg))
3244 {
3245 case MEM:
3246 case REG:
3247 case SUBREG:
3248 const_arg = equiv_constant (folded_arg);
3249 break;
3250
3251 case CONST:
3252 case CONST_INT:
3253 case SYMBOL_REF:
3254 case LABEL_REF:
3255 case CONST_DOUBLE:
3256 case CONST_FIXED:
3257 case CONST_VECTOR:
3258 const_arg = folded_arg;
3259 break;
3260
3261 #ifdef HAVE_cc0
3262 case CC0:
3263 folded_arg = prev_insn_cc0;
3264 mode_arg = prev_insn_cc0_mode;
3265 const_arg = equiv_constant (folded_arg);
3266 break;
3267 #endif
3268
3269 default:
3270 folded_arg = fold_rtx (folded_arg, insn);
3271 const_arg = equiv_constant (folded_arg);
3272 break;
3273 }
3274
3275 /* For the first three operands, see if the operand
3276 is constant or equivalent to a constant. */
3277 switch (i)
3278 {
3279 case 0:
3280 folded_arg0 = folded_arg;
3281 const_arg0 = const_arg;
3282 mode_arg0 = mode_arg;
3283 break;
3284 case 1:
3285 folded_arg1 = folded_arg;
3286 const_arg1 = const_arg;
3287 break;
3288 case 2:
3289 const_arg2 = const_arg;
3290 break;
3291 }
3292
3293 /* Pick the least expensive of the argument and an equivalent constant
3294 argument. */
3295 if (const_arg != 0
3296 && const_arg != folded_arg
3297 && COST_IN (const_arg, code) <= COST_IN (folded_arg, code)
3298
3299 /* It's not safe to substitute the operand of a conversion
3300 operator with a constant, as the conversion's identity
3301 depends upon the mode of its operand. This optimization
3302 is handled by the call to simplify_unary_operation. */
3303 && (GET_RTX_CLASS (code) != RTX_UNARY
3304 || GET_MODE (const_arg) == mode_arg0
3305 || (code != ZERO_EXTEND
3306 && code != SIGN_EXTEND
3307 && code != TRUNCATE
3308 && code != FLOAT_TRUNCATE
3309 && code != FLOAT_EXTEND
3310 && code != FLOAT
3311 && code != FIX
3312 && code != UNSIGNED_FLOAT
3313 && code != UNSIGNED_FIX)))
3314 folded_arg = const_arg;
3315
3316 if (folded_arg == XEXP (x, i))
3317 continue;
3318
3319 if (insn == NULL_RTX && !changed)
3320 x = copy_rtx (x);
3321 changed = 1;
3322 validate_unshare_change (insn, &XEXP (x, i), folded_arg, 1);
3323 }
3324
3325 if (changed)
3326 {
3327 /* Canonicalize X if necessary, and keep const_argN and folded_argN
3328 consistent with the order in X. */
3329 if (canonicalize_change_group (insn, x))
3330 {
3331 rtx tem;
3332 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3333 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3334 }
3335
3336 apply_change_group ();
3337 }
3338
3339 /* If X is an arithmetic operation, see if we can simplify it. */
3340
3341 switch (GET_RTX_CLASS (code))
3342 {
3343 case RTX_UNARY:
3344 {
3345 /* We can't simplify extension ops unless we know the
3346 original mode. */
3347 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3348 && mode_arg0 == VOIDmode)
3349 break;
3350
3351 new_rtx = simplify_unary_operation (code, mode,
3352 const_arg0 ? const_arg0 : folded_arg0,
3353 mode_arg0);
3354 }
3355 break;
3356
3357 case RTX_COMPARE:
3358 case RTX_COMM_COMPARE:
3359 /* See what items are actually being compared and set FOLDED_ARG[01]
3360 to those values and CODE to the actual comparison code. If any are
3361 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3362 do anything if both operands are already known to be constant. */
3363
3364 /* ??? Vector mode comparisons are not supported yet. */
3365 if (VECTOR_MODE_P (mode))
3366 break;
3367
3368 if (const_arg0 == 0 || const_arg1 == 0)
3369 {
3370 struct table_elt *p0, *p1;
3371 rtx true_rtx, false_rtx;
3372 enum machine_mode mode_arg1;
3373
3374 if (SCALAR_FLOAT_MODE_P (mode))
3375 {
3376 #ifdef FLOAT_STORE_FLAG_VALUE
3377 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3378 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3379 #else
3380 true_rtx = NULL_RTX;
3381 #endif
3382 false_rtx = CONST0_RTX (mode);
3383 }
3384 else
3385 {
3386 true_rtx = const_true_rtx;
3387 false_rtx = const0_rtx;
3388 }
3389
3390 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3391 &mode_arg0, &mode_arg1);
3392
3393 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3394 what kinds of things are being compared, so we can't do
3395 anything with this comparison. */
3396
3397 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3398 break;
3399
3400 const_arg0 = equiv_constant (folded_arg0);
3401 const_arg1 = equiv_constant (folded_arg1);
3402
3403 /* If we do not now have two constants being compared, see
3404 if we can nevertheless deduce some things about the
3405 comparison. */
3406 if (const_arg0 == 0 || const_arg1 == 0)
3407 {
3408 if (const_arg1 != NULL)
3409 {
3410 rtx cheapest_simplification;
3411 int cheapest_cost;
3412 rtx simp_result;
3413 struct table_elt *p;
3414
3415 /* See if we can find an equivalent of folded_arg0
3416 that gets us a cheaper expression, possibly a
3417 constant through simplifications. */
3418 p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0),
3419 mode_arg0);
3420
3421 if (p != NULL)
3422 {
3423 cheapest_simplification = x;
3424 cheapest_cost = COST (x);
3425
3426 for (p = p->first_same_value; p != NULL; p = p->next_same_value)
3427 {
3428 int cost;
3429
3430 /* If the entry isn't valid, skip it. */
3431 if (! exp_equiv_p (p->exp, p->exp, 1, false))
3432 continue;
3433
3434 /* Try to simplify using this equivalence. */
3435 simp_result
3436 = simplify_relational_operation (code, mode,
3437 mode_arg0,
3438 p->exp,
3439 const_arg1);
3440
3441 if (simp_result == NULL)
3442 continue;
3443
3444 cost = COST (simp_result);
3445 if (cost < cheapest_cost)
3446 {
3447 cheapest_cost = cost;
3448 cheapest_simplification = simp_result;
3449 }
3450 }
3451
3452 /* If we have a cheaper expression now, use that
3453 and try folding it further, from the top. */
3454 if (cheapest_simplification != x)
3455 return fold_rtx (copy_rtx (cheapest_simplification),
3456 insn);
3457 }
3458 }
3459
3460 /* See if the two operands are the same. */
3461
3462 if ((REG_P (folded_arg0)
3463 && REG_P (folded_arg1)
3464 && (REG_QTY (REGNO (folded_arg0))
3465 == REG_QTY (REGNO (folded_arg1))))
3466 || ((p0 = lookup (folded_arg0,
3467 SAFE_HASH (folded_arg0, mode_arg0),
3468 mode_arg0))
3469 && (p1 = lookup (folded_arg1,
3470 SAFE_HASH (folded_arg1, mode_arg0),
3471 mode_arg0))
3472 && p0->first_same_value == p1->first_same_value))
3473 folded_arg1 = folded_arg0;
3474
3475 /* If FOLDED_ARG0 is a register, see if the comparison we are
3476 doing now is either the same as we did before or the reverse
3477 (we only check the reverse if not floating-point). */
3478 else if (REG_P (folded_arg0))
3479 {
3480 int qty = REG_QTY (REGNO (folded_arg0));
3481
3482 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3483 {
3484 struct qty_table_elem *ent = &qty_table[qty];
3485
3486 if ((comparison_dominates_p (ent->comparison_code, code)
3487 || (! FLOAT_MODE_P (mode_arg0)
3488 && comparison_dominates_p (ent->comparison_code,
3489 reverse_condition (code))))
3490 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3491 || (const_arg1
3492 && rtx_equal_p (ent->comparison_const,
3493 const_arg1))
3494 || (REG_P (folded_arg1)
3495 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3496 {
3497 if (comparison_dominates_p (ent->comparison_code, code))
3498 {
3499 if (true_rtx)
3500 return true_rtx;
3501 else
3502 break;
3503 }
3504 else
3505 return false_rtx;
3506 }
3507 }
3508 }
3509 }
3510 }
3511
3512 /* If we are comparing against zero, see if the first operand is
3513 equivalent to an IOR with a constant. If so, we may be able to
3514 determine the result of this comparison. */
3515 if (const_arg1 == const0_rtx && !const_arg0)
3516 {
3517 rtx y = lookup_as_function (folded_arg0, IOR);
3518 rtx inner_const;
3519
3520 if (y != 0
3521 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3522 && CONST_INT_P (inner_const)
3523 && INTVAL (inner_const) != 0)
3524 folded_arg0 = gen_rtx_IOR (mode_arg0, XEXP (y, 0), inner_const);
3525 }
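/* For instance (purely illustrative), if FOLDED_ARG0 is a register known
   equivalent to (ior (reg 100) (const_int 4)), rewriting it as that IOR may
   let simplify_relational_operation below decide an EQ or NE test against
   zero, since the IOR of any value with a nonzero constant cannot be zero.  */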
3526
3527 {
3528 rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
3529 rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
3530 new_rtx = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
3531 }
3532 break;
3533
3534 case RTX_BIN_ARITH:
3535 case RTX_COMM_ARITH:
3536 switch (code)
3537 {
3538 case PLUS:
3539 /* If the second operand is a LABEL_REF, see if the first is a MINUS
3540 with that LABEL_REF as its second operand. If so, the result is
3541 the first operand of that MINUS. This handles switches with an
3542 ADDR_DIFF_VEC table. */
3543 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
3544 {
3545 rtx y
3546 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
3547 : lookup_as_function (folded_arg0, MINUS);
3548
3549 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3550 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
3551 return XEXP (y, 0);
3552
3553 /* Now try for a CONST of a MINUS like the above. */
3554 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
3555 : lookup_as_function (folded_arg0, CONST))) != 0
3556 && GET_CODE (XEXP (y, 0)) == MINUS
3557 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3558 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
3559 return XEXP (XEXP (y, 0), 0);
3560 }
3561
3562 /* Likewise if the operands are in the other order. */
3563 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
3564 {
3565 rtx y
3566 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
3567 : lookup_as_function (folded_arg1, MINUS);
3568
3569 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3570 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
3571 return XEXP (y, 0);
3572
3573 /* Now try for a CONST of a MINUS like the above. */
3574 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
3575 : lookup_as_function (folded_arg1, CONST))) != 0
3576 && GET_CODE (XEXP (y, 0)) == MINUS
3577 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3578 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
3579 return XEXP (XEXP (y, 0), 0);
3580 }
3581
3582 /* If second operand is a register equivalent to a negative
3583 CONST_INT, see if we can find a register equivalent to the
3584 positive constant. Make a MINUS if so. Don't do this for
3585 a non-negative constant since we might then alternate between
3586 choosing positive and negative constants. Having the positive
3587 constant previously-used is the more common case. Be sure
3588 the resulting constant is non-negative; if const_arg1 were
3589 the smallest negative number this would overflow: depending
3590 on the mode, this would either just be the same value (and
3591 hence not save anything) or be incorrect. */
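/* For instance, if REG2 is known to hold (const_int -4) and some
   register R is already known to hold 4, (plus X REG2) may be
   rewritten as (minus X R).  */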
3592 if (const_arg1 != 0 && CONST_INT_P (const_arg1)
3593 && INTVAL (const_arg1) < 0
3594 /* This used to test
3595
3596 -INTVAL (const_arg1) >= 0
3597
3598 But the Sun V5.0 compilers mis-compiled that test. So
3599 instead we test for the problematic value in a more direct
3600 manner and hope the Sun compilers get it correct. */
3601 && INTVAL (const_arg1) !=
3602 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
3603 && REG_P (folded_arg1))
3604 {
3605 rtx new_const = GEN_INT (-INTVAL (const_arg1));
3606 struct table_elt *p
3607 = lookup (new_const, SAFE_HASH (new_const, mode), mode);
3608
3609 if (p)
3610 for (p = p->first_same_value; p; p = p->next_same_value)
3611 if (REG_P (p->exp))
3612 return simplify_gen_binary (MINUS, mode, folded_arg0,
3613 canon_reg (p->exp, NULL_RTX));
3614 }
3615 goto from_plus;
3616
3617 case MINUS:
3618 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
3619 If so, produce (PLUS Z C2-C). */
3620 if (const_arg1 != 0 && CONST_INT_P (const_arg1))
3621 {
3622 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
3623 if (y && CONST_INT_P (XEXP (y, 1)))
3624 return fold_rtx (plus_constant (copy_rtx (y),
3625 -INTVAL (const_arg1)),
3626 NULL_RTX);
3627 }
3628
3629 /* Fall through. */
3630
3631 from_plus:
3632 case SMIN: case SMAX: case UMIN: case UMAX:
3633 case IOR: case AND: case XOR:
3634 case MULT:
3635 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
3636 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
3637 is known to be of similar form, we may be able to replace the
3638 operation with a combined operation. This may eliminate the
3639 intermediate operation if every use is simplified in this way.
3640 Note that the similar optimization done by combine.c only works
3641 if the intermediate operation's result has only one reference. */
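/* For example, if REG is known to be (plus X (const_int 8)), then
   (plus REG (const_int 4)) may be replaced by (plus X (const_int 12)).  */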
3642
3643 if (REG_P (folded_arg0)
3644 && const_arg1 && CONST_INT_P (const_arg1))
3645 {
3646 int is_shift
3647 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
3648 rtx y, inner_const, new_const;
3649 rtx canon_const_arg1 = const_arg1;
3650 enum rtx_code associate_code;
3651
3652 if (is_shift
3653 && (INTVAL (const_arg1) >= GET_MODE_BITSIZE (mode)
3654 || INTVAL (const_arg1) < 0))
3655 {
3656 if (SHIFT_COUNT_TRUNCATED)
3657 canon_const_arg1 = GEN_INT (INTVAL (const_arg1)
3658 & (GET_MODE_BITSIZE (mode)
3659 - 1));
3660 else
3661 break;
3662 }
3663
3664 y = lookup_as_function (folded_arg0, code);
3665 if (y == 0)
3666 break;
3667
3668 /* If we have compiled a statement like
3669 "if (x == (x & mask1))", and now are looking at
3670 "x & mask2", we will have a case where the first operand
3671 of Y is the same as our first operand. Unless we detect
3672 this case, an infinite loop will result. */
3673 if (XEXP (y, 0) == folded_arg0)
3674 break;
3675
3676 inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
3677 if (!inner_const || !CONST_INT_P (inner_const))
3678 break;
3679
3680 /* Don't associate these operations if they are a PLUS with the
3681 same constant and it is a power of two. These might be doable
3682 with a pre- or post-increment. Similarly for two subtracts of
3683 identical powers of two with post decrement. */
3684
3685 if (code == PLUS && const_arg1 == inner_const
3686 && ((HAVE_PRE_INCREMENT
3687 && exact_log2 (INTVAL (const_arg1)) >= 0)
3688 || (HAVE_POST_INCREMENT
3689 && exact_log2 (INTVAL (const_arg1)) >= 0)
3690 || (HAVE_PRE_DECREMENT
3691 && exact_log2 (- INTVAL (const_arg1)) >= 0)
3692 || (HAVE_POST_DECREMENT
3693 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
3694 break;
3695
3696 /* ??? Vector mode shifts by scalar
3697 shift operand are not supported yet. */
3698 if (is_shift && VECTOR_MODE_P (mode))
3699 break;
3700
3701 if (is_shift
3702 && (INTVAL (inner_const) >= GET_MODE_BITSIZE (mode)
3703 || INTVAL (inner_const) < 0))
3704 {
3705 if (SHIFT_COUNT_TRUNCATED)
3706 inner_const = GEN_INT (INTVAL (inner_const)
3707 & (GET_MODE_BITSIZE (mode) - 1));
3708 else
3709 break;
3710 }
3711
3712 /* Compute the code used to compose the constants. For example,
3713 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
3714
3715 associate_code = (is_shift || code == MINUS ? PLUS : code);
3716
3717 new_const = simplify_binary_operation (associate_code, mode,
3718 canon_const_arg1,
3719 inner_const);
3720
3721 if (new_const == 0)
3722 break;
3723
3724 /* If we are associating shift operations, don't let this
3725 produce a shift of the size of the object or larger.
3726 This could occur when we follow a sign-extend by a right
3727 shift on a machine that does a sign-extend as a pair
3728 of shifts. */
3729
3730 if (is_shift
3731 && CONST_INT_P (new_const)
3732 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
3733 {
3734 /* As an exception, we can turn an ASHIFTRT of this
3735 form into a shift of the number of bits - 1. */
3736 if (code == ASHIFTRT)
3737 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
3738 else if (!side_effects_p (XEXP (y, 0)))
3739 return CONST0_RTX (mode);
3740 else
3741 break;
3742 }
3743
3744 y = copy_rtx (XEXP (y, 0));
3745
3746 /* If Y contains our first operand (the most common way this
3747 can happen is if Y is a MEM), we would go into an infinite
3748 loop if we tried to fold it. So don't fold it in that case. */
3749
3750 if (! reg_mentioned_p (folded_arg0, y))
3751 y = fold_rtx (y, insn);
3752
3753 return simplify_gen_binary (code, mode, y, new_const);
3754 }
3755 break;
3756
3757 case DIV: case UDIV:
3758 /* ??? The associative optimization performed immediately above is
3759 also possible for DIV and UDIV using associate_code of MULT.
3760 However, we would need extra code to verify that the
3761 multiplication does not overflow, that is, there is no overflow
3762 in the calculation of new_const. */
3763 break;
3764
3765 default:
3766 break;
3767 }
3768
3769 new_rtx = simplify_binary_operation (code, mode,
3770 const_arg0 ? const_arg0 : folded_arg0,
3771 const_arg1 ? const_arg1 : folded_arg1);
3772 break;
3773
3774 case RTX_OBJ:
3775 /* (lo_sum (high X) X) is simply X. */
3776 if (code == LO_SUM && const_arg0 != 0
3777 && GET_CODE (const_arg0) == HIGH
3778 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
3779 return const_arg1;
3780 break;
3781
3782 case RTX_TERNARY:
3783 case RTX_BITFIELD_OPS:
3784 new_rtx = simplify_ternary_operation (code, mode, mode_arg0,
3785 const_arg0 ? const_arg0 : folded_arg0,
3786 const_arg1 ? const_arg1 : folded_arg1,
3787 const_arg2 ? const_arg2 : XEXP (x, 2));
3788 break;
3789
3790 default:
3791 break;
3792 }
3793
3794 return new_rtx ? new_rtx : x;
3795 }
3796 \f
3797 /* Return a constant value currently equivalent to X.
3798 Return 0 if we don't know one. */
3799
3800 static rtx
3801 equiv_constant (rtx x)
3802 {
3803 if (REG_P (x)
3804 && REGNO_QTY_VALID_P (REGNO (x)))
3805 {
3806 int x_q = REG_QTY (REGNO (x));
3807 struct qty_table_elem *x_ent = &qty_table[x_q];
3808
3809 if (x_ent->const_rtx)
3810 x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
3811 }
3812
3813 if (x == 0 || CONSTANT_P (x))
3814 return x;
3815
3816 if (GET_CODE (x) == SUBREG)
3817 {
3818 enum machine_mode mode = GET_MODE (x);
3819 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3820 rtx new_rtx;
3821
3822 /* See if we previously assigned a constant value to this SUBREG. */
3823 if ((new_rtx = lookup_as_function (x, CONST_INT)) != 0
3824 || (new_rtx = lookup_as_function (x, CONST_DOUBLE)) != 0
3825 || (new_rtx = lookup_as_function (x, CONST_FIXED)) != 0)
3826 return new_rtx;
3827
3828 /* If we didn't and if doing so makes sense, see if we previously
3829 assigned a constant value to the enclosing word mode SUBREG. */
3830 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode)
3831 && GET_MODE_SIZE (word_mode) < GET_MODE_SIZE (imode))
3832 {
3833 int byte = SUBREG_BYTE (x) - subreg_lowpart_offset (mode, word_mode);
3834 if (byte >= 0 && (byte % UNITS_PER_WORD) == 0)
3835 {
3836 rtx y = gen_rtx_SUBREG (word_mode, SUBREG_REG (x), byte);
3837 new_rtx = lookup_as_function (y, CONST_INT);
3838 if (new_rtx)
3839 return gen_lowpart (mode, new_rtx);
3840 }
3841 }
3842
3843 /* Otherwise see if we already have a constant for the inner REG. */
3844 if (REG_P (SUBREG_REG (x))
3845 && (new_rtx = equiv_constant (SUBREG_REG (x))) != 0)
3846 return simplify_subreg (mode, new_rtx, imode, SUBREG_BYTE (x));
3847
3848 return 0;
3849 }
3850
3851 /* If X is a MEM, see if it is a constant-pool reference, or look it up in
3852 the hash table in case its value was seen before. */
3853
3854 if (MEM_P (x))
3855 {
3856 struct table_elt *elt;
3857
3858 x = avoid_constant_pool_reference (x);
3859 if (CONSTANT_P (x))
3860 return x;
3861
3862 elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
3863 if (elt == 0)
3864 return 0;
3865
3866 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3867 if (elt->is_const && CONSTANT_P (elt->exp))
3868 return elt->exp;
3869 }
3870
3871 return 0;
3872 }
3873 \f
3874 /* Given INSN, a jump insn, TAKEN indicates if we are following the
3875 "taken" branch.
3876
3877 In certain cases, this can cause us to add an equivalence. For example,
3878 if we are following the taken case of
3879 if (i == 2)
3880 we can add the fact that `i' and `2' are now equivalent.
3881
3882 In any case, we can record that this comparison was passed. If the same
3883 comparison is seen later, we will know its value. */
3884
3885 static void
3886 record_jump_equiv (rtx insn, bool taken)
3887 {
3888 int cond_known_true;
3889 rtx op0, op1;
3890 rtx set;
3891 enum machine_mode mode, mode0, mode1;
3892 int reversed_nonequality = 0;
3893 enum rtx_code code;
3894
3895 /* Ensure this is the right kind of insn. */
3896 gcc_assert (any_condjump_p (insn));
3897
3898 set = pc_set (insn);
3899
3900 /* See if this jump condition is known true or false. */
3901 if (taken)
3902 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
3903 else
3904 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
3905
3906 /* Get the type of comparison being done and the operands being compared.
3907 If we had to reverse a non-equality condition, record that fact so we
3908 know that it isn't valid for floating-point. */
3909 code = GET_CODE (XEXP (SET_SRC (set), 0));
3910 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
3911 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
3912
3913 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
3914 if (! cond_known_true)
3915 {
3916 code = reversed_comparison_code_parts (code, op0, op1, insn);
3917
3918 /* Don't remember if we can't find the inverse. */
3919 if (code == UNKNOWN)
3920 return;
3921 }
3922
3923 /* The mode is the mode of the non-constant. */
3924 mode = mode0;
3925 if (mode1 != VOIDmode)
3926 mode = mode1;
3927
3928 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
3929 }
3930
3931 /* Yet another form of subreg creation. In this case, we want something in
3932 MODE, and we should assume OP has MODE iff it is naturally modeless. */
3933
3934 static rtx
3935 record_jump_cond_subreg (enum machine_mode mode, rtx op)
3936 {
3937 enum machine_mode op_mode = GET_MODE (op);
3938 if (op_mode == mode || op_mode == VOIDmode)
3939 return op;
3940 return lowpart_subreg (mode, op, op_mode);
3941 }
3942
3943 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
3944 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
3945 Make any useful entries we can with that information. Called from
3946 above function and called recursively. */
3947
3948 static void
3949 record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
3950 rtx op1, int reversed_nonequality)
3951 {
3952 unsigned op0_hash, op1_hash;
3953 int op0_in_memory, op1_in_memory;
3954 struct table_elt *op0_elt, *op1_elt;
3955
3956 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
3957 we know that they are also equal in the smaller mode (this is also
3958 true for all smaller modes whether or not there is a SUBREG, but
3959 is not worth testing for with no SUBREG). */
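/* For instance, if (subreg:DI (reg:SI X) 0) is known equal to Y, then
   (reg:SI X) is also recorded as equal to the SImode lowpart of Y.  */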
3960
3961 /* Note that GET_MODE (op0) may not equal MODE. */
3962 if (code == EQ && GET_CODE (op0) == SUBREG
3963 && (GET_MODE_SIZE (GET_MODE (op0))
3964 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
3965 {
3966 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
3967 rtx tem = record_jump_cond_subreg (inner_mode, op1);
3968 if (tem)
3969 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
3970 reversed_nonequality);
3971 }
3972
3973 if (code == EQ && GET_CODE (op1) == SUBREG
3974 && (GET_MODE_SIZE (GET_MODE (op1))
3975 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
3976 {
3977 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
3978 rtx tem = record_jump_cond_subreg (inner_mode, op0);
3979 if (tem)
3980 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
3981 reversed_nonequality);
3982 }
3983
3984 /* Similarly, if this is an NE comparison, and either is a SUBREG
3985 making a smaller mode, we know the whole thing is also NE. */
3986
3987 /* Note that GET_MODE (op0) may not equal MODE;
3988 if we test MODE instead, we can get an infinite recursion
3989 alternating between two modes each wider than MODE. */
3990
3991 if (code == NE && GET_CODE (op0) == SUBREG
3992 && subreg_lowpart_p (op0)
3993 && (GET_MODE_SIZE (GET_MODE (op0))
3994 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
3995 {
3996 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
3997 rtx tem = record_jump_cond_subreg (inner_mode, op1);
3998 if (tem)
3999 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4000 reversed_nonequality);
4001 }
4002
4003 if (code == NE && GET_CODE (op1) == SUBREG
4004 && subreg_lowpart_p (op1)
4005 && (GET_MODE_SIZE (GET_MODE (op1))
4006 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4007 {
4008 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4009 rtx tem = record_jump_cond_subreg (inner_mode, op0);
4010 if (tem)
4011 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4012 reversed_nonequality);
4013 }
4014
4015 /* Hash both operands. */
4016
4017 do_not_record = 0;
4018 hash_arg_in_memory = 0;
4019 op0_hash = HASH (op0, mode);
4020 op0_in_memory = hash_arg_in_memory;
4021
4022 if (do_not_record)
4023 return;
4024
4025 do_not_record = 0;
4026 hash_arg_in_memory = 0;
4027 op1_hash = HASH (op1, mode);
4028 op1_in_memory = hash_arg_in_memory;
4029
4030 if (do_not_record)
4031 return;
4032
4033 /* Look up both operands. */
4034 op0_elt = lookup (op0, op0_hash, mode);
4035 op1_elt = lookup (op1, op1_hash, mode);
4036
4037 /* If both operands are already equivalent or if they are not in the
4038 table but are identical, do nothing. */
4039 if ((op0_elt != 0 && op1_elt != 0
4040 && op0_elt->first_same_value == op1_elt->first_same_value)
4041 || op0 == op1 || rtx_equal_p (op0, op1))
4042 return;
4043
4044 /* If we aren't setting two things equal, all we can do is save this
4045 comparison. Similarly if this is floating-point. In the latter
4046 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4047 If we record the equality, we might inadvertently delete code
4048 whose intent was to change -0 to +0. */
4049
4050 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4051 {
4052 struct qty_table_elem *ent;
4053 int qty;
4054
4055 /* If we reversed a floating-point comparison, if OP0 is not a
4056 register, or if OP1 is neither a register nor a constant, we can't
4057 do anything. */
4058
4059 if (!REG_P (op1))
4060 op1 = equiv_constant (op1);
4061
4062 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4063 || !REG_P (op0) || op1 == 0)
4064 return;
4065
4066 /* Put OP0 in the hash table if it isn't already. This gives it a
4067 new quantity number. */
4068 if (op0_elt == 0)
4069 {
4070 if (insert_regs (op0, NULL, 0))
4071 {
4072 rehash_using_reg (op0);
4073 op0_hash = HASH (op0, mode);
4074
4075 /* If OP0 is contained in OP1, this changes its hash code
4076 as well. Faster to rehash than to check, except
4077 for the simple case of a constant. */
4078 if (! CONSTANT_P (op1))
4079 op1_hash = HASH (op1,mode);
4080 }
4081
4082 op0_elt = insert (op0, NULL, op0_hash, mode);
4083 op0_elt->in_memory = op0_in_memory;
4084 }
4085
4086 qty = REG_QTY (REGNO (op0));
4087 ent = &qty_table[qty];
4088
4089 ent->comparison_code = code;
4090 if (REG_P (op1))
4091 {
4092 /* Look it up again--in case op0 and op1 are the same. */
4093 op1_elt = lookup (op1, op1_hash, mode);
4094
4095 /* Put OP1 in the hash table so it gets a new quantity number. */
4096 if (op1_elt == 0)
4097 {
4098 if (insert_regs (op1, NULL, 0))
4099 {
4100 rehash_using_reg (op1);
4101 op1_hash = HASH (op1, mode);
4102 }
4103
4104 op1_elt = insert (op1, NULL, op1_hash, mode);
4105 op1_elt->in_memory = op1_in_memory;
4106 }
4107
4108 ent->comparison_const = NULL_RTX;
4109 ent->comparison_qty = REG_QTY (REGNO (op1));
4110 }
4111 else
4112 {
4113 ent->comparison_const = op1;
4114 ent->comparison_qty = -1;
4115 }
4116
4117 return;
4118 }
4119
4120 /* If either side is still missing an equivalence, make it now,
4121 then merge the equivalences. */
4122
4123 if (op0_elt == 0)
4124 {
4125 if (insert_regs (op0, NULL, 0))
4126 {
4127 rehash_using_reg (op0);
4128 op0_hash = HASH (op0, mode);
4129 }
4130
4131 op0_elt = insert (op0, NULL, op0_hash, mode);
4132 op0_elt->in_memory = op0_in_memory;
4133 }
4134
4135 if (op1_elt == 0)
4136 {
4137 if (insert_regs (op1, NULL, 0))
4138 {
4139 rehash_using_reg (op1);
4140 op1_hash = HASH (op1, mode);
4141 }
4142
4143 op1_elt = insert (op1, NULL, op1_hash, mode);
4144 op1_elt->in_memory = op1_in_memory;
4145 }
4146
4147 merge_equiv_classes (op0_elt, op1_elt);
4148 }
4149 \f
4150 /* CSE processing for one instruction.
4151 First simplify sources and addresses of all assignments
4152 in the instruction, using previously-computed equivalent values.
4153 Then install the new sources and destinations in the table
4154 of available values. */
4155
4156 /* Data on one SET contained in the instruction. */
4157
4158 struct set
4159 {
4160 /* The SET rtx itself. */
4161 rtx rtl;
4162 /* The SET_SRC of the rtx (the original value, if it is changing). */
4163 rtx src;
4164 /* The hash-table element for the SET_SRC of the SET. */
4165 struct table_elt *src_elt;
4166 /* Hash value for the SET_SRC. */
4167 unsigned src_hash;
4168 /* Hash value for the SET_DEST. */
4169 unsigned dest_hash;
4170 /* The SET_DEST, with SUBREG, etc., stripped. */
4171 rtx inner_dest;
4172 /* Nonzero if the SET_SRC is in memory. */
4173 char src_in_memory;
4174 /* Nonzero if the SET_SRC contains something
4175 whose value cannot be predicted and understood. */
4176 char src_volatile;
4177 /* Original machine mode, in case it becomes a CONST_INT.
4178 The size of this field should match the size of the mode
4179 field of struct rtx_def (see rtl.h). */
4180 ENUM_BITFIELD(machine_mode) mode : 8;
4181 /* A constant equivalent for SET_SRC, if any. */
4182 rtx src_const;
4183 /* Hash value of constant equivalent for SET_SRC. */
4184 unsigned src_const_hash;
4185 /* Table entry for constant equivalent for SET_SRC, if any. */
4186 struct table_elt *src_const_elt;
4187 /* Table entry for the destination address. */
4188 struct table_elt *dest_addr_elt;
4189 };
4190
4191 static void
4192 cse_insn (rtx insn)
4193 {
4194 rtx x = PATTERN (insn);
4195 int i;
4196 rtx tem;
4197 int n_sets = 0;
4198
4199 rtx src_eqv = 0;
4200 struct table_elt *src_eqv_elt = 0;
4201 int src_eqv_volatile = 0;
4202 int src_eqv_in_memory = 0;
4203 unsigned src_eqv_hash = 0;
4204
4205 struct set *sets = (struct set *) 0;
4206
4207 this_insn = insn;
4208 #ifdef HAVE_cc0
4209 /* Records what this insn does to set CC0. */
4210 this_insn_cc0 = 0;
4211 this_insn_cc0_mode = VOIDmode;
4212 #endif
4213
4214 /* Find all the SETs and CLOBBERs in this instruction.
4215 Record all the SETs in the array `set' and count them.
4216 Also determine whether there is a CLOBBER that invalidates
4217 all memory references, or all references at varying addresses. */
4218
4219 if (CALL_P (insn))
4220 {
4221 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4222 {
4223 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4224 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4225 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4226 }
4227 }
4228
4229 if (GET_CODE (x) == SET)
4230 {
4231 sets = XALLOCA (struct set);
4232 sets[0].rtl = x;
4233
4234 /* Ignore SETs that are unconditional jumps.
4235 They never need cse processing, so this does not hurt.
4236 The reason is not efficiency but rather
4237 so that we can test at the end for instructions
4238 that have been simplified to unconditional jumps
4239 and not be misled by unchanged instructions
4240 that were unconditional jumps to begin with. */
4241 if (SET_DEST (x) == pc_rtx
4242 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4243 ;
4244
4245 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4246 The hard function value register is used only once, to copy to
4247 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4248 Ensure we invalidate the destination register. On the 80386 no
4249 other code would invalidate it since it is a fixed_reg.
4250 We need not check the return of apply_change_group; see canon_reg. */
4251
4252 else if (GET_CODE (SET_SRC (x)) == CALL)
4253 {
4254 canon_reg (SET_SRC (x), insn);
4255 apply_change_group ();
4256 fold_rtx (SET_SRC (x), insn);
4257 invalidate (SET_DEST (x), VOIDmode);
4258 }
4259 else
4260 n_sets = 1;
4261 }
4262 else if (GET_CODE (x) == PARALLEL)
4263 {
4264 int lim = XVECLEN (x, 0);
4265
4266 sets = XALLOCAVEC (struct set, lim);
4267
4268 /* Find all regs explicitly clobbered in this insn,
4269 and ensure they are not replaced with any other regs
4270 elsewhere in this insn.
4271 When a reg that is clobbered is also used for input,
4272 we should presume that that is for a reason,
4273 and we should not substitute some other register
4274 which is not supposed to be clobbered.
4275 Therefore, this loop cannot be merged into the one below
4276 because a CALL may precede a CLOBBER and refer to the
4277 value clobbered. We must not let a canonicalization do
4278 anything in that case. */
4279 for (i = 0; i < lim; i++)
4280 {
4281 rtx y = XVECEXP (x, 0, i);
4282 if (GET_CODE (y) == CLOBBER)
4283 {
4284 rtx clobbered = XEXP (y, 0);
4285
4286 if (REG_P (clobbered)
4287 || GET_CODE (clobbered) == SUBREG)
4288 invalidate (clobbered, VOIDmode);
4289 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4290 || GET_CODE (clobbered) == ZERO_EXTRACT)
4291 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4292 }
4293 }
4294
4295 for (i = 0; i < lim; i++)
4296 {
4297 rtx y = XVECEXP (x, 0, i);
4298 if (GET_CODE (y) == SET)
4299 {
4300 /* As above, we ignore unconditional jumps and call-insns and
4301 ignore the result of apply_change_group. */
4302 if (GET_CODE (SET_SRC (y)) == CALL)
4303 {
4304 canon_reg (SET_SRC (y), insn);
4305 apply_change_group ();
4306 fold_rtx (SET_SRC (y), insn);
4307 invalidate (SET_DEST (y), VOIDmode);
4308 }
4309 else if (SET_DEST (y) == pc_rtx
4310 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4311 ;
4312 else
4313 sets[n_sets++].rtl = y;
4314 }
4315 else if (GET_CODE (y) == CLOBBER)
4316 {
4317 /* If we clobber memory, canon the address.
4318 This does nothing when a register is clobbered
4319 because we have already invalidated the reg. */
4320 if (MEM_P (XEXP (y, 0)))
4321 canon_reg (XEXP (y, 0), insn);
4322 }
4323 else if (GET_CODE (y) == USE
4324 && ! (REG_P (XEXP (y, 0))
4325 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4326 canon_reg (y, insn);
4327 else if (GET_CODE (y) == CALL)
4328 {
4329 /* The result of apply_change_group can be ignored; see
4330 canon_reg. */
4331 canon_reg (y, insn);
4332 apply_change_group ();
4333 fold_rtx (y, insn);
4334 }
4335 }
4336 }
4337 else if (GET_CODE (x) == CLOBBER)
4338 {
4339 if (MEM_P (XEXP (x, 0)))
4340 canon_reg (XEXP (x, 0), insn);
4341 }
4342 /* Canonicalize a USE of a pseudo register or memory location. */
4343 else if (GET_CODE (x) == USE
4344 && ! (REG_P (XEXP (x, 0))
4345 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4346 canon_reg (x, insn);
4347 else if (GET_CODE (x) == ASM_OPERANDS)
4348 {
4349 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
4350 {
4351 rtx input = ASM_OPERANDS_INPUT (x, i);
4352 if (!(REG_P (input) && REGNO (input) < FIRST_PSEUDO_REGISTER))
4353 {
4354 input = canon_reg (input, insn);
4355 validate_change (insn, &ASM_OPERANDS_INPUT (x, i), input, 1);
4356 }
4357 }
4358 }
4359 else if (GET_CODE (x) == CALL)
4360 {
4361 /* The result of apply_change_group can be ignored; see canon_reg. */
4362 canon_reg (x, insn);
4363 apply_change_group ();
4364 fold_rtx (x, insn);
4365 }
4366 else if (DEBUG_INSN_P (insn))
4367 canon_reg (PATTERN (insn), insn);
4368
4369 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4370 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4371 is handled specially for this case, and if it isn't set, then there will
4372 be no equivalence for the destination. */
4373 if (n_sets == 1 && REG_NOTES (insn) != 0
4374 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4375 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4376 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4377 {
4378 /* The result of apply_change_group can be ignored; see canon_reg. */
4379 canon_reg (XEXP (tem, 0), insn);
4380 apply_change_group ();
4381 src_eqv = fold_rtx (XEXP (tem, 0), insn);
4382 XEXP (tem, 0) = copy_rtx (src_eqv);
4383 df_notes_rescan (insn);
4384 }
4385
4386 /* Canonicalize sources and addresses of destinations.
4387 We do this in a separate pass to avoid problems when a MATCH_DUP is
4388 present in the insn pattern. In that case, we want to ensure that
4389 we don't break the duplicate nature of the pattern. So we will replace
4390 both operands at the same time. Otherwise, we would fail to find an
4391 equivalent substitution in the loop calling validate_change below.
4392
4393 We used to suppress canonicalization of DEST if it appears in SRC,
4394 but we don't do this any more. */
4395
4396 for (i = 0; i < n_sets; i++)
4397 {
4398 rtx dest = SET_DEST (sets[i].rtl);
4399 rtx src = SET_SRC (sets[i].rtl);
4400 rtx new_rtx = canon_reg (src, insn);
4401
4402 validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
4403
4404 if (GET_CODE (dest) == ZERO_EXTRACT)
4405 {
4406 validate_change (insn, &XEXP (dest, 1),
4407 canon_reg (XEXP (dest, 1), insn), 1);
4408 validate_change (insn, &XEXP (dest, 2),
4409 canon_reg (XEXP (dest, 2), insn), 1);
4410 }
4411
4412 while (GET_CODE (dest) == SUBREG
4413 || GET_CODE (dest) == ZERO_EXTRACT
4414 || GET_CODE (dest) == STRICT_LOW_PART)
4415 dest = XEXP (dest, 0);
4416
4417 if (MEM_P (dest))
4418 canon_reg (dest, insn);
4419 }
4420
4421 /* Now that we have done all the replacements, we can apply the change
4422 group and see if they all work. Note that this will cause some
4423 canonicalizations that would have worked individually not to be applied
4424 because some other canonicalization didn't work, but this should not
4425 occur often.
4426
4427 The result of apply_change_group can be ignored; see canon_reg. */
4428
4429 apply_change_group ();
4430
4431 /* Set sets[i].src_elt to the class each source belongs to.
4432 Detect assignments from or to volatile things
4433 and set sets[i] to zero so they will be ignored
4434 in the rest of this function.
4435
4436 Nothing in this loop changes the hash table or the register chains. */
4437
4438 for (i = 0; i < n_sets; i++)
4439 {
4440 bool repeat = false;
4441 rtx src, dest;
4442 rtx src_folded;
4443 struct table_elt *elt = 0, *p;
4444 enum machine_mode mode;
4445 rtx src_eqv_here;
4446 rtx src_const = 0;
4447 rtx src_related = 0;
4448 bool src_related_is_const_anchor = false;
4449 struct table_elt *src_const_elt = 0;
4450 int src_cost = MAX_COST;
4451 int src_eqv_cost = MAX_COST;
4452 int src_folded_cost = MAX_COST;
4453 int src_related_cost = MAX_COST;
4454 int src_elt_cost = MAX_COST;
4455 int src_regcost = MAX_COST;
4456 int src_eqv_regcost = MAX_COST;
4457 int src_folded_regcost = MAX_COST;
4458 int src_related_regcost = MAX_COST;
4459 int src_elt_regcost = MAX_COST;
4460 /* Set nonzero if we need to call force_const_mem on the
4461 contents of src_folded before using it. */
4462 int src_folded_force_flag = 0;
4463
4464 dest = SET_DEST (sets[i].rtl);
4465 src = SET_SRC (sets[i].rtl);
4466
4467 /* If SRC is a constant that has no machine mode,
4468 hash it with the destination's machine mode.
4469 This way we can keep different modes separate. */
4470
4471 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4472 sets[i].mode = mode;
4473
4474 if (src_eqv)
4475 {
4476 enum machine_mode eqvmode = mode;
4477 if (GET_CODE (dest) == STRICT_LOW_PART)
4478 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4479 do_not_record = 0;
4480 hash_arg_in_memory = 0;
4481 src_eqv_hash = HASH (src_eqv, eqvmode);
4482
4483 /* Find the equivalence class for the equivalent expression. */
4484
4485 if (!do_not_record)
4486 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4487
4488 src_eqv_volatile = do_not_record;
4489 src_eqv_in_memory = hash_arg_in_memory;
4490 }
4491
4492 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4493 value of the INNER register, not the destination. So it is not
4494 a valid substitution for the source. But save it for later. */
4495 if (GET_CODE (dest) == STRICT_LOW_PART)
4496 src_eqv_here = 0;
4497 else
4498 src_eqv_here = src_eqv;
4499
4500 /* Simplify any foldable subexpressions in SRC. Then get the fully-
4501 simplified result, which may not necessarily be valid. */
4502 src_folded = fold_rtx (src, insn);
4503
4504 #if 0
4505 /* ??? This caused bad code to be generated for the m68k port with -O2.
4506 Suppose src is (CONST_INT -1), and that after truncation src_folded
4507 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4508 At the end we will add src and src_const to the same equivalence
4509 class. We now have 3 and -1 on the same equivalence class. This
4510 causes later instructions to be mis-optimized. */
4511 /* If storing a constant in a bitfield, pre-truncate the constant
4512 so we will be able to record it later. */
4513 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
4514 {
4515 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4516
4517 if (CONST_INT_P (src)
4518 && CONST_INT_P (width)
4519 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4520 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4521 src_folded
4522 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4523 << INTVAL (width)) - 1));
4524 }
4525 #endif
4526
4527 /* Compute SRC's hash code, and also notice if it
4528 should not be recorded at all. In that case,
4529 prevent any further processing of this assignment. */
4530 do_not_record = 0;
4531 hash_arg_in_memory = 0;
4532
4533 sets[i].src = src;
4534 sets[i].src_hash = HASH (src, mode);
4535 sets[i].src_volatile = do_not_record;
4536 sets[i].src_in_memory = hash_arg_in_memory;
4537
4538 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4539 a pseudo, do not record SRC. Using SRC as a replacement for
4540 anything else will be incorrect in that situation. Note that
4541 this usually occurs only for stack slots, in which case all the
4542 RTL would be referring to SRC, so we don't lose any optimization
4543 opportunities by not having SRC in the hash table. */
4544
4545 if (MEM_P (src)
4546 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
4547 && REG_P (dest)
4548 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
4549 sets[i].src_volatile = 1;
4550
4551 #if 0
4552 /* It is no longer clear why we used to do this, but it doesn't
4553 appear to still be needed. So let's try without it since this
4554 code hurts cse'ing widened ops. */
4555 /* If source is a paradoxical subreg (such as QI treated as an SI),
4556 treat it as volatile. It may do the work of an SI in one context
4557 where the extra bits are not being used, but cannot replace an SI
4558 in general. */
4559 if (GET_CODE (src) == SUBREG
4560 && (GET_MODE_SIZE (GET_MODE (src))
4561 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
4562 sets[i].src_volatile = 1;
4563 #endif
4564
4565 /* Locate all possible equivalent forms for SRC. Try to replace
4566 SRC in the insn with each cheaper equivalent.
4567
4568 We have the following types of equivalents: SRC itself, a folded
4569 version, a value given in a REG_EQUAL note, or a value related
4570 to a constant.
4571
4572 Each of these equivalents may be part of an additional class
4573 of equivalents (if more than one is in the table, they must be in
4574 the same class; we check for this).
4575
4576 If the source is volatile, we don't do any table lookups.
4577
4578 We note any constant equivalent for possible later use in a
4579 REG_NOTE. */
4580
4581 if (!sets[i].src_volatile)
4582 elt = lookup (src, sets[i].src_hash, mode);
4583
4584 sets[i].src_elt = elt;
4585
4586 if (elt && src_eqv_here && src_eqv_elt)
4587 {
4588 if (elt->first_same_value != src_eqv_elt->first_same_value)
4589 {
4590 /* The REG_EQUAL is indicating that two formerly distinct
4591 classes are now equivalent. So merge them. */
4592 merge_equiv_classes (elt, src_eqv_elt);
4593 src_eqv_hash = HASH (src_eqv, elt->mode);
4594 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
4595 }
4596
4597 src_eqv_here = 0;
4598 }
4599
4600 else if (src_eqv_elt)
4601 elt = src_eqv_elt;
4602
4603 /* Try to find a constant somewhere and record it in `src_const'.
4604 Record its table element, if any, in `src_const_elt'. Look in
4605 any known equivalences first. (If the constant is not in the
4606 table, also set `sets[i].src_const_hash'). */
4607 if (elt)
4608 for (p = elt->first_same_value; p; p = p->next_same_value)
4609 if (p->is_const)
4610 {
4611 src_const = p->exp;
4612 src_const_elt = elt;
4613 break;
4614 }
4615
4616 if (src_const == 0
4617 && (CONSTANT_P (src_folded)
4618 /* Consider (minus (label_ref L1) (label_ref L2)) as
4619 "constant" here so we will record it. This allows us
4620 to fold switch statements when an ADDR_DIFF_VEC is used. */
4621 || (GET_CODE (src_folded) == MINUS
4622 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
4623 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
4624 src_const = src_folded, src_const_elt = elt;
4625 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
4626 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
4627
4628 /* If we don't know if the constant is in the table, get its
4629 hash code and look it up. */
4630 if (src_const && src_const_elt == 0)
4631 {
4632 sets[i].src_const_hash = HASH (src_const, mode);
4633 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
4634 }
4635
4636 sets[i].src_const = src_const;
4637 sets[i].src_const_elt = src_const_elt;
4638
4639 /* If the constant and our source are both in the table, mark them as
4640 equivalent. Otherwise, if a constant is in the table but the source
4641 isn't, set ELT to it. */
4642 if (src_const_elt && elt
4643 && src_const_elt->first_same_value != elt->first_same_value)
4644 merge_equiv_classes (elt, src_const_elt);
4645 else if (src_const_elt && elt == 0)
4646 elt = src_const_elt;
4647
4648 /* See if there is a register linearly related to a constant
4649 equivalent of SRC. */
4650 if (src_const
4651 && (GET_CODE (src_const) == CONST
4652 || (src_const_elt && src_const_elt->related_value != 0)))
4653 {
4654 src_related = use_related_value (src_const, src_const_elt);
4655 if (src_related)
4656 {
4657 struct table_elt *src_related_elt
4658 = lookup (src_related, HASH (src_related, mode), mode);
4659 if (src_related_elt && elt)
4660 {
4661 if (elt->first_same_value
4662 != src_related_elt->first_same_value)
4663 /* This can occur when we previously saw a CONST
4664 involving a SYMBOL_REF and then see the SYMBOL_REF
4665 twice. Merge the involved classes. */
4666 merge_equiv_classes (elt, src_related_elt);
4667
4668 src_related = 0;
4669 src_related_elt = 0;
4670 }
4671 else if (src_related_elt && elt == 0)
4672 elt = src_related_elt;
4673 }
4674 }
4675
4676 /* See if we have a CONST_INT that is already in a register in a
4677 wider mode. */
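/* For example, if (const_int 7) is already known to live in an SImode
   register R, a QImode use of 7 may instead use the low part of R.  */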
4678
4679 if (src_const && src_related == 0 && CONST_INT_P (src_const)
4680 && GET_MODE_CLASS (mode) == MODE_INT
4681 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
4682 {
4683 enum machine_mode wider_mode;
4684
4685 for (wider_mode = GET_MODE_WIDER_MODE (mode);
4686 wider_mode != VOIDmode
4687 && GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
4688 && src_related == 0;
4689 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4690 {
4691 struct table_elt *const_elt
4692 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
4693
4694 if (const_elt == 0)
4695 continue;
4696
4697 for (const_elt = const_elt->first_same_value;
4698 const_elt; const_elt = const_elt->next_same_value)
4699 if (REG_P (const_elt->exp))
4700 {
4701 src_related = gen_lowpart (mode, const_elt->exp);
4702 break;
4703 }
4704 }
4705 }
4706
4707 /* Another possibility is that we have an AND with a constant in
4708 a mode narrower than a word. If so, it might have been generated
4709 as part of an "if" which would narrow the AND. If we already
4710 have done the AND in a wider mode, we can use a SUBREG of that
4711 value. */
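/* For instance, if (and:SI Y (const_int 255)) is already available in
   a register, a QImode AND with 255 may reuse a lowpart SUBREG of it.  */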
4712
4713 if (flag_expensive_optimizations && ! src_related
4714 && GET_CODE (src) == AND && CONST_INT_P (XEXP (src, 1))
4715 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4716 {
4717 enum machine_mode tmode;
4718 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
4719
4720 for (tmode = GET_MODE_WIDER_MODE (mode);
4721 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4722 tmode = GET_MODE_WIDER_MODE (tmode))
4723 {
4724 rtx inner = gen_lowpart (tmode, XEXP (src, 0));
4725 struct table_elt *larger_elt;
4726
4727 if (inner)
4728 {
4729 PUT_MODE (new_and, tmode);
4730 XEXP (new_and, 0) = inner;
4731 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
4732 if (larger_elt == 0)
4733 continue;
4734
4735 for (larger_elt = larger_elt->first_same_value;
4736 larger_elt; larger_elt = larger_elt->next_same_value)
4737 if (REG_P (larger_elt->exp))
4738 {
4739 src_related
4740 = gen_lowpart (mode, larger_elt->exp);
4741 break;
4742 }
4743
4744 if (src_related)
4745 break;
4746 }
4747 }
4748 }
4749
4750 #ifdef LOAD_EXTEND_OP
4751 /* See if a MEM has already been loaded with a widening operation;
4752 if it has, we can use a subreg of that. Many CISC machines
4753 also have such operations, but this is only likely to be
4754 beneficial on these machines. */
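/* For example, on a target where LOAD_EXTEND_OP (QImode) is ZERO_EXTEND,
   if (zero_extend:SI (mem:QI addr)) is already held in a register R, the
   QImode load may be replaced by the QImode lowpart of R.  */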
4755
4756 if (flag_expensive_optimizations && src_related == 0
4757 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4758 && GET_MODE_CLASS (mode) == MODE_INT
4759 && MEM_P (src) && ! do_not_record
4760 && LOAD_EXTEND_OP (mode) != UNKNOWN)
4761 {
4762 struct rtx_def memory_extend_buf;
4763 rtx memory_extend_rtx = &memory_extend_buf;
4764 enum machine_mode tmode;
4765
4766 /* Set what we are trying to extend and the operation it might
4767 have been extended with. */
4768 memset (memory_extend_rtx, 0, sizeof(*memory_extend_rtx));
4769 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
4770 XEXP (memory_extend_rtx, 0) = src;
4771
4772 for (tmode = GET_MODE_WIDER_MODE (mode);
4773 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4774 tmode = GET_MODE_WIDER_MODE (tmode))
4775 {
4776 struct table_elt *larger_elt;
4777
4778 PUT_MODE (memory_extend_rtx, tmode);
4779 larger_elt = lookup (memory_extend_rtx,
4780 HASH (memory_extend_rtx, tmode), tmode);
4781 if (larger_elt == 0)
4782 continue;
4783
4784 for (larger_elt = larger_elt->first_same_value;
4785 larger_elt; larger_elt = larger_elt->next_same_value)
4786 if (REG_P (larger_elt->exp))
4787 {
4788 src_related = gen_lowpart (mode, larger_elt->exp);
4789 break;
4790 }
4791
4792 if (src_related)
4793 break;
4794 }
4795 }
4796 #endif /* LOAD_EXTEND_OP */
4797
4798 /* Try to express the constant using a register+offset expression
4799 derived from a constant anchor. */
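/* For instance, if some register R is already known to hold an anchor
   value such as 0x12340000, a nearby constant like 0x12340004 may be
   expressible as (plus R (const_int 4)).  */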
4800
4801 if (targetm.const_anchor
4802 && !src_related
4803 && src_const
4804 && GET_CODE (src_const) == CONST_INT)
4805 {
4806 src_related = try_const_anchors (src_const, mode);
4807 src_related_is_const_anchor = src_related != NULL_RTX;
4808 }
4809
4810
4811 if (src == src_folded)
4812 src_folded = 0;
4813
4814 /* At this point, ELT, if nonzero, points to a class of expressions
4815 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
4816 and SRC_RELATED, if nonzero, each contain additional equivalent
4817 expressions. Prune these latter expressions by deleting expressions
4818 already in the equivalence class.
4819
4820 Check for an equivalent identical to the destination. If found,
4821 this is the preferred equivalent since it will likely lead to
4822 elimination of the insn. Indicate this by placing it in
4823 `src_related'. */
4824
4825 if (elt)
4826 elt = elt->first_same_value;
4827 for (p = elt; p; p = p->next_same_value)
4828 {
4829 enum rtx_code code = GET_CODE (p->exp);
4830
4831 /* If the expression is not valid, ignore it. Then we do not
4832 have to check for validity below. In most cases, we can use
4833 `rtx_equal_p', since canonicalization has already been done. */
4834 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
4835 continue;
4836
4837 /* Also skip paradoxical subregs, unless that's what we're
4838 looking for. */
4839 if (code == SUBREG
4840 && (GET_MODE_SIZE (GET_MODE (p->exp))
4841 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
4842 && ! (src != 0
4843 && GET_CODE (src) == SUBREG
4844 && GET_MODE (src) == GET_MODE (p->exp)
4845 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4846 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
4847 continue;
4848
4849 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
4850 src = 0;
4851 else if (src_folded && GET_CODE (src_folded) == code
4852 && rtx_equal_p (src_folded, p->exp))
4853 src_folded = 0;
4854 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
4855 && rtx_equal_p (src_eqv_here, p->exp))
4856 src_eqv_here = 0;
4857 else if (src_related && GET_CODE (src_related) == code
4858 && rtx_equal_p (src_related, p->exp))
4859 src_related = 0;
4860
4861 /* If this is the same as the destination of the insn, we want
4862 to prefer it. Copy it to src_related. The code below will
4863 then give it a negative cost. */
4864 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
4865 src_related = dest;
4866 }
4867
4868 /* Find the cheapest valid equivalent, trying all the available
4869 possibilities. Prefer items not in the hash table to ones
4870 that are when they are of equal cost. Note that we can never
4871 worsen an insn, as the current contents will also succeed.
4872 If we find an equivalent identical to the destination, use it as best,
4873 since this insn will probably be eliminated in that case. */
4874 if (src)
4875 {
4876 if (rtx_equal_p (src, dest))
4877 src_cost = src_regcost = -1;
4878 else
4879 {
4880 src_cost = COST (src);
4881 src_regcost = approx_reg_cost (src);
4882 }
4883 }
4884
4885 if (src_eqv_here)
4886 {
4887 if (rtx_equal_p (src_eqv_here, dest))
4888 src_eqv_cost = src_eqv_regcost = -1;
4889 else
4890 {
4891 src_eqv_cost = COST (src_eqv_here);
4892 src_eqv_regcost = approx_reg_cost (src_eqv_here);
4893 }
4894 }
4895
4896 if (src_folded)
4897 {
4898 if (rtx_equal_p (src_folded, dest))
4899 src_folded_cost = src_folded_regcost = -1;
4900 else
4901 {
4902 src_folded_cost = COST (src_folded);
4903 src_folded_regcost = approx_reg_cost (src_folded);
4904 }
4905 }
4906
4907 if (src_related)
4908 {
4909 if (rtx_equal_p (src_related, dest))
4910 src_related_cost = src_related_regcost = -1;
4911 else
4912 {
4913 src_related_cost = COST (src_related);
4914 src_related_regcost = approx_reg_cost (src_related);
4915
4916 /* If a const-anchor is used to synthesize a constant that
4917 normally requires multiple instructions then slightly prefer
4918 it over the original sequence. These instructions are likely
4919 to become redundant now. We can't compare against the cost
4920 of src_eqv_here because, on MIPS for example, multi-insn
4921 constants have zero cost; they are assumed to be hoisted from
4922 loops. */
4923 if (src_related_is_const_anchor
4924 && src_related_cost == src_cost
4925 && src_eqv_here)
4926 src_related_cost--;
4927 }
4928 }
4929
4930 /* If this was an indirect jump insn, a known label will really be
4931 cheaper even though it looks more expensive. */
4932 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
4933 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
4934
4935 /* Terminate loop when replacement made. This must terminate since
4936 the current contents will be tested and will always be valid. */
4937 while (1)
4938 {
4939 rtx trial;
4940
4941 /* Skip invalid entries. */
4942 while (elt && !REG_P (elt->exp)
4943 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
4944 elt = elt->next_same_value;
4945
4946 /* A paradoxical subreg would be bad here: it'll be the right
4947 size, but later may be adjusted so that the upper bits aren't
4948 what we want. So reject it. */
4949 if (elt != 0
4950 && GET_CODE (elt->exp) == SUBREG
4951 && (GET_MODE_SIZE (GET_MODE (elt->exp))
4952 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
4953 /* It is okay, though, if the rtx we're trying to match
4954 will ignore any of the bits we can't predict. */
4955 && ! (src != 0
4956 && GET_CODE (src) == SUBREG
4957 && GET_MODE (src) == GET_MODE (elt->exp)
4958 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4959 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
4960 {
4961 elt = elt->next_same_value;
4962 continue;
4963 }
4964
4965 if (elt)
4966 {
4967 src_elt_cost = elt->cost;
4968 src_elt_regcost = elt->regcost;
4969 }
4970
4971 /* Find the cheapest and skip it for the next time. For items
4972 of equal cost, use this order:
4973 src_folded, src, src_eqv, src_related and hash table entry. */
4974 if (src_folded
4975 && preferable (src_folded_cost, src_folded_regcost,
4976 src_cost, src_regcost) <= 0
4977 && preferable (src_folded_cost, src_folded_regcost,
4978 src_eqv_cost, src_eqv_regcost) <= 0
4979 && preferable (src_folded_cost, src_folded_regcost,
4980 src_related_cost, src_related_regcost) <= 0
4981 && preferable (src_folded_cost, src_folded_regcost,
4982 src_elt_cost, src_elt_regcost) <= 0)
4983 {
4984 trial = src_folded, src_folded_cost = MAX_COST;
4985 if (src_folded_force_flag)
4986 {
4987 rtx forced = force_const_mem (mode, trial);
4988 if (forced)
4989 trial = forced;
4990 }
4991 }
4992 else if (src
4993 && preferable (src_cost, src_regcost,
4994 src_eqv_cost, src_eqv_regcost) <= 0
4995 && preferable (src_cost, src_regcost,
4996 src_related_cost, src_related_regcost) <= 0
4997 && preferable (src_cost, src_regcost,
4998 src_elt_cost, src_elt_regcost) <= 0)
4999 trial = src, src_cost = MAX_COST;
5000 else if (src_eqv_here
5001 && preferable (src_eqv_cost, src_eqv_regcost,
5002 src_related_cost, src_related_regcost) <= 0
5003 && preferable (src_eqv_cost, src_eqv_regcost,
5004 src_elt_cost, src_elt_regcost) <= 0)
5005 trial = src_eqv_here, src_eqv_cost = MAX_COST;
5006 else if (src_related
5007 && preferable (src_related_cost, src_related_regcost,
5008 src_elt_cost, src_elt_regcost) <= 0)
5009 trial = src_related, src_related_cost = MAX_COST;
5010 else
5011 {
5012 trial = elt->exp;
5013 elt = elt->next_same_value;
5014 src_elt_cost = MAX_COST;
5015 }
5016
5017 /* Avoid creation of overlapping memory moves. */
5018 if (MEM_P (trial) && MEM_P (SET_DEST (sets[i].rtl)))
5019 {
5020 rtx src, dest;
5021
5022 /* BLKmode moves are not handled by cse anyway. */
5023 if (GET_MODE (trial) == BLKmode)
5024 break;
5025
5026 src = canon_rtx (trial);
5027 dest = canon_rtx (SET_DEST (sets[i].rtl));
5028
5029 if (!MEM_P (src) || !MEM_P (dest)
5030 || !nonoverlapping_memrefs_p (src, dest, false))
5031 break;
5032 }
5033
5034 /* Try to optimize
5035 (set (reg:M N) (const_int A))
5036 (set (reg:M2 O) (const_int B))
5037 (set (zero_extract:M2 (reg:M N) (const_int C) (const_int D))
5038 (reg:M2 O)). */
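/* For instance, with A == 0, B == 3, C == 2 and D == 0, the third set
   may be rewritten as (set (reg:M N) (const_int 3)); the bit position
   is taken from the other end when BITS_BIG_ENDIAN.  */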
5039 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5040 && CONST_INT_P (trial)
5041 && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 1))
5042 && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 2))
5043 && REG_P (XEXP (SET_DEST (sets[i].rtl), 0))
5044 && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (sets[i].rtl)))
5045 >= INTVAL (XEXP (SET_DEST (sets[i].rtl), 1)))
5046 && ((unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))
5047 + (unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 2))
5048 <= HOST_BITS_PER_WIDE_INT))
5049 {
5050 rtx dest_reg = XEXP (SET_DEST (sets[i].rtl), 0);
5051 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5052 rtx pos = XEXP (SET_DEST (sets[i].rtl), 2);
5053 unsigned int dest_hash = HASH (dest_reg, GET_MODE (dest_reg));
5054 struct table_elt *dest_elt
5055 = lookup (dest_reg, dest_hash, GET_MODE (dest_reg));
5056 rtx dest_cst = NULL;
5057
5058 if (dest_elt)
5059 for (p = dest_elt->first_same_value; p; p = p->next_same_value)
5060 if (p->is_const && CONST_INT_P (p->exp))
5061 {
5062 dest_cst = p->exp;
5063 break;
5064 }
5065 if (dest_cst)
5066 {
5067 HOST_WIDE_INT val = INTVAL (dest_cst);
5068 HOST_WIDE_INT mask;
5069 unsigned int shift;
5070 if (BITS_BIG_ENDIAN)
5071 shift = GET_MODE_BITSIZE (GET_MODE (dest_reg))
5072 - INTVAL (pos) - INTVAL (width);
5073 else
5074 shift = INTVAL (pos);
5075 if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
5076 mask = ~(HOST_WIDE_INT) 0;
5077 else
5078 mask = ((HOST_WIDE_INT) 1 << INTVAL (width)) - 1;
5079 val &= ~(mask << shift);
5080 val |= (INTVAL (trial) & mask) << shift;
5081 val = trunc_int_for_mode (val, GET_MODE (dest_reg));
5082 validate_unshare_change (insn, &SET_DEST (sets[i].rtl),
5083 dest_reg, 1);
5084 validate_unshare_change (insn, &SET_SRC (sets[i].rtl),
5085 GEN_INT (val), 1);
5086 if (apply_change_group ())
5087 {
5088 rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5089 if (note)
5090 {
5091 remove_note (insn, note);
5092 df_notes_rescan (insn);
5093 }
5094 src_eqv = NULL_RTX;
5095 src_eqv_elt = NULL;
5096 src_eqv_volatile = 0;
5097 src_eqv_in_memory = 0;
5098 src_eqv_hash = 0;
5099 repeat = true;
5100 break;
5101 }
5102 }
5103 }
5104
5105 /* We don't normally have an insn matching (set (pc) (pc)), so
5106 check for this separately here. We will delete such an
5107 insn below.
5108
5109 For other cases such as a table jump or conditional jump
5110 where we know the ultimate target, go ahead and replace the
5111 operand. While that may not make a valid insn, we will
5112 reemit the jump below (and also insert any necessary
5113 barriers). */
5114 if (n_sets == 1 && dest == pc_rtx
5115 && (trial == pc_rtx
5116 || (GET_CODE (trial) == LABEL_REF
5117 && ! condjump_p (insn))))
5118 {
5119 /* Don't substitute non-local labels; this confuses CFG. */
5120 if (GET_CODE (trial) == LABEL_REF
5121 && LABEL_REF_NONLOCAL_P (trial))
5122 continue;
5123
5124 SET_SRC (sets[i].rtl) = trial;
5125 cse_jumps_altered = true;
5126 break;
5127 }
5128
5129 /* Reject certain invalid forms of CONST that we create. */
5130 else if (CONSTANT_P (trial)
5131 && GET_CODE (trial) == CONST
5132 /* Reject cases that will cause decode_rtx_const to
5133 die. On the alpha when simplifying a switch, we
5134 get (const (truncate (minus (label_ref)
5135 (label_ref)))). */
5136 && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
5137 /* Likewise on IA-64, except without the
5138 truncate. */
5139 || (GET_CODE (XEXP (trial, 0)) == MINUS
5140 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5141 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
5142 /* Do nothing for this case. */
5143 ;
5144
5145 /* Look for a substitution that makes a valid insn. */
5146 else if (validate_unshare_change
5147 (insn, &SET_SRC (sets[i].rtl), trial, 0))
5148 {
5149 rtx new_rtx = canon_reg (SET_SRC (sets[i].rtl), insn);
5150
5151 /* The result of apply_change_group can be ignored; see
5152 canon_reg. */
5153
5154 validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
5155 apply_change_group ();
5156
5157 break;
5158 }
5159
5160 /* If we previously found constant pool entries for
5161 constants and this is a constant, try making a
5162 pool entry. Put it in src_folded unless we already have done
5163 this since that is where it likely came from. */
5164
5165 else if (constant_pool_entries_cost
5166 && CONSTANT_P (trial)
5167 && (src_folded == 0
5168 || (!MEM_P (src_folded)
5169 && ! src_folded_force_flag))
5170 && GET_MODE_CLASS (mode) != MODE_CC
5171 && mode != VOIDmode)
5172 {
5173 src_folded_force_flag = 1;
5174 src_folded = trial;
5175 src_folded_cost = constant_pool_entries_cost;
5176 src_folded_regcost = constant_pool_entries_regcost;
5177 }
5178 }
5179
5180 /* If we changed the insn too much, handle this set from scratch. */
5181 if (repeat)
5182 {
5183 i--;
5184 continue;
5185 }
5186
5187 src = SET_SRC (sets[i].rtl);
5188
5189 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5190 However, there is an important exception: If both are registers
5191 that are not the head of their equivalence class, replace SET_SRC
5192 with the head of the class. If we do not do this, we will have
5193 both registers live over a portion of the basic block. This way,
5194 their lifetimes will likely abut instead of overlapping. */
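/* For example, if pseudos 100 and 102 are in one class with 100 at its
   head, (set (reg 102) (reg 102)) is rewritten as
   (set (reg 102) (reg 100)).  */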
5195 if (REG_P (dest)
5196 && REGNO_QTY_VALID_P (REGNO (dest)))
5197 {
5198 int dest_q = REG_QTY (REGNO (dest));
5199 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5200
5201 if (dest_ent->mode == GET_MODE (dest)
5202 && dest_ent->first_reg != REGNO (dest)
5203 && REG_P (src) && REGNO (src) == REGNO (dest)
5204 /* Don't do this if the original insn had a hard reg as
5205 SET_SRC or SET_DEST. */
5206 && (!REG_P (sets[i].src)
5207 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5208 && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5209 /* We can't call canon_reg here because it won't do anything if
5210 SRC is a hard register. */
5211 {
5212 int src_q = REG_QTY (REGNO (src));
5213 struct qty_table_elem *src_ent = &qty_table[src_q];
5214 int first = src_ent->first_reg;
5215 rtx new_src
5216 = (first >= FIRST_PSEUDO_REGISTER
5217 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5218
5219 /* We must use validate_change even for this, because this
5220 might be a special no-op instruction, suitable only to
5221 tag notes onto. */
5222 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5223 {
5224 src = new_src;
5225 /* If we had a constant that is cheaper than what we are now
5226 setting SRC to, use that constant. We ignored it when we
5227 thought we could make this into a no-op. */
5228 if (src_const && COST (src_const) < COST (src)
5229 && validate_change (insn, &SET_SRC (sets[i].rtl),
5230 src_const, 0))
5231 src = src_const;
5232 }
5233 }
5234 }
5235
5236 /* If we made a change, recompute SRC values. */
5237 if (src != sets[i].src)
5238 {
5239 do_not_record = 0;
5240 hash_arg_in_memory = 0;
5241 sets[i].src = src;
5242 sets[i].src_hash = HASH (src, mode);
5243 sets[i].src_volatile = do_not_record;
5244 sets[i].src_in_memory = hash_arg_in_memory;
5245 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5246 }
5247
5248 /* If this is a single SET, we are setting a register, and we have an
5249 equivalent constant, we want to add a REG_NOTE. We don't want
5250 to write a REG_EQUAL note for a constant pseudo since verifying that
5251 that pseudo hasn't been eliminated is a pain. Such a note also
5252 won't help anything.
5253
5254 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5255 which can be created for a reference to a compile time computable
5256 entry in a jump table. */
5257
5258 if (n_sets == 1 && src_const && REG_P (dest)
5259 && !REG_P (src_const)
5260 && ! (GET_CODE (src_const) == CONST
5261 && GET_CODE (XEXP (src_const, 0)) == MINUS
5262 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5263 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5264 {
5265 /* We only want a REG_EQUAL note if src_const != src. */
5266 if (! rtx_equal_p (src, src_const))
5267 {
5268 /* Make sure that the rtx is not shared. */
5269 src_const = copy_rtx (src_const);
5270
5271 /* Record the actual constant value in a REG_EQUAL note,
5272 making a new one if one does not already exist. */
5273 set_unique_reg_note (insn, REG_EQUAL, src_const);
5274 df_notes_rescan (insn);
5275 }
5276 }
5277
5278 /* Now deal with the destination. */
5279 do_not_record = 0;
5280
5281 /* Look within any ZERO_EXTRACT to the MEM or REG within it. */
5282 while (GET_CODE (dest) == SUBREG
5283 || GET_CODE (dest) == ZERO_EXTRACT
5284 || GET_CODE (dest) == STRICT_LOW_PART)
5285 dest = XEXP (dest, 0);
5286
5287 sets[i].inner_dest = dest;
5288
5289 if (MEM_P (dest))
5290 {
5291 #ifdef PUSH_ROUNDING
5292 /* Stack pushes invalidate the stack pointer. */
5293 rtx addr = XEXP (dest, 0);
5294 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5295 && XEXP (addr, 0) == stack_pointer_rtx)
5296 invalidate (stack_pointer_rtx, VOIDmode);
5297 #endif
5298 dest = fold_rtx (dest, insn);
5299 }
5300
5301 /* Compute the hash code of the destination now,
5302 before the effects of this instruction are recorded,
5303 since the register values used in the address computation
5304 are those before this instruction. */
5305 sets[i].dest_hash = HASH (dest, mode);
5306
5307 /* Don't enter a bit-field in the hash table
5308 because the value in it after the store
5309 may not equal what was stored, due to truncation. */
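      /* For instance (illustrative values), storing the constant 9 into a
         3-bit field would leave 9 & 7 == 1 behind, so recording 9 as the
         field's value would be wrong; the check below only records constants
         whose bits at or above the field width are already zero.  */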
5310
5311 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5312 {
5313 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5314
5315 if (src_const != 0 && CONST_INT_P (src_const)
5316 && CONST_INT_P (width)
5317 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5318 && ! (INTVAL (src_const)
5319 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5320 /* Exception: if the value is constant,
5321 and it won't be truncated, record it. */
5322 ;
5323 else
5324 {
5325 /* This is chosen so that the destination will be invalidated
5326 but no new value will be recorded.
5327 We must invalidate because sometimes constant
5328 values can be recorded for bitfields. */
5329 sets[i].src_elt = 0;
5330 sets[i].src_volatile = 1;
5331 src_eqv = 0;
5332 src_eqv_elt = 0;
5333 }
5334 }
5335
5336 /* If there is only one SET in a JUMP_INSN and it is now a no-op, we can delete
5337 the insn. */
5338 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5339 {
5340 /* One less use of the label this insn used to jump to. */
5341 delete_insn_and_edges (insn);
5342 cse_jumps_altered = true;
5343 /* No more processing for this set. */
5344 sets[i].rtl = 0;
5345 }
5346
5347 /* If this SET is now setting PC to a label, we know it used to
5348 be a conditional or computed branch. */
5349 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5350 && !LABEL_REF_NONLOCAL_P (src))
5351 {
5352 /* We reemit the jump in as many cases as possible just in
5353 case the form of an unconditional jump is significantly
5354 different from that of a computed jump or conditional jump.
5355
5356 If this insn has multiple sets, then reemitting the
5357 jump is nontrivial. So instead we just force rerecognition
5358 and hope for the best. */
5359 if (n_sets == 1)
5360 {
5361 rtx new_rtx, note;
5362
5363 new_rtx = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
5364 JUMP_LABEL (new_rtx) = XEXP (src, 0);
5365 LABEL_NUSES (XEXP (src, 0))++;
5366
5367 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5368 note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5369 if (note)
5370 {
5371 XEXP (note, 1) = NULL_RTX;
5372 REG_NOTES (new_rtx) = note;
5373 }
5374
5375 delete_insn_and_edges (insn);
5376 insn = new_rtx;
5377 }
5378 else
5379 INSN_CODE (insn) = -1;
5380
5381 /* Do not bother deleting any unreachable code, let jump do it. */
5382 cse_jumps_altered = true;
5383 sets[i].rtl = 0;
5384 }
5385
5386 /* If destination is volatile, invalidate it and then do no further
5387 processing for this assignment. */
5388
5389 else if (do_not_record)
5390 {
5391 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5392 invalidate (dest, VOIDmode);
5393 else if (MEM_P (dest))
5394 invalidate (dest, VOIDmode);
5395 else if (GET_CODE (dest) == STRICT_LOW_PART
5396 || GET_CODE (dest) == ZERO_EXTRACT)
5397 invalidate (XEXP (dest, 0), GET_MODE (dest));
5398 sets[i].rtl = 0;
5399 }
5400
5401 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5402 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5403
5404 #ifdef HAVE_cc0
5405 /* If setting CC0, record what it was set to, or a constant, if it
5406 is equivalent to a constant. If it is being set to a floating-point
5407 value, make a COMPARE with the appropriate constant of 0. If we
5408 don't do this, later code can interpret this as a test against
5409 const0_rtx, which can cause problems if we try to put it into an
5410 insn as a floating-point operand. */
5411 if (dest == cc0_rtx)
5412 {
5413 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5414 this_insn_cc0_mode = mode;
5415 if (FLOAT_MODE_P (mode))
5416 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5417 CONST0_RTX (mode));
5418 }
5419 #endif
5420 }
5421
5422 /* Now enter all non-volatile source expressions in the hash table
5423 if they are not already present.
5424 Record their equivalence classes in src_elt.
5425 This way we can insert the corresponding destinations into
5426 the same classes even if the actual sources are no longer in them
5427 (having been invalidated). */
5428
5429 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5430 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5431 {
5432 struct table_elt *elt;
5433 struct table_elt *classp = sets[0].src_elt;
5434 rtx dest = SET_DEST (sets[0].rtl);
5435 enum machine_mode eqvmode = GET_MODE (dest);
5436
5437 if (GET_CODE (dest) == STRICT_LOW_PART)
5438 {
5439 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5440 classp = 0;
5441 }
5442 if (insert_regs (src_eqv, classp, 0))
5443 {
5444 rehash_using_reg (src_eqv);
5445 src_eqv_hash = HASH (src_eqv, eqvmode);
5446 }
5447 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5448 elt->in_memory = src_eqv_in_memory;
5449 src_eqv_elt = elt;
5450
5451 /* Check to see if src_eqv_elt is the same as a set source which
5452 does not yet have an elt, and if so set the elt of the set source
5453 to src_eqv_elt. */
5454 for (i = 0; i < n_sets; i++)
5455 if (sets[i].rtl && sets[i].src_elt == 0
5456 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5457 sets[i].src_elt = src_eqv_elt;
5458 }
5459
5460 for (i = 0; i < n_sets; i++)
5461 if (sets[i].rtl && ! sets[i].src_volatile
5462 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5463 {
5464 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5465 {
5466 /* REG_EQUAL in setting a STRICT_LOW_PART
5467 gives an equivalent for the entire destination register,
5468 not just for the subreg being stored in now.
5469 This is a more interesting equivalence, so we arrange later
5470 to treat the entire reg as the destination. */
5471 sets[i].src_elt = src_eqv_elt;
5472 sets[i].src_hash = src_eqv_hash;
5473 }
5474 else
5475 {
5476 /* Insert source and constant equivalent into hash table, if not
5477 already present. */
5478 struct table_elt *classp = src_eqv_elt;
5479 rtx src = sets[i].src;
5480 rtx dest = SET_DEST (sets[i].rtl);
5481 enum machine_mode mode
5482 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5483
5484 /* It's possible that we have a source value known to be
5485 constant but don't have a REG_EQUAL note on the insn.
5486 Lack of a note will mean src_eqv_elt will be NULL. This
5487 can happen where we've generated a SUBREG to access a
5488 CONST_INT that is already in a register in a wider mode.
5489 Ensure that the source expression is put in the proper
5490 constant class. */
5491 if (!classp)
5492 classp = sets[i].src_const_elt;
5493
5494 if (sets[i].src_elt == 0)
5495 {
5496 struct table_elt *elt;
5497
5498 /* Note that these insert_regs calls cannot remove
5499 any of the src_elt's, because they would have failed to
5500 match if not still valid. */
5501 if (insert_regs (src, classp, 0))
5502 {
5503 rehash_using_reg (src);
5504 sets[i].src_hash = HASH (src, mode);
5505 }
5506 elt = insert (src, classp, sets[i].src_hash, mode);
5507 elt->in_memory = sets[i].src_in_memory;
5508 sets[i].src_elt = classp = elt;
5509 }
5510 if (sets[i].src_const && sets[i].src_const_elt == 0
5511 && src != sets[i].src_const
5512 && ! rtx_equal_p (sets[i].src_const, src))
5513 sets[i].src_elt = insert (sets[i].src_const, classp,
5514 sets[i].src_const_hash, mode);
5515 }
5516 }
5517 else if (sets[i].src_elt == 0)
5518 /* If we did not insert the source into the hash table (e.g., it was
5519 volatile), note the equivalence class for the REG_EQUAL value, if any,
5520 so that the destination goes into that class. */
5521 sets[i].src_elt = src_eqv_elt;
5522
5523 /* Record destination addresses in the hash table. This allows us to
5524 check if they are invalidated by other sets. */
5525 for (i = 0; i < n_sets; i++)
5526 {
5527 if (sets[i].rtl)
5528 {
5529 rtx x = sets[i].inner_dest;
5530 struct table_elt *elt;
5531 enum machine_mode mode;
5532 unsigned hash;
5533
5534 if (MEM_P (x))
5535 {
5536 x = XEXP (x, 0);
5537 mode = GET_MODE (x);
5538 hash = HASH (x, mode);
5539 elt = lookup (x, hash, mode);
5540 if (!elt)
5541 {
5542 if (insert_regs (x, NULL, 0))
5543 {
5544 rtx dest = SET_DEST (sets[i].rtl);
5545
5546 rehash_using_reg (x);
5547 hash = HASH (x, mode);
5548 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5549 }
5550 elt = insert (x, NULL, hash, mode);
5551 }
5552
5553 sets[i].dest_addr_elt = elt;
5554 }
5555 else
5556 sets[i].dest_addr_elt = NULL;
5557 }
5558 }
5559
5560 invalidate_from_clobbers (x);
5561
5562 /* Some registers are invalidated by subroutine calls. Memory is
5563 invalidated by non-constant calls. */
5564
5565 if (CALL_P (insn))
5566 {
5567 if (!(RTL_CONST_OR_PURE_CALL_P (insn)))
5568 invalidate_memory ();
5569 invalidate_for_call ();
5570 }
5571
5572 /* Now invalidate everything set by this instruction.
5573 If a SUBREG or other funny destination is being set,
5574 sets[i].rtl is still nonzero, so here we invalidate the reg
5575 a part of which is being set. */
5576
5577 for (i = 0; i < n_sets; i++)
5578 if (sets[i].rtl)
5579 {
5580 /* We can't use the inner dest, because the mode associated with
5581 a ZERO_EXTRACT is significant. */
5582 rtx dest = SET_DEST (sets[i].rtl);
5583
5584 /* Needed for registers to remove the register from its
5585 previous quantity's chain.
5586 Needed for memory if this is a nonvarying address, unless
5587 we have just done an invalidate_memory that covers even those. */
5588 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5589 invalidate (dest, VOIDmode);
5590 else if (MEM_P (dest))
5591 invalidate (dest, VOIDmode);
5592 else if (GET_CODE (dest) == STRICT_LOW_PART
5593 || GET_CODE (dest) == ZERO_EXTRACT)
5594 invalidate (XEXP (dest, 0), GET_MODE (dest));
5595 }
5596
5597 /* A volatile ASM invalidates everything. */
5598 if (NONJUMP_INSN_P (insn)
5599 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5600 && MEM_VOLATILE_P (PATTERN (insn)))
5601 flush_hash_table ();
5602
5603 /* Don't cse over a call to setjmp; on some machines (e.g. VAX)
5604 the regs restored by the longjmp come from a later time
5605 than the setjmp. */
5606 if (CALL_P (insn) && find_reg_note (insn, REG_SETJMP, NULL))
5607 {
5608 flush_hash_table ();
5609 goto done;
5610 }
5611
5612 /* Make sure registers mentioned in destinations
5613 are safe for use in an expression to be inserted.
5614 This removes from the hash table
5615 any invalid entry that refers to one of these registers.
5616
5617 We don't care about the return value from mention_regs because
5618 we are going to hash the SET_DEST values unconditionally. */
5619
5620 for (i = 0; i < n_sets; i++)
5621 {
5622 if (sets[i].rtl)
5623 {
5624 rtx x = SET_DEST (sets[i].rtl);
5625
5626 if (!REG_P (x))
5627 mention_regs (x);
5628 else
5629 {
5630 /* We used to rely on all references to a register becoming
5631 inaccessible when a register changes to a new quantity,
5632 since that changes the hash code. However, that is not
5633 safe, since after HASH_SIZE new quantities we get a
5634 hash 'collision' of a register with its own invalid
5635 entries. And since SUBREGs have been changed not to
5636 change their hash code with the hash code of the register,
5637 it wouldn't work any longer at all. So we have to check
5638 for any invalid references lying around now.
5639 This code is similar to the REG case in mention_regs,
5640 but it knows that reg_tick has been incremented, and
5641 it leaves reg_in_table as -1. */
5642 unsigned int regno = REGNO (x);
5643 unsigned int endregno = END_REGNO (x);
5644 unsigned int i;
5645
5646 for (i = regno; i < endregno; i++)
5647 {
5648 if (REG_IN_TABLE (i) >= 0)
5649 {
5650 remove_invalid_refs (i);
5651 REG_IN_TABLE (i) = -1;
5652 }
5653 }
5654 }
5655 }
5656 }
5657
5658 /* We may have just removed some of the src_elt's from the hash table.
5659 So replace each one with the current head of the same class.
5660 Also check if destination addresses have been removed. */
5661
5662 for (i = 0; i < n_sets; i++)
5663 if (sets[i].rtl)
5664 {
5665 if (sets[i].dest_addr_elt
5666 && sets[i].dest_addr_elt->first_same_value == 0)
5667 {
5668 /* The elt was removed, which means this destination is not
5669 valid after this instruction. */
5670 sets[i].rtl = NULL_RTX;
5671 }
5672 else if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5673 /* If elt was removed, find current head of same class,
5674 or 0 if nothing remains of that class. */
5675 {
5676 struct table_elt *elt = sets[i].src_elt;
5677
5678 while (elt && elt->prev_same_value)
5679 elt = elt->prev_same_value;
5680
5681 while (elt && elt->first_same_value == 0)
5682 elt = elt->next_same_value;
5683 sets[i].src_elt = elt ? elt->first_same_value : 0;
5684 }
5685 }
5686
5687 /* Now insert the destinations into their equivalence classes. */
5688
5689 for (i = 0; i < n_sets; i++)
5690 if (sets[i].rtl)
5691 {
5692 rtx dest = SET_DEST (sets[i].rtl);
5693 struct table_elt *elt;
5694
5695 /* Don't record value if we are not supposed to risk allocating
5696 floating-point values in registers that might be wider than
5697 memory. */
5698 if ((flag_float_store
5699 && MEM_P (dest)
5700 && FLOAT_MODE_P (GET_MODE (dest)))
5701 /* Don't record BLKmode values, because we don't know their
5702 size, and can't be sure that other BLKmode values
5703 have the same or smaller size. */
5704 || GET_MODE (dest) == BLKmode
5705 /* If we didn't put a REG_EQUAL value or a source into the hash
5706 table, there is no point in recording DEST. */
5707 || sets[i].src_elt == 0
5708 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
5709 or SIGN_EXTEND, don't record DEST since it can cause
5710 some tracking to be wrong.
5711
5712 ??? Think about this more later. */
5713 || (GET_CODE (dest) == SUBREG
5714 && (GET_MODE_SIZE (GET_MODE (dest))
5715 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5716 && (GET_CODE (sets[i].src) == SIGN_EXTEND
5717 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
5718 continue;
5719
5720 /* STRICT_LOW_PART isn't part of the value BEING set,
5721 and neither is the SUBREG inside it.
5722 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
5723 if (GET_CODE (dest) == STRICT_LOW_PART)
5724 dest = SUBREG_REG (XEXP (dest, 0));
5725
5726 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5727 /* Registers must also be inserted into chains for quantities. */
5728 if (insert_regs (dest, sets[i].src_elt, 1))
5729 {
5730 /* If `insert_regs' changes something, the hash code must be
5731 recalculated. */
5732 rehash_using_reg (dest);
5733 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5734 }
5735
5736 elt = insert (dest, sets[i].src_elt,
5737 sets[i].dest_hash, GET_MODE (dest));
5738
5739 /* If this is a constant, insert the constant anchors with the
5740 equivalent register-offset expressions using register DEST. */
5741 if (targetm.const_anchor
5742 && REG_P (dest)
5743 && SCALAR_INT_MODE_P (GET_MODE (dest))
5744 && GET_CODE (sets[i].src_elt->exp) == CONST_INT)
5745 insert_const_anchors (dest, sets[i].src_elt->exp, GET_MODE (dest));
5746
5747 elt->in_memory = (MEM_P (sets[i].inner_dest)
5748 && !MEM_READONLY_P (sets[i].inner_dest));
5749
5750 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
5751 narrower than M2, and both M1 and M2 are the same number of words,
5752 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
5753 make that equivalence as well.
5754
5755 However, BAR may have equivalences for which gen_lowpart
5756 will produce a simpler value than gen_lowpart applied to
5757 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
5758 BAR's equivalences. If we don't get a simplified form, make
5759 the SUBREG. It will not be used in an equivalence, but will
5760 cause two similar assignments to be detected.
5761
5762 Note the loop below will find SUBREG_REG (DEST) since we have
5763 already entered SRC and DEST of the SET in the table. */
5764
5765 if (GET_CODE (dest) == SUBREG
5766 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
5767 / UNITS_PER_WORD)
5768 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
5769 && (GET_MODE_SIZE (GET_MODE (dest))
5770 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5771 && sets[i].src_elt != 0)
5772 {
5773 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
5774 struct table_elt *elt, *classp = 0;
5775
5776 for (elt = sets[i].src_elt->first_same_value; elt;
5777 elt = elt->next_same_value)
5778 {
5779 rtx new_src = 0;
5780 unsigned src_hash;
5781 struct table_elt *src_elt;
5782 int byte = 0;
5783
5784 /* Ignore invalid entries. */
5785 if (!REG_P (elt->exp)
5786 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5787 continue;
5788
5789 /* We may have already been playing subreg games. If the
5790 mode is already correct for the destination, use it. */
5791 if (GET_MODE (elt->exp) == new_mode)
5792 new_src = elt->exp;
5793 else
5794 {
5795 /* Calculate big endian correction for the SUBREG_BYTE.
5796 We have already checked that M1 (GET_MODE (dest))
5797 is not narrower than M2 (new_mode). */
5798 if (BYTES_BIG_ENDIAN)
5799 byte = (GET_MODE_SIZE (GET_MODE (dest))
5800 - GET_MODE_SIZE (new_mode));
5801
5802 new_src = simplify_gen_subreg (new_mode, elt->exp,
5803 GET_MODE (dest), byte);
5804 }
5805
5806 /* The call to simplify_gen_subreg fails when the value
5807 has VOIDmode and no simplification can be done, e.g.
5808 for EXPR_LISTs denoting function call results.
5809 It is invalid to construct a SUBREG with a VOIDmode
5810 SUBREG_REG, hence a zero new_src means we can't do
5811 this substitution. */
5812 if (! new_src)
5813 continue;
5814
5815 src_hash = HASH (new_src, new_mode);
5816 src_elt = lookup (new_src, src_hash, new_mode);
5817
5818 /* Put the new source in the hash table if it isn't
5819 already there. */
5820 if (src_elt == 0)
5821 {
5822 if (insert_regs (new_src, classp, 0))
5823 {
5824 rehash_using_reg (new_src);
5825 src_hash = HASH (new_src, new_mode);
5826 }
5827 src_elt = insert (new_src, classp, src_hash, new_mode);
5828 src_elt->in_memory = elt->in_memory;
5829 }
5830 else if (classp && classp != src_elt->first_same_value)
5831 /* Show that two things that we've seen before are
5832 actually the same. */
5833 merge_equiv_classes (src_elt, classp);
5834
5835 classp = src_elt->first_same_value;
5836 /* Ignore invalid entries. */
5837 while (classp
5838 && !REG_P (classp->exp)
5839 && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
5840 classp = classp->next_same_value;
5841 }
5842 }
5843 }
5844
5845 /* Special handling for (set REG0 REG1) where REG0 is the
5846 "cheapest", cheaper than REG1. After cse, REG1 will probably not
5847 be used in the sequel, so (if easily done) change this insn to
5848 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
5849 that computed their value. Then REG1 will become a dead store
5850 and won't cloud the situation for later optimizations.
5851
5852 Do not make this change if REG1 is a hard register, because it will
5853 then be used in the sequel and we may be changing a two-operand insn
5854 into a three-operand insn.
5855
5856 Also do not do this if we are operating on a copy of INSN. */
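  /* Illustrative example (pseudo register numbers invented):
         (set (reg 105) (plus ...))    <- previous insn
         (set (reg 100) (reg 105))     <- this insn, r100 is the class head
     becomes
         (set (reg 100) (plus ...))
         (set (reg 105) (reg 100))
     so the copy into r105 turns into a dead store if r105 is unused later.  */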
5857
5858 if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
5859 && NEXT_INSN (PREV_INSN (insn)) == insn
5860 && REG_P (SET_SRC (sets[0].rtl))
5861 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
5862 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
5863 {
5864 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
5865 struct qty_table_elem *src_ent = &qty_table[src_q];
5866
5867 if (src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
5868 {
5869 /* Scan for the previous nonnote insn, but stop at a basic
5870 block boundary. */
5871 rtx prev = insn;
5872 rtx bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
5873 do
5874 {
5875 prev = PREV_INSN (prev);
5876 }
5877 while (prev != bb_head && (NOTE_P (prev) || DEBUG_INSN_P (prev)));
5878
5879 /* Do not swap the registers around if the previous instruction
5880 attaches a REG_EQUIV note to REG1.
5881
5882 ??? It's not entirely clear whether we can transfer a REG_EQUIV
5883 from the pseudo that originally shadowed an incoming argument
5884 to another register. Some uses of REG_EQUIV might rely on it
5885 being attached to REG1 rather than REG2.
5886
5887 This section previously turned the REG_EQUIV into a REG_EQUAL
5888 note. We cannot do that because REG_EQUIV may provide an
5889 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
5890 if (NONJUMP_INSN_P (prev)
5891 && GET_CODE (PATTERN (prev)) == SET
5892 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
5893 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
5894 {
5895 rtx dest = SET_DEST (sets[0].rtl);
5896 rtx src = SET_SRC (sets[0].rtl);
5897 rtx note;
5898
5899 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
5900 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
5901 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
5902 apply_change_group ();
5903
5904 /* If INSN has a REG_EQUAL note, and this note mentions
5905 REG0, then we must delete it, because the value in
5906 REG0 has changed. If the note's value is REG1, we must
5907 also delete it because that is now this insn's dest. */
5908 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5909 if (note != 0
5910 && (reg_mentioned_p (dest, XEXP (note, 0))
5911 || rtx_equal_p (src, XEXP (note, 0))))
5912 remove_note (insn, note);
5913 }
5914 }
5915 }
5916
5917 done:;
5918 }
5919 \f
5920 /* Remove from the hash table all expressions that reference memory. */
5921
5922 static void
5923 invalidate_memory (void)
5924 {
5925 int i;
5926 struct table_elt *p, *next;
5927
5928 for (i = 0; i < HASH_SIZE; i++)
5929 for (p = table[i]; p; p = next)
5930 {
5931 next = p->next_same_hash;
5932 if (p->in_memory)
5933 remove_from_table (p, i);
5934 }
5935 }
5936
5937 /* Perform invalidation on the basis of everything about an insn
5938 except for invalidating the actual places that are SET in it.
5939 This includes the places CLOBBERed, and anything that might
5940 alias with something that is SET or CLOBBERed.
5941
5942 X is the pattern of the insn. */
5943
5944 static void
5945 invalidate_from_clobbers (rtx x)
5946 {
5947 if (GET_CODE (x) == CLOBBER)
5948 {
5949 rtx ref = XEXP (x, 0);
5950 if (ref)
5951 {
5952 if (REG_P (ref) || GET_CODE (ref) == SUBREG
5953 || MEM_P (ref))
5954 invalidate (ref, VOIDmode);
5955 else if (GET_CODE (ref) == STRICT_LOW_PART
5956 || GET_CODE (ref) == ZERO_EXTRACT)
5957 invalidate (XEXP (ref, 0), GET_MODE (ref));
5958 }
5959 }
5960 else if (GET_CODE (x) == PARALLEL)
5961 {
5962 int i;
5963 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5964 {
5965 rtx y = XVECEXP (x, 0, i);
5966 if (GET_CODE (y) == CLOBBER)
5967 {
5968 rtx ref = XEXP (y, 0);
5969 if (REG_P (ref) || GET_CODE (ref) == SUBREG
5970 || MEM_P (ref))
5971 invalidate (ref, VOIDmode);
5972 else if (GET_CODE (ref) == STRICT_LOW_PART
5973 || GET_CODE (ref) == ZERO_EXTRACT)
5974 invalidate (XEXP (ref, 0), GET_MODE (ref));
5975 }
5976 }
5977 }
5978 }
5979 \f
5980 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
5981 and replace any registers in them with either an equivalent constant
5982 or the canonical form of the register. If we are inside an address,
5983 only do this if the address remains valid.
5984
5985 OBJECT is 0 except when within a MEM in which case it is the MEM.
5986
5987 Return the replacement for X. */
5988
5989 static rtx
5990 cse_process_notes_1 (rtx x, rtx object, bool *changed)
5991 {
5992 enum rtx_code code = GET_CODE (x);
5993 const char *fmt = GET_RTX_FORMAT (code);
5994 int i;
5995
5996 switch (code)
5997 {
5998 case CONST_INT:
5999 case CONST:
6000 case SYMBOL_REF:
6001 case LABEL_REF:
6002 case CONST_DOUBLE:
6003 case CONST_FIXED:
6004 case CONST_VECTOR:
6005 case PC:
6006 case CC0:
6007 case LO_SUM:
6008 return x;
6009
6010 case MEM:
6011 validate_change (x, &XEXP (x, 0),
6012 cse_process_notes (XEXP (x, 0), x, changed), 0);
6013 return x;
6014
6015 case EXPR_LIST:
6016 case INSN_LIST:
6017 if (REG_NOTE_KIND (x) == REG_EQUAL)
6018 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX, changed);
6019 if (XEXP (x, 1))
6020 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX, changed);
6021 return x;
6022
6023 case SIGN_EXTEND:
6024 case ZERO_EXTEND:
6025 case SUBREG:
6026 {
6027 rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
6028 /* We don't substitute VOIDmode constants into these rtx,
6029 since they would impede folding. */
6030 if (GET_MODE (new_rtx) != VOIDmode)
6031 validate_change (object, &XEXP (x, 0), new_rtx, 0);
6032 return x;
6033 }
6034
6035 case REG:
6036 i = REG_QTY (REGNO (x));
6037
6038 /* Return a constant or a constant register. */
6039 if (REGNO_QTY_VALID_P (REGNO (x)))
6040 {
6041 struct qty_table_elem *ent = &qty_table[i];
6042
6043 if (ent->const_rtx != NULL_RTX
6044 && (CONSTANT_P (ent->const_rtx)
6045 || REG_P (ent->const_rtx)))
6046 {
6047 rtx new_rtx = gen_lowpart (GET_MODE (x), ent->const_rtx);
6048 if (new_rtx)
6049 return copy_rtx (new_rtx);
6050 }
6051 }
6052
6053 /* Otherwise, canonicalize this register. */
6054 return canon_reg (x, NULL_RTX);
6055
6056 default:
6057 break;
6058 }
6059
6060 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6061 if (fmt[i] == 'e')
6062 validate_change (object, &XEXP (x, i),
6063 cse_process_notes (XEXP (x, i), object, changed), 0);
6064
6065 return x;
6066 }
6067
6068 static rtx
6069 cse_process_notes (rtx x, rtx object, bool *changed)
6070 {
6071 rtx new_rtx = cse_process_notes_1 (x, object, changed);
6072 if (new_rtx != x)
6073 *changed = true;
6074 return new_rtx;
6075 }
6076
6077 \f
6078 /* Find a path in the CFG, starting with FIRST_BB to perform CSE on.
6079
6080 DATA is a pointer to a struct cse_basic_block_data, that is used to
6081 describe the path.
6082 It is filled with a queue of basic blocks, starting with FIRST_BB
6083 and following a trace through the CFG.
6084
6085 If all paths starting at FIRST_BB have been followed, or no new path
6086 starting at FIRST_BB can be constructed, this function returns FALSE.
6087 Otherwise, DATA->path is filled and the function returns TRUE indicating
6088 that a path to follow was found.
6089
6090 If FOLLOW_JUMPS is false, the maximum path length is 1 and the only
6091 block in the path will be FIRST_BB. */
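/* For example (illustrative): if FIRST_BB ends in a conditional jump, the
   first call may return the path FIRST_BB -> branch target.  A later call
   with the same FIRST_BB backtracks that path and tries the fallthru
   successor instead, until no new path starting at FIRST_BB remains.  */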
6092
6093 static bool
6094 cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
6095 int follow_jumps)
6096 {
6097 basic_block bb;
6098 edge e;
6099 int path_size;
6100
6101 SET_BIT (cse_visited_basic_blocks, first_bb->index);
6102
6103 /* See if there is a previous path. */
6104 path_size = data->path_size;
6105
6106 /* There is a previous path. Make sure it started with FIRST_BB. */
6107 if (path_size)
6108 gcc_assert (data->path[0].bb == first_bb);
6109
6110 /* There was only one basic block in the last path. Clear the path and
6111 return, so that paths starting at another basic block can be tried. */
6112 if (path_size == 1)
6113 {
6114 path_size = 0;
6115 goto done;
6116 }
6117
6118 /* If the path was empty from the beginning, construct a new path. */
6119 if (path_size == 0)
6120 data->path[path_size++].bb = first_bb;
6121 else
6122 {
6123 /* Otherwise, path_size must be equal to or greater than 2, because
6124 a previous path exists that is at least two basic blocks long.
6125
6126 Update the previous branch path, if any. If the last branch was
6127 previously along the branch edge, take the fallthrough edge now. */
6128 while (path_size >= 2)
6129 {
6130 basic_block last_bb_in_path, previous_bb_in_path;
6131 edge e;
6132
6133 --path_size;
6134 last_bb_in_path = data->path[path_size].bb;
6135 previous_bb_in_path = data->path[path_size - 1].bb;
6136
6137 /* If we previously followed a path along the branch edge, try
6138 the fallthru edge now. */
6139 if (EDGE_COUNT (previous_bb_in_path->succs) == 2
6140 && any_condjump_p (BB_END (previous_bb_in_path))
6141 && (e = find_edge (previous_bb_in_path, last_bb_in_path))
6142 && e == BRANCH_EDGE (previous_bb_in_path))
6143 {
6144 bb = FALLTHRU_EDGE (previous_bb_in_path)->dest;
6145 if (bb != EXIT_BLOCK_PTR
6146 && single_pred_p (bb)
6147 /* We used to assert here that we would only see blocks
6148 that we have not visited yet. But we may end up
6149 visiting basic blocks twice if the CFG has changed
6150 in this run of cse_main, because when the CFG changes
6151 the topological sort of the CFG also changes. A basic
6152 block that previously had more than one predecessor
6153 may now have a single predecessor, and become part of
6154 a path that starts at another basic block.
6155
6156 We still want to visit each basic block only once, so
6157 halt the path here if we have already visited BB. */
6158 && !TEST_BIT (cse_visited_basic_blocks, bb->index))
6159 {
6160 SET_BIT (cse_visited_basic_blocks, bb->index);
6161 data->path[path_size++].bb = bb;
6162 break;
6163 }
6164 }
6165
6166 data->path[path_size].bb = NULL;
6167 }
6168
6169 /* If only one block remains in the path, bail. */
6170 if (path_size == 1)
6171 {
6172 path_size = 0;
6173 goto done;
6174 }
6175 }
6176
6177 /* Extend the path if possible. */
6178 if (follow_jumps)
6179 {
6180 bb = data->path[path_size - 1].bb;
6181 while (bb && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH))
6182 {
6183 if (single_succ_p (bb))
6184 e = single_succ_edge (bb);
6185 else if (EDGE_COUNT (bb->succs) == 2
6186 && any_condjump_p (BB_END (bb)))
6187 {
6188 /* First try to follow the branch. If that doesn't lead
6189 to a useful path, follow the fallthru edge. */
6190 e = BRANCH_EDGE (bb);
6191 if (!single_pred_p (e->dest))
6192 e = FALLTHRU_EDGE (bb);
6193 }
6194 else
6195 e = NULL;
6196
6197 if (e
6198 && !((e->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
6199 && e->dest != EXIT_BLOCK_PTR
6200 && single_pred_p (e->dest)
6201 /* Avoid visiting basic blocks twice. The large comment
6202 above explains why this can happen. */
6203 && !TEST_BIT (cse_visited_basic_blocks, e->dest->index))
6204 {
6205 basic_block bb2 = e->dest;
6206 SET_BIT (cse_visited_basic_blocks, bb2->index);
6207 data->path[path_size++].bb = bb2;
6208 bb = bb2;
6209 }
6210 else
6211 bb = NULL;
6212 }
6213 }
6214
6215 done:
6216 data->path_size = path_size;
6217 return path_size != 0;
6218 }
6219 \f
6220 /* Dump the path in DATA to file F. NSETS is the number of sets
6221 in the path. */
6222
6223 static void
6224 cse_dump_path (struct cse_basic_block_data *data, int nsets, FILE *f)
6225 {
6226 int path_entry;
6227
6228 fprintf (f, ";; Following path with %d sets: ", nsets);
6229 for (path_entry = 0; path_entry < data->path_size; path_entry++)
6230 fprintf (f, "%d ", (data->path[path_entry].bb)->index);
6231 fputc ('\n', f);
6232 fflush (f);
6233 }
6234
6235 \f
6236 /* Return true if BB has exception handling successor edges. */
6237
6238 static bool
6239 have_eh_succ_edges (basic_block bb)
6240 {
6241 edge e;
6242 edge_iterator ei;
6243
6244 FOR_EACH_EDGE (e, ei, bb->succs)
6245 if (e->flags & EDGE_EH)
6246 return true;
6247
6248 return false;
6249 }
6250
6251 \f
6252 /* Scan to the end of the path described by DATA. Record in DATA->nsets
6253 an estimate of the total number of SETs of all insns in the path. */
6254
6255 static void
6256 cse_prescan_path (struct cse_basic_block_data *data)
6257 {
6258 int nsets = 0;
6259 int path_size = data->path_size;
6260 int path_entry;
6261
6262 /* Scan to end of each basic block in the path. */
6263 for (path_entry = 0; path_entry < path_size; path_entry++)
6264 {
6265 basic_block bb;
6266 rtx insn;
6267
6268 bb = data->path[path_entry].bb;
6269
6270 FOR_BB_INSNS (bb, insn)
6271 {
6272 if (!INSN_P (insn))
6273 continue;
6274
6275 /* A PARALLEL can have lots of SETs in it,
6276 especially if it is really an ASM_OPERANDS. */
6277 if (GET_CODE (PATTERN (insn)) == PARALLEL)
6278 nsets += XVECLEN (PATTERN (insn), 0);
6279 else
6280 nsets += 1;
6281 }
6282 }
6283
6284 data->nsets = nsets;
6285 }
6286 \f
6287 /* Process a single extended basic block described by EBB_DATA. */
6288
6289 static void
6290 cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
6291 {
6292 int path_size = ebb_data->path_size;
6293 int path_entry;
6294 int num_insns = 0;
6295
6296 /* Allocate the space needed by qty_table. */
6297 qty_table = XNEWVEC (struct qty_table_elem, max_qty);
6298
6299 new_basic_block ();
6300 cse_ebb_live_in = df_get_live_in (ebb_data->path[0].bb);
6301 cse_ebb_live_out = df_get_live_out (ebb_data->path[path_size - 1].bb);
6302 for (path_entry = 0; path_entry < path_size; path_entry++)
6303 {
6304 basic_block bb;
6305 rtx insn;
6306
6307 bb = ebb_data->path[path_entry].bb;
6308
6309 /* Invalidate recorded information for eh regs if there is an EH
6310 edge pointing to that bb. */
6311 if (bb_has_eh_pred (bb))
6312 {
6313 df_ref *def_rec;
6314
6315 for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++)
6316 {
6317 df_ref def = *def_rec;
6318 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
6319 invalidate (DF_REF_REG (def), GET_MODE (DF_REF_REG (def)));
6320 }
6321 }
6322
6323 optimize_this_for_speed_p = optimize_bb_for_speed_p (bb);
6324 FOR_BB_INSNS (bb, insn)
6325 {
6326 /* After processing more than PARAM_MAX_CSE_INSNS insns, flush the hash table to
6327 avoid extreme quadratic behavior. We must not include NOTEs
6328 in the count since there may be more of them when generating
6329 debugging information. If we clear the table at different
6330 times, code generated with -g -O might be different than code
6331 generated with -O but not -g.
6332
6333 FIXME: This is a real kludge and needs to be done some other
6334 way. */
6335 if (NONDEBUG_INSN_P (insn)
6336 && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
6337 {
6338 flush_hash_table ();
6339 num_insns = 0;
6340 }
6341
6342 if (INSN_P (insn))
6343 {
6344 /* Process notes first so we have all notes in canonical forms
6345 when looking for duplicate operations. */
6346 if (REG_NOTES (insn))
6347 {
6348 bool changed = false;
6349 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn),
6350 NULL_RTX, &changed);
6351 if (changed)
6352 df_notes_rescan (insn);
6353 }
6354
6355 cse_insn (insn);
6356
6357 /* If we haven't already found an insn where we added a LABEL_REF,
6358 check this one. */
6359 if (INSN_P (insn) && !recorded_label_ref
6360 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
6361 (void *) insn))
6362 recorded_label_ref = true;
6363
6364 #ifdef HAVE_cc0
6365 if (NONDEBUG_INSN_P (insn))
6366 {
6367 /* If the previous insn sets CC0 and this insn no
6368 longer references CC0, delete the previous insn.
6369 Here we use the fact that nothing expects CC0 to be
6370 valid over an insn, which is true until the final
6371 pass. */
6372 rtx prev_insn, tem;
6373
6374 prev_insn = prev_nonnote_nondebug_insn (insn);
6375 if (prev_insn && NONJUMP_INSN_P (prev_insn)
6376 && (tem = single_set (prev_insn)) != NULL_RTX
6377 && SET_DEST (tem) == cc0_rtx
6378 && ! reg_mentioned_p (cc0_rtx, PATTERN (insn)))
6379 delete_insn (prev_insn);
6380
6381 /* If this insn is not the last insn in the basic
6382 block, it will be PREV_INSN(insn) in the next
6383 iteration. If we recorded any CC0-related
6384 information for this insn, remember it. */
6385 if (insn != BB_END (bb))
6386 {
6387 prev_insn_cc0 = this_insn_cc0;
6388 prev_insn_cc0_mode = this_insn_cc0_mode;
6389 }
6390 }
6391 #endif
6392 }
6393 }
6394
6395 /* With non-call exceptions, we are not always able to update
6396 the CFG properly inside cse_insn. So clean up possibly
6397 redundant EH edges here. */
6398 if (cfun->can_throw_non_call_exceptions && have_eh_succ_edges (bb))
6399 cse_cfg_altered |= purge_dead_edges (bb);
6400
6401 /* If we changed a conditional jump, we may have terminated
6402 the path we are following. Check that by verifying that
6403 the edge we would take still exists. If the edge does
6404 not exist anymore, purge the remainder of the path.
6405 Note that this will cause us to return to the caller. */
6406 if (path_entry < path_size - 1)
6407 {
6408 basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6409 if (!find_edge (bb, next_bb))
6410 {
6411 do
6412 {
6413 path_size--;
6414
6415 /* If we truncate the path, we must also reset the
6416 visited bit on the remaining blocks in the path,
6417 or we will never visit them at all. */
6418 RESET_BIT (cse_visited_basic_blocks,
6419 ebb_data->path[path_size].bb->index);
6420 ebb_data->path[path_size].bb = NULL;
6421 }
6422 while (path_size - 1 != path_entry);
6423 ebb_data->path_size = path_size;
6424 }
6425 }
6426
6427 /* If this is a conditional jump insn, record any known
6428 equivalences due to the condition being tested. */
6429 insn = BB_END (bb);
6430 if (path_entry < path_size - 1
6431 && JUMP_P (insn)
6432 && single_set (insn)
6433 && any_condjump_p (insn))
6434 {
6435 basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6436 bool taken = (next_bb == BRANCH_EDGE (bb)->dest);
6437 record_jump_equiv (insn, taken);
6438 }
6439
6440 #ifdef HAVE_cc0
6441 /* Clear the CC0-tracking related insns, they can't provide
6442 useful information across basic block boundaries. */
6443 prev_insn_cc0 = 0;
6444 #endif
6445 }
6446
6447 gcc_assert (next_qty <= max_qty);
6448
6449 free (qty_table);
6450 }
6451
6452 \f
6453 /* Perform cse on the instructions of a function.
6454 F is the first instruction.
6455 NREGS is one plus the highest pseudo-reg number used in the function.
6456
6457 Return 2 if jump optimizations should be redone due to simplifications
6458 in conditional jump instructions.
6459 Return 1 if the CFG should be cleaned up because it has been modified.
6460 Return 0 otherwise. */
6461
6462 int
6463 cse_main (rtx f ATTRIBUTE_UNUSED, int nregs)
6464 {
6465 struct cse_basic_block_data ebb_data;
6466 basic_block bb;
6467 int *rc_order = XNEWVEC (int, last_basic_block);
6468 int i, n_blocks;
6469
6470 df_set_flags (DF_LR_RUN_DCE);
6471 df_analyze ();
6472 df_set_flags (DF_DEFER_INSN_RESCAN);
6473
6474 reg_scan (get_insns (), max_reg_num ());
6475 init_cse_reg_info (nregs);
6476
6477 ebb_data.path = XNEWVEC (struct branch_path,
6478 PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6479
6480 cse_cfg_altered = false;
6481 cse_jumps_altered = false;
6482 recorded_label_ref = false;
6483 constant_pool_entries_cost = 0;
6484 constant_pool_entries_regcost = 0;
6485 ebb_data.path_size = 0;
6486 ebb_data.nsets = 0;
6487 rtl_hooks = cse_rtl_hooks;
6488
6489 init_recog ();
6490 init_alias_analysis ();
6491
6492 reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);
6493
6494 /* Set up the table of already visited basic blocks. */
6495 cse_visited_basic_blocks = sbitmap_alloc (last_basic_block);
6496 sbitmap_zero (cse_visited_basic_blocks);
6497
6498 /* Loop over basic blocks in reverse completion order (RPO),
6499 excluding the ENTRY and EXIT blocks. */
6500 n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);
6501 i = 0;
6502 while (i < n_blocks)
6503 {
6504 /* Find the first block in the RPO queue that we have not yet
6505 processed before. */
6506 do
6507 {
6508 bb = BASIC_BLOCK (rc_order[i++]);
6509 }
6510 while (TEST_BIT (cse_visited_basic_blocks, bb->index)
6511 && i < n_blocks);
6512
6513 /* Find all paths starting with BB, and process them. */
6514 while (cse_find_path (bb, &ebb_data, flag_cse_follow_jumps))
6515 {
6516 /* Pre-scan the path. */
6517 cse_prescan_path (&ebb_data);
6518
6519 /* If this basic block has no sets, skip it. */
6520 if (ebb_data.nsets == 0)
6521 continue;
6522
6523 /* Get a reasonable estimate for the maximum number of qty's
6524 needed for this path. For this, we take the number of sets
6525 and multiply that by MAX_RECOG_OPERANDS. */
6526 max_qty = ebb_data.nsets * MAX_RECOG_OPERANDS;
6527
6528 /* Dump the path we're about to process. */
6529 if (dump_file)
6530 cse_dump_path (&ebb_data, ebb_data.nsets, dump_file);
6531
6532 cse_extended_basic_block (&ebb_data);
6533 }
6534 }
6535
6536 /* Clean up. */
6537 end_alias_analysis ();
6538 free (reg_eqv_table);
6539 free (ebb_data.path);
6540 sbitmap_free (cse_visited_basic_blocks);
6541 free (rc_order);
6542 rtl_hooks = general_rtl_hooks;
6543
6544 if (cse_jumps_altered || recorded_label_ref)
6545 return 2;
6546 else if (cse_cfg_altered)
6547 return 1;
6548 else
6549 return 0;
6550 }
6551 \f
6552 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for
6553 which there isn't a REG_LABEL_OPERAND note.
6554 Return one if so. DATA is the insn. */
6555
6556 static int
6557 check_for_label_ref (rtx *rtl, void *data)
6558 {
6559 rtx insn = (rtx) data;
6560
6561 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL_OPERAND
6562 note for it, we must rerun jump since it needs to place the note. If
6563 this is a LABEL_REF for a CODE_LABEL that isn't in the insn chain,
6564 don't do this since no REG_LABEL_OPERAND will be added. */
6565 return (GET_CODE (*rtl) == LABEL_REF
6566 && ! LABEL_REF_NONLOCAL_P (*rtl)
6567 && (!JUMP_P (insn)
6568 || !label_is_jump_target_p (XEXP (*rtl, 0), insn))
6569 && LABEL_P (XEXP (*rtl, 0))
6570 && INSN_UID (XEXP (*rtl, 0)) != 0
6571 && ! find_reg_note (insn, REG_LABEL_OPERAND, XEXP (*rtl, 0)));
6572 }
6573 \f
6574 /* Count the number of times registers are used (not set) in X.
6575 COUNTS is an array in which we accumulate the count, INCR is how much
6576 we count each register usage.
6577
6578 Don't count a usage of DEST, which is the SET_DEST of a SET which
6579 contains X in its SET_SRC. This is because such a SET does not
6580 modify the liveness of DEST.
6581 DEST is set to pc_rtx for a trapping insn, or for an insn with side effects.
6582 We must then count uses of a SET_DEST regardless, because the insn can't be
6583 deleted here. */
6584
6585 static void
6586 count_reg_usage (rtx x, int *counts, rtx dest, int incr)
6587 {
6588 enum rtx_code code;
6589 rtx note;
6590 const char *fmt;
6591 int i, j;
6592
6593 if (x == 0)
6594 return;
6595
6596 switch (code = GET_CODE (x))
6597 {
6598 case REG:
6599 if (x != dest)
6600 counts[REGNO (x)] += incr;
6601 return;
6602
6603 case PC:
6604 case CC0:
6605 case CONST:
6606 case CONST_INT:
6607 case CONST_DOUBLE:
6608 case CONST_FIXED:
6609 case CONST_VECTOR:
6610 case SYMBOL_REF:
6611 case LABEL_REF:
6612 return;
6613
6614 case CLOBBER:
6615 /* If we are clobbering a MEM, mark any registers inside the address
6616 as being used. */
6617 if (MEM_P (XEXP (x, 0)))
6618 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
6619 return;
6620
6621 case SET:
6622 /* Unless we are setting a REG, count everything in SET_DEST. */
6623 if (!REG_P (SET_DEST (x)))
6624 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
6625 count_reg_usage (SET_SRC (x), counts,
6626 dest ? dest : SET_DEST (x),
6627 incr);
6628 return;
6629
6630 case DEBUG_INSN:
6631 return;
6632
6633 case CALL_INSN:
6634 case INSN:
6635 case JUMP_INSN:
6636 /* We expect dest to be NULL_RTX here. If the insn may trap,
6637 or if it cannot be deleted due to side-effects, mark this fact
6638 by setting DEST to pc_rtx. */
6639 if (insn_could_throw_p (x) || side_effects_p (PATTERN (x)))
6640 dest = pc_rtx;
6641 if (code == CALL_INSN)
6642 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
6643 count_reg_usage (PATTERN (x), counts, dest, incr);
6644
6645 /* Things used in a REG_EQUAL note aren't dead since the loop optimizer
6646 may try to use them. */
6647
6648 note = find_reg_equal_equiv_note (x);
6649 if (note)
6650 {
6651 rtx eqv = XEXP (note, 0);
6652
6653 if (GET_CODE (eqv) == EXPR_LIST)
6654 /* This REG_EQUAL note describes the result of a function call.
6655 Process all the arguments. */
6656 do
6657 {
6658 count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
6659 eqv = XEXP (eqv, 1);
6660 }
6661 while (eqv && GET_CODE (eqv) == EXPR_LIST);
6662 else
6663 count_reg_usage (eqv, counts, dest, incr);
6664 }
6665 return;
6666
6667 case EXPR_LIST:
6668 if (REG_NOTE_KIND (x) == REG_EQUAL
6669 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
6670 /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
6671 involving registers in the address. */
6672 || GET_CODE (XEXP (x, 0)) == CLOBBER)
6673 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
6674
6675 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
6676 return;
6677
6678 case ASM_OPERANDS:
6679 /* Iterate over just the inputs, not the constraints as well. */
6680 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
6681 count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
6682 return;
6683
6684 case INSN_LIST:
6685 gcc_unreachable ();
6686
6687 default:
6688 break;
6689 }
6690
6691 fmt = GET_RTX_FORMAT (code);
6692 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6693 {
6694 if (fmt[i] == 'e')
6695 count_reg_usage (XEXP (x, i), counts, dest, incr);
6696 else if (fmt[i] == 'E')
6697 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6698 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
6699 }
6700 }
6701 \f
6702 /* Return true if X is a dead register. */
6703
6704 static inline int
6705 is_dead_reg (rtx x, int *counts)
6706 {
6707 return (REG_P (x)
6708 && REGNO (x) >= FIRST_PSEUDO_REGISTER
6709 && counts[REGNO (x)] == 0);
6710 }
6711
6712 /* Return true if set is live. */
6713 static bool
6714 set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0. */
6715 int *counts)
6716 {
6717 #ifdef HAVE_cc0
6718 rtx tem;
6719 #endif
6720
6721 if (set_noop_p (set))
6722 ;
6723
6724 #ifdef HAVE_cc0
6725 else if (GET_CODE (SET_DEST (set)) == CC0
6726 && !side_effects_p (SET_SRC (set))
6727 && ((tem = next_nonnote_nondebug_insn (insn)) == NULL_RTX
6728 || !INSN_P (tem)
6729 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
6730 return false;
6731 #endif
6732 else if (!is_dead_reg (SET_DEST (set), counts)
6733 || side_effects_p (SET_SRC (set)))
6734 return true;
6735 return false;
6736 }
6737
6738 /* Return true if insn is live. */
6739
6740 static bool
6741 insn_live_p (rtx insn, int *counts)
6742 {
6743 int i;
6744 if (insn_could_throw_p (insn))
6745 return true;
6746 else if (GET_CODE (PATTERN (insn)) == SET)
6747 return set_live_p (PATTERN (insn), insn, counts);
6748 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
6749 {
6750 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6751 {
6752 rtx elt = XVECEXP (PATTERN (insn), 0, i);
6753
6754 if (GET_CODE (elt) == SET)
6755 {
6756 if (set_live_p (elt, insn, counts))
6757 return true;
6758 }
6759 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
6760 return true;
6761 }
6762 return false;
6763 }
6764 else if (DEBUG_INSN_P (insn))
6765 {
6766 rtx next;
6767
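      /* A debug bind is dead if it is superseded by a later bind for the
         same decl before any non-debug insn intervenes; notes and binds
         for other decls in between are skipped.  */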
6768 for (next = NEXT_INSN (insn); next; next = NEXT_INSN (next))
6769 if (NOTE_P (next))
6770 continue;
6771 else if (!DEBUG_INSN_P (next))
6772 return true;
6773 else if (INSN_VAR_LOCATION_DECL (insn) == INSN_VAR_LOCATION_DECL (next))
6774 return false;
6775
6776 return true;
6777 }
6778 else
6779 return true;
6780 }
6781
6782 /* Count the number of stores into pseudo. Callback for note_stores. */
6783
6784 static void
6785 count_stores (rtx x, const_rtx set ATTRIBUTE_UNUSED, void *data)
6786 {
6787 int *counts = (int *) data;
6788 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
6789 counts[REGNO (x)]++;
6790 }
6791
6792 struct dead_debug_insn_data
6793 {
6794 int *counts;
6795 rtx *replacements;
6796 bool seen_repl;
6797 };
6798
6799 /* Return 1 if a DEBUG_INSN needs to be reset because some dead
6800 pseudo doesn't have a replacement. Callback for for_each_rtx. */
6801
6802 static int
6803 is_dead_debug_insn (rtx *loc, void *data)
6804 {
6805 rtx x = *loc;
6806 struct dead_debug_insn_data *ddid = (struct dead_debug_insn_data *) data;
6807
6808 if (is_dead_reg (x, ddid->counts))
6809 {
6810 if (ddid->replacements && ddid->replacements[REGNO (x)] != NULL_RTX)
6811 ddid->seen_repl = true;
6812 else
6813 return 1;
6814 }
6815 return 0;
6816 }
6817
6818 /* Replace a dead pseudo in a DEBUG_INSN with replacement DEBUG_EXPR.
6819 Callback for simplify_replace_fn_rtx. */
6820
6821 static rtx
6822 replace_dead_reg (rtx x, const_rtx old_rtx ATTRIBUTE_UNUSED, void *data)
6823 {
6824 rtx *replacements = (rtx *) data;
6825
6826 if (REG_P (x)
6827 && REGNO (x) >= FIRST_PSEUDO_REGISTER
6828 && replacements[REGNO (x)] != NULL_RTX)
6829 {
6830 if (GET_MODE (x) == GET_MODE (replacements[REGNO (x)]))
6831 return replacements[REGNO (x)];
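      /* The modes differ; refer to the DEBUG_EXPR through a lowpart SUBREG
         in the mode of the use.  */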
6832 return lowpart_subreg (GET_MODE (x), replacements[REGNO (x)],
6833 GET_MODE (replacements[REGNO (x)]));
6834 }
6835 return NULL_RTX;
6836 }
6837
6838 /* Scan all the insns and delete any that are dead; i.e., they store a register
6839 that is never used or they copy a register to itself.
6840
6841 This is used to remove insns made obviously dead by cse, loop or other
6842 optimizations. It improves the heuristics in loop since it won't try to
6843 move dead invariants out of loops or make givs for dead quantities. The
6844 remaining passes of the compilation are also sped up. */
6845
6846 int
6847 delete_trivially_dead_insns (rtx insns, int nreg)
6848 {
6849 int *counts;
6850 rtx insn, prev;
6851 rtx *replacements = NULL;
6852 int ndead = 0;
6853
6854 timevar_push (TV_DELETE_TRIVIALLY_DEAD);
6855 /* First count the number of times each register is used. */
6856 if (MAY_HAVE_DEBUG_INSNS)
6857 {
6858 counts = XCNEWVEC (int, nreg * 3);
6859 for (insn = insns; insn; insn = NEXT_INSN (insn))
6860 if (DEBUG_INSN_P (insn))
6861 count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
6862 NULL_RTX, 1);
6863 else if (INSN_P (insn))
6864 {
6865 count_reg_usage (insn, counts, NULL_RTX, 1);
6866 note_stores (PATTERN (insn), count_stores, counts + nreg * 2);
6867 }
6868 /* If there can be debug insns, COUNTS are 3 consecutive arrays.
6869 First one counts how many times each pseudo is used outside
6870 of debug insns, second counts how many times each pseudo is
6871 used in debug insns and third counts how many times a pseudo
6872 is stored. */
6873 }
6874 else
6875 {
6876 counts = XCNEWVEC (int, nreg);
6877 for (insn = insns; insn; insn = NEXT_INSN (insn))
6878 if (INSN_P (insn))
6879 count_reg_usage (insn, counts, NULL_RTX, 1);
6880 /* If no debug insns can be present, COUNTS is just an array
6881 which counts how many times each pseudo is used. */
6882 }
6883 /* Go from the last insn to the first and delete insns that only set unused
6884 registers or copy a register to itself. As we delete an insn, remove
6885 usage counts for registers it uses.
6886
6887 The first jump optimization pass may leave a real insn as the last
6888 insn in the function. We must not skip that insn or we may end
6889 up deleting code that is not really dead.
6890
6891 If some otherwise unused register is only used in DEBUG_INSNs,
6892 try to create a DEBUG_EXPR temporary and emit a DEBUG_INSN before
6893 the setter. Then go through DEBUG_INSNs and if a DEBUG_EXPR
6894 has been created for the unused register, replace it with
6895 the DEBUG_EXPR, otherwise reset the DEBUG_INSN. */
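  /* Sketch of that DEBUG_EXPR case (names invented): if pseudo r200 is set
     exactly once and used only in DEBUG_INSNs, a bind
         (debug_insn (var_location D#1 <the SET_SRC>))
     is emitted before the dead setter, and the later scan over DEBUG_INSNs
     replaces r200 with D#1 (or resets a bind for which no replacement
     exists).  */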
6896 for (insn = get_last_insn (); insn; insn = prev)
6897 {
6898 int live_insn = 0;
6899
6900 prev = PREV_INSN (insn);
6901 if (!INSN_P (insn))
6902 continue;
6903
6904 live_insn = insn_live_p (insn, counts);
6905
6906 /* If this is a dead insn, delete it and show registers in it aren't
6907 being used. */
6908
6909 if (! live_insn && dbg_cnt (delete_trivial_dead))
6910 {
6911 if (DEBUG_INSN_P (insn))
6912 count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
6913 NULL_RTX, -1);
6914 else
6915 {
6916 rtx set;
6917 if (MAY_HAVE_DEBUG_INSNS
6918 && (set = single_set (insn)) != NULL_RTX
6919 && is_dead_reg (SET_DEST (set), counts)
6920 /* Used at least once in some DEBUG_INSN. */
6921 && counts[REGNO (SET_DEST (set)) + nreg] > 0
6922 /* And set exactly once. */
6923 && counts[REGNO (SET_DEST (set)) + nreg * 2] == 1
6924 && !side_effects_p (SET_SRC (set))
6925 && asm_noperands (PATTERN (insn)) < 0)
6926 {
6927 rtx dval, bind;
6928
6929 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
6930 dval = make_debug_expr_from_rtl (SET_DEST (set));
6931
6932 /* Emit a debug bind insn before the insn in which
6933 reg dies. */
6934 bind = gen_rtx_VAR_LOCATION (GET_MODE (SET_DEST (set)),
6935 DEBUG_EXPR_TREE_DECL (dval),
6936 SET_SRC (set),
6937 VAR_INIT_STATUS_INITIALIZED);
6938 count_reg_usage (bind, counts + nreg, NULL_RTX, 1);
6939
6940 bind = emit_debug_insn_before (bind, insn);
6941 df_insn_rescan (bind);
6942
6943 if (replacements == NULL)
6944 replacements = XCNEWVEC (rtx, nreg);
6945 replacements[REGNO (SET_DEST (set))] = dval;
6946 }
6947
6948 count_reg_usage (insn, counts, NULL_RTX, -1);
6949 ndead++;
6950 }
6951 delete_insn_and_edges (insn);
6952 }
6953 }
6954
6955 if (MAY_HAVE_DEBUG_INSNS)
6956 {
6957 struct dead_debug_insn_data ddid;
6958 ddid.counts = counts;
6959 ddid.replacements = replacements;
6960 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
6961 if (DEBUG_INSN_P (insn))
6962 {
6963 /* If this debug insn references a dead register that wasn't replaced
6964 with a DEBUG_EXPR, reset the DEBUG_INSN. */
6965 ddid.seen_repl = false;
6966 if (for_each_rtx (&INSN_VAR_LOCATION_LOC (insn),
6967 is_dead_debug_insn, &ddid))
6968 {
6969 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
6970 df_insn_rescan (insn);
6971 }
6972 else if (ddid.seen_repl)
6973 {
6974 INSN_VAR_LOCATION_LOC (insn)
6975 = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn),
6976 NULL_RTX, replace_dead_reg,
6977 replacements);
6978 df_insn_rescan (insn);
6979 }
6980 }
6981 free (replacements);
6982 }
6983
6984 if (dump_file && ndead)
6985 fprintf (dump_file, "Deleted %i trivially dead insns\n",
6986 ndead);
6987 /* Clean up. */
6988 free (counts);
6989 timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
6990 return ndead;
6991 }
6992
6993 /* This function is called via for_each_rtx. The argument, NEWREG, is
6994 a condition code register with the desired mode. If we are looking
6995 at the same register in a different mode, replace it with
6996 NEWREG. */
6997
6998 static int
6999 cse_change_cc_mode (rtx *loc, void *data)
7000 {
7001 struct change_cc_mode_args* args = (struct change_cc_mode_args*)data;
7002
7003 if (*loc
7004 && REG_P (*loc)
7005 && REGNO (*loc) == REGNO (args->newreg)
7006 && GET_MODE (*loc) != GET_MODE (args->newreg))
7007 {
7008 validate_change (args->insn, loc, args->newreg, 1);
7009
7010 return -1;
7011 }
7012 return 0;
7013 }
7014
7015 /* Change the mode of any reference to the register REGNO (NEWREG) to
7016 GET_MODE (NEWREG) in INSN. */
7017
7018 static void
7019 cse_change_cc_mode_insn (rtx insn, rtx newreg)
7020 {
7021 struct change_cc_mode_args args;
7022 int success;
7023
7024 if (!INSN_P (insn))
7025 return;
7026
7027 args.insn = insn;
7028 args.newreg = newreg;
7029
7030 for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
7031 for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, &args);
7032
7033 /* If the following assertion was triggered, there is most probably
7034 something wrong with the cc_modes_compatible back end function.
7035 CC modes only can be considered compatible if the insn - with the mode
7036 replaced by any of the compatible modes - can still be recognized. */
7037 success = apply_change_group ();
7038 gcc_assert (success);
7039 }
7040
7041 /* Change the mode of any reference to the register REGNO (NEWREG) to
7042 GET_MODE (NEWREG), starting at START. Stop before END. Stop at
7043 any instruction which modifies NEWREG. */
7044
7045 static void
7046 cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
7047 {
7048 rtx insn;
7049
7050 for (insn = start; insn != end; insn = NEXT_INSN (insn))
7051 {
7052 if (! INSN_P (insn))
7053 continue;
7054
7055 if (reg_set_p (newreg, insn))
7056 return;
7057
7058 cse_change_cc_mode_insn (insn, newreg);
7059 }
7060 }
7061
7062 /* BB is a basic block which finishes with CC_REG as a condition code
7063 register which is set to CC_SRC. Look through the successors of BB
7064 to find blocks which have a single predecessor (i.e., this one),
7065 and look through those blocks for an assignment to CC_REG which is
7066 equivalent to CC_SRC. CAN_CHANGE_MODE indicates whether we are
7067 permitted to change the mode of CC_SRC to a compatible mode. This
7068 returns VOIDmode if no equivalent assignments were found.
7069 Otherwise it returns the mode which CC_SRC should wind up with.
7070 ORIG_BB should be the same as BB in the outermost cse_cc_succs call,
7071 but is passed unmodified down to recursive calls in order to prevent
7072 endless recursion.
7073
7074 The main complexity in this function is handling the mode issues.
7075 We may have more than one duplicate which we can eliminate, and we
7076 try to find a mode which will work for multiple duplicates. */
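
/* For instance (a hypothetical, target-independent sketch; the concrete
   CC modes and register numbers depend on the back end): if BB ends with

       (set (reg:CC flags) (compare:CC (reg:SI a) (reg:SI b)))

   and a successor having BB as its only predecessor recomputes the same
   comparison in another mode,

       (set (reg:CCZ flags) (compare:CCZ (reg:SI a) (reg:SI b)))

   then targetm.cc_modes_compatible may report a single mode usable by
   both users; CC_SRC is rewritten to that mode, the duplicate set in
   the successor is deleted, and later references to the CC register in
   that block are adjusted via cse_change_cc_mode_insns. */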
7077
7078 static enum machine_mode
7079 cse_cc_succs (basic_block bb, basic_block orig_bb, rtx cc_reg, rtx cc_src,
7080 bool can_change_mode)
7081 {
7082 bool found_equiv;
7083 enum machine_mode mode;
7084 unsigned int insn_count;
7085 edge e;
7086 rtx insns[2];
7087 enum machine_mode modes[2];
7088 rtx last_insns[2];
7089 unsigned int i;
7090 rtx newreg;
7091 edge_iterator ei;
7092
7093 /* We expect to have two successors. Look at both before picking
7094 the final mode for the comparison. If we have more successors
7095 (i.e., some sort of table jump, although that seems unlikely),
7096 then we require all beyond the first two to use the same
7097 mode. */
7098
7099 found_equiv = false;
7100 mode = GET_MODE (cc_src);
7101 insn_count = 0;
7102 FOR_EACH_EDGE (e, ei, bb->succs)
7103 {
7104 rtx insn;
7105 rtx end;
7106
7107 if (e->flags & EDGE_COMPLEX)
7108 continue;
7109
7110 if (EDGE_COUNT (e->dest->preds) != 1
7111 || e->dest == EXIT_BLOCK_PTR
7112 /* Avoid endless recursion on unreachable blocks. */
7113 || e->dest == orig_bb)
7114 continue;
7115
7116 end = NEXT_INSN (BB_END (e->dest));
7117 for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
7118 {
7119 rtx set;
7120
7121 if (! INSN_P (insn))
7122 continue;
7123
7124 /* If CC_SRC is modified, we have to stop looking for
7125 something which uses it. */
7126 if (modified_in_p (cc_src, insn))
7127 break;
7128
7129 /* Check whether INSN sets CC_REG to CC_SRC. */
7130 set = single_set (insn);
7131 if (set
7132 && REG_P (SET_DEST (set))
7133 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7134 {
7135 bool found;
7136 enum machine_mode set_mode;
7137 enum machine_mode comp_mode;
7138
7139 found = false;
7140 set_mode = GET_MODE (SET_SRC (set));
7141 comp_mode = set_mode;
7142 if (rtx_equal_p (cc_src, SET_SRC (set)))
7143 found = true;
7144 else if (GET_CODE (cc_src) == COMPARE
7145 && GET_CODE (SET_SRC (set)) == COMPARE
7146 && mode != set_mode
7147 && rtx_equal_p (XEXP (cc_src, 0),
7148 XEXP (SET_SRC (set), 0))
7149 && rtx_equal_p (XEXP (cc_src, 1),
7150 XEXP (SET_SRC (set), 1)))
7151
7152 {
7153 comp_mode = targetm.cc_modes_compatible (mode, set_mode);
7154 if (comp_mode != VOIDmode
7155 && (can_change_mode || comp_mode == mode))
7156 found = true;
7157 }
7158
7159 if (found)
7160 {
7161 found_equiv = true;
7162 if (insn_count < ARRAY_SIZE (insns))
7163 {
7164 insns[insn_count] = insn;
7165 modes[insn_count] = set_mode;
7166 last_insns[insn_count] = end;
7167 ++insn_count;
7168
7169 if (mode != comp_mode)
7170 {
7171 gcc_assert (can_change_mode);
7172 mode = comp_mode;
7173
7174 /* The modified insn will be re-recognized later. */
7175 PUT_MODE (cc_src, mode);
7176 }
7177 }
7178 else
7179 {
7180 if (set_mode != mode)
7181 {
7182 /* We found a matching expression in the
7183 wrong mode, but we don't have room to
7184 store it in the array. Punt. This case
7185 should be rare. */
7186 break;
7187 }
7188 /* INSN sets CC_REG to a value equal to CC_SRC
7189 with the right mode. We can simply delete
7190 it. */
7191 delete_insn (insn);
7192 }
7193
7194 /* We found an instruction to delete. Keep looking,
7195 in the hopes of finding a three-way jump. */
7196 continue;
7197 }
7198
7199 /* We found an instruction which sets the condition
7200 code, so don't look any farther. */
7201 break;
7202 }
7203
7204 /* If INSN sets CC_REG in some other way, don't look any
7205 farther. */
7206 if (reg_set_p (cc_reg, insn))
7207 break;
7208 }
7209
7210 /* If we fell off the bottom of the block, we can keep looking
7211 through successors. We pass CAN_CHANGE_MODE as false because
7212 we aren't prepared to handle compatibility between the
7213 further blocks and this block. */
7214 if (insn == end)
7215 {
7216 enum machine_mode submode;
7217
7218 submode = cse_cc_succs (e->dest, orig_bb, cc_reg, cc_src, false);
7219 if (submode != VOIDmode)
7220 {
7221 gcc_assert (submode == mode);
7222 found_equiv = true;
7223 can_change_mode = false;
7224 }
7225 }
7226 }
7227
7228 if (! found_equiv)
7229 return VOIDmode;
7230
7231 /* Now INSN_COUNT is the number of instructions we found which set
7232 CC_REG to a value equivalent to CC_SRC. The instructions are in
7233 INSNS. The modes used by those instructions are in MODES. */
7234
7235 newreg = NULL_RTX;
7236 for (i = 0; i < insn_count; ++i)
7237 {
7238 if (modes[i] != mode)
7239 {
7240 /* We need to change the mode of CC_REG in INSNS[i] and
7241 subsequent instructions. */
7242 if (! newreg)
7243 {
7244 if (GET_MODE (cc_reg) == mode)
7245 newreg = cc_reg;
7246 else
7247 newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7248 }
7249 cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7250 newreg);
7251 }
7252
7253 delete_insn_and_edges (insns[i]);
7254 }
7255
7256 return mode;
7257 }
7258
7259 /* If we have a fixed condition code register (or two), walk through
7260 the instructions and try to eliminate duplicate assignments. */
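
/* A typical opportunity (sketched with hypothetical source and RTL)
   comes from code such as

       if (a < b)
         return x;
       if (a > b)
         return y;

   Both tests compare A with B, so both basic blocks set the fixed
   condition code register from the same COMPARE. The block holding the
   second test is the fall-through successor of the first and has no
   other predecessors, so its set of the CC register is redundant;
   cse_cc_succs deletes it, first switching to a CC mode acceptable to
   both users if the two sets used different modes. */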
7261
7262 static void
7263 cse_condition_code_reg (void)
7264 {
7265 unsigned int cc_regno_1;
7266 unsigned int cc_regno_2;
7267 rtx cc_reg_1;
7268 rtx cc_reg_2;
7269 basic_block bb;
7270
7271 if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
7272 return;
7273
7274 cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7275 if (cc_regno_2 != INVALID_REGNUM)
7276 cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7277 else
7278 cc_reg_2 = NULL_RTX;
7279
7280 FOR_EACH_BB (bb)
7281 {
7282 rtx last_insn;
7283 rtx cc_reg;
7284 rtx insn;
7285 rtx cc_src_insn;
7286 rtx cc_src;
7287 enum machine_mode mode;
7288 enum machine_mode orig_mode;
7289
7290 /* Look for blocks which end with a conditional jump based on a
7291 condition code register. Then look for the instruction which
7292 sets the condition code register. Then look through the
7293 successor blocks for instructions which set the condition
7294 code register to the same value. There are other possible
7295 uses of the condition code register, but these are by far the
7296 most common and the ones which we are most likely to be able
7297 to optimize. */
7298
7299 last_insn = BB_END (bb);
7300 if (!JUMP_P (last_insn))
7301 continue;
7302
7303 if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7304 cc_reg = cc_reg_1;
7305 else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7306 cc_reg = cc_reg_2;
7307 else
7308 continue;
7309
7310 cc_src_insn = NULL_RTX;
7311 cc_src = NULL_RTX;
7312 for (insn = PREV_INSN (last_insn);
7313 insn && insn != PREV_INSN (BB_HEAD (bb));
7314 insn = PREV_INSN (insn))
7315 {
7316 rtx set;
7317
7318 if (! INSN_P (insn))
7319 continue;
7320 set = single_set (insn);
7321 if (set
7322 && REG_P (SET_DEST (set))
7323 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7324 {
7325 cc_src_insn = insn;
7326 cc_src = SET_SRC (set);
7327 break;
7328 }
7329 else if (reg_set_p (cc_reg, insn))
7330 break;
7331 }
7332
7333 if (! cc_src_insn)
7334 continue;
7335
7336 if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7337 continue;
7338
7339 /* Now CC_REG is a condition code register used for a
7340 conditional jump at the end of the block, and CC_SRC, in
7341 CC_SRC_INSN, is the value to which that condition code
7342 register is set, and CC_SRC is still meaningful at the end of
7343 the basic block. */
7344
7345 orig_mode = GET_MODE (cc_src);
7346 mode = cse_cc_succs (bb, bb, cc_reg, cc_src, true);
7347 if (mode != VOIDmode)
7348 {
7349 gcc_assert (mode == GET_MODE (cc_src));
7350 if (mode != orig_mode)
7351 {
7352 rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7353
7354 cse_change_cc_mode_insn (cc_src_insn, newreg);
7355
7356 /* Do the same in the following insns that use the
7357 current value of CC_REG within BB. */
7358 cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
7359 NEXT_INSN (last_insn),
7360 newreg);
7361 }
7362 }
7363 }
7364 }
7365 \f
7366
7367 /* Perform common subexpression elimination. Nonzero value from
7368 `cse_main' means that jumps were simplified and some code may now
7369 be unreachable, so do jump optimization again. */
7370 static bool
7371 gate_handle_cse (void)
7372 {
7373 return optimize > 0;
7374 }
7375
7376 static unsigned int
7377 rest_of_handle_cse (void)
7378 {
7379 int tem;
7380
7381 if (dump_file)
7382 dump_flow_info (dump_file, dump_flags);
7383
7384 tem = cse_main (get_insns (), max_reg_num ());
7385
7386 /* If we are not running more CSE passes, then we are no longer
7387 expecting CSE to be run. But always rerun it in a cheap mode. */
7388 cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
7389
7390 if (tem == 2)
7391 {
7392 timevar_push (TV_JUMP);
7393 rebuild_jump_labels (get_insns ());
7394 cleanup_cfg (0);
7395 timevar_pop (TV_JUMP);
7396 }
7397 else if (tem == 1 || optimize > 1)
7398 cleanup_cfg (0);
7399
7400 return 0;
7401 }
7402
7403 struct rtl_opt_pass pass_cse =
7404 {
7405 {
7406 RTL_PASS,
7407 "cse1", /* name */
7408 gate_handle_cse, /* gate */
7409 rest_of_handle_cse, /* execute */
7410 NULL, /* sub */
7411 NULL, /* next */
7412 0, /* static_pass_number */
7413 TV_CSE, /* tv_id */
7414 0, /* properties_required */
7415 0, /* properties_provided */
7416 0, /* properties_destroyed */
7417 0, /* todo_flags_start */
7418 TODO_df_finish | TODO_verify_rtl_sharing |
7419 TODO_ggc_collect |
7420 TODO_verify_flow, /* todo_flags_finish */
7421 }
7422 };
7423
7424
7425 static bool
7426 gate_handle_cse2 (void)
7427 {
7428 return optimize > 0 && flag_rerun_cse_after_loop;
7429 }
7430
7431 /* Run second CSE pass after loop optimizations. */
7432 static unsigned int
7433 rest_of_handle_cse2 (void)
7434 {
7435 int tem;
7436
7437 if (dump_file)
7438 dump_flow_info (dump_file, dump_flags);
7439
7440 tem = cse_main (get_insns (), max_reg_num ());
7441
7442 /* Run a pass to eliminate duplicated assignments to condition code
7443 registers. We have to run this after bypass_jumps, because
7444 eliminating those assignments makes it harder for that pass to
7445 determine whether a jump can be bypassed safely. */
7446 cse_condition_code_reg ();
7447
7448 delete_trivially_dead_insns (get_insns (), max_reg_num ());
7449
7450 if (tem == 2)
7451 {
7452 timevar_push (TV_JUMP);
7453 rebuild_jump_labels (get_insns ());
7454 cleanup_cfg (0);
7455 timevar_pop (TV_JUMP);
7456 }
7457 else if (tem == 1)
7458 cleanup_cfg (0);
7459
7460 cse_not_expected = 1;
7461 return 0;
7462 }
7463
7464
7465 struct rtl_opt_pass pass_cse2 =
7466 {
7467 {
7468 RTL_PASS,
7469 "cse2", /* name */
7470 gate_handle_cse2, /* gate */
7471 rest_of_handle_cse2, /* execute */
7472 NULL, /* sub */
7473 NULL, /* next */
7474 0, /* static_pass_number */
7475 TV_CSE2, /* tv_id */
7476 0, /* properties_required */
7477 0, /* properties_provided */
7478 0, /* properties_destroyed */
7479 0, /* todo_flags_start */
7480 TODO_df_finish | TODO_verify_rtl_sharing |
7481 TODO_ggc_collect |
7482 TODO_verify_flow /* todo_flags_finish */
7483 }
7484 };
7485
7486 static bool
7487 gate_handle_cse_after_global_opts (void)
7488 {
7489 return optimize > 0 && flag_rerun_cse_after_global_opts;
7490 }
7491
7492 /* Run a local CSE pass after the global optimizations. */
7493 static unsigned int
7494 rest_of_handle_cse_after_global_opts (void)
7495 {
7496 int save_cfj;
7497 int tem;
7498
7499 /* We only want to do local CSE, so don't follow jumps. */
7500 save_cfj = flag_cse_follow_jumps;
7501 flag_cse_follow_jumps = 0;
7502
7503 rebuild_jump_labels (get_insns ());
7504 tem = cse_main (get_insns (), max_reg_num ());
7505 purge_all_dead_edges ();
7506 delete_trivially_dead_insns (get_insns (), max_reg_num ());
7507
7508 cse_not_expected = !flag_rerun_cse_after_loop;
7509
7510 /* If CSE altered any jumps, rerun jump optimization to clean things up. */
7511 if (tem == 2)
7512 {
7513 timevar_push (TV_JUMP);
7514 rebuild_jump_labels (get_insns ());
7515 cleanup_cfg (0);
7516 timevar_pop (TV_JUMP);
7517 }
7518 else if (tem == 1)
7519 cleanup_cfg (0);
7520
7521 flag_cse_follow_jumps = save_cfj;
7522 return 0;
7523 }
7524
7525 struct rtl_opt_pass pass_cse_after_global_opts =
7526 {
7527 {
7528 RTL_PASS,
7529 "cse_local", /* name */
7530 gate_handle_cse_after_global_opts, /* gate */
7531 rest_of_handle_cse_after_global_opts, /* execute */
7532 NULL, /* sub */
7533 NULL, /* next */
7534 0, /* static_pass_number */
7535 TV_CSE, /* tv_id */
7536 0, /* properties_required */
7537 0, /* properties_provided */
7538 0, /* properties_destroyed */
7539 0, /* todo_flags_start */
7540 TODO_df_finish | TODO_verify_rtl_sharing |
7541 TODO_ggc_collect |
7542 TODO_verify_flow /* todo_flags_finish */
7543 }
7544 };