/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
   2011 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "basic-block.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "diagnostic-core.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "timevar.h"
#include "except.h"
#include "target.h"
#include "params.h"
#include "rtlhooks-def.h"
#include "tree-pass.h"
#include "df.h"
#include "dbgcnt.h"

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `REG_QTY (N)' records what quantity register N is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to zero.
   If register N has not been assigned a quantity, `REG_QTY (N)' will
   equal -N - 1, which is always negative.

   Quantity numbers below zero do not exist and none of the `qty_table'
   entries should be referenced with a negative index.

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, REG expressions with
   the qty_table `mode' must be in the hash table for both registers and
   must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity number of one
   of the registers is not the same mode as those expressions.

Constants and quantity numbers

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   `REG_TICK' and `REG_IN_TABLE', accessors for members of
   cse_reg_info, are used to detect this case.  REG_TICK (i) is
   incremented whenever a value is stored in register i.
   REG_IN_TABLE (i) holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value REG_TICK (i)
   had when the references were entered.  If we want to enter a
   reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
   remove old references.  Until we want to enter a new entry, the
   mere fact that the two vectors don't match makes the entries be
   ignored if anyone tries to match them.

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, `REG_TICK' and
   `REG_IN_TABLE' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
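
/* As an illustration of the scheme above, consider a toy model of the
   quantity numbering (a sketch only, kept out of the build with #if 0;
   the toy_* names are hypothetical and exist just for this example).
   A register starts out unassigned with quantity -N - 1, a plain store
   allocates a fresh quantity, and a register-to-register copy shares
   the source's quantity, which is exactly what makes the two registers
   known-equivalent.  */
#if 0
#include <assert.h>

#define TOY_NREGS 8
static int toy_reg_qty[TOY_NREGS];	/* Toy REG_QTY.  */
static int toy_next_qty;		/* Toy next_qty.  */

static void
toy_new_block (void)
{
  int i;
  for (i = 0; i < TOY_NREGS; i++)
    toy_reg_qty[i] = -i - 1;		/* No quantity assigned yet.  */
  toy_next_qty = 0;
}

/* A store that is not a register copy gives DEST a fresh quantity.  */
static void
toy_store (int dest)
{
  toy_reg_qty[dest] = toy_next_qty++;
}

/* A register copy makes DEST share SRC's quantity.  */
static void
toy_copy (int dest, int src)
{
  if (toy_reg_qty[src] < 0)
    toy_store (src);
  toy_reg_qty[dest] = toy_reg_qty[src];
}

static void
toy_example (void)
{
  toy_new_block ();
  toy_store (1);			/* (set (reg 1) (plus ...))  */
  toy_copy (2, 1);			/* (set (reg 2) (reg 1))  */
  assert (toy_reg_qty[1] == toy_reg_qty[2]);	/* Known equivalent.  */
  toy_store (1);			/* reg 1 changes...  */
  assert (toy_reg_qty[1] != toy_reg_qty[2]);	/* ...reg 2 does not.  */
}
#endif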

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A sum of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */

struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  ENUM_BITFIELD(machine_mode) mode : 8;
};

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;

/* Structure used to pass arguments via for_each_rtx to function
   cse_change_cc_mode.  */
struct change_cc_mode_args
{
  rtx insn;
  rtx newreg;
};

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the current and last value assigned to CC0.
   If it should happen to be a constant, it is stored in preference
   to the actual assigned value.  In case it is a constant, we store
   the mode in which the constant should be interpreted.  */

static rtx this_insn_cc0, prev_insn_cc0;
static enum machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;
#endif

/* Insn being scanned.  */

static rtx this_insn;
static bool optimize_this_for_speed_p;

/* Index by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;

struct cse_reg_info
{
  /* The timestamp at which this register is initialized.  */
  unsigned int timestamp;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};
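
/* A minimal sketch (not compiled, hence the #if 0) of the staleness test
   that reg_tick and reg_in_table implement: expressions mentioning a
   register were entered under some tick value, and they are implicitly
   invalid once the register has been stored into again, because the
   recorded tick no longer matches the current one.  */
#if 0
static int
toy_table_refs_valid_p (const struct cse_reg_info *p)
{
  /* reg_in_table == -1 means no expression mentioning the register
     has been entered at all.  */
  return p->reg_in_table >= 0 && p->reg_in_table == p->reg_tick;
}
#endif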

/* A table of cse_reg_info indexed by register numbers.  */
static struct cse_reg_info *cse_reg_info_table;

/* The size of the above table.  */
static unsigned int cse_reg_info_table_size;

/* The index of the first entry that has not been initialized.  */
static unsigned int cse_reg_info_table_first_uninitialized;

/* The timestamp at the beginning of the current run of
   cse_extended_basic_block; it is incremented at the start of each
   such run.  The timestamp field of a cse_reg_info entry matches the
   value of this variable if and only if the entry has been
   initialized during the current run.  */
static unsigned int cse_reg_info_timestamp;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* True if CSE has altered the CFG.  */
static bool cse_cfg_altered;

/* True if CSE has altered conditional jump insns in such a way
   that jump optimization should be redone.  */
static bool cse_jumps_altered;

/* True if we put a LABEL_REF into the hash table for an INSN
   without a REG_LABEL_OPERAND; in that case we have to rerun jump
   after CSE to put in the note.  */
static bool recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The `canon_exp' field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   are on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;
  char is_const;
  char flag;
};
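
/* The chain invariants described above, written out as a checker (an
   illustrative sketch only, disabled with #if 0; it relies on the
   CHEAPER macro defined further down).  Every member of an equivalence
   class points at the same head through first_same_value, and walking
   next_same_value never moves to a strictly cheaper element.  */
#if 0
static int
toy_class_ok_p (struct table_elt *head)
{
  struct table_elt *p;

  for (p = head; p; p = p->next_same_value)
    {
      if (p->first_same_value != head)
	return 0;			/* Head pointer broken.  */
      if (p->next_same_value && CHEAPER (p->next_same_value, p))
	return 0;			/* Cost order violated.  */
    }
  return 1;
}
#endif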

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT 5
#define HASH_SIZE (1 << HASH_SHIFT)
#define HASH_MASK (HASH_SIZE - 1)

/* Compute hash code of X in mode M.  Special-case the case where X is
   a pseudo register (hard registers may require `do_not_record' to be
   set).  */

#define HASH(X, M)						\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER		\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : canon_hash (X, M)) & HASH_MASK)

/* Like HASH, but without side-effects.  */
#define SAFE_HASH(X, M)						\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER		\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : safe_hash (X, M)) & HASH_MASK)
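
/* For instance, with HASH_SHIFT == 5 there are just 32 buckets, and the
   final `& HASH_MASK' folds either the REG/quantity combination or the
   canon_hash value into a bucket index.  A standalone check (sketch
   only, with the macro values written out as plain constants):  */
#if 0
#include <assert.h>

static void
toy_bucket_example (void)
{
  unsigned hash = 0x9e3779b9u;		/* An arbitrary hash value.  */
  unsigned bucket = hash & 31;		/* HASH_MASK when HASH_SHIFT == 5.  */
  assert (bucket < 32);			/* Always a valid bucket index.  */
}
#endif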

/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N)							\
  (REGNO_PTR_FRAME_P (N)						\
   || (HARD_REGISTER_NUM_P (N)						\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
#define COST_IN(X, OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)

/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
   SUBREG).  */

#define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)

/* Get the quantity number for REG.  */

#define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)

/* Determine if the quantity number for register X represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)

/* Compare table_elt X and Y and return true iff X is cheaper than Y.  */

#define CHEAPER(X, Y) \
  (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)

static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;
static int constant_pool_entries_regcost;

/* Trace a path through the CFG.  */

struct branch_path
{
  /* The basic block for this path entry.  */
  basic_block bb;
};

/* This data describes a block that will be processed by
   cse_extended_basic_block.  */

struct cse_basic_block_data
{
  /* Total number of SETs in block.  */
  int nsets;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current path, indicating which basic_blocks will be processed.  */
  struct branch_path *path;
};


/* Pointers to the live in/live out bitmaps for the boundaries of the
   current EBB.  */
static bitmap cse_ebb_live_in, cse_ebb_live_out;

/* A simple bitmap to track which basic blocks have been visited
   already as part of an already processed extended basic block.  */
static sbitmap cse_visited_basic_blocks;

static bool fixed_base_plus_p (rtx x);
static int notreg_cost (rtx, enum rtx_code);
static int approx_reg_cost_1 (rtx *, void *);
static int approx_reg_cost (rtx);
static int preferable (int, int, int, int);
static void new_basic_block (void);
static void make_new_qty (unsigned int, enum machine_mode);
static void make_regs_eqv (unsigned int, unsigned int);
static void delete_reg_equiv (unsigned int);
static int mention_regs (rtx);
static int insert_regs (rtx, struct table_elt *, int);
static void remove_from_table (struct table_elt *, unsigned);
static void remove_pseudo_from_table (rtx, unsigned);
static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
static rtx lookup_as_function (rtx, enum rtx_code);
static struct table_elt *insert_with_costs (rtx, struct table_elt *, unsigned,
					    enum machine_mode, int, int);
static struct table_elt *insert (rtx, struct table_elt *, unsigned,
				 enum machine_mode);
static void merge_equiv_classes (struct table_elt *, struct table_elt *);
static void invalidate (rtx, enum machine_mode);
static bool cse_rtx_varies_p (const_rtx, bool);
static void remove_invalid_refs (unsigned int);
static void remove_invalid_subreg_refs (unsigned int, unsigned int,
					enum machine_mode);
static void rehash_using_reg (rtx);
static void invalidate_memory (void);
static void invalidate_for_call (void);
static rtx use_related_value (rtx, struct table_elt *);

static inline unsigned canon_hash (rtx, enum machine_mode);
static inline unsigned safe_hash (rtx, enum machine_mode);
static inline unsigned hash_rtx_string (const char *);

static rtx canon_reg (rtx, rtx);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
					   enum machine_mode *,
					   enum machine_mode *);
static rtx fold_rtx (rtx, rtx);
static rtx equiv_constant (rtx);
static void record_jump_equiv (rtx, bool);
static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
			      int);
static void cse_insn (rtx);
static void cse_prescan_path (struct cse_basic_block_data *);
static void invalidate_from_clobbers (rtx);
static rtx cse_process_notes (rtx, rtx, bool *);
static void cse_extended_basic_block (struct cse_basic_block_data *);
static void count_reg_usage (rtx, int *, rtx, int);
static int check_for_label_ref (rtx *, void *);
extern void dump_class (struct table_elt*);
static void get_cse_reg_info_1 (unsigned int regno);
static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
static int check_dependence (rtx *, void *);

static void flush_hash_table (void);
static bool insn_live_p (rtx, int *);
static bool set_live_p (rtx, rtx, int *);
static int cse_change_cc_mode (rtx *, void *);
static void cse_change_cc_mode_insn (rtx, rtx);
static void cse_change_cc_mode_insns (rtx, rtx, rtx);
static enum machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
				       bool);


#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible

static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

static bool
fixed_base_plus_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
	return true;
      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
	return true;
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
	return true;
      return false;

    case PLUS:
      if (!CONST_INT_P (XEXP (x, 1)))
	return false;
      return fixed_base_plus_p (XEXP (x, 0));

    default:
      return false;
    }
}

/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (struct table_elt *classp)
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}

/* Subroutine of approx_reg_cost; called through for_each_rtx.  */

static int
approx_reg_cost_1 (rtx *xp, void *data)
{
  rtx x = *xp;
  int *cost_p = (int *) data;

  if (x && REG_P (x))
    {
      unsigned int regno = REGNO (x);

      if (! CHEAP_REGNO (regno))
	{
	  if (regno < FIRST_PSEUDO_REGISTER)
	    {
	      if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
		return 1;
	      *cost_p += 2;
	    }
	  else
	    *cost_p += 1;
	}
    }

  return 0;
}

/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of
   0.  If any other hard register reference occurs, return MAX_COST.  */

static int
approx_reg_cost (rtx x)
{
  int cost = 0;

  if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
    return MAX_COST;

  return cost;
}

/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally good.  */
static int
preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
	return 1;
      if (cost_b == MAX_COST)
	return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
	return 1;
      if (regcost_b == MAX_COST)
	return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}
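
/* A few sample outcomes of the ordering above (illustrative only,
   disabled with #if 0): an operand with cost MAX_COST always loses,
   differing rtx costs dominate, and register cost only breaks ties.  */
#if 0
#include <assert.h>

static void
toy_preferable_example (void)
{
  assert (preferable (MAX_COST, 0, 4, 9) > 0);	/* Unwanted A loses.  */
  assert (preferable (2, 9, 4, 0) < 0);		/* Lower rtx cost wins.  */
  assert (preferable (4, 1, 4, 3) < 0);		/* Tie: regcost decides.  */
}
#endif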

/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (rtx x, enum rtx_code outer)
{
  return ((GET_CODE (x) == SUBREG
	   && REG_P (SUBREG_REG (x))
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? 0
	  : rtx_cost (x, outer, optimize_this_for_speed_p) * 2);
}


/* Initialize CSE_REG_INFO_TABLE.  */

static void
init_cse_reg_info (unsigned int nregs)
{
  /* Do we need to grow the table?  */
  if (nregs > cse_reg_info_table_size)
    {
      unsigned int new_size;

      if (cse_reg_info_table_size < 2048)
	{
	  /* Compute a new size that is a power of 2 and no smaller
	     than the larger of NREGS and 64.  */
	  new_size = (cse_reg_info_table_size
		      ? cse_reg_info_table_size : 64);

	  while (new_size < nregs)
	    new_size *= 2;
	}
      else
	{
	  /* If we need a big table, allocate just enough to hold
	     NREGS registers.  */
	  new_size = nregs;
	}

      /* Reallocate the table with NEW_SIZE entries.  */
      free (cse_reg_info_table);
      cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
      cse_reg_info_table_size = new_size;
      cse_reg_info_table_first_uninitialized = 0;
    }

  /* Do we have all of the first NREGS entries initialized?  */
  if (cse_reg_info_table_first_uninitialized < nregs)
    {
      unsigned int old_timestamp = cse_reg_info_timestamp - 1;
      unsigned int i;

      /* Put the old timestamp on newly allocated entries so that they
	 will all be considered out of date.  We do not touch those
	 entries beyond the first NREGS entries to be nice to the
	 virtual memory.  */
      for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
	cse_reg_info_table[i].timestamp = old_timestamp;

      cse_reg_info_table_first_uninitialized = nregs;
    }
}

/* Given REGNO, initialize the cse_reg_info entry for REGNO.  */

static void
get_cse_reg_info_1 (unsigned int regno)
{
  /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
     entry will be considered to have been initialized.  */
  cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;

  /* Initialize the rest of the entry.  */
  cse_reg_info_table[regno].reg_tick = 1;
  cse_reg_info_table[regno].reg_in_table = -1;
  cse_reg_info_table[regno].subreg_ticked = -1;
  cse_reg_info_table[regno].reg_qty = -regno - 1;
}

/* Find a cse_reg_info entry for REGNO.  */

static inline struct cse_reg_info *
get_cse_reg_info (unsigned int regno)
{
  struct cse_reg_info *p = &cse_reg_info_table[regno];

  /* If this entry has not been initialized, go ahead and initialize
     it.  */
  if (p->timestamp != cse_reg_info_timestamp)
    get_cse_reg_info_1 (regno);

  return p;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block (void)
{
  int i;

  next_qty = 0;

  /* Invalidate cse_reg_info_table.  */
  cse_reg_info_timestamp++;

  /* Clear out hash table state for this pass.  */
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this hash bucket's entire chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (unsigned int reg, enum machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  gcc_assert (next_qty < max_qty);

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (unsigned int new_reg, unsigned int old_reg)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old_reg);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  gcc_assert (REGNO_QTY_VALID_P (old_reg));

  REG_QTY (new_reg) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new_reg >= FIRST_PSEUDO_REGISTER
	  || REGNO_REG_CLASS (new_reg) != NO_REGS)
      && ((new_reg < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new_reg))
	  || (new_reg >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || (bitmap_bit_p (cse_ebb_live_out, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_out, firstr))
		  || (bitmap_bit_p (cse_ebb_live_in, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_in, firstr))))))
    {
      reg_eqv_table[firstr].prev = new_reg;
      reg_eqv_table[new_reg].next = firstr;
      reg_eqv_table[new_reg].prev = -1;
      ent->first_reg = new_reg;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new_reg >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new_reg].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new_reg;
      else
	qty_table[q].last_reg = new_reg;
      reg_eqv_table[lastr].next = new_reg;
      reg_eqv_table[new_reg].prev = lastr;
    }
}

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (unsigned int reg)
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (! REGNO_QTY_VALID_P (reg))
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = -reg - 1;
}
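
/* The unlink pattern above in isolation (a sketch, disabled with #if 0):
   -1 stands for "no neighbor", and the quantity's first_reg/last_reg
   play the role of the head and tail pointers of the chain.  */
#if 0
struct toy_node { int next, prev; };

static void
toy_unlink (struct toy_node *chain, int reg, int *head, int *tail)
{
  int p = chain[reg].prev, n = chain[reg].next;

  if (n != -1)
    chain[n].prev = p;
  else
    *tail = p;			/* REG was the last element.  */
  if (p != -1)
    chain[p].next = n;
  else
    *head = n;			/* REG was the first element.  */
}
#endif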

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = END_REGNO (x);
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	  SUBREG_TICKED (i) = -1;
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && REG_P (SUBREG_REG (x))
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
	     the last store to this register really stored into this
	     subreg, then remove the memory of this subreg.
	     Otherwise, remove any memory of the entire register and
	     all its subregs from the table.  */
	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || COMPARISON_P (x))
    {
      if (REG_P (XEXP (x, 0))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (REG_P (XEXP (x, 1))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (rtx x, struct table_elt *classp, int modified)
{
  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (REG_P (classp->exp)
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  unsigned c_regno = REGNO (classp->exp);

		  gcc_assert (REGNO_QTY_VALID_P (c_regno));

		  /* Suppose that 5 is a hard reg and 100 and 101 are
		     pseudos.  Consider

		     (set (reg:si 100) (reg:si 5))
		     (set (reg:si 5) (reg:si 100))
		     (set (reg:di 101) (reg:di 5))

		     We would now set REG_QTY (101) = REG_QTY (5), but the
		     entry for 5 is in SImode.  When we use this later in
		     copy propagation, we get the register in the wrong
		     mode.  */
		  if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
		    continue;

		  make_regs_eqv (regno, c_regno);
		  return 1;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}


/* Compute upper and lower anchors for CST.  Also compute the offset of CST
   from these anchors/bases such that *_BASE + *_OFFS = CST.  Return false iff
   CST is equal to an anchor.  */

static bool
compute_const_anchors (rtx cst,
		       HOST_WIDE_INT *lower_base, HOST_WIDE_INT *lower_offs,
		       HOST_WIDE_INT *upper_base, HOST_WIDE_INT *upper_offs)
{
  HOST_WIDE_INT n = INTVAL (cst);

  *lower_base = n & ~(targetm.const_anchor - 1);
  if (*lower_base == n)
    return false;

  *upper_base =
    (n + (targetm.const_anchor - 1)) & ~(targetm.const_anchor - 1);
  *upper_offs = n - *upper_base;
  *lower_offs = n - *lower_base;
  return true;
}
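
/* A worked example: on a hypothetical target with targetm.const_anchor
   == 0x8000, CST == 0x12345 yields

     lower_base = 0x12345 & ~0x7fff = 0x10000,  lower_offs = 0x2345
     upper_base = (0x12345 + 0x7fff) & ~0x7fff = 0x18000,  upper_offs = -0x5cbb

   so base + offset recovers CST from either anchor.  The same arithmetic
   in plain C (sketch only, disabled with #if 0):  */
#if 0
#include <assert.h>

static void
toy_anchor_example (void)
{
  long long n = 0x12345, anchor = 0x8000;
  long long lower = n & ~(anchor - 1);
  long long upper = (n + (anchor - 1)) & ~(anchor - 1);

  assert (lower == 0x10000 && n - lower == 0x2345);
  assert (upper == 0x18000 && n - upper == -0x5cbb);
}
#endif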
1243
1244 /* Insert the equivalence between ANCHOR and (REG + OFF) in mode MODE. */
1245
1246 static void
1247 insert_const_anchor (HOST_WIDE_INT anchor, rtx reg, HOST_WIDE_INT offs,
1248 enum machine_mode mode)
1249 {
1250 struct table_elt *elt;
1251 unsigned hash;
1252 rtx anchor_exp;
1253 rtx exp;
1254
1255 anchor_exp = GEN_INT (anchor);
1256 hash = HASH (anchor_exp, mode);
1257 elt = lookup (anchor_exp, hash, mode);
1258 if (!elt)
1259 elt = insert (anchor_exp, NULL, hash, mode);
1260
1261 exp = plus_constant (reg, offs);
1262 /* REG has just been inserted and the hash codes recomputed. */
1263 mention_regs (exp);
1264 hash = HASH (exp, mode);
1265
1266 /* Use the cost of the register rather than the whole expression. When
1267 looking up constant anchors we will further offset the corresponding
1268 expression therefore it does not make sense to prefer REGs over
1269 reg-immediate additions. Prefer instead the oldest expression. Also
1270 don't prefer pseudos over hard regs so that we derive constants in
1271 argument registers from other argument registers rather than from the
1272 original pseudo that was used to synthesize the constant. */
1273 insert_with_costs (exp, elt, hash, mode, COST (reg), 1);
1274 }
1275
1276 /* The constant CST is equivalent to the register REG. Create
1277 equivalences between the two anchors of CST and the corresponding
1278 register-offset expressions using REG. */
1279
1280 static void
1281 insert_const_anchors (rtx reg, rtx cst, enum machine_mode mode)
1282 {
1283 HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
1284
1285 if (!compute_const_anchors (cst, &lower_base, &lower_offs,
1286 &upper_base, &upper_offs))
1287 return;
1288
1289 /* Ignore anchors of value 0. Constants accessible from zero are
1290 simple. */
1291 if (lower_base != 0)
1292 insert_const_anchor (lower_base, reg, -lower_offs, mode);
1293
1294 if (upper_base != 0)
1295 insert_const_anchor (upper_base, reg, -upper_offs, mode);
1296 }
1297
1298 /* We need to express ANCHOR_ELT->exp + OFFS. Walk the equivalence list of
1299 ANCHOR_ELT and see if offsetting any of the entries by OFFS would create a
1300 valid expression. Return the cheapest and oldest of such expressions. In
1301 *OLD, return how old the resulting expression is compared to the other
1302 equivalent expressions. */
1303
1304 static rtx
1305 find_reg_offset_for_const (struct table_elt *anchor_elt, HOST_WIDE_INT offs,
1306 unsigned *old)
1307 {
1308 struct table_elt *elt;
1309 unsigned idx;
1310 struct table_elt *match_elt;
1311 rtx match;
1312
1313 /* Find the cheapest and *oldest* expression to maximize the chance of
1314 reusing the same pseudo. */
1315
1316 match_elt = NULL;
1317 match = NULL_RTX;
1318 for (elt = anchor_elt->first_same_value, idx = 0;
1319 elt;
1320 elt = elt->next_same_value, idx++)
1321 {
1322 if (match_elt && CHEAPER (match_elt, elt))
1323 return match;
1324
1325 if (REG_P (elt->exp)
1326 || (GET_CODE (elt->exp) == PLUS
1327 && REG_P (XEXP (elt->exp, 0))
1328 && GET_CODE (XEXP (elt->exp, 1)) == CONST_INT))
1329 {
1330 rtx x;
1331
1332 /* Ignore expressions that are no longer valid. */
1333 if (!REG_P (elt->exp) && !exp_equiv_p (elt->exp, elt->exp, 1, false))
1334 continue;
1335
1336 x = plus_constant (elt->exp, offs);
1337 if (REG_P (x)
1338 || (GET_CODE (x) == PLUS
1339 && IN_RANGE (INTVAL (XEXP (x, 1)),
1340 -targetm.const_anchor,
1341 targetm.const_anchor - 1)))
1342 {
1343 match = x;
1344 match_elt = elt;
1345 *old = idx;
1346 }
1347 }
1348 }
1349
1350 return match;
1351 }
1352
1353 /* Try to express the constant SRC_CONST using a register+offset expression
1354 derived from a constant anchor. Return it if successful or NULL_RTX,
1355 otherwise. */
1356
1357 static rtx
1358 try_const_anchors (rtx src_const, enum machine_mode mode)
1359 {
1360 struct table_elt *lower_elt, *upper_elt;
1361 HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
1362 rtx lower_anchor_rtx, upper_anchor_rtx;
1363 rtx lower_exp = NULL_RTX, upper_exp = NULL_RTX;
1364 unsigned lower_old, upper_old;
1365
1366 if (!compute_const_anchors (src_const, &lower_base, &lower_offs,
1367 &upper_base, &upper_offs))
1368 return NULL_RTX;
1369
1370 lower_anchor_rtx = GEN_INT (lower_base);
1371 upper_anchor_rtx = GEN_INT (upper_base);
1372 lower_elt = lookup (lower_anchor_rtx, HASH (lower_anchor_rtx, mode), mode);
1373 upper_elt = lookup (upper_anchor_rtx, HASH (upper_anchor_rtx, mode), mode);
1374
1375 if (lower_elt)
1376 lower_exp = find_reg_offset_for_const (lower_elt, lower_offs, &lower_old);
1377 if (upper_elt)
1378 upper_exp = find_reg_offset_for_const (upper_elt, upper_offs, &upper_old);
1379
1380 if (!lower_exp)
1381 return upper_exp;
1382 if (!upper_exp)
1383 return lower_exp;
1384
1385 /* Return the older expression. */
1386 return (upper_old > lower_old ? upper_exp : lower_exp);
1387 }
1388 \f
1389 /* Look in or update the hash table. */
1390
1391 /* Remove table element ELT from use in the table.
1392 HASH is its hash code, made using the HASH macro.
1393 It's an argument because often that is known in advance
1394 and we save much time not recomputing it. */
1395
1396 static void
1397 remove_from_table (struct table_elt *elt, unsigned int hash)
1398 {
1399 if (elt == 0)
1400 return;
1401
1402 /* Mark this element as removed. See cse_insn. */
1403 elt->first_same_value = 0;
1404
1405 /* Remove the table element from its equivalence class. */
1406
1407 {
1408 struct table_elt *prev = elt->prev_same_value;
1409 struct table_elt *next = elt->next_same_value;
1410
1411 if (next)
1412 next->prev_same_value = prev;
1413
1414 if (prev)
1415 prev->next_same_value = next;
1416 else
1417 {
1418 struct table_elt *newfirst = next;
1419 while (next)
1420 {
1421 next->first_same_value = newfirst;
1422 next = next->next_same_value;
1423 }
1424 }
1425 }
1426
1427 /* Remove the table element from its hash bucket. */
1428
1429 {
1430 struct table_elt *prev = elt->prev_same_hash;
1431 struct table_elt *next = elt->next_same_hash;
1432
1433 if (next)
1434 next->prev_same_hash = prev;
1435
1436 if (prev)
1437 prev->next_same_hash = next;
1438 else if (table[hash] == elt)
1439 table[hash] = next;
1440 else
1441 {
1442 /* This entry is not in the proper hash bucket. This can happen
1443 when two classes were merged by `merge_equiv_classes'. Search
1444 for the hash bucket that it heads. This happens only very
1445 rarely, so the cost is acceptable. */
1446 for (hash = 0; hash < HASH_SIZE; hash++)
1447 if (table[hash] == elt)
1448 table[hash] = next;
1449 }
1450 }
1451
1452 /* Remove the table element from its related-value circular chain. */
1453
1454 if (elt->related_value != 0 && elt->related_value != elt)
1455 {
1456 struct table_elt *p = elt->related_value;
1457
1458 while (p->related_value != elt)
1459 p = p->related_value;
1460 p->related_value = elt->related_value;
1461 if (p->related_value == p)
1462 p->related_value = 0;
1463 }
1464
1465 /* Now add it to the free element chain. */
1466 elt->next_same_hash = free_element_chain;
1467 free_element_chain = elt;
1468 }
1469
1470 /* Same as above, but X is a pseudo-register. */
1471
1472 static void
1473 remove_pseudo_from_table (rtx x, unsigned int hash)
1474 {
1475 struct table_elt *elt;
1476
1477 /* Because a pseudo-register can be referenced in more than one
1478 mode, we might have to remove more than one table entry. */
1479 while ((elt = lookup_for_remove (x, hash, VOIDmode)))
1480 remove_from_table (elt, hash);
1481 }
1482
1483 /* Look up X in the hash table and return its table element,
1484 or 0 if X is not in the table.
1485
1486 MODE is the machine-mode of X, or if X is an integer constant
1487 with VOIDmode then MODE is the mode with which X will be used.
1488
1489 Here we are satisfied to find an expression whose tree structure
1490 looks like X. */
1491
1492 static struct table_elt *
1493 lookup (rtx x, unsigned int hash, enum machine_mode mode)
1494 {
1495 struct table_elt *p;
1496
1497 for (p = table[hash]; p; p = p->next_same_hash)
1498 if (mode == p->mode && ((x == p->exp && REG_P (x))
1499 || exp_equiv_p (x, p->exp, !REG_P (x), false)))
1500 return p;
1501
1502 return 0;
1503 }
1504
1505 /* Like `lookup' but don't care whether the table element uses invalid regs.
1506 Also ignore discrepancies in the machine mode of a register. */
1507
1508 static struct table_elt *
1509 lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
1510 {
1511 struct table_elt *p;
1512
1513 if (REG_P (x))
1514 {
1515 unsigned int regno = REGNO (x);
1516
1517 /* Don't check the machine mode when comparing registers;
1518 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1519 for (p = table[hash]; p; p = p->next_same_hash)
1520 if (REG_P (p->exp)
1521 && REGNO (p->exp) == regno)
1522 return p;
1523 }
1524 else
1525 {
1526 for (p = table[hash]; p; p = p->next_same_hash)
1527 if (mode == p->mode
1528 && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
1529 return p;
1530 }
1531
1532 return 0;
1533 }
1534
1535 /* Look for an expression equivalent to X and with code CODE.
1536 If one is found, return that expression. */
1537
1538 static rtx
1539 lookup_as_function (rtx x, enum rtx_code code)
1540 {
1541 struct table_elt *p
1542 = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));
1543
1544 if (p == 0)
1545 return 0;
1546
1547 for (p = p->first_same_value; p; p = p->next_same_value)
1548 if (GET_CODE (p->exp) == code
1549 /* Make sure this is a valid entry in the table. */
1550 && exp_equiv_p (p->exp, p->exp, 1, false))
1551 return p->exp;
1552
1553 return 0;
1554 }
1555
1556 /* Insert X in the hash table, assuming HASH is its hash code and
1557 CLASSP is an element of the class it should go in (or 0 if a new
1558 class should be made). COST is the code of X and reg_cost is the
1559 cost of registers in X. It is inserted at the proper position to
1560 keep the class in the order cheapest first.
1561
1562 MODE is the machine-mode of X, or if X is an integer constant
1563 with VOIDmode then MODE is the mode with which X will be used.
1564
1565 For elements of equal cheapness, the most recent one
1566 goes in front, except that the first element in the list
1567 remains first unless a cheaper element is added. The order of
1568 pseudo-registers does not matter, as canon_reg will be called to
1569 find the cheapest when a register is retrieved from the table.
1570
1571 The in_memory field in the hash table element is set to 0.
1572 The caller must set it nonzero if appropriate.
1573
1574 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1575 and if insert_regs returns a nonzero value
1576 you must then recompute its hash code before calling here.
1577
1578 If necessary, update table showing constant values of quantities. */
1579
1580 static struct table_elt *
1581 insert_with_costs (rtx x, struct table_elt *classp, unsigned int hash,
1582 enum machine_mode mode, int cost, int reg_cost)
1583 {
1584 struct table_elt *elt;
1585
1586 /* If X is a register and we haven't made a quantity for it,
1587 something is wrong. */
1588 gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));
1589
1590 /* If X is a hard register, show it is being put in the table. */
1591 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1592 add_to_hard_reg_set (&hard_regs_in_table, GET_MODE (x), REGNO (x));
1593
1594 /* Put an element for X into the right hash bucket. */
1595
1596 elt = free_element_chain;
1597 if (elt)
1598 free_element_chain = elt->next_same_hash;
1599 else
1600 elt = XNEW (struct table_elt);
1601
1602 elt->exp = x;
1603 elt->canon_exp = NULL_RTX;
1604 elt->cost = cost;
1605 elt->regcost = reg_cost;
1606 elt->next_same_value = 0;
1607 elt->prev_same_value = 0;
1608 elt->next_same_hash = table[hash];
1609 elt->prev_same_hash = 0;
1610 elt->related_value = 0;
1611 elt->in_memory = 0;
1612 elt->mode = mode;
1613 elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));
1614
1615 if (table[hash])
1616 table[hash]->prev_same_hash = elt;
1617 table[hash] = elt;
1618
1619 /* Put it into the proper value-class. */
1620 if (classp)
1621 {
1622 classp = classp->first_same_value;
1623 if (CHEAPER (elt, classp))
1624 /* Insert at the head of the class. */
1625 {
1626 struct table_elt *p;
1627 elt->next_same_value = classp;
1628 classp->prev_same_value = elt;
1629 elt->first_same_value = elt;
1630
1631 for (p = classp; p; p = p->next_same_value)
1632 p->first_same_value = elt;
1633 }
1634 else
1635 {
1636 /* Insert not at head of the class. */
1637 /* Put it after the last element cheaper than X. */
1638 struct table_elt *p, *next;
1639
1640 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1641 p = next);
1642
1643 /* Put it after P and before NEXT. */
1644 elt->next_same_value = next;
1645 if (next)
1646 next->prev_same_value = elt;
1647
1648 elt->prev_same_value = p;
1649 p->next_same_value = elt;
1650 elt->first_same_value = classp;
1651 }
1652 }
1653 else
1654 elt->first_same_value = elt;
1655
1656 /* If this is a constant being set equivalent to a register or a register
1657 being set equivalent to a constant, note the constant equivalence.
1658
1659 If this is a constant, it cannot be equivalent to a different constant,
1660 and a constant is the only thing that can be cheaper than a register. So
1661 we know the register is the head of the class (before the constant was
1662 inserted).
1663
1664 If this is a register that is not already known equivalent to a
1665 constant, we must check the entire class.
1666
1667 If this is a register that is already known equivalent to a constant,
1668 update the qty's `const_insn' to show that `this_insn' is the latest
1669 insn making that quantity equivalent to the constant. */
1670
1671 if (elt->is_const && classp && REG_P (classp->exp)
1672 && !REG_P (x))
1673 {
1674 int exp_q = REG_QTY (REGNO (classp->exp));
1675 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1676
1677 exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
1678 exp_ent->const_insn = this_insn;
1679 }
1680
1681 else if (REG_P (x)
1682 && classp
1683 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1684 && ! elt->is_const)
1685 {
1686 struct table_elt *p;
1687
1688 for (p = classp; p != 0; p = p->next_same_value)
1689 {
1690 if (p->is_const && !REG_P (p->exp))
1691 {
1692 int x_q = REG_QTY (REGNO (x));
1693 struct qty_table_elem *x_ent = &qty_table[x_q];
1694
1695 x_ent->const_rtx
1696 = gen_lowpart (GET_MODE (x), p->exp);
1697 x_ent->const_insn = this_insn;
1698 break;
1699 }
1700 }
1701 }
1702
1703 else if (REG_P (x)
1704 && qty_table[REG_QTY (REGNO (x))].const_rtx
1705 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1706 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1707
1708 /* If this is a constant with symbolic value,
1709 and it has a term with an explicit integer value,
1710 link it up with related expressions. */
1711 if (GET_CODE (x) == CONST)
1712 {
1713 rtx subexp = get_related_value (x);
1714 unsigned subhash;
1715 struct table_elt *subelt, *subelt_prev;
1716
1717 if (subexp != 0)
1718 {
1719 /* Get the integer-free subexpression in the hash table. */
1720 subhash = SAFE_HASH (subexp, mode);
1721 subelt = lookup (subexp, subhash, mode);
1722 if (subelt == 0)
1723 subelt = insert (subexp, NULL, subhash, mode);
1724 /* Initialize SUBELT's circular chain if it has none. */
1725 if (subelt->related_value == 0)
1726 subelt->related_value = subelt;
1727 /* Find the element in the circular chain that precedes SUBELT. */
1728 subelt_prev = subelt;
1729 while (subelt_prev->related_value != subelt)
1730 subelt_prev = subelt_prev->related_value;
1731 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1732 This way the element that follows SUBELT is the oldest one. */
1733 elt->related_value = subelt_prev->related_value;
1734 subelt_prev->related_value = elt;
1735 }
1736 }
1737
1738 return elt;
1739 }
1740
1741 /* Wrap insert_with_costs by passing the default costs. */
1742
1743 static struct table_elt *
1744 insert (rtx x, struct table_elt *classp, unsigned int hash,
1745 enum machine_mode mode)
1746 {
1747 return
1748 insert_with_costs (x, classp, hash, mode, COST (x), approx_reg_cost (x));
1749 }
1750
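/* A minimal sketch of the calling protocol described above (this is
   essentially what merge_equiv_classes below does): recompute the hash
   whenever insert_regs reports that it made new quantities.

     if (insert_regs (exp, classp, 0))
       {
         rehash_using_reg (exp);
         hash = HASH (exp, mode);
       }
     elt = insert (exp, classp, hash, mode);
     elt->in_memory = hash_arg_in_memory;  */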
1751 \f
1752 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1753 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1754 the two classes equivalent.
1755
1756 CLASS1 will be the surviving class; CLASS2 should not be used after this
1757 call.
1758
1759 Any invalid entries in CLASS2 will not be copied. */
1760
1761 static void
1762 merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1763 {
1764 struct table_elt *elt, *next, *new_elt;
1765
1766 /* Ensure we start with the head of the classes. */
1767 class1 = class1->first_same_value;
1768 class2 = class2->first_same_value;
1769
1770 /* If they were already equal, forget it. */
1771 if (class1 == class2)
1772 return;
1773
1774 for (elt = class2; elt; elt = next)
1775 {
1776 unsigned int hash;
1777 rtx exp = elt->exp;
1778 enum machine_mode mode = elt->mode;
1779
1780 next = elt->next_same_value;
1781
1782 /* Remove old entry, make a new one in CLASS1's class.
1783 Don't do this for invalid entries as we cannot find their
1784 hash code (it also isn't necessary). */
1785 if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
1786 {
1787 bool need_rehash = false;
1788
1789 hash_arg_in_memory = 0;
1790 hash = HASH (exp, mode);
1791
1792 if (REG_P (exp))
1793 {
1794 need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
1795 delete_reg_equiv (REGNO (exp));
1796 }
1797
1798 if (REG_P (exp) && REGNO (exp) >= FIRST_PSEUDO_REGISTER)
1799 remove_pseudo_from_table (exp, hash);
1800 else
1801 remove_from_table (elt, hash);
1802
1803 if (insert_regs (exp, class1, 0) || need_rehash)
1804 {
1805 rehash_using_reg (exp);
1806 hash = HASH (exp, mode);
1807 }
1808 new_elt = insert (exp, class1, hash, mode);
1809 new_elt->in_memory = hash_arg_in_memory;
1810 }
1811 }
1812 }
1813 \f
1814 /* Flush the entire hash table. */
1815
1816 static void
1817 flush_hash_table (void)
1818 {
1819 int i;
1820 struct table_elt *p;
1821
1822 for (i = 0; i < HASH_SIZE; i++)
1823 for (p = table[i]; p; p = table[i])
1824 {
1825 /* Note that invalidate can remove elements
1826 after P in the current hash chain. */
1827 if (REG_P (p->exp))
1828 invalidate (p->exp, VOIDmode);
1829 else
1830 remove_from_table (p, i);
1831 }
1832 }
1833 \f
1834 /* Function called for each rtx to check whether a true dependence exists. */
1835 struct check_dependence_data
1836 {
1837 enum machine_mode mode;
1838 rtx exp;
1839 rtx addr;
1840 };
1841
1842 static int
1843 check_dependence (rtx *x, void *data)
1844 {
1845 struct check_dependence_data *d = (struct check_dependence_data *) data;
1846 if (*x && MEM_P (*x))
1847 return canon_true_dependence (d->exp, d->mode, d->addr, *x, NULL_RTX,
1848 cse_rtx_varies_p);
1849 else
1850 return 0;
1851 }
1852 \f
1853 /* Remove from the hash table, or mark as invalid, all expressions whose
1854 values could be altered by storing in X. X is a register, a subreg, or
1855 a memory reference with nonvarying address (because, when a memory
1856 reference with a varying address is stored in, all memory references are
1857 removed by invalidate_memory so specific invalidation is superfluous).
1858 FULL_MODE, if not VOIDmode, indicates that this much should be
1859 invalidated instead of just the amount indicated by the mode of X. This
1860 is only used for bitfield stores into memory.
1861
1862 A nonvarying address may be just a register or just a symbol reference,
1863 or it may be either of those plus a numeric offset. */
1864
1865 static void
1866 invalidate (rtx x, enum machine_mode full_mode)
1867 {
1868 int i;
1869 struct table_elt *p;
1870 rtx addr;
1871
1872 switch (GET_CODE (x))
1873 {
1874 case REG:
1875 {
1876 /* If X is a register, dependencies on its contents are recorded
1877 through the qty number mechanism. Just change the qty number of
1878 the register, mark it as invalid for expressions that refer to it,
1879 and remove it itself. */
1880 unsigned int regno = REGNO (x);
1881 unsigned int hash = HASH (x, GET_MODE (x));
1882
1883 /* Remove REGNO from any quantity list it might be on and indicate
1884 that its value might have changed. If it is a pseudo, remove its
1885 entry from the hash table.
1886
1887 For a hard register, we do the first two actions above for any
1888 additional hard registers corresponding to X. Then, if any of these
1889 registers are in the table, we must remove any REG entries that
1890 overlap these registers. */
1891
1892 delete_reg_equiv (regno);
1893 REG_TICK (regno)++;
1894 SUBREG_TICKED (regno) = -1;
1895
1896 if (regno >= FIRST_PSEUDO_REGISTER)
1897 remove_pseudo_from_table (x, hash);
1898 else
1899 {
1900 HOST_WIDE_INT in_table
1901 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1902 unsigned int endregno = END_HARD_REGNO (x);
1903 unsigned int tregno, tendregno, rn;
1904 struct table_elt *p, *next;
1905
1906 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1907
1908 for (rn = regno + 1; rn < endregno; rn++)
1909 {
1910 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1911 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1912 delete_reg_equiv (rn);
1913 REG_TICK (rn)++;
1914 SUBREG_TICKED (rn) = -1;
1915 }
1916
1917 if (in_table)
1918 for (hash = 0; hash < HASH_SIZE; hash++)
1919 for (p = table[hash]; p; p = next)
1920 {
1921 next = p->next_same_hash;
1922
1923 if (!REG_P (p->exp)
1924 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1925 continue;
1926
1927 tregno = REGNO (p->exp);
1928 tendregno = END_HARD_REGNO (p->exp);
1929 if (tendregno > regno && tregno < endregno)
1930 remove_from_table (p, hash);
1931 }
1932 }
1933 }
1934 return;
1935
1936 case SUBREG:
1937 invalidate (SUBREG_REG (x), VOIDmode);
1938 return;
1939
1940 case PARALLEL:
1941 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1942 invalidate (XVECEXP (x, 0, i), VOIDmode);
1943 return;
1944
1945 case EXPR_LIST:
1946 /* This is part of a disjoint return value; extract the location in
1947 question ignoring the offset. */
1948 invalidate (XEXP (x, 0), VOIDmode);
1949 return;
1950
1951 case MEM:
1952 addr = canon_rtx (get_addr (XEXP (x, 0)));
1953 /* Calculate the canonical version of X here so that
1954 true_dependence doesn't generate new RTL for X on each call. */
1955 x = canon_rtx (x);
1956
1957 /* Remove all hash table elements that refer to overlapping pieces of
1958 memory. */
1959 if (full_mode == VOIDmode)
1960 full_mode = GET_MODE (x);
1961
1962 for (i = 0; i < HASH_SIZE; i++)
1963 {
1964 struct table_elt *next;
1965
1966 for (p = table[i]; p; p = next)
1967 {
1968 next = p->next_same_hash;
1969 if (p->in_memory)
1970 {
1971 struct check_dependence_data d;
1972
1973 /* Just canonicalize the expression once;
1974 otherwise each time we call invalidate
1975 true_dependence will canonicalize the
1976 expression again. */
1977 if (!p->canon_exp)
1978 p->canon_exp = canon_rtx (p->exp);
1979 d.exp = x;
1980 d.addr = addr;
1981 d.mode = full_mode;
1982 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1983 remove_from_table (p, i);
1984 }
1985 }
1986 }
1987 return;
1988
1989 default:
1990 gcc_unreachable ();
1991 }
1992 }
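/* Worked example for the REG case above (hypothetical two-word hard
   register): invalidating (reg:DI 2), which occupies hard regs 2 and 3,
   bumps REG_TICK for both regs and then removes every REG entry whose
   hard register range overlaps [2, 4) -- e.g. an entry for (reg:SI 3) --
   via the test "tendregno > regno && tregno < endregno".  */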
1993 \f
1994 /* Remove all expressions that refer to register REGNO,
1995 since they are already invalid, and we are about to
1996 mark that register valid again and don't want the old
1997 expressions to reappear as valid. */
1998
1999 static void
2000 remove_invalid_refs (unsigned int regno)
2001 {
2002 unsigned int i;
2003 struct table_elt *p, *next;
2004
2005 for (i = 0; i < HASH_SIZE; i++)
2006 for (p = table[i]; p; p = next)
2007 {
2008 next = p->next_same_hash;
2009 if (!REG_P (p->exp)
2010 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
2011 remove_from_table (p, i);
2012 }
2013 }
2014
2015 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
2016 and mode MODE. */
2017 static void
2018 remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
2019 enum machine_mode mode)
2020 {
2021 unsigned int i;
2022 struct table_elt *p, *next;
2023 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
2024
2025 for (i = 0; i < HASH_SIZE; i++)
2026 for (p = table[i]; p; p = next)
2027 {
2028 rtx exp = p->exp;
2029 next = p->next_same_hash;
2030
2031 if (!REG_P (exp)
2032 && (GET_CODE (exp) != SUBREG
2033 || !REG_P (SUBREG_REG (exp))
2034 || REGNO (SUBREG_REG (exp)) != regno
2035 || (((SUBREG_BYTE (exp)
2036 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
2037 && SUBREG_BYTE (exp) <= end))
2038 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
2039 remove_from_table (p, i);
2040 }
2041 }
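/* Example (illustrative byte ranges): suppose the SImode quantity at
   bytes [0, 4) of pseudo REGNO changed, so OFFSET = 0 and END = 3.
   Then (subreg:SI (reg:DI REGNO) 4), covering bytes [4, 8), does not
   intersect [OFFSET, END] and survives, while (subreg:HI (reg:DI REGNO) 2),
   covering bytes [2, 4), intersects and is removed.  */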
2042 \f
2043 /* Recompute the hash codes of any valid entries in the hash table that
2044 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2045
2046 This is called when we make a jump equivalence. */
2047
2048 static void
2049 rehash_using_reg (rtx x)
2050 {
2051 unsigned int i;
2052 struct table_elt *p, *next;
2053 unsigned hash;
2054
2055 if (GET_CODE (x) == SUBREG)
2056 x = SUBREG_REG (x);
2057
2058 /* If X is not a register or if the register is known not to be in any
2059 valid entries in the table, we have no work to do. */
2060
2061 if (!REG_P (x)
2062 || REG_IN_TABLE (REGNO (x)) < 0
2063 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2064 return;
2065
2066 /* Scan all hash chains looking for valid entries that mention X.
2067 If we find one and it is in the wrong hash chain, move it. */
2068
2069 for (i = 0; i < HASH_SIZE; i++)
2070 for (p = table[i]; p; p = next)
2071 {
2072 next = p->next_same_hash;
2073 if (reg_mentioned_p (x, p->exp)
2074 && exp_equiv_p (p->exp, p->exp, 1, false)
2075 && i != (hash = SAFE_HASH (p->exp, p->mode)))
2076 {
2077 if (p->next_same_hash)
2078 p->next_same_hash->prev_same_hash = p->prev_same_hash;
2079
2080 if (p->prev_same_hash)
2081 p->prev_same_hash->next_same_hash = p->next_same_hash;
2082 else
2083 table[i] = p->next_same_hash;
2084
2085 p->next_same_hash = table[hash];
2086 p->prev_same_hash = 0;
2087 if (table[hash])
2088 table[hash]->prev_same_hash = p;
2089 table[hash] = p;
2090 }
2091 }
2092 }
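/* Example of why this is needed: REG hash codes are built from quantity
   numbers (see hash_rtx_cb below), so when a jump equivalence merges the
   quantities of, say, (reg 100) and (reg 101), a valid entry mentioning
   (reg 100) may now belong in a different bucket; the loop above moves
   each such entry to the chain of its recomputed hash.  */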
2093 \f
2094 /* Remove from the hash table any expressions that are call-clobbered
2095 registers. Also update their TICK values. */
2096
2097 static void
2098 invalidate_for_call (void)
2099 {
2100 unsigned int regno, endregno;
2101 unsigned int i;
2102 unsigned hash;
2103 struct table_elt *p, *next;
2104 int in_table = 0;
2105
2106 /* Go through all the hard registers. For each that is clobbered in
2107 a CALL_INSN, remove the register from quantity chains and update
2108 reg_tick if defined. Also see if any of these registers is currently
2109 in the table. */
2110
2111 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2112 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2113 {
2114 delete_reg_equiv (regno);
2115 if (REG_TICK (regno) >= 0)
2116 {
2117 REG_TICK (regno)++;
2118 SUBREG_TICKED (regno) = -1;
2119 }
2120
2121 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2122 }
2123
2124 /* In the case where we have no call-clobbered hard registers in the
2125 table, we are done. Otherwise, scan the table and remove any
2126 entry that overlaps a call-clobbered register. */
2127
2128 if (in_table)
2129 for (hash = 0; hash < HASH_SIZE; hash++)
2130 for (p = table[hash]; p; p = next)
2131 {
2132 next = p->next_same_hash;
2133
2134 if (!REG_P (p->exp)
2135 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2136 continue;
2137
2138 regno = REGNO (p->exp);
2139 endregno = END_HARD_REGNO (p->exp);
2140
2141 for (i = regno; i < endregno; i++)
2142 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2143 {
2144 remove_from_table (p, hash);
2145 break;
2146 }
2147 }
2148 }
2149 \f
2150 /* Given an expression X of type CONST,
2151 and ELT which is its table entry (or 0 if it
2152 is not in the hash table),
2153 return an alternate expression for X as a register plus integer.
2154 If none can be found, return 0. */
2155
2156 static rtx
2157 use_related_value (rtx x, struct table_elt *elt)
2158 {
2159 struct table_elt *relt = 0;
2160 struct table_elt *p, *q;
2161 HOST_WIDE_INT offset;
2162
2163 /* First, is there anything related known?
2164 If we have a table element, we can tell from that.
2165 Otherwise, must look it up. */
2166
2167 if (elt != 0 && elt->related_value != 0)
2168 relt = elt;
2169 else if (elt == 0 && GET_CODE (x) == CONST)
2170 {
2171 rtx subexp = get_related_value (x);
2172 if (subexp != 0)
2173 relt = lookup (subexp,
2174 SAFE_HASH (subexp, GET_MODE (subexp)),
2175 GET_MODE (subexp));
2176 }
2177
2178 if (relt == 0)
2179 return 0;
2180
2181 /* Search all related table entries for one that has an
2182 equivalent register. */
2183
2184 p = relt;
2185 while (1)
2186 {
2187 /* This loop is strange in that it is executed in two different cases.
2188 The first is when X is already in the table. Then it is searching
2189 the RELATED_VALUE list of X's class (RELT). The second case is when
2190 X is not in the table. Then RELT points to a class for the related
2191 value.
2192
2193 Ensure that, whichever case we are in, we ignore classes that have
2194 the same value as X. */
2195
2196 if (rtx_equal_p (x, p->exp))
2197 q = 0;
2198 else
2199 for (q = p->first_same_value; q; q = q->next_same_value)
2200 if (REG_P (q->exp))
2201 break;
2202
2203 if (q)
2204 break;
2205
2206 p = p->related_value;
2207
2208 /* We went all the way around, so there is nothing to be found.
2209 Alternatively, perhaps RELT was in the table for some other reason
2210 and it has no related values recorded. */
2211 if (p == relt || p == 0)
2212 break;
2213 }
2214
2215 if (q == 0)
2216 return 0;
2217
2218 offset = (get_integer_term (x) - get_integer_term (p->exp));
2219 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2220 return plus_constant (q->exp, offset);
2221 }
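/* Example (illustrative): suppose X is
     (const (plus (symbol_ref "s") (const_int 8)))
   and the table already knows (reg R) is equivalent to
     (const (plus (symbol_ref "s") (const_int 4))).
   Both CONSTs are linked into the related-value chain rooted at
   (symbol_ref "s") by insert_with_costs, so the search above finds the
   register class and returns (plus (reg R) (const_int 4)), the integer
   terms differing by 8 - 4 = 4.  */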
2222 \f
2223
2224 /* Hash a string. Just add its bytes up. */
2225 static inline unsigned
2226 hash_rtx_string (const char *ps)
2227 {
2228 unsigned hash = 0;
2229 const unsigned char *p = (const unsigned char *) ps;
2230
2231 if (p)
2232 while (*p)
2233 hash += *p++;
2234
2235 return hash;
2236 }
2237
2238 /* Same as hash_rtx, but call CB on each rtx if it is not NULL.
2239 When the callback returns true, we continue with the new rtx. */
2240
2241 unsigned
2242 hash_rtx_cb (const_rtx x, enum machine_mode mode,
2243 int *do_not_record_p, int *hash_arg_in_memory_p,
2244 bool have_reg_qty, hash_rtx_callback_function cb)
2245 {
2246 int i, j;
2247 unsigned hash = 0;
2248 enum rtx_code code;
2249 const char *fmt;
2250 enum machine_mode newmode;
2251 rtx newx;
2252
2253 /* Used to turn recursion into iteration. We can't rely on GCC's
2254 tail-recursion elimination since we need to keep accumulating values
2255 in HASH. */
2256 repeat:
2257 if (x == 0)
2258 return hash;
2259
2260 /* Invoke the callback first. */
2261 if (cb != NULL
2262 && ((*cb) (x, mode, &newx, &newmode)))
2263 {
2264 hash += hash_rtx_cb (newx, newmode, do_not_record_p,
2265 hash_arg_in_memory_p, have_reg_qty, cb);
2266 return hash;
2267 }
2268
2269 code = GET_CODE (x);
2270 switch (code)
2271 {
2272 case REG:
2273 {
2274 unsigned int regno = REGNO (x);
2275
2276 if (do_not_record_p && !reload_completed)
2277 {
2278 /* On some machines, we can't record any non-fixed hard register,
2279 because extending its life will cause reload problems. We
2280 consider ap, fp, sp, gp to be fixed for this purpose.
2281
2282 We also consider CCmode registers to be fixed for this purpose;
2283 failure to do so leads to failure to simplify 0<100 type of
2284 conditionals.
2285
2286 On all machines, we can't record any global registers.
2287 Nor should we record any register that is in a small
2288 class, as defined by TARGET_CLASS_LIKELY_SPILLED_P. */
2289 bool record;
2290
2291 if (regno >= FIRST_PSEUDO_REGISTER)
2292 record = true;
2293 else if (x == frame_pointer_rtx
2294 || x == hard_frame_pointer_rtx
2295 || x == arg_pointer_rtx
2296 || x == stack_pointer_rtx
2297 || x == pic_offset_table_rtx)
2298 record = true;
2299 else if (global_regs[regno])
2300 record = false;
2301 else if (fixed_regs[regno])
2302 record = true;
2303 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2304 record = true;
2305 else if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
2306 record = false;
2307 else if (targetm.class_likely_spilled_p (REGNO_REG_CLASS (regno)))
2308 record = false;
2309 else
2310 record = true;
2311
2312 if (!record)
2313 {
2314 *do_not_record_p = 1;
2315 return 0;
2316 }
2317 }
2318
2319 hash += ((unsigned int) REG << 7);
2320 hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2321 return hash;
2322 }
2323
2324 /* We handle SUBREG of a REG specially because the underlying
2325 reg changes its hash value with every value change; we don't
2326 want to have to forget unrelated subregs when one subreg changes. */
2327 case SUBREG:
2328 {
2329 if (REG_P (SUBREG_REG (x)))
2330 {
2331 hash += (((unsigned int) SUBREG << 7)
2332 + REGNO (SUBREG_REG (x))
2333 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2334 return hash;
2335 }
2336 break;
2337 }
2338
2339 case CONST_INT:
2340 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2341 + (unsigned int) INTVAL (x));
2342 return hash;
2343
2344 case CONST_DOUBLE:
2345 /* This is like the general case, except that it only counts
2346 the integers representing the constant. */
2347 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2348 if (GET_MODE (x) != VOIDmode)
2349 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2350 else
2351 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2352 + (unsigned int) CONST_DOUBLE_HIGH (x));
2353 return hash;
2354
2355 case CONST_FIXED:
2356 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2357 hash += fixed_hash (CONST_FIXED_VALUE (x));
2358 return hash;
2359
2360 case CONST_VECTOR:
2361 {
2362 int units;
2363 rtx elt;
2364
2365 units = CONST_VECTOR_NUNITS (x);
2366
2367 for (i = 0; i < units; ++i)
2368 {
2369 elt = CONST_VECTOR_ELT (x, i);
2370 hash += hash_rtx_cb (elt, GET_MODE (elt),
2371 do_not_record_p, hash_arg_in_memory_p,
2372 have_reg_qty, cb);
2373 }
2374
2375 return hash;
2376 }
2377
2378 /* Assume there is only one rtx object for any given label. */
2379 case LABEL_REF:
2380 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2381 differences and differences between each stage's debugging dumps. */
2382 hash += (((unsigned int) LABEL_REF << 7)
2383 + CODE_LABEL_NUMBER (XEXP (x, 0)));
2384 return hash;
2385
2386 case SYMBOL_REF:
2387 {
2388 /* Don't hash on the symbol's address to avoid bootstrap differences.
2389 Different hash values may cause expressions to be recorded in
2390 different orders and thus different registers to be used in the
2391 final assembler. This also avoids differences in the dump files
2392 between various stages. */
2393 unsigned int h = 0;
2394 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2395
2396 while (*p)
2397 h += (h << 7) + *p++; /* ??? revisit */
2398
2399 hash += ((unsigned int) SYMBOL_REF << 7) + h;
2400 return hash;
2401 }
2402
2403 case MEM:
2404 /* We don't record if marked volatile or if BLKmode since we don't
2405 know the size of the move. */
2406 if (do_not_record_p && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2407 {
2408 *do_not_record_p = 1;
2409 return 0;
2410 }
2411 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2412 *hash_arg_in_memory_p = 1;
2413
2414 /* Now that we have already found this special case,
2415 might as well speed it up as much as possible. */
2416 hash += (unsigned) MEM;
2417 x = XEXP (x, 0);
2418 goto repeat;
2419
2420 case USE:
2421 /* A USE that mentions non-volatile memory needs special
2422 handling since the MEM may be BLKmode which normally
2423 prevents an entry from being made. Pure calls are
2424 marked by a USE which mentions BLKmode memory.
2425 See calls.c:emit_call_1. */
2426 if (MEM_P (XEXP (x, 0))
2427 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2428 {
2429 hash += (unsigned) USE;
2430 x = XEXP (x, 0);
2431
2432 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2433 *hash_arg_in_memory_p = 1;
2434
2435 /* Now that we have already found this special case,
2436 might as well speed it up as much as possible. */
2437 hash += (unsigned) MEM;
2438 x = XEXP (x, 0);
2439 goto repeat;
2440 }
2441 break;
2442
2443 case PRE_DEC:
2444 case PRE_INC:
2445 case POST_DEC:
2446 case POST_INC:
2447 case PRE_MODIFY:
2448 case POST_MODIFY:
2449 case PC:
2450 case CC0:
2451 case CALL:
2452 case UNSPEC_VOLATILE:
2453 if (do_not_record_p)
2454 {
2455 *do_not_record_p = 1;
2456 return 0;
2457 }
2458 else
2459 return hash;
2460
2461 case ASM_OPERANDS:
2462 if (do_not_record_p && MEM_VOLATILE_P (x))
2463 {
2464 *do_not_record_p = 1;
2465 return 0;
2466 }
2467 else
2468 {
2469 /* We don't want to take the filename and line into account. */
2470 hash += (unsigned) code + (unsigned) GET_MODE (x)
2471 + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2472 + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2473 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2474
2475 if (ASM_OPERANDS_INPUT_LENGTH (x))
2476 {
2477 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2478 {
2479 hash += (hash_rtx_cb (ASM_OPERANDS_INPUT (x, i),
2480 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2481 do_not_record_p, hash_arg_in_memory_p,
2482 have_reg_qty, cb)
2483 + hash_rtx_string
2484 (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2485 }
2486
2487 hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2488 x = ASM_OPERANDS_INPUT (x, 0);
2489 mode = GET_MODE (x);
2490 goto repeat;
2491 }
2492
2493 return hash;
2494 }
2495 break;
2496
2497 default:
2498 break;
2499 }
2500
2501 i = GET_RTX_LENGTH (code) - 1;
2502 hash += (unsigned) code + (unsigned) GET_MODE (x);
2503 fmt = GET_RTX_FORMAT (code);
2504 for (; i >= 0; i--)
2505 {
2506 switch (fmt[i])
2507 {
2508 case 'e':
2509 /* If we are about to do the last recursive call
2510 needed at this level, change it into iteration.
2511 This function is called enough to be worth it. */
2512 if (i == 0)
2513 {
2514 x = XEXP (x, i);
2515 goto repeat;
2516 }
2517
2518 hash += hash_rtx_cb (XEXP (x, i), VOIDmode, do_not_record_p,
2519 hash_arg_in_memory_p,
2520 have_reg_qty, cb);
2521 break;
2522
2523 case 'E':
2524 for (j = 0; j < XVECLEN (x, i); j++)
2525 hash += hash_rtx_cb (XVECEXP (x, i, j), VOIDmode, do_not_record_p,
2526 hash_arg_in_memory_p,
2527 have_reg_qty, cb);
2528 break;
2529
2530 case 's':
2531 hash += hash_rtx_string (XSTR (x, i));
2532 break;
2533
2534 case 'i':
2535 hash += (unsigned int) XINT (x, i);
2536 break;
2537
2538 case '0': case 't':
2539 /* Unused. */
2540 break;
2541
2542 default:
2543 gcc_unreachable ();
2544 }
2545 }
2546
2547 return hash;
2548 }
2549
2550 /* Hash an rtx. We are careful to make sure the value is never negative.
2551 Equivalent registers hash identically.
2552 MODE is used in hashing for CONST_INTs only;
2553 otherwise the mode of X is used.
2554
2555 Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2556
2557 If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2558 a MEM rtx which does not have the RTX_UNCHANGING_P bit set.
2559
2560 Note that cse_insn knows that the hash code of a MEM expression
2561 is just (int) MEM plus the hash code of the address. */
2562
2563 unsigned
2564 hash_rtx (const_rtx x, enum machine_mode mode, int *do_not_record_p,
2565 int *hash_arg_in_memory_p, bool have_reg_qty)
2566 {
2567 return hash_rtx_cb (x, mode, do_not_record_p,
2568 hash_arg_in_memory_p, have_reg_qty, NULL);
2569 }
2570
2571 /* Hash an rtx X for cse via hash_rtx.
2572 Stores 1 in do_not_record if any subexpression is volatile.
2573 Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2574 does not have the RTX_UNCHANGING_P bit set. */
2575
2576 static inline unsigned
2577 canon_hash (rtx x, enum machine_mode mode)
2578 {
2579 return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2580 }
2581
2582 /* Like canon_hash but with no side effects, i.e. do_not_record
2583 and hash_arg_in_memory are not changed. */
2584
2585 static inline unsigned
2586 safe_hash (rtx x, enum machine_mode mode)
2587 {
2588 int dummy_do_not_record;
2589 return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2590 }
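/* Usage sketch: canon_hash is for recording an expression, when the
   do_not_record and hash_arg_in_memory globals must be updated;
   safe_hash is for mere probing, e.g.

     p = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));

   as in use_related_value above (SAFE_HASH being the wrapper macro over
   safe_hash defined earlier in this file).  */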
2591 \f
2592 /* Return 1 iff X and Y would canonicalize into the same thing,
2593 without actually constructing the canonicalization of either one.
2594 If VALIDATE is nonzero,
2595 we assume X is an expression being processed from the rtl
2596 and Y was found in the hash table. We check register refs
2597 in Y for being marked as valid.
2598
2599 If FOR_GCSE is true, we compare X and Y for equivalence for GCSE. */
2600
2601 int
2602 exp_equiv_p (const_rtx x, const_rtx y, int validate, bool for_gcse)
2603 {
2604 int i, j;
2605 enum rtx_code code;
2606 const char *fmt;
2607
2608 /* Note: it is incorrect to assume an expression is equivalent to itself
2609 if VALIDATE is nonzero. */
2610 if (x == y && !validate)
2611 return 1;
2612
2613 if (x == 0 || y == 0)
2614 return x == y;
2615
2616 code = GET_CODE (x);
2617 if (code != GET_CODE (y))
2618 return 0;
2619
2620 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2621 if (GET_MODE (x) != GET_MODE (y))
2622 return 0;
2623
2624 /* MEMs referring to different address spaces are not equivalent. */
2625 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2626 return 0;
2627
2628 switch (code)
2629 {
2630 case PC:
2631 case CC0:
2632 case CONST_INT:
2633 case CONST_DOUBLE:
2634 case CONST_FIXED:
2635 return x == y;
2636
2637 case LABEL_REF:
2638 return XEXP (x, 0) == XEXP (y, 0);
2639
2640 case SYMBOL_REF:
2641 return XSTR (x, 0) == XSTR (y, 0);
2642
2643 case REG:
2644 if (for_gcse)
2645 return REGNO (x) == REGNO (y);
2646 else
2647 {
2648 unsigned int regno = REGNO (y);
2649 unsigned int i;
2650 unsigned int endregno = END_REGNO (y);
2651
2652 /* If the quantities are not the same, the expressions are not
2653 equivalent. If they are and we are not to validate, they
2654 are equivalent. Otherwise, ensure all regs are up-to-date. */
2655
2656 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2657 return 0;
2658
2659 if (! validate)
2660 return 1;
2661
2662 for (i = regno; i < endregno; i++)
2663 if (REG_IN_TABLE (i) != REG_TICK (i))
2664 return 0;
2665
2666 return 1;
2667 }
2668
2669 case MEM:
2670 if (for_gcse)
2671 {
2672 /* A volatile mem should not be considered equivalent to any
2673 other. */
2674 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2675 return 0;
2676
2677 /* Can't merge two expressions in different alias sets, since we
2678 can decide that the expression is transparent in a block when
2679 it isn't, due to it being set with the different alias set.
2680
2681 Also, can't merge two expressions with different MEM_ATTRS.
2682 They could e.g. be two different entities allocated into the
2683 same space on the stack (see e.g. PR25130). In that case, the
2684 MEM addresses can be the same, even though the two MEMs are
2685 absolutely not equivalent.
2686
2687 But because really all MEM attributes should be the same for
2688 equivalent MEMs, we just use the invariant that MEMs that have
2689 the same attributes share the same mem_attrs data structure. */
2690 if (MEM_ATTRS (x) != MEM_ATTRS (y))
2691 return 0;
2692 }
2693 break;
2694
2695 /* For commutative operations, check both orders. */
2696 case PLUS:
2697 case MULT:
2698 case AND:
2699 case IOR:
2700 case XOR:
2701 case NE:
2702 case EQ:
2703 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2704 validate, for_gcse)
2705 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2706 validate, for_gcse))
2707 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2708 validate, for_gcse)
2709 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2710 validate, for_gcse)));
2711
2712 case ASM_OPERANDS:
2713 /* We don't use the generic code below because we want to
2714 disregard filename and line numbers. */
2715
2716 /* A volatile asm isn't equivalent to any other. */
2717 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2718 return 0;
2719
2720 if (GET_MODE (x) != GET_MODE (y)
2721 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2722 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2723 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2724 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2725 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2726 return 0;
2727
2728 if (ASM_OPERANDS_INPUT_LENGTH (x))
2729 {
2730 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2731 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2732 ASM_OPERANDS_INPUT (y, i),
2733 validate, for_gcse)
2734 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2735 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2736 return 0;
2737 }
2738
2739 return 1;
2740
2741 default:
2742 break;
2743 }
2744
2745 /* Compare the elements. If any pair of corresponding elements
2746 fail to match, return 0 for the whole thing. */
2747
2748 fmt = GET_RTX_FORMAT (code);
2749 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2750 {
2751 switch (fmt[i])
2752 {
2753 case 'e':
2754 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2755 validate, for_gcse))
2756 return 0;
2757 break;
2758
2759 case 'E':
2760 if (XVECLEN (x, i) != XVECLEN (y, i))
2761 return 0;
2762 for (j = 0; j < XVECLEN (x, i); j++)
2763 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2764 validate, for_gcse))
2765 return 0;
2766 break;
2767
2768 case 's':
2769 if (strcmp (XSTR (x, i), XSTR (y, i)))
2770 return 0;
2771 break;
2772
2773 case 'i':
2774 if (XINT (x, i) != XINT (y, i))
2775 return 0;
2776 break;
2777
2778 case 'w':
2779 if (XWINT (x, i) != XWINT (y, i))
2780 return 0;
2781 break;
2782
2783 case '0':
2784 case 't':
2785 break;
2786
2787 default:
2788 gcc_unreachable ();
2789 }
2790 }
2791
2792 return 1;
2793 }
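/* Two quick consequences of the rules above: (plus:SI (reg 10) (reg 11))
   and (plus:SI (reg 11) (reg 10)) compare equal, because commutative
   codes are checked in both operand orders; (mult:SI x y) and
   (mult:HI x y) do not, because the modes differ.  */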
2794 \f
2795 /* Return 1 if X has a value that can vary even between two
2796 executions of the program. 0 means X can be compared reliably
2797 against certain constants or near-constants. */
2798
2799 static bool
2800 cse_rtx_varies_p (const_rtx x, bool from_alias)
2801 {
2802 /* We need not check for X and the equivalence class being of the same
2803 mode because if X is equivalent to a constant in some mode, it
2804 doesn't vary in any mode. */
2805
2806 if (REG_P (x)
2807 && REGNO_QTY_VALID_P (REGNO (x)))
2808 {
2809 int x_q = REG_QTY (REGNO (x));
2810 struct qty_table_elem *x_ent = &qty_table[x_q];
2811
2812 if (GET_MODE (x) == x_ent->mode
2813 && x_ent->const_rtx != NULL_RTX)
2814 return 0;
2815 }
2816
2817 if (GET_CODE (x) == PLUS
2818 && CONST_INT_P (XEXP (x, 1))
2819 && REG_P (XEXP (x, 0))
2820 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2821 {
2822 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2823 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2824
2825 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2826 && x0_ent->const_rtx != NULL_RTX)
2827 return 0;
2828 }
2829
2830 /* This can happen as the result of virtual register instantiation, if
2831 the initial constant is too large to be a valid address. This gives
2832 us a three instruction sequence, load large offset into a register,
2833 load fp minus a constant into a register, then a MEM which is the
2834 sum of the two `constant' registers. */
2835 if (GET_CODE (x) == PLUS
2836 && REG_P (XEXP (x, 0))
2837 && REG_P (XEXP (x, 1))
2838 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2839 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2840 {
2841 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2842 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2843 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2844 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2845
2846 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2847 && x0_ent->const_rtx != NULL_RTX
2848 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2849 && x1_ent->const_rtx != NULL_RTX)
2850 return 0;
2851 }
2852
2853 return rtx_varies_p (x, from_alias);
2854 }
2855 \f
2856 /* Subroutine of canon_reg. Pass *XLOC through canon_reg, and validate
2857 the result if necessary. INSN is as for canon_reg. */
2858
2859 static void
2860 validate_canon_reg (rtx *xloc, rtx insn)
2861 {
2862 if (*xloc)
2863 {
2864 rtx new_rtx = canon_reg (*xloc, insn);
2865
2866 /* If replacing pseudo with hard reg or vice versa, ensure the
2867 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2868 gcc_assert (insn && new_rtx);
2869 validate_change (insn, xloc, new_rtx, 1);
2870 }
2871 }
2872
2873 /* Canonicalize an expression:
2874 replace each register reference inside it
2875 with the "oldest" equivalent register.
2876
2877 If INSN is nonzero validate_change is used to ensure that INSN remains valid
2878 after we make our substitution. The calls are made with IN_GROUP nonzero
2879 so apply_change_group must be called upon the outermost return from this
2880 function (unless INSN is zero). The result of apply_change_group can
2881 generally be discarded since the changes we are making are optional. */
2882
2883 static rtx
2884 canon_reg (rtx x, rtx insn)
2885 {
2886 int i;
2887 enum rtx_code code;
2888 const char *fmt;
2889
2890 if (x == 0)
2891 return x;
2892
2893 code = GET_CODE (x);
2894 switch (code)
2895 {
2896 case PC:
2897 case CC0:
2898 case CONST:
2899 case CONST_INT:
2900 case CONST_DOUBLE:
2901 case CONST_FIXED:
2902 case CONST_VECTOR:
2903 case SYMBOL_REF:
2904 case LABEL_REF:
2905 case ADDR_VEC:
2906 case ADDR_DIFF_VEC:
2907 return x;
2908
2909 case REG:
2910 {
2911 int first;
2912 int q;
2913 struct qty_table_elem *ent;
2914
2915 /* Never replace a hard reg, because hard regs can appear
2916 in more than one machine mode, and we must preserve the mode
2917 of each occurrence. Also, some hard regs appear in
2918 MEMs that are shared and mustn't be altered. Don't try to
2919 replace any reg that maps to a reg of class NO_REGS. */
2920 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2921 || ! REGNO_QTY_VALID_P (REGNO (x)))
2922 return x;
2923
2924 q = REG_QTY (REGNO (x));
2925 ent = &qty_table[q];
2926 first = ent->first_reg;
2927 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2928 : REGNO_REG_CLASS (first) == NO_REGS ? x
2929 : gen_rtx_REG (ent->mode, first));
2930 }
2931
2932 default:
2933 break;
2934 }
2935
2936 fmt = GET_RTX_FORMAT (code);
2937 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2938 {
2939 int j;
2940
2941 if (fmt[i] == 'e')
2942 validate_canon_reg (&XEXP (x, i), insn);
2943 else if (fmt[i] == 'E')
2944 for (j = 0; j < XVECLEN (x, i); j++)
2945 validate_canon_reg (&XVECEXP (x, i, j), insn);
2946 }
2947
2948 return x;
2949 }
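/* Example (hypothetical quantity contents): if pseudos 100 and 105 are
   in one quantity whose first_reg is 100, canon_reg rewrites
   (plus:SI (reg 105) (const_int 4)) into (plus:SI (reg 100) (const_int 4)),
   queueing the change against INSN when INSN is nonzero so that
   apply_change_group can verify the insn is still recognizable.  */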
2950 \f
2951 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2952 operation (EQ, NE, GT, etc.), follow it back through the hash table and
2953 find what values are being compared.
2954
2955 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2956 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2957 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2958 compared to produce cc0.
2959
2960 The return value is the comparison operator: either CODE itself or
2961 the code corresponding to the inverse of the comparison. */
2962
2963 static enum rtx_code
2964 find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
2965 enum machine_mode *pmode1, enum machine_mode *pmode2)
2966 {
2967 rtx arg1, arg2;
2968
2969 arg1 = *parg1, arg2 = *parg2;
2970
2971 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2972
2973 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2974 {
2975 /* Set nonzero when we find something of interest. */
2976 rtx x = 0;
2977 int reverse_code = 0;
2978 struct table_elt *p = 0;
2979
2980 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2981 On machines with CC0, this is the only case that can occur, since
2982 fold_rtx will return the COMPARE or item being compared with zero
2983 when given CC0. */
2984
2985 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2986 x = arg1;
2987
2988 /* If ARG1 is a comparison operator and CODE is testing for
2989 STORE_FLAG_VALUE, get the inner arguments. */
2990
2991 else if (COMPARISON_P (arg1))
2992 {
2993 #ifdef FLOAT_STORE_FLAG_VALUE
2994 REAL_VALUE_TYPE fsfv;
2995 #endif
2996
2997 if (code == NE
2998 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2999 && code == LT && STORE_FLAG_VALUE == -1)
3000 #ifdef FLOAT_STORE_FLAG_VALUE
3001 || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
3002 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3003 REAL_VALUE_NEGATIVE (fsfv)))
3004 #endif
3005 )
3006 x = arg1;
3007 else if (code == EQ
3008 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3009 && code == GE && STORE_FLAG_VALUE == -1)
3010 #ifdef FLOAT_STORE_FLAG_VALUE
3011 || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
3012 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3013 REAL_VALUE_NEGATIVE (fsfv)))
3014 #endif
3015 )
3016 x = arg1, reverse_code = 1;
3017 }
3018
3019 /* ??? We could also check for
3020
3021 (ne (and (eq (...) (const_int 1))) (const_int 0))
3022
3023 and related forms, but let's wait until we see them occurring. */
3024
3025 if (x == 0)
3026 /* Look up ARG1 in the hash table and see if it has an equivalence
3027 that lets us see what is being compared. */
3028 p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
3029 if (p)
3030 {
3031 p = p->first_same_value;
3032
3033 /* If what we compare is already known to be constant, that is as
3034 good as it gets.
3035 We need to break the loop in this case, because otherwise we
3036 can have an infinite loop when looking at a reg that is known
3037 to be a constant which is the same as a comparison of a reg
3038 against zero which appears later in the insn stream, which in
3039 turn is constant and the same as the comparison of the first reg
3040 against zero... */
3041 if (p->is_const)
3042 break;
3043 }
3044
3045 for (; p; p = p->next_same_value)
3046 {
3047 enum machine_mode inner_mode = GET_MODE (p->exp);
3048 #ifdef FLOAT_STORE_FLAG_VALUE
3049 REAL_VALUE_TYPE fsfv;
3050 #endif
3051
3052 /* If the entry isn't valid, skip it. */
3053 if (! exp_equiv_p (p->exp, p->exp, 1, false))
3054 continue;
3055
3056 if (GET_CODE (p->exp) == COMPARE
3057 /* Another possibility is that this machine has a compare insn
3058 that includes the comparison code. In that case, ARG1 would
3059 be equivalent to a comparison operation that would set ARG1 to
3060 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3061 ORIG_CODE is the actual comparison being done; if it is an EQ,
3062 we must reverse ORIG_CODE. On machine with a negative value
3063 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3064 || ((code == NE
3065 || (code == LT
3066 && GET_MODE_CLASS (inner_mode) == MODE_INT
3067 && (GET_MODE_BITSIZE (inner_mode)
3068 <= HOST_BITS_PER_WIDE_INT)
3069 && (STORE_FLAG_VALUE
3070 & ((HOST_WIDE_INT) 1
3071 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3072 #ifdef FLOAT_STORE_FLAG_VALUE
3073 || (code == LT
3074 && SCALAR_FLOAT_MODE_P (inner_mode)
3075 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3076 REAL_VALUE_NEGATIVE (fsfv)))
3077 #endif
3078 )
3079 && COMPARISON_P (p->exp)))
3080 {
3081 x = p->exp;
3082 break;
3083 }
3084 else if ((code == EQ
3085 || (code == GE
3086 && GET_MODE_CLASS (inner_mode) == MODE_INT
3087 && (GET_MODE_BITSIZE (inner_mode)
3088 <= HOST_BITS_PER_WIDE_INT)
3089 && (STORE_FLAG_VALUE
3090 & ((HOST_WIDE_INT) 1
3091 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3092 #ifdef FLOAT_STORE_FLAG_VALUE
3093 || (code == GE
3094 && SCALAR_FLOAT_MODE_P (inner_mode)
3095 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3096 REAL_VALUE_NEGATIVE (fsfv)))
3097 #endif
3098 )
3099 && COMPARISON_P (p->exp))
3100 {
3101 reverse_code = 1;
3102 x = p->exp;
3103 break;
3104 }
3105
3106 /* If this is a non-trapping address, e.g. fp + constant, the
3107 equivalent is a better operand since it may let us predict
3108 the value of the comparison. */
3109 else if (!rtx_addr_can_trap_p (p->exp))
3110 {
3111 arg1 = p->exp;
3112 continue;
3113 }
3114 }
3115
3116 /* If we didn't find a useful equivalence for ARG1, we are done.
3117 Otherwise, set up for the next iteration. */
3118 if (x == 0)
3119 break;
3120
3121 /* If we need to reverse the comparison, make sure that this is
3122 possible -- we can't necessarily infer the value of GE from LT
3123 with floating-point operands. */
3124 if (reverse_code)
3125 {
3126 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3127 if (reversed == UNKNOWN)
3128 break;
3129 else
3130 code = reversed;
3131 }
3132 else if (COMPARISON_P (x))
3133 code = GET_CODE (x);
3134 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3135 }
3136
3137 /* Return our results. Return the modes from before fold_rtx
3138 because fold_rtx might produce const_int, and then it's too late. */
3139 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3140 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3141
3142 return code;
3143 }
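/* Example (illustrative): after
     (set (reg F) (eq:SI (reg A) (const_int 0)))
   a later test (ne (reg F) (const_int 0)) is traced back here: (reg F)
   is found equivalent to the EQ, so the loop rewrites the operands to
   (reg A) and (const_int 0) and returns EQ.  Had the test been
   (eq (reg F) (const_int 0)), the reversed code NE would be returned.  */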
3144 \f
3145 /* If X is a nontrivial arithmetic operation on an argument for which
3146 a constant value can be determined, return the result of operating
3147 on that value, as a constant. Otherwise, return X, possibly with
3148 one or more operands changed to a forward-propagated constant.
3149
3150 If X is a register whose contents are known, we do NOT return
3151 those contents here; equiv_constant is called to perform that task.
3152 For SUBREGs and MEMs, we do that both here and in equiv_constant.
3153
3154 INSN is the insn that we may be modifying. If it is 0, make a copy
3155 of X before modifying it. */
3156
3157 static rtx
3158 fold_rtx (rtx x, rtx insn)
3159 {
3160 enum rtx_code code;
3161 enum machine_mode mode;
3162 const char *fmt;
3163 int i;
3164 rtx new_rtx = 0;
3165 int changed = 0;
3166
3167 /* Operands of X. */
3168 rtx folded_arg0;
3169 rtx folded_arg1;
3170
3171 /* Constant equivalents of first three operands of X;
3172 0 when no such equivalent is known. */
3173 rtx const_arg0;
3174 rtx const_arg1;
3175 rtx const_arg2;
3176
3177 /* The mode of the first operand of X. We need this for sign and zero
3178 extends. */
3179 enum machine_mode mode_arg0;
3180
3181 if (x == 0)
3182 return x;
3183
3184 /* Try to perform some initial simplifications on X. */
3185 code = GET_CODE (x);
3186 switch (code)
3187 {
3188 case MEM:
3189 case SUBREG:
3190 if ((new_rtx = equiv_constant (x)) != NULL_RTX)
3191 return new_rtx;
3192 return x;
3193
3194 case CONST:
3195 case CONST_INT:
3196 case CONST_DOUBLE:
3197 case CONST_FIXED:
3198 case CONST_VECTOR:
3199 case SYMBOL_REF:
3200 case LABEL_REF:
3201 case REG:
3202 case PC:
3203 /* No use simplifying an EXPR_LIST
3204 since EXPR_LISTs are used only for lists of args
3205 in a function call's REG_EQUAL note. */
3206 case EXPR_LIST:
3207 return x;
3208
3209 #ifdef HAVE_cc0
3210 case CC0:
3211 return prev_insn_cc0;
3212 #endif
3213
3214 case ASM_OPERANDS:
3215 if (insn)
3216 {
3217 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3218 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3219 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3220 }
3221 return x;
3222
3223 #ifdef NO_FUNCTION_CSE
3224 case CALL:
3225 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3226 return x;
3227 break;
3228 #endif
3229
3230 /* Anything else goes through the loop below. */
3231 default:
3232 break;
3233 }
3234
3235 mode = GET_MODE (x);
3236 const_arg0 = 0;
3237 const_arg1 = 0;
3238 const_arg2 = 0;
3239 mode_arg0 = VOIDmode;
3240
3241 /* Try folding our operands.
3242 Then see which ones have constant values known. */
3243
3244 fmt = GET_RTX_FORMAT (code);
3245 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3246 if (fmt[i] == 'e')
3247 {
3248 rtx folded_arg = XEXP (x, i), const_arg;
3249 enum machine_mode mode_arg = GET_MODE (folded_arg);
3250
3251 switch (GET_CODE (folded_arg))
3252 {
3253 case MEM:
3254 case REG:
3255 case SUBREG:
3256 const_arg = equiv_constant (folded_arg);
3257 break;
3258
3259 case CONST:
3260 case CONST_INT:
3261 case SYMBOL_REF:
3262 case LABEL_REF:
3263 case CONST_DOUBLE:
3264 case CONST_FIXED:
3265 case CONST_VECTOR:
3266 const_arg = folded_arg;
3267 break;
3268
3269 #ifdef HAVE_cc0
3270 case CC0:
3271 folded_arg = prev_insn_cc0;
3272 mode_arg = prev_insn_cc0_mode;
3273 const_arg = equiv_constant (folded_arg);
3274 break;
3275 #endif
3276
3277 default:
3278 folded_arg = fold_rtx (folded_arg, insn);
3279 const_arg = equiv_constant (folded_arg);
3280 break;
3281 }
3282
3283 /* For the first three operands, see if the operand
3284 is constant or equivalent to a constant. */
3285 switch (i)
3286 {
3287 case 0:
3288 folded_arg0 = folded_arg;
3289 const_arg0 = const_arg;
3290 mode_arg0 = mode_arg;
3291 break;
3292 case 1:
3293 folded_arg1 = folded_arg;
3294 const_arg1 = const_arg;
3295 break;
3296 case 2:
3297 const_arg2 = const_arg;
3298 break;
3299 }
3300
3301 /* Pick the least expensive of the argument and an equivalent constant
3302 argument. */
3303 if (const_arg != 0
3304 && const_arg != folded_arg
3305 && COST_IN (const_arg, code) <= COST_IN (folded_arg, code)
3306
3307 /* It's not safe to substitute the operand of a conversion
3308 operator with a constant, as the conversion's identity
3309 depends upon the mode of its operand. This optimization
3310 is handled by the call to simplify_unary_operation. */
3311 && (GET_RTX_CLASS (code) != RTX_UNARY
3312 || GET_MODE (const_arg) == mode_arg0
3313 || (code != ZERO_EXTEND
3314 && code != SIGN_EXTEND
3315 && code != TRUNCATE
3316 && code != FLOAT_TRUNCATE
3317 && code != FLOAT_EXTEND
3318 && code != FLOAT
3319 && code != FIX
3320 && code != UNSIGNED_FLOAT
3321 && code != UNSIGNED_FIX)))
3322 folded_arg = const_arg;
3323
3324 if (folded_arg == XEXP (x, i))
3325 continue;
3326
3327 if (insn == NULL_RTX && !changed)
3328 x = copy_rtx (x);
3329 changed = 1;
3330 validate_unshare_change (insn, &XEXP (x, i), folded_arg, 1);
3331 }
3332
3333 if (changed)
3334 {
3335 /* Canonicalize X if necessary, and keep const_argN and folded_argN
3336 consistent with the order in X. */
3337 if (canonicalize_change_group (insn, x))
3338 {
3339 rtx tem;
3340 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3341 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3342 }
3343
3344 apply_change_group ();
3345 }
3346
3347 /* If X is an arithmetic operation, see if we can simplify it. */
3348
3349 switch (GET_RTX_CLASS (code))
3350 {
3351 case RTX_UNARY:
3352 {
3353 /* We can't simplify extension ops unless we know the
3354 original mode. */
3355 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3356 && mode_arg0 == VOIDmode)
3357 break;
3358
3359 new_rtx = simplify_unary_operation (code, mode,
3360 const_arg0 ? const_arg0 : folded_arg0,
3361 mode_arg0);
3362 }
3363 break;
3364
3365 case RTX_COMPARE:
3366 case RTX_COMM_COMPARE:
3367 /* See what items are actually being compared and set FOLDED_ARG[01]
3368 to those values and CODE to the actual comparison code. If any are
3369 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3370 do anything if both operands are already known to be constant. */
3371
3372 /* ??? Vector mode comparisons are not supported yet. */
3373 if (VECTOR_MODE_P (mode))
3374 break;
3375
3376 if (const_arg0 == 0 || const_arg1 == 0)
3377 {
3378 struct table_elt *p0, *p1;
3379 rtx true_rtx, false_rtx;
3380 enum machine_mode mode_arg1;
3381
3382 if (SCALAR_FLOAT_MODE_P (mode))
3383 {
3384 #ifdef FLOAT_STORE_FLAG_VALUE
3385 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3386 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3387 #else
3388 true_rtx = NULL_RTX;
3389 #endif
3390 false_rtx = CONST0_RTX (mode);
3391 }
3392 else
3393 {
3394 true_rtx = const_true_rtx;
3395 false_rtx = const0_rtx;
3396 }
3397
3398 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3399 &mode_arg0, &mode_arg1);
3400
3401 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3402 what kinds of things are being compared, so we can't do
3403 anything with this comparison. */
3404
3405 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3406 break;
3407
3408 const_arg0 = equiv_constant (folded_arg0);
3409 const_arg1 = equiv_constant (folded_arg1);
3410
3411 /* If we do not now have two constants being compared, see
3412 if we can nevertheless deduce some things about the
3413 comparison. */
3414 if (const_arg0 == 0 || const_arg1 == 0)
3415 {
3416 if (const_arg1 != NULL)
3417 {
3418 rtx cheapest_simplification;
3419 int cheapest_cost;
3420 rtx simp_result;
3421 struct table_elt *p;
3422
3423 /* See if we can find an equivalent of folded_arg0
3424 that gets us a cheaper expression, possibly a
3425 constant through simplifications. */
3426 p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0),
3427 mode_arg0);
3428
3429 if (p != NULL)
3430 {
3431 cheapest_simplification = x;
3432 cheapest_cost = COST (x);
3433
3434 for (p = p->first_same_value; p != NULL; p = p->next_same_value)
3435 {
3436 int cost;
3437
3438 /* If the entry isn't valid, skip it. */
3439 if (! exp_equiv_p (p->exp, p->exp, 1, false))
3440 continue;
3441
3442 /* Try to simplify using this equivalence. */
3443 simp_result
3444 = simplify_relational_operation (code, mode,
3445 mode_arg0,
3446 p->exp,
3447 const_arg1);
3448
3449 if (simp_result == NULL)
3450 continue;
3451
3452 cost = COST (simp_result);
3453 if (cost < cheapest_cost)
3454 {
3455 cheapest_cost = cost;
3456 cheapest_simplification = simp_result;
3457 }
3458 }
3459
3460 /* If we have a cheaper expression now, use that
3461 and try folding it further, from the top. */
3462 if (cheapest_simplification != x)
3463 return fold_rtx (copy_rtx (cheapest_simplification),
3464 insn);
3465 }
3466 }
3467
3468 /* See if the two operands are the same. */
3469
3470 if ((REG_P (folded_arg0)
3471 && REG_P (folded_arg1)
3472 && (REG_QTY (REGNO (folded_arg0))
3473 == REG_QTY (REGNO (folded_arg1))))
3474 || ((p0 = lookup (folded_arg0,
3475 SAFE_HASH (folded_arg0, mode_arg0),
3476 mode_arg0))
3477 && (p1 = lookup (folded_arg1,
3478 SAFE_HASH (folded_arg1, mode_arg0),
3479 mode_arg0))
3480 && p0->first_same_value == p1->first_same_value))
3481 folded_arg1 = folded_arg0;
3482
3483 /* If FOLDED_ARG0 is a register, see if the comparison we are
3484 doing now is either the same as we did before or the reverse
3485 (we only check the reverse if not floating-point). */
3486 else if (REG_P (folded_arg0))
3487 {
3488 int qty = REG_QTY (REGNO (folded_arg0));
3489
3490 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3491 {
3492 struct qty_table_elem *ent = &qty_table[qty];
3493
3494 if ((comparison_dominates_p (ent->comparison_code, code)
3495 || (! FLOAT_MODE_P (mode_arg0)
3496 && comparison_dominates_p (ent->comparison_code,
3497 reverse_condition (code))))
3498 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3499 || (const_arg1
3500 && rtx_equal_p (ent->comparison_const,
3501 const_arg1))
3502 || (REG_P (folded_arg1)
3503 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3504 {
3505 if (comparison_dominates_p (ent->comparison_code, code))
3506 {
3507 if (true_rtx)
3508 return true_rtx;
3509 else
3510 break;
3511 }
3512 else
3513 return false_rtx;
3514 }
3515 }
3516 }
3517 }
3518 }
3519
3520 /* If we are comparing against zero, see if the first operand is
3521 equivalent to an IOR with a constant. If so, we may be able to
3522 determine the result of this comparison. */
3523 if (const_arg1 == const0_rtx && !const_arg0)
3524 {
3525 rtx y = lookup_as_function (folded_arg0, IOR);
3526 rtx inner_const;
3527
3528 if (y != 0
3529 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3530 && CONST_INT_P (inner_const)
3531 && INTVAL (inner_const) != 0)
3532 folded_arg0 = gen_rtx_IOR (mode_arg0, XEXP (y, 0), inner_const);
3533 }
3534
3535 {
3536 rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
3537 rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
3538 new_rtx = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
3539 }
3540 break;
3541
3542 case RTX_BIN_ARITH:
3543 case RTX_COMM_ARITH:
3544 switch (code)
3545 {
3546 case PLUS:
3547 /* If the second operand is a LABEL_REF, see if the first is a MINUS
3548 with that LABEL_REF as its second operand. If so, the result is
3549 the first operand of that MINUS. This handles switches with an
3550 ADDR_DIFF_VEC table. */
3551 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
3552 {
3553 rtx y
3554 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
3555 : lookup_as_function (folded_arg0, MINUS);
3556
3557 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3558 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
3559 return XEXP (y, 0);
3560
3561 /* Now try for a CONST of a MINUS like the above. */
3562 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
3563 : lookup_as_function (folded_arg0, CONST))) != 0
3564 && GET_CODE (XEXP (y, 0)) == MINUS
3565 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3566 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
3567 return XEXP (XEXP (y, 0), 0);
3568 }
3569
3570 /* Likewise if the operands are in the other order. */
3571 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
3572 {
3573 rtx y
3574 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
3575 : lookup_as_function (folded_arg1, MINUS);
3576
3577 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3578 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
3579 return XEXP (y, 0);
3580
3581 /* Now try for a CONST of a MINUS like the above. */
3582 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
3583 : lookup_as_function (folded_arg1, CONST))) != 0
3584 && GET_CODE (XEXP (y, 0)) == MINUS
3585 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3586 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
3587 return XEXP (XEXP (y, 0), 0);
3588 }
3589
3590 /* If second operand is a register equivalent to a negative
3591 CONST_INT, see if we can find a register equivalent to the
3592 positive constant. Make a MINUS if so. Don't do this for
3593 a non-negative constant since we might then alternate between
3594 choosing positive and negative constants. Having the positive
3595 constant previously-used is the more common case. Be sure
3596 the resulting constant is non-negative; if const_arg1 were
3597 the smallest negative number this would overflow: depending
3598 on the mode, this would either just be the same value (and
3599 hence not save anything) or be incorrect. */
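/* For example, if R1 is equivalent to (const_int -4) and some other
   register R2 is known to hold (const_int 4), then (plus X R1) can
   be rewritten as (minus X R2), reusing the already-available
   positive constant.  */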
3600 if (const_arg1 != 0 && CONST_INT_P (const_arg1)
3601 && INTVAL (const_arg1) < 0
3602 /* This used to test
3603
3604 -INTVAL (const_arg1) >= 0
3605
3606 But the Sun V5.0 compilers mis-compiled that test. So
3607 instead we test for the problematic value in a more direct
3608 manner and hope the Sun compilers get it correct. */
3609 && INTVAL (const_arg1) !=
3610 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
3611 && REG_P (folded_arg1))
3612 {
3613 rtx new_const = GEN_INT (-INTVAL (const_arg1));
3614 struct table_elt *p
3615 = lookup (new_const, SAFE_HASH (new_const, mode), mode);
3616
3617 if (p)
3618 for (p = p->first_same_value; p; p = p->next_same_value)
3619 if (REG_P (p->exp))
3620 return simplify_gen_binary (MINUS, mode, folded_arg0,
3621 canon_reg (p->exp, NULL_RTX));
3622 }
3623 goto from_plus;
3624
3625 case MINUS:
3626 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
3627 If so, produce (PLUS Z C2-C). */
3628 if (const_arg1 != 0 && CONST_INT_P (const_arg1))
3629 {
3630 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
3631 if (y && CONST_INT_P (XEXP (y, 1)))
3632 return fold_rtx (plus_constant (copy_rtx (y),
3633 -INTVAL (const_arg1)),
3634 NULL_RTX);
3635 }
3636
3637 /* Fall through. */
3638
3639 from_plus:
3640 case SMIN: case SMAX: case UMIN: case UMAX:
3641 case IOR: case AND: case XOR:
3642 case MULT:
3643 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
3644 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
3645 is known to be of similar form, we may be able to replace the
3646 operation with a combined operation. This may eliminate the
3647 intermediate operation if every use is simplified in this way.
3648 Note that the similar optimization done by combine.c only works
3649 if the intermediate operation's result has only one reference. */
3650
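/* For instance, if R1 is known to be (ashift R0 (const_int 2)),
   then (ashift R1 (const_int 3)) can be replaced by
   (ashift R0 (const_int 5)); shift counts compose by addition.  */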
3651 if (REG_P (folded_arg0)
3652 && const_arg1 && CONST_INT_P (const_arg1))
3653 {
3654 int is_shift
3655 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
3656 rtx y, inner_const, new_const;
3657 rtx canon_const_arg1 = const_arg1;
3658 enum rtx_code associate_code;
3659
3660 if (is_shift
3661 && (INTVAL (const_arg1) >= GET_MODE_BITSIZE (mode)
3662 || INTVAL (const_arg1) < 0))
3663 {
3664 if (SHIFT_COUNT_TRUNCATED)
3665 canon_const_arg1 = GEN_INT (INTVAL (const_arg1)
3666 & (GET_MODE_BITSIZE (mode)
3667 - 1));
3668 else
3669 break;
3670 }
3671
3672 y = lookup_as_function (folded_arg0, code);
3673 if (y == 0)
3674 break;
3675
3676 /* If we have compiled a statement like
3677 "if (x == (x & mask1))", and now are looking at
3678 "x & mask2", we will have a case where the first operand
3679 of Y is the same as our first operand. Unless we detect
3680 this case, an infinite loop will result. */
3681 if (XEXP (y, 0) == folded_arg0)
3682 break;
3683
3684 inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
3685 if (!inner_const || !CONST_INT_P (inner_const))
3686 break;
3687
3688 /* Don't associate these operations if they are a PLUS with the
3689 same constant and it is a power of two. These might be doable
3690 with a pre- or post-increment. Similarly for two subtracts of
3691 identical powers of two with post decrement. */
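/* E.g. two back-to-back (plus R (const_int 4)) updates of an address
   may later be realized as post-increment addressing, which combining
   the constants here would defeat.  */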
3692
3693 if (code == PLUS && const_arg1 == inner_const
3694 && ((HAVE_PRE_INCREMENT
3695 && exact_log2 (INTVAL (const_arg1)) >= 0)
3696 || (HAVE_POST_INCREMENT
3697 && exact_log2 (INTVAL (const_arg1)) >= 0)
3698 || (HAVE_PRE_DECREMENT
3699 && exact_log2 (- INTVAL (const_arg1)) >= 0)
3700 || (HAVE_POST_DECREMENT
3701 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
3702 break;
3703
3704 /* ??? Vector mode shifts by scalar
3705 shift operand are not supported yet. */
3706 if (is_shift && VECTOR_MODE_P (mode))
3707 break;
3708
3709 if (is_shift
3710 && (INTVAL (inner_const) >= GET_MODE_BITSIZE (mode)
3711 || INTVAL (inner_const) < 0))
3712 {
3713 if (SHIFT_COUNT_TRUNCATED)
3714 inner_const = GEN_INT (INTVAL (inner_const)
3715 & (GET_MODE_BITSIZE (mode) - 1));
3716 else
3717 break;
3718 }
3719
3720 /* Compute the code used to compose the constants. For example,
3721 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
3722
3723 associate_code = (is_shift || code == MINUS ? PLUS : code);
3724
3725 new_const = simplify_binary_operation (associate_code, mode,
3726 canon_const_arg1,
3727 inner_const);
3728
3729 if (new_const == 0)
3730 break;
3731
3732 /* If we are associating shift operations, don't let this
3733 produce a shift of the size of the object or larger.
3734 This could occur when we follow a sign-extend by a right
3735 shift on a machine that does a sign-extend as a pair
3736 of shifts. */
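/* E.g. on a 32-bit target, combining (ashiftrt (ashiftrt X 24) 8)
   would ask for a shift by 32; for ASHIFTRT, the shift by 31 used
   below yields the same all-sign-bits result.  */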
3737
3738 if (is_shift
3739 && CONST_INT_P (new_const)
3740 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
3741 {
3742 /* As an exception, we can turn an ASHIFTRT of this
3743 form into a shift of the number of bits - 1. */
3744 if (code == ASHIFTRT)
3745 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
3746 else if (!side_effects_p (XEXP (y, 0)))
3747 return CONST0_RTX (mode);
3748 else
3749 break;
3750 }
3751
3752 y = copy_rtx (XEXP (y, 0));
3753
3754 /* If Y contains our first operand (the most common way this
3755 can happen is if Y is a MEM), we would go into an infinite
3756 loop if we tried to fold it. So don't in that case. */
3757
3758 if (! reg_mentioned_p (folded_arg0, y))
3759 y = fold_rtx (y, insn);
3760
3761 return simplify_gen_binary (code, mode, y, new_const);
3762 }
3763 break;
3764
3765 case DIV: case UDIV:
3766 /* ??? The associative optimization performed immediately above is
3767 also possible for DIV and UDIV using associate_code of MULT.
3768 However, we would need extra code to verify that the
3769 multiplication does not overflow, that is, there is no overflow
3770 in the calculation of new_const. */
3771 break;
3772
3773 default:
3774 break;
3775 }
3776
3777 new_rtx = simplify_binary_operation (code, mode,
3778 const_arg0 ? const_arg0 : folded_arg0,
3779 const_arg1 ? const_arg1 : folded_arg1);
3780 break;
3781
3782 case RTX_OBJ:
3783 /* (lo_sum (high X) X) is simply X. */
3784 if (code == LO_SUM && const_arg0 != 0
3785 && GET_CODE (const_arg0) == HIGH
3786 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
3787 return const_arg1;
3788 break;
3789
3790 case RTX_TERNARY:
3791 case RTX_BITFIELD_OPS:
3792 new_rtx = simplify_ternary_operation (code, mode, mode_arg0,
3793 const_arg0 ? const_arg0 : folded_arg0,
3794 const_arg1 ? const_arg1 : folded_arg1,
3795 const_arg2 ? const_arg2 : XEXP (x, 2));
3796 break;
3797
3798 default:
3799 break;
3800 }
3801
3802 return new_rtx ? new_rtx : x;
3803 }
3804 \f
3805 /* Return a constant value currently equivalent to X.
3806 Return 0 if we don't know one. */
3807
3808 static rtx
3809 equiv_constant (rtx x)
3810 {
3811 if (REG_P (x)
3812 && REGNO_QTY_VALID_P (REGNO (x)))
3813 {
3814 int x_q = REG_QTY (REGNO (x));
3815 struct qty_table_elem *x_ent = &qty_table[x_q];
3816
3817 if (x_ent->const_rtx)
3818 x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
3819 }
3820
3821 if (x == 0 || CONSTANT_P (x))
3822 return x;
3823
3824 if (GET_CODE (x) == SUBREG)
3825 {
3826 enum machine_mode mode = GET_MODE (x);
3827 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3828 rtx new_rtx;
3829
3830 /* See if we previously assigned a constant value to this SUBREG. */
3831 if ((new_rtx = lookup_as_function (x, CONST_INT)) != 0
3832 || (new_rtx = lookup_as_function (x, CONST_DOUBLE)) != 0
3833 || (new_rtx = lookup_as_function (x, CONST_FIXED)) != 0)
3834 return new_rtx;
3835
3836 /* If we didn't and if doing so makes sense, see if we previously
3837 assigned a constant value to the enclosing word mode SUBREG. */
3838 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode)
3839 && GET_MODE_SIZE (word_mode) < GET_MODE_SIZE (imode))
3840 {
3841 int byte = SUBREG_BYTE (x) - subreg_lowpart_offset (mode, word_mode);
3842 if (byte >= 0 && (byte % UNITS_PER_WORD) == 0)
3843 {
3844 rtx y = gen_rtx_SUBREG (word_mode, SUBREG_REG (x), byte);
3845 new_rtx = lookup_as_function (y, CONST_INT);
3846 if (new_rtx)
3847 return gen_lowpart (mode, new_rtx);
3848 }
3849 }
3850
3851 /* Otherwise see if we already have a constant for the inner REG. */
3852 if (REG_P (SUBREG_REG (x))
3853 && (new_rtx = equiv_constant (SUBREG_REG (x))) != 0)
3854 return simplify_subreg (mode, new_rtx, imode, SUBREG_BYTE (x));
3855
3856 return 0;
3857 }
3858
3859 /* If X is a MEM, see if it is a constant-pool reference, or look it up in
3860 the hash table in case its value was seen before. */
3861
3862 if (MEM_P (x))
3863 {
3864 struct table_elt *elt;
3865
3866 x = avoid_constant_pool_reference (x);
3867 if (CONSTANT_P (x))
3868 return x;
3869
3870 elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
3871 if (elt == 0)
3872 return 0;
3873
3874 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3875 if (elt->is_const && CONSTANT_P (elt->exp))
3876 return elt->exp;
3877 }
3878
3879 return 0;
3880 }
3881 \f
3882 /* Given INSN, a jump insn, TAKEN indicates if we are following the
3883 "taken" branch.
3884
3885 In certain cases, this can cause us to add an equivalence. For example,
3886 if we are following the taken case of
3887 if (i == 2)
3888 we can add the fact that `i' and '2' are now equivalent.
3889
3890 In any case, we can record that this comparison was passed. If the same
3891 comparison is seen later, we will know its value. */
3892
3893 static void
3894 record_jump_equiv (rtx insn, bool taken)
3895 {
3896 int cond_known_true;
3897 rtx op0, op1;
3898 rtx set;
3899 enum machine_mode mode, mode0, mode1;
3900 int reversed_nonequality = 0;
3901 enum rtx_code code;
3902
3903 /* Ensure this is the right kind of insn. */
3904 gcc_assert (any_condjump_p (insn));
3905
3906 set = pc_set (insn);
3907
3908 /* See if this jump condition is known true or false. */
3909 if (taken)
3910 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
3911 else
3912 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
3913
3914 /* Get the type of comparison being done and the operands being compared.
3915 If we had to reverse a non-equality condition, record that fact so we
3916 know that it isn't valid for floating-point. */
3917 code = GET_CODE (XEXP (SET_SRC (set), 0));
3918 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
3919 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
3920
3921 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
3922 if (! cond_known_true)
3923 {
3924 code = reversed_comparison_code_parts (code, op0, op1, insn);
3925
3926 /* Don't remember if we can't find the inverse. */
3927 if (code == UNKNOWN)
3928 return;
3929 }
3930
3931 /* The mode is the mode of the non-constant. */
3932 mode = mode0;
3933 if (mode1 != VOIDmode)
3934 mode = mode1;
3935
3936 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
3937 }
3938
3939 /* Yet another form of subreg creation. In this case, we want something in
3940 MODE, and we should assume OP has MODE iff it is naturally modeless. */
3941
3942 static rtx
3943 record_jump_cond_subreg (enum machine_mode mode, rtx op)
3944 {
3945 enum machine_mode op_mode = GET_MODE (op);
3946 if (op_mode == mode || op_mode == VOIDmode)
3947 return op;
3948 return lowpart_subreg (mode, op, op_mode);
3949 }
3950
3951 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
3952 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
3953 Make any useful entries we can with that information. Called from
3954 above function and called recursively. */
3955
3956 static void
3957 record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
3958 rtx op1, int reversed_nonequality)
3959 {
3960 unsigned op0_hash, op1_hash;
3961 int op0_in_memory, op1_in_memory;
3962 struct table_elt *op0_elt, *op1_elt;
3963
3964 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
3965 we know that they are also equal in the smaller mode (this is also
3966 true for all smaller modes whether or not there is a SUBREG, but
3967 is not worth testing for with no SUBREG). */
3968
3969 /* Note that GET_MODE (op0) may not equal MODE. */
3970 if (code == EQ && GET_CODE (op0) == SUBREG
3971 && (GET_MODE_SIZE (GET_MODE (op0))
3972 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
3973 {
3974 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
3975 rtx tem = record_jump_cond_subreg (inner_mode, op1);
3976 if (tem)
3977 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
3978 reversed_nonequality);
3979 }
3980
3981 if (code == EQ && GET_CODE (op1) == SUBREG
3982 && (GET_MODE_SIZE (GET_MODE (op1))
3983 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
3984 {
3985 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
3986 rtx tem = record_jump_cond_subreg (inner_mode, op0);
3987 if (tem)
3988 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
3989 reversed_nonequality);
3990 }
3991
3992 /* Similarly, if this is an NE comparison, and either is a SUBREG
3993 making a smaller mode, we know the whole thing is also NE. */
3994
3995 /* Note that GET_MODE (op0) may not equal MODE;
3996 if we test MODE instead, we can get an infinite recursion
3997 alternating between two modes each wider than MODE. */
3998
3999 if (code == NE && GET_CODE (op0) == SUBREG
4000 && subreg_lowpart_p (op0)
4001 && (GET_MODE_SIZE (GET_MODE (op0))
4002 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4003 {
4004 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4005 rtx tem = record_jump_cond_subreg (inner_mode, op1);
4006 if (tem)
4007 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4008 reversed_nonequality);
4009 }
4010
4011 if (code == NE && GET_CODE (op1) == SUBREG
4012 && subreg_lowpart_p (op1)
4013 && (GET_MODE_SIZE (GET_MODE (op1))
4014 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4015 {
4016 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4017 rtx tem = record_jump_cond_subreg (inner_mode, op0);
4018 if (tem)
4019 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4020 reversed_nonequality);
4021 }
4022
4023 /* Hash both operands. */
4024
4025 do_not_record = 0;
4026 hash_arg_in_memory = 0;
4027 op0_hash = HASH (op0, mode);
4028 op0_in_memory = hash_arg_in_memory;
4029
4030 if (do_not_record)
4031 return;
4032
4033 do_not_record = 0;
4034 hash_arg_in_memory = 0;
4035 op1_hash = HASH (op1, mode);
4036 op1_in_memory = hash_arg_in_memory;
4037
4038 if (do_not_record)
4039 return;
4040
4041 /* Look up both operands. */
4042 op0_elt = lookup (op0, op0_hash, mode);
4043 op1_elt = lookup (op1, op1_hash, mode);
4044
4045 /* If both operands are already equivalent or if they are not in the
4046 table but are identical, do nothing. */
4047 if ((op0_elt != 0 && op1_elt != 0
4048 && op0_elt->first_same_value == op1_elt->first_same_value)
4049 || op0 == op1 || rtx_equal_p (op0, op1))
4050 return;
4051
4052 /* If we aren't setting two things equal, all we can do is save this
4053 comparison. Similarly if this is floating-point. In the latter
4054 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4055 If we record the equality, we might inadvertently delete code
4056 whose intent was to change -0 to +0. */
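/* For example, `if (d == 0.0) d = 0.0;' is not a no-op under IEEE
   arithmetic: -0.0 compares equal to 0.0, yet the store changes
   the sign bit.  */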
4057
4058 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4059 {
4060 struct qty_table_elem *ent;
4061 int qty;
4062
4063 /* If we reversed a floating-point comparison, if OP0 is not a
4064 register, or if OP1 is neither a register nor a constant, we can't
4065 do anything. */
4066
4067 if (!REG_P (op1))
4068 op1 = equiv_constant (op1);
4069
4070 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4071 || !REG_P (op0) || op1 == 0)
4072 return;
4073
4074 /* Put OP0 in the hash table if it isn't already. This gives it a
4075 new quantity number. */
4076 if (op0_elt == 0)
4077 {
4078 if (insert_regs (op0, NULL, 0))
4079 {
4080 rehash_using_reg (op0);
4081 op0_hash = HASH (op0, mode);
4082
4083 /* If OP0 is contained in OP1, this changes its hash code
4084 as well. Faster to rehash than to check, except
4085 for the simple case of a constant. */
4086 if (! CONSTANT_P (op1))
4087 op1_hash = HASH (op1, mode);
4088 }
4089
4090 op0_elt = insert (op0, NULL, op0_hash, mode);
4091 op0_elt->in_memory = op0_in_memory;
4092 }
4093
4094 qty = REG_QTY (REGNO (op0));
4095 ent = &qty_table[qty];
4096
4097 ent->comparison_code = code;
4098 if (REG_P (op1))
4099 {
4100 /* Look it up again--in case op0 and op1 are the same. */
4101 op1_elt = lookup (op1, op1_hash, mode);
4102
4103 /* Put OP1 in the hash table so it gets a new quantity number. */
4104 if (op1_elt == 0)
4105 {
4106 if (insert_regs (op1, NULL, 0))
4107 {
4108 rehash_using_reg (op1);
4109 op1_hash = HASH (op1, mode);
4110 }
4111
4112 op1_elt = insert (op1, NULL, op1_hash, mode);
4113 op1_elt->in_memory = op1_in_memory;
4114 }
4115
4116 ent->comparison_const = NULL_RTX;
4117 ent->comparison_qty = REG_QTY (REGNO (op1));
4118 }
4119 else
4120 {
4121 ent->comparison_const = op1;
4122 ent->comparison_qty = -1;
4123 }
4124
4125 return;
4126 }
4127
4128 /* If either side is still missing an equivalence, make it now,
4129 then merge the equivalences. */
4130
4131 if (op0_elt == 0)
4132 {
4133 if (insert_regs (op0, NULL, 0))
4134 {
4135 rehash_using_reg (op0);
4136 op0_hash = HASH (op0, mode);
4137 }
4138
4139 op0_elt = insert (op0, NULL, op0_hash, mode);
4140 op0_elt->in_memory = op0_in_memory;
4141 }
4142
4143 if (op1_elt == 0)
4144 {
4145 if (insert_regs (op1, NULL, 0))
4146 {
4147 rehash_using_reg (op1);
4148 op1_hash = HASH (op1, mode);
4149 }
4150
4151 op1_elt = insert (op1, NULL, op1_hash, mode);
4152 op1_elt->in_memory = op1_in_memory;
4153 }
4154
4155 merge_equiv_classes (op0_elt, op1_elt);
4156 }
4157 \f
4158 /* CSE processing for one instruction.
4159 First simplify sources and addresses of all assignments
4160 in the instruction, using previously-computed equivalent values.
4161 Then install the new sources and destinations in the table
4162 of available values. */
4163
4164 /* Data on one SET contained in the instruction. */
4165
4166 struct set
4167 {
4168 /* The SET rtx itself. */
4169 rtx rtl;
4170 /* The SET_SRC of the rtx (the original value, if it is changing). */
4171 rtx src;
4172 /* The hash-table element for the SET_SRC of the SET. */
4173 struct table_elt *src_elt;
4174 /* Hash value for the SET_SRC. */
4175 unsigned src_hash;
4176 /* Hash value for the SET_DEST. */
4177 unsigned dest_hash;
4178 /* The SET_DEST, with SUBREG, etc., stripped. */
4179 rtx inner_dest;
4180 /* Nonzero if the SET_SRC is in memory. */
4181 char src_in_memory;
4182 /* Nonzero if the SET_SRC contains something
4183 whose value cannot be predicted and understood. */
4184 char src_volatile;
4185 /* Original machine mode, in case it becomes a CONST_INT.
4186 The size of this field should match the size of the mode
4187 field of struct rtx_def (see rtl.h). */
4188 ENUM_BITFIELD(machine_mode) mode : 8;
4189 /* A constant equivalent for SET_SRC, if any. */
4190 rtx src_const;
4191 /* Hash value of constant equivalent for SET_SRC. */
4192 unsigned src_const_hash;
4193 /* Table entry for constant equivalent for SET_SRC, if any. */
4194 struct table_elt *src_const_elt;
4195 /* Table entry for the destination address. */
4196 struct table_elt *dest_addr_elt;
4197 };
4198
4199 static void
4200 cse_insn (rtx insn)
4201 {
4202 rtx x = PATTERN (insn);
4203 int i;
4204 rtx tem;
4205 int n_sets = 0;
4206
4207 rtx src_eqv = 0;
4208 struct table_elt *src_eqv_elt = 0;
4209 int src_eqv_volatile = 0;
4210 int src_eqv_in_memory = 0;
4211 unsigned src_eqv_hash = 0;
4212
4213 struct set *sets = (struct set *) 0;
4214
4215 this_insn = insn;
4216 #ifdef HAVE_cc0
4217 /* Records what this insn does to set CC0. */
4218 this_insn_cc0 = 0;
4219 this_insn_cc0_mode = VOIDmode;
4220 #endif
4221
4222 /* Find all the SETs and CLOBBERs in this instruction.
4223 Record all the SETs in the array `sets' and count them.
4224 Also determine whether there is a CLOBBER that invalidates
4225 all memory references, or all references at varying addresses. */
4226
4227 if (CALL_P (insn))
4228 {
4229 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4230 {
4231 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4232 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4233 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4234 }
4235 }
4236
4237 if (GET_CODE (x) == SET)
4238 {
4239 sets = XALLOCA (struct set);
4240 sets[0].rtl = x;
4241
4242 /* Ignore SETs that are unconditional jumps.
4243 They never need cse processing, so this does not hurt.
4244 The reason is not efficiency but rather
4245 so that we can test at the end for instructions
4246 that have been simplified to unconditional jumps
4247 and not be misled by unchanged instructions
4248 that were unconditional jumps to begin with. */
4249 if (SET_DEST (x) == pc_rtx
4250 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4251 ;
4252
4253 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4254 The hard function value register is used only once, to copy to
4255 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4256 Ensure we invalidate the destination register. On the 80386 no
4257 other code would invalidate it since it is a fixed_reg.
4258 We need not check the return of apply_change_group; see canon_reg. */
4259
4260 else if (GET_CODE (SET_SRC (x)) == CALL)
4261 {
4262 canon_reg (SET_SRC (x), insn);
4263 apply_change_group ();
4264 fold_rtx (SET_SRC (x), insn);
4265 invalidate (SET_DEST (x), VOIDmode);
4266 }
4267 else
4268 n_sets = 1;
4269 }
4270 else if (GET_CODE (x) == PARALLEL)
4271 {
4272 int lim = XVECLEN (x, 0);
4273
4274 sets = XALLOCAVEC (struct set, lim);
4275
4276 /* Find all regs explicitly clobbered in this insn,
4277 and ensure they are not replaced with any other regs
4278 elsewhere in this insn.
4279 When a reg that is clobbered is also used for input,
4280 we should presume that that is for a reason,
4281 and we should not substitute some other register
4282 which is not supposed to be clobbered.
4283 Therefore, this loop cannot be merged into the one below
4284 because a CALL may precede a CLOBBER and refer to the
4285 value clobbered. We must not let a canonicalization do
4286 anything in that case. */
4287 for (i = 0; i < lim; i++)
4288 {
4289 rtx y = XVECEXP (x, 0, i);
4290 if (GET_CODE (y) == CLOBBER)
4291 {
4292 rtx clobbered = XEXP (y, 0);
4293
4294 if (REG_P (clobbered)
4295 || GET_CODE (clobbered) == SUBREG)
4296 invalidate (clobbered, VOIDmode);
4297 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4298 || GET_CODE (clobbered) == ZERO_EXTRACT)
4299 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4300 }
4301 }
4302
4303 for (i = 0; i < lim; i++)
4304 {
4305 rtx y = XVECEXP (x, 0, i);
4306 if (GET_CODE (y) == SET)
4307 {
4308 /* As above, we ignore unconditional jumps and call-insns and
4309 ignore the result of apply_change_group. */
4310 if (GET_CODE (SET_SRC (y)) == CALL)
4311 {
4312 canon_reg (SET_SRC (y), insn);
4313 apply_change_group ();
4314 fold_rtx (SET_SRC (y), insn);
4315 invalidate (SET_DEST (y), VOIDmode);
4316 }
4317 else if (SET_DEST (y) == pc_rtx
4318 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4319 ;
4320 else
4321 sets[n_sets++].rtl = y;
4322 }
4323 else if (GET_CODE (y) == CLOBBER)
4324 {
4325 /* If we clobber memory, canon the address.
4326 This does nothing when a register is clobbered
4327 because we have already invalidated the reg. */
4328 if (MEM_P (XEXP (y, 0)))
4329 canon_reg (XEXP (y, 0), insn);
4330 }
4331 else if (GET_CODE (y) == USE
4332 && ! (REG_P (XEXP (y, 0))
4333 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4334 canon_reg (y, insn);
4335 else if (GET_CODE (y) == CALL)
4336 {
4337 /* The result of apply_change_group can be ignored; see
4338 canon_reg. */
4339 canon_reg (y, insn);
4340 apply_change_group ();
4341 fold_rtx (y, insn);
4342 }
4343 }
4344 }
4345 else if (GET_CODE (x) == CLOBBER)
4346 {
4347 if (MEM_P (XEXP (x, 0)))
4348 canon_reg (XEXP (x, 0), insn);
4349 }
4350 /* Canonicalize a USE of a pseudo register or memory location. */
4351 else if (GET_CODE (x) == USE
4352 && ! (REG_P (XEXP (x, 0))
4353 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4354 canon_reg (x, insn);
4355 else if (GET_CODE (x) == ASM_OPERANDS)
4356 {
4357 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
4358 {
4359 rtx input = ASM_OPERANDS_INPUT (x, i);
4360 if (!(REG_P (input) && REGNO (input) < FIRST_PSEUDO_REGISTER))
4361 {
4362 input = canon_reg (input, insn);
4363 validate_change (insn, &ASM_OPERANDS_INPUT (x, i), input, 1);
4364 }
4365 }
4366 }
4367 else if (GET_CODE (x) == CALL)
4368 {
4369 /* The result of apply_change_group can be ignored; see canon_reg. */
4370 canon_reg (x, insn);
4371 apply_change_group ();
4372 fold_rtx (x, insn);
4373 }
4374 else if (DEBUG_INSN_P (insn))
4375 canon_reg (PATTERN (insn), insn);
4376
4377 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4378 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4379 is handled specially for this case, and if it isn't set, then there will
4380 be no equivalence for the destination. */
4381 if (n_sets == 1 && REG_NOTES (insn) != 0
4382 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4383 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4384 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4385 {
4386 /* The result of apply_change_group can be ignored; see canon_reg. */
4387 canon_reg (XEXP (tem, 0), insn);
4388 apply_change_group ();
4389 src_eqv = fold_rtx (XEXP (tem, 0), insn);
4390 XEXP (tem, 0) = copy_rtx (src_eqv);
4391 df_notes_rescan (insn);
4392 }
4393
4394 /* Canonicalize sources and addresses of destinations.
4395 We do this in a separate pass to avoid problems when a MATCH_DUP is
4396 present in the insn pattern. In that case, we want to ensure that
4397 we don't break the duplicate nature of the pattern. So we will replace
4398 both operands at the same time. Otherwise, we would fail to find an
4399 equivalent substitution in the loop calling validate_change below.
4400
4401 We used to suppress canonicalization of DEST if it appears in SRC,
4402 but we don't do this any more. */
4403
4404 for (i = 0; i < n_sets; i++)
4405 {
4406 rtx dest = SET_DEST (sets[i].rtl);
4407 rtx src = SET_SRC (sets[i].rtl);
4408 rtx new_rtx = canon_reg (src, insn);
4409
4410 validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
4411
4412 if (GET_CODE (dest) == ZERO_EXTRACT)
4413 {
4414 validate_change (insn, &XEXP (dest, 1),
4415 canon_reg (XEXP (dest, 1), insn), 1);
4416 validate_change (insn, &XEXP (dest, 2),
4417 canon_reg (XEXP (dest, 2), insn), 1);
4418 }
4419
4420 while (GET_CODE (dest) == SUBREG
4421 || GET_CODE (dest) == ZERO_EXTRACT
4422 || GET_CODE (dest) == STRICT_LOW_PART)
4423 dest = XEXP (dest, 0);
4424
4425 if (MEM_P (dest))
4426 canon_reg (dest, insn);
4427 }
4428
4429 /* Now that we have done all the replacements, we can apply the change
4430 group and see if they all work. Note that this will cause some
4431 canonicalizations that would have worked individually not to be applied
4432 because some other canonicalization didn't work, but this should not
4433 occur often.
4434
4435 The result of apply_change_group can be ignored; see canon_reg. */
4436
4437 apply_change_group ();
4438
4439 /* Set sets[i].src_elt to the class each source belongs to.
4440 Detect assignments from or to volatile things
4441 and set sets[i] to zero so they will be ignored
4442 in the rest of this function.
4443
4444 Nothing in this loop changes the hash table or the register chains. */
4445
4446 for (i = 0; i < n_sets; i++)
4447 {
4448 bool repeat = false;
4449 rtx src, dest;
4450 rtx src_folded;
4451 struct table_elt *elt = 0, *p;
4452 enum machine_mode mode;
4453 rtx src_eqv_here;
4454 rtx src_const = 0;
4455 rtx src_related = 0;
4456 bool src_related_is_const_anchor = false;
4457 struct table_elt *src_const_elt = 0;
4458 int src_cost = MAX_COST;
4459 int src_eqv_cost = MAX_COST;
4460 int src_folded_cost = MAX_COST;
4461 int src_related_cost = MAX_COST;
4462 int src_elt_cost = MAX_COST;
4463 int src_regcost = MAX_COST;
4464 int src_eqv_regcost = MAX_COST;
4465 int src_folded_regcost = MAX_COST;
4466 int src_related_regcost = MAX_COST;
4467 int src_elt_regcost = MAX_COST;
4468 /* Set nonzero if we need to call force_const_mem on the
4469 contents of src_folded before using it. */
4470 int src_folded_force_flag = 0;
4471
4472 dest = SET_DEST (sets[i].rtl);
4473 src = SET_SRC (sets[i].rtl);
4474
4475 /* If SRC is a constant that has no machine mode,
4476 hash it with the destination's machine mode.
4477 This way we can keep different modes separate. */
4478
4479 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4480 sets[i].mode = mode;
4481
4482 if (src_eqv)
4483 {
4484 enum machine_mode eqvmode = mode;
4485 if (GET_CODE (dest) == STRICT_LOW_PART)
4486 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4487 do_not_record = 0;
4488 hash_arg_in_memory = 0;
4489 src_eqv_hash = HASH (src_eqv, eqvmode);
4490
4491 /* Find the equivalence class for the equivalent expression. */
4492
4493 if (!do_not_record)
4494 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4495
4496 src_eqv_volatile = do_not_record;
4497 src_eqv_in_memory = hash_arg_in_memory;
4498 }
4499
4500 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4501 value of the INNER register, not the destination. So it is not
4502 a valid substitution for the source. But save it for later. */
4503 if (GET_CODE (dest) == STRICT_LOW_PART)
4504 src_eqv_here = 0;
4505 else
4506 src_eqv_here = src_eqv;
4507
4508 /* Simplify any foldable subexpressions in SRC. Then get the fully-
4509 simplified result, which may not necessarily be valid. */
4510 src_folded = fold_rtx (src, insn);
4511
4512 #if 0
4513 /* ??? This caused bad code to be generated for the m68k port with -O2.
4514 Suppose src is (CONST_INT -1), and that after truncation src_folded
4515 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4516 At the end we will add src and src_const to the same equivalence
4517 class. We now have 3 and -1 on the same equivalence class. This
4518 causes later instructions to be mis-optimized. */
4519 /* If storing a constant in a bitfield, pre-truncate the constant
4520 so we will be able to record it later. */
4521 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
4522 {
4523 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4524
4525 if (CONST_INT_P (src)
4526 && CONST_INT_P (width)
4527 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4528 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4529 src_folded
4530 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4531 << INTVAL (width)) - 1));
4532 }
4533 #endif
4534
4535 /* Compute SRC's hash code, and also notice if it
4536 should not be recorded at all. In that case,
4537 prevent any further processing of this assignment. */
4538 do_not_record = 0;
4539 hash_arg_in_memory = 0;
4540
4541 sets[i].src = src;
4542 sets[i].src_hash = HASH (src, mode);
4543 sets[i].src_volatile = do_not_record;
4544 sets[i].src_in_memory = hash_arg_in_memory;
4545
4546 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4547 a pseudo, do not record SRC. Using SRC as a replacement for
4548 anything else will be incorrect in that situation. Note that
4549 this usually occurs only for stack slots, in which case all the
4550 RTL would be referring to SRC, so we don't lose any optimization
4551 opportunities by not having SRC in the hash table. */
4552
4553 if (MEM_P (src)
4554 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
4555 && REG_P (dest)
4556 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
4557 sets[i].src_volatile = 1;
4558
4559 #if 0
4560 /* It is no longer clear why we used to do this, but it doesn't
4561 appear to still be needed. So let's try without it since this
4562 code hurts cse'ing widened ops. */
4563 /* If source is a paradoxical subreg (such as QI treated as an SI),
4564 treat it as volatile. It may do the work of an SI in one context
4565 where the extra bits are not being used, but cannot replace an SI
4566 in general. */
4567 if (GET_CODE (src) == SUBREG
4568 && (GET_MODE_SIZE (GET_MODE (src))
4569 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
4570 sets[i].src_volatile = 1;
4571 #endif
4572
4573 /* Locate all possible equivalent forms for SRC. Try to replace
4574 SRC in the insn with each cheaper equivalent.
4575
4576 We have the following types of equivalents: SRC itself, a folded
4577 version, a value given in a REG_EQUAL note, or a value related
4578 to a constant.
4579
4580 Each of these equivalents may be part of an additional class
4581 of equivalents (if more than one is in the table, they must be in
4582 the same class; we check for this).
4583
4584 If the source is volatile, we don't do any table lookups.
4585
4586 We note any constant equivalent for possible later use in a
4587 REG_NOTE. */
4588
4589 if (!sets[i].src_volatile)
4590 elt = lookup (src, sets[i].src_hash, mode);
4591
4592 sets[i].src_elt = elt;
4593
4594 if (elt && src_eqv_here && src_eqv_elt)
4595 {
4596 if (elt->first_same_value != src_eqv_elt->first_same_value)
4597 {
4598 /* The REG_EQUAL is indicating that two formerly distinct
4599 classes are now equivalent. So merge them. */
4600 merge_equiv_classes (elt, src_eqv_elt);
4601 src_eqv_hash = HASH (src_eqv, elt->mode);
4602 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
4603 }
4604
4605 src_eqv_here = 0;
4606 }
4607
4608 else if (src_eqv_elt)
4609 elt = src_eqv_elt;
4610
4611 /* Try to find a constant somewhere and record it in `src_const'.
4612 Record its table element, if any, in `src_const_elt'. Look in
4613 any known equivalences first. (If the constant is not in the
4614 table, also set `sets[i].src_const_hash'). */
4615 if (elt)
4616 for (p = elt->first_same_value; p; p = p->next_same_value)
4617 if (p->is_const)
4618 {
4619 src_const = p->exp;
4620 src_const_elt = elt;
4621 break;
4622 }
4623
4624 if (src_const == 0
4625 && (CONSTANT_P (src_folded)
4626 /* Consider (minus (label_ref L1) (label_ref L2)) as
4627 "constant" here so we will record it. This allows us
4628 to fold switch statements when an ADDR_DIFF_VEC is used. */
4629 || (GET_CODE (src_folded) == MINUS
4630 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
4631 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
4632 src_const = src_folded, src_const_elt = elt;
4633 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
4634 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
4635
4636 /* If we don't know if the constant is in the table, get its
4637 hash code and look it up. */
4638 if (src_const && src_const_elt == 0)
4639 {
4640 sets[i].src_const_hash = HASH (src_const, mode);
4641 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
4642 }
4643
4644 sets[i].src_const = src_const;
4645 sets[i].src_const_elt = src_const_elt;
4646
4647 /* If the constant and our source are both in the table, mark them as
4648 equivalent. Otherwise, if a constant is in the table but the source
4649 isn't, set ELT to it. */
4650 if (src_const_elt && elt
4651 && src_const_elt->first_same_value != elt->first_same_value)
4652 merge_equiv_classes (elt, src_const_elt);
4653 else if (src_const_elt && elt == 0)
4654 elt = src_const_elt;
4655
4656 /* See if there is a register linearly related to a constant
4657 equivalent of SRC. */
4658 if (src_const
4659 && (GET_CODE (src_const) == CONST
4660 || (src_const_elt && src_const_elt->related_value != 0)))
4661 {
4662 src_related = use_related_value (src_const, src_const_elt);
4663 if (src_related)
4664 {
4665 struct table_elt *src_related_elt
4666 = lookup (src_related, HASH (src_related, mode), mode);
4667 if (src_related_elt && elt)
4668 {
4669 if (elt->first_same_value
4670 != src_related_elt->first_same_value)
4671 /* This can occur when we previously saw a CONST
4672 involving a SYMBOL_REF and then see the SYMBOL_REF
4673 twice. Merge the involved classes. */
4674 merge_equiv_classes (elt, src_related_elt);
4675
4676 src_related = 0;
4677 src_related_elt = 0;
4678 }
4679 else if (src_related_elt && elt == 0)
4680 elt = src_related_elt;
4681 }
4682 }
4683
4684 /* See if we have a CONST_INT that is already in a register in a
4685 wider mode. */
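/* For example, if (reg:SI R) is already known to hold (const_int 7),
   a QImode 7 can be taken as the low part of R rather than loading
   the constant again.  */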
4686
4687 if (src_const && src_related == 0 && CONST_INT_P (src_const)
4688 && GET_MODE_CLASS (mode) == MODE_INT
4689 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
4690 {
4691 enum machine_mode wider_mode;
4692
4693 for (wider_mode = GET_MODE_WIDER_MODE (mode);
4694 wider_mode != VOIDmode
4695 && GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
4696 && src_related == 0;
4697 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4698 {
4699 struct table_elt *const_elt
4700 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
4701
4702 if (const_elt == 0)
4703 continue;
4704
4705 for (const_elt = const_elt->first_same_value;
4706 const_elt; const_elt = const_elt->next_same_value)
4707 if (REG_P (const_elt->exp))
4708 {
4709 src_related = gen_lowpart (mode, const_elt->exp);
4710 break;
4711 }
4712 }
4713 }
4714
4715 /* Another possibility is that we have an AND with a constant in
4716 a mode narrower than a word. If so, it might have been generated
4717 as part of an "if" which would narrow the AND. If we already
4718 have done the AND in a wider mode, we can use a SUBREG of that
4719 value. */
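/* For instance, if the SImode (and:SI R1' (const_int 15)) is already
   available in a register, the QImode (and:QI R1 (const_int 15)) can
   simply use the low part of that register.  */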
4720
4721 if (flag_expensive_optimizations && ! src_related
4722 && GET_CODE (src) == AND && CONST_INT_P (XEXP (src, 1))
4723 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4724 {
4725 enum machine_mode tmode;
4726 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
4727
4728 for (tmode = GET_MODE_WIDER_MODE (mode);
4729 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4730 tmode = GET_MODE_WIDER_MODE (tmode))
4731 {
4732 rtx inner = gen_lowpart (tmode, XEXP (src, 0));
4733 struct table_elt *larger_elt;
4734
4735 if (inner)
4736 {
4737 PUT_MODE (new_and, tmode);
4738 XEXP (new_and, 0) = inner;
4739 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
4740 if (larger_elt == 0)
4741 continue;
4742
4743 for (larger_elt = larger_elt->first_same_value;
4744 larger_elt; larger_elt = larger_elt->next_same_value)
4745 if (REG_P (larger_elt->exp))
4746 {
4747 src_related
4748 = gen_lowpart (mode, larger_elt->exp);
4749 break;
4750 }
4751
4752 if (src_related)
4753 break;
4754 }
4755 }
4756 }
4757
4758 #ifdef LOAD_EXTEND_OP
4759 /* See if a MEM has already been loaded with a widening operation;
4760 if it has, we can use a subreg of that. Many CISC machines
4761 also have such operations, but this is only likely to be
4762 beneficial on these machines. */
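/* E.g. if LOAD_EXTEND_OP (QImode) is ZERO_EXTEND and
   (zero_extend:SI (mem:QI A)) already lives in a register, the
   QImode load can reuse the low part of that register.  */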
4763
4764 if (flag_expensive_optimizations && src_related == 0
4765 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4766 && GET_MODE_CLASS (mode) == MODE_INT
4767 && MEM_P (src) && ! do_not_record
4768 && LOAD_EXTEND_OP (mode) != UNKNOWN)
4769 {
4770 struct rtx_def memory_extend_buf;
4771 rtx memory_extend_rtx = &memory_extend_buf;
4772 enum machine_mode tmode;
4773
4774 /* Set what we are trying to extend and the operation it might
4775 have been extended with. */
4776 memset (memory_extend_rtx, 0, sizeof(*memory_extend_rtx));
4777 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
4778 XEXP (memory_extend_rtx, 0) = src;
4779
4780 for (tmode = GET_MODE_WIDER_MODE (mode);
4781 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4782 tmode = GET_MODE_WIDER_MODE (tmode))
4783 {
4784 struct table_elt *larger_elt;
4785
4786 PUT_MODE (memory_extend_rtx, tmode);
4787 larger_elt = lookup (memory_extend_rtx,
4788 HASH (memory_extend_rtx, tmode), tmode);
4789 if (larger_elt == 0)
4790 continue;
4791
4792 for (larger_elt = larger_elt->first_same_value;
4793 larger_elt; larger_elt = larger_elt->next_same_value)
4794 if (REG_P (larger_elt->exp))
4795 {
4796 src_related = gen_lowpart (mode, larger_elt->exp);
4797 break;
4798 }
4799
4800 if (src_related)
4801 break;
4802 }
4803 }
4804 #endif /* LOAD_EXTEND_OP */
4805
4806 /* Try to express the constant using a register+offset expression
4807 derived from a constant anchor. */
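/* E.g. if some register R is already known to hold 0x12340000, the
   constant 0x12340004 may be reachable as (plus R (const_int 4)),
   which is typically cheaper than materializing it from scratch.  */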
4808
4809 if (targetm.const_anchor
4810 && !src_related
4811 && src_const
4812 && GET_CODE (src_const) == CONST_INT)
4813 {
4814 src_related = try_const_anchors (src_const, mode);
4815 src_related_is_const_anchor = src_related != NULL_RTX;
4816 }
4817
4818
4819 if (src == src_folded)
4820 src_folded = 0;
4821
4822 /* At this point, ELT, if nonzero, points to a class of expressions
4823 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
4824 and SRC_RELATED, if nonzero, each contain additional equivalent
4825 expressions. Prune these latter expressions by deleting expressions
4826 already in the equivalence class.
4827
4828 Check for an equivalent identical to the destination. If found,
4829 this is the preferred equivalent since it will likely lead to
4830 elimination of the insn. Indicate this by placing it in
4831 `src_related'. */
4832
4833 if (elt)
4834 elt = elt->first_same_value;
4835 for (p = elt; p; p = p->next_same_value)
4836 {
4837 enum rtx_code code = GET_CODE (p->exp);
4838
4839 /* If the expression is not valid, ignore it. Then we do not
4840 have to check for validity below. In most cases, we can use
4841 `rtx_equal_p', since canonicalization has already been done. */
4842 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
4843 continue;
4844
4845 /* Also skip paradoxical subregs, unless that's what we're
4846 looking for. */
4847 if (code == SUBREG
4848 && (GET_MODE_SIZE (GET_MODE (p->exp))
4849 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
4850 && ! (src != 0
4851 && GET_CODE (src) == SUBREG
4852 && GET_MODE (src) == GET_MODE (p->exp)
4853 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4854 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
4855 continue;
4856
4857 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
4858 src = 0;
4859 else if (src_folded && GET_CODE (src_folded) == code
4860 && rtx_equal_p (src_folded, p->exp))
4861 src_folded = 0;
4862 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
4863 && rtx_equal_p (src_eqv_here, p->exp))
4864 src_eqv_here = 0;
4865 else if (src_related && GET_CODE (src_related) == code
4866 && rtx_equal_p (src_related, p->exp))
4867 src_related = 0;
4868
4869 /* If this is the same as the destination of the insn, we want
4870 to prefer it; copy it to src_related. The code below will
4871 then give it a negative cost. */
4872 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
4873 src_related = dest;
4874 }
4875
4876 /* Find the cheapest valid equivalent, trying all the available
4877 possibilities. Prefer items not in the hash table to ones
4878 that are when they are equal cost. Note that we can never
4879 worsen an insn as the current contents will also succeed.
4880 If we find an equivalent identical to the destination, use it as best,
4881 since this insn will probably be eliminated in that case. */
4882 if (src)
4883 {
4884 if (rtx_equal_p (src, dest))
4885 src_cost = src_regcost = -1;
4886 else
4887 {
4888 src_cost = COST (src);
4889 src_regcost = approx_reg_cost (src);
4890 }
4891 }
4892
4893 if (src_eqv_here)
4894 {
4895 if (rtx_equal_p (src_eqv_here, dest))
4896 src_eqv_cost = src_eqv_regcost = -1;
4897 else
4898 {
4899 src_eqv_cost = COST (src_eqv_here);
4900 src_eqv_regcost = approx_reg_cost (src_eqv_here);
4901 }
4902 }
4903
4904 if (src_folded)
4905 {
4906 if (rtx_equal_p (src_folded, dest))
4907 src_folded_cost = src_folded_regcost = -1;
4908 else
4909 {
4910 src_folded_cost = COST (src_folded);
4911 src_folded_regcost = approx_reg_cost (src_folded);
4912 }
4913 }
4914
4915 if (src_related)
4916 {
4917 if (rtx_equal_p (src_related, dest))
4918 src_related_cost = src_related_regcost = -1;
4919 else
4920 {
4921 src_related_cost = COST (src_related);
4922 src_related_regcost = approx_reg_cost (src_related);
4923
4924 /* If a const-anchor is used to synthesize a constant that
4925 normally requires multiple instructions then slightly prefer
4926 it over the original sequence. These instructions are likely
4927 to become redundant now. We can't compare against the cost
4928 of src_eqv_here because, on MIPS for example, multi-insn
4929 constants have zero cost; they are assumed to be hoisted from
4930 loops. */
4931 if (src_related_is_const_anchor
4932 && src_related_cost == src_cost
4933 && src_eqv_here)
4934 src_related_cost--;
4935 }
4936 }
4937
4938 /* If this was an indirect jump insn, a known label will really be
4939 cheaper even though it looks more expensive. */
4940 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
4941 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
4942
4943 /* Terminate loop when replacement made. This must terminate since
4944 the current contents will be tested and will always be valid. */
4945 while (1)
4946 {
4947 rtx trial;
4948
4949 /* Skip invalid entries. */
4950 while (elt && !REG_P (elt->exp)
4951 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
4952 elt = elt->next_same_value;
4953
4954 /* A paradoxical subreg would be bad here: it'll be the right
4955 size, but later may be adjusted so that the upper bits aren't
4956 what we want. So reject it. */
4957 if (elt != 0
4958 && GET_CODE (elt->exp) == SUBREG
4959 && (GET_MODE_SIZE (GET_MODE (elt->exp))
4960 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
4961 /* It is okay, though, if the rtx we're trying to match
4962 will ignore any of the bits we can't predict. */
4963 && ! (src != 0
4964 && GET_CODE (src) == SUBREG
4965 && GET_MODE (src) == GET_MODE (elt->exp)
4966 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4967 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
4968 {
4969 elt = elt->next_same_value;
4970 continue;
4971 }
4972
4973 if (elt)
4974 {
4975 src_elt_cost = elt->cost;
4976 src_elt_regcost = elt->regcost;
4977 }
4978
4979 /* Find cheapest and skip it for the next time. For items
4980 of equal cost, use this order:
4981 src_folded, src, src_eqv, src_related and hash table entry. */
4982 if (src_folded
4983 && preferable (src_folded_cost, src_folded_regcost,
4984 src_cost, src_regcost) <= 0
4985 && preferable (src_folded_cost, src_folded_regcost,
4986 src_eqv_cost, src_eqv_regcost) <= 0
4987 && preferable (src_folded_cost, src_folded_regcost,
4988 src_related_cost, src_related_regcost) <= 0
4989 && preferable (src_folded_cost, src_folded_regcost,
4990 src_elt_cost, src_elt_regcost) <= 0)
4991 {
4992 trial = src_folded, src_folded_cost = MAX_COST;
4993 if (src_folded_force_flag)
4994 {
4995 rtx forced = force_const_mem (mode, trial);
4996 if (forced)
4997 trial = forced;
4998 }
4999 }
5000 else if (src
5001 && preferable (src_cost, src_regcost,
5002 src_eqv_cost, src_eqv_regcost) <= 0
5003 && preferable (src_cost, src_regcost,
5004 src_related_cost, src_related_regcost) <= 0
5005 && preferable (src_cost, src_regcost,
5006 src_elt_cost, src_elt_regcost) <= 0)
5007 trial = src, src_cost = MAX_COST;
5008 else if (src_eqv_here
5009 && preferable (src_eqv_cost, src_eqv_regcost,
5010 src_related_cost, src_related_regcost) <= 0
5011 && preferable (src_eqv_cost, src_eqv_regcost,
5012 src_elt_cost, src_elt_regcost) <= 0)
5013 trial = src_eqv_here, src_eqv_cost = MAX_COST;
5014 else if (src_related
5015 && preferable (src_related_cost, src_related_regcost,
5016 src_elt_cost, src_elt_regcost) <= 0)
5017 trial = src_related, src_related_cost = MAX_COST;
5018 else
5019 {
5020 trial = elt->exp;
5021 elt = elt->next_same_value;
5022 src_elt_cost = MAX_COST;
5023 }
5024
5025 /* Avoid creation of overlapping memory moves. */
5026 if (MEM_P (trial) && MEM_P (SET_DEST (sets[i].rtl)))
5027 {
5028 rtx src, dest;
5029
5030 /* BLKmode moves are not handled by cse anyway. */
5031 if (GET_MODE (trial) == BLKmode)
5032 break;
5033
5034 src = canon_rtx (trial);
5035 dest = canon_rtx (SET_DEST (sets[i].rtl));
5036
5037 if (!MEM_P (src) || !MEM_P (dest)
5038 || !nonoverlapping_memrefs_p (src, dest, false))
5039 break;
5040 }
5041
5042 /* Try to optimize
5043 (set (reg:M N) (const_int A))
5044 (set (reg:M2 O) (const_int B))
5045 (set (zero_extract:M2 (reg:M N) (const_int C) (const_int D))
5046 (reg:M2 O)). */
5047 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5048 && CONST_INT_P (trial)
5049 && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 1))
5050 && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 2))
5051 && REG_P (XEXP (SET_DEST (sets[i].rtl), 0))
5052 && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (sets[i].rtl)))
5053 >= INTVAL (XEXP (SET_DEST (sets[i].rtl), 1)))
5054 && ((unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))
5055 + (unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 2))
5056 <= HOST_BITS_PER_WIDE_INT))
5057 {
5058 rtx dest_reg = XEXP (SET_DEST (sets[i].rtl), 0);
5059 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5060 rtx pos = XEXP (SET_DEST (sets[i].rtl), 2);
5061 unsigned int dest_hash = HASH (dest_reg, GET_MODE (dest_reg));
5062 struct table_elt *dest_elt
5063 = lookup (dest_reg, dest_hash, GET_MODE (dest_reg));
5064 rtx dest_cst = NULL;
5065
5066 if (dest_elt)
5067 for (p = dest_elt->first_same_value; p; p = p->next_same_value)
5068 if (p->is_const && CONST_INT_P (p->exp))
5069 {
5070 dest_cst = p->exp;
5071 break;
5072 }
5073 if (dest_cst)
5074 {
5075 HOST_WIDE_INT val = INTVAL (dest_cst);
5076 HOST_WIDE_INT mask;
5077 unsigned int shift;
5078 if (BITS_BIG_ENDIAN)
5079 shift = GET_MODE_BITSIZE (GET_MODE (dest_reg))
5080 - INTVAL (pos) - INTVAL (width);
5081 else
5082 shift = INTVAL (pos);
5083 if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
5084 mask = ~(HOST_WIDE_INT) 0;
5085 else
5086 mask = ((HOST_WIDE_INT) 1 << INTVAL (width)) - 1;
5087 val &= ~(mask << shift);
5088 val |= (INTVAL (trial) & mask) << shift;
5089 val = trunc_int_for_mode (val, GET_MODE (dest_reg));
5090 validate_unshare_change (insn, &SET_DEST (sets[i].rtl),
5091 dest_reg, 1);
5092 validate_unshare_change (insn, &SET_SRC (sets[i].rtl),
5093 GEN_INT (val), 1);
5094 if (apply_change_group ())
5095 {
5096 rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5097 if (note)
5098 {
5099 remove_note (insn, note);
5100 df_notes_rescan (insn);
5101 }
5102 src_eqv = NULL_RTX;
5103 src_eqv_elt = NULL;
5104 src_eqv_volatile = 0;
5105 src_eqv_in_memory = 0;
5106 src_eqv_hash = 0;
5107 repeat = true;
5108 break;
5109 }
5110 }
5111 }
5112
5113 /* We don't normally have an insn matching (set (pc) (pc)), so
5114 check for this separately here. We will delete such an
5115 insn below.
5116
5117 For other cases such as a table jump or conditional jump
5118 where we know the ultimate target, go ahead and replace the
5119 operand. While that may not make a valid insn, we will
5120 reemit the jump below (and also insert any necessary
5121 barriers). */
5122 if (n_sets == 1 && dest == pc_rtx
5123 && (trial == pc_rtx
5124 || (GET_CODE (trial) == LABEL_REF
5125 && ! condjump_p (insn))))
5126 {
5127 /* Don't substitute non-local labels; this confuses the CFG. */
5128 if (GET_CODE (trial) == LABEL_REF
5129 && LABEL_REF_NONLOCAL_P (trial))
5130 continue;
5131
5132 SET_SRC (sets[i].rtl) = trial;
5133 cse_jumps_altered = true;
5134 break;
5135 }
5136
5137 /* Reject certain invalid forms of CONST that we create. */
5138 else if (CONSTANT_P (trial)
5139 && GET_CODE (trial) == CONST
5140 /* Reject cases that will cause decode_rtx_const to
5141 die. On the alpha when simplifying a switch, we
5142 get (const (truncate (minus (label_ref)
5143 (label_ref)))). */
5144 && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
5145 /* Likewise on IA-64, except without the
5146 truncate. */
5147 || (GET_CODE (XEXP (trial, 0)) == MINUS
5148 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5149 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
5150 /* Do nothing for this case. */
5151 ;
5152
5153 /* Look for a substitution that makes a valid insn. */
5154 else if (validate_unshare_change
5155 (insn, &SET_SRC (sets[i].rtl), trial, 0))
5156 {
5157 rtx new_rtx = canon_reg (SET_SRC (sets[i].rtl), insn);
5158
5159 /* The result of apply_change_group can be ignored; see
5160 canon_reg. */
5161
5162 validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
5163 apply_change_group ();
5164
5165 break;
5166 }
5167
5168 /* If we previously found constant pool entries for
5169 constants and this is a constant, try making a
5170 pool entry. Put it in src_folded unless we have already done
5171 so, since that is where it likely came from. */
5172
5173 else if (constant_pool_entries_cost
5174 && CONSTANT_P (trial)
5175 && (src_folded == 0
5176 || (!MEM_P (src_folded)
5177 && ! src_folded_force_flag))
5178 && GET_MODE_CLASS (mode) != MODE_CC
5179 && mode != VOIDmode)
5180 {
5181 src_folded_force_flag = 1;
5182 src_folded = trial;
5183 src_folded_cost = constant_pool_entries_cost;
5184 src_folded_regcost = constant_pool_entries_regcost;
5185 }
5186 }
5187
5188 /* If we changed the insn too much, handle this set from scratch. */
5189 if (repeat)
5190 {
5191 i--;
5192 continue;
5193 }
5194
5195 src = SET_SRC (sets[i].rtl);
5196
5197 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5198 However, there is an important exception: If both are registers
5199 that are not the head of their equivalence class, replace SET_SRC
5200 with the head of the class. If we do not do this, we will have
5201 both registers live over a portion of the basic block. This way,
5202 their lifetimes will likely abut instead of overlapping. */
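/* Concretely, given (set (reg 200) (reg 200)) where (reg 100) heads
   the equivalence class, the source is rewritten to (reg 100) so the
   copy reads from the class head.  */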
5203 if (REG_P (dest)
5204 && REGNO_QTY_VALID_P (REGNO (dest)))
5205 {
5206 int dest_q = REG_QTY (REGNO (dest));
5207 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5208
5209 if (dest_ent->mode == GET_MODE (dest)
5210 && dest_ent->first_reg != REGNO (dest)
5211 && REG_P (src) && REGNO (src) == REGNO (dest)
5212 /* Don't do this if the original insn had a hard reg as
5213 SET_SRC or SET_DEST. */
5214 && (!REG_P (sets[i].src)
5215 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5216 && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5217 /* We can't call canon_reg here because it won't do anything if
5218 SRC is a hard register. */
5219 {
5220 int src_q = REG_QTY (REGNO (src));
5221 struct qty_table_elem *src_ent = &qty_table[src_q];
5222 int first = src_ent->first_reg;
5223 rtx new_src
5224 = (first >= FIRST_PSEUDO_REGISTER
5225 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5226
5227 /* We must use validate_change even for this, because this
5228 might be a special no-op instruction, suitable only to
5229 tag notes onto. */
5230 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5231 {
5232 src = new_src;
5233 /* If we had a constant that is cheaper than what we are now
5234 setting SRC to, use that constant. We ignored it when we
5235 thought we could make this into a no-op. */
5236 if (src_const && COST (src_const) < COST (src)
5237 && validate_change (insn, &SET_SRC (sets[i].rtl),
5238 src_const, 0))
5239 src = src_const;
5240 }
5241 }
5242 }
5243
5244 /* If we made a change, recompute SRC values. */
5245 if (src != sets[i].src)
5246 {
5247 do_not_record = 0;
5248 hash_arg_in_memory = 0;
5249 sets[i].src = src;
5250 sets[i].src_hash = HASH (src, mode);
5251 sets[i].src_volatile = do_not_record;
5252 sets[i].src_in_memory = hash_arg_in_memory;
5253 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5254 }
5255
5256 /* If this is a single SET, we are setting a register, and we have an
5257 equivalent constant, we want to add a REG_NOTE. We don't want
5258 to write a REG_EQUAL note for a constant pseudo since verifying that
5259 that pseudo hasn't been eliminated is a pain. Such a note also
5260 won't help anything.
5261
5262 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5263 which can be created for a reference to a compile time computable
5264 entry in a jump table. */
5265
5266 if (n_sets == 1 && src_const && REG_P (dest)
5267 && !REG_P (src_const)
5268 && ! (GET_CODE (src_const) == CONST
5269 && GET_CODE (XEXP (src_const, 0)) == MINUS
5270 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5271 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5272 {
5273 /* We only want a REG_EQUAL note if src_const != src. */
5274 if (! rtx_equal_p (src, src_const))
5275 {
5276 /* Make sure that the rtx is not shared. */
5277 src_const = copy_rtx (src_const);
5278
5279 /* Record the actual constant value in a REG_EQUAL note,
5280 making a new one if one does not already exist. */
5281 set_unique_reg_note (insn, REG_EQUAL, src_const);
5282 df_notes_rescan (insn);
5283 }
5284 }
5285
5286 /* Now deal with the destination. */
5287 do_not_record = 0;
5288
5289 /* Look within any SUBREG, ZERO_EXTRACT or STRICT_LOW_PART to the MEM or REG within it. */
5290 while (GET_CODE (dest) == SUBREG
5291 || GET_CODE (dest) == ZERO_EXTRACT
5292 || GET_CODE (dest) == STRICT_LOW_PART)
5293 dest = XEXP (dest, 0);
5294
5295 sets[i].inner_dest = dest;
5296
5297 if (MEM_P (dest))
5298 {
5299 #ifdef PUSH_ROUNDING
5300 /* Stack pushes invalidate the stack pointer. */
5301 rtx addr = XEXP (dest, 0);
5302 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5303 && XEXP (addr, 0) == stack_pointer_rtx)
5304 invalidate (stack_pointer_rtx, VOIDmode);
5305 #endif
5306 dest = fold_rtx (dest, insn);
5307 }
5308
5309 /* Compute the hash code of the destination now,
5310 before the effects of this instruction are recorded,
5311 since the register values used in the address computation
5312 are those before this instruction. */
5313 sets[i].dest_hash = HASH (dest, mode);
5314
5315 /* Don't enter a bit-field in the hash table
5316 because the value in it after the store
5317 may not equal what was stored, due to truncation. */
5318
5319 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5320 {
5321 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5322
5323 if (src_const != 0 && CONST_INT_P (src_const)
5324 && CONST_INT_P (width)
5325 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5326 && ! (INTVAL (src_const)
5327 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5328 /* Exception: if the value is constant,
5329 and it won't be truncated, record it. */
5330 ;
5331 else
5332 {
5333 /* This is chosen so that the destination will be invalidated
5334 but no new value will be recorded.
5335 We must invalidate because sometimes constant
5336 values can be recorded for bitfields. */
5337 sets[i].src_elt = 0;
5338 sets[i].src_volatile = 1;
5339 src_eqv = 0;
5340 src_eqv_elt = 0;
5341 }
5342 }
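/* A hypothetical instance of the rule above: storing (const_int 5)
   into a 4-bit ZERO_EXTRACT may be recorded, since 5 has no bits set
   above the low four, whereas (const_int 21) would be truncated to 5
   by the store and therefore must not be recorded. */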
5343
5344 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5345 the insn. */
5346 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5347 {
5348 /* One less use of the label this insn used to jump to. */
5349 delete_insn_and_edges (insn);
5350 cse_jumps_altered = true;
5351 /* No more processing for this set. */
5352 sets[i].rtl = 0;
5353 }
5354
5355 /* If this SET is now setting PC to a label, we know it used to
5356 be a conditional or computed branch. */
5357 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5358 && !LABEL_REF_NONLOCAL_P (src))
5359 {
5360 /* We reemit the jump in as many cases as possible just in
5361 case the form of an unconditional jump is significantly
5362 different from a computed jump or conditional jump.
5363
5364 If this insn has multiple sets, then reemitting the
5365 jump is nontrivial. So instead we just force rerecognition
5366 and hope for the best. */
5367 if (n_sets == 1)
5368 {
5369 rtx new_rtx, note;
5370
5371 new_rtx = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
5372 JUMP_LABEL (new_rtx) = XEXP (src, 0);
5373 LABEL_NUSES (XEXP (src, 0))++;
5374
5375 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5376 note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5377 if (note)
5378 {
5379 XEXP (note, 1) = NULL_RTX;
5380 REG_NOTES (new_rtx) = note;
5381 }
5382
5383 delete_insn_and_edges (insn);
5384 insn = new_rtx;
5385 }
5386 else
5387 INSN_CODE (insn) = -1;
5388
5389 /* Do not bother deleting any unreachable code, let jump do it. */
5390 cse_jumps_altered = true;
5391 sets[i].rtl = 0;
5392 }
5393
5394 /* If destination is volatile, invalidate it and then do no further
5395 processing for this assignment. */
5396
5397 else if (do_not_record)
5398 {
5399 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5400 invalidate (dest, VOIDmode);
5401 else if (MEM_P (dest))
5402 invalidate (dest, VOIDmode);
5403 else if (GET_CODE (dest) == STRICT_LOW_PART
5404 || GET_CODE (dest) == ZERO_EXTRACT)
5405 invalidate (XEXP (dest, 0), GET_MODE (dest));
5406 sets[i].rtl = 0;
5407 }
5408
5409 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5410 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5411
5412 #ifdef HAVE_cc0
5413 /* If setting CC0, record what it was set to, or a constant, if it
5414 is equivalent to a constant. If it is being set to a floating-point
5415 value, make a COMPARE with the appropriate constant of 0. If we
5416 don't do this, later code can interpret this as a test against
5417 const0_rtx, which can cause problems if we try to put it into an
5418 insn as a floating-point operand. */
5419 if (dest == cc0_rtx)
5420 {
5421 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5422 this_insn_cc0_mode = mode;
5423 if (FLOAT_MODE_P (mode))
5424 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5425 CONST0_RTX (mode));
5426 }
5427 #endif
5428 }
5429
5430 /* Now enter all non-volatile source expressions in the hash table
5431 if they are not already present.
5432 Record their equivalence classes in src_elt.
5433 This way we can insert the corresponding destinations into
5434 the same classes even if the actual sources are no longer in them
5435 (having been invalidated). */
5436
5437 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5438 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5439 {
5440 struct table_elt *elt;
5441 struct table_elt *classp = sets[0].src_elt;
5442 rtx dest = SET_DEST (sets[0].rtl);
5443 enum machine_mode eqvmode = GET_MODE (dest);
5444
5445 if (GET_CODE (dest) == STRICT_LOW_PART)
5446 {
5447 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5448 classp = 0;
5449 }
5450 if (insert_regs (src_eqv, classp, 0))
5451 {
5452 rehash_using_reg (src_eqv);
5453 src_eqv_hash = HASH (src_eqv, eqvmode);
5454 }
5455 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5456 elt->in_memory = src_eqv_in_memory;
5457 src_eqv_elt = elt;
5458
5459 /* Check to see if src_eqv_elt is the same as a set source which
5460 does not yet have an elt, and if so set the elt of the set source
5461 to src_eqv_elt. */
5462 for (i = 0; i < n_sets; i++)
5463 if (sets[i].rtl && sets[i].src_elt == 0
5464 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5465 sets[i].src_elt = src_eqv_elt;
5466 }
5467
5468 for (i = 0; i < n_sets; i++)
5469 if (sets[i].rtl && ! sets[i].src_volatile
5470 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5471 {
5472 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5473 {
5474 /* REG_EQUAL in setting a STRICT_LOW_PART
5475 gives an equivalent for the entire destination register,
5476 not just for the subreg being stored in now.
5477 This is a more interesting equivalence, so we arrange later
5478 to treat the entire reg as the destination. */
5479 sets[i].src_elt = src_eqv_elt;
5480 sets[i].src_hash = src_eqv_hash;
5481 }
5482 else
5483 {
5484 /* Insert source and constant equivalent into hash table, if not
5485 already present. */
5486 struct table_elt *classp = src_eqv_elt;
5487 rtx src = sets[i].src;
5488 rtx dest = SET_DEST (sets[i].rtl);
5489 enum machine_mode mode
5490 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5491
5492 /* It's possible that we have a source value known to be
5493 constant but don't have a REG_EQUAL note on the insn.
5494 Lack of a note will mean src_eqv_elt will be NULL. This
5495 can happen where we've generated a SUBREG to access a
5496 CONST_INT that is already in a register in a wider mode.
5497 Ensure that the source expression is put in the proper
5498 constant class. */
5499 if (!classp)
5500 classp = sets[i].src_const_elt;
5501
5502 if (sets[i].src_elt == 0)
5503 {
5504 struct table_elt *elt;
5505
5506 /* Note that these insert_regs calls cannot remove
5507 any of the src_elt's, because they would have failed to
5508 match if not still valid. */
5509 if (insert_regs (src, classp, 0))
5510 {
5511 rehash_using_reg (src);
5512 sets[i].src_hash = HASH (src, mode);
5513 }
5514 elt = insert (src, classp, sets[i].src_hash, mode);
5515 elt->in_memory = sets[i].src_in_memory;
5516 sets[i].src_elt = classp = elt;
5517 }
5518 if (sets[i].src_const && sets[i].src_const_elt == 0
5519 && src != sets[i].src_const
5520 && ! rtx_equal_p (sets[i].src_const, src))
5521 sets[i].src_elt = insert (sets[i].src_const, classp,
5522 sets[i].src_const_hash, mode);
5523 }
5524 }
5525 else if (sets[i].src_elt == 0)
5526 /* If we did not insert the source into the hash table (e.g., it was
5527 volatile), note the equivalence class for the REG_EQUAL value, if any,
5528 so that the destination goes into that class. */
5529 sets[i].src_elt = src_eqv_elt;
5530
5531 /* Record destination addresses in the hash table. This allows us to
5532 check if they are invalidated by other sets. */
5533 for (i = 0; i < n_sets; i++)
5534 {
5535 if (sets[i].rtl)
5536 {
5537 rtx x = sets[i].inner_dest;
5538 struct table_elt *elt;
5539 enum machine_mode mode;
5540 unsigned hash;
5541
5542 if (MEM_P (x))
5543 {
5544 x = XEXP (x, 0);
5545 mode = GET_MODE (x);
5546 hash = HASH (x, mode);
5547 elt = lookup (x, hash, mode);
5548 if (!elt)
5549 {
5550 if (insert_regs (x, NULL, 0))
5551 {
5552 rtx dest = SET_DEST (sets[i].rtl);
5553
5554 rehash_using_reg (x);
5555 hash = HASH (x, mode);
5556 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5557 }
5558 elt = insert (x, NULL, hash, mode);
5559 }
5560
5561 sets[i].dest_addr_elt = elt;
5562 }
5563 else
5564 sets[i].dest_addr_elt = NULL;
5565 }
5566 }
5567
5568 invalidate_from_clobbers (x);
5569
5570 /* Some registers are invalidated by subroutine calls. Memory is
5571 invalidated by non-constant calls. */
5572
5573 if (CALL_P (insn))
5574 {
5575 if (!(RTL_CONST_OR_PURE_CALL_P (insn)))
5576 invalidate_memory ();
5577 invalidate_for_call ();
5578 }
5579
5580 /* Now invalidate everything set by this instruction.
5581 If a SUBREG or other funny destination is being set,
5582 sets[i].rtl is still nonzero, so here we invalidate the reg
5583 a part of which is being set. */
5584
5585 for (i = 0; i < n_sets; i++)
5586 if (sets[i].rtl)
5587 {
5588 /* We can't use the inner dest, because the mode associated with
5589 a ZERO_EXTRACT is significant. */
5590 rtx dest = SET_DEST (sets[i].rtl);
5591
5592 /* Needed for registers to remove the register from its
5593 previous quantity's chain.
5594 Needed for memory if this is a nonvarying address, unless
5595 we have just done an invalidate_memory that covers even those. */
5596 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5597 invalidate (dest, VOIDmode);
5598 else if (MEM_P (dest))
5599 invalidate (dest, VOIDmode);
5600 else if (GET_CODE (dest) == STRICT_LOW_PART
5601 || GET_CODE (dest) == ZERO_EXTRACT)
5602 invalidate (XEXP (dest, 0), GET_MODE (dest));
5603 }
5604
5605 /* A volatile ASM invalidates everything. */
5606 if (NONJUMP_INSN_P (insn)
5607 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5608 && MEM_VOLATILE_P (PATTERN (insn)))
5609 flush_hash_table ();
5610
5611 /* Don't cse over a call to setjmp; on some machines (e.g. VAX)
5612 the regs restored by the longjmp come from a later time
5613 than the setjmp. */
5614 if (CALL_P (insn) && find_reg_note (insn, REG_SETJMP, NULL))
5615 {
5616 flush_hash_table ();
5617 goto done;
5618 }
5619
5620 /* Make sure registers mentioned in destinations
5621 are safe for use in an expression to be inserted.
5622 This removes from the hash table
5623 any invalid entry that refers to one of these registers.
5624
5625 We don't care about the return value from mention_regs because
5626 we are going to hash the SET_DEST values unconditionally. */
5627
5628 for (i = 0; i < n_sets; i++)
5629 {
5630 if (sets[i].rtl)
5631 {
5632 rtx x = SET_DEST (sets[i].rtl);
5633
5634 if (!REG_P (x))
5635 mention_regs (x);
5636 else
5637 {
5638 /* We used to rely on all references to a register becoming
5639 inaccessible when a register changes to a new quantity,
5640 since that changes the hash code. However, that is not
5641 safe, since after HASH_SIZE new quantities we get a
5642 hash 'collision' of a register with its own invalid
5643 entries. And since SUBREGs were changed so that their
5644 hash code no longer follows the hash code of the inner register,
5645 the old scheme wouldn't work at all any more. So we have to check
5646 for any invalid references lying around now.
5647 This code is similar to the REG case in mention_regs,
5648 but it knows that reg_tick has been incremented, and
5649 it leaves reg_in_table as -1. */
5650 unsigned int regno = REGNO (x);
5651 unsigned int endregno = END_REGNO (x);
5652 unsigned int i;
5653
5654 for (i = regno; i < endregno; i++)
5655 {
5656 if (REG_IN_TABLE (i) >= 0)
5657 {
5658 remove_invalid_refs (i);
5659 REG_IN_TABLE (i) = -1;
5660 }
5661 }
5662 }
5663 }
5664 }
5665
5666 /* We may have just removed some of the src_elt's from the hash table.
5667 So replace each one with the current head of the same class.
5668 Also check if destination addresses have been removed. */
5669
5670 for (i = 0; i < n_sets; i++)
5671 if (sets[i].rtl)
5672 {
5673 if (sets[i].dest_addr_elt
5674 && sets[i].dest_addr_elt->first_same_value == 0)
5675 {
5676 /* The elt was removed, which means this destination is not
5677 valid after this instruction. */
5678 sets[i].rtl = NULL_RTX;
5679 }
5680 else if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5681 /* If elt was removed, find current head of same class,
5682 or 0 if nothing remains of that class. */
5683 {
5684 struct table_elt *elt = sets[i].src_elt;
5685
5686 while (elt && elt->prev_same_value)
5687 elt = elt->prev_same_value;
5688
5689 while (elt && elt->first_same_value == 0)
5690 elt = elt->next_same_value;
5691 sets[i].src_elt = elt ? elt->first_same_value : 0;
5692 }
5693 }
5694
5695 /* Now insert the destinations into their equivalence classes. */
5696
5697 for (i = 0; i < n_sets; i++)
5698 if (sets[i].rtl)
5699 {
5700 rtx dest = SET_DEST (sets[i].rtl);
5701 struct table_elt *elt;
5702
5703 /* Don't record value if we are not supposed to risk allocating
5704 floating-point values in registers that might be wider than
5705 memory. */
5706 if ((flag_float_store
5707 && MEM_P (dest)
5708 && FLOAT_MODE_P (GET_MODE (dest)))
5709 /* Don't record BLKmode values, because we don't know their
5710 size, and can't be sure that other BLKmode values
5711 have the same or smaller size. */
5712 || GET_MODE (dest) == BLKmode
5713 /* If we didn't put a REG_EQUAL value or a source into the hash
5714 table, there is no point in recording DEST.
5715 || sets[i].src_elt == 0
5716 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
5717 or SIGN_EXTEND, don't record DEST since it can cause
5718 some tracking to be wrong.
5719
5720 ??? Think about this more later. */
5721 || (GET_CODE (dest) == SUBREG
5722 && (GET_MODE_SIZE (GET_MODE (dest))
5723 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5724 && (GET_CODE (sets[i].src) == SIGN_EXTEND
5725 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
5726 continue;
5727
5728 /* STRICT_LOW_PART isn't part of the value BEING set,
5729 and neither is the SUBREG inside it.
5730 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
5731 if (GET_CODE (dest) == STRICT_LOW_PART)
5732 dest = SUBREG_REG (XEXP (dest, 0));
5733
5734 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5735 /* Registers must also be inserted into chains for quantities. */
5736 if (insert_regs (dest, sets[i].src_elt, 1))
5737 {
5738 /* If `insert_regs' changes something, the hash code must be
5739 recalculated. */
5740 rehash_using_reg (dest);
5741 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5742 }
5743
5744 elt = insert (dest, sets[i].src_elt,
5745 sets[i].dest_hash, GET_MODE (dest));
5746
5747 /* If this is a constant, insert the constant anchors with the
5748 equivalent register-offset expressions using register DEST. */
5749 if (targetm.const_anchor
5750 && REG_P (dest)
5751 && SCALAR_INT_MODE_P (GET_MODE (dest))
5752 && GET_CODE (sets[i].src_elt->exp) == CONST_INT)
5753 insert_const_anchors (dest, sets[i].src_elt->exp, GET_MODE (dest));
5754
5755 elt->in_memory = (MEM_P (sets[i].inner_dest)
5756 && !MEM_READONLY_P (sets[i].inner_dest));
5757
5758 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
5759 narrower than M2, and both M1 and M2 are the same number of words,
5760 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
5761 make that equivalence as well.
5762
5763 However, one of BAR's equivalences may yield a simpler
5764 result under gen_lowpart than gen_lowpart applied to BAR
5765 itself (e.g., if BAR was ZERO_EXTENDed from M2), so we scan
5766 all of BAR's equivalences. If we don't get a simplified form, make
5767 the SUBREG. It will not be used in an equivalence, but will
5768 cause two similar assignments to be detected.
5769
5770 Note the loop below will find SUBREG_REG (DEST) since we have
5771 already entered SRC and DEST of the SET in the table. */
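/* A concrete (illustrative) instance, assuming a 32-bit word and a
   little-endian byte offset of 0: from

      (set (subreg:SI (reg:HI 70) 0) (reg:SI 68))

   we would also record the equivalence

      (set (reg:HI 70) (subreg:HI (reg:SI 68) 0))

   since SImode is no narrower than HImode and both occupy one word. */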
5772
5773 if (GET_CODE (dest) == SUBREG
5774 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
5775 / UNITS_PER_WORD)
5776 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
5777 && (GET_MODE_SIZE (GET_MODE (dest))
5778 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5779 && sets[i].src_elt != 0)
5780 {
5781 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
5782 struct table_elt *elt, *classp = 0;
5783
5784 for (elt = sets[i].src_elt->first_same_value; elt;
5785 elt = elt->next_same_value)
5786 {
5787 rtx new_src = 0;
5788 unsigned src_hash;
5789 struct table_elt *src_elt;
5790 int byte = 0;
5791
5792 /* Ignore invalid entries. */
5793 if (!REG_P (elt->exp)
5794 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5795 continue;
5796
5797 /* We may have already been playing subreg games. If the
5798 mode is already correct for the destination, use it. */
5799 if (GET_MODE (elt->exp) == new_mode)
5800 new_src = elt->exp;
5801 else
5802 {
5803 /* Calculate big endian correction for the SUBREG_BYTE.
5804 We have already checked that M1 (GET_MODE (dest))
5805 is not narrower than M2 (new_mode). */
5806 if (BYTES_BIG_ENDIAN)
5807 byte = (GET_MODE_SIZE (GET_MODE (dest))
5808 - GET_MODE_SIZE (new_mode));
5809
5810 new_src = simplify_gen_subreg (new_mode, elt->exp,
5811 GET_MODE (dest), byte);
5812 }
5813
5814 /* The call to simplify_gen_subreg fails if the value
5815 is VOIDmode and no simplification can be done, e.g.
5816 for EXPR_LISTs denoting function call results.
5817 It is invalid to construct a SUBREG with a VOIDmode
5818 SUBREG_REG, hence a zero new_src means we can't do
5819 this substitution. */
5820 if (! new_src)
5821 continue;
5822
5823 src_hash = HASH (new_src, new_mode);
5824 src_elt = lookup (new_src, src_hash, new_mode);
5825
5826 /* Put the new source in the hash table if it isn't
5827 there already. */
5828 if (src_elt == 0)
5829 {
5830 if (insert_regs (new_src, classp, 0))
5831 {
5832 rehash_using_reg (new_src);
5833 src_hash = HASH (new_src, new_mode);
5834 }
5835 src_elt = insert (new_src, classp, src_hash, new_mode);
5836 src_elt->in_memory = elt->in_memory;
5837 }
5838 else if (classp && classp != src_elt->first_same_value)
5839 /* Show that two things that we've seen before are
5840 actually the same. */
5841 merge_equiv_classes (src_elt, classp);
5842
5843 classp = src_elt->first_same_value;
5844 /* Ignore invalid entries. */
5845 while (classp
5846 && !REG_P (classp->exp)
5847 && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
5848 classp = classp->next_same_value;
5849 }
5850 }
5851 }
5852
5853 /* Special handling for (set REG0 REG1) where REG0 is the
5854 "cheapest", cheaper than REG1. After cse, REG1 will probably not
5855 be used in the sequel, so (if easily done) change this insn to
5856 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
5857 that computed their value. Then REG1 will become a dead store
5858 and won't cloud the situation for later optimizations.
5859
5860 Do not make this change if REG1 is a hard register, because it will
5861 then be used in the sequel and we may be changing a two-operand insn
5862 into a three-operand insn.
5863
5864 Also do not do this if we are operating on a copy of INSN. */
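/* Sketched on made-up pseudos, where reg 68 is the cheaper REG0:

      (set (reg 69) <expr>)       becomes   (set (reg 68) <expr>)
      (set (reg 68) (reg 69))     becomes   (set (reg 69) (reg 68))

   after which the second insn is a dead store of reg 69 that later
   passes can delete. */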
5865
5866 if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
5867 && NEXT_INSN (PREV_INSN (insn)) == insn
5868 && REG_P (SET_SRC (sets[0].rtl))
5869 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
5870 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
5871 {
5872 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
5873 struct qty_table_elem *src_ent = &qty_table[src_q];
5874
5875 if (src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
5876 {
5877 /* Scan for the previous nonnote insn, but stop at a basic
5878 block boundary. */
5879 rtx prev = insn;
5880 rtx bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
5881 do
5882 {
5883 prev = PREV_INSN (prev);
5884 }
5885 while (prev != bb_head && (NOTE_P (prev) || DEBUG_INSN_P (prev)));
5886
5887 /* Do not swap the registers around if the previous instruction
5888 attaches a REG_EQUIV note to REG1.
5889
5890 ??? It's not entirely clear whether we can transfer a REG_EQUIV
5891 from the pseudo that originally shadowed an incoming argument
5892 to another register. Some uses of REG_EQUIV might rely on it
5893 being attached to REG1 rather than REG2.
5894
5895 This section previously turned the REG_EQUIV into a REG_EQUAL
5896 note. We cannot do that because REG_EQUIV may provide an
5897 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
5898 if (NONJUMP_INSN_P (prev)
5899 && GET_CODE (PATTERN (prev)) == SET
5900 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
5901 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
5902 {
5903 rtx dest = SET_DEST (sets[0].rtl);
5904 rtx src = SET_SRC (sets[0].rtl);
5905 rtx note;
5906
5907 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
5908 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
5909 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
5910 apply_change_group ();
5911
5912 /* If INSN has a REG_EQUAL note, and this note mentions
5913 REG0, then we must delete it, because the value in
5914 REG0 has changed. If the note's value is REG1, we must
5915 also delete it because that is now this insn's dest. */
5916 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5917 if (note != 0
5918 && (reg_mentioned_p (dest, XEXP (note, 0))
5919 || rtx_equal_p (src, XEXP (note, 0))))
5920 remove_note (insn, note);
5921 }
5922 }
5923 }
5924
5925 done:;
5926 }
5927 \f
5928 /* Remove from the hash table all expressions that reference memory. */
5929
5930 static void
5931 invalidate_memory (void)
5932 {
5933 int i;
5934 struct table_elt *p, *next;
5935
5936 for (i = 0; i < HASH_SIZE; i++)
5937 for (p = table[i]; p; p = next)
5938 {
5939 next = p->next_same_hash;
5940 if (p->in_memory)
5941 remove_from_table (p, i);
5942 }
5943 }
5944
5945 /* Perform invalidation on the basis of everything about an insn
5946 except for invalidating the actual places that are SET in it.
5947 This includes the places CLOBBERed, and anything that might
5948 alias with something that is SET or CLOBBERed.
5949
5950 X is the pattern of the insn. */
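/* For example (register numbers are invented), given the pattern

      (parallel [(set (reg:SI 68) (reg:SI 69))
                 (clobber (reg:CC 17))
                 (clobber (mem:SI (reg:SI 70)))])

   this routine invalidates the clobbered CC register and the
   clobbered memory location; the SET destination itself is
   invalidated separately in cse_insn. */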
5951
5952 static void
5953 invalidate_from_clobbers (rtx x)
5954 {
5955 if (GET_CODE (x) == CLOBBER)
5956 {
5957 rtx ref = XEXP (x, 0);
5958 if (ref)
5959 {
5960 if (REG_P (ref) || GET_CODE (ref) == SUBREG
5961 || MEM_P (ref))
5962 invalidate (ref, VOIDmode);
5963 else if (GET_CODE (ref) == STRICT_LOW_PART
5964 || GET_CODE (ref) == ZERO_EXTRACT)
5965 invalidate (XEXP (ref, 0), GET_MODE (ref));
5966 }
5967 }
5968 else if (GET_CODE (x) == PARALLEL)
5969 {
5970 int i;
5971 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5972 {
5973 rtx y = XVECEXP (x, 0, i);
5974 if (GET_CODE (y) == CLOBBER)
5975 {
5976 rtx ref = XEXP (y, 0);
5977 if (REG_P (ref) || GET_CODE (ref) == SUBREG
5978 || MEM_P (ref))
5979 invalidate (ref, VOIDmode);
5980 else if (GET_CODE (ref) == STRICT_LOW_PART
5981 || GET_CODE (ref) == ZERO_EXTRACT)
5982 invalidate (XEXP (ref, 0), GET_MODE (ref));
5983 }
5984 }
5985 }
5986 }
5987 \f
5988 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
5989 and replace any registers in them with either an equivalent constant
5990 or the canonical form of the register. If we are inside an address,
5991 only do this if the address remains valid.
5992
5993 OBJECT is 0 except when within a MEM in which case it is the MEM.
5994
5995 Return the replacement for X. */
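/* A hypothetical example: if reg 68 is currently known to hold
   (const_int 4), then a REG_EQUAL note containing
   (plus:SI (reg:SI 68) (reg:SI 70)) is rewritten here as
   (plus:SI (const_int 4) <canonical form of reg 70>); folding the
   result is left to later consumers of the note. */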
5996
5997 static rtx
5998 cse_process_notes_1 (rtx x, rtx object, bool *changed)
5999 {
6000 enum rtx_code code = GET_CODE (x);
6001 const char *fmt = GET_RTX_FORMAT (code);
6002 int i;
6003
6004 switch (code)
6005 {
6006 case CONST_INT:
6007 case CONST:
6008 case SYMBOL_REF:
6009 case LABEL_REF:
6010 case CONST_DOUBLE:
6011 case CONST_FIXED:
6012 case CONST_VECTOR:
6013 case PC:
6014 case CC0:
6015 case LO_SUM:
6016 return x;
6017
6018 case MEM:
6019 validate_change (x, &XEXP (x, 0),
6020 cse_process_notes (XEXP (x, 0), x, changed), 0);
6021 return x;
6022
6023 case EXPR_LIST:
6024 case INSN_LIST:
6025 if (REG_NOTE_KIND (x) == REG_EQUAL)
6026 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX, changed);
6027 if (XEXP (x, 1))
6028 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX, changed);
6029 return x;
6030
6031 case SIGN_EXTEND:
6032 case ZERO_EXTEND:
6033 case SUBREG:
6034 {
6035 rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
6036 /* We don't substitute VOIDmode constants into these rtx,
6037 since they would impede folding. */
6038 if (GET_MODE (new_rtx) != VOIDmode)
6039 validate_change (object, &XEXP (x, 0), new_rtx, 0);
6040 return x;
6041 }
6042
6043 case REG:
6044 i = REG_QTY (REGNO (x));
6045
6046 /* Return a constant or a constant register. */
6047 if (REGNO_QTY_VALID_P (REGNO (x)))
6048 {
6049 struct qty_table_elem *ent = &qty_table[i];
6050
6051 if (ent->const_rtx != NULL_RTX
6052 && (CONSTANT_P (ent->const_rtx)
6053 || REG_P (ent->const_rtx)))
6054 {
6055 rtx new_rtx = gen_lowpart (GET_MODE (x), ent->const_rtx);
6056 if (new_rtx)
6057 return copy_rtx (new_rtx);
6058 }
6059 }
6060
6061 /* Otherwise, canonicalize this register. */
6062 return canon_reg (x, NULL_RTX);
6063
6064 default:
6065 break;
6066 }
6067
6068 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6069 if (fmt[i] == 'e')
6070 validate_change (object, &XEXP (x, i),
6071 cse_process_notes (XEXP (x, i), object, changed), 0);
6072
6073 return x;
6074 }
6075
6076 static rtx
6077 cse_process_notes (rtx x, rtx object, bool *changed)
6078 {
6079 rtx new_rtx = cse_process_notes_1 (x, object, changed);
6080 if (new_rtx != x)
6081 *changed = true;
6082 return new_rtx;
6083 }
6084
6085 \f
6086 /* Find a path in the CFG, starting with FIRST_BB to perform CSE on.
6087
6088 DATA is a pointer to a struct cse_basic_block_data, that is used to
6089 describe the path.
6090 It is filled with a queue of basic blocks, starting with FIRST_BB
6091 and following a trace through the CFG.
6092
6093 If all paths starting at FIRST_BB have been followed, or no new path
6094 starting at FIRST_BB can be constructed, this function returns FALSE.
6095 Otherwise, DATA->path is filled and the function returns TRUE indicating
6096 that a path to follow was found.
6097
6098 If FOLLOW_JUMPS is false, the maximum path length is 1 and the only
6099 block in the path will be FIRST_BB. */
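/* A sketch of the enumeration on a small diamond CFG (block numbers
   are invented):

        2
       / \
      3   4
       \ /
        5

   Starting from block 2, one call may build the path 2-3 by
   following the branch edge; the next call retries the other edge
   and yields 2-4.  Block 5 has two predecessors, so it is never
   appended to either path and instead heads a path of its own. */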
6100
6101 static bool
6102 cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
6103 int follow_jumps)
6104 {
6105 basic_block bb;
6106 edge e;
6107 int path_size;
6108
6109 SET_BIT (cse_visited_basic_blocks, first_bb->index);
6110
6111 /* See if there is a previous path. */
6112 path_size = data->path_size;
6113
6114 /* There is a previous path. Make sure it started with FIRST_BB. */
6115 if (path_size)
6116 gcc_assert (data->path[0].bb == first_bb);
6117
6118 /* There was only one basic block in the last path. Clear the path and
6119 return, so that paths starting at another basic block can be tried. */
6120 if (path_size == 1)
6121 {
6122 path_size = 0;
6123 goto done;
6124 }
6125
6126 /* If the path was empty from the beginning, construct a new path. */
6127 if (path_size == 0)
6128 data->path[path_size++].bb = first_bb;
6129 else
6130 {
6131 /* Otherwise, path_size must be equal to or greater than 2, because
6132 a previous path exists that is at least two basic blocks long.
6133
6134 Update the previous branch path, if any. If the last branch was
6135 previously along the branch edge, take the fallthrough edge now. */
6136 while (path_size >= 2)
6137 {
6138 basic_block last_bb_in_path, previous_bb_in_path;
6139 edge e;
6140
6141 --path_size;
6142 last_bb_in_path = data->path[path_size].bb;
6143 previous_bb_in_path = data->path[path_size - 1].bb;
6144
6145 /* If we previously followed a path along the branch edge, try
6146 the fallthru edge now. */
6147 if (EDGE_COUNT (previous_bb_in_path->succs) == 2
6148 && any_condjump_p (BB_END (previous_bb_in_path))
6149 && (e = find_edge (previous_bb_in_path, last_bb_in_path))
6150 && e == BRANCH_EDGE (previous_bb_in_path))
6151 {
6152 bb = FALLTHRU_EDGE (previous_bb_in_path)->dest;
6153 if (bb != EXIT_BLOCK_PTR
6154 && single_pred_p (bb)
6155 /* We used to assert here that we would only see blocks
6156 that we have not visited yet. But we may end up
6157 visiting basic blocks twice if the CFG has changed
6158 in this run of cse_main, because when the CFG changes
6159 the topological sort of the CFG also changes. A basic
6160 block that previously had more than two predecessors
6161 may now have a single predecessor, and become part of
6162 a path that starts at another basic block.
6163
6164 We still want to visit each basic block only once, so
6165 halt the path here if we have already visited BB. */
6166 && !TEST_BIT (cse_visited_basic_blocks, bb->index))
6167 {
6168 SET_BIT (cse_visited_basic_blocks, bb->index);
6169 data->path[path_size++].bb = bb;
6170 break;
6171 }
6172 }
6173
6174 data->path[path_size].bb = NULL;
6175 }
6176
6177 /* If only one block remains in the path, bail. */
6178 if (path_size == 1)
6179 {
6180 path_size = 0;
6181 goto done;
6182 }
6183 }
6184
6185 /* Extend the path if possible. */
6186 if (follow_jumps)
6187 {
6188 bb = data->path[path_size - 1].bb;
6189 while (bb && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH))
6190 {
6191 if (single_succ_p (bb))
6192 e = single_succ_edge (bb);
6193 else if (EDGE_COUNT (bb->succs) == 2
6194 && any_condjump_p (BB_END (bb)))
6195 {
6196 /* First try to follow the branch. If that doesn't lead
6197 to a useful path, follow the fallthru edge. */
6198 e = BRANCH_EDGE (bb);
6199 if (!single_pred_p (e->dest))
6200 e = FALLTHRU_EDGE (bb);
6201 }
6202 else
6203 e = NULL;
6204
6205 if (e
6206 && !((e->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
6207 && e->dest != EXIT_BLOCK_PTR
6208 && single_pred_p (e->dest)
6209 /* Avoid visiting basic blocks twice. The large comment
6210 above explains why this can happen. */
6211 && !TEST_BIT (cse_visited_basic_blocks, e->dest->index))
6212 {
6213 basic_block bb2 = e->dest;
6214 SET_BIT (cse_visited_basic_blocks, bb2->index);
6215 data->path[path_size++].bb = bb2;
6216 bb = bb2;
6217 }
6218 else
6219 bb = NULL;
6220 }
6221 }
6222
6223 done:
6224 data->path_size = path_size;
6225 return path_size != 0;
6226 }
6227 \f
6228 /* Dump the path in DATA to file F. NSETS is the number of sets
6229 in the path. */
6230
6231 static void
6232 cse_dump_path (struct cse_basic_block_data *data, int nsets, FILE *f)
6233 {
6234 int path_entry;
6235
6236 fprintf (f, ";; Following path with %d sets: ", nsets);
6237 for (path_entry = 0; path_entry < data->path_size; path_entry++)
6238 fprintf (f, "%d ", (data->path[path_entry].bb)->index);
6239 fputc ('\n', f);
6240 fflush (f);
6241 }
6242
6243 \f
6244 /* Return true if BB has exception handling successor edges. */
6245
6246 static bool
6247 have_eh_succ_edges (basic_block bb)
6248 {
6249 edge e;
6250 edge_iterator ei;
6251
6252 FOR_EACH_EDGE (e, ei, bb->succs)
6253 if (e->flags & EDGE_EH)
6254 return true;
6255
6256 return false;
6257 }
6258
6259 \f
6260 /* Scan to the end of the path described by DATA. Store an estimate of
6261 the total number of SETs of all insns in the path in DATA->nsets. */
6262
6263 static void
6264 cse_prescan_path (struct cse_basic_block_data *data)
6265 {
6266 int nsets = 0;
6267 int path_size = data->path_size;
6268 int path_entry;
6269
6270 /* Scan to end of each basic block in the path. */
6271 for (path_entry = 0; path_entry < path_size; path_entry++)
6272 {
6273 basic_block bb;
6274 rtx insn;
6275
6276 bb = data->path[path_entry].bb;
6277
6278 FOR_BB_INSNS (bb, insn)
6279 {
6280 if (!INSN_P (insn))
6281 continue;
6282
6283 /* A PARALLEL can have lots of SETs in it,
6284 especially if it is really an ASM_OPERANDS. */
6285 if (GET_CODE (PATTERN (insn)) == PARALLEL)
6286 nsets += XVECLEN (PATTERN (insn), 0);
6287 else
6288 nsets += 1;
6289 }
6290 }
6291
6292 data->nsets = nsets;
6293 }
6294 \f
6295 /* Process a single extended basic block described by EBB_DATA. */
6296
6297 static void
6298 cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
6299 {
6300 int path_size = ebb_data->path_size;
6301 int path_entry;
6302 int num_insns = 0;
6303
6304 /* Allocate the space needed by qty_table. */
6305 qty_table = XNEWVEC (struct qty_table_elem, max_qty);
6306
6307 new_basic_block ();
6308 cse_ebb_live_in = df_get_live_in (ebb_data->path[0].bb);
6309 cse_ebb_live_out = df_get_live_out (ebb_data->path[path_size - 1].bb);
6310 for (path_entry = 0; path_entry < path_size; path_entry++)
6311 {
6312 basic_block bb;
6313 rtx insn;
6314
6315 bb = ebb_data->path[path_entry].bb;
6316
6317 /* Invalidate recorded information for eh regs if there is an EH
6318 edge pointing to that bb. */
6319 if (bb_has_eh_pred (bb))
6320 {
6321 df_ref *def_rec;
6322
6323 for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++)
6324 {
6325 df_ref def = *def_rec;
6326 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
6327 invalidate (DF_REF_REG (def), GET_MODE (DF_REF_REG (def)));
6328 }
6329 }
6330
6331 optimize_this_for_speed_p = optimize_bb_for_speed_p (bb);
6332 FOR_BB_INSNS (bb, insn)
6333 {
6334 /* If we have processed PARAM_MAX_CSE_INSNS insns, flush the hash
6335 table to avoid extreme quadratic behavior. We must not include
6336 NOTEs in the count since there may be more of them when generating
6337 debugging information. If we clear the table at different
6338 times, code generated with -g -O might be different from code
6339 generated with -O but not -g.
6340
6341 FIXME: This is a real kludge and needs to be done some other
6342 way. */
6343 if (NONDEBUG_INSN_P (insn)
6344 && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
6345 {
6346 flush_hash_table ();
6347 num_insns = 0;
6348 }
6349
6350 if (INSN_P (insn))
6351 {
6352 /* Process notes first so we have all notes in canonical forms
6353 when looking for duplicate operations. */
6354 if (REG_NOTES (insn))
6355 {
6356 bool changed = false;
6357 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn),
6358 NULL_RTX, &changed);
6359 if (changed)
6360 df_notes_rescan (insn);
6361 }
6362
6363 cse_insn (insn);
6364
6365 /* If we haven't already found an insn where we added a LABEL_REF,
6366 check this one. */
6367 if (INSN_P (insn) && !recorded_label_ref
6368 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
6369 (void *) insn))
6370 recorded_label_ref = true;
6371
6372 #ifdef HAVE_cc0
6373 if (NONDEBUG_INSN_P (insn))
6374 {
6375 /* If the previous insn sets CC0 and this insn no
6376 longer references CC0, delete the previous insn.
6377 Here we use the fact that nothing expects CC0 to be
6378 valid over an insn, which is true until the final
6379 pass. */
6380 rtx prev_insn, tem;
6381
6382 prev_insn = prev_nonnote_nondebug_insn (insn);
6383 if (prev_insn && NONJUMP_INSN_P (prev_insn)
6384 && (tem = single_set (prev_insn)) != NULL_RTX
6385 && SET_DEST (tem) == cc0_rtx
6386 && ! reg_mentioned_p (cc0_rtx, PATTERN (insn)))
6387 delete_insn (prev_insn);
6388
6389 /* If this insn is not the last insn in the basic
6390 block, it will be PREV_INSN(insn) in the next
6391 iteration. If we recorded any CC0-related
6392 information for this insn, remember it. */
6393 if (insn != BB_END (bb))
6394 {
6395 prev_insn_cc0 = this_insn_cc0;
6396 prev_insn_cc0_mode = this_insn_cc0_mode;
6397 }
6398 }
6399 #endif
6400 }
6401 }
6402
6403 /* With non-call exceptions, we are not always able to update
6404 the CFG properly inside cse_insn. So clean up possibly
6405 redundant EH edges here. */
6406 if (cfun->can_throw_non_call_exceptions && have_eh_succ_edges (bb))
6407 cse_cfg_altered |= purge_dead_edges (bb);
6408
6409 /* If we changed a conditional jump, we may have terminated
6410 the path we are following. Check that by verifying that
6411 the edge we would take still exists. If the edge does
6412 not exist anymore, purge the remainder of the path.
6413 Note that this will cause us to return to the caller. */
6414 if (path_entry < path_size - 1)
6415 {
6416 basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6417 if (!find_edge (bb, next_bb))
6418 {
6419 do
6420 {
6421 path_size--;
6422
6423 /* If we truncate the path, we must also reset the
6424 visited bit on the remaining blocks in the path,
6425 or we will never visit them at all. */
6426 RESET_BIT (cse_visited_basic_blocks,
6427 ebb_data->path[path_size].bb->index);
6428 ebb_data->path[path_size].bb = NULL;
6429 }
6430 while (path_size - 1 != path_entry);
6431 ebb_data->path_size = path_size;
6432 }
6433 }
6434
6435 /* If this is a conditional jump insn, record any known
6436 equivalences due to the condition being tested. */
6437 insn = BB_END (bb);
6438 if (path_entry < path_size - 1
6439 && JUMP_P (insn)
6440 && single_set (insn)
6441 && any_condjump_p (insn))
6442 {
6443 basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6444 bool taken = (next_bb == BRANCH_EDGE (bb)->dest);
6445 record_jump_equiv (insn, taken);
6446 }
6447
6448 #ifdef HAVE_cc0
6449 /* Clear the recorded CC0 state; it cannot provide useful
6450 information across basic block boundaries. */
6451 prev_insn_cc0 = 0;
6452 #endif
6453 }
6454
6455 gcc_assert (next_qty <= max_qty);
6456
6457 free (qty_table);
6458 }
6459
6460 \f
6461 /* Perform cse on the instructions of a function.
6462 F is the first instruction.
6463 NREGS is one plus the highest pseudo-reg number used in the function.
6464
6465 Return 2 if jump optimizations should be redone due to simplifications
6466 in conditional jump instructions.
6467 Return 1 if the CFG should be cleaned up because it has been modified.
6468 Return 0 otherwise. */
6469
6470 int
6471 cse_main (rtx f ATTRIBUTE_UNUSED, int nregs)
6472 {
6473 struct cse_basic_block_data ebb_data;
6474 basic_block bb;
6475 int *rc_order = XNEWVEC (int, last_basic_block);
6476 int i, n_blocks;
6477
6478 df_set_flags (DF_LR_RUN_DCE);
6479 df_analyze ();
6480 df_set_flags (DF_DEFER_INSN_RESCAN);
6481
6482 reg_scan (get_insns (), max_reg_num ());
6483 init_cse_reg_info (nregs);
6484
6485 ebb_data.path = XNEWVEC (struct branch_path,
6486 PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6487
6488 cse_cfg_altered = false;
6489 cse_jumps_altered = false;
6490 recorded_label_ref = false;
6491 constant_pool_entries_cost = 0;
6492 constant_pool_entries_regcost = 0;
6493 ebb_data.path_size = 0;
6494 ebb_data.nsets = 0;
6495 rtl_hooks = cse_rtl_hooks;
6496
6497 init_recog ();
6498 init_alias_analysis ();
6499
6500 reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);
6501
6502 /* Set up the table of already visited basic blocks. */
6503 cse_visited_basic_blocks = sbitmap_alloc (last_basic_block);
6504 sbitmap_zero (cse_visited_basic_blocks);
6505
6506 /* Loop over basic blocks in reverse completion order (reverse post-order, RPO),
6507 excluding the ENTRY and EXIT blocks. */
6508 n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);
6509 i = 0;
6510 while (i < n_blocks)
6511 {
6512 /* Find the first block in the RPO queue that we have not
6513 yet processed. */
6514 do
6515 {
6516 bb = BASIC_BLOCK (rc_order[i++]);
6517 }
6518 while (TEST_BIT (cse_visited_basic_blocks, bb->index)
6519 && i < n_blocks);
6520
6521 /* Find all paths starting with BB, and process them. */
6522 while (cse_find_path (bb, &ebb_data, flag_cse_follow_jumps))
6523 {
6524 /* Pre-scan the path. */
6525 cse_prescan_path (&ebb_data);
6526
6527 /* If this path has no sets, skip it. */
6528 if (ebb_data.nsets == 0)
6529 continue;
6530
6531 /* Get a reasonable estimate for the maximum number of qty's
6532 needed for this path. For this, we take the number of sets
6533 and multiply that by MAX_RECOG_OPERANDS. */
6534 max_qty = ebb_data.nsets * MAX_RECOG_OPERANDS;
6535
6536 /* Dump the path we're about to process. */
6537 if (dump_file)
6538 cse_dump_path (&ebb_data, ebb_data.nsets, dump_file);
6539
6540 cse_extended_basic_block (&ebb_data);
6541 }
6542 }
6543
6544 /* Clean up. */
6545 end_alias_analysis ();
6546 free (reg_eqv_table);
6547 free (ebb_data.path);
6548 sbitmap_free (cse_visited_basic_blocks);
6549 free (rc_order);
6550 rtl_hooks = general_rtl_hooks;
6551
6552 if (cse_jumps_altered || recorded_label_ref)
6553 return 2;
6554 else if (cse_cfg_altered)
6555 return 1;
6556 else
6557 return 0;
6558 }
6559 \f
6560 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for
6561 which there isn't a REG_LABEL_OPERAND note.
6562 Return one if so. DATA is the insn. */
6563
6564 static int
6565 check_for_label_ref (rtx *rtl, void *data)
6566 {
6567 rtx insn = (rtx) data;
6568
6569 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL_OPERAND
6570 note for it, we must rerun jump since it needs to place the note. If
6571 this is a LABEL_REF for a CODE_LABEL that isn't in the insn chain,
6572 don't do this since no REG_LABEL_OPERAND will be added. */
6573 return (GET_CODE (*rtl) == LABEL_REF
6574 && ! LABEL_REF_NONLOCAL_P (*rtl)
6575 && (!JUMP_P (insn)
6576 || !label_is_jump_target_p (XEXP (*rtl, 0), insn))
6577 && LABEL_P (XEXP (*rtl, 0))
6578 && INSN_UID (XEXP (*rtl, 0)) != 0
6579 && ! find_reg_note (insn, REG_LABEL_OPERAND, XEXP (*rtl, 0)));
6580 }
6581 \f
6582 /* Count the number of times registers are used (not set) in X.
6583 COUNTS is an array in which we accumulate the count, INCR is how much
6584 we count each register usage.
6585
6586 Don't count a usage of DEST, which is the SET_DEST of a SET which
6587 contains X in its SET_SRC. This is because such a SET does not
6588 modify the liveness of DEST.
6589 DEST is set to pc_rtx for a trapping insn, or for an insn with side effects.
6590 We must then count uses of a SET_DEST regardless, because the insn can't be
6591 deleted here. */
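/* For instance (made-up pseudos): scanning
   (set (reg 70) (plus (reg 68) (reg 68))) with INCR == 1 adds 2 to
   counts[68] but nothing to counts[70], since being stored into does
   not by itself keep the destination alive. */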
6592
6593 static void
6594 count_reg_usage (rtx x, int *counts, rtx dest, int incr)
6595 {
6596 enum rtx_code code;
6597 rtx note;
6598 const char *fmt;
6599 int i, j;
6600
6601 if (x == 0)
6602 return;
6603
6604 switch (code = GET_CODE (x))
6605 {
6606 case REG:
6607 if (x != dest)
6608 counts[REGNO (x)] += incr;
6609 return;
6610
6611 case PC:
6612 case CC0:
6613 case CONST:
6614 case CONST_INT:
6615 case CONST_DOUBLE:
6616 case CONST_FIXED:
6617 case CONST_VECTOR:
6618 case SYMBOL_REF:
6619 case LABEL_REF:
6620 return;
6621
6622 case CLOBBER:
6623 /* If we are clobbering a MEM, mark any registers inside the address
6624 as being used. */
6625 if (MEM_P (XEXP (x, 0)))
6626 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
6627 return;
6628
6629 case SET:
6630 /* Unless we are setting a REG, count everything in SET_DEST. */
6631 if (!REG_P (SET_DEST (x)))
6632 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
6633 count_reg_usage (SET_SRC (x), counts,
6634 dest ? dest : SET_DEST (x),
6635 incr);
6636 return;
6637
6638 case DEBUG_INSN:
6639 return;
6640
6641 case CALL_INSN:
6642 case INSN:
6643 case JUMP_INSN:
6644 /* We expect dest to be NULL_RTX here. If the insn may trap,
6645 or if it cannot be deleted due to side-effects, mark this fact
6646 by setting DEST to pc_rtx. */
6647 if (insn_could_throw_p (x) || side_effects_p (PATTERN (x)))
6648 dest = pc_rtx;
6649 if (code == CALL_INSN)
6650 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
6651 count_reg_usage (PATTERN (x), counts, dest, incr);
6652
6653 /* Things used in a REG_EQUAL note aren't dead since the loop
6654 optimizer may try to use them. */
6655
6656 note = find_reg_equal_equiv_note (x);
6657 if (note)
6658 {
6659 rtx eqv = XEXP (note, 0);
6660
6661 if (GET_CODE (eqv) == EXPR_LIST)
6662 /* This REG_EQUAL note describes the result of a function call.
6663 Process all the arguments. */
6664 do
6665 {
6666 count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
6667 eqv = XEXP (eqv, 1);
6668 }
6669 while (eqv && GET_CODE (eqv) == EXPR_LIST);
6670 else
6671 count_reg_usage (eqv, counts, dest, incr);
6672 }
6673 return;
6674
6675 case EXPR_LIST:
6676 if (REG_NOTE_KIND (x) == REG_EQUAL
6677 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE)
6678 /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
6679 involving registers in the address. */
6680 || GET_CODE (XEXP (x, 0)) == CLOBBER)
6681 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
6682
6683 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
6684 return;
6685
6686 case ASM_OPERANDS:
6687 /* Iterate over just the inputs, not the constraints as well. */
6688 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
6689 count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
6690 return;
6691
6692 case INSN_LIST:
6693 gcc_unreachable ();
6694
6695 default:
6696 break;
6697 }
6698
6699 fmt = GET_RTX_FORMAT (code);
6700 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6701 {
6702 if (fmt[i] == 'e')
6703 count_reg_usage (XEXP (x, i), counts, dest, incr);
6704 else if (fmt[i] == 'E')
6705 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6706 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
6707 }
6708 }
6709 \f
6710 /* Return true if X is a dead register. */
6711
6712 static inline int
6713 is_dead_reg (rtx x, int *counts)
6714 {
6715 return (REG_P (x)
6716 && REGNO (x) >= FIRST_PSEUDO_REGISTER
6717 && counts[REGNO (x)] == 0);
6718 }
6719
6720 /* Return true if SET is live. */
6721 static bool
6722 set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0. */
6723 int *counts)
6724 {
6725 #ifdef HAVE_cc0
6726 rtx tem;
6727 #endif
6728
6729 if (set_noop_p (set))
6730 ;
6731
6732 #ifdef HAVE_cc0
6733 else if (GET_CODE (SET_DEST (set)) == CC0
6734 && !side_effects_p (SET_SRC (set))
6735 && ((tem = next_nonnote_nondebug_insn (insn)) == NULL_RTX
6736 || !INSN_P (tem)
6737 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
6738 return false;
6739 #endif
6740 else if (!is_dead_reg (SET_DEST (set), counts)
6741 || side_effects_p (SET_SRC (set)))
6742 return true;
6743 return false;
6744 }
6745
6746 /* Return true if INSN is live. */
6747
6748 static bool
6749 insn_live_p (rtx insn, int *counts)
6750 {
6751 int i;
6752 if (insn_could_throw_p (insn))
6753 return true;
6754 else if (GET_CODE (PATTERN (insn)) == SET)
6755 return set_live_p (PATTERN (insn), insn, counts);
6756 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
6757 {
6758 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6759 {
6760 rtx elt = XVECEXP (PATTERN (insn), 0, i);
6761
6762 if (GET_CODE (elt) == SET)
6763 {
6764 if (set_live_p (elt, insn, counts))
6765 return true;
6766 }
6767 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
6768 return true;
6769 }
6770 return false;
6771 }
6772 else if (DEBUG_INSN_P (insn))
6773 {
6774 rtx next;
6775
6776 for (next = NEXT_INSN (insn); next; next = NEXT_INSN (next))
6777 if (NOTE_P (next))
6778 continue;
6779 else if (!DEBUG_INSN_P (next))
6780 return true;
6781 else if (INSN_VAR_LOCATION_DECL (insn) == INSN_VAR_LOCATION_DECL (next))
6782 return false;
6783
6784 return true;
6785 }
6786 else
6787 return true;
6788 }
6789
6790 /* Count the number of stores into each pseudo. Callback for note_stores. */
6791
6792 static void
6793 count_stores (rtx x, const_rtx set ATTRIBUTE_UNUSED, void *data)
6794 {
6795 int *counts = (int *) data;
6796 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
6797 counts[REGNO (x)]++;
6798 }
6799
6800 struct dead_debug_insn_data
6801 {
6802 int *counts;
6803 rtx *replacements;
6804 bool seen_repl;
6805 };
6806
6807 /* Return nonzero if a DEBUG_INSN needs to be reset because some dead
6808 pseudo doesn't have a replacement. Callback for for_each_rtx. */
6809
6810 static int
6811 is_dead_debug_insn (rtx *loc, void *data)
6812 {
6813 rtx x = *loc;
6814 struct dead_debug_insn_data *ddid = (struct dead_debug_insn_data *) data;
6815
6816 if (is_dead_reg (x, ddid->counts))
6817 {
6818 if (ddid->replacements && ddid->replacements[REGNO (x)] != NULL_RTX)
6819 ddid->seen_repl = true;
6820 else
6821 return 1;
6822 }
6823 return 0;
6824 }
6825
6826 /* Replace a dead pseudo in a DEBUG_INSN with replacement DEBUG_EXPR.
6827 Callback for simplify_replace_fn_rtx. */
6828
6829 static rtx
6830 replace_dead_reg (rtx x, const_rtx old_rtx ATTRIBUTE_UNUSED, void *data)
6831 {
6832 rtx *replacements = (rtx *) data;
6833
6834 if (REG_P (x)
6835 && REGNO (x) >= FIRST_PSEUDO_REGISTER
6836 && replacements[REGNO (x)] != NULL_RTX)
6837 {
6838 if (GET_MODE (x) == GET_MODE (replacements[REGNO (x)]))
6839 return replacements[REGNO (x)];
6840 return lowpart_subreg (GET_MODE (x), replacements[REGNO (x)],
6841 GET_MODE (replacements[REGNO (x)]));
6842 }
6843 return NULL_RTX;
6844 }
6845
6846 /* Scan all the insns and delete any that are dead; i.e., they store a register
6847 that is never used or they copy a register to itself.
6848
6849 This is used to remove insns made obviously dead by cse, loop or other
6850 optimizations. It improves the heuristics in the loop optimizer since
6851 it won't try to move dead invariants out of loops or make givs for dead
6852 quantities. The remaining passes of the compilation are also sped up. */
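/* A typical invocation, as in the pass wrappers that run after CSE
   (a sketch, not tied to one particular caller):

      ndead = delete_trivially_dead_insns (get_insns (), max_reg_num ());

   where the return value is the number of insns deleted. */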
6853
6854 int
6855 delete_trivially_dead_insns (rtx insns, int nreg)
6856 {
6857 int *counts;
6858 rtx insn, prev;
6859 rtx *replacements = NULL;
6860 int ndead = 0;
6861
6862 timevar_push (TV_DELETE_TRIVIALLY_DEAD);
6863 /* First count the number of times each register is used. */
6864 if (MAY_HAVE_DEBUG_INSNS)
6865 {
6866 counts = XCNEWVEC (int, nreg * 3);
6867 for (insn = insns; insn; insn = NEXT_INSN (insn))
6868 if (DEBUG_INSN_P (insn))
6869 count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
6870 NULL_RTX, 1);
6871 else if (INSN_P (insn))
6872 {
6873 count_reg_usage (insn, counts, NULL_RTX, 1);
6874 note_stores (PATTERN (insn), count_stores, counts + nreg * 2);
6875 }
6876 /* If there can be debug insns, COUNTS are 3 consecutive arrays.
6877 First one counts how many times each pseudo is used outside
6878 of debug insns, second counts how many times each pseudo is
6879 used in debug insns and third counts how many times a pseudo
6880 is stored. */
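/* For instance, if NREG were 100 (a made-up figure), the data for
   pseudo 68 would live at counts[68] (non-debug uses), counts[168]
   (debug-insn uses) and counts[268] (stores). */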
6881 }
6882 else
6883 {
6884 counts = XCNEWVEC (int, nreg);
6885 for (insn = insns; insn; insn = NEXT_INSN (insn))
6886 if (INSN_P (insn))
6887 count_reg_usage (insn, counts, NULL_RTX, 1);
6888 /* If no debug insns can be present, COUNTS is just an array
6889 which counts how many times each pseudo is used. */
6890 }
6891 /* Go from the last insn to the first and delete insns that only set unused
6892 registers or copy a register to itself. As we delete an insn, remove
6893 usage counts for registers it uses.
6894
6895 The first jump optimization pass may leave a real insn as the last
6896 insn in the function. We must not skip that insn or we may end
6897 up deleting code that is not really dead.
6898
6899 If some otherwise unused register is only used in DEBUG_INSNs,
6900 try to create a DEBUG_EXPR temporary and emit a DEBUG_INSN before
6901 the setter. Then go through DEBUG_INSNs and if a DEBUG_EXPR
6902 has been created for the unused register, replace it with
6903 the DEBUG_EXPR, otherwise reset the DEBUG_INSN. */
6904 for (insn = get_last_insn (); insn; insn = prev)
6905 {
6906 int live_insn = 0;
6907
6908 prev = PREV_INSN (insn);
6909 if (!INSN_P (insn))
6910 continue;
6911
6912 live_insn = insn_live_p (insn, counts);
6913
6914 /* If this is a dead insn, delete it and update the usage counts to
6915 show that the registers it references are no longer used. */
6916
6917 if (! live_insn && dbg_cnt (delete_trivial_dead))
6918 {
6919 if (DEBUG_INSN_P (insn))
6920 count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
6921 NULL_RTX, -1);
6922 else
6923 {
6924 rtx set;
6925 if (MAY_HAVE_DEBUG_INSNS
6926 && (set = single_set (insn)) != NULL_RTX
6927 && is_dead_reg (SET_DEST (set), counts)
6928 /* Used at least once in some DEBUG_INSN. */
6929 && counts[REGNO (SET_DEST (set)) + nreg] > 0
6930 /* And set exactly once. */
6931 && counts[REGNO (SET_DEST (set)) + nreg * 2] == 1
6932 && !side_effects_p (SET_SRC (set))
6933 && asm_noperands (PATTERN (insn)) < 0)
6934 {
6935 rtx dval, bind;
6936
6937 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
6938 dval = make_debug_expr_from_rtl (SET_DEST (set));
6939
6940 /* Emit a debug bind insn just before the (dead) insn
6941 that set the reg. */
6942 bind = gen_rtx_VAR_LOCATION (GET_MODE (SET_DEST (set)),
6943 DEBUG_EXPR_TREE_DECL (dval),
6944 SET_SRC (set),
6945 VAR_INIT_STATUS_INITIALIZED);
6946 count_reg_usage (bind, counts + nreg, NULL_RTX, 1);
6947
6948 bind = emit_debug_insn_before (bind, insn);
6949 df_insn_rescan (bind);
6950
6951 if (replacements == NULL)
6952 replacements = XCNEWVEC (rtx, nreg);
6953 replacements[REGNO (SET_DEST (set))] = dval;
6954 }
6955
6956 count_reg_usage (insn, counts, NULL_RTX, -1);
6957 ndead++;
6958 }
6959 delete_insn_and_edges (insn);
6960 }
6961 }
6962
6963 if (MAY_HAVE_DEBUG_INSNS)
6964 {
6965 struct dead_debug_insn_data ddid;
6966 ddid.counts = counts;
6967 ddid.replacements = replacements;
6968 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
6969 if (DEBUG_INSN_P (insn))
6970 {
6971 /* If this debug insn references a dead register that wasn't replaced
6972 with a DEBUG_EXPR, reset the DEBUG_INSN. */
6973 ddid.seen_repl = false;
6974 if (for_each_rtx (&INSN_VAR_LOCATION_LOC (insn),
6975 is_dead_debug_insn, &ddid))
6976 {
6977 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
6978 df_insn_rescan (insn);
6979 }
6980 else if (ddid.seen_repl)
6981 {
6982 INSN_VAR_LOCATION_LOC (insn)
6983 = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn),
6984 NULL_RTX, replace_dead_reg,
6985 replacements);
6986 df_insn_rescan (insn);
6987 }
6988 }
6989 free (replacements);
6990 }
6991
6992 if (dump_file && ndead)
6993 fprintf (dump_file, "Deleted %i trivially dead insns\n",
6994 ndead);
6995 /* Clean up. */
6996 free (counts);
6997 timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
6998 return ndead;
6999 }
7000
7001 /* This function is called via for_each_rtx. DATA carries NEWREG,
7002 a condition code register with the desired mode. If we are looking
7003 at the same register in a different mode, replace it with
7004 NEWREG. */
7005
7006 static int
7007 cse_change_cc_mode (rtx *loc, void *data)
7008 {
7009 struct change_cc_mode_args* args = (struct change_cc_mode_args*)data;
7010
7011 if (*loc
7012 && REG_P (*loc)
7013 && REGNO (*loc) == REGNO (args->newreg)
7014 && GET_MODE (*loc) != GET_MODE (args->newreg))
7015 {
7016 validate_change (args->insn, loc, args->newreg, 1);
7017
7018 return -1;
7019 }
7020 return 0;
7021 }
7022
7023 /* Change the mode of any reference to the register REGNO (NEWREG) to
7024 GET_MODE (NEWREG) in INSN. */
7025
7026 static void
7027 cse_change_cc_mode_insn (rtx insn, rtx newreg)
7028 {
7029 struct change_cc_mode_args args;
7030 int success;
7031
7032 if (!INSN_P (insn))
7033 return;
7034
7035 args.insn = insn;
7036 args.newreg = newreg;
7037
7038 for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
7039 for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, &args);
7040
7041 /* If the following assertion is triggered, there is most probably
7042 something wrong with the cc_modes_compatible back end function.
7043 CC modes can only be considered compatible if the insn - with the mode
7044 replaced by any of the compatible modes - can still be recognized. */
7045 success = apply_change_group ();
7046 gcc_assert (success);
7047 }
7048
7049 /* Change the mode of any reference to the register REGNO (NEWREG) to
7050 GET_MODE (NEWREG), starting at START. Stop before END. Stop at
7051 any instruction which modifies NEWREG. */
7052
7053 static void
7054 cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
7055 {
7056 rtx insn;
7057
7058 for (insn = start; insn != end; insn = NEXT_INSN (insn))
7059 {
7060 if (! INSN_P (insn))
7061 continue;
7062
7063 if (reg_set_p (newreg, insn))
7064 return;
7065
7066 cse_change_cc_mode_insn (insn, newreg);
7067 }
7068 }
7069
7070 /* BB is a basic block which finishes with CC_REG as a condition code
7071 register which is set to CC_SRC. Look through the successors of BB
7072 to find blocks which have a single predecessor (i.e., this one),
7073 and look through those blocks for an assignment to CC_REG which is
7074 equivalent to CC_SRC. CAN_CHANGE_MODE indicates whether we are
7075 permitted to change the mode of CC_SRC to a compatible mode. This
7076 returns VOIDmode if no equivalent assignments were found.
7077 Otherwise it returns the mode which CC_SRC should wind up with.
7078 ORIG_BB should be the same as BB in the outermost cse_cc_succs call,
7079 but is passed unmodified down to recursive calls in order to prevent
7080 endless recursion.
7081
7082 The main complexity in this function is handling the mode issues.
7083 We may have more than one duplicate which we can eliminate, and we
7084 try to find a mode which will work for multiple duplicates. */
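/* A hypothetical example of the redundancy this function removes
   (pseudo RTL; modes and register numbers invented):

     bb1:  (set (reg:CCZ flags) (compare:CCZ (reg:SI 60) (const_int 0)))
           conditional jump on flags
     bb2:  (set (reg:CCGC flags) (compare:CCGC (reg:SI 60) (const_int 0)))
           conditional jump on flags

   If bb1 is bb2's only predecessor and reg 60 is not modified in
   between, the second compare is redundant.  Since its mode differs,
   targetm.cc_modes_compatible is asked for a mode valid for both
   uses; if one exists (and CAN_CHANGE_MODE permits), the compare in
   bb2 is deleted and later references to the flags register are
   rewritten to the chosen mode.  */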
7085
7086 static enum machine_mode
7087 cse_cc_succs (basic_block bb, basic_block orig_bb, rtx cc_reg, rtx cc_src,
7088 bool can_change_mode)
7089 {
7090 bool found_equiv;
7091 enum machine_mode mode;
7092 unsigned int insn_count;
7093 edge e;
7094 rtx insns[2];
7095 enum machine_mode modes[2];
7096 rtx last_insns[2];
7097 unsigned int i;
7098 rtx newreg;
7099 edge_iterator ei;
7100
7101 /* We expect to have two successors. Look at both before picking
7102 the final mode for the comparison. If we have more successors
7103 (i.e., some sort of table jump, although that seems unlikely),
7104 then we require all beyond the first two to use the same
7105 mode. */
7106
7107 found_equiv = false;
7108 mode = GET_MODE (cc_src);
7109 insn_count = 0;
7110 FOR_EACH_EDGE (e, ei, bb->succs)
7111 {
7112 rtx insn;
7113 rtx end;
7114
7115 if (e->flags & EDGE_COMPLEX)
7116 continue;
7117
7118 if (EDGE_COUNT (e->dest->preds) != 1
7119 || e->dest == EXIT_BLOCK_PTR
7120 /* Avoid endless recursion on unreachable blocks. */
7121 || e->dest == orig_bb)
7122 continue;
7123
7124 end = NEXT_INSN (BB_END (e->dest));
7125 for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
7126 {
7127 rtx set;
7128
7129 if (! INSN_P (insn))
7130 continue;
7131
7132 /* If CC_SRC is modified, we have to stop looking for
7133 something which uses it. */
7134 if (modified_in_p (cc_src, insn))
7135 break;
7136
7137 /* Check whether INSN sets CC_REG to CC_SRC. */
7138 set = single_set (insn);
7139 if (set
7140 && REG_P (SET_DEST (set))
7141 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7142 {
7143 bool found;
7144 enum machine_mode set_mode;
7145 enum machine_mode comp_mode;
7146
7147 found = false;
7148 set_mode = GET_MODE (SET_SRC (set));
7149 comp_mode = set_mode;
7150 if (rtx_equal_p (cc_src, SET_SRC (set)))
7151 found = true;
7152 else if (GET_CODE (cc_src) == COMPARE
7153 && GET_CODE (SET_SRC (set)) == COMPARE
7154 && mode != set_mode
7155 && rtx_equal_p (XEXP (cc_src, 0),
7156 XEXP (SET_SRC (set), 0))
7157 && rtx_equal_p (XEXP (cc_src, 1),
7158 XEXP (SET_SRC (set), 1)))
7160 {
7161 comp_mode = targetm.cc_modes_compatible (mode, set_mode);
7162 if (comp_mode != VOIDmode
7163 && (can_change_mode || comp_mode == mode))
7164 found = true;
7165 }
7166
7167 if (found)
7168 {
7169 found_equiv = true;
7170 if (insn_count < ARRAY_SIZE (insns))
7171 {
7172 insns[insn_count] = insn;
7173 modes[insn_count] = set_mode;
7174 last_insns[insn_count] = end;
7175 ++insn_count;
7176
7177 if (mode != comp_mode)
7178 {
7179 gcc_assert (can_change_mode);
7180 mode = comp_mode;
7181
7182 /* The modified insn will be re-recognized later. */
7183 PUT_MODE (cc_src, mode);
7184 }
7185 }
7186 else
7187 {
7188 if (set_mode != mode)
7189 {
7190 /* We found a matching expression in the
7191 wrong mode, but we don't have room to
7192 store it in the array. Punt. This case
7193 should be rare. */
7194 break;
7195 }
7196 /* INSN sets CC_REG to a value equal to CC_SRC
7197 with the right mode. We can simply delete
7198 it. */
7199 delete_insn (insn);
7200 }
7201
7202 /* We found an instruction to delete. Keep looking,
7203 in the hopes of finding a three-way jump. */
7204 continue;
7205 }
7206
7207 /* We found an instruction which sets the condition
7208 code, so don't look any farther. */
7209 break;
7210 }
7211
7212 /* If INSN sets CC_REG in some other way, don't look any
7213 farther. */
7214 if (reg_set_p (cc_reg, insn))
7215 break;
7216 }
7217
7218 /* If we fell off the bottom of the block, we can keep looking
7219 through successors. We pass CAN_CHANGE_MODE as false because
7220 we aren't prepared to handle compatibility between the
7221 further blocks and this block. */
7222 if (insn == end)
7223 {
7224 enum machine_mode submode;
7225
7226 submode = cse_cc_succs (e->dest, orig_bb, cc_reg, cc_src, false);
7227 if (submode != VOIDmode)
7228 {
7229 gcc_assert (submode == mode);
7230 found_equiv = true;
7231 can_change_mode = false;
7232 }
7233 }
7234 }
7235
7236 if (! found_equiv)
7237 return VOIDmode;
7238
7239 /* Now INSN_COUNT is the number of instructions we found which set
7240 CC_REG to a value equivalent to CC_SRC. The instructions are in
7241 INSNS. The modes used by those instructions are in MODES. */
7242
7243 newreg = NULL_RTX;
7244 for (i = 0; i < insn_count; ++i)
7245 {
7246 if (modes[i] != mode)
7247 {
7248 /* We need to change the mode of CC_REG in INSNS[i] and
7249 subsequent instructions. */
7250 if (! newreg)
7251 {
7252 if (GET_MODE (cc_reg) == mode)
7253 newreg = cc_reg;
7254 else
7255 newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7256 }
7257 cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7258 newreg);
7259 }
7260
7261 delete_insn_and_edges (insns[i]);
7262 }
7263
7264 return mode;
7265 }
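/* For instance (modes hypothetical), if MODE settled on CCmode while
   INSNS[0] originally used CCGCmode, the loop above materializes
   (reg:CC flags) via gen_rtx_REG and rewrites references to the
   condition code register from just after INSNS[0] to the end of its
   block, so the consumers of the deleted compare agree with the
   surviving one.  */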
7266
7267 /* If we have a fixed condition code register (or two), walk through
7268 the instructions and try to eliminate duplicate assignments. */
7269
7270 static void
7271 cse_condition_code_reg (void)
7272 {
7273 unsigned int cc_regno_1;
7274 unsigned int cc_regno_2;
7275 rtx cc_reg_1;
7276 rtx cc_reg_2;
7277 basic_block bb;
7278
7279 if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
7280 return;
7281
7282 cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7283 if (cc_regno_2 != INVALID_REGNUM)
7284 cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7285 else
7286 cc_reg_2 = NULL_RTX;
7287
7288 FOR_EACH_BB (bb)
7289 {
7290 rtx last_insn;
7291 rtx cc_reg;
7292 rtx insn;
7293 rtx cc_src_insn;
7294 rtx cc_src;
7295 enum machine_mode mode;
7296 enum machine_mode orig_mode;
7297
7298 /* Look for blocks which end with a conditional jump based on a
7299 condition code register. Then look for the instruction which
7300 sets the condition code register. Then look through the
7301 successor blocks for instructions which set the condition
7302 code register to the same value. There are other possible
7303 uses of the condition code register, but these are by far the
7304 most common and the ones which we are most likely to be able
7305 to optimize. */
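/* For example (hypothetical insns), a block ending in

     (set (reg:CCZ flags) (compare:CCZ (reg:SI 60) (reg:SI 61)))
     (jump_insn ... (if_then_else (eq (reg:CCZ flags) (const_int 0)) ...))

   yields CC_SRC_INSN/CC_SRC from the backward walk below, and
   cse_cc_succs then checks whether any successor recomputes the same
   comparison.  */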
7306
7307 last_insn = BB_END (bb);
7308 if (!JUMP_P (last_insn))
7309 continue;
7310
7311 if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7312 cc_reg = cc_reg_1;
7313 else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7314 cc_reg = cc_reg_2;
7315 else
7316 continue;
7317
7318 cc_src_insn = NULL_RTX;
7319 cc_src = NULL_RTX;
7320 for (insn = PREV_INSN (last_insn);
7321 insn && insn != PREV_INSN (BB_HEAD (bb));
7322 insn = PREV_INSN (insn))
7323 {
7324 rtx set;
7325
7326 if (! INSN_P (insn))
7327 continue;
7328 set = single_set (insn);
7329 if (set
7330 && REG_P (SET_DEST (set))
7331 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7332 {
7333 cc_src_insn = insn;
7334 cc_src = SET_SRC (set);
7335 break;
7336 }
7337 else if (reg_set_p (cc_reg, insn))
7338 break;
7339 }
7340
7341 if (! cc_src_insn)
7342 continue;
7343
7344 if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7345 continue;
7346
7347 /* Now CC_REG is a condition code register used for a
7348 conditional jump at the end of the block, and CC_SRC, in
7349 CC_SRC_INSN, is the value to which that condition code
7350 register is set, and CC_SRC is still meaningful at the end of
7351 the basic block. */
7352
7353 orig_mode = GET_MODE (cc_src);
7354 mode = cse_cc_succs (bb, bb, cc_reg, cc_src, true);
7355 if (mode != VOIDmode)
7356 {
7357 gcc_assert (mode == GET_MODE (cc_src));
7358 if (mode != orig_mode)
7359 {
7360 rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7361
7362 cse_change_cc_mode_insn (cc_src_insn, newreg);
7363
7364 /* Do the same in the following insns that use the
7365 current value of CC_REG within BB. */
7366 cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
7367 NEXT_INSN (last_insn),
7368 newreg);
7369 }
7370 }
7371 }
7372 }
7373 \f
7374
7375 /* Perform common subexpression elimination.  A return value of 2 from
7376 `cse_main' means that jumps were simplified and some code may now
7377 be unreachable, so do jump optimization again; a value of 1 means
the CFG has changed and should be cleaned up.  */
7378 static bool
7379 gate_handle_cse (void)
7380 {
7381 return optimize > 0;
7382 }
7383
7384 static unsigned int
7385 rest_of_handle_cse (void)
7386 {
7387 int tem;
7388
7389 if (dump_file)
7390 dump_flow_info (dump_file, dump_flags);
7391
7392 tem = cse_main (get_insns (), max_reg_num ());
7393
7394 /* If we are not running more CSE passes, then we are no longer
7395 expecting CSE to be run. But always rerun it in a cheap mode. */
7396 cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
7397
7398 if (tem == 2)
7399 {
7400 timevar_push (TV_JUMP);
7401 rebuild_jump_labels (get_insns ());
7402 cleanup_cfg (0);
7403 timevar_pop (TV_JUMP);
7404 }
7405 else if (tem == 1 || optimize > 1)
7406 cleanup_cfg (0);
7407
7408 return 0;
7409 }
7410
7411 struct rtl_opt_pass pass_cse =
7412 {
7413 {
7414 RTL_PASS,
7415 "cse1", /* name */
7416 gate_handle_cse, /* gate */
7417 rest_of_handle_cse, /* execute */
7418 NULL, /* sub */
7419 NULL, /* next */
7420 0, /* static_pass_number */
7421 TV_CSE, /* tv_id */
7422 0, /* properties_required */
7423 0, /* properties_provided */
7424 0, /* properties_destroyed */
7425 0, /* todo_flags_start */
7426 TODO_df_finish | TODO_verify_rtl_sharing |
7427 TODO_ggc_collect |
7428 TODO_verify_flow, /* todo_flags_finish */
7429 }
7430 };
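/* A sketch of how this descriptor is consumed, assuming the usual
   registration in init_optimization_passes in passes.c (its position
   in the pass list is not shown here):

     NEXT_PASS (pass_cse);

   The gate runs first; if it returns false, the execute function and
   the todo flags are skipped entirely.  */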
7431
7432
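/* Gate for the second CSE pass.  flag_rerun_cse_after_loop corresponds
   to -frerun-cse-after-loop, enabled by default at -O2 and above.  */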
7433 static bool
7434 gate_handle_cse2 (void)
7435 {
7436 return optimize > 0 && flag_rerun_cse_after_loop;
7437 }
7438
7439 /* Run second CSE pass after loop optimizations. */
7440 static unsigned int
7441 rest_of_handle_cse2 (void)
7442 {
7443 int tem;
7444
7445 if (dump_file)
7446 dump_flow_info (dump_file, dump_flags);
7447
7448 tem = cse_main (get_insns (), max_reg_num ());
7449
7450 /* Run a pass to eliminate duplicated assignments to condition code
7451 registers.  We have to run this after bypass_jumps, because this
7452 pass would otherwise make it harder for bypass_jumps to determine
7453 whether a jump can be bypassed safely.  */
7454 cse_condition_code_reg ();
7455
7456 delete_trivially_dead_insns (get_insns (), max_reg_num ());
7457
7458 if (tem == 2)
7459 {
7460 timevar_push (TV_JUMP);
7461 rebuild_jump_labels (get_insns ());
7462 cleanup_cfg (0);
7463 timevar_pop (TV_JUMP);
7464 }
7465 else if (tem == 1)
7466 cleanup_cfg (0);
7467
7468 cse_not_expected = 1;
7469 return 0;
7470 }
7471
7472
7473 struct rtl_opt_pass pass_cse2 =
7474 {
7475 {
7476 RTL_PASS,
7477 "cse2", /* name */
7478 gate_handle_cse2, /* gate */
7479 rest_of_handle_cse2, /* execute */
7480 NULL, /* sub */
7481 NULL, /* next */
7482 0, /* static_pass_number */
7483 TV_CSE2, /* tv_id */
7484 0, /* properties_required */
7485 0, /* properties_provided */
7486 0, /* properties_destroyed */
7487 0, /* todo_flags_start */
7488 TODO_df_finish | TODO_verify_rtl_sharing |
7489 TODO_ggc_collect |
7490 TODO_verify_flow /* todo_flags_finish */
7491 }
7492 };
7493
7494 static bool
7495 gate_handle_cse_after_global_opts (void)
7496 {
7497 return optimize > 0 && flag_rerun_cse_after_global_opts;
7498 }
7499
7500 /* Run a local CSE pass after global optimizations such as GCSE.  */
7501 static unsigned int
7502 rest_of_handle_cse_after_global_opts (void)
7503 {
7504 int save_cfj;
7505 int tem;
7506
7507 /* We only want to do local CSE, so don't follow jumps. */
7508 save_cfj = flag_cse_follow_jumps;
7509 flag_cse_follow_jumps = 0;
7510
7511 rebuild_jump_labels (get_insns ());
7512 tem = cse_main (get_insns (), max_reg_num ());
7513 purge_all_dead_edges ();
7514 delete_trivially_dead_insns (get_insns (), max_reg_num ());
7515
7516 cse_not_expected = !flag_rerun_cse_after_loop;
7517
7518 /* If cse altered any jumps, rerun jump opts to clean things up. */
7519 if (tem == 2)
7520 {
7521 timevar_push (TV_JUMP);
7522 rebuild_jump_labels (get_insns ());
7523 cleanup_cfg (0);
7524 timevar_pop (TV_JUMP);
7525 }
7526 else if (tem == 1)
7527 cleanup_cfg (0);
7528
7529 flag_cse_follow_jumps = save_cfj;
7530 return 0;
7531 }
7532
7533 struct rtl_opt_pass pass_cse_after_global_opts =
7534 {
7535 {
7536 RTL_PASS,
7537 "cse_local", /* name */
7538 gate_handle_cse_after_global_opts, /* gate */
7539 rest_of_handle_cse_after_global_opts, /* execute */
7540 NULL, /* sub */
7541 NULL, /* next */
7542 0, /* static_pass_number */
7543 TV_CSE, /* tv_id */
7544 0, /* properties_required */
7545 0, /* properties_provided */
7546 0, /* properties_destroyed */
7547 0, /* todo_flags_start */
7548 TODO_df_finish | TODO_verify_rtl_sharing |
7549 TODO_ggc_collect |
7550 TODO_verify_flow /* todo_flags_finish */
7551 }
7552 };