1 /* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987-2019 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
31 #include "insn-config.h"
37 #include "cfgcleanup.h"
41 #include "rtlhooks-def.h"
42 #include "tree-pass.h"
46 #include "function-abi.h"
48 /* The basic idea of common subexpression elimination is to go
49 through the code, keeping a record of expressions that would
50 have the same value at the current scan point, and replacing
51 expressions encountered with the cheapest equivalent expression.
53 It is too complicated to keep track of the different possibilities
54 when control paths merge in this code; so, at each label, we forget all
55 that is known and start fresh. This can be described as processing each
56 extended basic block separately.  We have a separate pass to perform global CSE.
59 Note CSE can turn a conditional or computed jump into a nop or
60 an unconditional jump. When this occurs we arrange to run the jump
61 optimizer after CSE to delete the unreachable code.
63 We use two data structures to record the equivalent expressions:
64 a hash table for most expressions, and a vector of "quantity
65 numbers" to record equivalent (pseudo) registers.
67 The use of the special data structure for registers is desirable
68 because it is faster.  It is possible because register references
69 contain a fairly small number, the register number, taken from
70 a contiguously allocated series, and two register references are
71 identical if they have the same number. General expressions
72 do not have any such thing, so the only way to retrieve the
73 information recorded on an expression other than a register
74 is to keep it in a hash table.
76 Registers and "quantity numbers":
78 At the start of each basic block, all of the (hardware and pseudo)
79 registers used in the function are given distinct quantity
80 numbers to indicate their contents. During scan, when the code
81 copies one register into another, we copy the quantity number.
82 When a register is loaded in any other way, we allocate a new
83 quantity number to describe the value generated by this operation.
84 `REG_QTY (N)' records what quantity register N is currently thought of containing.
87 All real quantity numbers are greater than or equal to zero.
88 If register N has not been assigned a quantity, `REG_QTY (N)' will
89 equal -N - 1, which is always negative.
91 Quantity numbers below zero do not exist and none of the `qty_table'
92 entries should be referenced with a negative index.
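
   (As a minimal illustrative sketch, not part of the pass: under this
   encoding, "register N has a quantity" is simply the test

	REG_QTY (N) >= 0

   since an unassigned register holds the negative value -N - 1; this is
   exactly what the REGNO_QTY_VALID_P macro below checks.)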
94 We also maintain a bidirectional chain of registers for each
95 quantity number. The `qty_table` members `first_reg' and `last_reg',
96 and `reg_eqv_table' members `next' and `prev' hold these chains.
98 The first register in a chain is the one whose lifespan is least local.
99 Among equals, it is the one that was seen first.
100 We replace any equivalent register with that one.
102 If two registers have the same quantity number, it must be true that
103 REG expressions with qty_table `mode' must be in the hash table for both
104 registers and must be in the same class.
106 The converse is not true. Since hard registers may be referenced in
107 any mode, two REG expressions might be equivalent in the hash table
108 but not have the same quantity number if the quantity number of one
109 of the registers is not the same mode as those expressions.
111 Constants and quantity numbers
113 When a quantity has a known constant value, that value is stored
114 in the appropriate qty_table `const_rtx'. This is in addition to
115 putting the constant in the hash table as is usual for non-regs.
117 Whether a reg or a constant is preferred is determined by the configuration
118 macro CONST_COSTS and will often depend on the constant value. In any
119 event, expressions containing constants can be simplified, by fold_rtx.
121 When a quantity has a known nearly constant value (such as an address
122 of a stack slot), that value is stored in the appropriate qty_table `const_rtx'.
125 Integer constants don't have a machine mode. However, cse
126 determines the intended machine mode from the destination
127 of the instruction that moves the constant. The machine mode
128 is recorded in the hash table along with the actual RTL
129 constant expression so that different modes are kept separate.
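
   For example (purely illustrative), after the two insns

	(set (reg:SI 100) (const_int 4))
	(set (reg:DI 101) (const_int 4))

   the value 4 is entered in the hash table once as an SImode constant and
   once as a DImode constant, and the two entries are kept distinct.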
133 To record known equivalences among expressions in general
134 we use a hash table called `table'. It has a fixed number of buckets
135 that contain chains of `struct table_elt' elements for expressions.
136 These chains connect the elements whose expressions have the same hash codes.
139 Other chains through the same elements connect the elements which
140 currently have equivalent values.
142 Register references in an expression are canonicalized before hashing
143 the expression. This is done using `reg_qty' and qty_table `first_reg'.
144 The hash code of a register reference is computed using the quantity
145 number, not the register number.
147 When the value of an expression changes, it is necessary to remove from the
148 hash table not just that expression but all expressions whose values
149 could be different as a result.
151 1. If the value changing is in memory, except in special cases
152 ANYTHING referring to memory could be changed. That is because
153 nobody knows where a pointer does not point.
154 The function `invalidate_memory' removes what is necessary.
156 The special cases are when the address is constant or is
157 a constant plus a fixed register such as the frame pointer
158 or a static chain pointer. When such addresses are stored in,
159 we can tell exactly which other such addresses must be invalidated
160 due to overlap. `invalidate' does this.
161 All expressions that refer to non-constant
162 memory addresses are also invalidated. `invalidate_memory' does this.
164 2. If the value changing is a register, all expressions
165 containing references to that register, and only those, must be removed from the hash table.
168 Because searching the entire hash table for expressions that contain
169 a register is very slow, we try to figure out when it isn't necessary.
170 Precisely, this is necessary only when expressions have been
171 entered in the hash table using this register, and then the value has
172 changed, and then another expression wants to be added to refer to
173 the register's new value. This sequence of circumstances is rare
174 within any one basic block.
176 `REG_TICK' and `REG_IN_TABLE', accessors for members of
177 cse_reg_info, are used to detect this case. REG_TICK (i) is
178 incremented whenever a value is stored in register i.
179 REG_IN_TABLE (i) holds -1 if no references to register i have been
180 entered in the table; otherwise, it contains the value REG_TICK (i)
181 had when the references were entered. If we want to enter a
182 reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
183 remove old references. Until we want to enter a new entry, the
184 mere fact that the two vectors don't match makes the entries be
185 ignored if anyone tries to match them.
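
   As a rough sketch (not the literal code), the check made before a new
   reference is entered is essentially

	if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	  remove_invalid_refs (i);
	REG_IN_TABLE (i) = REG_TICK (i);

   which flushes the stale references lazily; this is the pattern that
   mention_regs below follows.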
187 Registers themselves are entered in the hash table as well as in
188 the equivalent-register chains. However, `REG_TICK' and
189 `REG_IN_TABLE' do not apply to expressions which are simple
190 register references. These expressions are removed from the table
191 immediately when they become invalid, and this can be done even if
192 we do not immediately search for all the expressions that refer to the register.
195 A CLOBBER rtx in an instruction invalidates its operand for further
196 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
197 invalidates everything that resides in memory.
201 Constant expressions that differ only by an additive integer
202 are called related. When a constant expression is put in
203 the table, the related expression with no constant term
204 is also entered. These are made to point at each other
205 so that it is possible to find out if there exists any
206 register equivalent to an expression related to a given expression. */
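
/* For instance, entering (plus (symbol_ref "tbl") (const_int 8)) also enters
   the related expression (symbol_ref "tbl") and links the two, so that a
   register known to hold either of them can later be reused, with an
   addition, for (plus (symbol_ref "tbl") (const_int 12)).  The symbol name
   here is only for illustration; see use_related_value below.  */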
208 /* Length of qty_table vector. We know in advance we will not need
209 a quantity number this big. */
213 /* Next quantity number to be allocated.
214 This is 1 + the largest number needed so far. */
218 /* Per-qty information tracking.
220 `first_reg' and `last_reg' track the head and tail of the
221 chain of registers which currently contain this quantity.
223 `mode' contains the machine mode of this quantity.
225 `const_rtx' holds the rtx of the constant value of this
226    quantity, if known.  A sum of the frame/arg pointer
227    and a constant can also be entered here.  When this holds
228    a known value, `const_insn' is the insn which stored the constant value.
231 `comparison_{code,const,qty}' are used to track when a
232 comparison between a quantity and some constant or register has
233 been passed. In such a case, we know the results of the comparison
234 in case we see it again. These members record a comparison that
235 is known to be true. `comparison_code' holds the rtx code of such
236 a comparison, else it is set to UNKNOWN and the other two
237 comparison members are undefined. `comparison_const' holds
238 the constant being compared against, or zero if the comparison
239 is not against a constant. `comparison_qty' holds the quantity
240 being compared against when the result is known. If the comparison
241 is not with a register, `comparison_qty' is -1. */
243 struct qty_table_elem
246   rtx_insn *const_insn;
247   rtx comparison_const;
249   unsigned int first_reg, last_reg;
250   /* The sizes of these fields should match the sizes of the
251      code and mode fields of struct rtx_def (see rtl.h).  */
252   ENUM_BITFIELD(rtx_code) comparison_code : 16;
253   ENUM_BITFIELD(machine_mode) mode : 8;
256 /* The table of all qtys, indexed by qty number. */
257 static struct qty_table_elem *qty_table;
259 /* For machines that have a CC0, we do not record its value in the hash
260 table since its use is guaranteed to be the insn immediately following
261 its definition and any other insn is presumed to invalidate it.
263 Instead, we store below the current and last value assigned to CC0.
264 If it should happen to be a constant, it is stored in preference
265 to the actual assigned value. In case it is a constant, we store
266 the mode in which the constant should be interpreted. */
268 static rtx this_insn_cc0, prev_insn_cc0;
269 static machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;
271 /* Insn being scanned. */
273 static rtx_insn *this_insn;
274 static bool optimize_this_for_speed_p;
276 /* Index by register number, gives the number of the next (or
277 previous) register in the chain of registers sharing the same quantity.
280 Or -1 if this register is at the end of the chain.
282 If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined. */
284 /* Per-register equivalence chain. */
290 /* The table of all register equivalence chains. */
291 static struct reg_eqv_elem *reg_eqv_table;
295   /* The timestamp at which this register is initialized.  */
296   unsigned int timestamp;
298   /* The quantity number of the register's current contents.  */
301   /* The number of times the register has been altered in the current basic block.  */
305   /* The REG_TICK value at which rtx's containing this register are
306      valid in the hash table.  If this does not equal the current
307      reg_tick value, such expressions existing in the hash table are invalid.  */
311   /* The SUBREG that was set when REG_TICK was last incremented.  Set
312      to -1 if the last store was to the whole register, not a subreg.  */
313   unsigned int subreg_ticked;
316 /* A table of cse_reg_info indexed by register numbers.  */
317 static struct cse_reg_info *cse_reg_info_table;
319 /* The size of the above table.  */
320 static unsigned int cse_reg_info_table_size;
322 /* The index of the first entry that has not been initialized.  */
323 static unsigned int cse_reg_info_table_first_uninitialized;
325 /* The timestamp at the beginning of the current run of
326 cse_extended_basic_block. We increment this variable at the beginning of
327 the current run of cse_extended_basic_block. The timestamp field of a
328 cse_reg_info entry matches the value of this variable if and only
329 if the entry has been initialized during the current run of
330 cse_extended_basic_block. */
331 static unsigned int cse_reg_info_timestamp;
333 /* A HARD_REG_SET containing all the hard registers for which there is
334 currently a REG expression in the hash table. Note the difference
335 from the above variables, which indicate if the REG is mentioned in some
336 expression in the table. */
338 static HARD_REG_SET hard_regs_in_table;
340 /* True if CSE has altered the CFG.  */
341 static bool cse_cfg_altered;
343 /* True if CSE has altered conditional jump insns in such a way
344    that jump optimization should be redone.  */
345 static bool cse_jumps_altered;
347 /* True if we put a LABEL_REF into the hash table for an INSN
348    without a REG_LABEL_OPERAND; if so, we have to rerun jump after CSE
349    to put in the note.  */
350 static bool recorded_label_ref;
352 /* canon_hash stores 1 in do_not_record
353    if it notices a reference to CC0, PC, or some other volatile subexpression.  */
356 static int do_not_record;
358 /* canon_hash stores 1 in hash_arg_in_memory
359    if it notices a reference to memory within the expression being hashed.  */
361 static int hash_arg_in_memory;
363 /* The hash table contains buckets which are chains of `struct table_elt's,
364 each recording one expression's information.
365 That expression is in the `exp' field.
367 The canon_exp field contains a canonical (from the point of view of
368 alias analysis) version of the `exp' field.
370 Those elements with the same hash code are chained in both directions
371 through the `next_same_hash' and `prev_same_hash' fields.
373 Each set of expressions with equivalent values
374 are on a two-way chain through the `next_same_value'
375 and `prev_same_value' fields, and all point with
376 the `first_same_value' field at the first element in
377 that chain. The chain is in order of increasing cost.
378 Each element's cost value is in its `cost' field.
380 The `in_memory' field is nonzero for elements that
381 involve any reference to memory. These elements are removed
382 whenever a write is done to an unidentified location in memory.
383 To be safe, we assume that a memory address is unidentified unless
384 the address is either a symbol constant or a constant plus
385 the frame pointer or argument pointer.
387 The `related_value' field is used to connect related expressions
388 (that differ by adding an integer).
389 The related expressions are chained in a circular fashion.
390 `related_value' is zero for expressions for which this chain is not useful.
393 The `cost' field stores the cost of this element's expression.
394 The `regcost' field stores the value returned by approx_reg_cost for
395 this element's expression.
397 The `is_const' flag is set if the element is a constant (including a CONST_INT).
400 The `flag' field is used as a temporary during some search routines.
402 The `mode' field is usually the same as GET_MODE (`exp'), but
403 if `exp' is a CONST_INT and has no machine mode then the `mode'
404 field is the mode it was being used as. Each constant is
405 recorded separately for each mode it is used with. */
411   struct table_elt *next_same_hash;
412   struct table_elt *prev_same_hash;
413   struct table_elt *next_same_value;
414   struct table_elt *prev_same_value;
415   struct table_elt *first_same_value;
416   struct table_elt *related_value;
419   /* The size of this field should match the size
420      of the mode field of struct rtx_def (see rtl.h).  */
421   ENUM_BITFIELD(machine_mode) mode : 8;
427 /* We don't want a lot of buckets, because we rarely have very many
428 things stored in the hash table, and a lot of buckets slows
429 down a lot of loops that happen frequently. */
431 #define HASH_SIZE (1 << HASH_SHIFT)
432 #define HASH_MASK (HASH_SIZE - 1)
434 /* Compute hash code of X in mode M.  Special-case the case where X is a pseudo
435    register (hard registers may require `do_not_record' to be set).  */
437 #define HASH(X, M)	\
438  ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
439    ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
440    : canon_hash (X, M)) & HASH_MASK)
442 /* Like HASH, but without side-effects. */
443 #define SAFE_HASH(X, M) \
444 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
445 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
446 : safe_hash (X, M)) & HASH_MASK)
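
/* Illustrative only: a typical sequence first hashes and then searches the
   bucket, e.g.

	unsigned h = HASH (x, mode);
	struct table_elt *elt = lookup (x, h, mode);

   SAFE_HASH is used where setting do_not_record as a side effect would be
   unwanted, as in lookup_as_function below.  */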
448 /* Determine whether register number N is considered a fixed register for the
449 purpose of approximating register costs.
450 It is desirable to replace other regs with fixed regs, to reduce need for non-fixed hard regs.
452 A reg wins if it is either the frame pointer or designated as fixed. */
453 #define FIXED_REGNO_P(N) \
454 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
455 || fixed_regs[N] || global_regs[N])
457 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
458 hard registers and pointers into the frame are the cheapest with a cost
459 of 0. Next come pseudos with a cost of one and other hard registers with
460 a cost of 2. Aside from these special cases, call `rtx_cost'. */
462 #define CHEAP_REGNO(N) \
463 (REGNO_PTR_FRAME_P (N) \
464 || (HARD_REGISTER_NUM_P (N) \
465 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
467 #define COST(X, MODE) \
468 (REG_P (X) ? 0 : notreg_cost (X, MODE, SET, 1))
469 #define COST_IN(X, MODE, OUTER, OPNO) \
470 (REG_P (X) ? 0 : notreg_cost (X, MODE, OUTER, OPNO))
472 /* Get the number of times this register has been updated in this basic block.  */
475 #define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)
477 /* Get the point at which REG was recorded in the table. */
479 #define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)
481 /* Get the SUBREG set at the last increment to REG_TICK (-1 if not a subreg).  */
484 #define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)
486 /* Get the quantity number for REG. */
488 #define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)
490 /* Determine if the quantity number for register X represents a valid index
491 into the qty_table. */
493 #define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
495 /* Compare table_elt X and Y and return true iff X is cheaper than Y. */
497 #define CHEAPER(X, Y) \
498 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
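
/* Illustrative only: with the conventions above, an element whose `cost' is
   0 (say a fixed hard register) and whose `regcost' is 0 is CHEAPER than an
   element with cost 4 and regcost 1, so the former would become the head of
   its value class; MAX_COST in either field loses to anything else.  */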
500 static struct table_elt *table[HASH_SIZE];
502 /* Chain of `struct table_elt's made so far for this function
503 but currently removed from the table. */
505 static struct table_elt *free_element_chain;
507 /* Set to the cost of a constant pool reference if one was found for a
508    symbolic constant.  If this was found, it means we should try to
509    convert constants into constant pool entries if they don't fit in the insn.  */
512 static int constant_pool_entries_cost;
513 static int constant_pool_entries_regcost;
515 /* Trace a path through the CFG.  */
519 /* The basic block for this path entry. */
523 /* This data describes a block that will be processed by
524 cse_extended_basic_block. */
526 struct cse_basic_block_data
528 /* Total number of SETs in block. */
530 /* Size of current branch path, if any. */
532 /* Current path, indicating which basic_blocks will be processed. */
533 struct branch_path *path;
537 /* Pointers to the live in/live out bitmaps for the boundaries of the current EBB.  */
539 static bitmap cse_ebb_live_in, cse_ebb_live_out;
541 /* A simple bitmap to track which basic blocks have been visited
542 already as part of an already processed extended basic block. */
543 static sbitmap cse_visited_basic_blocks;
545 static bool fixed_base_plus_p (rtx x);
546 static int notreg_cost (rtx, machine_mode, enum rtx_code, int);
547 static int preferable (int, int, int, int);
548 static void new_basic_block (void);
549 static void make_new_qty (unsigned int, machine_mode);
550 static void make_regs_eqv (unsigned int, unsigned int);
551 static void delete_reg_equiv (unsigned int);
552 static int mention_regs (rtx);
553 static int insert_regs (rtx, struct table_elt *, int);
554 static void remove_from_table (struct table_elt *, unsigned);
555 static void remove_pseudo_from_table (rtx, unsigned);
556 static struct table_elt *lookup (rtx, unsigned, machine_mode);
557 static struct table_elt *lookup_for_remove (rtx, unsigned, machine_mode);
558 static rtx lookup_as_function (rtx, enum rtx_code);
559 static struct table_elt *insert_with_costs (rtx, struct table_elt *, unsigned,
560 					     machine_mode, int, int);
561 static struct table_elt *insert (rtx, struct table_elt *, unsigned,
563 static void merge_equiv_classes (struct table_elt *, struct table_elt *);
564 static void invalidate (rtx, machine_mode);
565 static void remove_invalid_refs (unsigned int);
566 static void remove_invalid_subreg_refs (unsigned int, poly_uint64,
568 static void rehash_using_reg (rtx);
569 static void invalidate_memory (void);
570 static rtx use_related_value (rtx, struct table_elt *);
572 static inline unsigned canon_hash (rtx, machine_mode);
573 static inline unsigned safe_hash (rtx, machine_mode);
574 static inline unsigned hash_rtx_string (const char *);
576 static rtx canon_reg (rtx, rtx_insn *);
577 static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
580 static rtx fold_rtx (rtx, rtx_insn *);
581 static rtx equiv_constant (rtx);
582 static void record_jump_equiv (rtx_insn *, bool);
583 static void record_jump_cond (enum rtx_code, machine_mode, rtx, rtx,
585 static void cse_insn (rtx_insn *);
586 static void cse_prescan_path (struct cse_basic_block_data *);
587 static void invalidate_from_clobbers (rtx_insn *);
588 static void invalidate_from_sets_and_clobbers (rtx_insn *);
589 static rtx cse_process_notes (rtx, rtx, bool *);
590 static void cse_extended_basic_block (struct cse_basic_block_data *);
591 extern void dump_class (struct table_elt *);
592 static void get_cse_reg_info_1 (unsigned int regno);
593 static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
595 static void flush_hash_table (void);
596 static bool insn_live_p (rtx_insn *, int *);
597 static bool set_live_p (rtx, rtx_insn *, int *);
598 static void cse_change_cc_mode_insn (rtx_insn *, rtx);
599 static void cse_change_cc_mode_insns (rtx_insn *, rtx_insn *, rtx);
600 static machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
604 #undef RTL_HOOKS_GEN_LOWPART
605 #define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible
607 static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
609 /* Nonzero if X has the form (PLUS frame-pointer integer). */
612 fixed_base_plus_p (rtx x)
614   switch (GET_CODE (x))
617       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
619       if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
624       if (!CONST_INT_P (XEXP (x, 1)))
626       return fixed_base_plus_p (XEXP (x, 0));
633 /* Dump the expressions in the equivalence class indicated by CLASSP.
634 This function is used only for debugging. */
636 dump_class (struct table_elt *classp)
638   struct table_elt *elt;
640   fprintf (stderr, "Equivalence chain for ");
641   print_rtl (stderr, classp->exp);
642   fprintf (stderr, ": \n");
644   for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
646       print_rtl (stderr, elt->exp);
647       fprintf (stderr, "\n");
651 /* Return an estimate of the cost of the registers used in an rtx.
652 This is mostly the number of different REG expressions in the rtx;
653 however for some exceptions like fixed registers we use a cost of
654 0. If any other hard register reference occurs, return MAX_COST. */
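
/* Illustrative only: (plus:SI (reg:SI 100) (reg:SI 101)) would be charged
   for its two pseudo uses, while a frame address such as
   (plus:SI (reg:SI fp) (const_int 8)) contributes nothing because the frame
   pointer satisfies CHEAP_REGNO.  */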
657 approx_reg_cost (const_rtx x
)
660 subrtx_iterator::array_type array
;
661 FOR_EACH_SUBRTX (iter
, array
, x
, NONCONST
)
666 unsigned int regno
= REGNO (x
);
667 if (!CHEAP_REGNO (regno
))
669 if (regno
< FIRST_PSEUDO_REGISTER
)
671 if (targetm
.small_register_classes_for_mode_p (GET_MODE (x
)))
683 /* Return a negative value if an rtx A, whose costs are given by COST_A
684 and REGCOST_A, is more desirable than an rtx B.
685 Return a positive value if A is less desirable, or 0 if the two are equally desirable.
688 preferable (int cost_a
, int regcost_a
, int cost_b
, int regcost_b
)
690 /* First, get rid of cases involving expressions that are entirely unwanted.  */
692 if (cost_a
!= cost_b
)
694 if (cost_a
== MAX_COST
)
696 if (cost_b
== MAX_COST
)
700 /* Avoid extending lifetimes of hardregs. */
701 if (regcost_a
!= regcost_b
)
703 if (regcost_a
== MAX_COST
)
705 if (regcost_b
== MAX_COST
)
709 /* Normal operation costs take precedence. */
710 if (cost_a
!= cost_b
)
711 return cost_a
- cost_b
;
712 /* Only if these are identical consider effects on register pressure. */
713 if (regcost_a
!= regcost_b
)
714 return regcost_a
- regcost_b
;
718 /* Internal function, to compute cost when X is not a register; called
719 from COST macro to keep it simple. */
722 notreg_cost (rtx x
, machine_mode mode
, enum rtx_code outer
, int opno
)
724 scalar_int_mode int_mode
, inner_mode
;
725 return ((GET_CODE (x
) == SUBREG
726 && REG_P (SUBREG_REG (x
))
727 && is_int_mode (mode
, &int_mode
)
728 && is_int_mode (GET_MODE (SUBREG_REG (x
)), &inner_mode
)
729 && GET_MODE_SIZE (int_mode
) < GET_MODE_SIZE (inner_mode
)
730 && subreg_lowpart_p (x
)
731 && TRULY_NOOP_TRUNCATION_MODES_P (int_mode
, inner_mode
))
733 : rtx_cost (x
, mode
, outer
, opno
, optimize_this_for_speed_p
) * 2);
737 /* Initialize CSE_REG_INFO_TABLE. */
740 init_cse_reg_info (unsigned int nregs
)
742 /* Do we need to grow the table? */
743 if (nregs
> cse_reg_info_table_size
)
745 unsigned int new_size
;
747 if (cse_reg_info_table_size
< 2048)
749 /* Compute a new size that is a power of 2 and no smaller
750 than the larger of NREGS and 64.  */
751 new_size
= (cse_reg_info_table_size
752 ? cse_reg_info_table_size
: 64);
754 while (new_size
< nregs
)
759 /* If we need a big table, allocate just enough to hold NREGS registers.  */
764 /* Reallocate the table with NEW_SIZE entries. */
765 free (cse_reg_info_table
);
766 cse_reg_info_table
= XNEWVEC (struct cse_reg_info
, new_size
);
767 cse_reg_info_table_size
= new_size
;
768 cse_reg_info_table_first_uninitialized
= 0;
771 /* Do we have all of the first NREGS entries initialized? */
772 if (cse_reg_info_table_first_uninitialized
< nregs
)
774 unsigned int old_timestamp
= cse_reg_info_timestamp
- 1;
777 /* Put the old timestamp on newly allocated entries so that they
778 will all be considered out of date. We do not touch those
779 entries beyond the first NREGS entries to be nice to the
781 for (i
= cse_reg_info_table_first_uninitialized
; i
< nregs
; i
++)
782 cse_reg_info_table
[i
].timestamp
= old_timestamp
;
784 cse_reg_info_table_first_uninitialized
= nregs
;
788 /* Given REGNO, initialize the cse_reg_info entry for REGNO. */
791 get_cse_reg_info_1 (unsigned int regno
)
793 /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
794 entry will be considered to have been initialized. */
795 cse_reg_info_table
[regno
].timestamp
= cse_reg_info_timestamp
;
797 /* Initialize the rest of the entry. */
798 cse_reg_info_table
[regno
].reg_tick
= 1;
799 cse_reg_info_table
[regno
].reg_in_table
= -1;
800 cse_reg_info_table
[regno
].subreg_ticked
= -1;
801 cse_reg_info_table
[regno
].reg_qty
= -regno
- 1;
804 /* Find a cse_reg_info entry for REGNO. */
806 static inline struct cse_reg_info
*
807 get_cse_reg_info (unsigned int regno
)
809 struct cse_reg_info
*p
= &cse_reg_info_table
[regno
];
811 /* If this entry has not been initialized, go ahead and initialize
813 if (p
->timestamp
!= cse_reg_info_timestamp
)
814 get_cse_reg_info_1 (regno
);
819 /* Clear the hash table and initialize each register with its own quantity,
820 for a new basic block. */
823 new_basic_block (void)
829 /* Invalidate cse_reg_info_table. */
830 cse_reg_info_timestamp
++;
832 /* Clear out hash table state for this pass. */
833 CLEAR_HARD_REG_SET (hard_regs_in_table
);
835 /* The per-quantity values used to be initialized here, but it is
836 much faster to initialize each as it is made in `make_new_qty'. */
838 for (i
= 0; i
< HASH_SIZE
; i
++)
840 struct table_elt
*first
;
845 struct table_elt
*last
= first
;
849 while (last
->next_same_hash
!= NULL
)
850 last
= last
->next_same_hash
;
852 /* Now relink this entire hash chain into
853    the free element list.  */
855 last
->next_same_hash
= free_element_chain
;
856 free_element_chain
= first
;
863 /* Say that register REG contains a quantity in mode MODE not in any
864 register before and initialize that quantity. */
867 make_new_qty (unsigned int reg
, machine_mode mode
)
870 struct qty_table_elem
*ent
;
871 struct reg_eqv_elem
*eqv
;
873 gcc_assert (next_qty
< max_qty
);
875 q
= REG_QTY (reg
) = next_qty
++;
877 ent
->first_reg
= reg
;
880 ent
->const_rtx
= ent
->const_insn
= NULL
;
881 ent
->comparison_code
= UNKNOWN
;
883 eqv
= ®_eqv_table
[reg
];
884 eqv
->next
= eqv
->prev
= -1;
887 /* Make reg NEW equivalent to reg OLD.
888 OLD is not changing; NEW is. */
891 make_regs_eqv (unsigned int new_reg
, unsigned int old_reg
)
893 unsigned int lastr
, firstr
;
894 int q
= REG_QTY (old_reg
);
895 struct qty_table_elem
*ent
;
899 /* Nothing should become eqv until it has a "non-invalid" qty number. */
900 gcc_assert (REGNO_QTY_VALID_P (old_reg
));
902 REG_QTY (new_reg
) = q
;
903 firstr
= ent
->first_reg
;
904 lastr
= ent
->last_reg
;
906 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
907 hard regs. Among pseudos, if NEW will live longer than any other reg
908 of the same qty, and that is beyond the current basic block,
909 make it the new canonical replacement for this qty. */
910 if (! (firstr
< FIRST_PSEUDO_REGISTER
&& FIXED_REGNO_P (firstr
))
911 /* Certain fixed registers might be of the class NO_REGS. This means
912 that not only can they not be allocated by the compiler, but
913 they cannot be used in substitutions or canonicalizations either.
915 && (new_reg
>= FIRST_PSEUDO_REGISTER
|| REGNO_REG_CLASS (new_reg
) != NO_REGS
)
916 && ((new_reg
< FIRST_PSEUDO_REGISTER
&& FIXED_REGNO_P (new_reg
))
917 || (new_reg
>= FIRST_PSEUDO_REGISTER
918 && (firstr
< FIRST_PSEUDO_REGISTER
919 || (bitmap_bit_p (cse_ebb_live_out
, new_reg
)
920 && !bitmap_bit_p (cse_ebb_live_out
, firstr
))
921 || (bitmap_bit_p (cse_ebb_live_in
, new_reg
)
922 && !bitmap_bit_p (cse_ebb_live_in
, firstr
))))))
924 reg_eqv_table
[firstr
].prev
= new_reg
;
925 reg_eqv_table
[new_reg
].next
= firstr
;
926 reg_eqv_table
[new_reg
].prev
= -1;
927 ent
->first_reg
= new_reg
;
931 /* If NEW is a hard reg (known to be non-fixed), insert at end.
932 Otherwise, insert before any non-fixed hard regs that are at the
933 end. Registers of class NO_REGS cannot be used as an
934 equivalent for anything. */
935 while (lastr
< FIRST_PSEUDO_REGISTER
&& reg_eqv_table
[lastr
].prev
>= 0
936 && (REGNO_REG_CLASS (lastr
) == NO_REGS
|| ! FIXED_REGNO_P (lastr
))
937 && new_reg
>= FIRST_PSEUDO_REGISTER
)
938 lastr
= reg_eqv_table
[lastr
].prev
;
939 reg_eqv_table
[new_reg
].next
= reg_eqv_table
[lastr
].next
;
940 if (reg_eqv_table
[lastr
].next
>= 0)
941 reg_eqv_table
[reg_eqv_table
[lastr
].next
].prev
= new_reg
;
943 qty_table
[q
].last_reg
= new_reg
;
944 reg_eqv_table
[lastr
].next
= new_reg
;
945 reg_eqv_table
[new_reg
].prev
= lastr
;
949 /* Remove REG from its equivalence class. */
952 delete_reg_equiv (unsigned int reg
)
954 struct qty_table_elem
*ent
;
955 int q
= REG_QTY (reg
);
958 /* If invalid, do nothing. */
959 if (! REGNO_QTY_VALID_P (reg
))
964 p
= reg_eqv_table
[reg
].prev
;
965 n
= reg_eqv_table
[reg
].next
;
968 reg_eqv_table
[n
].prev
= p
;
972 reg_eqv_table
[p
].next
= n
;
976 REG_QTY (reg
) = -reg
- 1;
979 /* Remove any invalid expressions from the hash table
980 that refer to any of the registers contained in expression X.
982 Make sure that newly inserted references to those registers
983 as subexpressions will be considered valid.
985 mention_regs is not called when a register itself
986 is being stored in the table.
988 Return 1 if we have done something that may have changed the hash code of X.  */
1002 code
= GET_CODE (x
);
1005 unsigned int regno
= REGNO (x
);
1006 unsigned int endregno
= END_REGNO (x
);
1009 for (i
= regno
; i
< endregno
; i
++)
1011 if (REG_IN_TABLE (i
) >= 0 && REG_IN_TABLE (i
) != REG_TICK (i
))
1012 remove_invalid_refs (i
);
1014 REG_IN_TABLE (i
) = REG_TICK (i
);
1015 SUBREG_TICKED (i
) = -1;
1021 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1022 pseudo if they don't use overlapping words. We handle only pseudos
1023 here for simplicity. */
1024 if (code
== SUBREG
&& REG_P (SUBREG_REG (x
))
1025 && REGNO (SUBREG_REG (x
)) >= FIRST_PSEUDO_REGISTER
)
1027 unsigned int i
= REGNO (SUBREG_REG (x
));
1029 if (REG_IN_TABLE (i
) >= 0 && REG_IN_TABLE (i
) != REG_TICK (i
))
1031 /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
1032 the last store to this register really stored into this
1033 subreg, then remove the memory of this subreg.
1034 Otherwise, remove any memory of the entire register and
1035 all its subregs from the table. */
1036 if (REG_TICK (i
) - REG_IN_TABLE (i
) > 1
1037 || SUBREG_TICKED (i
) != REGNO (SUBREG_REG (x
)))
1038 remove_invalid_refs (i
);
1040 remove_invalid_subreg_refs (i
, SUBREG_BYTE (x
), GET_MODE (x
));
1043 REG_IN_TABLE (i
) = REG_TICK (i
);
1044 SUBREG_TICKED (i
) = REGNO (SUBREG_REG (x
));
1048 /* If X is a comparison or a COMPARE and either operand is a register
1049 that does not have a quantity, give it one. This is so that a later
1050 call to record_jump_equiv won't cause X to be assigned a different
1051 hash code and not found in the table after that call.
1053 It is not necessary to do this here, since rehash_using_reg can
1054 fix up the table later, but doing this here eliminates the need to
1055 call that expensive function in the most common case where the only
1056 use of the register is in the comparison. */
1058 if (code
== COMPARE
|| COMPARISON_P (x
))
1060 if (REG_P (XEXP (x
, 0))
1061 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x
, 0))))
1062 if (insert_regs (XEXP (x
, 0), NULL
, 0))
1064 rehash_using_reg (XEXP (x
, 0));
1068 if (REG_P (XEXP (x
, 1))
1069 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x
, 1))))
1070 if (insert_regs (XEXP (x
, 1), NULL
, 0))
1072 rehash_using_reg (XEXP (x
, 1));
1077 fmt
= GET_RTX_FORMAT (code
);
1078 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1080 changed
|= mention_regs (XEXP (x
, i
));
1081 else if (fmt
[i
] == 'E')
1082 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1083 changed
|= mention_regs (XVECEXP (x
, i
, j
));
1088 /* Update the register quantities for inserting X into the hash table
1089 with a value equivalent to CLASSP.
1090 (If the class does not contain a REG, it is irrelevant.)
1091 If MODIFIED is nonzero, X is a destination; it is being modified.
1092 Note that delete_reg_equiv should be called on a register
1093 before insert_regs is done on that register with MODIFIED != 0.
1095 Nonzero value means that elements of reg_qty have changed
1096 so X's hash code may be different. */
1099 insert_regs (rtx x
, struct table_elt
*classp
, int modified
)
1103 unsigned int regno
= REGNO (x
);
1106 /* If REGNO is in the equivalence table already but is of the
1107 wrong mode for that equivalence, don't do anything here. */
1109 qty_valid
= REGNO_QTY_VALID_P (regno
);
1112 struct qty_table_elem
*ent
= &qty_table
[REG_QTY (regno
)];
1114 if (ent
->mode
!= GET_MODE (x
))
1118 if (modified
|| ! qty_valid
)
1121 for (classp
= classp
->first_same_value
;
1123 classp
= classp
->next_same_value
)
1124 if (REG_P (classp
->exp
)
1125 && GET_MODE (classp
->exp
) == GET_MODE (x
))
1127 unsigned c_regno
= REGNO (classp
->exp
);
1129 gcc_assert (REGNO_QTY_VALID_P (c_regno
));
1131 /* Suppose that 5 is a hard reg and 100 and 101 are pseudos.  Consider:
1134 (set (reg:si 100) (reg:si 5))
1135 (set (reg:si 5) (reg:si 100))
1136 (set (reg:di 101) (reg:di 5))
1138 We would now set REG_QTY (101) = REG_QTY (5), but the
1139 entry for 5 is in SImode. When we use this later in
1140 copy propagation, we get the register in wrong mode. */
1141 if (qty_table
[REG_QTY (c_regno
)].mode
!= GET_MODE (x
))
1144 make_regs_eqv (regno
, c_regno
);
1148 /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1149 than REG_IN_TABLE to find out if there was only a single preceding
1150 invalidation - for the SUBREG - or another one, which would be
1151 for the full register. However, if we find here that REG_TICK
1152 indicates that the register is invalid, it means that it has
1153 been invalidated in a separate operation. The SUBREG might be used
1154 now (then this is a recursive call), or we might use the full REG
1155 now and a SUBREG of it later. So bump up REG_TICK so that
1156 mention_regs will do the right thing. */
1158 && REG_IN_TABLE (regno
) >= 0
1159 && REG_TICK (regno
) == REG_IN_TABLE (regno
) + 1)
1161 make_new_qty (regno
, GET_MODE (x
));
1168 /* If X is a SUBREG, we will likely be inserting the inner register in the
1169 table. If that register doesn't have an assigned quantity number at
1170 this point but does later, the insertion that we will be doing now will
1171 not be accessible because its hash code will have changed. So assign
1172 a quantity number now. */
1174 else if (GET_CODE (x
) == SUBREG
&& REG_P (SUBREG_REG (x
))
1175 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x
))))
1177 insert_regs (SUBREG_REG (x
), NULL
, 0);
1182 return mention_regs (x
);
1186 /* Compute upper and lower anchors for CST. Also compute the offset of CST
1187 from these anchors/bases such that *_BASE + *_OFFS = CST. Return false iff
1188 CST is equal to an anchor. */
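
/* A worked example, for illustration only: with targetm.const_anchor == 0x8000
   and CST == (const_int 0x8007), the lower anchor is 0x8000 with offset 7 and
   the upper anchor is 0x10000 with offset -0x7ff9; in both cases
   base + offset == 0x8007, which is the invariant the callers rely on.  */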
1191 compute_const_anchors (rtx cst
,
1192 HOST_WIDE_INT
*lower_base
, HOST_WIDE_INT
*lower_offs
,
1193 HOST_WIDE_INT
*upper_base
, HOST_WIDE_INT
*upper_offs
)
1195 HOST_WIDE_INT n
= INTVAL (cst
);
1197 *lower_base
= n
& ~(targetm
.const_anchor
- 1);
1198 if (*lower_base
== n
)
1202 (n
+ (targetm
.const_anchor
- 1)) & ~(targetm
.const_anchor
- 1);
1203 *upper_offs
= n
- *upper_base
;
1204 *lower_offs
= n
- *lower_base
;
1208 /* Insert the equivalence between ANCHOR and (REG + OFF) in mode MODE. */
1211 insert_const_anchor (HOST_WIDE_INT anchor
, rtx reg
, HOST_WIDE_INT offs
,
1214 struct table_elt
*elt
;
1219 anchor_exp
= GEN_INT (anchor
);
1220 hash
= HASH (anchor_exp
, mode
);
1221 elt
= lookup (anchor_exp
, hash
, mode
);
1223 elt
= insert (anchor_exp
, NULL
, hash
, mode
);
1225 exp
= plus_constant (mode
, reg
, offs
);
1226 /* REG has just been inserted and the hash codes recomputed. */
1228 hash
= HASH (exp
, mode
);
1230 /* Use the cost of the register rather than the whole expression. When
1231 looking up constant anchors we will further offset the corresponding
1232 expression therefore it does not make sense to prefer REGs over
1233 reg-immediate additions. Prefer instead the oldest expression. Also
1234 don't prefer pseudos over hard regs so that we derive constants in
1235 argument registers from other argument registers rather than from the
1236 original pseudo that was used to synthesize the constant. */
1237 insert_with_costs (exp
, elt
, hash
, mode
, COST (reg
, mode
), 1);
1240 /* The constant CST is equivalent to the register REG. Create
1241 equivalences between the two anchors of CST and the corresponding
1242 register-offset expressions using REG. */
1245 insert_const_anchors (rtx reg
, rtx cst
, machine_mode mode
)
1247 HOST_WIDE_INT lower_base
, lower_offs
, upper_base
, upper_offs
;
1249 if (!compute_const_anchors (cst
, &lower_base
, &lower_offs
,
1250 &upper_base
, &upper_offs
))
1253 /* Ignore anchors of value 0.  Constants accessible from zero are simple.  */
1255 if (lower_base
!= 0)
1256 insert_const_anchor (lower_base
, reg
, -lower_offs
, mode
);
1258 if (upper_base
!= 0)
1259 insert_const_anchor (upper_base
, reg
, -upper_offs
, mode
);
1262 /* We need to express ANCHOR_ELT->exp + OFFS. Walk the equivalence list of
1263 ANCHOR_ELT and see if offsetting any of the entries by OFFS would create a
1264 valid expression. Return the cheapest and oldest of such expressions. In
1265 *OLD, return how old the resulting expression is compared to the other
1266 equivalent expressions. */
1269 find_reg_offset_for_const (struct table_elt
*anchor_elt
, HOST_WIDE_INT offs
,
1272 struct table_elt
*elt
;
1274 struct table_elt
*match_elt
;
1277 /* Find the cheapest and *oldest* expression to maximize the chance of
1278 reusing the same pseudo. */
1282 for (elt
= anchor_elt
->first_same_value
, idx
= 0;
1284 elt
= elt
->next_same_value
, idx
++)
1286 if (match_elt
&& CHEAPER (match_elt
, elt
))
1289 if (REG_P (elt
->exp
)
1290 || (GET_CODE (elt
->exp
) == PLUS
1291 && REG_P (XEXP (elt
->exp
, 0))
1292 && GET_CODE (XEXP (elt
->exp
, 1)) == CONST_INT
))
1296 /* Ignore expressions that are no longer valid. */
1297 if (!REG_P (elt
->exp
) && !exp_equiv_p (elt
->exp
, elt
->exp
, 1, false))
1300 x
= plus_constant (GET_MODE (elt
->exp
), elt
->exp
, offs
);
1302 || (GET_CODE (x
) == PLUS
1303 && IN_RANGE (INTVAL (XEXP (x
, 1)),
1304 -targetm
.const_anchor
,
1305 targetm
.const_anchor
- 1)))
1317 /* Try to express the constant SRC_CONST using a register+offset expression
1318 derived from a constant anchor.  Return it if successful or NULL_RTX otherwise.
1322 try_const_anchors (rtx src_const
, machine_mode mode
)
1324 struct table_elt
*lower_elt
, *upper_elt
;
1325 HOST_WIDE_INT lower_base
, lower_offs
, upper_base
, upper_offs
;
1326 rtx lower_anchor_rtx
, upper_anchor_rtx
;
1327 rtx lower_exp
= NULL_RTX
, upper_exp
= NULL_RTX
;
1328 unsigned lower_old
, upper_old
;
1330 /* CONST_INT is used for CC modes, but we should leave those alone. */
1331 if (GET_MODE_CLASS (mode
) == MODE_CC
)
1334 gcc_assert (SCALAR_INT_MODE_P (mode
));
1335 if (!compute_const_anchors (src_const
, &lower_base
, &lower_offs
,
1336 &upper_base
, &upper_offs
))
1339 lower_anchor_rtx
= GEN_INT (lower_base
);
1340 upper_anchor_rtx
= GEN_INT (upper_base
);
1341 lower_elt
= lookup (lower_anchor_rtx
, HASH (lower_anchor_rtx
, mode
), mode
);
1342 upper_elt
= lookup (upper_anchor_rtx
, HASH (upper_anchor_rtx
, mode
), mode
);
1345 lower_exp
= find_reg_offset_for_const (lower_elt
, lower_offs
, &lower_old
);
1347 upper_exp
= find_reg_offset_for_const (upper_elt
, upper_offs
, &upper_old
);
1354 /* Return the older expression. */
1355 return (upper_old
> lower_old
? upper_exp
: lower_exp
);
1358 /* Look in or update the hash table. */
1360 /* Remove table element ELT from use in the table.
1361 HASH is its hash code, made using the HASH macro.
1362 It's an argument because often that is known in advance
1363 and we save much time not recomputing it. */
1366 remove_from_table (struct table_elt
*elt
, unsigned int hash
)
1371 /* Mark this element as removed. See cse_insn. */
1372 elt
->first_same_value
= 0;
1374 /* Remove the table element from its equivalence class. */
1377 struct table_elt
*prev
= elt
->prev_same_value
;
1378 struct table_elt
*next
= elt
->next_same_value
;
1381 next
->prev_same_value
= prev
;
1384 prev
->next_same_value
= next
;
1387 struct table_elt
*newfirst
= next
;
1390 next
->first_same_value
= newfirst
;
1391 next
= next
->next_same_value
;
1396 /* Remove the table element from its hash bucket. */
1399 struct table_elt
*prev
= elt
->prev_same_hash
;
1400 struct table_elt
*next
= elt
->next_same_hash
;
1403 next
->prev_same_hash
= prev
;
1406 prev
->next_same_hash
= next
;
1407 else if (table
[hash
] == elt
)
1411 /* This entry is not in the proper hash bucket. This can happen
1412 when two classes were merged by `merge_equiv_classes'. Search
1413 for the hash bucket that it heads. This happens only very
1414 rarely, so the cost is acceptable. */
1415 for (hash
= 0; hash
< HASH_SIZE
; hash
++)
1416 if (table
[hash
] == elt
)
1421 /* Remove the table element from its related-value circular chain. */
1423 if (elt
->related_value
!= 0 && elt
->related_value
!= elt
)
1425 struct table_elt
*p
= elt
->related_value
;
1427 while (p
->related_value
!= elt
)
1428 p
= p
->related_value
;
1429 p
->related_value
= elt
->related_value
;
1430 if (p
->related_value
== p
)
1431 p
->related_value
= 0;
1434 /* Now add it to the free element chain. */
1435 elt
->next_same_hash
= free_element_chain
;
1436 free_element_chain
= elt
;
1439 /* Same as above, but X is a pseudo-register. */
1442 remove_pseudo_from_table (rtx x
, unsigned int hash
)
1444 struct table_elt
*elt
;
1446 /* Because a pseudo-register can be referenced in more than one
1447 mode, we might have to remove more than one table entry. */
1448 while ((elt
= lookup_for_remove (x
, hash
, VOIDmode
)))
1449 remove_from_table (elt
, hash
);
1452 /* Look up X in the hash table and return its table element,
1453 or 0 if X is not in the table.
1455 MODE is the machine-mode of X, or if X is an integer constant
1456 with VOIDmode then MODE is the mode with which X will be used.
1458 Here we are satisfied to find an expression whose tree structure looks like X.
1461 static struct table_elt
*
1462 lookup (rtx x
, unsigned int hash
, machine_mode mode
)
1464 struct table_elt
*p
;
1466 for (p
= table
[hash
]; p
; p
= p
->next_same_hash
)
1467 if (mode
== p
->mode
&& ((x
== p
->exp
&& REG_P (x
))
1468 || exp_equiv_p (x
, p
->exp
, !REG_P (x
), false)))
1474 /* Like `lookup' but don't care whether the table element uses invalid regs.
1475 Also ignore discrepancies in the machine mode of a register. */
1477 static struct table_elt
*
1478 lookup_for_remove (rtx x
, unsigned int hash
, machine_mode mode
)
1480 struct table_elt
*p
;
1484 unsigned int regno
= REGNO (x
);
1486 /* Don't check the machine mode when comparing registers;
1487 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1488 for (p
= table
[hash
]; p
; p
= p
->next_same_hash
)
1490 && REGNO (p
->exp
) == regno
)
1495 for (p
= table
[hash
]; p
; p
= p
->next_same_hash
)
1497 && (x
== p
->exp
|| exp_equiv_p (x
, p
->exp
, 0, false)))
1504 /* Look for an expression equivalent to X and with code CODE.
1505 If one is found, return that expression. */
1508 lookup_as_function (rtx x
, enum rtx_code code
)
1511 = lookup (x
, SAFE_HASH (x
, VOIDmode
), GET_MODE (x
));
1516 for (p
= p
->first_same_value
; p
; p
= p
->next_same_value
)
1517 if (GET_CODE (p
->exp
) == code
1518 /* Make sure this is a valid entry in the table. */
1519 && exp_equiv_p (p
->exp
, p
->exp
, 1, false))
1525 /* Insert X in the hash table, assuming HASH is its hash code and
1526 CLASSP is an element of the class it should go in (or 0 if a new
1527 class should be made).  COST is the cost of X and reg_cost is the
1528 cost of registers in X. It is inserted at the proper position to
1529 keep the class in the order cheapest first.
1531 MODE is the machine-mode of X, or if X is an integer constant
1532 with VOIDmode then MODE is the mode with which X will be used.
1534 For elements of equal cheapness, the most recent one
1535 goes in front, except that the first element in the list
1536 remains first unless a cheaper element is added. The order of
1537 pseudo-registers does not matter, as canon_reg will be called to
1538 find the cheapest when a register is retrieved from the table.
1540 The in_memory field in the hash table element is set to 0.
1541 The caller must set it nonzero if appropriate.
1543 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1544 and if insert_regs returns a nonzero value
1545 you must then recompute its hash code before calling here.
1547 If necessary, update table showing constant values of quantities. */
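
/* Illustrative only: the usual calling pattern implied above is

	if (insert_regs (x, classp, 0))
	  hash = HASH (x, mode);
	elt = insert (x, classp, hash, mode);

   i.e. the hash code is recomputed whenever insert_regs returns nonzero
   because the register quantities, and hence the hash, may have changed.  */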
1549 static struct table_elt
*
1550 insert_with_costs (rtx x
, struct table_elt
*classp
, unsigned int hash
,
1551 machine_mode mode
, int cost
, int reg_cost
)
1553 struct table_elt
*elt
;
1555 /* If X is a register and we haven't made a quantity for it,
1556 something is wrong. */
1557 gcc_assert (!REG_P (x
) || REGNO_QTY_VALID_P (REGNO (x
)));
1559 /* If X is a hard register, show it is being put in the table. */
1560 if (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
1561 add_to_hard_reg_set (&hard_regs_in_table
, GET_MODE (x
), REGNO (x
));
1563 /* Put an element for X into the right hash bucket. */
1565 elt
= free_element_chain
;
1567 free_element_chain
= elt
->next_same_hash
;
1569 elt
= XNEW (struct table_elt
);
1572 elt
->canon_exp
= NULL_RTX
;
1574 elt
->regcost
= reg_cost
;
1575 elt
->next_same_value
= 0;
1576 elt
->prev_same_value
= 0;
1577 elt
->next_same_hash
= table
[hash
];
1578 elt
->prev_same_hash
= 0;
1579 elt
->related_value
= 0;
1582 elt
->is_const
= (CONSTANT_P (x
) || fixed_base_plus_p (x
));
1585 table
[hash
]->prev_same_hash
= elt
;
1588 /* Put it into the proper value-class. */
1591 classp
= classp
->first_same_value
;
1592 if (CHEAPER (elt
, classp
))
1593 /* Insert at the head of the class. */
1595 struct table_elt
*p
;
1596 elt
->next_same_value
= classp
;
1597 classp
->prev_same_value
= elt
;
1598 elt
->first_same_value
= elt
;
1600 for (p
= classp
; p
; p
= p
->next_same_value
)
1601 p
->first_same_value
= elt
;
1605 /* Insert not at head of the class. */
1606 /* Put it after the last element cheaper than X. */
1607 struct table_elt
*p
, *next
;
1610 (next
= p
->next_same_value
) && CHEAPER (next
, elt
);
1614 /* Put it after P and before NEXT. */
1615 elt
->next_same_value
= next
;
1617 next
->prev_same_value
= elt
;
1619 elt
->prev_same_value
= p
;
1620 p
->next_same_value
= elt
;
1621 elt
->first_same_value
= classp
;
1625 elt
->first_same_value
= elt
;
1627 /* If this is a constant being set equivalent to a register or a register
1628 being set equivalent to a constant, note the constant equivalence.
1630 If this is a constant, it cannot be equivalent to a different constant,
1631 and a constant is the only thing that can be cheaper than a register. So
1632 we know the register is the head of the class (before the constant was
1635 If this is a register that is not already known equivalent to a
1636 constant, we must check the entire class.
1638 If this is a register that is already known equivalent to an insn,
1639 update the qtys `const_insn' to show that `this_insn' is the latest
1640 insn making that quantity equivalent to the constant. */
1642 if (elt
->is_const
&& classp
&& REG_P (classp
->exp
)
1645 int exp_q
= REG_QTY (REGNO (classp
->exp
));
1646 struct qty_table_elem
*exp_ent
= &qty_table
[exp_q
];
1648 exp_ent
->const_rtx
= gen_lowpart (exp_ent
->mode
, x
);
1649 exp_ent
->const_insn
= this_insn
;
1654 && ! qty_table
[REG_QTY (REGNO (x
))].const_rtx
1657 struct table_elt
*p
;
1659 for (p
= classp
; p
!= 0; p
= p
->next_same_value
)
1661 if (p
->is_const
&& !REG_P (p
->exp
))
1663 int x_q
= REG_QTY (REGNO (x
));
1664 struct qty_table_elem
*x_ent
= &qty_table
[x_q
];
1667 = gen_lowpart (GET_MODE (x
), p
->exp
);
1668 x_ent
->const_insn
= this_insn
;
1675 && qty_table
[REG_QTY (REGNO (x
))].const_rtx
1676 && GET_MODE (x
) == qty_table
[REG_QTY (REGNO (x
))].mode
)
1677 qty_table
[REG_QTY (REGNO (x
))].const_insn
= this_insn
;
1679 /* If this is a constant with symbolic value,
1680 and it has a term with an explicit integer value,
1681 link it up with related expressions. */
1682 if (GET_CODE (x
) == CONST
)
1684 rtx subexp
= get_related_value (x
);
1686 struct table_elt
*subelt
, *subelt_prev
;
1690 /* Get the integer-free subexpression in the hash table. */
1691 subhash
= SAFE_HASH (subexp
, mode
);
1692 subelt
= lookup (subexp
, subhash
, mode
);
1694 subelt
= insert (subexp
, NULL
, subhash
, mode
);
1695 /* Initialize SUBELT's circular chain if it has none. */
1696 if (subelt
->related_value
== 0)
1697 subelt
->related_value
= subelt
;
1698 /* Find the element in the circular chain that precedes SUBELT. */
1699 subelt_prev
= subelt
;
1700 while (subelt_prev
->related_value
!= subelt
)
1701 subelt_prev
= subelt_prev
->related_value
;
1702 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1703 This way the element that follows SUBELT is the oldest one. */
1704 elt
->related_value
= subelt_prev
->related_value
;
1705 subelt_prev
->related_value
= elt
;
1712 /* Wrap insert_with_costs by passing the default costs. */
1714 static struct table_elt
*
1715 insert (rtx x
, struct table_elt
*classp
, unsigned int hash
,
1718 return insert_with_costs (x
, classp
, hash
, mode
,
1719 COST (x
, mode
), approx_reg_cost (x
));
1723 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1724 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1725 the two classes equivalent.
1727 CLASS1 will be the surviving class; CLASS2 should not be used after this call.
1730 Any invalid entries in CLASS2 will not be copied. */
1733 merge_equiv_classes (struct table_elt
*class1
, struct table_elt
*class2
)
1735 struct table_elt
*elt
, *next
, *new_elt
;
1737 /* Ensure we start with the head of the classes. */
1738 class1
= class1
->first_same_value
;
1739 class2
= class2
->first_same_value
;
1741 /* If they were already equal, forget it. */
1742 if (class1
== class2
)
1745 for (elt
= class2
; elt
; elt
= next
)
1749 machine_mode mode
= elt
->mode
;
1751 next
= elt
->next_same_value
;
1753 /* Remove old entry, make a new one in CLASS1's class.
1754 Don't do this for invalid entries as we cannot find their
1755 hash code (it also isn't necessary). */
1756 if (REG_P (exp
) || exp_equiv_p (exp
, exp
, 1, false))
1758 bool need_rehash
= false;
1760 hash_arg_in_memory
= 0;
1761 hash
= HASH (exp
, mode
);
1765 need_rehash
= REGNO_QTY_VALID_P (REGNO (exp
));
1766 delete_reg_equiv (REGNO (exp
));
1769 if (REG_P (exp
) && REGNO (exp
) >= FIRST_PSEUDO_REGISTER
)
1770 remove_pseudo_from_table (exp
, hash
);
1772 remove_from_table (elt
, hash
);
1774 if (insert_regs (exp
, class1
, 0) || need_rehash
)
1776 rehash_using_reg (exp
);
1777 hash
= HASH (exp
, mode
);
1779 new_elt
= insert (exp
, class1
, hash
, mode
);
1780 new_elt
->in_memory
= hash_arg_in_memory
;
1781 if (GET_CODE (exp
) == ASM_OPERANDS
&& elt
->cost
== MAX_COST
)
1782 new_elt
->cost
= MAX_COST
;
1787 /* Flush the entire hash table. */
1790 flush_hash_table (void)
1793 struct table_elt
*p
;
1795 for (i
= 0; i
< HASH_SIZE
; i
++)
1796 for (p
= table
[i
]; p
; p
= table
[i
])
1798 /* Note that invalidate can remove elements
1799 after P in the current hash chain. */
1801 invalidate (p
->exp
, VOIDmode
);
1803 remove_from_table (p
, i
);
1807 /* Check whether an anti dependence exists between X and EXP. MODE and
1808 ADDR are as for canon_anti_dependence. */
1811 check_dependence (const_rtx x
, rtx exp
, machine_mode mode
, rtx addr
)
1813 subrtx_iterator::array_type array
;
1814 FOR_EACH_SUBRTX (iter
, array
, x
, NONCONST
)
1816 const_rtx x
= *iter
;
1817 if (MEM_P (x
) && canon_anti_dependence (x
, true, exp
, mode
, addr
))
/* Remove from the hash table, or mark as invalid, all expressions whose
   values could be altered by storing in register X.  */

static void
invalidate_reg (rtx x)
{
  gcc_assert (GET_CODE (x) == REG);

  /* If X is a register, dependencies on its contents are recorded
     through the qty number mechanism.  Just change the qty number of
     the register, mark it as invalid for expressions that refer to it,
     and remove it itself.  */
  unsigned int regno = REGNO (x);
  unsigned int hash = HASH (x, GET_MODE (x));

  /* Remove REGNO from any quantity list it might be on and indicate
     that its value might have changed.  If it is a pseudo, remove its
     entry from the hash table.

     For a hard register, we do the first two actions above for any
     additional hard registers corresponding to X.  Then, if any of these
     registers are in the table, we must remove any REG entries that
     overlap these registers.  */

  delete_reg_equiv (regno);
  REG_TICK (regno)++;
  SUBREG_TICKED (regno) = -1;

  if (regno >= FIRST_PSEUDO_REGISTER)
    remove_pseudo_from_table (x, hash);
  else
    {
      HOST_WIDE_INT in_table = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
      unsigned int endregno = END_REGNO (x);
      unsigned int rn;
      struct table_elt *p, *next;

      CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

      for (rn = regno + 1; rn < endregno; rn++)
        {
          in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
          CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
          delete_reg_equiv (rn);
          REG_TICK (rn)++;
          SUBREG_TICKED (rn) = -1;
        }

      if (in_table)
        for (hash = 0; hash < HASH_SIZE; hash++)
          for (p = table[hash]; p; p = next)
            {
              next = p->next_same_hash;

              if (!REG_P (p->exp) || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
                continue;

              unsigned int tregno = REGNO (p->exp);
              unsigned int tendregno = END_REGNO (p->exp);
              if (tendregno > regno && tregno < endregno)
                remove_from_table (p, hash);
            }
    }
}
/* Remove from the hash table, or mark as invalid, all expressions whose
   values could be altered by storing in X.  X is a register, a subreg, or
   a memory reference with nonvarying address (because, when a memory
   reference with a varying address is stored in, all memory references are
   removed by invalidate_memory so specific invalidation is superfluous).
   FULL_MODE, if not VOIDmode, indicates that this much should be
   invalidated instead of just the amount indicated by the mode of X.  This
   is only used for bitfield stores into memory.

   A nonvarying address may be just a register or just a symbol reference,
   or it may be either of those plus a numeric offset.  */

static void
invalidate (rtx x, machine_mode full_mode)
{
  int i;
  struct table_elt *p;
  rtx addr;

  switch (GET_CODE (x))
    {
    case REG:
      invalidate_reg (x);
      return;

    case SUBREG:
      invalidate (SUBREG_REG (x), VOIDmode);
      return;

    case PARALLEL:
      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
        invalidate (XVECEXP (x, 0, i), VOIDmode);
      return;

    case EXPR_LIST:
      /* This is part of a disjoint return value; extract the location in
         question ignoring the offset.  */
      invalidate (XEXP (x, 0), VOIDmode);
      return;

    case MEM:
      addr = canon_rtx (get_addr (XEXP (x, 0)));
      /* Calculate the canonical version of X here so that
         true_dependence doesn't generate new RTL for X on each call.  */
      x = canon_rtx (x);

      /* Remove all hash table elements that refer to overlapping pieces of
         memory.  */
      if (full_mode == VOIDmode)
        full_mode = GET_MODE (x);

      for (i = 0; i < HASH_SIZE; i++)
        {
          struct table_elt *next;

          for (p = table[i]; p; p = next)
            {
              next = p->next_same_hash;
              if (p->in_memory)
                {
                  /* Just canonicalize the expression once;
                     otherwise each time we call invalidate
                     true_dependence will canonicalize the
                     expression again.  */
                  if (!p->canon_exp)
                    p->canon_exp = canon_rtx (p->exp);
                  if (check_dependence (p->canon_exp, x, full_mode, addr))
                    remove_from_table (p, i);
                }
            }
        }
      return;

    default:
      gcc_unreachable ();
    }
}
/* Invalidate DEST.  Used when DEST is not going to be added
   into the hash table for some reason, e.g. do_not_record
   flagged on it.  */

static void
invalidate_dest (rtx dest)
{
  if (REG_P (dest)
      || GET_CODE (dest) == SUBREG
      || MEM_P (dest))
    invalidate (dest, VOIDmode);
  else if (GET_CODE (dest) == STRICT_LOW_PART
           || GET_CODE (dest) == ZERO_EXTRACT)
    invalidate (XEXP (dest, 0), GET_MODE (dest));
}
/* Remove all expressions that refer to register REGNO,
   since they are already invalid, and we are about to
   mark that register valid again and don't want the old
   expressions to reappear as valid.  */

static void
remove_invalid_refs (unsigned int regno)
{
  unsigned int i;
  struct table_elt *p, *next;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
        next = p->next_same_hash;
        if (!REG_P (p->exp) && refers_to_regno_p (regno, p->exp))
          remove_from_table (p, i);
      }
}
/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
   and mode MODE.  */

static void
remove_invalid_subreg_refs (unsigned int regno, poly_uint64 offset,
                            machine_mode mode)
{
  unsigned int i;
  struct table_elt *p, *next;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
        rtx exp = p->exp;
        next = p->next_same_hash;

        if (!REG_P (exp)
            && (GET_CODE (exp) != SUBREG
                || !REG_P (SUBREG_REG (exp))
                || REGNO (SUBREG_REG (exp)) != regno
                || ranges_maybe_overlap_p (SUBREG_BYTE (exp),
                                           GET_MODE_SIZE (GET_MODE (exp)),
                                           offset, GET_MODE_SIZE (mode)))
            && refers_to_regno_p (regno, p->exp))
          remove_from_table (p, i);
      }
}
/* Recompute the hash codes of any valid entries in the hash table that
   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.

   This is called when we make a jump equivalence.  */

static void
rehash_using_reg (rtx x)
{
  unsigned int i;
  struct table_elt *p, *next;
  unsigned hash;

  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* If X is not a register or if the register is known not to be in any
     valid entries in the table, we have no work to do.  */

  if (!REG_P (x)
      || REG_IN_TABLE (REGNO (x)) < 0
      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
    return;

  /* Scan all hash chains looking for valid entries that mention X.
     If we find one and it is in the wrong hash chain, move it.  */

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
        next = p->next_same_hash;
        if (reg_mentioned_p (x, p->exp)
            && exp_equiv_p (p->exp, p->exp, 1, false)
            && i != (hash = SAFE_HASH (p->exp, p->mode)))
          {
            if (p->next_same_hash)
              p->next_same_hash->prev_same_hash = p->prev_same_hash;

            if (p->prev_same_hash)
              p->prev_same_hash->next_same_hash = p->next_same_hash;
            else
              table[i] = p->next_same_hash;

            p->next_same_hash = table[hash];
            p->prev_same_hash = 0;
            if (table[hash])
              table[hash]->prev_same_hash = p;
            table[hash] = p;
          }
      }
}
/* Remove from the hash table any expression that is a call-clobbered
   register in INSN.  Also update their TICK values.  */

static void
invalidate_for_call (rtx_insn *insn)
{
  unsigned int regno;
  unsigned hash;
  struct table_elt *p, *next;
  int in_table = 0;
  hard_reg_set_iterator hrsi;

  /* Go through all the hard registers.  For each that might be clobbered
     in call insn INSN, remove the register from quantity chains and update
     reg_tick if defined.  Also see if any of these registers is currently
     in the table.

     ??? We could be more precise for partially-clobbered registers,
     and only invalidate values that actually occupy the clobbered part
     of the registers.  It doesn't seem worth the effort though, since
     we shouldn't see this situation much before RA.  Whatever choice
     we make here has to be consistent with the table walk below,
     so any change to this test will require a change there too.  */
  HARD_REG_SET callee_clobbers
    = insn_callee_abi (insn).full_and_partial_reg_clobbers ();
  EXECUTE_IF_SET_IN_HARD_REG_SET (callee_clobbers, 0, regno, hrsi)
    {
      delete_reg_equiv (regno);
      if (REG_TICK (regno) >= 0)
        {
          REG_TICK (regno)++;
          SUBREG_TICKED (regno) = -1;
        }
      in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
    }

  /* In the case where we have no call-clobbered hard registers in the
     table, we are done.  Otherwise, scan the table and remove any
     entry that overlaps a call-clobbered register.  */

  if (in_table)
    for (hash = 0; hash < HASH_SIZE; hash++)
      for (p = table[hash]; p; p = next)
        {
          next = p->next_same_hash;

          if (!REG_P (p->exp)
              || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
            continue;

          /* This must use the same test as above rather than the
             more accurate clobbers_reg_p.  */
          if (overlaps_hard_reg_set_p (callee_clobbers, GET_MODE (p->exp),
                                       REGNO (p->exp)))
            remove_from_table (p, hash);
        }
}
/* Given an expression X of type CONST,
   and ELT which is its table entry (or 0 if it
   is not in the hash table),
   return an alternate expression for X as a register plus integer.
   If none can be found, return 0.  */

static rtx
use_related_value (rtx x, struct table_elt *elt)
{
  struct table_elt *relt = 0;
  struct table_elt *p, *q;
  HOST_WIDE_INT offset;

  /* First, is there anything related known?
     If we have a table element, we can tell from that.
     Otherwise, must look it up.  */

  if (elt != 0 && elt->related_value != 0)
    relt = elt;
  else if (elt == 0 && GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      if (subexp != 0)
        relt = lookup (subexp,
                       SAFE_HASH (subexp, GET_MODE (subexp)),
                       GET_MODE (subexp));
    }

  if (relt == 0)
    return 0;

  /* Search all related table entries for one that has an
     equivalent register.  */

  p = relt;
  while (1)
    {
      /* This loop is strange in that it is executed in two different cases.
         The first is when X is already in the table.  Then it is searching
         the RELATED_VALUE list of X's class (RELT).  The second case is when
         X is not in the table.  Then RELT points to a class for the related
         value.

         Ensure that, whatever case we are in, that we ignore classes that have
         the same value as X.  */

      if (rtx_equal_p (x, p->exp))
        q = 0;
      else
        for (q = p->first_same_value; q; q = q->next_same_value)
          if (REG_P (q->exp))
            break;

      if (q)
        break;

      p = p->related_value;

      /* We went all the way around, so there is nothing to be found.
         Alternatively, perhaps RELT was in the table for some other reason
         and it has no related values recorded.  */
      if (p == relt || p == 0)
        break;
    }

  if (q == 0)
    return 0;

  offset = (get_integer_term (x) - get_integer_term (p->exp));
  /* Note: OFFSET may be 0 if P->xexp and X are related by commutativity.  */
  return plus_constant (q->mode, q->exp, offset);
}
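/* A tiny worked example for the string hash below (illustration only):
   hash_rtx_string ("ab") adds the bytes 'a' (97) and 'b' (98) and so
   returns 195, while a null or empty string hashes to 0.  */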
/* Hash a string.  Just add its bytes up.  */

static inline unsigned
hash_rtx_string (const char *ps)
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}
/* Same as hash_rtx, but call CB on each rtx if it is not NULL.
   When the callback returns true, we continue with the new rtx.  */

unsigned
hash_rtx_cb (const_rtx x, machine_mode mode,
             int *do_not_record_p, int *hash_arg_in_memory_p,
             bool have_reg_qty, hash_rtx_callback_function cb)
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  const char *fmt;
  machine_mode newmode;
  rtx newx;

  /* Used to turn recursion into iteration.  We can't rely on GCC's
     tail-recursion elimination since we need to keep accumulating values
     in HASH.  */
 repeat:
  if (x == 0)
    return hash;

  /* Invoke the callback first.  */
  if (cb != NULL
      && ((*cb) (x, mode, &newx, &newmode)))
    {
      hash += hash_rtx_cb (newx, newmode, do_not_record_p,
                           hash_arg_in_memory_p, have_reg_qty, cb);
      return hash;
    }

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
        unsigned int regno = REGNO (x);

        if (do_not_record_p && !reload_completed)
          {
            /* On some machines, we can't record any non-fixed hard register,
               because extending its life will cause reload problems.  We
               consider ap, fp, sp, gp to be fixed for this purpose.

               We also consider CCmode registers to be fixed for this purpose;
               failure to do so leads to failure to simplify 0<100 type of
               conditionals.

               On all machines, we can't record any global registers.
               Nor should we record any register that is in a small
               class, as defined by TARGET_CLASS_LIKELY_SPILLED_P.  */
            bool record;

            if (regno >= FIRST_PSEUDO_REGISTER)
              record = true;
            else if (x == frame_pointer_rtx
                     || x == hard_frame_pointer_rtx
                     || x == arg_pointer_rtx
                     || x == stack_pointer_rtx
                     || x == pic_offset_table_rtx)
              record = true;
            else if (global_regs[regno])
              record = false;
            else if (fixed_regs[regno])
              record = true;
            else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
              record = true;
            else if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
              record = false;
            else if (targetm.class_likely_spilled_p (REGNO_REG_CLASS (regno)))
              record = false;
            else
              record = true;

            if (!record)
              {
                *do_not_record_p = 1;
                return 0;
              }
          }

        hash += ((unsigned int) REG << 7);
        hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
        return hash;
      }

    /* We handle SUBREG of a REG specially because the underlying
       reg changes its hash value with every value change; we don't
       want to have to forget unrelated subregs when one subreg changes.  */
    case SUBREG:
      {
        if (REG_P (SUBREG_REG (x)))
          {
            hash += (((unsigned int) SUBREG << 7)
                     + REGNO (SUBREG_REG (x))
                     + (constant_lower_bound (SUBREG_BYTE (x))
                        / UNITS_PER_WORD));
            return hash;
          }
        break;
      }

    case CONST_INT:
      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
               + (unsigned int) INTVAL (x));
      return hash;

    case CONST_WIDE_INT:
      for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
        hash += CONST_WIDE_INT_ELT (x, i);
      return hash;

    case CONST_POLY_INT:
      {
        inchash::hash h;
        h.add_int (hash);
        for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
          h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
        return h.end ();
      }

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
         the integers representing the constant.  */
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
        hash += ((unsigned int) CONST_DOUBLE_LOW (x)
                 + (unsigned int) CONST_DOUBLE_HIGH (x));
      else
        hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      return hash;

    case CONST_FIXED:
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      hash += fixed_hash (CONST_FIXED_VALUE (x));
      return hash;

    case CONST_VECTOR:
      {
        int units;
        rtx elt;

        units = const_vector_encoded_nelts (x);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ENCODED_ELT (x, i);
            hash += hash_rtx_cb (elt, GET_MODE (elt),
                                 do_not_record_p, hash_arg_in_memory_p,
                                 have_reg_qty, cb);
          }

        return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
         differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
               + CODE_LABEL_NUMBER (label_ref_label (x)));
      return hash;

    case SYMBOL_REF:
      {
        /* Don't hash on the symbol's address to avoid bootstrap differences.
           Different hash values may cause expressions to be recorded in
           different orders and thus different registers to be used in the
           final assembler.  This also avoids differences in the dump files
           between various stages.  */
        unsigned int h = 0;
        const unsigned char *p = (const unsigned char *) XSTR (x, 0);

        while (*p)
          h += (h << 7) + *p++; /* ??? revisit */

        hash += ((unsigned int) SYMBOL_REF << 7) + h;
        return hash;
      }

    case MEM:
      /* We don't record if marked volatile or if BLKmode since we don't
         know the size of the move.  */
      if (do_not_record_p && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
        {
          *do_not_record_p = 1;
          return 0;
        }
      if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
        *hash_arg_in_memory_p = 1;

      /* Now that we have already found this special case,
         might as well speed it up as much as possible.  */
      hash += (unsigned) MEM;
      x = XEXP (x, 0);
      goto repeat;

    case USE:
      /* A USE that mentions non-volatile memory needs special
         handling since the MEM may be BLKmode which normally
         prevents an entry from being made.  Pure calls are
         marked by a USE which mentions BLKmode memory.
         See calls.c:emit_call_1.  */
      if (MEM_P (XEXP (x, 0))
          && ! MEM_VOLATILE_P (XEXP (x, 0)))
        {
          hash += (unsigned) USE;
          x = XEXP (x, 0);

          if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
            *hash_arg_in_memory_p = 1;

          /* Now that we have already found this special case,
             might as well speed it up as much as possible.  */
          hash += (unsigned) MEM;
          x = XEXP (x, 0);
          goto repeat;
        }
      break;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      if (do_not_record_p) {
        *do_not_record_p = 1;
        return 0;
      }
      else
        return hash;
      break;

    case ASM_OPERANDS:
      if (do_not_record_p && MEM_VOLATILE_P (x))
        {
          *do_not_record_p = 1;
          return 0;
        }
      else
        {
          /* We don't want to take the filename and line into account.  */
          hash += (unsigned) code + (unsigned) GET_MODE (x)
            + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
            + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
            + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);

          if (ASM_OPERANDS_INPUT_LENGTH (x))
            {
              for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
                {
                  hash += (hash_rtx_cb (ASM_OPERANDS_INPUT (x, i),
                                        GET_MODE (ASM_OPERANDS_INPUT (x, i)),
                                        do_not_record_p, hash_arg_in_memory_p,
                                        have_reg_qty, cb)
                           + hash_rtx_string
                                (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
                }

              hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
              x = ASM_OPERANDS_INPUT (x, 0);
              mode = GET_MODE (x);
              goto repeat;
            }

          return hash;
        }
      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  hash += (unsigned) code + (unsigned) GET_MODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'e':
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = XEXP (x, i);
              goto repeat;
            }

          hash += hash_rtx_cb (XEXP (x, i), VOIDmode, do_not_record_p,
                               hash_arg_in_memory_p,
                               have_reg_qty, cb);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            hash += hash_rtx_cb (XVECEXP (x, i, j), VOIDmode, do_not_record_p,
                                 hash_arg_in_memory_p,
                                 have_reg_qty, cb);
          break;

        case 's':
          hash += hash_rtx_string (XSTR (x, i));
          break;

        case 'i':
          hash += (unsigned int) XINT (x, i);
          break;

        case 'p':
          hash += constant_lower_bound (SUBREG_BYTE (x));
          break;

        default:
          break;
        }
    }

  return hash;
}
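/* Because the REG case above mixes in REG_QTY when HAVE_REG_QTY is set,
   two registers that currently share a quantity number hash to the same
   value and therefore land in the same hash chain; with HAVE_REG_QTY
   false the raw register number is used instead.  */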
/* Hash an rtx.  We are careful to make sure the value is never negative.
   Equivalent registers hash identically.
   MODE is used in hashing for CONST_INTs only;
   otherwise the mode of X is used.

   Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.

   If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
   a MEM rtx which does not have the MEM_READONLY_P flag set.

   Note that cse_insn knows that the hash code of a MEM expression
   is just (int) MEM plus the hash code of the address.  */

unsigned
hash_rtx (const_rtx x, machine_mode mode, int *do_not_record_p,
          int *hash_arg_in_memory_p, bool have_reg_qty)
{
  return hash_rtx_cb (x, mode, do_not_record_p,
                      hash_arg_in_memory_p, have_reg_qty, NULL);
}
/* Hash an rtx X for cse via hash_rtx.
   Stores 1 in do_not_record if any subexpression is volatile.
   Stores 1 in hash_arg_in_memory if X contains a mem rtx which
   does not have the MEM_READONLY_P flag set.  */

static inline unsigned
canon_hash (rtx x, machine_mode mode)
{
  return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
}

/* Like canon_hash but with no side effects, i.e. do_not_record
   and hash_arg_in_memory are not changed.  */

static inline unsigned
safe_hash (rtx x, machine_mode mode)
{
  int dummy_do_not_record;
  return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
}
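/* Note for exp_equiv_p below: commutative operators are checked in both
   operand orders, so e.g. (plus (reg A) (reg B)) and (plus (reg B) (reg A))
   are treated as equivalent, whereas two operations in different machine
   modes never are.  */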
/* Return 1 iff X and Y would canonicalize into the same thing,
   without actually constructing the canonicalization of either one.
   If VALIDATE is nonzero,
   we assume X is an expression being processed from the rtl
   and Y was found in the hash table.  We check register refs
   in Y for being marked as valid.

   If FOR_GCSE is true, we compare X and Y for equivalence for GCSE.  */

int
exp_equiv_p (const_rtx x, const_rtx y, int validate, bool for_gcse)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  /* Note: it is incorrect to assume an expression is equivalent to itself
     if VALIDATE is nonzero.  */
  if (x == y && !validate)
    return 1;

  if (x == 0 || y == 0)
    return x == y;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address space are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    CASE_CONST_UNIQUE:
      return x == y;

    case LABEL_REF:
      return label_ref_label (x) == label_ref_label (y);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      if (for_gcse)
        return REGNO (x) == REGNO (y);
      else
        {
          unsigned int regno = REGNO (y);
          unsigned int i;
          unsigned int endregno = END_REGNO (y);

          /* If the quantities are not the same, the expressions are not
             equivalent.  If they are the same and we are not validating,
             they are equivalent.  Otherwise, ensure all regs are up-to-date.  */

          if (REG_QTY (REGNO (x)) != REG_QTY (regno))
            return 0;

          if (! validate)
            return 1;

          for (i = regno; i < endregno; i++)
            if (REG_IN_TABLE (i) != REG_TICK (i))
              return 0;

          return 1;
        }

    case MEM:
      if (for_gcse)
        {
          /* A volatile mem should not be considered equivalent to any
             other.  */
          if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
            return 0;

          /* Can't merge two expressions in different alias sets, since we
             can decide that the expression is transparent in a block when
             it isn't, due to it being set with the different alias set.

             Also, can't merge two expressions with different MEM_ATTRS.
             They could e.g. be two different entities allocated into the
             same space on the stack (see e.g. PR25130).  In that case, the
             MEM addresses can be the same, even though the two MEMs are
             absolutely not equivalent.

             But because really all MEM attributes should be the same for
             equivalent MEMs, we just use the invariant that MEMs that have
             the same attributes share the same mem_attrs data structure.  */
          if (!mem_attrs_eq_p (MEM_ATTRS (x), MEM_ATTRS (y)))
            return 0;

          /* If we are handling exceptions, we cannot consider two expressions
             with different trapping status as equivalent, because simple_mem
             might accept one and reject the other.  */
          if (cfun->can_throw_non_call_exceptions
              && (MEM_NOTRAP_P (x) != MEM_NOTRAP_P (y)))
            return 0;
        }
      break;

    /* For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
                            validate, for_gcse)
               && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
                               validate, for_gcse))
              || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
                               validate, for_gcse)
                  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
                                  validate, for_gcse)));

    case ASM_OPERANDS:
      /* We don't use the generic code below because we want to
         disregard filename and line numbers.  */

      /* A volatile asm isn't equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
        return 0;

      if (GET_MODE (x) != GET_MODE (y)
          || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
          || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
                     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
          || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
          || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
        return 0;

      if (ASM_OPERANDS_INPUT_LENGTH (x))
        {
          for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
            if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
                               ASM_OPERANDS_INPUT (y, i),
                               validate, for_gcse)
                || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
                           ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
              return 0;
        }

      return 1;

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'e':
          if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
                             validate, for_gcse))
            return 0;
          break;

        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;
          for (j = 0; j < XVECLEN (x, i); j++)
            if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
                               validate, for_gcse))
              return 0;
          break;

        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'p':
          if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
            return 0;
          break;

        default:
          break;
        }
    }

  return 1;
}
/* Subroutine of canon_reg.  Pass *XLOC through canon_reg, and validate
   the result if necessary.  INSN is as for canon_reg.  */

static void
validate_canon_reg (rtx *xloc, rtx_insn *insn)
{
  if (REG_P (*xloc))
    {
      rtx new_rtx = canon_reg (*xloc, insn);

      /* If replacing pseudo with hard reg or vice versa, ensure the
         insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
      gcc_assert (insn && new_rtx);
      validate_change (insn, xloc, new_rtx, 1);
    }
}
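/* Illustration for canon_reg below (register numbers are hypothetical):
   if pseudos 102 and 150 currently share a quantity whose first_reg is
   102, then (plus:SI (reg:SI 150) (const_int 4)) is rewritten as
   (plus:SI (reg:SI 102) (const_int 4)); every register reference is
   replaced by the oldest equivalent register recorded for its quantity.  */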
/* Canonicalize an expression:
   replace each register reference inside it
   with the "oldest" equivalent register.

   If INSN is nonzero validate_change is used to ensure that INSN remains valid
   after we make our substitution.  The calls are made with IN_GROUP nonzero
   so apply_change_group must be called upon the outermost return from this
   function (unless INSN is zero).  The result of apply_change_group can
   generally be discarded since the changes we are making are optional.  */

static rtx
canon_reg (rtx x, rtx_insn *insn)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return x;

  code = GET_CODE (x);
  switch (code)
    {
    case PC:
    case CC0:
    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return x;

    case REG:
      {
        int first;
        int q;
        struct qty_table_elem *ent;

        /* Never replace a hard reg, because hard regs can appear
           in more than one machine mode, and we must preserve the mode
           of each occurrence.  Also, some hard regs appear in
           MEMs that are shared and mustn't be altered.  Don't try to
           replace any reg that maps to a reg of class NO_REGS.  */
        if (REGNO (x) < FIRST_PSEUDO_REGISTER
            || ! REGNO_QTY_VALID_P (REGNO (x)))
          return x;

        q = REG_QTY (REGNO (x));
        ent = &qty_table[q];
        first = ent->first_reg;
        return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
                : REGNO_REG_CLASS (first) == NO_REGS ? x
                : gen_rtx_REG (ent->mode, first));
      }

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      if (fmt[i] == 'e')
        validate_canon_reg (&XEXP (x, i), insn);
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          validate_canon_reg (&XVECEXP (x, i, j), insn);
    }

  return x;
}
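/* Illustration for find_comparison_args below (not an exhaustive
   description): given CODE == EQ with *PARG1 == (reg:CC flags) and
   *PARG2 == (const_int 0), where the flags register is recorded as
   equivalent to (compare (reg X) (reg Y)), the function returns EQ and
   rewrites *PARG1 and *PARG2 to X and Y.  The walk below also handles
   STORE_FLAG_VALUE results and reversed comparisons.  */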
/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
   operation (EQ, NE, GT, etc.), follow it back through the hash table and
   find what values are really being compared.

   *PARG1 and *PARG2 are updated to contain the rtx representing the values
   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
   compared to produce cc0.

   The return value is the comparison operator and is either the code of
   A or the code corresponding to the inverse of the comparison.  */

static enum rtx_code
find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
                      machine_mode *pmode1, machine_mode *pmode2)
{
  rtx arg1, arg2;
  hash_set<rtx> *visited = NULL;
  /* Set nonzero when we find something of interest.  */
  rtx x = NULL_RTX;

  arg1 = *parg1, arg2 = *parg2;

  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */

  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
    {
      int reverse_code = 0;
      struct table_elt *p = 0;

      /* Remember state from previous iteration.  */
      if (x)
        {
          if (!visited)
            visited = new hash_set<rtx>;
          visited->add (x);
          x = 0;
        }

      /* If arg1 is a COMPARE, extract the comparison arguments from it.
         On machines with CC0, this is the only case that can occur, since
         fold_rtx will return the COMPARE or item being compared with zero
         when given CC0.  */

      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
        x = arg1;

      /* If ARG1 is a comparison operator and CODE is testing for
         STORE_FLAG_VALUE, get the inner arguments.  */

      else if (COMPARISON_P (arg1))
        {
#ifdef FLOAT_STORE_FLAG_VALUE
          REAL_VALUE_TYPE fsfv;
#endif

          if (code == NE
              || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
                  && code == LT && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
              || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
                  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
                      REAL_VALUE_NEGATIVE (fsfv)))
#endif
              )
            x = arg1;
          else if (code == EQ
                   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
                       && code == GE && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
                   || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
                       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
                           REAL_VALUE_NEGATIVE (fsfv)))
#endif
                   )
            x = arg1, reverse_code = 1;
        }

      /* ??? We could also check for

         (ne (and (eq (...) (const_int 1))) (const_int 0))

         and related forms, but let's wait until we see them occurring.  */

      if (x == 0)
        {
          /* Look up ARG1 in the hash table and see if it has an equivalence
             that lets us see what is being compared.  */
          p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
          if (p)
            {
              p = p->first_same_value;

              /* If what we compare is already known to be constant, that is as
                 good as it gets.
                 We need to break the loop in this case, because otherwise we
                 can have an infinite loop when looking at a reg that is known
                 to be a constant which is the same as a comparison of a reg
                 against zero which appears later in the insn stream, which in
                 turn is constant and the same as the comparison of the first reg
                 against zero...  */
              if (p->is_const)
                break;
            }
        }

      for (; p; p = p->next_same_value)
        {
          machine_mode inner_mode = GET_MODE (p->exp);
#ifdef FLOAT_STORE_FLAG_VALUE
          REAL_VALUE_TYPE fsfv;
#endif

          /* If the entry isn't valid, skip it.  */
          if (! exp_equiv_p (p->exp, p->exp, 1, false))
            continue;

          /* If it's a comparison we've used before, skip it.  */
          if (visited && visited->contains (p->exp))
            continue;

          if (GET_CODE (p->exp) == COMPARE
              /* Another possibility is that this machine has a compare insn
                 that includes the comparison code.  In that case, ARG1 would
                 be equivalent to a comparison operation that would set ARG1 to
                 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
                 ORIG_CODE is the actual comparison being done; if it is an EQ,
                 we must reverse ORIG_CODE.  On machine with a negative value
                 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
              || ((code == NE
                   || (code == LT
                       && val_signbit_known_set_p (inner_mode,
                                                   STORE_FLAG_VALUE))
#ifdef FLOAT_STORE_FLAG_VALUE
                   || (code == LT
                       && SCALAR_FLOAT_MODE_P (inner_mode)
                       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
                           REAL_VALUE_NEGATIVE (fsfv)))
#endif
                   )
                  && COMPARISON_P (p->exp)))
            {
              x = p->exp;
              break;
            }
          else if ((code == EQ
                    || (code == GE
                        && val_signbit_known_set_p (inner_mode,
                                                    STORE_FLAG_VALUE))
#ifdef FLOAT_STORE_FLAG_VALUE
                    || (code == GE
                        && SCALAR_FLOAT_MODE_P (inner_mode)
                        && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
                            REAL_VALUE_NEGATIVE (fsfv)))
#endif
                    )
                   && COMPARISON_P (p->exp))
            {
              reverse_code = 1;
              x = p->exp;
              break;
            }

          /* If this is a non-trapping address, e.g. fp + constant, the
             equivalent is a better operand since it may let us predict
             the value of the comparison.  */
          else if (!rtx_addr_can_trap_p (p->exp))
            {
              arg1 = p->exp;
              continue;
            }
        }

      /* If we didn't find a useful equivalence for ARG1, we are done.
         Otherwise, set up for the next iteration.  */
      if (x == 0)
        break;

      /* If we need to reverse the comparison, make sure that is
         possible -- we can't necessarily infer the value of GE from LT
         with floating-point operands.  */
      if (reverse_code)
        {
          enum rtx_code reversed = reversed_comparison_code (x, NULL);
          if (reversed == UNKNOWN)
            break;
          else
            code = reversed;
        }
      else if (COMPARISON_P (x))
        code = GET_CODE (x);
      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
    }

  /* Return our results.  Return the modes from before fold_rtx
     because fold_rtx might produce const_int, and then it's too late.  */
  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);

  if (visited)
    delete visited;
  return code;
}
/* If X is a nontrivial arithmetic operation on an argument for which
   a constant value can be determined, return the result of operating
   on that value, as a constant.  Otherwise, return X, possibly with
   one or more operands changed to a forward-propagated constant.

   If X is a register whose contents are known, we do NOT return
   those contents here; equiv_constant is called to perform that task.
   For SUBREGs and MEMs, we do that both here and in equiv_constant.

   INSN is the insn that we may be modifying.  If it is 0, make a copy
   of X before modifying it.  */

static rtx
fold_rtx (rtx x, rtx_insn *insn)
{
  enum rtx_code code;
  machine_mode mode;
  const char *fmt;
  int i;
  rtx new_rtx = 0;
  int changed = 0;
  poly_int64 xval;

  /* Operands of X.  */
  /* Workaround -Wmaybe-uninitialized false positive during
     profiledbootstrap by initializing them.  */
  rtx folded_arg0 = NULL_RTX;
  rtx folded_arg1 = NULL_RTX;

  /* Constant equivalents of first three operands of X;
     0 when no such equivalent is known.  */
  rtx const_arg0;
  rtx const_arg1;
  rtx const_arg2;

  /* The mode of the first operand of X.  We need this for sign and zero
     extends.  */
  machine_mode mode_arg0;

  if (x == 0)
    return x;

  /* Try to perform some initial simplifications on X.  */
  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
    case SUBREG:
    /* The first operand of a SIGN/ZERO_EXTRACT has a different meaning
       than it would in other contexts.  Basically its mode does not
       signify the size of the object read.  That information is carried
       by the size operand.  If we happen to have a MEM of the appropriate
       mode in our tables with a constant value we could simplify the
       extraction incorrectly if we allowed substitution of that value
       for the MEM.  */
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      if ((new_rtx = equiv_constant (x)) != NULL_RTX)
        return new_rtx;
      return x;

    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case REG:
    case PC:
      /* No use simplifying an EXPR_LIST
         since they are used only for lists of args
         in a function call's REG_EQUAL note.  */
    case EXPR_LIST:
      return x;

    case CC0:
      return prev_insn_cc0;

    case ASM_OPERANDS:
      if (insn)
        {
          for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
            validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
                             fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
        }
      return x;

    case CALL:
      if (NO_FUNCTION_CSE && CONSTANT_P (XEXP (XEXP (x, 0), 0)))
        return x;
      break;

    /* Anything else goes through the loop below.  */
    default:
      break;
    }

  mode = GET_MODE (x);
  const_arg0 = 0;
  const_arg1 = 0;
  const_arg2 = 0;
  mode_arg0 = VOIDmode;

  /* Try folding our operands.
     Then see which ones have constant values known.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        rtx folded_arg = XEXP (x, i), const_arg;
        machine_mode mode_arg = GET_MODE (folded_arg);

        switch (GET_CODE (folded_arg))
          {
          case MEM:
          case REG:
          case SUBREG:
            const_arg = equiv_constant (folded_arg);
            break;

          case CONST:
          CASE_CONST_ANY:
          case SYMBOL_REF:
          case LABEL_REF:
            const_arg = folded_arg;
            break;

          case CC0:
            /* The cc0-user and cc0-setter may be in different blocks if
               the cc0-setter potentially traps.  In that case PREV_INSN_CC0
               will have been cleared as we exited the block with the
               setter.

               While we could potentially track cc0 in this case, it just
               doesn't seem to be worth it given that cc0 targets are not
               terribly common or important these days and trapping math
               is rarely used.  The combination of those two conditions
               necessary to trip this situation is exceedingly rare in the
               real world.  */
            if (!prev_insn_cc0)
              const_arg = NULL_RTX;
            else
              {
                folded_arg = prev_insn_cc0;
                mode_arg = prev_insn_cc0_mode;
                const_arg = equiv_constant (folded_arg);
              }
            break;

          default:
            folded_arg = fold_rtx (folded_arg, insn);
            const_arg = equiv_constant (folded_arg);
            break;
          }

        /* For the first three operands, see if the operand
           is constant or equivalent to a constant.  */
        switch (i)
          {
          case 0:
            folded_arg0 = folded_arg;
            const_arg0 = const_arg;
            mode_arg0 = mode_arg;
            break;
          case 1:
            folded_arg1 = folded_arg;
            const_arg1 = const_arg;
            break;
          case 2:
            const_arg2 = const_arg;
            break;
          }

        /* Pick the least expensive of the argument and an equivalent constant
           argument.  */
        if (const_arg != 0
            && const_arg != folded_arg
            && (COST_IN (const_arg, mode_arg, code, i)
                <= COST_IN (folded_arg, mode_arg, code, i))

            /* It's not safe to substitute the operand of a conversion
               operator with a constant, as the conversion's identity
               depends upon the mode of its operand.  This optimization
               is handled by the call to simplify_unary_operation.  */
            && (GET_RTX_CLASS (code) != RTX_UNARY
                || GET_MODE (const_arg) == mode_arg0
                || (code != ZERO_EXTEND
                    && code != SIGN_EXTEND
                    && code != TRUNCATE
                    && code != FLOAT_TRUNCATE
                    && code != FLOAT_EXTEND
                    && code != FLOAT
                    && code != FIX
                    && code != UNSIGNED_FLOAT
                    && code != UNSIGNED_FIX)))
          folded_arg = const_arg;

        if (folded_arg == XEXP (x, i))
          continue;

        if (insn == NULL_RTX && !changed)
          x = copy_rtx (x);
        changed = 1;
        validate_unshare_change (insn, &XEXP (x, i), folded_arg, 1);
      }

  if (changed)
    {
      /* Canonicalize X if necessary, and keep const_argN and folded_argN
         consistent with the order in X.  */
      if (canonicalize_change_group (insn, x))
        {
          std::swap (const_arg0, const_arg1);
          std::swap (folded_arg0, folded_arg1);
        }

      apply_change_group ();
    }

  /* If X is an arithmetic operation, see if we can simplify it.  */

  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      {
        /* We can't simplify extension ops unless we know the
           original mode.  */
        if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
            && mode_arg0 == VOIDmode)
          break;

        new_rtx = simplify_unary_operation (code, mode,
                                            const_arg0 ? const_arg0 : folded_arg0,
                                            mode_arg0);
      }
      break;

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      /* See what items are actually being compared and set FOLDED_ARG[01]
         to those values and CODE to the actual comparison code.  If any are
         constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
         do anything if both operands are already known to be constant.  */

      /* ??? Vector mode comparisons are not supported yet.  */
      if (VECTOR_MODE_P (mode))
        break;

      if (const_arg0 == 0 || const_arg1 == 0)
        {
          struct table_elt *p0, *p1;
          rtx true_rtx, false_rtx;
          machine_mode mode_arg1;

          if (SCALAR_FLOAT_MODE_P (mode))
            {
#ifdef FLOAT_STORE_FLAG_VALUE
              true_rtx = (const_double_from_real_value
                          (FLOAT_STORE_FLAG_VALUE (mode), mode));
#else
              true_rtx = NULL_RTX;
#endif
              false_rtx = CONST0_RTX (mode);
            }
          else
            {
              true_rtx = const_true_rtx;
              false_rtx = const0_rtx;
            }

          code = find_comparison_args (code, &folded_arg0, &folded_arg1,
                                       &mode_arg0, &mode_arg1);

          /* If the mode is VOIDmode or a MODE_CC mode, we don't know
             what kinds of things are being compared, so we can't do
             anything with this comparison.  */

          if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
            break;

          const_arg0 = equiv_constant (folded_arg0);
          const_arg1 = equiv_constant (folded_arg1);

          /* If we do not now have two constants being compared, see
             if we can nevertheless deduce some things about the
             comparison.  */
          if (const_arg0 == 0 || const_arg1 == 0)
            {
              if (const_arg1 != NULL)
                {
                  rtx cheapest_simplification;
                  int cheapest_cost;
                  rtx simp_result;
                  struct table_elt *p;

                  /* See if we can find an equivalent of folded_arg0
                     that gets us a cheaper expression, possibly a
                     constant through simplifications.  */
                  p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0),
                              mode_arg0);

                  if (p != NULL)
                    {
                      cheapest_simplification = x;
                      cheapest_cost = COST (x, mode);

                      for (p = p->first_same_value; p != NULL; p = p->next_same_value)
                        {
                          int cost;

                          /* If the entry isn't valid, skip it.  */
                          if (! exp_equiv_p (p->exp, p->exp, 1, false))
                            continue;

                          /* Try to simplify using this equivalence.  */
                          simp_result
                            = simplify_relational_operation (code, mode,
                                                             mode_arg0,
                                                             p->exp,
                                                             const_arg1);

                          if (simp_result == NULL)
                            continue;

                          cost = COST (simp_result, mode);
                          if (cost < cheapest_cost)
                            {
                              cheapest_cost = cost;
                              cheapest_simplification = simp_result;
                            }
                        }

                      /* If we have a cheaper expression now, use that
                         and try folding it further, from the top.  */
                      if (cheapest_simplification != x)
                        return fold_rtx (copy_rtx (cheapest_simplification),
                                         insn);
                    }
                }

              /* See if the two operands are the same.  */

              if ((REG_P (folded_arg0)
                   && REG_P (folded_arg1)
                   && (REG_QTY (REGNO (folded_arg0))
                       == REG_QTY (REGNO (folded_arg1))))
                  || ((p0 = lookup (folded_arg0,
                                    SAFE_HASH (folded_arg0, mode_arg0),
                                    mode_arg0))
                      && (p1 = lookup (folded_arg1,
                                       SAFE_HASH (folded_arg1, mode_arg0),
                                       mode_arg0))
                      && p0->first_same_value == p1->first_same_value))
                folded_arg1 = folded_arg0;

              /* If FOLDED_ARG0 is a register, see if the comparison we are
                 doing now is either the same as we did before or the reverse
                 (we only check the reverse if not floating-point).  */
              else if (REG_P (folded_arg0))
                {
                  int qty = REG_QTY (REGNO (folded_arg0));

                  if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
                    {
                      struct qty_table_elem *ent = &qty_table[qty];

                      if ((comparison_dominates_p (ent->comparison_code, code)
                           || (! FLOAT_MODE_P (mode_arg0)
                               && comparison_dominates_p (ent->comparison_code,
                                                          reverse_condition (code))))
                          && (rtx_equal_p (ent->comparison_const, folded_arg1)
                              || (const_arg1
                                  && rtx_equal_p (ent->comparison_const,
                                                  const_arg1))
                              || (REG_P (folded_arg1)
                                  && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
                        {
                          if (comparison_dominates_p (ent->comparison_code, code))
                            {
                              if (true_rtx)
                                return true_rtx;
                              else
                                break;
                            }
                          else
                            return false_rtx;
                        }
                    }
                }
            }
        }

      /* If we are comparing against zero, see if the first operand is
         equivalent to an IOR with a constant.  If so, we may be able to
         determine the result of this comparison.  */
      if (const_arg1 == const0_rtx && !const_arg0)
        {
          rtx y = lookup_as_function (folded_arg0, IOR);
          rtx inner_const;

          if (y != 0
              && (inner_const = equiv_constant (XEXP (y, 1))) != 0
              && CONST_INT_P (inner_const)
              && INTVAL (inner_const) != 0)
            folded_arg0 = gen_rtx_IOR (mode_arg0, XEXP (y, 0), inner_const);
        }

      {
        rtx op0 = const_arg0 ? const_arg0 : copy_rtx (folded_arg0);
        rtx op1 = const_arg1 ? const_arg1 : copy_rtx (folded_arg1);
        new_rtx = simplify_relational_operation (code, mode, mode_arg0,
                                                 op0, op1);
      }
      break;

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      switch (code)
        {
        case PLUS:
          /* If the second operand is a LABEL_REF, see if the first is a MINUS
             with that LABEL_REF as its second operand.  If so, the result is
             the first operand of that MINUS.  This handles switches with an
             ADDR_DIFF_VEC table.  */
          if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
            {
              rtx y
                = GET_CODE (folded_arg0) == MINUS ? folded_arg0
                : lookup_as_function (folded_arg0, MINUS);

              if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
                  && label_ref_label (XEXP (y, 1)) == label_ref_label (const_arg1))
                return XEXP (y, 0);

              /* Now try for a CONST of a MINUS like the above.  */
              if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
                        : lookup_as_function (folded_arg0, CONST))) != 0
                  && GET_CODE (XEXP (y, 0)) == MINUS
                  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
                  && label_ref_label (XEXP (XEXP (y, 0), 1)) == label_ref_label (const_arg1))
                return XEXP (XEXP (y, 0), 0);
            }

          /* Likewise if the operands are in the other order.  */
          if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
            {
              rtx y
                = GET_CODE (folded_arg1) == MINUS ? folded_arg1
                : lookup_as_function (folded_arg1, MINUS);

              if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
                  && label_ref_label (XEXP (y, 1)) == label_ref_label (const_arg0))
                return XEXP (y, 0);

              /* Now try for a CONST of a MINUS like the above.  */
              if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
                        : lookup_as_function (folded_arg1, CONST))) != 0
                  && GET_CODE (XEXP (y, 0)) == MINUS
                  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
                  && label_ref_label (XEXP (XEXP (y, 0), 1)) == label_ref_label (const_arg0))
                return XEXP (XEXP (y, 0), 0);
            }

          /* If second operand is a register equivalent to a negative
             CONST_INT, see if we can find a register equivalent to the
             positive constant.  Make a MINUS if so.  Don't do this for
             a non-negative constant since we might then alternate between
             choosing positive and negative constants.  Having the positive
             constant previously-used is the more common case.  Be sure
             the resulting constant is non-negative; if const_arg1 were
             the smallest negative number this would overflow: depending
             on the mode, this would either just be the same value (and
             hence not save anything) or be incorrect.  */
          if (const_arg1 != 0 && CONST_INT_P (const_arg1)
              && INTVAL (const_arg1) < 0
              /* This used to test

                 -INTVAL (const_arg1) >= 0

                 But The Sun V5.0 compilers mis-compiled that test.  So
                 instead we test for the problematic value in a more direct
                 manner and hope the Sun compilers get it correct.  */
              && INTVAL (const_arg1) !=
                (HOST_WIDE_INT_1 << (HOST_BITS_PER_WIDE_INT - 1))
              && REG_P (folded_arg1))
            {
              rtx new_const = GEN_INT (-INTVAL (const_arg1));
              struct table_elt *p
                = lookup (new_const, SAFE_HASH (new_const, mode), mode);

              if (p)
                for (p = p->first_same_value; p; p = p->next_same_value)
                  if (REG_P (p->exp))
                    return simplify_gen_binary (MINUS, mode, folded_arg0,
                                                canon_reg (p->exp, NULL));
            }
          goto from_plus;

        case MINUS:
          /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
             If so, produce (PLUS Z C2-C).  */
          if (const_arg1 != 0 && poly_int_rtx_p (const_arg1, &xval))
            {
              rtx y = lookup_as_function (XEXP (x, 0), PLUS);
              if (y && poly_int_rtx_p (XEXP (y, 1)))
                return fold_rtx (plus_constant (mode, copy_rtx (y), -xval),
                                 NULL);
            }

          /* Fall through.  */

        from_plus:
        case SMIN:    case SMAX:      case UMIN:    case UMAX:
        case IOR:     case AND:       case XOR:
        case MULT:
        case ASHIFT:  case LSHIFTRT:  case ASHIFTRT:
          /* If we have (<op> <reg> <const_int>) for an associative OP and REG
             is known to be of similar form, we may be able to replace the
             operation with a combined operation.  This may eliminate the
             intermediate operation if every use is simplified in this way.
             Note that the similar optimization done by combine.c only works
             if the intermediate operation's result has only one reference.  */

          if (REG_P (folded_arg0)
              && const_arg1 && CONST_INT_P (const_arg1))
            {
              int is_shift
                = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
              rtx y, inner_const, new_const;
              rtx canon_const_arg1 = const_arg1;
              enum rtx_code associate_code;

              if (is_shift
                  && (INTVAL (const_arg1) >= GET_MODE_UNIT_PRECISION (mode)
                      || INTVAL (const_arg1) < 0))
                {
                  if (SHIFT_COUNT_TRUNCATED)
                    canon_const_arg1 = gen_int_shift_amount
                      (mode, (INTVAL (const_arg1)
                              & (GET_MODE_UNIT_BITSIZE (mode) - 1)));
                  else
                    break;
                }

              y = lookup_as_function (folded_arg0, code);
              if (y == 0)
                break;

              /* If we have compiled a statement like
                 "if (x == (x & mask1))", and now are looking at
                 "x & mask2", we will have a case where the first operand
                 of Y is the same as our first operand.  Unless we detect
                 this case, an infinite loop will result.  */
              if (XEXP (y, 0) == folded_arg0)
                break;

              inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
              if (!inner_const || !CONST_INT_P (inner_const))
                break;

              /* Don't associate these operations if they are a PLUS with the
                 same constant and it is a power of two.  These might be doable
                 with a pre- or post-increment.  Similarly for two subtracts of
                 identical powers of two with post decrement.  */

              if (code == PLUS && const_arg1 == inner_const
                  && ((HAVE_PRE_INCREMENT
                       && pow2p_hwi (INTVAL (const_arg1)))
                      || (HAVE_POST_INCREMENT
                          && pow2p_hwi (INTVAL (const_arg1)))
                      || (HAVE_PRE_DECREMENT
                          && pow2p_hwi (- INTVAL (const_arg1)))
                      || (HAVE_POST_DECREMENT
                          && pow2p_hwi (- INTVAL (const_arg1)))))
                break;

              /* ??? Vector mode shifts by scalar
                 shift operand are not supported yet.  */
              if (is_shift && VECTOR_MODE_P (mode))
                break;

              if (is_shift
                  && (INTVAL (inner_const) >= GET_MODE_UNIT_PRECISION (mode)
                      || INTVAL (inner_const) < 0))
                {
                  if (SHIFT_COUNT_TRUNCATED)
                    inner_const = gen_int_shift_amount
                      (mode, (INTVAL (inner_const)
                              & (GET_MODE_UNIT_BITSIZE (mode) - 1)));
                  else
                    break;
                }

              /* Compute the code used to compose the constants.  For example,
                 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS.  */

              associate_code = (is_shift || code == MINUS ? PLUS : code);

              new_const = simplify_binary_operation (associate_code, mode,
                                                     canon_const_arg1,
                                                     inner_const);

              if (new_const == 0)
                break;

              /* If we are associating shift operations, don't let this
                 produce a shift of the size of the object or larger.
                 This could occur when we follow a sign-extend by a right
                 shift on a machine that does a sign-extend as a pair
                 of shifts.  */

              if (is_shift
                  && CONST_INT_P (new_const)
                  && INTVAL (new_const) >= GET_MODE_UNIT_PRECISION (mode))
                {
                  /* As an exception, we can turn an ASHIFTRT of this
                     form into a shift of the number of bits - 1.  */
                  if (code == ASHIFTRT)
                    new_const = gen_int_shift_amount
                      (mode, GET_MODE_UNIT_BITSIZE (mode) - 1);
                  else if (!side_effects_p (XEXP (y, 0)))
                    return CONST0_RTX (mode);
                  else
                    break;
                }

              y = copy_rtx (XEXP (y, 0));

              /* If Y contains our first operand (the most common way this
                 can happen is if Y is a MEM), we would go into an infinite
                 loop if we tried to fold it.  So don't in that case.  */

              if (! reg_mentioned_p (folded_arg0, y))
                y = fold_rtx (y, insn);

              return simplify_gen_binary (code, mode, y, new_const);
            }
          break;

        case DIV:       case UDIV:
          /* ??? The associative optimization performed immediately above is
             also possible for DIV and UDIV using associate_code of MULT.
             However, we would need extra code to verify that the
             multiplication does not overflow, that is, there is no overflow
             in the calculation of new_const.  */
          break;

        default:
          break;
        }

      new_rtx = simplify_binary_operation (code, mode,
                                           const_arg0 ? const_arg0 : folded_arg0,
                                           const_arg1 ? const_arg1 : folded_arg1);
      break;

    case RTX_OBJ:
      /* (lo_sum (high X) X) is simply X.  */
      if (code == LO_SUM && const_arg0 != 0
          && GET_CODE (const_arg0) == HIGH
          && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
        return const_arg1;
      break;

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      new_rtx = simplify_ternary_operation (code, mode, mode_arg0,
                                            const_arg0 ? const_arg0 : folded_arg0,
                                            const_arg1 ? const_arg1 : folded_arg1,
                                            const_arg2 ? const_arg2 : XEXP (x, 2));
      break;

    default:
      break;
    }

  return new_rtx ? new_rtx : x;
}
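/* Illustration for equiv_constant below (hypothetical numbers): if the
   lowpart SImode subreg of (reg:DI 120) is asked for and (reg:DI 120) is
   currently known to hold a CONST_INT, the SUBREG branch can hand the
   known constant to simplify_subreg and return the folded SImode
   constant.  */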
/* Return a constant value currently equivalent to X.
   Return 0 if we don't know one.  */

static rtx
equiv_constant (rtx x)
{
  if (REG_P (x)
      && REGNO_QTY_VALID_P (REGNO (x)))
    {
      int x_q = REG_QTY (REGNO (x));
      struct qty_table_elem *x_ent = &qty_table[x_q];

      if (x_ent->const_rtx)
        x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
    }

  if (x == 0 || CONSTANT_P (x))
    return x;

  if (GET_CODE (x) == SUBREG)
    {
      machine_mode mode = GET_MODE (x);
      machine_mode imode = GET_MODE (SUBREG_REG (x));
      rtx new_rtx;

      /* See if we previously assigned a constant value to this SUBREG.  */
      if ((new_rtx = lookup_as_function (x, CONST_INT)) != 0
          || (new_rtx = lookup_as_function (x, CONST_WIDE_INT)) != 0
          || (NUM_POLY_INT_COEFFS > 1
              && (new_rtx = lookup_as_function (x, CONST_POLY_INT)) != 0)
          || (new_rtx = lookup_as_function (x, CONST_DOUBLE)) != 0
          || (new_rtx = lookup_as_function (x, CONST_FIXED)) != 0)
        return new_rtx;

      /* If we didn't and if doing so makes sense, see if we previously
         assigned a constant value to the enclosing word mode SUBREG.  */
      if (known_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD)
          && known_lt (UNITS_PER_WORD, GET_MODE_SIZE (imode)))
        {
          poly_int64 byte = (SUBREG_BYTE (x)
                             - subreg_lowpart_offset (mode, word_mode));
          if (known_ge (byte, 0) && multiple_p (byte, UNITS_PER_WORD))
            {
              rtx y = gen_rtx_SUBREG (word_mode, SUBREG_REG (x), byte);
              new_rtx = lookup_as_function (y, CONST_INT);
              if (new_rtx)
                return gen_lowpart (mode, new_rtx);
            }
        }

      /* Otherwise see if we already have a constant for the inner REG,
         and if that is enough to calculate an equivalent constant for
         the subreg.  Note that the upper bits of paradoxical subregs
         are undefined, so they cannot be said to equal anything.  */
      if (REG_P (SUBREG_REG (x))
          && !paradoxical_subreg_p (x)
          && (new_rtx = equiv_constant (SUBREG_REG (x))) != 0)
        return simplify_subreg (mode, new_rtx, imode, SUBREG_BYTE (x));

      return 0;
    }

  /* If X is a MEM, see if it is a constant-pool reference, or look it up in
     the hash table in case its value was seen before.  */

  if (MEM_P (x))
    {
      struct table_elt *elt;

      x = avoid_constant_pool_reference (x);
      if (CONSTANT_P (x))
        return x;

      elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
      if (elt == 0)
        return 0;

      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
        if (elt->is_const && CONSTANT_P (elt->exp))
          return elt->exp;
    }

  return 0;
}
/* Given INSN, a jump insn, TAKEN indicates if we are following the
   "taken" branch.

   In certain cases, this can cause us to add an equivalence.  For example,
   if we are following the taken case of
	if (i == 2)
   we can add the fact that `i' and '2' are now equivalent.

   In any case, we can record that this comparison was passed.  If the same
   comparison is seen later, we will know its value.  */

static void
record_jump_equiv (rtx_insn *insn, bool taken)
{
  int cond_known_true;
  rtx op0, op1;
  rtx set;
  machine_mode mode, mode0, mode1;
  int reversed_nonequality = 0;
  enum rtx_code code;

  /* Ensure this is the right kind of insn.  */
  gcc_assert (any_condjump_p (insn));

  set = pc_set (insn);

  /* See if this jump condition is known true or false.  */
  if (taken)
    cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
  else
    cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);

  /* Get the type of comparison being done and the operands being compared.
     If we had to reverse a non-equality condition, record that fact so we
     know that it isn't valid for floating-point.  */
  code = GET_CODE (XEXP (SET_SRC (set), 0));
  op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
  op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);

  /* On a cc0 target the cc0-setter and cc0-user may end up in different
     blocks.  When that happens the tracking of the cc0-setter via
     PREV_INSN_CC0 is spoiled.  That means that fold_rtx may return
     NULL_RTX.  In those cases, there's nothing to record.  */
  if (op0 == NULL_RTX || op1 == NULL_RTX)
    return;

  code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
  if (! cond_known_true)
    {
      code = reversed_comparison_code_parts (code, op0, op1, insn);

      /* Don't remember if we can't find the inverse.  */
      if (code == UNKNOWN)
        return;

      reversed_nonequality = (code != EQ && code != NE);
    }

  /* The mode is the mode of the non-constant.  */
  mode = mode0;
  if (mode1 != VOIDmode)
    mode = mode1;

  record_jump_cond (code, mode, op0, op1, reversed_nonequality);
}
/* Yet another form of subreg creation.  In this case, we want something in
   MODE, and we should assume OP has MODE iff it is naturally modeless.  */

static rtx
record_jump_cond_subreg (machine_mode mode, rtx op)
{
  machine_mode op_mode = GET_MODE (op);
  if (op_mode == mode || op_mode == VOIDmode)
    return op;
  return lowpart_subreg (mode, op, op_mode);
}
/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
   REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
   Make any useful entries we can with that information.  Called from
   above function and called recursively.  */

static void
record_jump_cond (enum rtx_code code, machine_mode mode, rtx op0,
                  rtx op1, int reversed_nonequality)
{
  unsigned op0_hash, op1_hash;
  int op0_in_memory, op1_in_memory;
  struct table_elt *op0_elt, *op1_elt;

  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
     we know that they are also equal in the smaller mode (this is also
     true for all smaller modes whether or not there is a SUBREG, but
     is not worth testing for with no SUBREG).  */

  /* Note that GET_MODE (op0) may not equal MODE.  */
  if (code == EQ && paradoxical_subreg_p (op0))
    {
      machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = record_jump_cond_subreg (inner_mode, op1);
      if (tem)
        record_jump_cond (code, mode, SUBREG_REG (op0), tem,
                          reversed_nonequality);
    }

  if (code == EQ && paradoxical_subreg_p (op1))
    {
      machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = record_jump_cond_subreg (inner_mode, op0);
      if (tem)
        record_jump_cond (code, mode, SUBREG_REG (op1), tem,
                          reversed_nonequality);
    }

  /* Similarly, if this is an NE comparison, and either is a SUBREG
     making a smaller mode, we know the whole thing is also NE.  */

  /* Note that GET_MODE (op0) may not equal MODE;
     if we test MODE instead, we can get an infinite recursion
     alternating between two modes each wider than MODE.  */

  if (code == NE
      && partial_subreg_p (op0)
      && subreg_lowpart_p (op0))
    {
      machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = record_jump_cond_subreg (inner_mode, op1);
      if (tem)
        record_jump_cond (code, mode, SUBREG_REG (op0), tem,
                          reversed_nonequality);
    }

  if (code == NE
      && partial_subreg_p (op1)
      && subreg_lowpart_p (op1))
    {
      machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = record_jump_cond_subreg (inner_mode, op0);
      if (tem)
        record_jump_cond (code, mode, SUBREG_REG (op1), tem,
                          reversed_nonequality);
    }

  /* Hash both operands.  */

  do_not_record = 0;
  hash_arg_in_memory = 0;
  op0_hash = HASH (op0, mode);
  op0_in_memory = hash_arg_in_memory;

  if (do_not_record)
    return;

  do_not_record = 0;
  hash_arg_in_memory = 0;
  op1_hash = HASH (op1, mode);
  op1_in_memory = hash_arg_in_memory;

  if (do_not_record)
    return;

  /* Look up both operands.  */
  op0_elt = lookup (op0, op0_hash, mode);
  op1_elt = lookup (op1, op1_hash, mode);

  /* If both operands are already equivalent or if they are not in the
     table but are identical, do nothing.  */
  if ((op0_elt != 0 && op1_elt != 0
       && op0_elt->first_same_value == op1_elt->first_same_value)
      || op0 == op1 || rtx_equal_p (op0, op1))
    return;

  /* If we aren't setting two things equal all we can do is save this
     comparison.   Similarly if this is floating-point.  In the latter
     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
     If we record the equality, we might inadvertently delete code
     whose intent was to change -0 to +0.  */

  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
    {
      struct qty_table_elem *ent;
      int qty;

      /* If we reversed a floating-point comparison, if OP0 is not a
         register, or if OP1 is neither a register or constant, we can't
         do anything.  */

      if (!REG_P (op1))
        op1 = equiv_constant (op1);

      if ((reversed_nonequality && FLOAT_MODE_P (mode))
          || !REG_P (op0) || op1 == 0)
        return;

      /* Put OP0 in the hash table if it isn't already.  This gives it a
         new quantity number.  */
      if (op0_elt == 0)
        {
          if (insert_regs (op0, NULL, 0))
            {
              rehash_using_reg (op0);
              op0_hash = HASH (op0, mode);

              /* If OP0 is contained in OP1, this changes its hash code
                 as well.  Faster to rehash than to check, except
                 for the simple case of a constant.  */
              if (! CONSTANT_P (op1))
                op1_hash = HASH (op1,mode);
            }

          op0_elt = insert (op0, NULL, op0_hash, mode);
          op0_elt->in_memory = op0_in_memory;
        }

      qty = REG_QTY (REGNO (op0));
      ent = &qty_table[qty];

      ent->comparison_code = code;
      if (REG_P (op1))
        {
          /* Look it up again--in case op0 and op1 are the same.  */
          op1_elt = lookup (op1, op1_hash, mode);

          /* Put OP1 in the hash table so it gets a new quantity number.  */
          if (op1_elt == 0)
            {
              if (insert_regs (op1, NULL, 0))
                {
                  rehash_using_reg (op1);
                  op1_hash = HASH (op1, mode);
                }

              op1_elt = insert (op1, NULL, op1_hash, mode);
              op1_elt->in_memory = op1_in_memory;
            }

          ent->comparison_const = NULL_RTX;
          ent->comparison_qty = REG_QTY (REGNO (op1));
        }
      else
        {
          ent->comparison_const = op1;
          ent->comparison_qty = -1;
        }

      return;
    }

  /* If either side is still missing an equivalence, make it now,
     then merge the equivalences.  */

  if (op0_elt == 0)
    {
      if (insert_regs (op0, NULL, 0))
        {
          rehash_using_reg (op0);
          op0_hash = HASH (op0, mode);
        }

      op0_elt = insert (op0, NULL, op0_hash, mode);
      op0_elt->in_memory = op0_in_memory;
    }

  if (op1_elt == 0)
    {
      if (insert_regs (op1, NULL, 0))
        {
          rehash_using_reg (op1);
          op1_hash = HASH (op1, mode);
        }

      op1_elt = insert (op1, NULL, op1_hash, mode);
      op1_elt->in_memory = op1_in_memory;
    }

  merge_equiv_classes (op0_elt, op1_elt);
}
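/* A typical case that still reaches cse_insn after the GIMPLE optimizers
   (register numbers are illustrative): two nearby insns in one extended
   basic block each compute (plus (reg 100) (const_int 8)); the second
   computation is replaced by the pseudo already holding the first result,
   or the address is folded into a cheaper addressing mode.  */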
/* CSE processing for one instruction.

   Most "true" common subexpressions are optimized away in GIMPLE, but the
   few that "leak through" are cleaned up by cse_insn, and complex
   addressing modes are often formed here.

   The main function is cse_insn, and between here and that function
   a couple of helper functions are defined to keep the size of cse_insn
   within reasonable proportions.

   Data is shared between the main and helper functions via STRUCT SET,
   which contains all data related to every set in the instruction that
   is being processed.

   Note that cse_main processes all sets in the instruction.  Most
   passes in GCC only process simple SET insns or single_set insns, but
   CSE processes insns with multiple sets as well.  */
/* Data on one SET contained in the instruction.  */

struct set
{
  /* The SET rtx itself.  */
  rtx rtl;
  /* The SET_SRC of the rtx (the original value, if it is changing).  */
  rtx src;
  /* The hash-table element for the SET_SRC of the SET.  */
  struct table_elt *src_elt;
  /* Hash value for the SET_SRC.  */
  unsigned src_hash;
  /* Hash value for the SET_DEST.  */
  unsigned dest_hash;
  /* The SET_DEST, with SUBREG, etc., stripped.  */
  rtx inner_dest;
  /* Nonzero if the SET_SRC is in memory.  */
  char src_in_memory;
  /* Nonzero if the SET_SRC contains something
     whose value cannot be predicted and understood.  */
  char src_volatile;
  /* Original machine mode, in case it becomes a CONST_INT.
     The size of this field should match the size of the mode
     field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  /* Hash value of constant equivalent for SET_SRC.  */
  unsigned src_const_hash;
  /* A constant equivalent for SET_SRC, if any.  */
  rtx src_const;
  /* Table entry for constant equivalent for SET_SRC, if any.  */
  struct table_elt *src_const_elt;
  /* Table entry for the destination address.  */
  struct table_elt *dest_addr_elt;
};
/* Special handling for (set REG0 REG1) where REG0 is the
   "cheapest", cheaper than REG1.  After cse, REG1 will probably not
   be used in the sequel, so (if easily done) change this insn to
   (set REG1 REG0) and replace REG1 with REG0 in the previous insn
   that computed their value.  Then REG1 will become a dead store
   and won't cloud the situation for later optimizations.

   Do not make this change if REG1 is a hard register, because it will
   then be used in the sequel and we may be changing a two-operand insn
   into a three-operand insn.

   This is the last transformation that cse_insn will try to do.  */
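/* For illustration only (register numbers are made up): if REG0 is pseudo
   100 and REG1 is pseudo 200, the transformation described above rewrites

	(set (reg 200) (plus (reg 300) (reg 400)))
	(set (reg 100) (reg 200))

   into

	(set (reg 100) (plus (reg 300) (reg 400)))
	(set (reg 200) (reg 100))

   so the copy into pseudo 200 becomes a dead store when 200 is not used
   later in the block.  */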
static void
try_back_substitute_reg (rtx set, rtx_insn *insn)
{
  rtx dest = SET_DEST (set);
  rtx src = SET_SRC (set);

  if (REG_P (dest)
      && REG_P (src) && ! HARD_REGISTER_P (src)
      && REGNO_QTY_VALID_P (REGNO (src)))
    {
      int src_q = REG_QTY (REGNO (src));
      struct qty_table_elem *src_ent = &qty_table[src_q];

      if (src_ent->first_reg == REGNO (dest))
	{
	  /* Scan for the previous nonnote insn, but stop at a basic
	     block boundary.  */
	  rtx_insn *prev = insn;
	  rtx_insn *bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
	  do
	    {
	      prev = PREV_INSN (prev);
	    }
	  while (prev != bb_head && (NOTE_P (prev) || DEBUG_INSN_P (prev)));

	  /* Do not swap the registers around if the previous instruction
	     attaches a REG_EQUIV note to REG1.

	     ??? It's not entirely clear whether we can transfer a REG_EQUIV
	     from the pseudo that originally shadowed an incoming argument
	     to another register.  Some uses of REG_EQUIV might rely on it
	     being attached to REG1 rather than REG2.

	     This section previously turned the REG_EQUIV into a REG_EQUAL
	     note.  We cannot do that because REG_EQUIV may provide an
	     uninitialized stack slot when REG_PARM_STACK_SPACE is used.  */
	  if (NONJUMP_INSN_P (prev)
	      && GET_CODE (PATTERN (prev)) == SET
	      && SET_DEST (PATTERN (prev)) == src
	      && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
	    {
	      rtx note;

	      validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
	      validate_change (insn, &SET_DEST (set), src, 1);
	      validate_change (insn, &SET_SRC (set), dest, 1);
	      apply_change_group ();

	      /* If INSN has a REG_EQUAL note, and this note mentions
		 REG0, then we must delete it, because the value in
		 REG0 has changed.  If the note's value is REG1, we must
		 also delete it because that is now this insn's dest.  */
	      note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
	      if (note != 0
		  && (reg_mentioned_p (dest, XEXP (note, 0))
		      || rtx_equal_p (src, XEXP (note, 0))))
		remove_note (insn, note);

	      /* If INSN has a REG_ARGS_SIZE note, move it to PREV.  */
	      note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
	      if (note != 0)
		{
		  remove_note (insn, note);
		  gcc_assert (!find_reg_note (prev, REG_ARGS_SIZE, NULL_RTX));
		  set_unique_reg_note (prev, REG_ARGS_SIZE, XEXP (note, 0));
		}
	    }
	}
    }
}
/* Record all the SETs in this instruction into SETS_PTR,
   and return the number of recorded sets.  */
static int
find_sets_in_insn (rtx_insn *insn, struct set **psets)
{
  struct set *sets = *psets;
  int n_sets = 0;
  rtx x = PATTERN (insn);

  if (GET_CODE (x) == SET)
    {
      /* Ignore SETs that are unconditional jumps.
	 They never need cse processing, so this does not hurt.
	 The reason is not efficiency but rather
	 so that we can test at the end for instructions
	 that have been simplified to unconditional jumps
	 and not be misled by unchanged instructions
	 that were unconditional jumps to begin with.  */
      if (SET_DEST (x) == pc_rtx
	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
	;
      /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
	 The hard function value register is used only once, to copy to
	 someplace else, so it isn't worth cse'ing.  */
      else if (GET_CODE (SET_SRC (x)) == CALL)
	;
      else
	sets[n_sets++].rtl = x;
    }
  else if (GET_CODE (x) == PARALLEL)
    {
      int i, lim = XVECLEN (x, 0);

      /* Go over the expressions of the PARALLEL in forward order, to
	 put them in the same order in the SETS array.  */
      for (i = 0; i < lim; i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (GET_CODE (y) == SET)
	    {
	      /* As above, we ignore unconditional jumps and call-insns and
		 ignore the result of apply_change_group.  */
	      if (SET_DEST (y) == pc_rtx
		  && GET_CODE (SET_SRC (y)) == LABEL_REF)
		;
	      else if (GET_CODE (SET_SRC (y)) == CALL)
		;
	      else
		sets[n_sets++].rtl = y;
	    }
	}
    }

  return n_sets;
}
/* Subroutine of canonicalize_insn.  X is an ASM_OPERANDS in INSN.  */

static void
canon_asm_operands (rtx x, rtx_insn *insn)
{
  for (int i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
    {
      rtx input = ASM_OPERANDS_INPUT (x, i);
      if (!(REG_P (input) && HARD_REGISTER_P (input)))
	{
	  input = canon_reg (input, insn);
	  validate_change (insn, &ASM_OPERANDS_INPUT (x, i), input, 1);
	}
    }
}
/* Where possible, substitute every register reference in the N_SETS
   number of SETS in INSN with the canonical register.

   Register canonicalization propagates the earliest register (i.e.
   one that is set before INSN) with the same value.  This is a very
   useful, simple form of CSE, to clean up warts from expanding GIMPLE
   to RTL.  For instance, a CONST for an address is usually expanded
   multiple times to loads into different registers, thus creating many
   subexpressions of the form:

      (set (reg1) (some_const))
      (set (mem (... reg1 ...) (thing)))
      (set (reg2) (some_const))
      (set (mem (... reg2 ...) (thing)))

   After canonicalizing, the code takes the following form:

      (set (reg1) (some_const))
      (set (mem (... reg1 ...) (thing)))
      (set (reg2) (some_const))
      (set (mem (... reg1 ...) (thing)))

   The set to reg2 is now trivially dead, and the memory reference (or
   address, or whatever) may be a candidate for further CSEing.

   In this function, the result of apply_change_group can be ignored;
   see canon_reg.  */
static void
canonicalize_insn (rtx_insn *insn, struct set **psets, int n_sets)
{
  struct set *sets = *psets;
  rtx tem;
  rtx x = PATTERN (insn);
  int i;

  if (CALL_P (insn))
    for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
      if (GET_CODE (XEXP (tem, 0)) != SET)
	XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);

  if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
    {
      canon_reg (SET_SRC (x), insn);
      apply_change_group ();
      fold_rtx (SET_SRC (x), insn);
    }
  else if (GET_CODE (x) == CLOBBER)
    {
      /* If we clobber memory, canon the address.
	 This does nothing when a register is clobbered
	 because we have already invalidated the reg.  */
      if (MEM_P (XEXP (x, 0)))
	canon_reg (XEXP (x, 0), insn);
    }
  else if (GET_CODE (x) == USE
	   && ! (REG_P (XEXP (x, 0))
		 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
    /* Canonicalize a USE of a pseudo register or memory location.  */
    canon_reg (x, insn);
  else if (GET_CODE (x) == ASM_OPERANDS)
    canon_asm_operands (x, insn);
  else if (GET_CODE (x) == CALL)
    {
      canon_reg (x, insn);
      apply_change_group ();
    }
  else if (DEBUG_INSN_P (insn))
    canon_reg (PATTERN (insn), insn);
  else if (GET_CODE (x) == PARALLEL)
    {
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (GET_CODE (y) == SET && GET_CODE (SET_SRC (y)) == CALL)
	    {
	      canon_reg (SET_SRC (y), insn);
	      apply_change_group ();
	      fold_rtx (SET_SRC (y), insn);
	    }
	  else if (GET_CODE (y) == CLOBBER)
	    {
	      if (MEM_P (XEXP (y, 0)))
		canon_reg (XEXP (y, 0), insn);
	    }
	  else if (GET_CODE (y) == USE
		   && ! (REG_P (XEXP (y, 0))
			 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
	    canon_reg (y, insn);
	  else if (GET_CODE (y) == ASM_OPERANDS)
	    canon_asm_operands (y, insn);
	  else if (GET_CODE (y) == CALL)
	    {
	      canon_reg (y, insn);
	      apply_change_group ();
	    }
	}
    }

  if (n_sets == 1 && REG_NOTES (insn) != 0
      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
    {
      /* We potentially will process this insn many times.  Therefore,
	 drop the REG_EQUAL note if it is equal to the SET_SRC of the
	 main set.

	 Do not do so if the REG_EQUAL note is for a STRICT_LOW_PART,
	 because cse_insn handles those specially.  */
      if (GET_CODE (SET_DEST (sets[0].rtl)) != STRICT_LOW_PART
	  && rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl)))
	remove_note (insn, tem);
      else
	{
	  canon_reg (XEXP (tem, 0), insn);
	  apply_change_group ();
	  XEXP (tem, 0) = fold_rtx (XEXP (tem, 0), insn);
	  df_notes_rescan (insn);
	}
    }
  /* Canonicalize sources and addresses of destinations.
     We do this in a separate pass to avoid problems when a MATCH_DUP is
     present in the insn pattern.  In that case, we want to ensure that
     we don't break the duplicate nature of the pattern.  So we will replace
     both operands at the same time.  Otherwise, we would fail to find an
     equivalent substitution in the loop calling validate_change below.

     We used to suppress canonicalization of DEST if it appears in SRC,
     but we don't do this any more.  */

  for (i = 0; i < n_sets; i++)
    {
      rtx dest = SET_DEST (sets[i].rtl);
      rtx src = SET_SRC (sets[i].rtl);
      rtx new_rtx = canon_reg (src, insn);

      validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);

      if (GET_CODE (dest) == ZERO_EXTRACT)
	{
	  validate_change (insn, &XEXP (dest, 1),
			   canon_reg (XEXP (dest, 1), insn), 1);
	  validate_change (insn, &XEXP (dest, 2),
			   canon_reg (XEXP (dest, 2), insn), 1);
	}

      while (GET_CODE (dest) == SUBREG
	     || GET_CODE (dest) == ZERO_EXTRACT
	     || GET_CODE (dest) == STRICT_LOW_PART)
	dest = XEXP (dest, 0);

      if (MEM_P (dest))
	canon_reg (dest, insn);
    }

  /* Now that we have done all the replacements, we can apply the change
     group and see if they all work.  Note that this will cause some
     canonicalizations that would have worked individually not to be applied
     because some other canonicalization didn't work, but this should not
     matter.

     The result of apply_change_group can be ignored; see canon_reg.  */

  apply_change_group ();
}
/* Main function of CSE.
   First simplify sources and addresses of all assignments
   in the instruction, using previously-computed equivalent values.
   Then install the new sources and destinations in the table
   of available values.  */

static void
cse_insn (rtx_insn *insn)
{
  rtx x = PATTERN (insn);
  int i;
  rtx tem;
  int n_sets = 0;

  rtx src_eqv = 0;
  struct table_elt *src_eqv_elt = 0;
  int src_eqv_volatile = 0;
  int src_eqv_in_memory = 0;
  unsigned src_eqv_hash = 0;

  struct set *sets = (struct set *) 0;

  if (GET_CODE (x) == SET)
    sets = XALLOCA (struct set);
  else if (GET_CODE (x) == PARALLEL)
    sets = XALLOCAVEC (struct set, XVECLEN (x, 0));

  this_insn = insn;
  /* Records what this insn does to set CC0.  */
  this_insn_cc0 = 0;
  this_insn_cc0_mode = VOIDmode;

  /* Find all regs explicitly clobbered in this insn,
     to ensure they are not replaced with any other regs
     elsewhere in this insn.  */
  invalidate_from_sets_and_clobbers (insn);

  /* Record all the SETs in this instruction.  */
  n_sets = find_sets_in_insn (insn, &sets);

  /* Substitute the canonical register where possible.  */
  canonicalize_insn (insn, &sets, n_sets);
  /* If this insn has a REG_EQUAL note, store the equivalent value in SRC_EQV,
     if different, or if the DEST is a STRICT_LOW_PART/ZERO_EXTRACT.  The
     latter condition is necessary because SRC_EQV is handled specially for
     this case, and if it isn't set, then there will be no equivalence
     for the destination.  */
  if (n_sets == 1 && REG_NOTES (insn) != 0
      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
    {
      if (GET_CODE (SET_DEST (sets[0].rtl)) != ZERO_EXTRACT
	  && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
	      || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
	src_eqv = copy_rtx (XEXP (tem, 0));
      /* If DEST is of the form ZERO_EXTRACT, as in:
	   (set (zero_extract:SI (reg:SI 119)
		    (const_int 16 [0x10])
		    (const_int 16 [0x10]))
		(const_int 51154 [0xc7d2]))
	 REG_EQUAL note will specify the value of register (reg:SI 119) at this
	 point.  Note that this is different from SRC_EQV.  We can however
	 calculate SRC_EQV with the position and width of ZERO_EXTRACT.  */
      else if (GET_CODE (SET_DEST (sets[0].rtl)) == ZERO_EXTRACT
	       && CONST_INT_P (XEXP (tem, 0))
	       && CONST_INT_P (XEXP (SET_DEST (sets[0].rtl), 1))
	       && CONST_INT_P (XEXP (SET_DEST (sets[0].rtl), 2)))
	{
	  rtx dest_reg = XEXP (SET_DEST (sets[0].rtl), 0);
	  /* This is the mode of XEXP (tem, 0) as well.  */
	  scalar_int_mode dest_mode
	    = as_a <scalar_int_mode> (GET_MODE (dest_reg));
	  rtx width = XEXP (SET_DEST (sets[0].rtl), 1);
	  rtx pos = XEXP (SET_DEST (sets[0].rtl), 2);
	  HOST_WIDE_INT val = INTVAL (XEXP (tem, 0));
	  HOST_WIDE_INT mask;
	  unsigned int shift;
	  if (BITS_BIG_ENDIAN)
	    shift = (GET_MODE_PRECISION (dest_mode)
		     - INTVAL (pos) - INTVAL (width));
	  else
	    shift = INTVAL (pos);
	  if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
	    mask = HOST_WIDE_INT_M1;
	  else
	    mask = (HOST_WIDE_INT_1 << INTVAL (width)) - 1;
	  val = (val >> shift) & mask;
	  src_eqv = GEN_INT (val);
	}
    }
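  /* Worked example (hypothetical values, for illustration only): for the
     pattern quoted in the comment above, pos = 16 and width = 16, so with
     !BITS_BIG_ENDIAN we get shift = 16 and mask = 0xffff.  If the REG_EQUAL
     note says (reg:SI 119) equals 0xc7d20042, then
     SRC_EQV = (0xc7d20042 >> 16) & 0xffff = 0xc7d2 (51154), i.e. the
     constant that this insn actually stores into the bit-field.  */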
  /* Set sets[i].src_elt to the class each source belongs to.
     Detect assignments from or to volatile things
     and set sets[i] to zero so they will be ignored
     in the rest of this function.

     Nothing in this loop changes the hash table or the register chains.  */

  for (i = 0; i < n_sets; i++)
    {
4628 bool repeat
= false;
4629 bool mem_noop_insn
= false;
4632 struct table_elt
*elt
= 0, *p
;
4636 rtx src_related
= 0;
4637 bool src_related_is_const_anchor
= false;
4638 struct table_elt
*src_const_elt
= 0;
4639 int src_cost
= MAX_COST
;
4640 int src_eqv_cost
= MAX_COST
;
4641 int src_folded_cost
= MAX_COST
;
4642 int src_related_cost
= MAX_COST
;
4643 int src_elt_cost
= MAX_COST
;
4644 int src_regcost
= MAX_COST
;
4645 int src_eqv_regcost
= MAX_COST
;
4646 int src_folded_regcost
= MAX_COST
;
4647 int src_related_regcost
= MAX_COST
;
4648 int src_elt_regcost
= MAX_COST
;
      /* Set nonzero if we need to call force_const_mem on the
	 contents of src_folded before using it.  */
      int src_folded_force_flag = 0;
4652 scalar_int_mode int_mode
;
4654 dest
= SET_DEST (sets
[i
].rtl
);
4655 src
= SET_SRC (sets
[i
].rtl
);
4657 /* If SRC is a constant that has no machine mode,
4658 hash it with the destination's machine mode.
4659 This way we can keep different modes separate. */
4661 mode
= GET_MODE (src
) == VOIDmode
? GET_MODE (dest
) : GET_MODE (src
);
4662 sets
[i
].mode
= mode
;
4666 machine_mode eqvmode
= mode
;
4667 if (GET_CODE (dest
) == STRICT_LOW_PART
)
4668 eqvmode
= GET_MODE (SUBREG_REG (XEXP (dest
, 0)));
4670 hash_arg_in_memory
= 0;
4671 src_eqv_hash
= HASH (src_eqv
, eqvmode
);
4673 /* Find the equivalence class for the equivalent expression. */
4676 src_eqv_elt
= lookup (src_eqv
, src_eqv_hash
, eqvmode
);
4678 src_eqv_volatile
= do_not_record
;
4679 src_eqv_in_memory
= hash_arg_in_memory
;
4682 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4683 value of the INNER register, not the destination. So it is not
4684 a valid substitution for the source. But save it for later. */
4685 if (GET_CODE (dest
) == STRICT_LOW_PART
)
4688 src_eqv_here
= src_eqv
;
      /* Simplify any foldable subexpressions in SRC.  Then get the fully-
	 simplified result, which may not necessarily be valid.  */
      src_folded = fold_rtx (src, NULL);
4695 /* ??? This caused bad code to be generated for the m68k port with -O2.
4696 Suppose src is (CONST_INT -1), and that after truncation src_folded
4697 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4698 At the end we will add src and src_const to the same equivalence
4699 class. We now have 3 and -1 on the same equivalence class. This
4700 causes later instructions to be mis-optimized. */
4701 /* If storing a constant in a bitfield, pre-truncate the constant
4702 so we will be able to record it later. */
4703 if (GET_CODE (SET_DEST (sets
[i
].rtl
)) == ZERO_EXTRACT
)
4705 rtx width
= XEXP (SET_DEST (sets
[i
].rtl
), 1);
4707 if (CONST_INT_P (src
)
4708 && CONST_INT_P (width
)
4709 && INTVAL (width
) < HOST_BITS_PER_WIDE_INT
4710 && (INTVAL (src
) & ((HOST_WIDE_INT
) (-1) << INTVAL (width
))))
4712 = GEN_INT (INTVAL (src
) & ((HOST_WIDE_INT_1
4713 << INTVAL (width
)) - 1));
4717 /* Compute SRC's hash code, and also notice if it
4718 should not be recorded at all. In that case,
4719 prevent any further processing of this assignment. */
4721 hash_arg_in_memory
= 0;
4724 sets
[i
].src_hash
= HASH (src
, mode
);
4725 sets
[i
].src_volatile
= do_not_record
;
4726 sets
[i
].src_in_memory
= hash_arg_in_memory
;
4728 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4729 a pseudo, do not record SRC. Using SRC as a replacement for
4730 anything else will be incorrect in that situation. Note that
4731 this usually occurs only for stack slots, in which case all the
4732 RTL would be referring to SRC, so we don't lose any optimization
4733 opportunities by not having SRC in the hash table. */
4736 && find_reg_note (insn
, REG_EQUIV
, NULL_RTX
) != 0
4738 && REGNO (dest
) >= FIRST_PSEUDO_REGISTER
)
4739 sets
[i
].src_volatile
= 1;
4741 else if (GET_CODE (src
) == ASM_OPERANDS
4742 && GET_CODE (x
) == PARALLEL
)
4744 /* Do not record result of a non-volatile inline asm with
4745 more than one result. */
4747 sets
[i
].src_volatile
= 1;
4749 int j
, lim
= XVECLEN (x
, 0);
4750 for (j
= 0; j
< lim
; j
++)
4752 rtx y
= XVECEXP (x
, 0, j
);
4753 /* And do not record result of a non-volatile inline asm
4754 with "memory" clobber. */
4755 if (GET_CODE (y
) == CLOBBER
&& MEM_P (XEXP (y
, 0)))
4757 sets
[i
].src_volatile
= 1;
4764 /* It is no longer clear why we used to do this, but it doesn't
4765 appear to still be needed. So let's try without it since this
4766 code hurts cse'ing widened ops. */
4767 /* If source is a paradoxical subreg (such as QI treated as an SI),
4768 treat it as volatile. It may do the work of an SI in one context
4769 where the extra bits are not being used, but cannot replace an SI
4771 if (paradoxical_subreg_p (src
))
4772 sets
[i
].src_volatile
= 1;
      /* Locate all possible equivalent forms for SRC.  Try to replace
	 SRC in the insn with each cheaper equivalent.

	 We have the following types of equivalents: SRC itself, a folded
	 version, a value given in a REG_EQUAL note, or a value related
	 to a constant.

	 Each of these equivalents may be part of an additional class
	 of equivalents (if more than one is in the table, they must be in
	 the same class; we check for this).

	 If the source is volatile, we don't do any table lookups.

	 We note any constant equivalent for possible later use in a
	 REG_NOTE.  */
4791 if (!sets
[i
].src_volatile
)
4792 elt
= lookup (src
, sets
[i
].src_hash
, mode
);
4794 sets
[i
].src_elt
= elt
;
4796 if (elt
&& src_eqv_here
&& src_eqv_elt
)
4798 if (elt
->first_same_value
!= src_eqv_elt
->first_same_value
)
4800 /* The REG_EQUAL is indicating that two formerly distinct
4801 classes are now equivalent. So merge them. */
4802 merge_equiv_classes (elt
, src_eqv_elt
);
4803 src_eqv_hash
= HASH (src_eqv
, elt
->mode
);
4804 src_eqv_elt
= lookup (src_eqv
, src_eqv_hash
, elt
->mode
);
4810 else if (src_eqv_elt
)
4813 /* Try to find a constant somewhere and record it in `src_const'.
4814 Record its table element, if any, in `src_const_elt'. Look in
4815 any known equivalences first. (If the constant is not in the
4816 table, also set `sets[i].src_const_hash'). */
4818 for (p
= elt
->first_same_value
; p
; p
= p
->next_same_value
)
4822 src_const_elt
= elt
;
4827 && (CONSTANT_P (src_folded
)
4828 /* Consider (minus (label_ref L1) (label_ref L2)) as
4829 "constant" here so we will record it. This allows us
4830 to fold switch statements when an ADDR_DIFF_VEC is used. */
4831 || (GET_CODE (src_folded
) == MINUS
4832 && GET_CODE (XEXP (src_folded
, 0)) == LABEL_REF
4833 && GET_CODE (XEXP (src_folded
, 1)) == LABEL_REF
)))
4834 src_const
= src_folded
, src_const_elt
= elt
;
4835 else if (src_const
== 0 && src_eqv_here
&& CONSTANT_P (src_eqv_here
))
4836 src_const
= src_eqv_here
, src_const_elt
= src_eqv_elt
;
4838 /* If we don't know if the constant is in the table, get its
4839 hash code and look it up. */
4840 if (src_const
&& src_const_elt
== 0)
4842 sets
[i
].src_const_hash
= HASH (src_const
, mode
);
4843 src_const_elt
= lookup (src_const
, sets
[i
].src_const_hash
, mode
);
4846 sets
[i
].src_const
= src_const
;
4847 sets
[i
].src_const_elt
= src_const_elt
;
4849 /* If the constant and our source are both in the table, mark them as
4850 equivalent. Otherwise, if a constant is in the table but the source
4851 isn't, set ELT to it. */
4852 if (src_const_elt
&& elt
4853 && src_const_elt
->first_same_value
!= elt
->first_same_value
)
4854 merge_equiv_classes (elt
, src_const_elt
);
4855 else if (src_const_elt
&& elt
== 0)
4856 elt
= src_const_elt
;
4858 /* See if there is a register linearly related to a constant
4859 equivalent of SRC. */
4861 && (GET_CODE (src_const
) == CONST
4862 || (src_const_elt
&& src_const_elt
->related_value
!= 0)))
4864 src_related
= use_related_value (src_const
, src_const_elt
);
4867 struct table_elt
*src_related_elt
4868 = lookup (src_related
, HASH (src_related
, mode
), mode
);
4869 if (src_related_elt
&& elt
)
4871 if (elt
->first_same_value
4872 != src_related_elt
->first_same_value
)
4873 /* This can occur when we previously saw a CONST
4874 involving a SYMBOL_REF and then see the SYMBOL_REF
4875 twice. Merge the involved classes. */
4876 merge_equiv_classes (elt
, src_related_elt
);
4879 src_related_elt
= 0;
4881 else if (src_related_elt
&& elt
== 0)
4882 elt
= src_related_elt
;
4886 /* See if we have a CONST_INT that is already in a register in a
4889 if (src_const
&& src_related
== 0 && CONST_INT_P (src_const
)
4890 && is_int_mode (mode
, &int_mode
)
4891 && GET_MODE_PRECISION (int_mode
) < BITS_PER_WORD
)
4893 opt_scalar_int_mode wider_mode_iter
;
4894 FOR_EACH_WIDER_MODE (wider_mode_iter
, int_mode
)
4896 scalar_int_mode wider_mode
= wider_mode_iter
.require ();
4897 if (GET_MODE_PRECISION (wider_mode
) > BITS_PER_WORD
)
4900 struct table_elt
*const_elt
4901 = lookup (src_const
, HASH (src_const
, wider_mode
), wider_mode
);
4906 for (const_elt
= const_elt
->first_same_value
;
4907 const_elt
; const_elt
= const_elt
->next_same_value
)
4908 if (REG_P (const_elt
->exp
))
4910 src_related
= gen_lowpart (int_mode
, const_elt
->exp
);
4914 if (src_related
!= 0)
      /* Another possibility is that we have an AND with a constant in
	 a mode narrower than a word.  If so, it might have been generated
	 as part of an "if" which would narrow the AND.  If we already
	 have done the AND in a wider mode, we can use a SUBREG of that
	 value.  */
&& ! src_related
4926 && is_a
<scalar_int_mode
> (mode
, &int_mode
)
4927 && GET_CODE (src
) == AND
&& CONST_INT_P (XEXP (src
, 1))
4928 && GET_MODE_SIZE (int_mode
) < UNITS_PER_WORD
)
4930 opt_scalar_int_mode tmode_iter
;
4931 rtx new_and
= gen_rtx_AND (VOIDmode
, NULL_RTX
, XEXP (src
, 1));
4933 FOR_EACH_WIDER_MODE (tmode_iter
, int_mode
)
4935 scalar_int_mode tmode
= tmode_iter
.require ();
4936 if (GET_MODE_SIZE (tmode
) > UNITS_PER_WORD
)
4939 rtx inner
= gen_lowpart (tmode
, XEXP (src
, 0));
4940 struct table_elt
*larger_elt
;
4944 PUT_MODE (new_and
, tmode
);
4945 XEXP (new_and
, 0) = inner
;
4946 larger_elt
= lookup (new_and
, HASH (new_and
, tmode
), tmode
);
4947 if (larger_elt
== 0)
4950 for (larger_elt
= larger_elt
->first_same_value
;
4951 larger_elt
; larger_elt
= larger_elt
->next_same_value
)
4952 if (REG_P (larger_elt
->exp
))
4955 = gen_lowpart (int_mode
, larger_elt
->exp
);
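	      /* Illustrative sketch (pseudo numbers are made up): if the
		 current source is
		   (and:HI (subreg:HI (reg:SI 115) 0) (const_int 255))
		 and the table already records (reg:SI 117) as equivalent to
		   (and:SI (reg:SI 115) (const_int 255))
		 then the lookup above finds that wider AND, and SRC_RELATED
		 becomes the low part of (reg:SI 117), so the narrow AND can
		 reuse the value already computed in the wider mode.  */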
4965 /* See if a MEM has already been loaded with a widening operation;
4966 if it has, we can use a subreg of that. Many CISC machines
4967 also have such operations, but this is only likely to be
4968 beneficial on these machines. */
4971 if (flag_expensive_optimizations
&& src_related
== 0
4972 && MEM_P (src
) && ! do_not_record
4973 && is_a
<scalar_int_mode
> (mode
, &int_mode
)
4974 && (extend_op
= load_extend_op (int_mode
)) != UNKNOWN
)
4976 struct rtx_def memory_extend_buf
;
4977 rtx memory_extend_rtx
= &memory_extend_buf
;
4979 /* Set what we are trying to extend and the operation it might
4980 have been extended with. */
4981 memset (memory_extend_rtx
, 0, sizeof (*memory_extend_rtx
));
4982 PUT_CODE (memory_extend_rtx
, extend_op
);
4983 XEXP (memory_extend_rtx
, 0) = src
;
4985 opt_scalar_int_mode tmode_iter
;
4986 FOR_EACH_WIDER_MODE (tmode_iter
, int_mode
)
4988 struct table_elt
*larger_elt
;
4990 scalar_int_mode tmode
= tmode_iter
.require ();
4991 if (GET_MODE_SIZE (tmode
) > UNITS_PER_WORD
)
4994 PUT_MODE (memory_extend_rtx
, tmode
);
4995 larger_elt
= lookup (memory_extend_rtx
,
4996 HASH (memory_extend_rtx
, tmode
), tmode
);
4997 if (larger_elt
== 0)
5000 for (larger_elt
= larger_elt
->first_same_value
;
5001 larger_elt
; larger_elt
= larger_elt
->next_same_value
)
5002 if (REG_P (larger_elt
->exp
))
5004 src_related
= gen_lowpart (int_mode
, larger_elt
->exp
);
5013 /* Try to express the constant using a register+offset expression
5014 derived from a constant anchor. */
5016 if (targetm
.const_anchor
5019 && GET_CODE (src_const
) == CONST_INT
)
5021 src_related
= try_const_anchors (src_const
, mode
);
5022 src_related_is_const_anchor
= src_related
!= NULL_RTX
;
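      /* Illustrative sketch (numbers are made up): on a target whose
	 targetm.const_anchor is 0x8000, the anchors of 0x12345 are 0x10000
	 and 0x18000.  If a register is already known to hold one of those
	 anchor values, try_const_anchors can return a
	 (plus (reg ...) (const_int ...)) form of 0x12345, which is usually
	 cheaper than building the constant from scratch.  */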
5026 if (src
== src_folded
)
5029 /* At this point, ELT, if nonzero, points to a class of expressions
5030 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5031 and SRC_RELATED, if nonzero, each contain additional equivalent
5032 expressions. Prune these latter expressions by deleting expressions
5033 already in the equivalence class.
5035 Check for an equivalent identical to the destination. If found,
5036 this is the preferred equivalent since it will likely lead to
5037 elimination of the insn. Indicate this by placing it in
5041 elt
= elt
->first_same_value
;
5042 for (p
= elt
; p
; p
= p
->next_same_value
)
5044 enum rtx_code code
= GET_CODE (p
->exp
);
5046 /* If the expression is not valid, ignore it. Then we do not
5047 have to check for validity below. In most cases, we can use
5048 `rtx_equal_p', since canonicalization has already been done. */
5049 if (code
!= REG
&& ! exp_equiv_p (p
->exp
, p
->exp
, 1, false))
5052 /* Also skip paradoxical subregs, unless that's what we're
5054 if (paradoxical_subreg_p (p
->exp
)
5056 && GET_CODE (src
) == SUBREG
5057 && GET_MODE (src
) == GET_MODE (p
->exp
)
5058 && partial_subreg_p (GET_MODE (SUBREG_REG (src
)),
5059 GET_MODE (SUBREG_REG (p
->exp
)))))
5062 if (src
&& GET_CODE (src
) == code
&& rtx_equal_p (src
, p
->exp
))
5064 else if (src_folded
&& GET_CODE (src_folded
) == code
5065 && rtx_equal_p (src_folded
, p
->exp
))
5067 else if (src_eqv_here
&& GET_CODE (src_eqv_here
) == code
5068 && rtx_equal_p (src_eqv_here
, p
->exp
))
5070 else if (src_related
&& GET_CODE (src_related
) == code
5071 && rtx_equal_p (src_related
, p
->exp
))
5074 /* This is the same as the destination of the insns, we want
5075 to prefer it. Copy it to src_related. The code below will
5076 then give it a negative cost. */
5077 if (GET_CODE (dest
) == code
&& rtx_equal_p (p
->exp
, dest
))
5081 /* Find the cheapest valid equivalent, trying all the available
5082 possibilities. Prefer items not in the hash table to ones
5083 that are when they are equal cost. Note that we can never
5084 worsen an insn as the current contents will also succeed.
5085 If we find an equivalent identical to the destination, use it as best,
5086 since this insn will probably be eliminated in that case. */
5089 if (rtx_equal_p (src
, dest
))
5090 src_cost
= src_regcost
= -1;
5093 src_cost
= COST (src
, mode
);
5094 src_regcost
= approx_reg_cost (src
);
5100 if (rtx_equal_p (src_eqv_here
, dest
))
5101 src_eqv_cost
= src_eqv_regcost
= -1;
5104 src_eqv_cost
= COST (src_eqv_here
, mode
);
5105 src_eqv_regcost
= approx_reg_cost (src_eqv_here
);
5111 if (rtx_equal_p (src_folded
, dest
))
5112 src_folded_cost
= src_folded_regcost
= -1;
5115 src_folded_cost
= COST (src_folded
, mode
);
5116 src_folded_regcost
= approx_reg_cost (src_folded
);
5122 if (rtx_equal_p (src_related
, dest
))
5123 src_related_cost
= src_related_regcost
= -1;
5126 src_related_cost
= COST (src_related
, mode
);
5127 src_related_regcost
= approx_reg_cost (src_related
);
      /* If a const-anchor is used to synthesize a constant that
	 normally requires multiple instructions then slightly prefer
	 it over the original sequence.  These instructions are likely
	 to become redundant now.  We can't compare against the cost
	 of src_eqv_here because, on MIPS for example, multi-insn
	 constants have zero cost; they are assumed to be hoisted from
	 loops.  */
5136 if (src_related_is_const_anchor
5137 && src_related_cost
== src_cost
5143 /* If this was an indirect jump insn, a known label will really be
5144 cheaper even though it looks more expensive. */
5145 if (dest
== pc_rtx
&& src_const
&& GET_CODE (src_const
) == LABEL_REF
)
5146 src_folded
= src_const
, src_folded_cost
= src_folded_regcost
= -1;
5148 /* Terminate loop when replacement made. This must terminate since
5149 the current contents will be tested and will always be valid. */
5154 /* Skip invalid entries. */
5155 while (elt
&& !REG_P (elt
->exp
)
5156 && ! exp_equiv_p (elt
->exp
, elt
->exp
, 1, false))
5157 elt
= elt
->next_same_value
;
5159 /* A paradoxical subreg would be bad here: it'll be the right
5160 size, but later may be adjusted so that the upper bits aren't
5161 what we want. So reject it. */
5163 && paradoxical_subreg_p (elt
->exp
)
5164 /* It is okay, though, if the rtx we're trying to match
5165 will ignore any of the bits we can't predict. */
5167 && GET_CODE (src
) == SUBREG
5168 && GET_MODE (src
) == GET_MODE (elt
->exp
)
5169 && partial_subreg_p (GET_MODE (SUBREG_REG (src
)),
5170 GET_MODE (SUBREG_REG (elt
->exp
)))))
5172 elt
= elt
->next_same_value
;
5178 src_elt_cost
= elt
->cost
;
5179 src_elt_regcost
= elt
->regcost
;
5182 /* Find cheapest and skip it for the next time. For items
5183 of equal cost, use this order:
5184 src_folded, src, src_eqv, src_related and hash table entry. */
5186 && preferable (src_folded_cost
, src_folded_regcost
,
5187 src_cost
, src_regcost
) <= 0
5188 && preferable (src_folded_cost
, src_folded_regcost
,
5189 src_eqv_cost
, src_eqv_regcost
) <= 0
5190 && preferable (src_folded_cost
, src_folded_regcost
,
5191 src_related_cost
, src_related_regcost
) <= 0
5192 && preferable (src_folded_cost
, src_folded_regcost
,
5193 src_elt_cost
, src_elt_regcost
) <= 0)
5195 trial
= src_folded
, src_folded_cost
= MAX_COST
;
5196 if (src_folded_force_flag
)
5198 rtx forced
= force_const_mem (mode
, trial
);
5204 && preferable (src_cost
, src_regcost
,
5205 src_eqv_cost
, src_eqv_regcost
) <= 0
5206 && preferable (src_cost
, src_regcost
,
5207 src_related_cost
, src_related_regcost
) <= 0
5208 && preferable (src_cost
, src_regcost
,
5209 src_elt_cost
, src_elt_regcost
) <= 0)
5210 trial
= src
, src_cost
= MAX_COST
;
5211 else if (src_eqv_here
5212 && preferable (src_eqv_cost
, src_eqv_regcost
,
5213 src_related_cost
, src_related_regcost
) <= 0
5214 && preferable (src_eqv_cost
, src_eqv_regcost
,
5215 src_elt_cost
, src_elt_regcost
) <= 0)
5216 trial
= src_eqv_here
, src_eqv_cost
= MAX_COST
;
5217 else if (src_related
5218 && preferable (src_related_cost
, src_related_regcost
,
5219 src_elt_cost
, src_elt_regcost
) <= 0)
5220 trial
= src_related
, src_related_cost
= MAX_COST
;
5224 elt
= elt
->next_same_value
;
5225 src_elt_cost
= MAX_COST
;
	  /* Try to optimize
	       (set (reg:M N) (const_int A))
	       (set (reg:M2 O) (const_int B))
	       (set (zero_extract:M2 (reg:M N) (const_int C) (const_int D))
		    (reg:M2 O)).  */
5233 if (GET_CODE (SET_DEST (sets
[i
].rtl
)) == ZERO_EXTRACT
5234 && CONST_INT_P (trial
)
5235 && CONST_INT_P (XEXP (SET_DEST (sets
[i
].rtl
), 1))
5236 && CONST_INT_P (XEXP (SET_DEST (sets
[i
].rtl
), 2))
5237 && REG_P (XEXP (SET_DEST (sets
[i
].rtl
), 0))
5239 (GET_MODE_PRECISION (GET_MODE (SET_DEST (sets
[i
].rtl
))),
5240 INTVAL (XEXP (SET_DEST (sets
[i
].rtl
), 1))))
5241 && ((unsigned) INTVAL (XEXP (SET_DEST (sets
[i
].rtl
), 1))
5242 + (unsigned) INTVAL (XEXP (SET_DEST (sets
[i
].rtl
), 2))
5243 <= HOST_BITS_PER_WIDE_INT
))
5245 rtx dest_reg
= XEXP (SET_DEST (sets
[i
].rtl
), 0);
5246 rtx width
= XEXP (SET_DEST (sets
[i
].rtl
), 1);
5247 rtx pos
= XEXP (SET_DEST (sets
[i
].rtl
), 2);
5248 unsigned int dest_hash
= HASH (dest_reg
, GET_MODE (dest_reg
));
5249 struct table_elt
*dest_elt
5250 = lookup (dest_reg
, dest_hash
, GET_MODE (dest_reg
));
5251 rtx dest_cst
= NULL
;
5254 for (p
= dest_elt
->first_same_value
; p
; p
= p
->next_same_value
)
5255 if (p
->is_const
&& CONST_INT_P (p
->exp
))
5262 HOST_WIDE_INT val
= INTVAL (dest_cst
);
5265 /* This is the mode of DEST_CST as well. */
5266 scalar_int_mode dest_mode
5267 = as_a
<scalar_int_mode
> (GET_MODE (dest_reg
));
5268 if (BITS_BIG_ENDIAN
)
5269 shift
= GET_MODE_PRECISION (dest_mode
)
5270 - INTVAL (pos
) - INTVAL (width
);
5272 shift
= INTVAL (pos
);
5273 if (INTVAL (width
) == HOST_BITS_PER_WIDE_INT
)
5274 mask
= HOST_WIDE_INT_M1
;
5276 mask
= (HOST_WIDE_INT_1
<< INTVAL (width
)) - 1;
5277 val
&= ~(mask
<< shift
);
5278 val
|= (INTVAL (trial
) & mask
) << shift
;
5279 val
= trunc_int_for_mode (val
, dest_mode
);
5280 validate_unshare_change (insn
, &SET_DEST (sets
[i
].rtl
),
5282 validate_unshare_change (insn
, &SET_SRC (sets
[i
].rtl
),
5284 if (apply_change_group ())
5286 rtx note
= find_reg_note (insn
, REG_EQUAL
, NULL_RTX
);
5289 remove_note (insn
, note
);
5290 df_notes_rescan (insn
);
5294 src_eqv_volatile
= 0;
5295 src_eqv_in_memory
= 0;
5303 /* We don't normally have an insn matching (set (pc) (pc)), so
5304 check for this separately here. We will delete such an
5307 For other cases such as a table jump or conditional jump
5308 where we know the ultimate target, go ahead and replace the
5309 operand. While that may not make a valid insn, we will
5310 reemit the jump below (and also insert any necessary
5312 if (n_sets
== 1 && dest
== pc_rtx
5314 || (GET_CODE (trial
) == LABEL_REF
5315 && ! condjump_p (insn
))))
5317 /* Don't substitute non-local labels, this confuses CFG. */
5318 if (GET_CODE (trial
) == LABEL_REF
5319 && LABEL_REF_NONLOCAL_P (trial
))
5322 SET_SRC (sets
[i
].rtl
) = trial
;
5323 cse_jumps_altered
= true;
5327 /* Similarly, lots of targets don't allow no-op
5328 (set (mem x) (mem x)) moves. */
5329 else if (n_sets
== 1
5332 && rtx_equal_p (trial
, dest
)
5333 && !side_effects_p (dest
)
5334 && (cfun
->can_delete_dead_exceptions
5335 || insn_nothrow_p (insn
)))
5337 SET_SRC (sets
[i
].rtl
) = trial
;
5338 mem_noop_insn
= true;
5342 /* Reject certain invalid forms of CONST that we create. */
5343 else if (CONSTANT_P (trial
)
5344 && GET_CODE (trial
) == CONST
5345 /* Reject cases that will cause decode_rtx_const to
5346 die. On the alpha when simplifying a switch, we
5347 get (const (truncate (minus (label_ref)
5349 && (GET_CODE (XEXP (trial
, 0)) == TRUNCATE
5350 /* Likewise on IA-64, except without the
5352 || (GET_CODE (XEXP (trial
, 0)) == MINUS
5353 && GET_CODE (XEXP (XEXP (trial
, 0), 0)) == LABEL_REF
5354 && GET_CODE (XEXP (XEXP (trial
, 0), 1)) == LABEL_REF
)))
5355 /* Do nothing for this case. */
5358 /* Do not replace anything with a MEM, except the replacement
5359 is a no-op. This allows this loop to terminate. */
5360 else if (MEM_P (trial
) && !rtx_equal_p (trial
, SET_SRC(sets
[i
].rtl
)))
5361 /* Do nothing for this case. */
5364 /* Look for a substitution that makes a valid insn. */
5365 else if (validate_unshare_change (insn
, &SET_SRC (sets
[i
].rtl
),
5368 rtx new_rtx
= canon_reg (SET_SRC (sets
[i
].rtl
), insn
);
5370 /* The result of apply_change_group can be ignored; see
5373 validate_change (insn
, &SET_SRC (sets
[i
].rtl
), new_rtx
, 1);
5374 apply_change_group ();
5379 /* If we previously found constant pool entries for
5380 constants and this is a constant, try making a
5381 pool entry. Put it in src_folded unless we already have done
5382 this since that is where it likely came from. */
5384 else if (constant_pool_entries_cost
5385 && CONSTANT_P (trial
)
5387 || (!MEM_P (src_folded
)
5388 && ! src_folded_force_flag
))
5389 && GET_MODE_CLASS (mode
) != MODE_CC
5390 && mode
!= VOIDmode
)
5392 src_folded_force_flag
= 1;
5394 src_folded_cost
= constant_pool_entries_cost
;
5395 src_folded_regcost
= constant_pool_entries_regcost
;
5399 /* If we changed the insn too much, handle this set from scratch. */
5406 src
= SET_SRC (sets
[i
].rtl
);
5408 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5409 However, there is an important exception: If both are registers
5410 that are not the head of their equivalence class, replace SET_SRC
5411 with the head of the class. If we do not do this, we will have
5412 both registers live over a portion of the basic block. This way,
5413 their lifetimes will likely abut instead of overlapping. */
5415 && REGNO_QTY_VALID_P (REGNO (dest
)))
5417 int dest_q
= REG_QTY (REGNO (dest
));
5418 struct qty_table_elem
*dest_ent
= &qty_table
[dest_q
];
5420 if (dest_ent
->mode
== GET_MODE (dest
)
5421 && dest_ent
->first_reg
!= REGNO (dest
)
5422 && REG_P (src
) && REGNO (src
) == REGNO (dest
)
5423 /* Don't do this if the original insn had a hard reg as
5424 SET_SRC or SET_DEST. */
5425 && (!REG_P (sets
[i
].src
)
5426 || REGNO (sets
[i
].src
) >= FIRST_PSEUDO_REGISTER
)
5427 && (!REG_P (dest
) || REGNO (dest
) >= FIRST_PSEUDO_REGISTER
))
5428 /* We can't call canon_reg here because it won't do anything if
5429 SRC is a hard register. */
5431 int src_q
= REG_QTY (REGNO (src
));
5432 struct qty_table_elem
*src_ent
= &qty_table
[src_q
];
5433 int first
= src_ent
->first_reg
;
5435 = (first
>= FIRST_PSEUDO_REGISTER
5436 ? regno_reg_rtx
[first
] : gen_rtx_REG (GET_MODE (src
), first
));
5438 /* We must use validate-change even for this, because this
5439 might be a special no-op instruction, suitable only to
5441 if (validate_change (insn
, &SET_SRC (sets
[i
].rtl
), new_src
, 0))
5444 /* If we had a constant that is cheaper than what we are now
5445 setting SRC to, use that constant. We ignored it when we
5446 thought we could make this into a no-op. */
5447 if (src_const
&& COST (src_const
, mode
) < COST (src
, mode
)
5448 && validate_change (insn
, &SET_SRC (sets
[i
].rtl
),
5455 /* If we made a change, recompute SRC values. */
5456 if (src
!= sets
[i
].src
)
5459 hash_arg_in_memory
= 0;
5461 sets
[i
].src_hash
= HASH (src
, mode
);
5462 sets
[i
].src_volatile
= do_not_record
;
5463 sets
[i
].src_in_memory
= hash_arg_in_memory
;
5464 sets
[i
].src_elt
= lookup (src
, sets
[i
].src_hash
, mode
);
5467 /* If this is a single SET, we are setting a register, and we have an
5468 equivalent constant, we want to add a REG_EQUAL note if the constant
5469 is different from the source. We don't want to do it for a constant
5470 pseudo since verifying that this pseudo hasn't been eliminated is a
5471 pain; moreover such a note won't help anything.
5473 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5474 which can be created for a reference to a compile time computable
5475 entry in a jump table. */
5479 && !REG_P (src_const
)
5480 && !(GET_CODE (src_const
) == SUBREG
5481 && REG_P (SUBREG_REG (src_const
)))
5482 && !(GET_CODE (src_const
) == CONST
5483 && GET_CODE (XEXP (src_const
, 0)) == MINUS
5484 && GET_CODE (XEXP (XEXP (src_const
, 0), 0)) == LABEL_REF
5485 && GET_CODE (XEXP (XEXP (src_const
, 0), 1)) == LABEL_REF
)
5486 && !rtx_equal_p (src
, src_const
))
5488 /* Make sure that the rtx is not shared. */
5489 src_const
= copy_rtx (src_const
);
5491 /* Record the actual constant value in a REG_EQUAL note,
5492 making a new one if one does not already exist. */
5493 set_unique_reg_note (insn
, REG_EQUAL
, src_const
);
5494 df_notes_rescan (insn
);
5497 /* Now deal with the destination. */
5500 /* Look within any ZERO_EXTRACT to the MEM or REG within it. */
5501 while (GET_CODE (dest
) == SUBREG
5502 || GET_CODE (dest
) == ZERO_EXTRACT
5503 || GET_CODE (dest
) == STRICT_LOW_PART
)
5504 dest
= XEXP (dest
, 0);
5506 sets
[i
].inner_dest
= dest
;
5510 #ifdef PUSH_ROUNDING
5511 /* Stack pushes invalidate the stack pointer. */
5512 rtx addr
= XEXP (dest
, 0);
5513 if (GET_RTX_CLASS (GET_CODE (addr
)) == RTX_AUTOINC
5514 && XEXP (addr
, 0) == stack_pointer_rtx
)
5515 invalidate (stack_pointer_rtx
, VOIDmode
);
5517 dest
= fold_rtx (dest
, insn
);
5520 /* Compute the hash code of the destination now,
5521 before the effects of this instruction are recorded,
5522 since the register values used in the address computation
5523 are those before this instruction. */
5524 sets
[i
].dest_hash
= HASH (dest
, mode
);
5526 /* Don't enter a bit-field in the hash table
5527 because the value in it after the store
5528 may not equal what was stored, due to truncation. */
5530 if (GET_CODE (SET_DEST (sets
[i
].rtl
)) == ZERO_EXTRACT
)
5532 rtx width
= XEXP (SET_DEST (sets
[i
].rtl
), 1);
5534 if (src_const
!= 0 && CONST_INT_P (src_const
)
5535 && CONST_INT_P (width
)
5536 && INTVAL (width
) < HOST_BITS_PER_WIDE_INT
5537 && ! (INTVAL (src_const
)
5538 & (HOST_WIDE_INT_M1U
<< INTVAL (width
))))
5539 /* Exception: if the value is constant,
5540 and it won't be truncated, record it. */
5544 /* This is chosen so that the destination will be invalidated
5545 but no new value will be recorded.
5546 We must invalidate because sometimes constant
5547 values can be recorded for bitfields. */
5548 sets
[i
].src_elt
= 0;
5549 sets
[i
].src_volatile
= 1;
5555 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5557 else if (n_sets
== 1 && dest
== pc_rtx
&& src
== pc_rtx
)
5559 /* One less use of the label this insn used to jump to. */
5560 cse_cfg_altered
|= delete_insn_and_edges (insn
);
5561 cse_jumps_altered
= true;
5562 /* No more processing for this set. */
5566 /* Similarly for no-op MEM moves. */
5567 else if (mem_noop_insn
)
5569 if (cfun
->can_throw_non_call_exceptions
&& can_throw_internal (insn
))
5570 cse_cfg_altered
= true;
5571 cse_cfg_altered
|= delete_insn_and_edges (insn
);
5572 /* No more processing for this set. */
5576 /* If this SET is now setting PC to a label, we know it used to
5577 be a conditional or computed branch. */
5578 else if (dest
== pc_rtx
&& GET_CODE (src
) == LABEL_REF
5579 && !LABEL_REF_NONLOCAL_P (src
))
5581 /* We reemit the jump in as many cases as possible just in
5582 case the form of an unconditional jump is significantly
5583 different than a computed jump or conditional jump.
5585 If this insn has multiple sets, then reemitting the
5586 jump is nontrivial. So instead we just force rerecognition
5587 and hope for the best. */
5590 rtx_jump_insn
*new_rtx
;
5593 rtx_insn
*seq
= targetm
.gen_jump (XEXP (src
, 0));
5594 new_rtx
= emit_jump_insn_before (seq
, insn
);
5595 JUMP_LABEL (new_rtx
) = XEXP (src
, 0);
5596 LABEL_NUSES (XEXP (src
, 0))++;
5598 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5599 note
= find_reg_note (insn
, REG_NON_LOCAL_GOTO
, 0);
5602 XEXP (note
, 1) = NULL_RTX
;
5603 REG_NOTES (new_rtx
) = note
;
5606 cse_cfg_altered
|= delete_insn_and_edges (insn
);
5610 INSN_CODE (insn
) = -1;
5612 /* Do not bother deleting any unreachable code, let jump do it. */
5613 cse_jumps_altered
= true;
5617 /* If destination is volatile, invalidate it and then do no further
5618 processing for this assignment. */
5620 else if (do_not_record
)
5622 invalidate_dest (dest
);
5626 if (sets
[i
].rtl
!= 0 && dest
!= SET_DEST (sets
[i
].rtl
))
5629 sets
[i
].dest_hash
= HASH (SET_DEST (sets
[i
].rtl
), mode
);
5632 invalidate_dest (SET_DEST (sets
[i
].rtl
));
5637 /* If setting CC0, record what it was set to, or a constant, if it
5638 is equivalent to a constant. If it is being set to a floating-point
5639 value, make a COMPARE with the appropriate constant of 0. If we
5640 don't do this, later code can interpret this as a test against
5641 const0_rtx, which can cause problems if we try to put it into an
5642 insn as a floating-point operand. */
5643 if (dest
== cc0_rtx
)
5645 this_insn_cc0
= src_const
&& mode
!= VOIDmode
? src_const
: src
;
5646 this_insn_cc0_mode
= mode
;
5647 if (FLOAT_MODE_P (mode
))
5648 this_insn_cc0
= gen_rtx_COMPARE (VOIDmode
, this_insn_cc0
,
5653 /* Now enter all non-volatile source expressions in the hash table
5654 if they are not already present.
5655 Record their equivalence classes in src_elt.
5656 This way we can insert the corresponding destinations into
5657 the same classes even if the actual sources are no longer in them
5658 (having been invalidated). */
5660 if (src_eqv
&& src_eqv_elt
== 0 && sets
[0].rtl
!= 0 && ! src_eqv_volatile
5661 && ! rtx_equal_p (src_eqv
, SET_DEST (sets
[0].rtl
)))
5663 struct table_elt
*elt
;
5664 struct table_elt
*classp
= sets
[0].src_elt
;
5665 rtx dest
= SET_DEST (sets
[0].rtl
);
5666 machine_mode eqvmode
= GET_MODE (dest
);
5668 if (GET_CODE (dest
) == STRICT_LOW_PART
)
5670 eqvmode
= GET_MODE (SUBREG_REG (XEXP (dest
, 0)));
5673 if (insert_regs (src_eqv
, classp
, 0))
5675 rehash_using_reg (src_eqv
);
5676 src_eqv_hash
= HASH (src_eqv
, eqvmode
);
5678 elt
= insert (src_eqv
, classp
, src_eqv_hash
, eqvmode
);
5679 elt
->in_memory
= src_eqv_in_memory
;
5682 /* Check to see if src_eqv_elt is the same as a set source which
5683 does not yet have an elt, and if so set the elt of the set source
5685 for (i
= 0; i
< n_sets
; i
++)
5686 if (sets
[i
].rtl
&& sets
[i
].src_elt
== 0
5687 && rtx_equal_p (SET_SRC (sets
[i
].rtl
), src_eqv
))
5688 sets
[i
].src_elt
= src_eqv_elt
;
5691 for (i
= 0; i
< n_sets
; i
++)
5692 if (sets
[i
].rtl
&& ! sets
[i
].src_volatile
5693 && ! rtx_equal_p (SET_SRC (sets
[i
].rtl
), SET_DEST (sets
[i
].rtl
)))
5695 if (GET_CODE (SET_DEST (sets
[i
].rtl
)) == STRICT_LOW_PART
)
5697 /* REG_EQUAL in setting a STRICT_LOW_PART
5698 gives an equivalent for the entire destination register,
5699 not just for the subreg being stored in now.
5700 This is a more interesting equivalence, so we arrange later
5701 to treat the entire reg as the destination. */
5702 sets
[i
].src_elt
= src_eqv_elt
;
5703 sets
[i
].src_hash
= src_eqv_hash
;
5707 /* Insert source and constant equivalent into hash table, if not
5709 struct table_elt
*classp
= src_eqv_elt
;
5710 rtx src
= sets
[i
].src
;
5711 rtx dest
= SET_DEST (sets
[i
].rtl
);
5713 = GET_MODE (src
) == VOIDmode
? GET_MODE (dest
) : GET_MODE (src
);
5715 /* It's possible that we have a source value known to be
5716 constant but don't have a REG_EQUAL note on the insn.
5717 Lack of a note will mean src_eqv_elt will be NULL. This
5718 can happen where we've generated a SUBREG to access a
5719 CONST_INT that is already in a register in a wider mode.
5720 Ensure that the source expression is put in the proper
5723 classp
= sets
[i
].src_const_elt
;
5725 if (sets
[i
].src_elt
== 0)
5727 struct table_elt
*elt
;
5729 /* Note that these insert_regs calls cannot remove
5730 any of the src_elt's, because they would have failed to
5731 match if not still valid. */
5732 if (insert_regs (src
, classp
, 0))
5734 rehash_using_reg (src
);
5735 sets
[i
].src_hash
= HASH (src
, mode
);
5737 elt
= insert (src
, classp
, sets
[i
].src_hash
, mode
);
5738 elt
->in_memory
= sets
[i
].src_in_memory
;
5739 /* If inline asm has any clobbers, ensure we only reuse
5740 existing inline asms and never try to put the ASM_OPERANDS
5741 into an insn that isn't inline asm. */
5742 if (GET_CODE (src
) == ASM_OPERANDS
5743 && GET_CODE (x
) == PARALLEL
)
5744 elt
->cost
= MAX_COST
;
5745 sets
[i
].src_elt
= classp
= elt
;
5747 if (sets
[i
].src_const
&& sets
[i
].src_const_elt
== 0
5748 && src
!= sets
[i
].src_const
5749 && ! rtx_equal_p (sets
[i
].src_const
, src
))
5750 sets
[i
].src_elt
= insert (sets
[i
].src_const
, classp
,
5751 sets
[i
].src_const_hash
, mode
);
5754 else if (sets
[i
].src_elt
== 0)
5755 /* If we did not insert the source into the hash table (e.g., it was
5756 volatile), note the equivalence class for the REG_EQUAL value, if any,
5757 so that the destination goes into that class. */
5758 sets
[i
].src_elt
= src_eqv_elt
;
5760 /* Record destination addresses in the hash table. This allows us to
5761 check if they are invalidated by other sets. */
5762 for (i
= 0; i
< n_sets
; i
++)
5766 rtx x
= sets
[i
].inner_dest
;
5767 struct table_elt
*elt
;
5774 mode
= GET_MODE (x
);
5775 hash
= HASH (x
, mode
);
5776 elt
= lookup (x
, hash
, mode
);
5779 if (insert_regs (x
, NULL
, 0))
5781 rtx dest
= SET_DEST (sets
[i
].rtl
);
5783 rehash_using_reg (x
);
5784 hash
= HASH (x
, mode
);
5785 sets
[i
].dest_hash
= HASH (dest
, GET_MODE (dest
));
5787 elt
= insert (x
, NULL
, hash
, mode
);
5790 sets
[i
].dest_addr_elt
= elt
;
5793 sets
[i
].dest_addr_elt
= NULL
;
5797 invalidate_from_clobbers (insn
);
5799 /* Some registers are invalidated by subroutine calls. Memory is
5800 invalidated by non-constant calls. */
5804 if (!(RTL_CONST_OR_PURE_CALL_P (insn
)))
5805 invalidate_memory ();
5807 /* For const/pure calls, invalidate any argument slots, because
5808 those are owned by the callee. */
5809 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
5810 if (GET_CODE (XEXP (tem
, 0)) == USE
5811 && MEM_P (XEXP (XEXP (tem
, 0), 0)))
5812 invalidate (XEXP (XEXP (tem
, 0), 0), VOIDmode
);
5813 invalidate_for_call (insn
);
5816 /* Now invalidate everything set by this instruction.
5817 If a SUBREG or other funny destination is being set,
5818 sets[i].rtl is still nonzero, so here we invalidate the reg
5819 a part of which is being set. */
5821 for (i
= 0; i
< n_sets
; i
++)
5824 /* We can't use the inner dest, because the mode associated with
5825 a ZERO_EXTRACT is significant. */
5826 rtx dest
= SET_DEST (sets
[i
].rtl
);
5828 /* Needed for registers to remove the register from its
5829 previous quantity's chain.
5830 Needed for memory if this is a nonvarying address, unless
5831 we have just done an invalidate_memory that covers even those. */
5832 if (REG_P (dest
) || GET_CODE (dest
) == SUBREG
)
5833 invalidate (dest
, VOIDmode
);
5834 else if (MEM_P (dest
))
5835 invalidate (dest
, VOIDmode
);
5836 else if (GET_CODE (dest
) == STRICT_LOW_PART
5837 || GET_CODE (dest
) == ZERO_EXTRACT
)
5838 invalidate (XEXP (dest
, 0), GET_MODE (dest
));
5841 /* Don't cse over a call to setjmp; on some machines (eg VAX)
5842 the regs restored by the longjmp come from a later time
5844 if (CALL_P (insn
) && find_reg_note (insn
, REG_SETJMP
, NULL
))
5846 flush_hash_table ();
5850 /* Make sure registers mentioned in destinations
5851 are safe for use in an expression to be inserted.
5852 This removes from the hash table
5853 any invalid entry that refers to one of these registers.
5855 We don't care about the return value from mention_regs because
5856 we are going to hash the SET_DEST values unconditionally. */
5858 for (i
= 0; i
< n_sets
; i
++)
5862 rtx x
= SET_DEST (sets
[i
].rtl
);
5868 /* We used to rely on all references to a register becoming
5869 inaccessible when a register changes to a new quantity,
5870 since that changes the hash code. However, that is not
5871 safe, since after HASH_SIZE new quantities we get a
5872 hash 'collision' of a register with its own invalid
5873 entries. And since SUBREGs have been changed not to
5874 change their hash code with the hash code of the register,
5875 it wouldn't work any longer at all. So we have to check
5876 for any invalid references lying around now.
5877 This code is similar to the REG case in mention_regs,
5878 but it knows that reg_tick has been incremented, and
5879 it leaves reg_in_table as -1 . */
5880 unsigned int regno
= REGNO (x
);
5881 unsigned int endregno
= END_REGNO (x
);
5884 for (i
= regno
; i
< endregno
; i
++)
5886 if (REG_IN_TABLE (i
) >= 0)
5888 remove_invalid_refs (i
);
5889 REG_IN_TABLE (i
) = -1;
5896 /* We may have just removed some of the src_elt's from the hash table.
5897 So replace each one with the current head of the same class.
5898 Also check if destination addresses have been removed. */
5900 for (i
= 0; i
< n_sets
; i
++)
5903 if (sets
[i
].dest_addr_elt
5904 && sets
[i
].dest_addr_elt
->first_same_value
== 0)
5906 /* The elt was removed, which means this destination is not
5907 valid after this instruction. */
5908 sets
[i
].rtl
= NULL_RTX
;
5910 else if (sets
[i
].src_elt
&& sets
[i
].src_elt
->first_same_value
== 0)
5911 /* If elt was removed, find current head of same class,
5912 or 0 if nothing remains of that class. */
5914 struct table_elt
*elt
= sets
[i
].src_elt
;
5916 while (elt
&& elt
->prev_same_value
)
5917 elt
= elt
->prev_same_value
;
5919 while (elt
&& elt
->first_same_value
== 0)
5920 elt
= elt
->next_same_value
;
5921 sets
[i
].src_elt
= elt
? elt
->first_same_value
: 0;
5925 /* Now insert the destinations into their equivalence classes. */
5927 for (i
= 0; i
< n_sets
; i
++)
5930 rtx dest
= SET_DEST (sets
[i
].rtl
);
5931 struct table_elt
*elt
;
5933 /* Don't record value if we are not supposed to risk allocating
5934 floating-point values in registers that might be wider than
5936 if ((flag_float_store
5938 && FLOAT_MODE_P (GET_MODE (dest
)))
5939 /* Don't record BLKmode values, because we don't know the
5940 size of it, and can't be sure that other BLKmode values
5941 have the same or smaller size. */
5942 || GET_MODE (dest
) == BLKmode
	  /* If we didn't put a REG_EQUAL value or a source into the hash
	     table, there is no point in recording DEST.  */
	  || sets[i].src_elt == 0)
	continue;
5948 /* STRICT_LOW_PART isn't part of the value BEING set,
5949 and neither is the SUBREG inside it.
5950 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
5951 if (GET_CODE (dest
) == STRICT_LOW_PART
)
5952 dest
= SUBREG_REG (XEXP (dest
, 0));
5954 if (REG_P (dest
) || GET_CODE (dest
) == SUBREG
)
5955 /* Registers must also be inserted into chains for quantities. */
5956 if (insert_regs (dest
, sets
[i
].src_elt
, 1))
5958 /* If `insert_regs' changes something, the hash code must be
5960 rehash_using_reg (dest
);
5961 sets
[i
].dest_hash
= HASH (dest
, GET_MODE (dest
));
5964 /* If DEST is a paradoxical SUBREG, don't record DEST since the bits
5965 outside the mode of GET_MODE (SUBREG_REG (dest)) are undefined. */
5966 if (paradoxical_subreg_p (dest
))
5969 elt
= insert (dest
, sets
[i
].src_elt
,
5970 sets
[i
].dest_hash
, GET_MODE (dest
));
5972 /* If this is a constant, insert the constant anchors with the
5973 equivalent register-offset expressions using register DEST. */
5974 if (targetm
.const_anchor
5976 && SCALAR_INT_MODE_P (GET_MODE (dest
))
5977 && GET_CODE (sets
[i
].src_elt
->exp
) == CONST_INT
)
5978 insert_const_anchors (dest
, sets
[i
].src_elt
->exp
, GET_MODE (dest
));
5980 elt
->in_memory
= (MEM_P (sets
[i
].inner_dest
)
5981 && !MEM_READONLY_P (sets
[i
].inner_dest
));
5983 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
5984 narrower than M2, and both M1 and M2 are the same number of words,
5985 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
5986 make that equivalence as well.
5988 However, BAR may have equivalences for which gen_lowpart
5989 will produce a simpler value than gen_lowpart applied to
5990 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
5991 BAR's equivalences. If we don't get a simplified form, make
5992 the SUBREG. It will not be used in an equivalence, but will
5993 cause two similar assignments to be detected.
5995 Note the loop below will find SUBREG_REG (DEST) since we have
5996 already entered SRC and DEST of the SET in the table. */
5998 if (GET_CODE (dest
) == SUBREG
5999 && (known_equal_after_align_down
6000 (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
))) - 1,
6001 GET_MODE_SIZE (GET_MODE (dest
)) - 1,
6003 && !partial_subreg_p (dest
)
6004 && sets
[i
].src_elt
!= 0)
6006 machine_mode new_mode
= GET_MODE (SUBREG_REG (dest
));
6007 struct table_elt
*elt
, *classp
= 0;
6009 for (elt
= sets
[i
].src_elt
->first_same_value
; elt
;
6010 elt
= elt
->next_same_value
)
6014 struct table_elt
*src_elt
;
6016 /* Ignore invalid entries. */
6017 if (!REG_P (elt
->exp
)
6018 && ! exp_equiv_p (elt
->exp
, elt
->exp
, 1, false))
6021 /* We may have already been playing subreg games. If the
6022 mode is already correct for the destination, use it. */
6023 if (GET_MODE (elt
->exp
) == new_mode
)
6028 = subreg_lowpart_offset (new_mode
, GET_MODE (dest
));
6029 new_src
= simplify_gen_subreg (new_mode
, elt
->exp
,
6030 GET_MODE (dest
), byte
);
6033 /* The call to simplify_gen_subreg fails if the value
6034 is VOIDmode, yet we can't do any simplification, e.g.
6035 for EXPR_LISTs denoting function call results.
6036 It is invalid to construct a SUBREG with a VOIDmode
6037 SUBREG_REG, hence a zero new_src means we can't do
6038 this substitution. */
6042 src_hash
= HASH (new_src
, new_mode
);
6043 src_elt
= lookup (new_src
, src_hash
, new_mode
);
6045 /* Put the new source in the hash table is if isn't
6049 if (insert_regs (new_src
, classp
, 0))
6051 rehash_using_reg (new_src
);
6052 src_hash
= HASH (new_src
, new_mode
);
6054 src_elt
= insert (new_src
, classp
, src_hash
, new_mode
);
6055 src_elt
->in_memory
= elt
->in_memory
;
6056 if (GET_CODE (new_src
) == ASM_OPERANDS
6057 && elt
->cost
== MAX_COST
)
6058 src_elt
->cost
= MAX_COST
;
6060 else if (classp
&& classp
!= src_elt
->first_same_value
)
6061 /* Show that two things that we've seen before are
6062 actually the same. */
6063 merge_equiv_classes (src_elt
, classp
);
6065 classp
= src_elt
->first_same_value
;
6066 /* Ignore invalid entries. */
6068 && !REG_P (classp
->exp
)
6069 && ! exp_equiv_p (classp
->exp
, classp
->exp
, 1, false))
6070 classp
= classp
->next_same_value
;
6075 /* Special handling for (set REG0 REG1) where REG0 is the
6076 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6077 be used in the sequel, so (if easily done) change this insn to
6078 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6079 that computed their value. Then REG1 will become a dead store
6080 and won't cloud the situation for later optimizations.
6082 Do not make this change if REG1 is a hard register, because it will
6083 then be used in the sequel and we may be changing a two-operand insn
6084 into a three-operand insn.
6086 Also do not do this if we are operating on a copy of INSN. */
6088 if (n_sets
== 1 && sets
[0].rtl
)
6089 try_back_substitute_reg (sets
[0].rtl
, insn
);
/* Remove from the hash table all expressions that reference memory.  */

static void
invalidate_memory (void)
{
  int i;
  struct table_elt *p, *next;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (p->in_memory)
	  remove_from_table (p, i);
      }
}
/* Perform invalidation on the basis of everything about INSN,
   except for invalidating the actual places that are SET in it.
   This includes the places CLOBBERed, and anything that might
   alias with something that is SET or CLOBBERed.  */

static void
invalidate_from_clobbers (rtx_insn *insn)
{
  rtx x = PATTERN (insn);

  if (GET_CODE (x) == CLOBBER)
    {
      rtx ref = XEXP (x, 0);

      if (REG_P (ref) || GET_CODE (ref) == SUBREG
	  || MEM_P (ref))
	invalidate (ref, VOIDmode);
      else if (GET_CODE (ref) == STRICT_LOW_PART
	       || GET_CODE (ref) == ZERO_EXTRACT)
	invalidate (XEXP (ref, 0), GET_MODE (ref));
    }
  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (GET_CODE (y) == CLOBBER)
	    {
	      rtx ref = XEXP (y, 0);
	      if (REG_P (ref) || GET_CODE (ref) == SUBREG
		  || MEM_P (ref))
		invalidate (ref, VOIDmode);
	      else if (GET_CODE (ref) == STRICT_LOW_PART
		       || GET_CODE (ref) == ZERO_EXTRACT)
		invalidate (XEXP (ref, 0), GET_MODE (ref));
	    }
	}
    }
}
/* Perform invalidation on the basis of everything about INSN.
   This includes the places CLOBBERed, and anything that might
   alias with something that is SET or CLOBBERed.  */

static void
invalidate_from_sets_and_clobbers (rtx_insn *insn)
{
  rtx tem;
  rtx x = PATTERN (insn);

  if (CALL_P (insn))
    {
      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
	{
	  rtx temx = XEXP (tem, 0);
	  if (GET_CODE (temx) == CLOBBER)
	    invalidate (SET_DEST (temx), VOIDmode);
	}
    }

  /* Ensure we invalidate the destination register of a CALL insn.
     This is necessary for machines where this register is a fixed_reg,
     because no other code would invalidate it.  */
  if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
    invalidate (SET_DEST (x), VOIDmode);

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (GET_CODE (y) == CLOBBER)
	    {
	      rtx clobbered = XEXP (y, 0);

	      if (REG_P (clobbered)
		  || GET_CODE (clobbered) == SUBREG)
		invalidate (clobbered, VOIDmode);
	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
		       || GET_CODE (clobbered) == ZERO_EXTRACT)
		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
	    }
	  else if (GET_CODE (y) == SET && GET_CODE (SET_SRC (y)) == CALL)
	    invalidate (SET_DEST (y), VOIDmode);
	}
    }
}
/* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
   and replace any registers in them with either an equivalent constant
   or the canonical form of the register.  If we are inside an address,
   only do this if the address remains valid.

   OBJECT is 0 except when within a MEM in which case it is the MEM.

   Return the replacement for X.  */

static rtx
cse_process_notes_1 (rtx x, rtx object, bool *changed)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i;

  switch (code)
    {
    case MEM:
      validate_change (x, &XEXP (x, 0),
		       cse_process_notes (XEXP (x, 0), x, changed), 0);
      return x;

    case EXPR_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL)
	XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX, changed);
      /* Fall through.  */

    case INSN_LIST:
    case INT_LIST:
      if (XEXP (x, 1))
	XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX, changed);
      return x;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case SUBREG:
      {
	rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
	/* We don't substitute VOIDmode constants into these rtx,
	   since they would impede folding.  */
	if (GET_MODE (new_rtx) != VOIDmode)
	  validate_change (object, &XEXP (x, 0), new_rtx, 0);
	return x;
      }

    case UNSIGNED_FLOAT:
      {
	rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
	/* We don't substitute negative VOIDmode constants into these rtx,
	   since they would impede folding.  */
	if (GET_MODE (new_rtx) != VOIDmode
	    || (CONST_INT_P (new_rtx) && INTVAL (new_rtx) >= 0)
	    || (CONST_DOUBLE_P (new_rtx) && CONST_DOUBLE_HIGH (new_rtx) >= 0))
	  validate_change (object, &XEXP (x, 0), new_rtx, 0);
	return x;
      }

    case REG:
      i = REG_QTY (REGNO (x));

      /* Return a constant or a constant register.  */
      if (REGNO_QTY_VALID_P (REGNO (x)))
	{
	  struct qty_table_elem *ent = &qty_table[i];

	  if (ent->const_rtx != NULL_RTX
	      && (CONSTANT_P (ent->const_rtx)
		  || REG_P (ent->const_rtx)))
	    {
	      rtx new_rtx = gen_lowpart (GET_MODE (x), ent->const_rtx);
	      if (new_rtx)
		return copy_rtx (new_rtx);
	    }
	}

      /* Otherwise, canonicalize this register.  */
      return canon_reg (x, NULL);

    default:
      break;
    }

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      validate_change (object, &XEXP (x, i),
		       cse_process_notes (XEXP (x, i), object, changed), 0);

  return x;
}

static rtx
cse_process_notes (rtx x, rtx object, bool *changed)
{
  rtx new_rtx = cse_process_notes_1 (x, object, changed);
  if (new_rtx != x)
    *changed = true;
  return new_rtx;
}
/* Find a path in the CFG, starting with FIRST_BB to perform CSE on.

   DATA is a pointer to a struct cse_basic_block_data, that is used to
   describe the path.
   It is filled with a queue of basic blocks, starting with FIRST_BB
   and following a trace through the CFG.

   If all paths starting at FIRST_BB have been followed, or no new path
   starting at FIRST_BB can be constructed, this function returns FALSE.
   Otherwise, DATA->path is filled and the function returns TRUE indicating
   that a path to follow was found.

   If FOLLOW_JUMPS is false, the maximum path length is 1 and the only
   block in the path will be FIRST_BB.  */

static bool
cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
	       int follow_jumps)
{
  basic_block bb;
  edge e;
  int path_size;

  bitmap_set_bit (cse_visited_basic_blocks, first_bb->index);

  /* See if there is a previous path.  */
  path_size = data->path_size;

  /* There is a previous path.  Make sure it started with FIRST_BB.  */
  if (path_size)
    gcc_assert (data->path[0].bb == first_bb);

  /* There was only one basic block in the last path.  Clear the path and
     return, so that paths starting at another basic block can be tried.  */
  if (path_size == 1)
    {
      path_size = 0;
      goto done;
    }

  /* If the path was empty from the beginning, construct a new path.  */
  if (path_size == 0)
    data->path[path_size++].bb = first_bb;
  else
    {
      /* Otherwise, path_size must be equal to or greater than 2, because
	 a previous path exists that is at least two basic blocks long.

	 Update the previous branch path, if any.  If the last branch was
	 previously along the branch edge, take the fallthrough edge now.  */
      while (path_size >= 2)
	{
	  basic_block last_bb_in_path, previous_bb_in_path;

	  --path_size;
	  last_bb_in_path = data->path[path_size].bb;
	  previous_bb_in_path = data->path[path_size - 1].bb;

	  /* If we previously followed a path along the branch edge, try
	     the fallthru edge now.  */
	  if (EDGE_COUNT (previous_bb_in_path->succs) == 2
	      && any_condjump_p (BB_END (previous_bb_in_path))
	      && (e = find_edge (previous_bb_in_path, last_bb_in_path))
	      && e == BRANCH_EDGE (previous_bb_in_path))
	    {
	      bb = FALLTHRU_EDGE (previous_bb_in_path)->dest;
	      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && single_pred_p (bb)
		  /* We used to assert here that we would only see blocks
		     that we have not visited yet.  But we may end up
		     visiting basic blocks twice if the CFG has changed
		     in this run of cse_main, because when the CFG changes
		     the topological sort of the CFG also changes.  A basic
		     block that previously had more than two predecessors
		     may now have a single predecessor, and become part of
		     a path that starts at another basic block.

		     We still want to visit each basic block only once, so
		     halt the path here if we have already visited BB.  */
		  && !bitmap_bit_p (cse_visited_basic_blocks, bb->index))
		{
		  bitmap_set_bit (cse_visited_basic_blocks, bb->index);
		  data->path[path_size++].bb = bb;
		  break;
		}
	    }

	  data->path[path_size].bb = NULL;
	}

      /* If only one block remains in the path, bail.  */
      if (path_size == 1)
	{
	  path_size = 0;
	  goto done;
	}
    }

  /* Extend the path if possible.  */
  if (follow_jumps)
    {
      bb = data->path[path_size - 1].bb;
      while (bb && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH))
	{
	  if (single_succ_p (bb))
	    e = single_succ_edge (bb);
	  else if (EDGE_COUNT (bb->succs) == 2
		   && any_condjump_p (BB_END (bb)))
	    {
	      /* First try to follow the branch.  If that doesn't lead
		 to a useful path, follow the fallthru edge.  */
	      e = BRANCH_EDGE (bb);
	      if (!single_pred_p (e->dest))
		e = FALLTHRU_EDGE (bb);
	    }
	  else
	    e = NULL;

	  if (e
	      && !((e->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
	      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && single_pred_p (e->dest)
	      /* Avoid visiting basic blocks twice.  The large comment
		 above explains why this can happen.  */
	      && !bitmap_bit_p (cse_visited_basic_blocks, e->dest->index))
	    {
	      basic_block bb2 = e->dest;
	      bitmap_set_bit (cse_visited_basic_blocks, bb2->index);
	      data->path[path_size++].bb = bb2;
	      bb = bb2;
	    }
	  else
	    bb = NULL;
	}
    }

 done:
  data->path_size = path_size;
  return path_size != 0;
}
/* Dump the path in DATA to file F.  NSETS is the number of sets
   in the path.  */

static void
cse_dump_path (struct cse_basic_block_data *data, int nsets, FILE *f)
{
  int path_entry;

  fprintf (f, ";; Following path with %d sets: ", nsets);
  for (path_entry = 0; path_entry < data->path_size; path_entry++)
    fprintf (f, "%d ", (data->path[path_entry].bb)->index);
}
/* Return true if BB has exception handling successor edges.  */

static bool
have_eh_succ_edges (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->flags & EDGE_EH)
      return true;

  return false;
}
/* Scan to the end of the path described by DATA.  Return an estimate of
   the total number of SETs of all insns in the path.  */

static void
cse_prescan_path (struct cse_basic_block_data *data)
{
  int nsets = 0;
  int path_entry;
  int path_size = data->path_size;

  /* Scan to end of each basic block in the path.  */
  for (path_entry = 0; path_entry < path_size; path_entry++)
    {
      basic_block bb;
      rtx_insn *insn;

      bb = data->path[path_entry].bb;

      FOR_BB_INSNS (bb, insn)
	{
	  if (!INSN_P (insn))
	    continue;

	  /* A PARALLEL can have lots of SETs in it,
	     especially if it is really an ASM_OPERANDS.  */
	  if (GET_CODE (PATTERN (insn)) == PARALLEL)
	    nsets += XVECLEN (PATTERN (insn), 0);
	  else
	    nsets += 1;
	}
    }

  data->nsets = nsets;
}
/* Return true if the pattern of INSN uses a LABEL_REF for which
   there isn't a REG_LABEL_OPERAND note.  */

static bool
check_for_label_ref (rtx_insn *insn)
{
  /* If this insn uses a LABEL_REF and there isn't a REG_LABEL_OPERAND
     note for it, we must rerun jump since it needs to place the note.  If
     this is a LABEL_REF for a CODE_LABEL that isn't in the insn chain,
     don't do this since no REG_LABEL_OPERAND will be added.  */
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
    {
      const_rtx x = *iter;
      if (GET_CODE (x) == LABEL_REF
	  && !LABEL_REF_NONLOCAL_P (x)
	  && (!JUMP_P (insn)
	      || !label_is_jump_target_p (label_ref_label (x), insn))
	  && LABEL_P (label_ref_label (x))
	  && INSN_UID (label_ref_label (x)) != 0
	  && !find_reg_note (insn, REG_LABEL_OPERAND, label_ref_label (x)))
	return true;
    }
  return false;
}
/* Process a single extended basic block described by EBB_DATA.  */

static void
cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
{
  int path_size = ebb_data->path_size;
  int path_entry;
  int num_insns = 0;

  /* Allocate the space needed by qty_table.  */
  qty_table = XNEWVEC (struct qty_table_elem, max_qty);

  new_basic_block ();
  cse_ebb_live_in = df_get_live_in (ebb_data->path[0].bb);
  cse_ebb_live_out = df_get_live_out (ebb_data->path[path_size - 1].bb);
  for (path_entry = 0; path_entry < path_size; path_entry++)
    {
      basic_block bb;
      rtx_insn *insn;

      bb = ebb_data->path[path_entry].bb;

      /* Invalidate recorded information for eh regs if there is an EH
	 edge pointing to that bb.  */
      if (bb_has_eh_pred (bb))
	{
	  df_ref def;

	  FOR_EACH_ARTIFICIAL_DEF (def, bb->index)
	    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
	      invalidate (DF_REF_REG (def), GET_MODE (DF_REF_REG (def)));
	}

      optimize_this_for_speed_p = optimize_bb_for_speed_p (bb);
      FOR_BB_INSNS (bb, insn)
	{
	  /* If we have processed 1,000 insns, flush the hash table to
	     avoid extreme quadratic behavior.  We must not include NOTEs
	     in the count since there may be more of them when generating
	     debugging information.  If we clear the table at different
	     times, code generated with -g -O might be different from code
	     generated with -O but not -g.

	     FIXME: This is a real kludge and needs to be done some other
		    way.  */
	  if (NONDEBUG_INSN_P (insn)
	      && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
	    {
	      flush_hash_table ();
	      num_insns = 0;
	    }

	  if (INSN_P (insn))
	    {
	      /* Process notes first so we have all notes in canonical forms
		 when looking for duplicate operations.  */
	      if (REG_NOTES (insn))
		{
		  bool changed = false;
		  REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn),
							NULL_RTX, &changed);
		  if (changed)
		    df_notes_rescan (insn);
		}

	      cse_insn (insn);

	      /* If we haven't already found an insn where we added a
		 LABEL_REF, check this one.  */
	      if (INSN_P (insn) && !recorded_label_ref
		  && check_for_label_ref (insn))
		recorded_label_ref = true;

	      if (HAVE_cc0 && NONDEBUG_INSN_P (insn))
		{
		  /* If the previous insn sets CC0 and this insn no
		     longer references CC0, delete the previous insn.
		     Here we use the fact that nothing expects CC0 to be
		     valid over an insn, which is true until the final
		     pass.  */
		  rtx_insn *prev_insn;
		  rtx tem;

		  prev_insn = prev_nonnote_nondebug_insn (insn);
		  if (prev_insn && NONJUMP_INSN_P (prev_insn)
		      && (tem = single_set (prev_insn)) != NULL_RTX
		      && SET_DEST (tem) == cc0_rtx
		      && ! reg_mentioned_p (cc0_rtx, PATTERN (insn)))
		    delete_insn (prev_insn);

		  /* If this insn is not the last insn in the basic
		     block, it will be PREV_INSN(insn) in the next
		     iteration.  If we recorded any CC0-related
		     information for this insn, remember it.  */
		  if (insn != BB_END (bb))
		    {
		      prev_insn_cc0 = this_insn_cc0;
		      prev_insn_cc0_mode = this_insn_cc0_mode;
		    }
		}
	    }
	}

      /* With non-call exceptions, we are not always able to update
	 the CFG properly inside cse_insn.  So clean up possibly
	 redundant EH edges here.  */
      if (cfun->can_throw_non_call_exceptions && have_eh_succ_edges (bb))
	cse_cfg_altered |= purge_dead_edges (bb);

      /* If we changed a conditional jump, we may have terminated
	 the path we are following.  Check that by verifying that
	 the edge we would take still exists.  If the edge does
	 not exist anymore, purge the remainder of the path.
	 Note that this will cause us to return to the caller.  */
      if (path_entry < path_size - 1)
	{
	  basic_block next_bb = ebb_data->path[path_entry + 1].bb;
	  if (!find_edge (bb, next_bb))
	    {
	      do
		{
		  path_size--;

		  /* If we truncate the path, we must also reset the
		     visited bit on the remaining blocks in the path,
		     or we will never visit them at all.  */
		  bitmap_clear_bit (cse_visited_basic_blocks,
				    ebb_data->path[path_size].bb->index);
		  ebb_data->path[path_size].bb = NULL;
		}
	      while (path_size - 1 != path_entry);
	      ebb_data->path_size = path_size;
	    }
	}

      /* If this is a conditional jump insn, record any known
	 equivalences due to the condition being tested.  */
      insn = BB_END (bb);
      if (path_entry < path_size - 1
	  && EDGE_COUNT (bb->succs) == 2
	  && JUMP_P (insn)
	  && single_set (insn)
	  && any_condjump_p (insn))
	{
	  basic_block next_bb = ebb_data->path[path_entry + 1].bb;
	  bool taken = (next_bb == BRANCH_EDGE (bb)->dest);
	  record_jump_equiv (insn, taken);
	}

      /* Clear the CC0-tracking related insns, they can't provide
	 useful information across basic block boundaries.  */
      prev_insn_cc0 = 0;
    }

  gcc_assert (next_qty <= max_qty);

  free (qty_table);
}
/* Perform cse on the instructions of a function.
   F is the first instruction.
   NREGS is one plus the highest pseudo-reg number used in the instruction.

   Return 2 if jump optimizations should be redone due to simplifications
   in conditional jump instructions.
   Return 1 if the CFG should be cleaned up because it has been modified.
   Return 0 otherwise.  */

static int
cse_main (rtx_insn *f ATTRIBUTE_UNUSED, int nregs)
{
  struct cse_basic_block_data ebb_data;
  basic_block bb;
  int *rc_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
  int i, n_blocks;

  /* CSE doesn't use dominance info but can invalidate it in different ways.
     For simplicity free dominance info here.  */
  free_dominance_info (CDI_DOMINATORS);

  df_set_flags (DF_LR_RUN_DCE);
  df_note_add_problem ();
  df_analyze ();
  df_set_flags (DF_DEFER_INSN_RESCAN);

  reg_scan (get_insns (), max_reg_num ());
  init_cse_reg_info (nregs);

  ebb_data.path = XNEWVEC (struct branch_path,
			   PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));

  cse_cfg_altered = false;
  cse_jumps_altered = false;
  recorded_label_ref = false;
  constant_pool_entries_cost = 0;
  constant_pool_entries_regcost = 0;
  ebb_data.path_size = 0;
  ebb_data.nsets = 0;
  rtl_hooks = cse_rtl_hooks;

  init_alias_analysis ();

  reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);

  /* Set up the table of already visited basic blocks.  */
  cse_visited_basic_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (cse_visited_basic_blocks);

  /* Loop over basic blocks in reverse completion order (RPO),
     excluding the ENTRY and EXIT blocks.  */
  n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);
  i = 0;
  while (i < n_blocks)
    {
      /* Find the first block in the RPO queue that we have not yet
	 processed before.  */
      do
	{
	  bb = BASIC_BLOCK_FOR_FN (cfun, rc_order[i++]);
	}
      while (bitmap_bit_p (cse_visited_basic_blocks, bb->index)
	     && i < n_blocks);

      /* Find all paths starting with BB, and process them.  */
      while (cse_find_path (bb, &ebb_data, flag_cse_follow_jumps))
	{
	  /* Pre-scan the path.  */
	  cse_prescan_path (&ebb_data);

	  /* If this basic block has no sets, skip it.  */
	  if (ebb_data.nsets == 0)
	    continue;

	  /* Get a reasonable estimate for the maximum number of qty's
	     needed for this path.  For this, we take the number of sets
	     and multiply that by MAX_RECOG_OPERANDS.  */
	  max_qty = ebb_data.nsets * MAX_RECOG_OPERANDS;

	  /* Dump the path we're about to process.  */
	  if (dump_file)
	    cse_dump_path (&ebb_data, ebb_data.nsets, dump_file);

	  cse_extended_basic_block (&ebb_data);
	}
    }

  /* Clean up.  */
  end_alias_analysis ();
  free (reg_eqv_table);
  free (ebb_data.path);
  sbitmap_free (cse_visited_basic_blocks);
  free (rc_order);
  rtl_hooks = general_rtl_hooks;

  if (cse_jumps_altered || recorded_label_ref)
    return 2;
  else if (cse_cfg_altered)
    return 1;
  else
    return 0;
}
/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count, INCR is how much
   we count each register usage.

   Don't count a usage of DEST, which is the SET_DEST of a SET which
   contains X in its SET_SRC.  This is because such a SET does not
   modify the liveness of DEST.
   DEST is set to pc_rtx for a trapping insn, or for an insn with side effects.
   We must then count uses of a SET_DEST regardless, because the insn can't be
   deleted anyway.  */

static void
count_reg_usage (rtx x, int *counts, rtx dest, int incr)
{
  enum rtx_code code;
  rtx note;
  const char *fmt;
  int i, j;

  if (x == 0)
    return;

  switch (code = GET_CODE (x))
    {
    case REG:
      if (x != dest)
	counts[REGNO (x)] += incr;
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (MEM_P (XEXP (x, 0)))
	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.  */
      if (!REG_P (SET_DEST (x)))
	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
      count_reg_usage (SET_SRC (x), counts,
		       dest ? dest : SET_DEST (x),
		       incr);
      return;

    case DEBUG_INSN:
      return;

    case CALL_INSN:
    case INSN:
    case JUMP_INSN:
      /* We expect dest to be NULL_RTX here.  If the insn may throw,
	 or if it cannot be deleted due to side-effects, mark this fact
	 by setting DEST to pc_rtx.  */
      if ((!cfun->can_delete_dead_exceptions && !insn_nothrow_p (x))
	  || side_effects_p (PATTERN (x)))
	dest = pc_rtx;
      if (code == CALL_INSN)
	count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
      count_reg_usage (PATTERN (x), counts, dest, incr);

      /* Things used in a REG_EQUAL note aren't dead since loop may try to
	 use them.  */

      note = find_reg_equal_equiv_note (x);
      if (note)
	{
	  rtx eqv = XEXP (note, 0);

	  if (GET_CODE (eqv) == EXPR_LIST)
	    /* This REG_EQUAL note describes the result of a function call.
	       Process all the arguments.  */
	    do
	      {
		count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
		eqv = XEXP (eqv, 1);
	      }
	    while (eqv && GET_CODE (eqv) == EXPR_LIST);
	  else
	    count_reg_usage (eqv, counts, dest, incr);
	}
      return;

    case EXPR_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL
	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE)
	  /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
	     involving registers in the address.  */
	  || GET_CODE (XEXP (x, 0)) == CLOBBER)
	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);

      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
      return;

    case ASM_OPERANDS:
      /* Iterate over just the inputs, not the constraints as well.  */
      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	count_reg_usage (XEXP (x, i), counts, dest, incr);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
    }
}
/* Return true if X is a dead register.  */

static bool
is_dead_reg (const_rtx x, int *counts)
{
  return (REG_P (x)
	  && REGNO (x) >= FIRST_PSEUDO_REGISTER
	  && counts[REGNO (x)] == 0);
}
/* Return true if set is live.  */

static bool
set_live_p (rtx set, rtx_insn *insn ATTRIBUTE_UNUSED, /* Only used with
							 HAVE_cc0.  */
	    int *counts)
{
  rtx_insn *tem;

  if (set_noop_p (set))
    ;

  else if (GET_CODE (SET_DEST (set)) == CC0
	   && !side_effects_p (SET_SRC (set))
	   && ((tem = next_nonnote_nondebug_insn (insn)) == NULL_RTX
	       || !INSN_P (tem)
	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
    return false;
  else if (!is_dead_reg (SET_DEST (set), counts)
	   || side_effects_p (SET_SRC (set)))
    return true;

  return false;
}
/* Return true if insn is live.  */

static bool
insn_live_p (rtx_insn *insn, int *counts)
{
  int i;

  if (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
    return true;
  else if (GET_CODE (PATTERN (insn)) == SET)
    return set_live_p (PATTERN (insn), insn, counts);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  if (GET_CODE (elt) == SET)
	    {
	      if (set_live_p (elt, insn, counts))
		return true;
	    }
	  else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
	    return true;
	}
      return false;
    }
  else if (DEBUG_INSN_P (insn))
    {
      rtx_insn *next;

      if (DEBUG_MARKER_INSN_P (insn))
	return true;

      for (next = NEXT_INSN (insn); next; next = NEXT_INSN (next))
	if (NOTE_P (next))
	  continue;
	else if (!DEBUG_INSN_P (next))
	  return true;
	/* If we find an inspection point, such as a debug begin stmt,
	   we want to keep the earlier debug insn.  */
	else if (DEBUG_MARKER_INSN_P (next))
	  return true;
	else if (INSN_VAR_LOCATION_DECL (insn) == INSN_VAR_LOCATION_DECL (next))
	  return false;

      return true;
    }
  else
    return true;
}
/* Count the number of stores into pseudo.  Callback for note_stores.  */

static void
count_stores (rtx x, const_rtx set ATTRIBUTE_UNUSED, void *data)
{
  int *counts = (int *) data;
  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
    counts[REGNO (x)]++;
}
/* Return true if DEBUG_INSN pattern PAT needs to be reset because some dead
   pseudo doesn't have a replacement.  COUNTS[X] is zero if register X
   is dead and REPLACEMENTS[X] is null if it has no replacement.
   Set *SEEN_REPL to true if we see a dead register that does have
   a replacement.  */

static bool
is_dead_debug_insn (const_rtx pat, int *counts, rtx *replacements,
		    bool *seen_repl)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, pat, NONCONST)
    {
      const_rtx x = *iter;
      if (is_dead_reg (x, counts))
	{
	  if (replacements && replacements[REGNO (x)] != NULL_RTX)
	    *seen_repl = true;
	  else
	    return true;
	}
    }
  return false;
}
/* Replace a dead pseudo in a DEBUG_INSN with replacement DEBUG_EXPR.
   Callback for simplify_replace_fn_rtx.  */

static rtx
replace_dead_reg (rtx x, const_rtx old_rtx ATTRIBUTE_UNUSED, void *data)
{
  rtx *replacements = (rtx *) data;

  if (REG_P (x)
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && replacements[REGNO (x)] != NULL_RTX)
    {
      if (GET_MODE (x) == GET_MODE (replacements[REGNO (x)]))
	return replacements[REGNO (x)];
      return lowpart_subreg (GET_MODE (x), replacements[REGNO (x)],
			     GET_MODE (replacements[REGNO (x)]));
    }
  return NULL_RTX;
}
/* Scan all the insns and delete any that are dead; i.e., they store a register
   that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in loop since it won't try to
   move dead invariants out of loops or make givs for dead quantities.  The
   remaining passes of the compilation are also sped up.  */

int
delete_trivially_dead_insns (rtx_insn *insns, int nreg)
{
  int *counts;
  rtx_insn *insn, *prev;
  rtx *replacements = NULL;
  int ndead = 0;

  timevar_push (TV_DELETE_TRIVIALLY_DEAD);
  /* First count the number of times each register is used.  */
  if (MAY_HAVE_DEBUG_BIND_INSNS)
    {
      counts = XCNEWVEC (int, nreg * 3);
      for (insn = insns; insn; insn = NEXT_INSN (insn))
	if (DEBUG_BIND_INSN_P (insn))
	  count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
			   NULL_RTX, 1);
	else if (INSN_P (insn))
	  {
	    count_reg_usage (insn, counts, NULL_RTX, 1);
	    note_stores (insn, count_stores, counts + nreg * 2);
	  }
      /* If there can be debug insns, COUNTS are 3 consecutive arrays.
	 First one counts how many times each pseudo is used outside
	 of debug insns, second counts how many times each pseudo is
	 used in debug insns and third counts how many times a pseudo
	 is stored.  */
    }
  else
    {
      counts = XCNEWVEC (int, nreg);
      for (insn = insns; insn; insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  count_reg_usage (insn, counts, NULL_RTX, 1);
      /* If no debug insns can be present, COUNTS is just an array
	 which counts how many times each pseudo is used.  */
    }
  /* Pseudo PIC register should be considered as used due to possible
     new usages generated.  */
  if (!reload_completed
      && pic_offset_table_rtx
      && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
    counts[REGNO (pic_offset_table_rtx)]++;
  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.

     The first jump optimization pass may leave a real insn as the last
     insn in the function.  We must not skip that insn or we may end
     up deleting code that is not really dead.

     If some otherwise unused register is only used in DEBUG_INSNs,
     try to create a DEBUG_EXPR temporary and emit a DEBUG_INSN before
     the setter.  Then go through DEBUG_INSNs and if a DEBUG_EXPR
     has been created for the unused register, replace it with
     the DEBUG_EXPR, otherwise reset the DEBUG_INSN.  */
  for (insn = get_last_insn (); insn; insn = prev)
    {
      int live_insn;

      prev = PREV_INSN (insn);
      if (!INSN_P (insn))
	continue;

      live_insn = insn_live_p (insn, counts);

      /* If this is a dead insn, delete it and show registers in it aren't
	 being used.  */

      if (! live_insn && dbg_cnt (delete_trivial_dead))
	{
	  if (DEBUG_INSN_P (insn))
	    {
	      if (DEBUG_BIND_INSN_P (insn))
		count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
				 NULL_RTX, -1);
	    }
	  else
	    {
	      rtx set;
	      if (MAY_HAVE_DEBUG_BIND_INSNS
		  && (set = single_set (insn)) != NULL_RTX
		  && is_dead_reg (SET_DEST (set), counts)
		  /* Used at least once in some DEBUG_INSN.  */
		  && counts[REGNO (SET_DEST (set)) + nreg] > 0
		  /* And set exactly once.  */
		  && counts[REGNO (SET_DEST (set)) + nreg * 2] == 1
		  && !side_effects_p (SET_SRC (set))
		  && asm_noperands (PATTERN (insn)) < 0)
		{
		  rtx dval, bind_var_loc;
		  rtx_insn *bind;

		  /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
		  dval = make_debug_expr_from_rtl (SET_DEST (set));

		  /* Emit a debug bind insn before the insn in which
		     reg dies.  */
		  bind_var_loc =
		    gen_rtx_VAR_LOCATION (GET_MODE (SET_DEST (set)),
					  DEBUG_EXPR_TREE_DECL (dval),
					  SET_SRC (set),
					  VAR_INIT_STATUS_INITIALIZED);
		  count_reg_usage (bind_var_loc, counts + nreg, NULL_RTX, 1);

		  bind = emit_debug_insn_before (bind_var_loc, insn);
		  df_insn_rescan (bind);

		  if (replacements == NULL)
		    replacements = XCNEWVEC (rtx, nreg);
		  replacements[REGNO (SET_DEST (set))] = dval;
		}

	      count_reg_usage (insn, counts, NULL_RTX, -1);
	      ndead++;
	    }
	  cse_cfg_altered |= delete_insn_and_edges (insn);
	}
    }

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    {
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	if (DEBUG_BIND_INSN_P (insn))
	  {
	    /* If this debug insn references a dead register that wasn't
	       replaced with a DEBUG_EXPR, reset the DEBUG_INSN.  */
	    bool seen_repl = false;
	    if (is_dead_debug_insn (INSN_VAR_LOCATION_LOC (insn),
				    counts, replacements, &seen_repl))
	      {
		INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
		df_insn_rescan (insn);
	      }
	    else if (seen_repl)
	      {
		INSN_VAR_LOCATION_LOC (insn)
		  = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn),
					     NULL_RTX, replace_dead_reg,
					     replacements);
		df_insn_rescan (insn);
	      }
	  }
      free (replacements);
    }

  if (dump_file && ndead)
    fprintf (dump_file, "Deleted %i trivially dead insns\n",
	     ndead);
  /* Clean up.  */
  free (counts);
  timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
  return ndead;
}
/* If LOC contains references to NEWREG in a different mode, change them
   to use NEWREG instead.  */

static void
cse_change_cc_mode (subrtx_ptr_iterator::array_type &array,
		    rtx *loc, rtx_insn *insn, rtx newreg)
{
  FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
    {
      rtx *loc = *iter;
      rtx x = *loc;
      if (x
	  && REG_P (x)
	  && REGNO (x) == REGNO (newreg)
	  && GET_MODE (x) != GET_MODE (newreg))
	{
	  validate_change (insn, loc, newreg, 1);
	  iter.skip_subrtxes ();
	}
    }
}
/* Change the mode of any reference to the register REGNO (NEWREG) to
   GET_MODE (NEWREG) in INSN.  */

static void
cse_change_cc_mode_insn (rtx_insn *insn, rtx newreg)
{
  int success;

  if (!INSN_P (insn))
    return;

  subrtx_ptr_iterator::array_type array;
  cse_change_cc_mode (array, &PATTERN (insn), insn, newreg);
  cse_change_cc_mode (array, &REG_NOTES (insn), insn, newreg);

  /* If the following assertion was triggered, there is most probably
     something wrong with the cc_modes_compatible back end function.
     CC modes only can be considered compatible if the insn - with the mode
     replaced by any of the compatible modes - can still be recognized.  */
  success = apply_change_group ();
  gcc_assert (success);
}
/* Change the mode of any reference to the register REGNO (NEWREG) to
   GET_MODE (NEWREG), starting at START.  Stop before END.  Stop at
   any instruction which modifies NEWREG.  */

static void
cse_change_cc_mode_insns (rtx_insn *start, rtx_insn *end, rtx newreg)
{
  rtx_insn *insn;

  for (insn = start; insn != end; insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
	continue;

      if (reg_set_p (newreg, insn))
	return;

      cse_change_cc_mode_insn (insn, newreg);
    }
}
/* BB is a basic block which finishes with CC_REG as a condition code
   register which is set to CC_SRC.  Look through the successors of BB
   to find blocks which have a single predecessor (i.e., this one),
   and look through those blocks for an assignment to CC_REG which is
   equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
   permitted to change the mode of CC_SRC to a compatible mode.  This
   returns VOIDmode if no equivalent assignments were found.
   Otherwise it returns the mode which CC_SRC should wind up with.
   ORIG_BB should be the same as BB in the outermost cse_cc_succs call,
   but is passed unmodified down to recursive calls in order to prevent
   endless recursion.

   The main complexity in this function is handling the mode issues.
   We may have more than one duplicate which we can eliminate, and we
   try to find a mode which will work for multiple duplicates.  */

static machine_mode
cse_cc_succs (basic_block bb, basic_block orig_bb, rtx cc_reg, rtx cc_src,
	      bool can_change_mode)
{
  bool found_equiv;
  machine_mode mode;
  unsigned int insn_count;
  edge e;
  rtx_insn *insns[2];
  machine_mode modes[2];
  rtx_insn *last_insns[2];
  unsigned int i;
  rtx newreg;
  edge_iterator ei;

  /* We expect to have two successors.  Look at both before picking
     the final mode for the comparison.  If we have more successors
     (i.e., some sort of table jump, although that seems unlikely),
     then we require all beyond the first two to use the same
     mode.  */

  found_equiv = false;
  mode = GET_MODE (cc_src);
  insn_count = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      rtx_insn *insn;
      rtx_insn *end;

      if (e->flags & EDGE_COMPLEX)
	continue;

      if (EDGE_COUNT (e->dest->preds) != 1
	  || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  /* Avoid endless recursion on unreachable blocks.  */
	  || e->dest == orig_bb)
	continue;

      end = NEXT_INSN (BB_END (e->dest));
      for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;

	  /* If CC_SRC is modified, we have to stop looking for
	     something which uses it.  */
	  if (modified_in_p (cc_src, insn))
	    break;

	  /* Check whether INSN sets CC_REG to CC_SRC.  */
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      bool found;
	      machine_mode set_mode;
	      machine_mode comp_mode;

	      found = false;
	      set_mode = GET_MODE (SET_SRC (set));
	      comp_mode = set_mode;
	      if (rtx_equal_p (cc_src, SET_SRC (set)))
		found = true;
	      else if (GET_CODE (cc_src) == COMPARE
		       && GET_CODE (SET_SRC (set)) == COMPARE
		       && mode != set_mode
		       && rtx_equal_p (XEXP (cc_src, 0),
				       XEXP (SET_SRC (set), 0))
		       && rtx_equal_p (XEXP (cc_src, 1),
				       XEXP (SET_SRC (set), 1)))
		{
		  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
		  if (comp_mode != VOIDmode
		      && (can_change_mode || comp_mode == mode))
		    found = true;
		}

	      if (found)
		{
		  found_equiv = true;
		  if (insn_count < ARRAY_SIZE (insns))
		    {
		      insns[insn_count] = insn;
		      modes[insn_count] = set_mode;
		      last_insns[insn_count] = end;
		      ++insn_count;

		      if (mode != comp_mode)
			{
			  gcc_assert (can_change_mode);
			  mode = comp_mode;

			  /* The modified insn will be re-recognized later.  */
			  PUT_MODE (cc_src, mode);
			}
		    }
		  else
		    {
		      if (set_mode != mode)
			{
			  /* We found a matching expression in the
			     wrong mode, but we don't have room to
			     store it in the array.  Punt.  This case
			     should be rare.  */
			  break;
			}
		      /* INSN sets CC_REG to a value equal to CC_SRC
			 with the right mode.  We can simply delete
			 it.  */
		      delete_insn (insn);
		    }

		  /* We found an instruction to delete.  Keep looking,
		     in the hopes of finding a three-way jump.  */
		  continue;
		}

	      /* We found an instruction which sets the condition
		 code, so don't look any farther.  */
	      break;
	    }

	  /* If INSN sets CC_REG in some other way, don't look any
	     farther.  */
	  if (reg_set_p (cc_reg, insn))
	    break;
	}

      /* If we fell off the bottom of the block, we can keep looking
	 through successors.  We pass CAN_CHANGE_MODE as false because
	 we aren't prepared to handle compatibility between the
	 further blocks and this block.  */
      if (insn == end)
	{
	  machine_mode submode;

	  submode = cse_cc_succs (e->dest, orig_bb, cc_reg, cc_src, false);
	  if (submode != VOIDmode)
	    {
	      gcc_assert (submode == mode);
	      found_equiv = true;
	      can_change_mode = false;
	    }
	}
    }

  if (! found_equiv)
    return VOIDmode;

  /* Now INSN_COUNT is the number of instructions we found which set
     CC_REG to a value equivalent to CC_SRC.  The instructions are in
     INSNS.  The modes used by those instructions are in MODES.  */

  newreg = NULL_RTX;
  for (i = 0; i < insn_count; ++i)
    {
      if (modes[i] != mode)
	{
	  /* We need to change the mode of CC_REG in INSNS[i] and
	     subsequent instructions.  */
	  if (! newreg)
	    {
	      if (GET_MODE (cc_reg) == mode)
		newreg = cc_reg;
	      else
		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
	    }
	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
				    newreg);
	}

      cse_cfg_altered |= delete_insn_and_edges (insns[i]);
    }

  return mode;
}
/* If we have a fixed condition code register (or two), walk through
   the instructions and try to eliminate duplicate assignments.  */

static void
cse_condition_code_reg (void)
{
  unsigned int cc_regno_1;
  unsigned int cc_regno_2;
  rtx cc_reg_1;
  rtx cc_reg_2;
  basic_block bb;

  if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
    return;

  cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
  if (cc_regno_2 != INVALID_REGNUM)
    cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
  else
    cc_reg_2 = NULL_RTX;

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *last_insn;
      rtx cc_reg;
      rtx_insn *insn;
      rtx_insn *cc_src_insn;
      rtx cc_src;
      machine_mode mode;
      machine_mode orig_mode;

      /* Look for blocks which end with a conditional jump based on a
	 condition code register.  Then look for the instruction which
	 sets the condition code register.  Then look through the
	 successor blocks for instructions which set the condition
	 code register to the same value.  There are other possible
	 uses of the condition code register, but these are by far the
	 most common and the ones which we are most likely to be able
	 to optimize.  */

      last_insn = BB_END (bb);
      if (!JUMP_P (last_insn))
	continue;

      if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
	cc_reg = cc_reg_1;
      else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
	cc_reg = cc_reg_2;
      else
	continue;

      cc_src_insn = NULL;
      cc_src = NULL_RTX;
      for (insn = PREV_INSN (last_insn);
	   insn && insn != PREV_INSN (BB_HEAD (bb));
	   insn = PREV_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      cc_src_insn = insn;
	      cc_src = SET_SRC (set);
	      break;
	    }
	  else if (reg_set_p (cc_reg, insn))
	    break;
	}

      if (! cc_src_insn)
	continue;

      if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
	continue;

      /* Now CC_REG is a condition code register used for a
	 conditional jump at the end of the block, and CC_SRC, in
	 CC_SRC_INSN, is the value to which that condition code
	 register is set, and CC_SRC is still meaningful at the end of
	 the basic block.  */

      orig_mode = GET_MODE (cc_src);
      mode = cse_cc_succs (bb, bb, cc_reg, cc_src, true);
      if (mode != VOIDmode)
	{
	  gcc_assert (mode == GET_MODE (cc_src));
	  if (mode != orig_mode)
	    {
	      rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));

	      cse_change_cc_mode_insn (cc_src_insn, newreg);

	      /* Do the same in the following insns that use the
		 current value of CC_REG within BB.  */
	      cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
					NEXT_INSN (last_insn),
					newreg);
	    }
	}
    }
}
/* Perform common subexpression elimination.  Nonzero value from
   `cse_main' means that jumps were simplified and some code may now
   be unreachable, so do jump optimization again.  */
static unsigned int
rest_of_handle_cse (void)
{
  int tem;

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  tem = cse_main (get_insns (), max_reg_num ());

  /* If we are not running more CSE passes, then we are no longer
     expecting CSE to be run.  But always rerun it in a cheap mode.  */
  cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;

  if (tem == 2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cse_cfg_altered |= cleanup_cfg (CLEANUP_CFG_CHANGED);
      timevar_pop (TV_JUMP);
    }
  else if (tem == 1 || optimize > 1)
    cse_cfg_altered |= cleanup_cfg (0);

  return 0;
}
const pass_data pass_data_cse =
{
  RTL_PASS, /* type */
  "cse1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CSE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_cse : public rtl_opt_pass
{
public:
  pass_cse (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cse, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return optimize > 0; }
  virtual unsigned int execute (function *) { return rest_of_handle_cse (); }

}; // class pass_cse

rtl_opt_pass *
make_pass_cse (gcc::context *ctxt)
{
  return new pass_cse (ctxt);
}
/* Run second CSE pass after loop optimizations.  */
static unsigned int
rest_of_handle_cse2 (void)
{
  int tem;

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  tem = cse_main (get_insns (), max_reg_num ());

  /* Run a pass to eliminate duplicated assignments to condition code
     registers.  We have to run this after bypass_jumps, because it
     makes it harder for that pass to determine whether a jump can be
     bypassed safely.  */
  cse_condition_code_reg ();

  delete_trivially_dead_insns (get_insns (), max_reg_num ());

  if (tem == 2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cse_cfg_altered |= cleanup_cfg (CLEANUP_CFG_CHANGED);
      timevar_pop (TV_JUMP);
    }
  else if (tem == 1)
    cse_cfg_altered |= cleanup_cfg (0);

  cse_not_expected = 1;
  return 0;
}

const pass_data pass_data_cse2 =
{
  RTL_PASS, /* type */
  "cse2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CSE2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_cse2 : public rtl_opt_pass
{
public:
  pass_cse2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cse2, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_rerun_cse_after_loop;
    }

  virtual unsigned int execute (function *) { return rest_of_handle_cse2 (); }

}; // class pass_cse2

rtl_opt_pass *
make_pass_cse2 (gcc::context *ctxt)
{
  return new pass_cse2 (ctxt);
}
/* Run a local CSE pass after global optimizations.  */
static unsigned int
rest_of_handle_cse_after_global_opts (void)
{
  int save_cfj;
  int tem;

  /* We only want to do local CSE, so don't follow jumps.  */
  save_cfj = flag_cse_follow_jumps;
  flag_cse_follow_jumps = 0;

  rebuild_jump_labels (get_insns ());
  tem = cse_main (get_insns (), max_reg_num ());
  cse_cfg_altered |= purge_all_dead_edges ();
  delete_trivially_dead_insns (get_insns (), max_reg_num ());

  cse_not_expected = !flag_rerun_cse_after_loop;

  /* If cse altered any jumps, rerun jump opts to clean things up.  */
  if (tem == 2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cse_cfg_altered |= cleanup_cfg (CLEANUP_CFG_CHANGED);
      timevar_pop (TV_JUMP);
    }
  else if (tem == 1)
    cse_cfg_altered |= cleanup_cfg (0);

  flag_cse_follow_jumps = save_cfj;
  return 0;
}

const pass_data pass_data_cse_after_global_opts =
{
  RTL_PASS, /* type */
  "cse_local", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CSE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_cse_after_global_opts : public rtl_opt_pass
{
public:
  pass_cse_after_global_opts (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cse_after_global_opts, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_rerun_cse_after_global_opts;
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_cse_after_global_opts ();
    }

}; // class pass_cse_after_global_opts

rtl_opt_pass *
make_pass_cse_after_global_opts (gcc::context *ctxt)
{
  return new pass_cse_after_global_opts (ctxt);
}