/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "coretypes.h"
#include "insn-config.h"
#include "cfgcleanup.h"
#include "rtlhooks-def.h"
#include "tree-pass.h"
#include "function-abi.h"

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.
   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `REG_QTY (N)' records what quantity register N is currently thought
   of containing.

   All real quantity numbers are greater than or equal to zero.
   If register N has not been assigned a quantity, `REG_QTY (N)' will
   equal -N - 1, which is always negative.

   Quantity numbers below zero do not exist and none of the `qty_table'
   entries should be referenced with a negative index.
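
   For instance (an illustrative sketch, not a transcript of the pass):
   after

	(set (reg:SI 100) (mem:SI (reg:SI 101)))
	(set (reg:SI 102) (reg:SI 100))

   the load gives reg 100 a fresh quantity, say 5, and the copy makes
   REG_QTY (102) == REG_QTY (100) == 5, so the two registers are known
   to hold the same value.  A register never mentioned so far, say 103,
   still has REG_QTY (103) == -103 - 1 == -104, i.e. no valid quantity.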

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, it must be true that
   REG expressions with qty_table `mode' must be in the hash table for both
   registers and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity number of one
   of the registers is not the same mode as those expressions.

Constants and quantity numbers

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.
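
   For example (illustrative only), for

	(set (reg:HI 110) (const_int 7))

   the constant 7 is entered in the hash table with mode HImode, taken
   from the destination register, and a later (const_int 7) moved into
   an SImode destination is recorded separately under SImode.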

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.
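
   For instance (illustrative only): if REG_QTY (100) == REG_QTY (102),
   then (plus:SI (reg:SI 100) (const_int 4)) and
   (plus:SI (reg:SI 102) (const_int 4)) hash to the same bucket, because
   each register reference hashes through the shared quantity number,
   and the lookup can then recognize the two expressions as equivalent.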

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed from the hash table.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   `REG_TICK' and `REG_IN_TABLE', accessors for members of
   cse_reg_info, are used to detect this case.  REG_TICK (i) is
   incremented whenever a value is stored in register i.
   REG_IN_TABLE (i) holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value REG_TICK (i)
   had when the references were entered.  If we want to enter a
   reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
   remove old references.  Until we want to enter a new entry, the
   mere fact that the two vectors don't match makes the entries be
   ignored if anyone tries to match them.
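
   A small worked example (illustrative only): suppose an expression
   mentioning reg 100 is entered while REG_TICK (100) == 3, so
   REG_IN_TABLE (100) is set to 3.  A later store to reg 100 bumps
   REG_TICK (100) to 4; the stale entries are simply ignored because
   3 != 4.  Only when a new expression mentioning reg 100 is about to
   be entered do we pay for removing the stale references, after which
   REG_IN_TABLE (100) becomes 4 again.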

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, `REG_TICK' and
   `REG_IN_TABLE' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */

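/* For instance (an illustrative sketch): when (const (plus (symbol_ref
   "table") (const_int 8))) is entered, (symbol_ref "table") is entered
   too and the two are linked as related values, so a register already
   known to hold the plain symbol can later be offset by 8 instead of
   rematerializing the full constant.  */
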
/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */
static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */
static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A summation of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */

struct qty_table_elem
{
  rtx const_rtx;
  rtx_insn *const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  ENUM_BITFIELD(machine_mode) mode : 8;
};

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;

/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the current and last value assigned to CC0.
   If it should happen to be a constant, it is stored in preference
   to the actual assigned value.  In case it is a constant, we store
   the mode in which the constant should be interpreted.  */

static rtx this_insn_cc0, prev_insn_cc0;
static machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;

/* Insn being scanned.  */

static rtx_insn *this_insn;
static bool optimize_this_for_speed_p;

/* Index by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;

struct cse_reg_info
{
  /* The timestamp at which this register is initialized.  */
  unsigned int timestamp;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};

/* A table of cse_reg_info indexed by register numbers.  */
static struct cse_reg_info *cse_reg_info_table;

/* The size of the above table.  */
static unsigned int cse_reg_info_table_size;

/* The index of the first entry that has not been initialized.  */
static unsigned int cse_reg_info_table_first_uninitialized;

/* The timestamp at the beginning of the current run of
   cse_extended_basic_block.  We increment this variable at the beginning of
   the current run of cse_extended_basic_block.  The timestamp field of a
   cse_reg_info entry matches the value of this variable if and only
   if the entry has been initialized during the current run of
   cse_extended_basic_block.  */
static unsigned int cse_reg_info_timestamp;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* True if CSE has altered the CFG.  */
static bool cse_cfg_altered;

/* True if CSE has altered conditional jump insns in such a way
   that jump optimization should be redone.  */
static bool cse_jumps_altered;

/* True if we put a LABEL_REF into the hash table for an INSN
   without a REG_LABEL_OPERAND; in that case we have to rerun jump after
   CSE to put in the note.  */
static bool recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The canon_exp field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SIZE	(1 << HASH_SHIFT)
#define HASH_MASK	(HASH_SIZE - 1)

/* Compute hash code of X in mode M.  Special-case case where X is a pseudo
   register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M)	\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : canon_hash (X, M)) & HASH_MASK)

/* Like HASH, but without side-effects.  */
#define SAFE_HASH(X, M)	\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : safe_hash (X, M)) & HASH_MASK)

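/* For example (illustrative only): for a pseudo such as (reg:SI 100),
   HASH folds the quantity number REG_QTY (100) straight into the bucket
   index with no side effects; for anything else it falls back to
   canon_hash, which may set do_not_record or hash_arg_in_memory as a
   side effect.  SAFE_HASH exists for contexts that must not disturb
   those flags.  */
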
/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N)							\
  (REGNO_PTR_FRAME_P (N)						\
   || (HARD_REGISTER_NUM_P (N)						\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X, MODE)							\
  (REG_P (X) ? 0 : notreg_cost (X, MODE, SET, 1))
#define COST_IN(X, MODE, OUTER, OPNO)					\
  (REG_P (X) ? 0 : notreg_cost (X, MODE, OUTER, OPNO))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)

/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
   SUBREG).  */

#define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)

/* Get the quantity number for REG.  */

#define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)

/* Determine if the quantity number for register X represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)

/* Compare table_elt X and Y and return true iff X is cheaper than Y.  */

#define CHEAPER(X, Y) \
 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)

static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;
static int constant_pool_entries_regcost;

/* Trace a path through the CFG.  */

struct branch_path
{
  /* The basic block for this path entry.  */
  basic_block bb;
};

/* This data describes a block that will be processed by
   cse_extended_basic_block.  */

struct cse_basic_block_data
{
  /* Total number of SETs in block.  */
  int nsets;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current path, indicating which basic_blocks will be processed.  */
  struct branch_path *path;
};

/* Pointers to the live in/live out bitmaps for the boundaries of the
   current EBB.  */
static bitmap cse_ebb_live_in, cse_ebb_live_out;

/* A simple bitmap to track which basic blocks have been visited
   already as part of an already processed extended basic block.  */
static sbitmap cse_visited_basic_blocks;

static bool fixed_base_plus_p (rtx x);
static int notreg_cost (rtx, machine_mode, enum rtx_code, int);
static int preferable (int, int, int, int);
static void new_basic_block (void);
static void make_new_qty (unsigned int, machine_mode);
static void make_regs_eqv (unsigned int, unsigned int);
static void delete_reg_equiv (unsigned int);
static int mention_regs (rtx);
static int insert_regs (rtx, struct table_elt *, int);
static void remove_from_table (struct table_elt *, unsigned);
static void remove_pseudo_from_table (rtx, unsigned);
static struct table_elt *lookup (rtx, unsigned, machine_mode);
static struct table_elt *lookup_for_remove (rtx, unsigned, machine_mode);
static rtx lookup_as_function (rtx, enum rtx_code);
static struct table_elt *insert_with_costs (rtx, struct table_elt *, unsigned,
					    machine_mode, int, int);
static struct table_elt *insert (rtx, struct table_elt *, unsigned,
				 machine_mode);
static void merge_equiv_classes (struct table_elt *, struct table_elt *);
static void invalidate (rtx, machine_mode);
static void remove_invalid_refs (unsigned int);
static void remove_invalid_subreg_refs (unsigned int, poly_uint64,
					machine_mode);
static void rehash_using_reg (rtx);
static void invalidate_memory (void);
static rtx use_related_value (rtx, struct table_elt *);

static inline unsigned canon_hash (rtx, machine_mode);
static inline unsigned safe_hash (rtx, machine_mode);
static inline unsigned hash_rtx_string (const char *);

static rtx canon_reg (rtx, rtx_insn *);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
					   machine_mode *, machine_mode *);
static rtx fold_rtx (rtx, rtx_insn *);
static rtx equiv_constant (rtx);
static void record_jump_equiv (rtx_insn *, bool);
static void record_jump_cond (enum rtx_code, machine_mode, rtx, rtx,
			      int);
static void cse_insn (rtx_insn *);
static void cse_prescan_path (struct cse_basic_block_data *);
static void invalidate_from_clobbers (rtx_insn *);
static void invalidate_from_sets_and_clobbers (rtx_insn *);
static void cse_extended_basic_block (struct cse_basic_block_data *);
extern void dump_class (struct table_elt *);
static void get_cse_reg_info_1 (unsigned int regno);
static struct cse_reg_info * get_cse_reg_info (unsigned int regno);

static void flush_hash_table (void);
static bool insn_live_p (rtx_insn *, int *);
static bool set_live_p (rtx, rtx_insn *, int *);
static void cse_change_cc_mode_insn (rtx_insn *, rtx);
static void cse_change_cc_mode_insns (rtx_insn *, rtx_insn *, rtx);
static machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
				  bool);


#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible

static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;

/* Nonzero if X has the form (PLUS frame-pointer integer).  */

static bool
fixed_base_plus_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
	return true;
      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
	return true;
      return false;

    case PLUS:
      if (!CONST_INT_P (XEXP (x, 1)))
	return false;
      return fixed_base_plus_p (XEXP (x, 0));

    default:
      return false;
    }
}

/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
DEBUG_FUNCTION void
dump_class (struct table_elt *classp)
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}

/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of
   0.  If any other hard register reference occurs, return MAX_COST.  */

static int
approx_reg_cost (const_rtx x)
{
  int cost = 0;
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x))
	{
	  unsigned int regno = REGNO (x);
	  if (!CHEAP_REGNO (regno))
	    {
	      if (regno < FIRST_PSEUDO_REGISTER)
		{
		  if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
		    return MAX_COST;
		  cost += 2;
		}
	      else
		cost += 1;
	    }
	}
    }
  return cost;
}

/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally desirable.  */

static int
preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
	return 1;
      if (cost_b == MAX_COST)
	return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
	return 1;
      if (regcost_b == MAX_COST)
	return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}

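/* For example (illustrative only): an expression costed at MAX_COST is
   always considered worse than any other; two expressions of equal rtx
   cost are ordered by their register cost, so CHEAPER prefers the one
   that mentions fewer (or cheaper) registers.  */
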
/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (rtx x, machine_mode mode, enum rtx_code outer, int opno)
{
  scalar_int_mode int_mode, inner_mode;
  return ((GET_CODE (x) == SUBREG
	   && REG_P (SUBREG_REG (x))
	   && is_int_mode (mode, &int_mode)
	   && is_int_mode (GET_MODE (SUBREG_REG (x)), &inner_mode)
	   && GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (inner_mode)
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION_MODES_P (int_mode, inner_mode))
	  ? 0
	  : rtx_cost (x, mode, outer, opno, optimize_this_for_speed_p) * 2);
}

/* Initialize CSE_REG_INFO_TABLE.  */

static void
init_cse_reg_info (unsigned int nregs)
{
  /* Do we need to grow the table?  */
  if (nregs > cse_reg_info_table_size)
    {
      unsigned int new_size;

      if (cse_reg_info_table_size < 2048)
	{
	  /* Compute a new size that is a power of 2 and no smaller
	     than the larger of NREGS and 64.  */
	  new_size = (cse_reg_info_table_size
		      ? cse_reg_info_table_size : 64);

	  while (new_size < nregs)
	    new_size *= 2;
	}
      else
	{
	  /* If we need a big table, allocate just enough to hold
	     NREGS registers.  */
	  new_size = nregs;
	}

      /* Reallocate the table with NEW_SIZE entries.  */
      free (cse_reg_info_table);
      cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
      cse_reg_info_table_size = new_size;
      cse_reg_info_table_first_uninitialized = 0;
    }

  /* Do we have all of the first NREGS entries initialized?  */
  if (cse_reg_info_table_first_uninitialized < nregs)
    {
      unsigned int old_timestamp = cse_reg_info_timestamp - 1;
      unsigned int i;

      /* Put the old timestamp on newly allocated entries so that they
	 will all be considered out of date.  We do not touch those
	 entries beyond the first NREGS entries to be nice to the
	 virtual memory.  */
      for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
	cse_reg_info_table[i].timestamp = old_timestamp;

      cse_reg_info_table_first_uninitialized = nregs;
    }
}

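/* For instance (illustrative only): starting from an empty table, a
   request for 75 registers grows the table 64 -> 128; a later request
   for 300 grows it 128 -> 256 -> 512.  Once the table already has at
   least 2048 entries, it is instead sized exactly to the request.  */
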
/* Given REGNO, initialize the cse_reg_info entry for REGNO.  */

static void
get_cse_reg_info_1 (unsigned int regno)
{
  /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
     entry will be considered to have been initialized.  */
  cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;

  /* Initialize the rest of the entry.  */
  cse_reg_info_table[regno].reg_tick = 1;
  cse_reg_info_table[regno].reg_in_table = -1;
  cse_reg_info_table[regno].subreg_ticked = -1;
  cse_reg_info_table[regno].reg_qty = -regno - 1;
}

/* Find a cse_reg_info entry for REGNO.  */

static inline struct cse_reg_info *
get_cse_reg_info (unsigned int regno)
{
  struct cse_reg_info *p = &cse_reg_info_table[regno];

  /* If this entry has not been initialized, go ahead and initialize
     it.  */
  if (p->timestamp != cse_reg_info_timestamp)
    get_cse_reg_info_1 (regno);

  return p;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block (void)
{
  int i;

  next_qty = 0;

  /* Invalidate cse_reg_info_table.  */
  cse_reg_info_timestamp++;

  /* Clear out hash table state for this pass.  */
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = 0;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this entire hash chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }
}

/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (unsigned int reg, machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  gcc_assert (next_qty < max_qty);

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (unsigned int new_reg, unsigned int old_reg)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old_reg);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  gcc_assert (REGNO_QTY_VALID_P (old_reg));

  REG_QTY (new_reg) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new_reg >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new_reg) != NO_REGS)
      && ((new_reg < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new_reg))
	  || (new_reg >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || (bitmap_bit_p (cse_ebb_live_out, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_out, firstr))
		  || (bitmap_bit_p (cse_ebb_live_in, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_in, firstr))))))
    {
      reg_eqv_table[firstr].prev = new_reg;
      reg_eqv_table[new_reg].next = firstr;
      reg_eqv_table[new_reg].prev = -1;
      ent->first_reg = new_reg;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new_reg >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new_reg].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new_reg;
      else
	qty_table[q].last_reg = new_reg;
      reg_eqv_table[lastr].next = new_reg;
      reg_eqv_table[new_reg].prev = lastr;
    }
}

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (unsigned int reg)
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (! REGNO_QTY_VALID_P (reg))
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = -reg - 1;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = END_REGNO (x);
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	  SUBREG_TICKED (i) = -1;
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && REG_P (SUBREG_REG (x))
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
	     the last store to this register really stored into this
	     subreg, then remove the memory of this subreg.
	     Otherwise, remove any memory of the entire register and
	     all its subregs from the table.  */
	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || COMPARISON_P (x))
    {
      if (REG_P (XEXP (x, 0))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (REG_P (XEXP (x, 1))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (rtx x, struct table_elt *classp, int modified)
{
  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (REG_P (classp->exp)
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  unsigned c_regno = REGNO (classp->exp);

		  gcc_assert (REGNO_QTY_VALID_P (c_regno));

		  /* Suppose that 5 is hard reg and 100 and 101 are
		     pseudos.  Consider

		     (set (reg:si 100) (reg:si 5))
		     (set (reg:si 5) (reg:si 100))
		     (set (reg:di 101) (reg:di 5))

		     We would now set REG_QTY (101) = REG_QTY (5), but the
		     entry for 5 is in SImode.  When we use this later in
		     copy propagation, we get the register in wrong mode.  */
		  if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
		    continue;

		  make_regs_eqv (regno, c_regno);
		  return 1;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}

/* Compute upper and lower anchors for CST.  Also compute the offset of CST
   from these anchors/bases such that *_BASE + *_OFFS = CST.  Return false iff
   CST is equal to an anchor.  */

static bool
compute_const_anchors (rtx cst,
		       HOST_WIDE_INT *lower_base, HOST_WIDE_INT *lower_offs,
		       HOST_WIDE_INT *upper_base, HOST_WIDE_INT *upper_offs)
{
  HOST_WIDE_INT n = INTVAL (cst);

  *lower_base = n & ~(targetm.const_anchor - 1);
  if (*lower_base == n)
    return false;

  *upper_base =
    (n + (targetm.const_anchor - 1)) & ~(targetm.const_anchor - 1);
  *upper_offs = n - *upper_base;
  *lower_offs = n - *lower_base;
  return true;
}

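/* A worked example (illustrative only): with targetm.const_anchor == 256
   and CST == 621, the lower anchor is 512 with offset 109 and the upper
   anchor is 768 with offset -147, so 512 + 109 == 768 + (-147) == 621.  */
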
/* Insert the equivalence between ANCHOR and (REG + OFF) in mode MODE.  */

static void
insert_const_anchor (HOST_WIDE_INT anchor, rtx reg, HOST_WIDE_INT offs,
		     machine_mode mode)
{
  struct table_elt *elt;
  unsigned hash;
  rtx anchor_exp;
  rtx exp;

  anchor_exp = GEN_INT (anchor);
  hash = HASH (anchor_exp, mode);
  elt = lookup (anchor_exp, hash, mode);
  if (!elt)
    elt = insert (anchor_exp, NULL, hash, mode);

  exp = plus_constant (mode, reg, offs);
  /* REG has just been inserted and the hash codes recomputed.  */
  mention_regs (exp);
  hash = HASH (exp, mode);

  /* Use the cost of the register rather than the whole expression.  When
     looking up constant anchors we will further offset the corresponding
     expression therefore it does not make sense to prefer REGs over
     reg-immediate additions.  Prefer instead the oldest expression.  Also
     don't prefer pseudos over hard regs so that we derive constants in
     argument registers from other argument registers rather than from the
     original pseudo that was used to synthesize the constant.  */
  insert_with_costs (exp, elt, hash, mode, COST (reg, mode), 1);
}

/* The constant CST is equivalent to the register REG.  Create
   equivalences between the two anchors of CST and the corresponding
   register-offset expressions using REG.  */

static void
insert_const_anchors (rtx reg, rtx cst, machine_mode mode)
{
  HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;

  if (!compute_const_anchors (cst, &lower_base, &lower_offs,
			      &upper_base, &upper_offs))
    return;

  /* Ignore anchors of value 0.  Constants accessible from zero are
     simple.  */
  if (lower_base != 0)
    insert_const_anchor (lower_base, reg, -lower_offs, mode);

  if (upper_base != 0)
    insert_const_anchor (upper_base, reg, -upper_offs, mode);
}

/* We need to express ANCHOR_ELT->exp + OFFS.  Walk the equivalence list of
   ANCHOR_ELT and see if offsetting any of the entries by OFFS would create a
   valid expression.  Return the cheapest and oldest of such expressions.  In
   *OLD, return how old the resulting expression is compared to the other
   equivalent expressions.  */

static rtx
find_reg_offset_for_const (struct table_elt *anchor_elt, HOST_WIDE_INT offs,
			   unsigned *old)
{
  struct table_elt *elt;
  unsigned idx;
  struct table_elt *match_elt;
  rtx match;

  /* Find the cheapest and *oldest* expression to maximize the chance of
     reusing the same pseudo.  */

  match_elt = NULL;
  match = NULL_RTX;
  for (elt = anchor_elt->first_same_value, idx = 0;
       elt;
       elt = elt->next_same_value, idx++)
    {
      if (match_elt && CHEAPER (match_elt, elt))
	return match;

      if (REG_P (elt->exp)
	  || (GET_CODE (elt->exp) == PLUS
	      && REG_P (XEXP (elt->exp, 0))
	      && GET_CODE (XEXP (elt->exp, 1)) == CONST_INT))
	{
	  rtx x;

	  /* Ignore expressions that are no longer valid.  */
	  if (!REG_P (elt->exp) && !exp_equiv_p (elt->exp, elt->exp, 1, false))
	    continue;

	  x = plus_constant (GET_MODE (elt->exp), elt->exp, offs);
	  if (REG_P (x)
	      || (GET_CODE (x) == PLUS
		  && IN_RANGE (INTVAL (XEXP (x, 1)),
			       -targetm.const_anchor,
			       targetm.const_anchor - 1)))
	    {
	      match = x;
	      match_elt = elt;
	      *old = idx;
	    }
	}
    }

  return match;
}

/* Try to express the constant SRC_CONST using a register+offset expression
   derived from a constant anchor.  Return it if successful or NULL_RTX,
   otherwise.  */

static rtx
try_const_anchors (rtx src_const, machine_mode mode)
{
  struct table_elt *lower_elt, *upper_elt;
  HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
  rtx lower_anchor_rtx, upper_anchor_rtx;
  rtx lower_exp = NULL_RTX, upper_exp = NULL_RTX;
  unsigned lower_old, upper_old;

  /* CONST_INT is used for CC modes, but we should leave those alone.  */
  if (GET_MODE_CLASS (mode) == MODE_CC)
    return NULL_RTX;

  gcc_assert (SCALAR_INT_MODE_P (mode));
  if (!compute_const_anchors (src_const, &lower_base, &lower_offs,
			      &upper_base, &upper_offs))
    return NULL_RTX;

  lower_anchor_rtx = GEN_INT (lower_base);
  upper_anchor_rtx = GEN_INT (upper_base);
  lower_elt = lookup (lower_anchor_rtx, HASH (lower_anchor_rtx, mode), mode);
  upper_elt = lookup (upper_anchor_rtx, HASH (upper_anchor_rtx, mode), mode);

  if (lower_elt)
    lower_exp = find_reg_offset_for_const (lower_elt, lower_offs, &lower_old);
  if (upper_elt)
    upper_exp = find_reg_offset_for_const (upper_elt, upper_offs, &upper_old);

  if (!lower_exp)
    return upper_exp;
  if (!upper_exp)
    return lower_exp;

  /* Return the older expression.  */
  return (upper_old > lower_old ? upper_exp : lower_exp);
}

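/* For instance (illustrative only): if reg 100 is known to hold 500 and
   const_anchor is 256, the anchor 512 is recorded as (plus (reg 100)
   (const_int 12)); a later need for the constant 621 can then be met by
   (plus (reg 100) (const_int 121)) instead of loading 621 from scratch.  */
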
/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (struct table_elt *elt, unsigned int hash)
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Same as above, but X is a pseudo-register.  */

static void
remove_pseudo_from_table (rtx x, unsigned int hash)
{
  struct table_elt *elt;

  /* Because a pseudo-register can be referenced in more than one
     mode, we might have to remove more than one table entry.  */
  while ((elt = lookup_for_remove (x, hash, VOIDmode)))
    remove_from_table (elt, hash);
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (rtx x, unsigned int hash, machine_mode mode)
{
  struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && REG_P (x))
			    || exp_equiv_p (x, p->exp, !REG_P (x), false)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (rtx x, unsigned int hash, machine_mode mode)
{
  struct table_elt *p;

  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (REG_P (p->exp)
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode
	    && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (rtx x, enum rtx_code code)
{
  struct table_elt *p
    = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == code
	/* Make sure this is a valid entry in the table.  */
	&& exp_equiv_p (p->exp, p->exp, 1, false))
      return p->exp;

  return 0;
}

/* Insert X in the hash table, assuming HASH is its hash code and
   CLASSP is an element of the class it should go in (or 0 if a new
   class should be made).  COST is the cost of X and reg_cost is the
   cost of registers in X.  It is inserted at the proper position to
   keep the class in the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

static struct table_elt *
insert_with_costs (rtx x, struct table_elt *classp, unsigned int hash,
		   machine_mode mode, int cost, int reg_cost)
{
  struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));

  /* If X is a hard register, show it is being put in the table.  */
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    add_to_hard_reg_set (&hard_regs_in_table, GET_MODE (x), REGNO (x));

  /* Put an element for X into the right hash bucket.  */

  elt = free_element_chain;
  if (elt)
    free_element_chain = elt->next_same_hash;
  else
    elt = XNEW (struct table_elt);

  elt->exp = x;
  elt->canon_exp = NULL_RTX;
  elt->cost = cost;
  elt->regcost = reg_cost;
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  struct table_elt *p, *next;

	  for (p = classp;
	       (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next)
	    ;

	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;

	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update the qtys `const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && REG_P (classp->exp)
      && !REG_P (x))
    {
      int exp_q = REG_QTY (REGNO (classp->exp));
      struct qty_table_elem *exp_ent = &qty_table[exp_q];

      exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
      exp_ent->const_insn = this_insn;
    }

  else if (REG_P (x)
	   && classp
	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
	   && ! elt->is_const)
    {
      struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const && !REG_P (p->exp))
	    {
	      int x_q = REG_QTY (REGNO (x));
	      struct qty_table_elem *x_ent = &qty_table[x_q];

	      x_ent->const_rtx
		= gen_lowpart (GET_MODE (x), p->exp);
	      x_ent->const_insn = this_insn;
	      break;
	    }
	}
    }

  else if (REG_P (x)
	   && qty_table[REG_QTY (REGNO (x))].const_rtx
	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = SAFE_HASH (subexp, mode);
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}

/* Wrap insert_with_costs by passing the default costs.  */

static struct table_elt *
insert (rtx x, struct table_elt *classp, unsigned int hash,
	machine_mode mode)
{
  return insert_with_costs (x, classp, hash, mode,
			    COST (x, mode), approx_reg_cost (x));
}

/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
{
  struct table_elt *elt, *next, *new_elt;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned int hash;
      rtx exp = elt->exp;
      machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
	{
	  bool need_rehash = false;

	  hash_arg_in_memory = 0;
	  hash = HASH (exp, mode);

	  if (REG_P (exp))
	    {
	      need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
	      delete_reg_equiv (REGNO (exp));
	    }

	  if (REG_P (exp) && REGNO (exp) >= FIRST_PSEUDO_REGISTER)
	    remove_pseudo_from_table (exp, hash);
	  else
	    remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0) || need_rehash)
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new_elt = insert (exp, class1, hash, mode);
	  new_elt->in_memory = hash_arg_in_memory;
	  if (GET_CODE (exp) == ASM_OPERANDS && elt->cost == MAX_COST)
	    new_elt->cost = MAX_COST;
	}
    }
}

/* Flush the entire hash table.  */

static void
flush_hash_table (void)
{
  int i;
  struct table_elt *p;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = table[i])
      {
	/* Note that invalidate can remove elements
	   after P in the current hash chain.  */
	if (REG_P (p->exp))
	  invalidate (p->exp, VOIDmode);
	else
	  remove_from_table (p, i);
      }
}

/* Check whether an anti dependence exists between X and EXP.  MODE and
   ADDR are as for canon_anti_dependence.  */

static bool
check_dependence (const_rtx x, rtx exp, machine_mode mode, rtx addr)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (MEM_P (x) && canon_anti_dependence (x, true, exp, mode, addr))
	return true;
    }
  return false;
}

/* Remove from the hash table, or mark as invalid, all expressions whose
   values could be altered by storing in register X.  */

static void
invalidate_reg (rtx x)
{
  gcc_assert (GET_CODE (x) == REG);

  /* If X is a register, dependencies on its contents are recorded
     through the qty number mechanism.  Just change the qty number of
     the register, mark it as invalid for expressions that refer to it,
     and remove it itself.  */
  unsigned int regno = REGNO (x);
  unsigned int hash = HASH (x, GET_MODE (x));

  /* Remove REGNO from any quantity list it might be on and indicate
     that its value might have changed.  If it is a pseudo, remove its
     entry from the hash table.

     For a hard register, we do the first two actions above for any
     additional hard registers corresponding to X.  Then, if any of these
     registers are in the table, we must remove any REG entries that
     overlap these registers.  */

  delete_reg_equiv (regno);
  REG_TICK (regno)++;
  SUBREG_TICKED (regno) = -1;

  if (regno >= FIRST_PSEUDO_REGISTER)
    remove_pseudo_from_table (x, hash);
  else
    {
      HOST_WIDE_INT in_table = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
      unsigned int endregno = END_REGNO (x);
      unsigned int rn;
      struct table_elt *p, *next;

      CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

      for (rn = regno + 1; rn < endregno; rn++)
        {
          in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
          CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
          delete_reg_equiv (rn);
          REG_TICK (rn)++;
          SUBREG_TICKED (rn) = -1;
        }

      if (in_table)
        for (hash = 0; hash < HASH_SIZE; hash++)
          for (p = table[hash]; p; p = next)
            {
              next = p->next_same_hash;

              if (!REG_P (p->exp)
                  || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
                continue;

              unsigned int tregno = REGNO (p->exp);
              unsigned int tendregno = END_REGNO (p->exp);
              if (tendregno > regno && tregno < endregno)
                remove_from_table (p, hash);
            }
    }
}
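/* Example of the hard-register case above: on a target where a DImode value
   in (reg:DI 0) occupies hard registers 0 and 1, invalidating (reg:DI 0) must
   also drop any table entry whose REG overlaps register 1, such as an entry
   for (reg:SI 1).  (Hypothetical register layout, for illustration only.)  */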
/* Remove from the hash table, or mark as invalid, all expressions whose
   values could be altered by storing in X.  X is a register, a subreg, or
   a memory reference with nonvarying address (because, when a memory
   reference with a varying address is stored in, all memory references are
   removed by invalidate_memory so specific invalidation is superfluous).
   FULL_MODE, if not VOIDmode, indicates that this much should be
   invalidated instead of just the amount indicated by the mode of X.  This
   is only used for bitfield stores into memory.

   A nonvarying address may be just a register or just a symbol reference,
   or it may be either of those plus a numeric offset.  */

static void
invalidate (rtx x, machine_mode full_mode)
{
  int i;
  struct table_elt *p;
  rtx addr;

  switch (GET_CODE (x))
    {
    case REG:
      invalidate_reg (x);
      return;

    case SUBREG:
      invalidate (SUBREG_REG (x), VOIDmode);
      return;

    case PARALLEL:
      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
        invalidate (XVECEXP (x, 0, i), VOIDmode);
      return;

    case EXPR_LIST:
      /* This is part of a disjoint return value; extract the location in
         question ignoring the offset.  */
      invalidate (XEXP (x, 0), VOIDmode);
      return;

    case MEM:
      addr = canon_rtx (get_addr (XEXP (x, 0)));
      /* Calculate the canonical version of X here so that
         true_dependence doesn't generate new RTL for X on each call.  */
      x = canon_rtx (x);

      /* Remove all hash table elements that refer to overlapping pieces of
         memory.  */
      if (full_mode == VOIDmode)
        full_mode = GET_MODE (x);

      for (i = 0; i < HASH_SIZE; i++)
        {
          struct table_elt *next;

          for (p = table[i]; p; p = next)
            {
              next = p->next_same_hash;
              if (p->in_memory)
                {
                  /* Just canonicalize the expression once;
                     otherwise each time we call invalidate
                     true_dependence will canonicalize the
                     expression again.  */
                  if (!p->canon_exp)
                    p->canon_exp = canon_rtx (p->exp);
                  if (check_dependence (p->canon_exp, x, full_mode, addr))
                    remove_from_table (p, i);
                }
            }
        }
      return;

    default:
      gcc_unreachable ();
    }
}
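/* Example: invalidating the destination of
     (set (mem:SI (plus:SI (reg:SI fp) (const_int -4))) (reg:SI 66))
   walks every in_memory table entry and removes the ones whose canonical form
   may overlap that stack slot, as decided by check_dependence.  (Illustrative
   RTL only.)  */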
/* Invalidate DEST.  Used when DEST is not going to be added
   into the hash table for some reason, e.g. do_not_record
   flagged on it.  */

static void
invalidate_dest (rtx dest)
{
  if (REG_P (dest)
      || GET_CODE (dest) == SUBREG
      || MEM_P (dest))
    invalidate (dest, VOIDmode);
  else if (GET_CODE (dest) == STRICT_LOW_PART
           || GET_CODE (dest) == ZERO_EXTRACT)
    invalidate (XEXP (dest, 0), GET_MODE (dest));
}
/* Remove all expressions that refer to register REGNO,
   since they are already invalid, and we are about to
   mark that register valid again and don't want the old
   expressions to reappear as valid.  */

static void
remove_invalid_refs (unsigned int regno)
{
  unsigned int i;
  struct table_elt *p, *next;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
        next = p->next_same_hash;
        if (!REG_P (p->exp) && refers_to_regno_p (regno, p->exp))
          remove_from_table (p, i);
      }
}
/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
   and mode MODE.  */

static void
remove_invalid_subreg_refs (unsigned int regno, poly_uint64 offset,
                            machine_mode mode)
{
  unsigned int i;
  struct table_elt *p, *next;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
        rtx exp = p->exp;
        next = p->next_same_hash;

        if (!REG_P (exp)
            && (GET_CODE (exp) != SUBREG
                || !REG_P (SUBREG_REG (exp))
                || REGNO (SUBREG_REG (exp)) != regno
                || ranges_maybe_overlap_p (SUBREG_BYTE (exp),
                                           GET_MODE_SIZE (GET_MODE (exp)),
                                           offset, GET_MODE_SIZE (mode)))
            && refers_to_regno_p (regno, p->exp))
          remove_from_table (p, i);
      }
}
/* Recompute the hash codes of any valid entries in the hash table that
   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.

   This is called when we make a jump equivalence.  */

static void
rehash_using_reg (rtx x)
{
  unsigned int i;
  struct table_elt *p, *next;
  unsigned hash;

  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* If X is not a register or if the register is known not to be in any
     valid entries in the table, we have no work to do.  */

  if (!REG_P (x)
      || REG_IN_TABLE (REGNO (x)) < 0
      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
    return;

  /* Scan all hash chains looking for valid entries that mention X.
     If we find one and it is in the wrong hash chain, move it.  */

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
        next = p->next_same_hash;
        if (reg_mentioned_p (x, p->exp)
            && exp_equiv_p (p->exp, p->exp, 1, false)
            && i != (hash = SAFE_HASH (p->exp, p->mode)))
          {
            if (p->next_same_hash)
              p->next_same_hash->prev_same_hash = p->prev_same_hash;

            if (p->prev_same_hash)
              p->prev_same_hash->next_same_hash = p->next_same_hash;
            else
              table[i] = p->next_same_hash;

            p->next_same_hash = table[hash];
            p->prev_same_hash = 0;
            if (table[hash])
              table[hash]->prev_same_hash = p;
            table[hash] = p;
          }
      }
}
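/* Example: after record_jump_cond has just made (reg 66) equivalent to
   (const_int 0), an entry such as (plus (reg 66) (reg 70)) may now hash to a
   different bucket; the loop above unlinks it from its old chain and prepends
   it to the chain for its new hash code.  (Illustrative only.)  */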
/* Remove from the hash table any expression that is a call-clobbered
   register in INSN.  Also update their TICK values.  */

static void
invalidate_for_call (rtx_insn *insn)
{
  unsigned int regno;
  unsigned hash;
  struct table_elt *p, *next;
  int in_table = 0;
  hard_reg_set_iterator hrsi;

  /* Go through all the hard registers.  For each that might be clobbered
     in call insn INSN, remove the register from quantity chains and update
     reg_tick if defined.  Also see if any of these registers is currently
     in the table.

     ??? We could be more precise for partially-clobbered registers,
     and only invalidate values that actually occupy the clobbered part
     of the registers.  It doesn't seem worth the effort though, since
     we shouldn't see this situation much before RA.  Whatever choice
     we make here has to be consistent with the table walk below,
     so any change to this test will require a change there too.  */
  HARD_REG_SET callee_clobbers
    = insn_callee_abi (insn).full_and_partial_reg_clobbers ();
  EXECUTE_IF_SET_IN_HARD_REG_SET (callee_clobbers, 0, regno, hrsi)
    {
      delete_reg_equiv (regno);
      if (REG_TICK (regno) >= 0)
        {
          REG_TICK (regno)++;
          SUBREG_TICKED (regno) = -1;
        }
      in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
    }

  /* In the case where we have no call-clobbered hard registers in the
     table, we are done.  Otherwise, scan the table and remove any
     entry that overlaps a call-clobbered register.  */

  if (in_table)
    for (hash = 0; hash < HASH_SIZE; hash++)
      for (p = table[hash]; p; p = next)
        {
          next = p->next_same_hash;

          if (!REG_P (p->exp)
              || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
            continue;

          /* This must use the same test as above rather than the
             more accurate clobbers_reg_p.  */
          if (overlaps_hard_reg_set_p (callee_clobbers, GET_MODE (p->exp),
                                       REGNO (p->exp)))
            remove_from_table (p, hash);
        }
}
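/* Example: for a call whose ABI clobbers hard registers 0 through 3, the
   first loop bumps REG_TICK for each of them, and, if any of them was in the
   table, the second loop also removes wider entries such as (reg:DI 2) that
   merely overlap a clobbered register.  (Hypothetical ABI, for illustration
   only.)  */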
/* Given an expression X of type CONST,
   and ELT which is its table entry (or 0 if it
   is not in the hash table),
   return an alternate expression for X as a register plus integer.
   If none can be found, return 0.  */

static rtx
use_related_value (rtx x, struct table_elt *elt)
{
  struct table_elt *relt = 0;
  struct table_elt *p, *q;
  HOST_WIDE_INT offset;

  /* First, is there anything related known?
     If we have a table element, we can tell from that.
     Otherwise, must look it up.  */

  if (elt != 0 && elt->related_value != 0)
    relt = elt;
  else if (elt == 0 && GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      if (subexp != 0)
        relt = lookup (subexp,
                       SAFE_HASH (subexp, GET_MODE (subexp)),
                       GET_MODE (subexp));
    }

  if (relt == 0)
    return 0;

  /* Search all related table entries for one that has an
     equivalent register.  */

  p = relt;
  while (1)
    {
      /* This loop is strange in that it is executed in two different cases.
         The first is when X is already in the table.  Then it is searching
         the RELATED_VALUE list of X's class (RELT).  The second case is when
         X is not in the table.  Then RELT points to a class for the related
         value.

         Ensure that, whatever case we are in, we ignore classes that have
         the same value as X.  */

      if (rtx_equal_p (x, p->exp))
        q = 0;
      else
        for (q = p->first_same_value; q; q = q->next_same_value)
          if (REG_P (q->exp))
            break;

      if (q)
        break;

      p = p->related_value;

      /* We went all the way around, so there is nothing to be found.
         Alternatively, perhaps RELT was in the table for some other reason
         and it has no related values recorded.  */
      if (p == relt || p == 0)
        break;
    }

  if (q == 0)
    return 0;

  offset = (get_integer_term (x) - get_integer_term (p->exp));
  /* Note: OFFSET may be 0 if P->xexp and X are related by commutativity.  */
  return plus_constant (q->mode, q->exp, offset);
}
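/* Example: if X is (const (plus (symbol_ref "tbl") (const_int 16))) and the
   table already records that (const (plus (symbol_ref "tbl") (const_int 4)))
   is equivalent to (reg 70), the chain walk above finds that entry and the
   result is (plus (reg 70) (const_int 12)).  (Illustrative only; the symbol
   and register number are made up.)  */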
/* Hash a string.  Just add its bytes up.  */

static inline unsigned
hash_rtx_string (const char *ps)
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}
2224 /* Same as hash_rtx, but call CB on each rtx if it is not NULL.
2225 When the callback returns true, we continue with the new rtx. */
2228 hash_rtx_cb (const_rtx x
, machine_mode mode
,
2229 int *do_not_record_p
, int *hash_arg_in_memory_p
,
2230 bool have_reg_qty
, hash_rtx_callback_function cb
)
2236 machine_mode newmode
;
2239 /* Used to turn recursion into iteration. We can't rely on GCC's
2240 tail-recursion elimination since we need to keep accumulating values
2246 /* Invoke the callback first. */
2248 && ((*cb
) (x
, mode
, &newx
, &newmode
)))
2250 hash
+= hash_rtx_cb (newx
, newmode
, do_not_record_p
,
2251 hash_arg_in_memory_p
, have_reg_qty
, cb
);
2255 code
= GET_CODE (x
);
2260 unsigned int regno
= REGNO (x
);
2262 if (do_not_record_p
&& !reload_completed
)
2264 /* On some machines, we can't record any non-fixed hard register,
2265 because extending its life will cause reload problems. We
2266 consider ap, fp, sp, gp to be fixed for this purpose.
2268 We also consider CCmode registers to be fixed for this purpose;
2269 failure to do so leads to failure to simplify 0<100 type of
2272 On all machines, we can't record any global registers.
2273 Nor should we record any register that is in a small
2274 class, as defined by TARGET_CLASS_LIKELY_SPILLED_P. */
2277 if (regno
>= FIRST_PSEUDO_REGISTER
)
2279 else if (x
== frame_pointer_rtx
2280 || x
== hard_frame_pointer_rtx
2281 || x
== arg_pointer_rtx
2282 || x
== stack_pointer_rtx
2283 || x
== pic_offset_table_rtx
)
2285 else if (global_regs
[regno
])
2287 else if (fixed_regs
[regno
])
2289 else if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_CC
)
2291 else if (targetm
.small_register_classes_for_mode_p (GET_MODE (x
)))
2293 else if (targetm
.class_likely_spilled_p (REGNO_REG_CLASS (regno
)))
2300 *do_not_record_p
= 1;
2305 hash
+= ((unsigned int) REG
<< 7);
2306 hash
+= (have_reg_qty
? (unsigned) REG_QTY (regno
) : regno
);
2310 /* We handle SUBREG of a REG specially because the underlying
2311 reg changes its hash value with every value change; we don't
2312 want to have to forget unrelated subregs when one subreg changes. */
2315 if (REG_P (SUBREG_REG (x
)))
2317 hash
+= (((unsigned int) SUBREG
<< 7)
2318 + REGNO (SUBREG_REG (x
))
2319 + (constant_lower_bound (SUBREG_BYTE (x
))
2327 hash
+= (((unsigned int) CONST_INT
<< 7) + (unsigned int) mode
2328 + (unsigned int) INTVAL (x
));
2331 case CONST_WIDE_INT
:
2332 for (i
= 0; i
< CONST_WIDE_INT_NUNITS (x
); i
++)
2333 hash
+= CONST_WIDE_INT_ELT (x
, i
);
2336 case CONST_POLY_INT
:
2340 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
2341 h
.add_wide_int (CONST_POLY_INT_COEFFS (x
)[i
]);
2346 /* This is like the general case, except that it only counts
2347 the integers representing the constant. */
2348 hash
+= (unsigned int) code
+ (unsigned int) GET_MODE (x
);
2349 if (TARGET_SUPPORTS_WIDE_INT
== 0 && GET_MODE (x
) == VOIDmode
)
2350 hash
+= ((unsigned int) CONST_DOUBLE_LOW (x
)
2351 + (unsigned int) CONST_DOUBLE_HIGH (x
));
2353 hash
+= real_hash (CONST_DOUBLE_REAL_VALUE (x
));
2357 hash
+= (unsigned int) code
+ (unsigned int) GET_MODE (x
);
2358 hash
+= fixed_hash (CONST_FIXED_VALUE (x
));
2366 units
= const_vector_encoded_nelts (x
);
2368 for (i
= 0; i
< units
; ++i
)
2370 elt
= CONST_VECTOR_ENCODED_ELT (x
, i
);
2371 hash
+= hash_rtx_cb (elt
, GET_MODE (elt
),
2372 do_not_record_p
, hash_arg_in_memory_p
,
2379 /* Assume there is only one rtx object for any given label. */
2381 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2382 differences and differences between each stage's debugging dumps. */
2383 hash
+= (((unsigned int) LABEL_REF
<< 7)
2384 + CODE_LABEL_NUMBER (label_ref_label (x
)));
2389 /* Don't hash on the symbol's address to avoid bootstrap differences.
2390 Different hash values may cause expressions to be recorded in
2391 different orders and thus different registers to be used in the
2392 final assembler. This also avoids differences in the dump files
2393 between various stages. */
2395 const unsigned char *p
= (const unsigned char *) XSTR (x
, 0);
2398 h
+= (h
<< 7) + *p
++; /* ??? revisit */
2400 hash
+= ((unsigned int) SYMBOL_REF
<< 7) + h
;
2405 /* We don't record if marked volatile or if BLKmode since we don't
2406 know the size of the move. */
2407 if (do_not_record_p
&& (MEM_VOLATILE_P (x
) || GET_MODE (x
) == BLKmode
))
2409 *do_not_record_p
= 1;
2412 if (hash_arg_in_memory_p
&& !MEM_READONLY_P (x
))
2413 *hash_arg_in_memory_p
= 1;
2415 /* Now that we have already found this special case,
2416 might as well speed it up as much as possible. */
2417 hash
+= (unsigned) MEM
;
2422 /* A USE that mentions non-volatile memory needs special
2423 handling since the MEM may be BLKmode which normally
2424 prevents an entry from being made. Pure calls are
2425 marked by a USE which mentions BLKmode memory.
2426 See calls.c:emit_call_1. */
2427 if (MEM_P (XEXP (x
, 0))
2428 && ! MEM_VOLATILE_P (XEXP (x
, 0)))
2430 hash
+= (unsigned) USE
;
2433 if (hash_arg_in_memory_p
&& !MEM_READONLY_P (x
))
2434 *hash_arg_in_memory_p
= 1;
2436 /* Now that we have already found this special case,
2437 might as well speed it up as much as possible. */
2438 hash
+= (unsigned) MEM
;
2453 case UNSPEC_VOLATILE
:
2454 if (do_not_record_p
) {
2455 *do_not_record_p
= 1;
2463 if (do_not_record_p
&& MEM_VOLATILE_P (x
))
2465 *do_not_record_p
= 1;
2470 /* We don't want to take the filename and line into account. */
2471 hash
+= (unsigned) code
+ (unsigned) GET_MODE (x
)
2472 + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x
))
2473 + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x
))
2474 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x
);
2476 if (ASM_OPERANDS_INPUT_LENGTH (x
))
2478 for (i
= 1; i
< ASM_OPERANDS_INPUT_LENGTH (x
); i
++)
2480 hash
+= (hash_rtx_cb (ASM_OPERANDS_INPUT (x
, i
),
2481 GET_MODE (ASM_OPERANDS_INPUT (x
, i
)),
2482 do_not_record_p
, hash_arg_in_memory_p
,
2485 (ASM_OPERANDS_INPUT_CONSTRAINT (x
, i
)));
2488 hash
+= hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x
, 0));
2489 x
= ASM_OPERANDS_INPUT (x
, 0);
2490 mode
= GET_MODE (x
);
2502 i
= GET_RTX_LENGTH (code
) - 1;
2503 hash
+= (unsigned) code
+ (unsigned) GET_MODE (x
);
2504 fmt
= GET_RTX_FORMAT (code
);
2510 /* If we are about to do the last recursive call
2511 needed at this level, change it into iteration.
2512 This function is called enough to be worth it. */
2519 hash
+= hash_rtx_cb (XEXP (x
, i
), VOIDmode
, do_not_record_p
,
2520 hash_arg_in_memory_p
,
2525 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2526 hash
+= hash_rtx_cb (XVECEXP (x
, i
, j
), VOIDmode
, do_not_record_p
,
2527 hash_arg_in_memory_p
,
2532 hash
+= hash_rtx_string (XSTR (x
, i
));
2536 hash
+= (unsigned int) XINT (x
, i
);
2540 hash
+= constant_lower_bound (SUBREG_BYTE (x
));
/* Hash an rtx.  We are careful to make sure the value is never negative.
   Equivalent registers hash identically.
   MODE is used in hashing for CONST_INTs only;
   otherwise the mode of X is used.

   Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.

   If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
   a MEM rtx which does not have the MEM_READONLY_P flag set.

   Note that cse_insn knows that the hash code of a MEM expression
   is just (int) MEM plus the hash code of the address.  */

unsigned
hash_rtx (const_rtx x, machine_mode mode, int *do_not_record_p,
          int *hash_arg_in_memory_p, bool have_reg_qty)
{
  return hash_rtx_cb (x, mode, do_not_record_p,
                      hash_arg_in_memory_p, have_reg_qty, NULL);
}

/* Hash an rtx X for cse via hash_rtx.
   Stores 1 in do_not_record if any subexpression is volatile.
   Stores 1 in hash_arg_in_memory if X contains a mem rtx which
   does not have the MEM_READONLY_P flag set.  */

static inline unsigned
canon_hash (rtx x, machine_mode mode)
{
  return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
}

/* Like canon_hash but with no side effects, i.e. do_not_record
   and hash_arg_in_memory are not changed.  */

static inline unsigned
safe_hash (rtx x, machine_mode mode)
{
  int dummy_do_not_record;
  return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
}
2597 /* Return 1 iff X and Y would canonicalize into the same thing,
2598 without actually constructing the canonicalization of either one.
2599 If VALIDATE is nonzero,
2600 we assume X is an expression being processed from the rtl
2601 and Y was found in the hash table. We check register refs
2602 in Y for being marked as valid.
2604 If FOR_GCSE is true, we compare X and Y for equivalence for GCSE. */
2607 exp_equiv_p (const_rtx x
, const_rtx y
, int validate
, bool for_gcse
)
2613 /* Note: it is incorrect to assume an expression is equivalent to itself
2614 if VALIDATE is nonzero. */
2615 if (x
== y
&& !validate
)
2618 if (x
== 0 || y
== 0)
2621 code
= GET_CODE (x
);
2622 if (code
!= GET_CODE (y
))
2625 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2626 if (GET_MODE (x
) != GET_MODE (y
))
2629 /* MEMs referring to different address space are not equivalent. */
2630 if (code
== MEM
&& MEM_ADDR_SPACE (x
) != MEM_ADDR_SPACE (y
))
2641 return label_ref_label (x
) == label_ref_label (y
);
2644 return XSTR (x
, 0) == XSTR (y
, 0);
2648 return REGNO (x
) == REGNO (y
);
2651 unsigned int regno
= REGNO (y
);
2653 unsigned int endregno
= END_REGNO (y
);
2655 /* If the quantities are not the same, the expressions are not
2656 equivalent. If there are and we are not to validate, they
2657 are equivalent. Otherwise, ensure all regs are up-to-date. */
2659 if (REG_QTY (REGNO (x
)) != REG_QTY (regno
))
2665 for (i
= regno
; i
< endregno
; i
++)
2666 if (REG_IN_TABLE (i
) != REG_TICK (i
))
2675 /* A volatile mem should not be considered equivalent to any
2677 if (MEM_VOLATILE_P (x
) || MEM_VOLATILE_P (y
))
2680 /* Can't merge two expressions in different alias sets, since we
2681 can decide that the expression is transparent in a block when
2682 it isn't, due to it being set with the different alias set.
2684 Also, can't merge two expressions with different MEM_ATTRS.
2685 They could e.g. be two different entities allocated into the
2686 same space on the stack (see e.g. PR25130). In that case, the
2687 MEM addresses can be the same, even though the two MEMs are
2688 absolutely not equivalent.
2690 But because really all MEM attributes should be the same for
2691 equivalent MEMs, we just use the invariant that MEMs that have
2692 the same attributes share the same mem_attrs data structure. */
2693 if (!mem_attrs_eq_p (MEM_ATTRS (x
), MEM_ATTRS (y
)))
2696 /* If we are handling exceptions, we cannot consider two expressions
2697 with different trapping status as equivalent, because simple_mem
2698 might accept one and reject the other. */
2699 if (cfun
->can_throw_non_call_exceptions
2700 && (MEM_NOTRAP_P (x
) != MEM_NOTRAP_P (y
)))
2705 /* For commutative operations, check both orders. */
2713 return ((exp_equiv_p (XEXP (x
, 0), XEXP (y
, 0),
2715 && exp_equiv_p (XEXP (x
, 1), XEXP (y
, 1),
2716 validate
, for_gcse
))
2717 || (exp_equiv_p (XEXP (x
, 0), XEXP (y
, 1),
2719 && exp_equiv_p (XEXP (x
, 1), XEXP (y
, 0),
2720 validate
, for_gcse
)));
2723 /* We don't use the generic code below because we want to
2724 disregard filename and line numbers. */
2726 /* A volatile asm isn't equivalent to any other. */
2727 if (MEM_VOLATILE_P (x
) || MEM_VOLATILE_P (y
))
2730 if (GET_MODE (x
) != GET_MODE (y
)
2731 || strcmp (ASM_OPERANDS_TEMPLATE (x
), ASM_OPERANDS_TEMPLATE (y
))
2732 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x
),
2733 ASM_OPERANDS_OUTPUT_CONSTRAINT (y
))
2734 || ASM_OPERANDS_OUTPUT_IDX (x
) != ASM_OPERANDS_OUTPUT_IDX (y
)
2735 || ASM_OPERANDS_INPUT_LENGTH (x
) != ASM_OPERANDS_INPUT_LENGTH (y
))
2738 if (ASM_OPERANDS_INPUT_LENGTH (x
))
2740 for (i
= ASM_OPERANDS_INPUT_LENGTH (x
) - 1; i
>= 0; i
--)
2741 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x
, i
),
2742 ASM_OPERANDS_INPUT (y
, i
),
2744 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x
, i
),
2745 ASM_OPERANDS_INPUT_CONSTRAINT (y
, i
)))
2755 /* Compare the elements. If any pair of corresponding elements
2756 fail to match, return 0 for the whole thing. */
2758 fmt
= GET_RTX_FORMAT (code
);
2759 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2764 if (! exp_equiv_p (XEXP (x
, i
), XEXP (y
, i
),
2765 validate
, for_gcse
))
2770 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
2772 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2773 if (! exp_equiv_p (XVECEXP (x
, i
, j
), XVECEXP (y
, i
, j
),
2774 validate
, for_gcse
))
2779 if (strcmp (XSTR (x
, i
), XSTR (y
, i
)))
2784 if (XINT (x
, i
) != XINT (y
, i
))
2789 if (XWINT (x
, i
) != XWINT (y
, i
))
2794 if (maybe_ne (SUBREG_BYTE (x
), SUBREG_BYTE (y
)))
/* Subroutine of canon_reg.  Pass *XLOC through canon_reg, and validate
   the result if necessary.  INSN is as for canon_reg.  */

static void
validate_canon_reg (rtx *xloc, rtx_insn *insn)
{
  if (*xloc)
    {
      rtx new_rtx = canon_reg (*xloc, insn);

      /* If replacing pseudo with hard reg or vice versa, ensure the
         insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
      gcc_assert (insn && new_rtx);
      validate_change (insn, xloc, new_rtx, 1);
    }
}
/* Canonicalize an expression:
   replace each register reference inside it
   with the "oldest" equivalent register.

   If INSN is nonzero validate_change is used to ensure that INSN remains valid
   after we make our substitution.  The calls are made with IN_GROUP nonzero
   so apply_change_group must be called upon the outermost return from this
   function (unless INSN is zero).  The result of apply_change_group can
   generally be discarded since the changes we are making are optional.  */

static rtx
canon_reg (rtx x, rtx_insn *insn)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return x;

  code = GET_CODE (x);
  switch (code)
    {
    case PC:
    case CC0:
    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return x;

    case REG:
      {
        int first;
        int q;
        struct qty_table_elem *ent;

        /* Never replace a hard reg, because hard regs can appear
           in more than one machine mode, and we must preserve the mode
           of each occurrence.  Also, some hard regs appear in
           MEMs that are shared and mustn't be altered.  Don't try to
           replace any reg that maps to a reg of class NO_REGS.  */
        if (REGNO (x) < FIRST_PSEUDO_REGISTER
            || ! REGNO_QTY_VALID_P (REGNO (x)))
          return x;

        q = REG_QTY (REGNO (x));
        ent = &qty_table[q];
        first = ent->first_reg;
        return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
                : REGNO_REG_CLASS (first) == NO_REGS ? x
                : gen_rtx_REG (ent->mode, first));
      }

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      if (fmt[i] == 'e')
        validate_canon_reg (&XEXP (x, i), insn);
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          validate_canon_reg (&XVECEXP (x, i, j), insn);
    }

  return x;
}
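/* Example: if (reg 105) was earlier copied from (reg 68), so that both share
   a quantity whose first_reg is 68, canon_reg rewrites a use such as
     (plus:SI (reg:SI 105) (const_int 4))
   into (plus:SI (reg:SI 68) (const_int 4)), validating the change against
   INSN.  (Illustrative register numbers.)  */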
2902 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2903 operation (EQ, NE, GT, etc.), follow it back through the hash table and
2904 what values are being compared.
2906 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2907 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2908 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2909 compared to produce cc0.
2911 The return value is the comparison operator and is either the code of
2912 A or the code corresponding to the inverse of the comparison. */
2914 static enum rtx_code
2915 find_comparison_args (enum rtx_code code
, rtx
*parg1
, rtx
*parg2
,
2916 machine_mode
*pmode1
, machine_mode
*pmode2
)
2919 hash_set
<rtx
> *visited
= NULL
;
2920 /* Set nonzero when we find something of interest. */
2923 arg1
= *parg1
, arg2
= *parg2
;
2925 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2927 while (arg2
== CONST0_RTX (GET_MODE (arg1
)))
2929 int reverse_code
= 0;
2930 struct table_elt
*p
= 0;
2932 /* Remember state from previous iteration. */
2936 visited
= new hash_set
<rtx
>;
2941 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2942 On machines with CC0, this is the only case that can occur, since
2943 fold_rtx will return the COMPARE or item being compared with zero
2946 if (GET_CODE (arg1
) == COMPARE
&& arg2
== const0_rtx
)
2949 /* If ARG1 is a comparison operator and CODE is testing for
2950 STORE_FLAG_VALUE, get the inner arguments. */
2952 else if (COMPARISON_P (arg1
))
2954 #ifdef FLOAT_STORE_FLAG_VALUE
2955 REAL_VALUE_TYPE fsfv
;
2959 || (GET_MODE_CLASS (GET_MODE (arg1
)) == MODE_INT
2960 && code
== LT
&& STORE_FLAG_VALUE
== -1)
2961 #ifdef FLOAT_STORE_FLAG_VALUE
2962 || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1
))
2963 && (fsfv
= FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1
)),
2964 REAL_VALUE_NEGATIVE (fsfv
)))
2969 || (GET_MODE_CLASS (GET_MODE (arg1
)) == MODE_INT
2970 && code
== GE
&& STORE_FLAG_VALUE
== -1)
2971 #ifdef FLOAT_STORE_FLAG_VALUE
2972 || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1
))
2973 && (fsfv
= FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1
)),
2974 REAL_VALUE_NEGATIVE (fsfv
)))
2977 x
= arg1
, reverse_code
= 1;
2980 /* ??? We could also check for
2982 (ne (and (eq (...) (const_int 1))) (const_int 0))
2984 and related forms, but let's wait until we see them occurring. */
2987 /* Look up ARG1 in the hash table and see if it has an equivalence
2988 that lets us see what is being compared. */
2989 p
= lookup (arg1
, SAFE_HASH (arg1
, GET_MODE (arg1
)), GET_MODE (arg1
));
2992 p
= p
->first_same_value
;
2994 /* If what we compare is already known to be constant, that is as
2996 We need to break the loop in this case, because otherwise we
2997 can have an infinite loop when looking at a reg that is known
2998 to be a constant which is the same as a comparison of a reg
2999 against zero which appears later in the insn stream, which in
3000 turn is constant and the same as the comparison of the first reg
3006 for (; p
; p
= p
->next_same_value
)
3008 machine_mode inner_mode
= GET_MODE (p
->exp
);
3009 #ifdef FLOAT_STORE_FLAG_VALUE
3010 REAL_VALUE_TYPE fsfv
;
3013 /* If the entry isn't valid, skip it. */
3014 if (! exp_equiv_p (p
->exp
, p
->exp
, 1, false))
3017 /* If it's a comparison we've used before, skip it. */
3018 if (visited
&& visited
->contains (p
->exp
))
3021 if (GET_CODE (p
->exp
) == COMPARE
3022 /* Another possibility is that this machine has a compare insn
3023 that includes the comparison code. In that case, ARG1 would
3024 be equivalent to a comparison operation that would set ARG1 to
3025 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3026 ORIG_CODE is the actual comparison being done; if it is an EQ,
3027 we must reverse ORIG_CODE. On machine with a negative value
3028 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3031 && val_signbit_known_set_p (inner_mode
,
3033 #ifdef FLOAT_STORE_FLAG_VALUE
3035 && SCALAR_FLOAT_MODE_P (inner_mode
)
3036 && (fsfv
= FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1
)),
3037 REAL_VALUE_NEGATIVE (fsfv
)))
3040 && COMPARISON_P (p
->exp
)))
3045 else if ((code
== EQ
3047 && val_signbit_known_set_p (inner_mode
,
3049 #ifdef FLOAT_STORE_FLAG_VALUE
3051 && SCALAR_FLOAT_MODE_P (inner_mode
)
3052 && (fsfv
= FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1
)),
3053 REAL_VALUE_NEGATIVE (fsfv
)))
3056 && COMPARISON_P (p
->exp
))
3063 /* If this non-trapping address, e.g. fp + constant, the
3064 equivalent is a better operand since it may let us predict
3065 the value of the comparison. */
3066 else if (!rtx_addr_can_trap_p (p
->exp
))
3073 /* If we didn't find a useful equivalence for ARG1, we are done.
3074 Otherwise, set up for the next iteration. */
3078 /* If we need to reverse the comparison, make sure that is
3079 possible -- we can't necessarily infer the value of GE from LT
3080 with floating-point operands. */
3083 enum rtx_code reversed
= reversed_comparison_code (x
, NULL
);
3084 if (reversed
== UNKNOWN
)
3089 else if (COMPARISON_P (x
))
3090 code
= GET_CODE (x
);
3091 arg1
= XEXP (x
, 0), arg2
= XEXP (x
, 1);
3094 /* Return our results. Return the modes from before fold_rtx
3095 because fold_rtx might produce const_int, and then it's too late. */
3096 *pmode1
= GET_MODE (arg1
), *pmode2
= GET_MODE (arg2
);
3097 *parg1
= fold_rtx (arg1
, 0), *parg2
= fold_rtx (arg2
, 0);
3104 /* If X is a nontrivial arithmetic operation on an argument for which
3105 a constant value can be determined, return the result of operating
3106 on that value, as a constant. Otherwise, return X, possibly with
3107 one or more operands changed to a forward-propagated constant.
3109 If X is a register whose contents are known, we do NOT return
3110 those contents here; equiv_constant is called to perform that task.
3111 For SUBREGs and MEMs, we do that both here and in equiv_constant.
3113 INSN is the insn that we may be modifying. If it is 0, make a copy
3114 of X before modifying it. */
3117 fold_rtx (rtx x
, rtx_insn
*insn
)
3127 /* Operands of X. */
3128 /* Workaround -Wmaybe-uninitialized false positive during
3129 profiledbootstrap by initializing them. */
3130 rtx folded_arg0
= NULL_RTX
;
3131 rtx folded_arg1
= NULL_RTX
;
3133 /* Constant equivalents of first three operands of X;
3134 0 when no such equivalent is known. */
3139 /* The mode of the first operand of X. We need this for sign and zero
3141 machine_mode mode_arg0
;
3146 /* Try to perform some initial simplifications on X. */
3147 code
= GET_CODE (x
);
3152 /* The first operand of a SIGN/ZERO_EXTRACT has a different meaning
3153 than it would in other contexts. Basically its mode does not
3154 signify the size of the object read. That information is carried
3155 by size operand. If we happen to have a MEM of the appropriate
3156 mode in our tables with a constant value we could simplify the
3157 extraction incorrectly if we allowed substitution of that value
3161 if ((new_rtx
= equiv_constant (x
)) != NULL_RTX
)
3171 /* No use simplifying an EXPR_LIST
3172 since they are used only for lists of args
3173 in a function call's REG_EQUAL note. */
3178 return prev_insn_cc0
;
3183 for (i
= ASM_OPERANDS_INPUT_LENGTH (x
) - 1; i
>= 0; i
--)
3184 validate_change (insn
, &ASM_OPERANDS_INPUT (x
, i
),
3185 fold_rtx (ASM_OPERANDS_INPUT (x
, i
), insn
), 0);
3190 if (NO_FUNCTION_CSE
&& CONSTANT_P (XEXP (XEXP (x
, 0), 0)))
3194 /* Anything else goes through the loop below. */
3199 mode
= GET_MODE (x
);
3203 mode_arg0
= VOIDmode
;
3205 /* Try folding our operands.
3206 Then see which ones have constant values known. */
3208 fmt
= GET_RTX_FORMAT (code
);
3209 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3212 rtx folded_arg
= XEXP (x
, i
), const_arg
;
3213 machine_mode mode_arg
= GET_MODE (folded_arg
);
3215 switch (GET_CODE (folded_arg
))
3220 const_arg
= equiv_constant (folded_arg
);
3227 const_arg
= folded_arg
;
3231 /* The cc0-user and cc0-setter may be in different blocks if
3232 the cc0-setter potentially traps. In that case PREV_INSN_CC0
3233 will have been cleared as we exited the block with the
3236 While we could potentially track cc0 in this case, it just
3237 doesn't seem to be worth it given that cc0 targets are not
3238 terribly common or important these days and trapping math
3239 is rarely used. The combination of those two conditions
3240 necessary to trip this situation is exceedingly rare in the
3244 const_arg
= NULL_RTX
;
3248 folded_arg
= prev_insn_cc0
;
3249 mode_arg
= prev_insn_cc0_mode
;
3250 const_arg
= equiv_constant (folded_arg
);
3255 folded_arg
= fold_rtx (folded_arg
, insn
);
3256 const_arg
= equiv_constant (folded_arg
);
3260 /* For the first three operands, see if the operand
3261 is constant or equivalent to a constant. */
3265 folded_arg0
= folded_arg
;
3266 const_arg0
= const_arg
;
3267 mode_arg0
= mode_arg
;
3270 folded_arg1
= folded_arg
;
3271 const_arg1
= const_arg
;
3274 const_arg2
= const_arg
;
3278 /* Pick the least expensive of the argument and an equivalent constant
3281 && const_arg
!= folded_arg
3282 && (COST_IN (const_arg
, mode_arg
, code
, i
)
3283 <= COST_IN (folded_arg
, mode_arg
, code
, i
))
3285 /* It's not safe to substitute the operand of a conversion
3286 operator with a constant, as the conversion's identity
3287 depends upon the mode of its operand. This optimization
3288 is handled by the call to simplify_unary_operation. */
3289 && (GET_RTX_CLASS (code
) != RTX_UNARY
3290 || GET_MODE (const_arg
) == mode_arg0
3291 || (code
!= ZERO_EXTEND
3292 && code
!= SIGN_EXTEND
3294 && code
!= FLOAT_TRUNCATE
3295 && code
!= FLOAT_EXTEND
3298 && code
!= UNSIGNED_FLOAT
3299 && code
!= UNSIGNED_FIX
)))
3300 folded_arg
= const_arg
;
3302 if (folded_arg
== XEXP (x
, i
))
3305 if (insn
== NULL_RTX
&& !changed
)
3308 validate_unshare_change (insn
, &XEXP (x
, i
), folded_arg
, 1);
3313 /* Canonicalize X if necessary, and keep const_argN and folded_argN
3314 consistent with the order in X. */
3315 if (canonicalize_change_group (insn
, x
))
3317 std::swap (const_arg0
, const_arg1
);
3318 std::swap (folded_arg0
, folded_arg1
);
3321 apply_change_group ();
3324 /* If X is an arithmetic operation, see if we can simplify it. */
3326 switch (GET_RTX_CLASS (code
))
3330 /* We can't simplify extension ops unless we know the
3332 if ((code
== ZERO_EXTEND
|| code
== SIGN_EXTEND
)
3333 && mode_arg0
== VOIDmode
)
3336 new_rtx
= simplify_unary_operation (code
, mode
,
3337 const_arg0
? const_arg0
: folded_arg0
,
3343 case RTX_COMM_COMPARE
:
3344 /* See what items are actually being compared and set FOLDED_ARG[01]
3345 to those values and CODE to the actual comparison code. If any are
3346 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3347 do anything if both operands are already known to be constant. */
3349 /* ??? Vector mode comparisons are not supported yet. */
3350 if (VECTOR_MODE_P (mode
))
3353 if (const_arg0
== 0 || const_arg1
== 0)
3355 struct table_elt
*p0
, *p1
;
3356 rtx true_rtx
, false_rtx
;
3357 machine_mode mode_arg1
;
3359 if (SCALAR_FLOAT_MODE_P (mode
))
3361 #ifdef FLOAT_STORE_FLAG_VALUE
3362 true_rtx
= (const_double_from_real_value
3363 (FLOAT_STORE_FLAG_VALUE (mode
), mode
));
3365 true_rtx
= NULL_RTX
;
3367 false_rtx
= CONST0_RTX (mode
);
3371 true_rtx
= const_true_rtx
;
3372 false_rtx
= const0_rtx
;
3375 code
= find_comparison_args (code
, &folded_arg0
, &folded_arg1
,
3376 &mode_arg0
, &mode_arg1
);
3378 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3379 what kinds of things are being compared, so we can't do
3380 anything with this comparison. */
3382 if (mode_arg0
== VOIDmode
|| GET_MODE_CLASS (mode_arg0
) == MODE_CC
)
3385 const_arg0
= equiv_constant (folded_arg0
);
3386 const_arg1
= equiv_constant (folded_arg1
);
3388 /* If we do not now have two constants being compared, see
3389 if we can nevertheless deduce some things about the
3391 if (const_arg0
== 0 || const_arg1
== 0)
3393 if (const_arg1
!= NULL
)
3395 rtx cheapest_simplification
;
3398 struct table_elt
*p
;
3400 /* See if we can find an equivalent of folded_arg0
3401 that gets us a cheaper expression, possibly a
3402 constant through simplifications. */
3403 p
= lookup (folded_arg0
, SAFE_HASH (folded_arg0
, mode_arg0
),
3408 cheapest_simplification
= x
;
3409 cheapest_cost
= COST (x
, mode
);
3411 for (p
= p
->first_same_value
; p
!= NULL
; p
= p
->next_same_value
)
3415 /* If the entry isn't valid, skip it. */
3416 if (! exp_equiv_p (p
->exp
, p
->exp
, 1, false))
3419 /* Try to simplify using this equivalence. */
3421 = simplify_relational_operation (code
, mode
,
3426 if (simp_result
== NULL
)
3429 cost
= COST (simp_result
, mode
);
3430 if (cost
< cheapest_cost
)
3432 cheapest_cost
= cost
;
3433 cheapest_simplification
= simp_result
;
3437 /* If we have a cheaper expression now, use that
3438 and try folding it further, from the top. */
3439 if (cheapest_simplification
!= x
)
3440 return fold_rtx (copy_rtx (cheapest_simplification
),
3445 /* See if the two operands are the same. */
3447 if ((REG_P (folded_arg0
)
3448 && REG_P (folded_arg1
)
3449 && (REG_QTY (REGNO (folded_arg0
))
3450 == REG_QTY (REGNO (folded_arg1
))))
3451 || ((p0
= lookup (folded_arg0
,
3452 SAFE_HASH (folded_arg0
, mode_arg0
),
3454 && (p1
= lookup (folded_arg1
,
3455 SAFE_HASH (folded_arg1
, mode_arg0
),
3457 && p0
->first_same_value
== p1
->first_same_value
))
3458 folded_arg1
= folded_arg0
;
3460 /* If FOLDED_ARG0 is a register, see if the comparison we are
3461 doing now is either the same as we did before or the reverse
3462 (we only check the reverse if not floating-point). */
3463 else if (REG_P (folded_arg0
))
3465 int qty
= REG_QTY (REGNO (folded_arg0
));
3467 if (REGNO_QTY_VALID_P (REGNO (folded_arg0
)))
3469 struct qty_table_elem
*ent
= &qty_table
[qty
];
3471 if ((comparison_dominates_p (ent
->comparison_code
, code
)
3472 || (! FLOAT_MODE_P (mode_arg0
)
3473 && comparison_dominates_p (ent
->comparison_code
,
3474 reverse_condition (code
))))
3475 && (rtx_equal_p (ent
->comparison_const
, folded_arg1
)
3477 && rtx_equal_p (ent
->comparison_const
,
3479 || (REG_P (folded_arg1
)
3480 && (REG_QTY (REGNO (folded_arg1
)) == ent
->comparison_qty
))))
3482 if (comparison_dominates_p (ent
->comparison_code
, code
))
3497 /* If we are comparing against zero, see if the first operand is
3498 equivalent to an IOR with a constant. If so, we may be able to
3499 determine the result of this comparison. */
3500 if (const_arg1
== const0_rtx
&& !const_arg0
)
3502 rtx y
= lookup_as_function (folded_arg0
, IOR
);
3506 && (inner_const
= equiv_constant (XEXP (y
, 1))) != 0
3507 && CONST_INT_P (inner_const
)
3508 && INTVAL (inner_const
) != 0)
3509 folded_arg0
= gen_rtx_IOR (mode_arg0
, XEXP (y
, 0), inner_const
);
3513 rtx op0
= const_arg0
? const_arg0
: copy_rtx (folded_arg0
);
3514 rtx op1
= const_arg1
? const_arg1
: copy_rtx (folded_arg1
);
3515 new_rtx
= simplify_relational_operation (code
, mode
, mode_arg0
,
3521 case RTX_COMM_ARITH
:
3525 /* If the second operand is a LABEL_REF, see if the first is a MINUS
3526 with that LABEL_REF as its second operand. If so, the result is
3527 the first operand of that MINUS. This handles switches with an
3528 ADDR_DIFF_VEC table. */
3529 if (const_arg1
&& GET_CODE (const_arg1
) == LABEL_REF
)
3532 = GET_CODE (folded_arg0
) == MINUS
? folded_arg0
3533 : lookup_as_function (folded_arg0
, MINUS
);
3535 if (y
!= 0 && GET_CODE (XEXP (y
, 1)) == LABEL_REF
3536 && label_ref_label (XEXP (y
, 1)) == label_ref_label (const_arg1
))
3539 /* Now try for a CONST of a MINUS like the above. */
3540 if ((y
= (GET_CODE (folded_arg0
) == CONST
? folded_arg0
3541 : lookup_as_function (folded_arg0
, CONST
))) != 0
3542 && GET_CODE (XEXP (y
, 0)) == MINUS
3543 && GET_CODE (XEXP (XEXP (y
, 0), 1)) == LABEL_REF
3544 && label_ref_label (XEXP (XEXP (y
, 0), 1)) == label_ref_label (const_arg1
))
3545 return XEXP (XEXP (y
, 0), 0);
3548 /* Likewise if the operands are in the other order. */
3549 if (const_arg0
&& GET_CODE (const_arg0
) == LABEL_REF
)
3552 = GET_CODE (folded_arg1
) == MINUS
? folded_arg1
3553 : lookup_as_function (folded_arg1
, MINUS
);
3555 if (y
!= 0 && GET_CODE (XEXP (y
, 1)) == LABEL_REF
3556 && label_ref_label (XEXP (y
, 1)) == label_ref_label (const_arg0
))
3559 /* Now try for a CONST of a MINUS like the above. */
3560 if ((y
= (GET_CODE (folded_arg1
) == CONST
? folded_arg1
3561 : lookup_as_function (folded_arg1
, CONST
))) != 0
3562 && GET_CODE (XEXP (y
, 0)) == MINUS
3563 && GET_CODE (XEXP (XEXP (y
, 0), 1)) == LABEL_REF
3564 && label_ref_label (XEXP (XEXP (y
, 0), 1)) == label_ref_label (const_arg0
))
3565 return XEXP (XEXP (y
, 0), 0);
3568 /* If second operand is a register equivalent to a negative
3569 CONST_INT, see if we can find a register equivalent to the
3570 positive constant. Make a MINUS if so. Don't do this for
3571 a non-negative constant since we might then alternate between
3572 choosing positive and negative constants. Having the positive
3573 constant previously-used is the more common case. Be sure
3574 the resulting constant is non-negative; if const_arg1 were
3575 the smallest negative number this would overflow: depending
3576 on the mode, this would either just be the same value (and
3577 hence not save anything) or be incorrect. */
3578 if (const_arg1
!= 0 && CONST_INT_P (const_arg1
)
3579 && INTVAL (const_arg1
) < 0
3580 /* This used to test
3582 -INTVAL (const_arg1) >= 0
3584 But The Sun V5.0 compilers mis-compiled that test. So
3585 instead we test for the problematic value in a more direct
3586 manner and hope the Sun compilers get it correct. */
3587 && INTVAL (const_arg1
) !=
3588 (HOST_WIDE_INT_1
<< (HOST_BITS_PER_WIDE_INT
- 1))
3589 && REG_P (folded_arg1
))
3591 rtx new_const
= GEN_INT (-INTVAL (const_arg1
));
3593 = lookup (new_const
, SAFE_HASH (new_const
, mode
), mode
);
3596 for (p
= p
->first_same_value
; p
; p
= p
->next_same_value
)
3598 return simplify_gen_binary (MINUS
, mode
, folded_arg0
,
3599 canon_reg (p
->exp
, NULL
));
3604 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
3605 If so, produce (PLUS Z C2-C). */
3606 if (const_arg1
!= 0 && poly_int_rtx_p (const_arg1
, &xval
))
3608 rtx y
= lookup_as_function (XEXP (x
, 0), PLUS
);
3609 if (y
&& poly_int_rtx_p (XEXP (y
, 1)))
3610 return fold_rtx (plus_constant (mode
, copy_rtx (y
), -xval
),
3617 case SMIN
: case SMAX
: case UMIN
: case UMAX
:
3618 case IOR
: case AND
: case XOR
:
3620 case ASHIFT
: case LSHIFTRT
: case ASHIFTRT
:
3621 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
3622 is known to be of similar form, we may be able to replace the
3623 operation with a combined operation. This may eliminate the
3624 intermediate operation if every use is simplified in this way.
3625 Note that the similar optimization done by combine.c only works
3626 if the intermediate operation's result has only one reference. */
3628 if (REG_P (folded_arg0
)
3629 && const_arg1
&& CONST_INT_P (const_arg1
))
3632 = (code
== ASHIFT
|| code
== ASHIFTRT
|| code
== LSHIFTRT
);
3633 rtx y
, inner_const
, new_const
;
3634 rtx canon_const_arg1
= const_arg1
;
3635 enum rtx_code associate_code
;
3638 && (INTVAL (const_arg1
) >= GET_MODE_UNIT_PRECISION (mode
)
3639 || INTVAL (const_arg1
) < 0))
3641 if (SHIFT_COUNT_TRUNCATED
)
3642 canon_const_arg1
= gen_int_shift_amount
3643 (mode
, (INTVAL (const_arg1
)
3644 & (GET_MODE_UNIT_BITSIZE (mode
) - 1)));
3649 y
= lookup_as_function (folded_arg0
, code
);
3653 /* If we have compiled a statement like
3654 "if (x == (x & mask1))", and now are looking at
3655 "x & mask2", we will have a case where the first operand
3656 of Y is the same as our first operand. Unless we detect
3657 this case, an infinite loop will result. */
3658 if (XEXP (y
, 0) == folded_arg0
)
3661 inner_const
= equiv_constant (fold_rtx (XEXP (y
, 1), 0));
3662 if (!inner_const
|| !CONST_INT_P (inner_const
))
3665 /* Don't associate these operations if they are a PLUS with the
3666 same constant and it is a power of two. These might be doable
3667 with a pre- or post-increment. Similarly for two subtracts of
3668 identical powers of two with post decrement. */
3670 if (code
== PLUS
&& const_arg1
== inner_const
3671 && ((HAVE_PRE_INCREMENT
3672 && pow2p_hwi (INTVAL (const_arg1
)))
3673 || (HAVE_POST_INCREMENT
3674 && pow2p_hwi (INTVAL (const_arg1
)))
3675 || (HAVE_PRE_DECREMENT
3676 && pow2p_hwi (- INTVAL (const_arg1
)))
3677 || (HAVE_POST_DECREMENT
3678 && pow2p_hwi (- INTVAL (const_arg1
)))))
3681 /* ??? Vector mode shifts by scalar
3682 shift operand are not supported yet. */
3683 if (is_shift
&& VECTOR_MODE_P (mode
))
3687 && (INTVAL (inner_const
) >= GET_MODE_UNIT_PRECISION (mode
)
3688 || INTVAL (inner_const
) < 0))
3690 if (SHIFT_COUNT_TRUNCATED
)
3691 inner_const
= gen_int_shift_amount
3692 (mode
, (INTVAL (inner_const
)
3693 & (GET_MODE_UNIT_BITSIZE (mode
) - 1)));
3698 /* Compute the code used to compose the constants. For example,
3699 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
3701 associate_code
= (is_shift
|| code
== MINUS
? PLUS
: code
);
3703 new_const
= simplify_binary_operation (associate_code
, mode
,
3710 /* If we are associating shift operations, don't let this
3711 produce a shift of the size of the object or larger.
3712 This could occur when we follow a sign-extend by a right
3713 shift on a machine that does a sign-extend as a pair
3717 && CONST_INT_P (new_const
)
3718 && INTVAL (new_const
) >= GET_MODE_UNIT_PRECISION (mode
))
3720 /* As an exception, we can turn an ASHIFTRT of this
3721 form into a shift of the number of bits - 1. */
3722 if (code
== ASHIFTRT
)
3723 new_const
= gen_int_shift_amount
3724 (mode
, GET_MODE_UNIT_BITSIZE (mode
) - 1);
3725 else if (!side_effects_p (XEXP (y
, 0)))
3726 return CONST0_RTX (mode
);
3731 y
= copy_rtx (XEXP (y
, 0));
3733 /* If Y contains our first operand (the most common way this
3734 can happen is if Y is a MEM), we would do into an infinite
3735 loop if we tried to fold it. So don't in that case. */
3737 if (! reg_mentioned_p (folded_arg0
, y
))
3738 y
= fold_rtx (y
, insn
);
3740 return simplify_gen_binary (code
, mode
, y
, new_const
);
3744 case DIV
: case UDIV
:
3745 /* ??? The associative optimization performed immediately above is
3746 also possible for DIV and UDIV using associate_code of MULT.
3747 However, we would need extra code to verify that the
3748 multiplication does not overflow, that is, there is no overflow
3749 in the calculation of new_const. */
3756 new_rtx
= simplify_binary_operation (code
, mode
,
3757 const_arg0
? const_arg0
: folded_arg0
,
3758 const_arg1
? const_arg1
: folded_arg1
);
3762 /* (lo_sum (high X) X) is simply X. */
3763 if (code
== LO_SUM
&& const_arg0
!= 0
3764 && GET_CODE (const_arg0
) == HIGH
3765 && rtx_equal_p (XEXP (const_arg0
, 0), const_arg1
))
3770 case RTX_BITFIELD_OPS
:
3771 new_rtx
= simplify_ternary_operation (code
, mode
, mode_arg0
,
3772 const_arg0
? const_arg0
: folded_arg0
,
3773 const_arg1
? const_arg1
: folded_arg1
,
3774 const_arg2
? const_arg2
: XEXP (x
, 2));
3781 return new_rtx
? new_rtx
: x
;
3784 /* Return a constant value currently equivalent to X.
3785 Return 0 if we don't know one. */
3788 equiv_constant (rtx x
)
3791 && REGNO_QTY_VALID_P (REGNO (x
)))
3793 int x_q
= REG_QTY (REGNO (x
));
3794 struct qty_table_elem
*x_ent
= &qty_table
[x_q
];
3796 if (x_ent
->const_rtx
)
3797 x
= gen_lowpart (GET_MODE (x
), x_ent
->const_rtx
);
3800 if (x
== 0 || CONSTANT_P (x
))
3803 if (GET_CODE (x
) == SUBREG
)
3805 machine_mode mode
= GET_MODE (x
);
3806 machine_mode imode
= GET_MODE (SUBREG_REG (x
));
3809 /* See if we previously assigned a constant value to this SUBREG. */
3810 if ((new_rtx
= lookup_as_function (x
, CONST_INT
)) != 0
3811 || (new_rtx
= lookup_as_function (x
, CONST_WIDE_INT
)) != 0
3812 || (NUM_POLY_INT_COEFFS
> 1
3813 && (new_rtx
= lookup_as_function (x
, CONST_POLY_INT
)) != 0)
3814 || (new_rtx
= lookup_as_function (x
, CONST_DOUBLE
)) != 0
3815 || (new_rtx
= lookup_as_function (x
, CONST_FIXED
)) != 0)
3818 /* If we didn't and if doing so makes sense, see if we previously
3819 assigned a constant value to the enclosing word mode SUBREG. */
3820 if (known_lt (GET_MODE_SIZE (mode
), UNITS_PER_WORD
)
3821 && known_lt (UNITS_PER_WORD
, GET_MODE_SIZE (imode
)))
3823 poly_int64 byte
= (SUBREG_BYTE (x
)
3824 - subreg_lowpart_offset (mode
, word_mode
));
3825 if (known_ge (byte
, 0) && multiple_p (byte
, UNITS_PER_WORD
))
3827 rtx y
= gen_rtx_SUBREG (word_mode
, SUBREG_REG (x
), byte
);
3828 new_rtx
= lookup_as_function (y
, CONST_INT
);
3830 return gen_lowpart (mode
, new_rtx
);
3834 /* Otherwise see if we already have a constant for the inner REG,
3835 and if that is enough to calculate an equivalent constant for
3836 the subreg. Note that the upper bits of paradoxical subregs
3837 are undefined, so they cannot be said to equal anything. */
3838 if (REG_P (SUBREG_REG (x
))
3839 && !paradoxical_subreg_p (x
)
3840 && (new_rtx
= equiv_constant (SUBREG_REG (x
))) != 0)
3841 return simplify_subreg (mode
, new_rtx
, imode
, SUBREG_BYTE (x
));
3846 /* If X is a MEM, see if it is a constant-pool reference, or look it up in
3847 the hash table in case its value was seen before. */
3851 struct table_elt
*elt
;
3853 x
= avoid_constant_pool_reference (x
);
3857 elt
= lookup (x
, SAFE_HASH (x
, GET_MODE (x
)), GET_MODE (x
));
3861 for (elt
= elt
->first_same_value
; elt
; elt
= elt
->next_same_value
)
3862 if (elt
->is_const
&& CONSTANT_P (elt
->exp
))
/* Given INSN, a jump insn, TAKEN indicates if we are following the
   "taken" branch.

   In certain cases, this can cause us to add an equivalence.  For example,
   if we are following the taken case of
        if (i == 2)
   we can add the fact that `i' and '2' are now equivalent.

   In any case, we can record that this comparison was passed.  If the same
   comparison is seen later, we will know its value.  */

static void
record_jump_equiv (rtx_insn *insn, bool taken)
{
  int cond_known_true;
  rtx op0, op1;
  rtx set;
  machine_mode mode, mode0, mode1;
  int reversed_nonequality = 0;
  enum rtx_code code;

  /* Ensure this is the right kind of insn.  */
  gcc_assert (any_condjump_p (insn));

  set = pc_set (insn);

  /* See if this jump condition is known true or false.  */
  if (taken)
    cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
  else
    cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);

  /* Get the type of comparison being done and the operands being compared.
     If we had to reverse a non-equality condition, record that fact so we
     know that it isn't valid for floating-point.  */
  code = GET_CODE (XEXP (SET_SRC (set), 0));
  op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
  op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);

  /* On a cc0 target the cc0-setter and cc0-user may end up in different
     blocks.  When that happens the tracking of the cc0-setter via
     PREV_INSN_CC0 is spoiled.  That means that fold_rtx may return
     NULL_RTX.  In those cases, there's nothing to record.  */
  if (op0 == NULL_RTX || op1 == NULL_RTX)
    return;

  code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
  if (! cond_known_true)
    {
      code = reversed_comparison_code_parts (code, op0, op1, insn);

      /* Don't remember if we can't find the inverse.  */
      if (code == UNKNOWN)
        return;

      reversed_nonequality = (code != EQ && code != NE);
    }

  /* The mode is the mode of the non-constant.  */
  mode = mode0;
  if (mode1 != VOIDmode)
    mode = mode1;

  record_jump_cond (code, mode, op0, op1, reversed_nonequality);
}
/* Yet another form of subreg creation.  In this case, we want something in
   MODE, and we should assume OP has MODE iff it is naturally modeless.  */

static rtx
record_jump_cond_subreg (machine_mode mode, rtx op)
{
  machine_mode op_mode = GET_MODE (op);
  if (op_mode == mode || op_mode == VOIDmode)
    return op;
  return lowpart_subreg (mode, op, op_mode);
}
/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
   REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
   Make any useful entries we can with that information.  Called from
   above function and called recursively.  */

static void
record_jump_cond (enum rtx_code code, machine_mode mode, rtx op0,
                  rtx op1, int reversed_nonequality)
{
  unsigned op0_hash, op1_hash;
  int op0_in_memory, op1_in_memory;
  struct table_elt *op0_elt, *op1_elt;

  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
     we know that they are also equal in the smaller mode (this is also
     true for all smaller modes whether or not there is a SUBREG, but
     is not worth testing for with no SUBREG).  */

  /* Note that GET_MODE (op0) may not equal MODE.  */
  if (code == EQ && paradoxical_subreg_p (op0))
    {
      machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = record_jump_cond_subreg (inner_mode, op1);
      if (tem)
        record_jump_cond (code, mode, SUBREG_REG (op0), tem,
                          reversed_nonequality);
    }

  if (code == EQ && paradoxical_subreg_p (op1))
    {
      machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = record_jump_cond_subreg (inner_mode, op0);
      if (tem)
        record_jump_cond (code, mode, SUBREG_REG (op1), tem,
                          reversed_nonequality);
    }

  /* Similarly, if this is an NE comparison, and either is a SUBREG
     making a smaller mode, we know the whole thing is also NE.  */

  /* Note that GET_MODE (op0) may not equal MODE;
     if we test MODE instead, we can get an infinite recursion
     alternating between two modes each wider than MODE.  */

  if (code == NE
      && partial_subreg_p (op0)
      && subreg_lowpart_p (op0))
    {
      machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = record_jump_cond_subreg (inner_mode, op1);
      if (tem)
        record_jump_cond (code, mode, SUBREG_REG (op0), tem,
                          reversed_nonequality);
    }

  if (code == NE
      && partial_subreg_p (op1)
      && subreg_lowpart_p (op1))
    {
      machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = record_jump_cond_subreg (inner_mode, op0);
      if (tem)
        record_jump_cond (code, mode, SUBREG_REG (op1), tem,
                          reversed_nonequality);
    }

  /* Hash both operands.  */

  do_not_record = 0;
  hash_arg_in_memory = 0;
  op0_hash = HASH (op0, mode);
  op0_in_memory = hash_arg_in_memory;

  if (do_not_record)
    return;

  do_not_record = 0;
  hash_arg_in_memory = 0;
  op1_hash = HASH (op1, mode);
  op1_in_memory = hash_arg_in_memory;

  if (do_not_record)
    return;

  /* Look up both operands.  */
  op0_elt = lookup (op0, op0_hash, mode);
  op1_elt = lookup (op1, op1_hash, mode);

  /* If both operands are already equivalent or if they are not in the
     table but are identical, do nothing.  */
  if ((op0_elt != 0 && op1_elt != 0
       && op0_elt->first_same_value == op1_elt->first_same_value)
      || op0 == op1 || rtx_equal_p (op0, op1))
    return;

  /* If we aren't setting two things equal all we can do is save this
     comparison.  Similarly if this is floating-point.  In the latter
     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
     If we record the equality, we might inadvertently delete code
     whose intent was to change -0 to +0.  */

  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
    {
      struct qty_table_elem *ent;
      int qty;

      /* If we reversed a floating-point comparison, if OP0 is not a
         register, or if OP1 is neither a register nor a constant, we can't
         do anything.  */

      if (!REG_P (op1))
        op1 = equiv_constant (op1);

      if ((reversed_nonequality && FLOAT_MODE_P (mode))
          || !REG_P (op0) || op1 == 0)
        return;

      /* Put OP0 in the hash table if it isn't already.  This gives it a
         new quantity number.  */
      if (op0_elt == 0)
        {
          if (insert_regs (op0, NULL, 0))
            {
              rehash_using_reg (op0);
              op0_hash = HASH (op0, mode);

              /* If OP0 is contained in OP1, this changes its hash code
                 as well.  Faster to rehash than to check, except
                 for the simple case of a constant.  */
              if (! CONSTANT_P (op1))
                op1_hash = HASH (op1, mode);
            }

          op0_elt = insert (op0, NULL, op0_hash, mode);
          op0_elt->in_memory = op0_in_memory;
        }

      qty = REG_QTY (REGNO (op0));
      ent = &qty_table[qty];

      ent->comparison_code = code;
      if (REG_P (op1))
        {
          /* Look it up again--in case op0 and op1 are the same.  */
          op1_elt = lookup (op1, op1_hash, mode);

          /* Put OP1 in the hash table so it gets a new quantity number.  */
          if (op1_elt == 0)
            {
              if (insert_regs (op1, NULL, 0))
                {
                  rehash_using_reg (op1);
                  op1_hash = HASH (op1, mode);
                }

              op1_elt = insert (op1, NULL, op1_hash, mode);
              op1_elt->in_memory = op1_in_memory;
            }

          ent->comparison_const = NULL_RTX;
          ent->comparison_qty = REG_QTY (REGNO (op1));
        }
      else
        {
          ent->comparison_const = op1;
          ent->comparison_qty = -1;
        }

      return;
    }

  /* If either side is still missing an equivalence, make it now,
     then merge the equivalences.  */

  if (op0_elt == 0)
    {
      if (insert_regs (op0, NULL, 0))
        {
          rehash_using_reg (op0);
          op0_hash = HASH (op0, mode);
        }

      op0_elt = insert (op0, NULL, op0_hash, mode);
      op0_elt->in_memory = op0_in_memory;
    }

  if (op1_elt == 0)
    {
      if (insert_regs (op1, NULL, 0))
        {
          rehash_using_reg (op1);
          op1_hash = HASH (op1, mode);
        }

      op1_elt = insert (op1, NULL, op1_hash, mode);
      op1_elt->in_memory = op1_in_memory;
    }

  merge_equiv_classes (op0_elt, op1_elt);
}
/* CSE processing for one instruction.

   Most "true" common subexpressions are mostly optimized away in GIMPLE,
   but the few that "leak through" are cleaned up by cse_insn, and complex
   addressing modes are often formed here.

   The main function is cse_insn, and between here and that function
   a couple of helper functions are defined to keep the size of cse_insn
   within reasonable proportions.

   Data is shared between the main and helper functions via STRUCT SET,
   which contains all the data related to every set in the instruction
   that is being processed.

   Note that cse_main processes all sets in the instruction.  Most
   passes in GCC only process simple SET insns or single_set insns, but
   CSE processes insns with multiple sets as well.  */
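/* For illustration only (hypothetical RTL, register numbers made up):
   a single insn with more than one set might look like

     (parallel [(set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 102)))
		(set (reg:CC 17) (compare:CC (reg:SI 101) (reg:SI 102)))])

   and cse_insn records and canonicalizes each of those SETs, not just
   the single_set case.  */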
/* Data on one SET contained in the instruction.  */

struct set
{
  /* The SET rtx itself.  */
  rtx rtl;
  /* The SET_SRC of the rtx (the original value, if it is changing).  */
  rtx src;
  /* The hash-table element for the SET_SRC of the SET.  */
  struct table_elt *src_elt;
  /* Hash value for the SET_SRC.  */
  unsigned src_hash;
  /* Hash value for the SET_DEST.  */
  unsigned dest_hash;
  /* The SET_DEST, with SUBREG, etc., stripped.  */
  rtx inner_dest;
  /* Nonzero if the SET_SRC is in memory.  */
  char src_in_memory;
  /* Nonzero if the SET_SRC contains something
     whose value cannot be predicted and understood.  */
  char src_volatile;
  /* Original machine mode, in case it becomes a CONST_INT.
     The size of this field should match the size of the mode
     field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  /* Hash value of constant equivalent for SET_SRC.  */
  unsigned src_const_hash;
  /* A constant equivalent for SET_SRC, if any.  */
  rtx src_const;
  /* Table entry for constant equivalent for SET_SRC, if any.  */
  struct table_elt *src_const_elt;
  /* Table entry for the destination address.  */
  struct table_elt *dest_addr_elt;
};
4199 /* Special handling for (set REG0 REG1) where REG0 is the
4200 "cheapest", cheaper than REG1. After cse, REG1 will probably not
4201 be used in the sequel, so (if easily done) change this insn to
4202 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
4203 that computed their value. Then REG1 will become a dead store
4204 and won't cloud the situation for later optimizations.
4206 Do not make this change if REG1 is a hard register, because it will
4207 then be used in the sequel and we may be changing a two-operand insn
4208 into a three-operand insn.
4210 This is the last transformation that cse_insn will try to do. */
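/* For illustration only (hypothetical RTL, register numbers made up):
   given

     (set (reg 120) (plus (reg 118) (reg 119)))	;; previous insn
     (set (reg 117) (reg 120))			;; this insn; reg 117 is cheaper

   the transformation rewrites the pair as

     (set (reg 117) (plus (reg 118) (reg 119)))
     (set (reg 120) (reg 117))

   so the second set is likely to become a dead store.  */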
static void
try_back_substitute_reg (rtx set, rtx_insn *insn)
{
  rtx dest = SET_DEST (set);
  rtx src = SET_SRC (set);

  if (REG_P (dest)
      && REG_P (src) && ! HARD_REGISTER_P (src)
      && REGNO_QTY_VALID_P (REGNO (src)))
    {
      int src_q = REG_QTY (REGNO (src));
      struct qty_table_elem *src_ent = &qty_table[src_q];

      if (src_ent->first_reg == REGNO (dest))
	{
	  /* Scan for the previous nonnote insn, but stop at a basic
	     block boundary.  */
	  rtx_insn *prev = insn;
	  rtx_insn *bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
	  do
	    {
	      prev = PREV_INSN (prev);
	    }
	  while (prev != bb_head && (NOTE_P (prev) || DEBUG_INSN_P (prev)));
4237 /* Do not swap the registers around if the previous instruction
4238 attaches a REG_EQUIV note to REG1.
4240 ??? It's not entirely clear whether we can transfer a REG_EQUIV
4241 from the pseudo that originally shadowed an incoming argument
4242 to another register. Some uses of REG_EQUIV might rely on it
4243 being attached to REG1 rather than REG2.
4245 This section previously turned the REG_EQUIV into a REG_EQUAL
4246 note. We cannot do that because REG_EQUIV may provide an
4247 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
4248 if (NONJUMP_INSN_P (prev
)
4249 && GET_CODE (PATTERN (prev
)) == SET
4250 && SET_DEST (PATTERN (prev
)) == src
4251 && ! find_reg_note (prev
, REG_EQUIV
, NULL_RTX
))
4255 validate_change (prev
, &SET_DEST (PATTERN (prev
)), dest
, 1);
4256 validate_change (insn
, &SET_DEST (set
), src
, 1);
4257 validate_change (insn
, &SET_SRC (set
), dest
, 1);
4258 apply_change_group ();
4260 /* If INSN has a REG_EQUAL note, and this note mentions
4261 REG0, then we must delete it, because the value in
4262 REG0 has changed. If the note's value is REG1, we must
4263 also delete it because that is now this insn's dest. */
4264 note
= find_reg_note (insn
, REG_EQUAL
, NULL_RTX
);
4266 && (reg_mentioned_p (dest
, XEXP (note
, 0))
4267 || rtx_equal_p (src
, XEXP (note
, 0))))
4268 remove_note (insn
, note
);
4270 /* If INSN has a REG_ARGS_SIZE note, move it to PREV. */
4271 note
= find_reg_note (insn
, REG_ARGS_SIZE
, NULL_RTX
);
4274 remove_note (insn
, note
);
4275 gcc_assert (!find_reg_note (prev
, REG_ARGS_SIZE
, NULL_RTX
));
4276 set_unique_reg_note (prev
, REG_ARGS_SIZE
, XEXP (note
, 0));
4283 /* Record all the SETs in this instruction into SETS_PTR,
4284 and return the number of recorded sets. */
static int
find_sets_in_insn (rtx_insn *insn, struct set **psets)
{
  struct set *sets = *psets;
  int n_sets = 0;
  rtx x = PATTERN (insn);

  if (GET_CODE (x) == SET)
    {
4294 /* Ignore SETs that are unconditional jumps.
4295 They never need cse processing, so this does not hurt.
4296 The reason is not efficiency but rather
4297 so that we can test at the end for instructions
4298 that have been simplified to unconditional jumps
4299 and not be misled by unchanged instructions
4300 that were unconditional jumps to begin with. */
4301 if (SET_DEST (x
) == pc_rtx
4302 && GET_CODE (SET_SRC (x
)) == LABEL_REF
)
4304 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4305 The hard function value register is used only once, to copy to
4306 someplace else, so it isn't worth cse'ing. */
4307 else if (GET_CODE (SET_SRC (x
)) == CALL
)
4310 sets
[n_sets
++].rtl
= x
;
4312 else if (GET_CODE (x
) == PARALLEL
)
4314 int i
, lim
= XVECLEN (x
, 0);
4316 /* Go over the expressions of the PARALLEL in forward order, to
4317 put them in the same order in the SETS array. */
4318 for (i
= 0; i
< lim
; i
++)
4320 rtx y
= XVECEXP (x
, 0, i
);
4321 if (GET_CODE (y
) == SET
)
4323 /* As above, we ignore unconditional jumps and call-insns and
4324 ignore the result of apply_change_group. */
4325 if (SET_DEST (y
) == pc_rtx
4326 && GET_CODE (SET_SRC (y
)) == LABEL_REF
)
4328 else if (GET_CODE (SET_SRC (y
)) == CALL
)
4331 sets
[n_sets
++].rtl
= y
;
/* Subroutine of canonicalize_insn.  X is an ASM_OPERANDS in INSN.  */

static void
canon_asm_operands (rtx x, rtx_insn *insn)
{
  for (int i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
    {
      rtx input = ASM_OPERANDS_INPUT (x, i);
      if (!(REG_P (input) && HARD_REGISTER_P (input)))
	{
	  input = canon_reg (input, insn);
	  validate_change (insn, &ASM_OPERANDS_INPUT (x, i), input, 1);
	}
    }
}
/* Where possible, substitute every register reference in the N_SETS
   number of SETS in INSN with the canonical register.

   Register canonicalization propagates the earliest register (i.e.
   one that is set before INSN) with the same value.  This is a very
   useful, simple form of CSE, to clean up warts from expanding GIMPLE
   to RTL.  For instance, a CONST for an address is usually expanded
   multiple times to loads into different registers, thus creating many
   subexpressions of the form:

     (set (reg1) (some_const))
     (set (mem (... reg1 ...) (thing)))
     (set (reg2) (some_const))
     (set (mem (... reg2 ...) (thing)))

   After canonicalizing, the code takes the following form:

     (set (reg1) (some_const))
     (set (mem (... reg1 ...) (thing)))
     (set (reg2) (some_const))
     (set (mem (... reg1 ...) (thing)))

   The set to reg2 is now trivially dead, and the memory reference (or
   address, or whatever) may be a candidate for further CSEing.

   In this function, the result of apply_change_group can be ignored;
   see canon_reg.  */
4384 canonicalize_insn (rtx_insn
*insn
, struct set
**psets
, int n_sets
)
4386 struct set
*sets
= *psets
;
4388 rtx x
= PATTERN (insn
);
4393 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
4394 if (GET_CODE (XEXP (tem
, 0)) != SET
)
4395 XEXP (tem
, 0) = canon_reg (XEXP (tem
, 0), insn
);
4398 if (GET_CODE (x
) == SET
&& GET_CODE (SET_SRC (x
)) == CALL
)
4400 canon_reg (SET_SRC (x
), insn
);
4401 apply_change_group ();
4402 fold_rtx (SET_SRC (x
), insn
);
4404 else if (GET_CODE (x
) == CLOBBER
)
4406 /* If we clobber memory, canon the address.
4407 This does nothing when a register is clobbered
4408 because we have already invalidated the reg. */
4409 if (MEM_P (XEXP (x
, 0)))
4410 canon_reg (XEXP (x
, 0), insn
);
4412 else if (GET_CODE (x
) == USE
4413 && ! (REG_P (XEXP (x
, 0))
4414 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
))
4415 /* Canonicalize a USE of a pseudo register or memory location. */
4416 canon_reg (x
, insn
);
4417 else if (GET_CODE (x
) == ASM_OPERANDS
)
4418 canon_asm_operands (x
, insn
);
4419 else if (GET_CODE (x
) == CALL
)
4421 canon_reg (x
, insn
);
4422 apply_change_group ();
4425 else if (DEBUG_INSN_P (insn
))
4426 canon_reg (PATTERN (insn
), insn
);
4427 else if (GET_CODE (x
) == PARALLEL
)
4429 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
4431 rtx y
= XVECEXP (x
, 0, i
);
4432 if (GET_CODE (y
) == SET
&& GET_CODE (SET_SRC (y
)) == CALL
)
4434 canon_reg (SET_SRC (y
), insn
);
4435 apply_change_group ();
4436 fold_rtx (SET_SRC (y
), insn
);
4438 else if (GET_CODE (y
) == CLOBBER
)
4440 if (MEM_P (XEXP (y
, 0)))
4441 canon_reg (XEXP (y
, 0), insn
);
4443 else if (GET_CODE (y
) == USE
4444 && ! (REG_P (XEXP (y
, 0))
4445 && REGNO (XEXP (y
, 0)) < FIRST_PSEUDO_REGISTER
))
4446 canon_reg (y
, insn
);
4447 else if (GET_CODE (y
) == ASM_OPERANDS
)
4448 canon_asm_operands (y
, insn
);
4449 else if (GET_CODE (y
) == CALL
)
4451 canon_reg (y
, insn
);
4452 apply_change_group ();
4458 if (n_sets
== 1 && REG_NOTES (insn
) != 0
4459 && (tem
= find_reg_note (insn
, REG_EQUAL
, NULL_RTX
)) != 0)
4461 /* We potentially will process this insn many times. Therefore,
4462 drop the REG_EQUAL note if it is equal to the SET_SRC of the
4465 Do not do so if the REG_EQUAL note is for a STRICT_LOW_PART,
4466 because cse_insn handles those specially. */
4467 if (GET_CODE (SET_DEST (sets
[0].rtl
)) != STRICT_LOW_PART
4468 && rtx_equal_p (XEXP (tem
, 0), SET_SRC (sets
[0].rtl
)))
4469 remove_note (insn
, tem
);
4472 canon_reg (XEXP (tem
, 0), insn
);
4473 apply_change_group ();
4474 XEXP (tem
, 0) = fold_rtx (XEXP (tem
, 0), insn
);
4475 df_notes_rescan (insn
);
4479 /* Canonicalize sources and addresses of destinations.
4480 We do this in a separate pass to avoid problems when a MATCH_DUP is
4481 present in the insn pattern. In that case, we want to ensure that
4482 we don't break the duplicate nature of the pattern. So we will replace
4483 both operands at the same time. Otherwise, we would fail to find an
4484 equivalent substitution in the loop calling validate_change below.
4486 We used to suppress canonicalization of DEST if it appears in SRC,
4487 but we don't do this any more. */
4489 for (i
= 0; i
< n_sets
; i
++)
4491 rtx dest
= SET_DEST (sets
[i
].rtl
);
4492 rtx src
= SET_SRC (sets
[i
].rtl
);
4493 rtx new_rtx
= canon_reg (src
, insn
);
4495 validate_change (insn
, &SET_SRC (sets
[i
].rtl
), new_rtx
, 1);
4497 if (GET_CODE (dest
) == ZERO_EXTRACT
)
4499 validate_change (insn
, &XEXP (dest
, 1),
4500 canon_reg (XEXP (dest
, 1), insn
), 1);
4501 validate_change (insn
, &XEXP (dest
, 2),
4502 canon_reg (XEXP (dest
, 2), insn
), 1);
4505 while (GET_CODE (dest
) == SUBREG
4506 || GET_CODE (dest
) == ZERO_EXTRACT
4507 || GET_CODE (dest
) == STRICT_LOW_PART
)
4508 dest
= XEXP (dest
, 0);
4511 canon_reg (dest
, insn
);
4514 /* Now that we have done all the replacements, we can apply the change
4515 group and see if they all work. Note that this will cause some
4516 canonicalizations that would have worked individually not to be applied
4517 because some other canonicalization didn't work, but this should not
4520 The result of apply_change_group can be ignored; see canon_reg. */
4522 apply_change_group ();
4525 /* Main function of CSE.
4526 First simplify sources and addresses of all assignments
4527 in the instruction, using previously-computed equivalents values.
4528 Then install the new sources and destinations in the table
4529 of available values. */
4532 cse_insn (rtx_insn
*insn
)
4534 rtx x
= PATTERN (insn
);
4540 struct table_elt
*src_eqv_elt
= 0;
4541 int src_eqv_volatile
= 0;
4542 int src_eqv_in_memory
= 0;
4543 unsigned src_eqv_hash
= 0;
4545 struct set
*sets
= (struct set
*) 0;
4547 if (GET_CODE (x
) == SET
)
4548 sets
= XALLOCA (struct set
);
4549 else if (GET_CODE (x
) == PARALLEL
)
4550 sets
= XALLOCAVEC (struct set
, XVECLEN (x
, 0));
4553 /* Records what this insn does to set CC0. */
4555 this_insn_cc0_mode
= VOIDmode
;
4557 /* Find all regs explicitly clobbered in this insn,
4558 to ensure they are not replaced with any other regs
4559 elsewhere in this insn. */
4560 invalidate_from_sets_and_clobbers (insn
);
4562 /* Record all the SETs in this instruction. */
4563 n_sets
= find_sets_in_insn (insn
, &sets
);
4565 /* Substitute the canonical register where possible. */
4566 canonicalize_insn (insn
, &sets
, n_sets
);
4568 /* If this insn has a REG_EQUAL note, store the equivalent value in SRC_EQV,
4569 if different, or if the DEST is a STRICT_LOW_PART/ZERO_EXTRACT. The
4570 latter condition is necessary because SRC_EQV is handled specially for
4571 this case, and if it isn't set, then there will be no equivalence
4572 for the destination. */
4573 if (n_sets
== 1 && REG_NOTES (insn
) != 0
4574 && (tem
= find_reg_note (insn
, REG_EQUAL
, NULL_RTX
)) != 0)
4577 if (GET_CODE (SET_DEST (sets
[0].rtl
)) != ZERO_EXTRACT
4578 && (! rtx_equal_p (XEXP (tem
, 0), SET_SRC (sets
[0].rtl
))
4579 || GET_CODE (SET_DEST (sets
[0].rtl
)) == STRICT_LOW_PART
))
4580 src_eqv
= copy_rtx (XEXP (tem
, 0));
      /* If DEST is of the form ZERO_EXTRACT, as in:
4582 (set (zero_extract:SI (reg:SI 119)
4583 (const_int 16 [0x10])
4584 (const_int 16 [0x10]))
4585 (const_int 51154 [0xc7d2]))
4586 REG_EQUAL note will specify the value of register (reg:SI 119) at this
4587 point. Note that this is different from SRC_EQV. We can however
4588 calculate SRC_EQV with the position and width of ZERO_EXTRACT. */
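      /* For illustration only (a hypothetical worked example, assuming
	 !BITS_BIG_ENDIAN): if the REG_EQUAL note gives (reg:SI 119) the
	 value (const_int 0xc7d21234), then pos = 16 and width = 16 yield
	 shift = 16 and mask = 0xffff, so SRC_EQV becomes
	 (0xc7d21234 >> 16) & 0xffff = 0xc7d2, i.e. (const_int 51154),
	 which matches the value stored into the field above.  */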
4589 else if (GET_CODE (SET_DEST (sets
[0].rtl
)) == ZERO_EXTRACT
4590 && CONST_INT_P (XEXP (tem
, 0))
4591 && CONST_INT_P (XEXP (SET_DEST (sets
[0].rtl
), 1))
4592 && CONST_INT_P (XEXP (SET_DEST (sets
[0].rtl
), 2)))
4594 rtx dest_reg
= XEXP (SET_DEST (sets
[0].rtl
), 0);
4595 /* This is the mode of XEXP (tem, 0) as well. */
4596 scalar_int_mode dest_mode
4597 = as_a
<scalar_int_mode
> (GET_MODE (dest_reg
));
4598 rtx width
= XEXP (SET_DEST (sets
[0].rtl
), 1);
4599 rtx pos
= XEXP (SET_DEST (sets
[0].rtl
), 2);
4600 HOST_WIDE_INT val
= INTVAL (XEXP (tem
, 0));
4603 if (BITS_BIG_ENDIAN
)
4604 shift
= (GET_MODE_PRECISION (dest_mode
)
4605 - INTVAL (pos
) - INTVAL (width
));
4607 shift
= INTVAL (pos
);
4608 if (INTVAL (width
) == HOST_BITS_PER_WIDE_INT
)
4609 mask
= HOST_WIDE_INT_M1
;
4611 mask
= (HOST_WIDE_INT_1
<< INTVAL (width
)) - 1;
4612 val
= (val
>> shift
) & mask
;
4613 src_eqv
= GEN_INT (val
);
4617 /* Set sets[i].src_elt to the class each source belongs to.
4618 Detect assignments from or to volatile things
4619 and set set[i] to zero so they will be ignored
4620 in the rest of this function.
4622 Nothing in this loop changes the hash table or the register chains. */
4624 for (i
= 0; i
< n_sets
; i
++)
4626 bool repeat
= false;
4627 bool noop_insn
= false;
4630 struct table_elt
*elt
= 0, *p
;
4634 rtx src_related
= 0;
4635 bool src_related_is_const_anchor
= false;
4636 struct table_elt
*src_const_elt
= 0;
4637 int src_cost
= MAX_COST
;
4638 int src_eqv_cost
= MAX_COST
;
4639 int src_folded_cost
= MAX_COST
;
4640 int src_related_cost
= MAX_COST
;
4641 int src_elt_cost
= MAX_COST
;
4642 int src_regcost
= MAX_COST
;
4643 int src_eqv_regcost
= MAX_COST
;
4644 int src_folded_regcost
= MAX_COST
;
4645 int src_related_regcost
= MAX_COST
;
4646 int src_elt_regcost
= MAX_COST
;
      /* Set nonzero if we need to call force_const_mem on the
	 contents of src_folded before using it.  */
4649 int src_folded_force_flag
= 0;
4650 scalar_int_mode int_mode
;
4652 dest
= SET_DEST (sets
[i
].rtl
);
4653 src
= SET_SRC (sets
[i
].rtl
);
4655 /* If SRC is a constant that has no machine mode,
4656 hash it with the destination's machine mode.
4657 This way we can keep different modes separate. */
4659 mode
= GET_MODE (src
) == VOIDmode
? GET_MODE (dest
) : GET_MODE (src
);
4660 sets
[i
].mode
= mode
;
4664 machine_mode eqvmode
= mode
;
4665 if (GET_CODE (dest
) == STRICT_LOW_PART
)
4666 eqvmode
= GET_MODE (SUBREG_REG (XEXP (dest
, 0)));
4668 hash_arg_in_memory
= 0;
4669 src_eqv_hash
= HASH (src_eqv
, eqvmode
);
4671 /* Find the equivalence class for the equivalent expression. */
4674 src_eqv_elt
= lookup (src_eqv
, src_eqv_hash
, eqvmode
);
4676 src_eqv_volatile
= do_not_record
;
4677 src_eqv_in_memory
= hash_arg_in_memory
;
4680 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4681 value of the INNER register, not the destination. So it is not
4682 a valid substitution for the source. But save it for later. */
4683 if (GET_CODE (dest
) == STRICT_LOW_PART
)
4686 src_eqv_here
= src_eqv
;
4688 /* Simplify and foldable subexpressions in SRC. Then get the fully-
4689 simplified result, which may not necessarily be valid. */
4690 src_folded
= fold_rtx (src
, NULL
);
4693 /* ??? This caused bad code to be generated for the m68k port with -O2.
4694 Suppose src is (CONST_INT -1), and that after truncation src_folded
4695 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4696 At the end we will add src and src_const to the same equivalence
4697 class. We now have 3 and -1 on the same equivalence class. This
4698 causes later instructions to be mis-optimized. */
4699 /* If storing a constant in a bitfield, pre-truncate the constant
4700 so we will be able to record it later. */
4701 if (GET_CODE (SET_DEST (sets
[i
].rtl
)) == ZERO_EXTRACT
)
4703 rtx width
= XEXP (SET_DEST (sets
[i
].rtl
), 1);
4705 if (CONST_INT_P (src
)
4706 && CONST_INT_P (width
)
4707 && INTVAL (width
) < HOST_BITS_PER_WIDE_INT
4708 && (INTVAL (src
) & ((HOST_WIDE_INT
) (-1) << INTVAL (width
))))
4710 = GEN_INT (INTVAL (src
) & ((HOST_WIDE_INT_1
4711 << INTVAL (width
)) - 1));
4715 /* Compute SRC's hash code, and also notice if it
4716 should not be recorded at all. In that case,
4717 prevent any further processing of this assignment.
4719 We set DO_NOT_RECORD if the destination has a REG_UNUSED note.
4720 This avoids getting the source register into the tables, where it
4721 may be invalidated later (via REG_QTY), then trigger an ICE upon
4724 This is only a problem in multi-set insns. If it were a single
4725 set the dead copy would have been removed. If the RHS were anything
4726 but a simple REG, then we won't call insert_regs and thus there's
4727 no potential for triggering the ICE. */
4728 do_not_record
= (REG_P (dest
)
4730 && find_reg_note (insn
, REG_UNUSED
, dest
));
4731 hash_arg_in_memory
= 0;
4734 sets
[i
].src_hash
= HASH (src
, mode
);
4735 sets
[i
].src_volatile
= do_not_record
;
4736 sets
[i
].src_in_memory
= hash_arg_in_memory
;
4738 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4739 a pseudo, do not record SRC. Using SRC as a replacement for
4740 anything else will be incorrect in that situation. Note that
4741 this usually occurs only for stack slots, in which case all the
4742 RTL would be referring to SRC, so we don't lose any optimization
4743 opportunities by not having SRC in the hash table. */
4746 && find_reg_note (insn
, REG_EQUIV
, NULL_RTX
) != 0
4748 && REGNO (dest
) >= FIRST_PSEUDO_REGISTER
)
4749 sets
[i
].src_volatile
= 1;
4751 else if (GET_CODE (src
) == ASM_OPERANDS
4752 && GET_CODE (x
) == PARALLEL
)
4754 /* Do not record result of a non-volatile inline asm with
4755 more than one result. */
4757 sets
[i
].src_volatile
= 1;
4759 int j
, lim
= XVECLEN (x
, 0);
4760 for (j
= 0; j
< lim
; j
++)
4762 rtx y
= XVECEXP (x
, 0, j
);
4763 /* And do not record result of a non-volatile inline asm
4764 with "memory" clobber. */
4765 if (GET_CODE (y
) == CLOBBER
&& MEM_P (XEXP (y
, 0)))
4767 sets
[i
].src_volatile
= 1;
4774 /* It is no longer clear why we used to do this, but it doesn't
4775 appear to still be needed. So let's try without it since this
4776 code hurts cse'ing widened ops. */
4777 /* If source is a paradoxical subreg (such as QI treated as an SI),
4778 treat it as volatile. It may do the work of an SI in one context
4779 where the extra bits are not being used, but cannot replace an SI
4781 if (paradoxical_subreg_p (src
))
4782 sets
[i
].src_volatile
= 1;
4785 /* Locate all possible equivalent forms for SRC. Try to replace
4786 SRC in the insn with each cheaper equivalent.
4788 We have the following types of equivalents: SRC itself, a folded
4789 version, a value given in a REG_EQUAL note, or a value related
4792 Each of these equivalents may be part of an additional class
4793 of equivalents (if more than one is in the table, they must be in
4794 the same class; we check for this).
4796 If the source is volatile, we don't do any table lookups.
4798 We note any constant equivalent for possible later use in a
4801 if (!sets
[i
].src_volatile
)
4802 elt
= lookup (src
, sets
[i
].src_hash
, mode
);
4804 sets
[i
].src_elt
= elt
;
4806 if (elt
&& src_eqv_here
&& src_eqv_elt
)
4808 if (elt
->first_same_value
!= src_eqv_elt
->first_same_value
)
4810 /* The REG_EQUAL is indicating that two formerly distinct
4811 classes are now equivalent. So merge them. */
4812 merge_equiv_classes (elt
, src_eqv_elt
);
4813 src_eqv_hash
= HASH (src_eqv
, elt
->mode
);
4814 src_eqv_elt
= lookup (src_eqv
, src_eqv_hash
, elt
->mode
);
4820 else if (src_eqv_elt
)
4823 /* Try to find a constant somewhere and record it in `src_const'.
4824 Record its table element, if any, in `src_const_elt'. Look in
4825 any known equivalences first. (If the constant is not in the
4826 table, also set `sets[i].src_const_hash'). */
4828 for (p
= elt
->first_same_value
; p
; p
= p
->next_same_value
)
4832 src_const_elt
= elt
;
4837 && (CONSTANT_P (src_folded
)
4838 /* Consider (minus (label_ref L1) (label_ref L2)) as
4839 "constant" here so we will record it. This allows us
4840 to fold switch statements when an ADDR_DIFF_VEC is used. */
4841 || (GET_CODE (src_folded
) == MINUS
4842 && GET_CODE (XEXP (src_folded
, 0)) == LABEL_REF
4843 && GET_CODE (XEXP (src_folded
, 1)) == LABEL_REF
)))
4844 src_const
= src_folded
, src_const_elt
= elt
;
4845 else if (src_const
== 0 && src_eqv_here
&& CONSTANT_P (src_eqv_here
))
4846 src_const
= src_eqv_here
, src_const_elt
= src_eqv_elt
;
4848 /* If we don't know if the constant is in the table, get its
4849 hash code and look it up. */
4850 if (src_const
&& src_const_elt
== 0)
4852 sets
[i
].src_const_hash
= HASH (src_const
, mode
);
4853 src_const_elt
= lookup (src_const
, sets
[i
].src_const_hash
, mode
);
4856 sets
[i
].src_const
= src_const
;
4857 sets
[i
].src_const_elt
= src_const_elt
;
4859 /* If the constant and our source are both in the table, mark them as
4860 equivalent. Otherwise, if a constant is in the table but the source
4861 isn't, set ELT to it. */
4862 if (src_const_elt
&& elt
4863 && src_const_elt
->first_same_value
!= elt
->first_same_value
)
4864 merge_equiv_classes (elt
, src_const_elt
);
4865 else if (src_const_elt
&& elt
== 0)
4866 elt
= src_const_elt
;
4868 /* See if there is a register linearly related to a constant
4869 equivalent of SRC. */
4871 && (GET_CODE (src_const
) == CONST
4872 || (src_const_elt
&& src_const_elt
->related_value
!= 0)))
4874 src_related
= use_related_value (src_const
, src_const_elt
);
4877 struct table_elt
*src_related_elt
4878 = lookup (src_related
, HASH (src_related
, mode
), mode
);
4879 if (src_related_elt
&& elt
)
4881 if (elt
->first_same_value
4882 != src_related_elt
->first_same_value
)
4883 /* This can occur when we previously saw a CONST
4884 involving a SYMBOL_REF and then see the SYMBOL_REF
4885 twice. Merge the involved classes. */
4886 merge_equiv_classes (elt
, src_related_elt
);
4889 src_related_elt
= 0;
4891 else if (src_related_elt
&& elt
== 0)
4892 elt
= src_related_elt
;
      /* See if we have a CONST_INT that is already in a register in a
	 wider mode.  */
4899 if (src_const
&& src_related
== 0 && CONST_INT_P (src_const
)
4900 && is_int_mode (mode
, &int_mode
)
4901 && GET_MODE_PRECISION (int_mode
) < BITS_PER_WORD
)
4903 opt_scalar_int_mode wider_mode_iter
;
4904 FOR_EACH_WIDER_MODE (wider_mode_iter
, int_mode
)
4906 scalar_int_mode wider_mode
= wider_mode_iter
.require ();
4907 if (GET_MODE_PRECISION (wider_mode
) > BITS_PER_WORD
)
4910 struct table_elt
*const_elt
4911 = lookup (src_const
, HASH (src_const
, wider_mode
), wider_mode
);
4916 for (const_elt
= const_elt
->first_same_value
;
4917 const_elt
; const_elt
= const_elt
->next_same_value
)
4918 if (REG_P (const_elt
->exp
))
4920 src_related
= gen_lowpart (int_mode
, const_elt
->exp
);
4924 if (src_related
!= 0)
      /* Another possibility is that we have an AND with a constant in
	 a mode narrower than a word.  If so, it might have been generated
	 as part of an "if" which would narrow the AND.  If we already
	 have done the AND in a wider mode, we can use a SUBREG of that
	 value.  */
4935 if (flag_expensive_optimizations
&& ! src_related
4936 && is_a
<scalar_int_mode
> (mode
, &int_mode
)
4937 && GET_CODE (src
) == AND
&& CONST_INT_P (XEXP (src
, 1))
4938 && GET_MODE_SIZE (int_mode
) < UNITS_PER_WORD
)
4940 opt_scalar_int_mode tmode_iter
;
4941 rtx new_and
= gen_rtx_AND (VOIDmode
, NULL_RTX
, XEXP (src
, 1));
4943 FOR_EACH_WIDER_MODE (tmode_iter
, int_mode
)
4945 scalar_int_mode tmode
= tmode_iter
.require ();
4946 if (GET_MODE_SIZE (tmode
) > UNITS_PER_WORD
)
4949 rtx inner
= gen_lowpart (tmode
, XEXP (src
, 0));
4950 struct table_elt
*larger_elt
;
4954 PUT_MODE (new_and
, tmode
);
4955 XEXP (new_and
, 0) = inner
;
4956 larger_elt
= lookup (new_and
, HASH (new_and
, tmode
), tmode
);
4957 if (larger_elt
== 0)
4960 for (larger_elt
= larger_elt
->first_same_value
;
4961 larger_elt
; larger_elt
= larger_elt
->next_same_value
)
4962 if (REG_P (larger_elt
->exp
))
4965 = gen_lowpart (int_mode
, larger_elt
->exp
);
4975 /* See if a MEM has already been loaded with a widening operation;
4976 if it has, we can use a subreg of that. Many CISC machines
4977 also have such operations, but this is only likely to be
4978 beneficial on these machines. */
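      /* For illustration only (hypothetical RTL, assuming a target whose
	 load_extend_op for QImode is ZERO_EXTEND): if we have already seen

	   (set (reg:SI 200) (zero_extend:SI (mem:QI (reg:SI 201))))

	 then a later read of (mem:QI (reg:SI 201)) can instead use the
	 lowpart SUBREG of (reg:SI 200), avoiding a second memory load.  */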
4981 if (flag_expensive_optimizations
&& src_related
== 0
4982 && MEM_P (src
) && ! do_not_record
4983 && is_a
<scalar_int_mode
> (mode
, &int_mode
)
4984 && (extend_op
= load_extend_op (int_mode
)) != UNKNOWN
)
4986 struct rtx_def memory_extend_buf
;
4987 rtx memory_extend_rtx
= &memory_extend_buf
;
4989 /* Set what we are trying to extend and the operation it might
4990 have been extended with. */
4991 memset (memory_extend_rtx
, 0, sizeof (*memory_extend_rtx
));
4992 PUT_CODE (memory_extend_rtx
, extend_op
);
4993 XEXP (memory_extend_rtx
, 0) = src
;
4995 opt_scalar_int_mode tmode_iter
;
4996 FOR_EACH_WIDER_MODE (tmode_iter
, int_mode
)
4998 struct table_elt
*larger_elt
;
5000 scalar_int_mode tmode
= tmode_iter
.require ();
5001 if (GET_MODE_SIZE (tmode
) > UNITS_PER_WORD
)
5004 PUT_MODE (memory_extend_rtx
, tmode
);
5005 larger_elt
= lookup (memory_extend_rtx
,
5006 HASH (memory_extend_rtx
, tmode
), tmode
);
5007 if (larger_elt
== 0)
5010 for (larger_elt
= larger_elt
->first_same_value
;
5011 larger_elt
; larger_elt
= larger_elt
->next_same_value
)
5012 if (REG_P (larger_elt
->exp
))
5014 src_related
= gen_lowpart (int_mode
, larger_elt
->exp
);
5023 /* Try to express the constant using a register+offset expression
5024 derived from a constant anchor. */
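      /* For illustration only (a hypothetical example; anchor values are
	 target-dependent): if some register, say (reg:SI 210), is already
	 known to hold the anchor 0x30000 and SRC_CONST is 0x30004,
	 try_const_anchors may return

	   (plus:SI (reg:SI 210) (const_int 4))

	 which can be cheaper than materializing 0x30004 from scratch.  */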
5026 if (targetm
.const_anchor
5029 && GET_CODE (src_const
) == CONST_INT
)
5031 src_related
= try_const_anchors (src_const
, mode
);
5032 src_related_is_const_anchor
= src_related
!= NULL_RTX
;
5036 if (src
== src_folded
)
5039 /* At this point, ELT, if nonzero, points to a class of expressions
5040 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5041 and SRC_RELATED, if nonzero, each contain additional equivalent
5042 expressions. Prune these latter expressions by deleting expressions
5043 already in the equivalence class.
5045 Check for an equivalent identical to the destination. If found,
5046 this is the preferred equivalent since it will likely lead to
5047 elimination of the insn. Indicate this by placing it in
5051 elt
= elt
->first_same_value
;
5052 for (p
= elt
; p
; p
= p
->next_same_value
)
5054 enum rtx_code code
= GET_CODE (p
->exp
);
5056 /* If the expression is not valid, ignore it. Then we do not
5057 have to check for validity below. In most cases, we can use
5058 `rtx_equal_p', since canonicalization has already been done. */
5059 if (code
!= REG
&& ! exp_equiv_p (p
->exp
, p
->exp
, 1, false))
5062 /* Also skip paradoxical subregs, unless that's what we're
5064 if (paradoxical_subreg_p (p
->exp
)
5066 && GET_CODE (src
) == SUBREG
5067 && GET_MODE (src
) == GET_MODE (p
->exp
)
5068 && partial_subreg_p (GET_MODE (SUBREG_REG (src
)),
5069 GET_MODE (SUBREG_REG (p
->exp
)))))
5072 if (src
&& GET_CODE (src
) == code
&& rtx_equal_p (src
, p
->exp
))
5074 else if (src_folded
&& GET_CODE (src_folded
) == code
5075 && rtx_equal_p (src_folded
, p
->exp
))
5077 else if (src_eqv_here
&& GET_CODE (src_eqv_here
) == code
5078 && rtx_equal_p (src_eqv_here
, p
->exp
))
5080 else if (src_related
&& GET_CODE (src_related
) == code
5081 && rtx_equal_p (src_related
, p
->exp
))
5084 /* This is the same as the destination of the insns, we want
5085 to prefer it. Copy it to src_related. The code below will
5086 then give it a negative cost. */
5087 if (GET_CODE (dest
) == code
&& rtx_equal_p (p
->exp
, dest
))
5088 src_related
= p
->exp
;
5091 /* Find the cheapest valid equivalent, trying all the available
5092 possibilities. Prefer items not in the hash table to ones
5093 that are when they are equal cost. Note that we can never
5094 worsen an insn as the current contents will also succeed.
5095 If we find an equivalent identical to the destination, use it as best,
5096 since this insn will probably be eliminated in that case. */
5099 if (rtx_equal_p (src
, dest
))
5100 src_cost
= src_regcost
= -1;
5103 src_cost
= COST (src
, mode
);
5104 src_regcost
= approx_reg_cost (src
);
5110 if (rtx_equal_p (src_eqv_here
, dest
))
5111 src_eqv_cost
= src_eqv_regcost
= -1;
5114 src_eqv_cost
= COST (src_eqv_here
, mode
);
5115 src_eqv_regcost
= approx_reg_cost (src_eqv_here
);
5121 if (rtx_equal_p (src_folded
, dest
))
5122 src_folded_cost
= src_folded_regcost
= -1;
5125 src_folded_cost
= COST (src_folded
, mode
);
5126 src_folded_regcost
= approx_reg_cost (src_folded
);
5132 if (rtx_equal_p (src_related
, dest
))
5133 src_related_cost
= src_related_regcost
= -1;
5136 src_related_cost
= COST (src_related
, mode
);
5137 src_related_regcost
= approx_reg_cost (src_related
);
5139 /* If a const-anchor is used to synthesize a constant that
5140 normally requires multiple instructions then slightly prefer
5141 it over the original sequence. These instructions are likely
5142 to become redundant now. We can't compare against the cost
5143 of src_eqv_here because, on MIPS for example, multi-insn
5144 constants have zero cost; they are assumed to be hoisted from
5146 if (src_related_is_const_anchor
5147 && src_related_cost
== src_cost
5153 /* If this was an indirect jump insn, a known label will really be
5154 cheaper even though it looks more expensive. */
5155 if (dest
== pc_rtx
&& src_const
&& GET_CODE (src_const
) == LABEL_REF
)
5156 src_folded
= src_const
, src_folded_cost
= src_folded_regcost
= -1;
5158 /* Terminate loop when replacement made. This must terminate since
5159 the current contents will be tested and will always be valid. */
5164 /* Skip invalid entries. */
5165 while (elt
&& !REG_P (elt
->exp
)
5166 && ! exp_equiv_p (elt
->exp
, elt
->exp
, 1, false))
5167 elt
= elt
->next_same_value
;
5169 /* A paradoxical subreg would be bad here: it'll be the right
5170 size, but later may be adjusted so that the upper bits aren't
5171 what we want. So reject it. */
5173 && paradoxical_subreg_p (elt
->exp
)
5174 /* It is okay, though, if the rtx we're trying to match
5175 will ignore any of the bits we can't predict. */
5177 && GET_CODE (src
) == SUBREG
5178 && GET_MODE (src
) == GET_MODE (elt
->exp
)
5179 && partial_subreg_p (GET_MODE (SUBREG_REG (src
)),
5180 GET_MODE (SUBREG_REG (elt
->exp
)))))
5182 elt
= elt
->next_same_value
;
5188 src_elt_cost
= elt
->cost
;
5189 src_elt_regcost
= elt
->regcost
;
5192 /* Find cheapest and skip it for the next time. For items
5193 of equal cost, use this order:
5194 src_folded, src, src_eqv, src_related and hash table entry. */
5196 && preferable (src_folded_cost
, src_folded_regcost
,
5197 src_cost
, src_regcost
) <= 0
5198 && preferable (src_folded_cost
, src_folded_regcost
,
5199 src_eqv_cost
, src_eqv_regcost
) <= 0
5200 && preferable (src_folded_cost
, src_folded_regcost
,
5201 src_related_cost
, src_related_regcost
) <= 0
5202 && preferable (src_folded_cost
, src_folded_regcost
,
5203 src_elt_cost
, src_elt_regcost
) <= 0)
5205 trial
= src_folded
, src_folded_cost
= MAX_COST
;
5206 if (src_folded_force_flag
)
5208 rtx forced
= force_const_mem (mode
, trial
);
5214 && preferable (src_cost
, src_regcost
,
5215 src_eqv_cost
, src_eqv_regcost
) <= 0
5216 && preferable (src_cost
, src_regcost
,
5217 src_related_cost
, src_related_regcost
) <= 0
5218 && preferable (src_cost
, src_regcost
,
5219 src_elt_cost
, src_elt_regcost
) <= 0)
5220 trial
= src
, src_cost
= MAX_COST
;
5221 else if (src_eqv_here
5222 && preferable (src_eqv_cost
, src_eqv_regcost
,
5223 src_related_cost
, src_related_regcost
) <= 0
5224 && preferable (src_eqv_cost
, src_eqv_regcost
,
5225 src_elt_cost
, src_elt_regcost
) <= 0)
5226 trial
= src_eqv_here
, src_eqv_cost
= MAX_COST
;
5227 else if (src_related
5228 && preferable (src_related_cost
, src_related_regcost
,
5229 src_elt_cost
, src_elt_regcost
) <= 0)
5230 trial
= src_related
, src_related_cost
= MAX_COST
;
5234 elt
= elt
->next_same_value
;
5235 src_elt_cost
= MAX_COST
;
5239 (set (reg:M N) (const_int A))
5240 (set (reg:M2 O) (const_int B))
5241 (set (zero_extract:M2 (reg:M N) (const_int C) (const_int D))
5243 if (GET_CODE (SET_DEST (sets
[i
].rtl
)) == ZERO_EXTRACT
5244 && CONST_INT_P (trial
)
5245 && CONST_INT_P (XEXP (SET_DEST (sets
[i
].rtl
), 1))
5246 && CONST_INT_P (XEXP (SET_DEST (sets
[i
].rtl
), 2))
5247 && REG_P (XEXP (SET_DEST (sets
[i
].rtl
), 0))
5249 (GET_MODE_PRECISION (GET_MODE (SET_DEST (sets
[i
].rtl
))),
5250 INTVAL (XEXP (SET_DEST (sets
[i
].rtl
), 1))))
5251 && ((unsigned) INTVAL (XEXP (SET_DEST (sets
[i
].rtl
), 1))
5252 + (unsigned) INTVAL (XEXP (SET_DEST (sets
[i
].rtl
), 2))
5253 <= HOST_BITS_PER_WIDE_INT
))
5255 rtx dest_reg
= XEXP (SET_DEST (sets
[i
].rtl
), 0);
5256 rtx width
= XEXP (SET_DEST (sets
[i
].rtl
), 1);
5257 rtx pos
= XEXP (SET_DEST (sets
[i
].rtl
), 2);
5258 unsigned int dest_hash
= HASH (dest_reg
, GET_MODE (dest_reg
));
5259 struct table_elt
*dest_elt
5260 = lookup (dest_reg
, dest_hash
, GET_MODE (dest_reg
));
5261 rtx dest_cst
= NULL
;
5264 for (p
= dest_elt
->first_same_value
; p
; p
= p
->next_same_value
)
5265 if (p
->is_const
&& CONST_INT_P (p
->exp
))
5272 HOST_WIDE_INT val
= INTVAL (dest_cst
);
5275 /* This is the mode of DEST_CST as well. */
5276 scalar_int_mode dest_mode
5277 = as_a
<scalar_int_mode
> (GET_MODE (dest_reg
));
5278 if (BITS_BIG_ENDIAN
)
5279 shift
= GET_MODE_PRECISION (dest_mode
)
5280 - INTVAL (pos
) - INTVAL (width
);
5282 shift
= INTVAL (pos
);
5283 if (INTVAL (width
) == HOST_BITS_PER_WIDE_INT
)
5284 mask
= HOST_WIDE_INT_M1
;
5286 mask
= (HOST_WIDE_INT_1
<< INTVAL (width
)) - 1;
5287 val
&= ~(mask
<< shift
);
5288 val
|= (INTVAL (trial
) & mask
) << shift
;
5289 val
= trunc_int_for_mode (val
, dest_mode
);
5290 validate_unshare_change (insn
, &SET_DEST (sets
[i
].rtl
),
5292 validate_unshare_change (insn
, &SET_SRC (sets
[i
].rtl
),
5294 if (apply_change_group ())
5296 rtx note
= find_reg_note (insn
, REG_EQUAL
, NULL_RTX
);
5299 remove_note (insn
, note
);
5300 df_notes_rescan (insn
);
5304 src_eqv_volatile
= 0;
5305 src_eqv_in_memory
= 0;
	  /* We don't normally have an insn matching (set (pc) (pc)), so
	     check for this separately here.  We will delete such an
	     insn below.

	     For other cases such as a table jump or conditional jump
	     where we know the ultimate target, go ahead and replace the
	     operand.  While that may not make a valid insn, we will
	     reemit the jump below (and also insert any necessary
	     barriers).  */
5322 if (n_sets
== 1 && dest
== pc_rtx
5324 || (GET_CODE (trial
) == LABEL_REF
5325 && ! condjump_p (insn
))))
5327 /* Don't substitute non-local labels, this confuses CFG. */
5328 if (GET_CODE (trial
) == LABEL_REF
5329 && LABEL_REF_NONLOCAL_P (trial
))
5332 SET_SRC (sets
[i
].rtl
) = trial
;
5333 cse_jumps_altered
= true;
5337 /* Similarly, lots of targets don't allow no-op
5338 (set (mem x) (mem x)) moves. Even (set (reg x) (reg x))
5339 might be impossible for certain registers (like CC registers). */
5340 else if (n_sets
== 1
5342 && (MEM_P (trial
) || REG_P (trial
))
5343 && rtx_equal_p (trial
, dest
)
5344 && !side_effects_p (dest
)
5345 && (cfun
->can_delete_dead_exceptions
5346 || insn_nothrow_p (insn
))
		   /* We can only remove the later store if the earlier
		      store aliases at least all the accesses of the
		      later one.  */
5350 || ((MEM_ALIAS_SET (dest
) == MEM_ALIAS_SET (trial
)
5351 || alias_set_subset_of (MEM_ALIAS_SET (dest
),
5352 MEM_ALIAS_SET (trial
)))
5353 && (!MEM_EXPR (trial
)
5354 || refs_same_for_tbaa_p (MEM_EXPR (trial
),
5355 MEM_EXPR (dest
))))))
5357 SET_SRC (sets
[i
].rtl
) = trial
;
5362 /* Reject certain invalid forms of CONST that we create. */
5363 else if (CONSTANT_P (trial
)
5364 && GET_CODE (trial
) == CONST
5365 /* Reject cases that will cause decode_rtx_const to
5366 die. On the alpha when simplifying a switch, we
5367 get (const (truncate (minus (label_ref)
5369 && (GET_CODE (XEXP (trial
, 0)) == TRUNCATE
5370 /* Likewise on IA-64, except without the
5372 || (GET_CODE (XEXP (trial
, 0)) == MINUS
5373 && GET_CODE (XEXP (XEXP (trial
, 0), 0)) == LABEL_REF
5374 && GET_CODE (XEXP (XEXP (trial
, 0), 1)) == LABEL_REF
)))
5375 /* Do nothing for this case. */
5378 /* Do not replace anything with a MEM, except the replacement
5379 is a no-op. This allows this loop to terminate. */
5380 else if (MEM_P (trial
) && !rtx_equal_p (trial
, SET_SRC(sets
[i
].rtl
)))
5381 /* Do nothing for this case. */
5384 /* Look for a substitution that makes a valid insn. */
5385 else if (validate_unshare_change (insn
, &SET_SRC (sets
[i
].rtl
),
5388 rtx new_rtx
= canon_reg (SET_SRC (sets
[i
].rtl
), insn
);
5390 /* The result of apply_change_group can be ignored; see
5393 validate_change (insn
, &SET_SRC (sets
[i
].rtl
), new_rtx
, 1);
5394 apply_change_group ();
5399 /* If we previously found constant pool entries for
5400 constants and this is a constant, try making a
5401 pool entry. Put it in src_folded unless we already have done
5402 this since that is where it likely came from. */
5404 else if (constant_pool_entries_cost
5405 && CONSTANT_P (trial
)
5407 || (!MEM_P (src_folded
)
5408 && ! src_folded_force_flag
))
5409 && GET_MODE_CLASS (mode
) != MODE_CC
5410 && mode
!= VOIDmode
)
5412 src_folded_force_flag
= 1;
5414 src_folded_cost
= constant_pool_entries_cost
;
5415 src_folded_regcost
= constant_pool_entries_regcost
;
5419 /* If we changed the insn too much, handle this set from scratch. */
5426 src
= SET_SRC (sets
[i
].rtl
);
5428 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5429 However, there is an important exception: If both are registers
5430 that are not the head of their equivalence class, replace SET_SRC
5431 with the head of the class. If we do not do this, we will have
5432 both registers live over a portion of the basic block. This way,
5433 their lifetimes will likely abut instead of overlapping. */
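      /* For illustration only (hypothetical register numbers): if
	 (reg 130), (reg 131) and (reg 132) are all equivalent and
	 (reg 130) is the head of the class, then an insn that has become
	 (set (reg 132) (reg 132)) is rewritten as
	 (set (reg 132) (reg 130)), so the two lifetimes tend to abut at
	 this insn rather than overlap across the block.  */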
5435 && REGNO_QTY_VALID_P (REGNO (dest
)))
5437 int dest_q
= REG_QTY (REGNO (dest
));
5438 struct qty_table_elem
*dest_ent
= &qty_table
[dest_q
];
5440 if (dest_ent
->mode
== GET_MODE (dest
)
5441 && dest_ent
->first_reg
!= REGNO (dest
)
5442 && REG_P (src
) && REGNO (src
) == REGNO (dest
)
5443 /* Don't do this if the original insn had a hard reg as
5444 SET_SRC or SET_DEST. */
5445 && (!REG_P (sets
[i
].src
)
5446 || REGNO (sets
[i
].src
) >= FIRST_PSEUDO_REGISTER
)
5447 && (!REG_P (dest
) || REGNO (dest
) >= FIRST_PSEUDO_REGISTER
))
5448 /* We can't call canon_reg here because it won't do anything if
5449 SRC is a hard register. */
5451 int src_q
= REG_QTY (REGNO (src
));
5452 struct qty_table_elem
*src_ent
= &qty_table
[src_q
];
5453 int first
= src_ent
->first_reg
;
5455 = (first
>= FIRST_PSEUDO_REGISTER
5456 ? regno_reg_rtx
[first
] : gen_rtx_REG (GET_MODE (src
), first
));
5458 /* We must use validate-change even for this, because this
5459 might be a special no-op instruction, suitable only to
5461 if (validate_change (insn
, &SET_SRC (sets
[i
].rtl
), new_src
, 0))
5464 /* If we had a constant that is cheaper than what we are now
5465 setting SRC to, use that constant. We ignored it when we
5466 thought we could make this into a no-op. */
5467 if (src_const
&& COST (src_const
, mode
) < COST (src
, mode
)
5468 && validate_change (insn
, &SET_SRC (sets
[i
].rtl
),
5475 /* If we made a change, recompute SRC values. */
5476 if (src
!= sets
[i
].src
)
5479 hash_arg_in_memory
= 0;
5481 sets
[i
].src_hash
= HASH (src
, mode
);
5482 sets
[i
].src_volatile
= do_not_record
;
5483 sets
[i
].src_in_memory
= hash_arg_in_memory
;
5484 sets
[i
].src_elt
= lookup (src
, sets
[i
].src_hash
, mode
);
5487 /* If this is a single SET, we are setting a register, and we have an
5488 equivalent constant, we want to add a REG_EQUAL note if the constant
5489 is different from the source. We don't want to do it for a constant
5490 pseudo since verifying that this pseudo hasn't been eliminated is a
5491 pain; moreover such a note won't help anything.
5493 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5494 which can be created for a reference to a compile time computable
5495 entry in a jump table. */
5499 && !REG_P (src_const
)
5500 && !(GET_CODE (src_const
) == SUBREG
5501 && REG_P (SUBREG_REG (src_const
)))
5502 && !(GET_CODE (src_const
) == CONST
5503 && GET_CODE (XEXP (src_const
, 0)) == MINUS
5504 && GET_CODE (XEXP (XEXP (src_const
, 0), 0)) == LABEL_REF
5505 && GET_CODE (XEXP (XEXP (src_const
, 0), 1)) == LABEL_REF
)
5506 && !rtx_equal_p (src
, src_const
))
5508 /* Make sure that the rtx is not shared. */
5509 src_const
= copy_rtx (src_const
);
5511 /* Record the actual constant value in a REG_EQUAL note,
5512 making a new one if one does not already exist. */
5513 set_unique_reg_note (insn
, REG_EQUAL
, src_const
);
5514 df_notes_rescan (insn
);
5517 /* Now deal with the destination. */
5520 /* Look within any ZERO_EXTRACT to the MEM or REG within it. */
5521 while (GET_CODE (dest
) == SUBREG
5522 || GET_CODE (dest
) == ZERO_EXTRACT
5523 || GET_CODE (dest
) == STRICT_LOW_PART
)
5524 dest
= XEXP (dest
, 0);
5526 sets
[i
].inner_dest
= dest
;
5530 #ifdef PUSH_ROUNDING
5531 /* Stack pushes invalidate the stack pointer. */
5532 rtx addr
= XEXP (dest
, 0);
5533 if (GET_RTX_CLASS (GET_CODE (addr
)) == RTX_AUTOINC
5534 && XEXP (addr
, 0) == stack_pointer_rtx
)
5535 invalidate (stack_pointer_rtx
, VOIDmode
);
5537 dest
= fold_rtx (dest
, insn
);
5540 /* Compute the hash code of the destination now,
5541 before the effects of this instruction are recorded,
5542 since the register values used in the address computation
5543 are those before this instruction. */
5544 sets
[i
].dest_hash
= HASH (dest
, mode
);
5546 /* Don't enter a bit-field in the hash table
5547 because the value in it after the store
5548 may not equal what was stored, due to truncation. */
5550 if (GET_CODE (SET_DEST (sets
[i
].rtl
)) == ZERO_EXTRACT
)
5552 rtx width
= XEXP (SET_DEST (sets
[i
].rtl
), 1);
5554 if (src_const
!= 0 && CONST_INT_P (src_const
)
5555 && CONST_INT_P (width
)
5556 && INTVAL (width
) < HOST_BITS_PER_WIDE_INT
5557 && ! (INTVAL (src_const
)
5558 & (HOST_WIDE_INT_M1U
<< INTVAL (width
))))
5559 /* Exception: if the value is constant,
5560 and it won't be truncated, record it. */
5564 /* This is chosen so that the destination will be invalidated
5565 but no new value will be recorded.
5566 We must invalidate because sometimes constant
5567 values can be recorded for bitfields. */
5568 sets
[i
].src_elt
= 0;
5569 sets
[i
].src_volatile
= 1;
5575 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5577 else if (n_sets
== 1 && dest
== pc_rtx
&& src
== pc_rtx
)
5579 /* One less use of the label this insn used to jump to. */
5580 cse_cfg_altered
|= delete_insn_and_edges (insn
);
5581 cse_jumps_altered
= true;
5582 /* No more processing for this set. */
5586 /* Similarly for no-op moves. */
5589 if (cfun
->can_throw_non_call_exceptions
&& can_throw_internal (insn
))
5590 cse_cfg_altered
= true;
5591 cse_cfg_altered
|= delete_insn_and_edges (insn
);
5592 /* No more processing for this set. */
5596 /* If this SET is now setting PC to a label, we know it used to
5597 be a conditional or computed branch. */
5598 else if (dest
== pc_rtx
&& GET_CODE (src
) == LABEL_REF
5599 && !LABEL_REF_NONLOCAL_P (src
))
5601 /* We reemit the jump in as many cases as possible just in
5602 case the form of an unconditional jump is significantly
5603 different than a computed jump or conditional jump.
5605 If this insn has multiple sets, then reemitting the
5606 jump is nontrivial. So instead we just force rerecognition
5607 and hope for the best. */
5610 rtx_jump_insn
*new_rtx
;
5613 rtx_insn
*seq
= targetm
.gen_jump (XEXP (src
, 0));
5614 new_rtx
= emit_jump_insn_before (seq
, insn
);
5615 JUMP_LABEL (new_rtx
) = XEXP (src
, 0);
5616 LABEL_NUSES (XEXP (src
, 0))++;
5618 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5619 note
= find_reg_note (insn
, REG_NON_LOCAL_GOTO
, 0);
5622 XEXP (note
, 1) = NULL_RTX
;
5623 REG_NOTES (new_rtx
) = note
;
5626 cse_cfg_altered
|= delete_insn_and_edges (insn
);
5630 INSN_CODE (insn
) = -1;
5632 /* Do not bother deleting any unreachable code, let jump do it. */
5633 cse_jumps_altered
= true;
5637 /* If destination is volatile, invalidate it and then do no further
5638 processing for this assignment. */
5640 else if (do_not_record
)
5642 invalidate_dest (dest
);
5646 if (sets
[i
].rtl
!= 0 && dest
!= SET_DEST (sets
[i
].rtl
))
5649 sets
[i
].dest_hash
= HASH (SET_DEST (sets
[i
].rtl
), mode
);
5652 invalidate_dest (SET_DEST (sets
[i
].rtl
));
5657 /* If setting CC0, record what it was set to, or a constant, if it
5658 is equivalent to a constant. If it is being set to a floating-point
5659 value, make a COMPARE with the appropriate constant of 0. If we
5660 don't do this, later code can interpret this as a test against
5661 const0_rtx, which can cause problems if we try to put it into an
5662 insn as a floating-point operand. */
5663 if (dest
== cc0_rtx
)
5665 this_insn_cc0
= src_const
&& mode
!= VOIDmode
? src_const
: src
;
5666 this_insn_cc0_mode
= mode
;
5667 if (FLOAT_MODE_P (mode
))
5668 this_insn_cc0
= gen_rtx_COMPARE (VOIDmode
, this_insn_cc0
,
5673 /* Now enter all non-volatile source expressions in the hash table
5674 if they are not already present.
5675 Record their equivalence classes in src_elt.
5676 This way we can insert the corresponding destinations into
5677 the same classes even if the actual sources are no longer in them
5678 (having been invalidated). */
5680 if (src_eqv
&& src_eqv_elt
== 0 && sets
[0].rtl
!= 0 && ! src_eqv_volatile
5681 && ! rtx_equal_p (src_eqv
, SET_DEST (sets
[0].rtl
)))
5683 struct table_elt
*elt
;
5684 struct table_elt
*classp
= sets
[0].src_elt
;
5685 rtx dest
= SET_DEST (sets
[0].rtl
);
5686 machine_mode eqvmode
= GET_MODE (dest
);
5688 if (GET_CODE (dest
) == STRICT_LOW_PART
)
5690 eqvmode
= GET_MODE (SUBREG_REG (XEXP (dest
, 0)));
5693 if (insert_regs (src_eqv
, classp
, 0))
5695 rehash_using_reg (src_eqv
);
5696 src_eqv_hash
= HASH (src_eqv
, eqvmode
);
5698 elt
= insert (src_eqv
, classp
, src_eqv_hash
, eqvmode
);
5699 elt
->in_memory
= src_eqv_in_memory
;
5702 /* Check to see if src_eqv_elt is the same as a set source which
5703 does not yet have an elt, and if so set the elt of the set source
5705 for (i
= 0; i
< n_sets
; i
++)
5706 if (sets
[i
].rtl
&& sets
[i
].src_elt
== 0
5707 && rtx_equal_p (SET_SRC (sets
[i
].rtl
), src_eqv
))
5708 sets
[i
].src_elt
= src_eqv_elt
;
5711 for (i
= 0; i
< n_sets
; i
++)
5712 if (sets
[i
].rtl
&& ! sets
[i
].src_volatile
5713 && ! rtx_equal_p (SET_SRC (sets
[i
].rtl
), SET_DEST (sets
[i
].rtl
)))
5715 if (GET_CODE (SET_DEST (sets
[i
].rtl
)) == STRICT_LOW_PART
)
5717 /* REG_EQUAL in setting a STRICT_LOW_PART
5718 gives an equivalent for the entire destination register,
5719 not just for the subreg being stored in now.
5720 This is a more interesting equivalence, so we arrange later
5721 to treat the entire reg as the destination. */
5722 sets
[i
].src_elt
= src_eqv_elt
;
5723 sets
[i
].src_hash
= src_eqv_hash
;
5727 /* Insert source and constant equivalent into hash table, if not
5729 struct table_elt
*classp
= src_eqv_elt
;
5730 rtx src
= sets
[i
].src
;
5731 rtx dest
= SET_DEST (sets
[i
].rtl
);
5733 = GET_MODE (src
) == VOIDmode
? GET_MODE (dest
) : GET_MODE (src
);
5735 /* It's possible that we have a source value known to be
5736 constant but don't have a REG_EQUAL note on the insn.
5737 Lack of a note will mean src_eqv_elt will be NULL. This
5738 can happen where we've generated a SUBREG to access a
5739 CONST_INT that is already in a register in a wider mode.
5740 Ensure that the source expression is put in the proper
5743 classp
= sets
[i
].src_const_elt
;
5745 if (sets
[i
].src_elt
== 0)
5747 struct table_elt
*elt
;
5749 /* Note that these insert_regs calls cannot remove
5750 any of the src_elt's, because they would have failed to
5751 match if not still valid. */
5752 if (insert_regs (src
, classp
, 0))
5754 rehash_using_reg (src
);
5755 sets
[i
].src_hash
= HASH (src
, mode
);
5757 elt
= insert (src
, classp
, sets
[i
].src_hash
, mode
);
5758 elt
->in_memory
= sets
[i
].src_in_memory
;
5759 /* If inline asm has any clobbers, ensure we only reuse
5760 existing inline asms and never try to put the ASM_OPERANDS
5761 into an insn that isn't inline asm. */
5762 if (GET_CODE (src
) == ASM_OPERANDS
5763 && GET_CODE (x
) == PARALLEL
)
5764 elt
->cost
= MAX_COST
;
5765 sets
[i
].src_elt
= classp
= elt
;
5767 if (sets
[i
].src_const
&& sets
[i
].src_const_elt
== 0
5768 && src
!= sets
[i
].src_const
5769 && ! rtx_equal_p (sets
[i
].src_const
, src
))
5770 sets
[i
].src_elt
= insert (sets
[i
].src_const
, classp
,
5771 sets
[i
].src_const_hash
, mode
);
5774 else if (sets
[i
].src_elt
== 0)
5775 /* If we did not insert the source into the hash table (e.g., it was
5776 volatile), note the equivalence class for the REG_EQUAL value, if any,
5777 so that the destination goes into that class. */
5778 sets
[i
].src_elt
= src_eqv_elt
;
5780 /* Record destination addresses in the hash table. This allows us to
5781 check if they are invalidated by other sets. */
5782 for (i
= 0; i
< n_sets
; i
++)
5786 rtx x
= sets
[i
].inner_dest
;
5787 struct table_elt
*elt
;
5794 mode
= GET_MODE (x
);
5795 hash
= HASH (x
, mode
);
5796 elt
= lookup (x
, hash
, mode
);
5799 if (insert_regs (x
, NULL
, 0))
5801 rtx dest
= SET_DEST (sets
[i
].rtl
);
5803 rehash_using_reg (x
);
5804 hash
= HASH (x
, mode
);
5805 sets
[i
].dest_hash
= HASH (dest
, GET_MODE (dest
));
5807 elt
= insert (x
, NULL
, hash
, mode
);
5810 sets
[i
].dest_addr_elt
= elt
;
5813 sets
[i
].dest_addr_elt
= NULL
;
5817 invalidate_from_clobbers (insn
);
5819 /* Some registers are invalidated by subroutine calls. Memory is
5820 invalidated by non-constant calls. */
5824 if (!(RTL_CONST_OR_PURE_CALL_P (insn
)))
5825 invalidate_memory ();
5827 /* For const/pure calls, invalidate any argument slots, because
5828 those are owned by the callee. */
5829 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
5830 if (GET_CODE (XEXP (tem
, 0)) == USE
5831 && MEM_P (XEXP (XEXP (tem
, 0), 0)))
5832 invalidate (XEXP (XEXP (tem
, 0), 0), VOIDmode
);
5833 invalidate_for_call (insn
);
5836 /* Now invalidate everything set by this instruction.
5837 If a SUBREG or other funny destination is being set,
5838 sets[i].rtl is still nonzero, so here we invalidate the reg
5839 a part of which is being set. */
5841 for (i
= 0; i
< n_sets
; i
++)
5844 /* We can't use the inner dest, because the mode associated with
5845 a ZERO_EXTRACT is significant. */
5846 rtx dest
= SET_DEST (sets
[i
].rtl
);
5848 /* Needed for registers to remove the register from its
5849 previous quantity's chain.
5850 Needed for memory if this is a nonvarying address, unless
5851 we have just done an invalidate_memory that covers even those. */
5852 if (REG_P (dest
) || GET_CODE (dest
) == SUBREG
)
5853 invalidate (dest
, VOIDmode
);
5854 else if (MEM_P (dest
))
5855 invalidate (dest
, VOIDmode
);
5856 else if (GET_CODE (dest
) == STRICT_LOW_PART
5857 || GET_CODE (dest
) == ZERO_EXTRACT
)
5858 invalidate (XEXP (dest
, 0), GET_MODE (dest
));
5861 /* Don't cse over a call to setjmp; on some machines (eg VAX)
5862 the regs restored by the longjmp come from a later time
5864 if (CALL_P (insn
) && find_reg_note (insn
, REG_SETJMP
, NULL
))
5866 flush_hash_table ();
5870 /* Make sure registers mentioned in destinations
5871 are safe for use in an expression to be inserted.
5872 This removes from the hash table
5873 any invalid entry that refers to one of these registers.
5875 We don't care about the return value from mention_regs because
5876 we are going to hash the SET_DEST values unconditionally. */
5878 for (i
= 0; i
< n_sets
; i
++)
5882 rtx x
= SET_DEST (sets
[i
].rtl
);
5888 /* We used to rely on all references to a register becoming
5889 inaccessible when a register changes to a new quantity,
5890 since that changes the hash code. However, that is not
5891 safe, since after HASH_SIZE new quantities we get a
5892 hash 'collision' of a register with its own invalid
5893 entries. And since SUBREGs have been changed not to
5894 change their hash code with the hash code of the register,
5895 it wouldn't work any longer at all. So we have to check
5896 for any invalid references lying around now.
5897 This code is similar to the REG case in mention_regs,
5898 but it knows that reg_tick has been incremented, and
5899 it leaves reg_in_table as -1 . */
5900 unsigned int regno
= REGNO (x
);
5901 unsigned int endregno
= END_REGNO (x
);
5904 for (i
= regno
; i
< endregno
; i
++)
5906 if (REG_IN_TABLE (i
) >= 0)
5908 remove_invalid_refs (i
);
5909 REG_IN_TABLE (i
) = -1;
5916 /* We may have just removed some of the src_elt's from the hash table.
5917 So replace each one with the current head of the same class.
5918 Also check if destination addresses have been removed. */
5920 for (i
= 0; i
< n_sets
; i
++)
5923 if (sets
[i
].dest_addr_elt
5924 && sets
[i
].dest_addr_elt
->first_same_value
== 0)
5926 /* The elt was removed, which means this destination is not
5927 valid after this instruction. */
5928 sets
[i
].rtl
= NULL_RTX
;
5930 else if (sets
[i
].src_elt
&& sets
[i
].src_elt
->first_same_value
== 0)
5931 /* If elt was removed, find current head of same class,
5932 or 0 if nothing remains of that class. */
5934 struct table_elt
*elt
= sets
[i
].src_elt
;
5936 while (elt
&& elt
->prev_same_value
)
5937 elt
= elt
->prev_same_value
;
5939 while (elt
&& elt
->first_same_value
== 0)
5940 elt
= elt
->next_same_value
;
5941 sets
[i
].src_elt
= elt
? elt
->first_same_value
: 0;
  /* Now insert the destinations into their equivalence classes.  */

  for (i = 0; i < n_sets; i++)
    if (sets[i].rtl)
      {
	rtx dest = SET_DEST (sets[i].rtl);
	struct table_elt *elt;

	/* Don't record value if we are not supposed to risk allocating
	   floating-point values in registers that might be wider than
	   memory.  */
	if ((flag_float_store
	     && MEM_P (dest)
	     && FLOAT_MODE_P (GET_MODE (dest)))
	    /* Don't record BLKmode values, because we don't know the
	       size of it, and can't be sure that other BLKmode values
	       have the same or smaller size.  */
	    || GET_MODE (dest) == BLKmode
	    /* If we didn't put a REG_EQUAL value or a source into the hash
	       table, there is no point in recording DEST.  */
	    || sets[i].src_elt == 0)
	  continue;

	/* STRICT_LOW_PART isn't part of the value BEING set,
	   and neither is the SUBREG inside it.
	   Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
	if (GET_CODE (dest) == STRICT_LOW_PART)
	  dest = SUBREG_REG (XEXP (dest, 0));

	if (REG_P (dest) || GET_CODE (dest) == SUBREG)
	  /* Registers must also be inserted into chains for quantities.  */
	  if (insert_regs (dest, sets[i].src_elt, 1))
	    {
	      /* If `insert_regs' changes something, the hash code must be
		 recalculated.  */
	      rehash_using_reg (dest);
	      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
	    }

	/* If DEST is a paradoxical SUBREG, don't record DEST since the bits
	   outside the mode of GET_MODE (SUBREG_REG (dest)) are undefined.  */
	if (paradoxical_subreg_p (dest))
	  continue;

	elt = insert (dest, sets[i].src_elt,
		      sets[i].dest_hash, GET_MODE (dest));

	/* If this is a constant, insert the constant anchors with the
	   equivalent register-offset expressions using register DEST.  */
	if (targetm.const_anchor
	    && REG_P (dest)
	    && SCALAR_INT_MODE_P (GET_MODE (dest))
	    && GET_CODE (sets[i].src_elt->exp) == CONST_INT)
	  insert_const_anchors (dest, sets[i].src_elt->exp, GET_MODE (dest));

	elt->in_memory = (MEM_P (sets[i].inner_dest)
			  && !MEM_READONLY_P (sets[i].inner_dest));
6003 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6004 narrower than M2, and both M1 and M2 are the same number of words,
6005 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6006 make that equivalence as well.
6008 However, BAR may have equivalences for which gen_lowpart
6009 will produce a simpler value than gen_lowpart applied to
6010 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6011 BAR's equivalences. If we don't get a simplified form, make
6012 the SUBREG. It will not be used in an equivalence, but will
6013 cause two similar assignments to be detected.
6015 Note the loop below will find SUBREG_REG (DEST) since we have
6016 already entered SRC and DEST of the SET in the table. */
6018 if (GET_CODE (dest
) == SUBREG
6019 && (known_equal_after_align_down
6020 (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
))) - 1,
6021 GET_MODE_SIZE (GET_MODE (dest
)) - 1,
6023 && !partial_subreg_p (dest
)
6024 && sets
[i
].src_elt
!= 0)
6026 machine_mode new_mode
= GET_MODE (SUBREG_REG (dest
));
6027 struct table_elt
*elt
, *classp
= 0;
6029 for (elt
= sets
[i
].src_elt
->first_same_value
; elt
;
6030 elt
= elt
->next_same_value
)
6034 struct table_elt
*src_elt
;
6036 /* Ignore invalid entries. */
6037 if (!REG_P (elt
->exp
)
6038 && ! exp_equiv_p (elt
->exp
, elt
->exp
, 1, false))
6041 /* We may have already been playing subreg games. If the
6042 mode is already correct for the destination, use it. */
6043 if (GET_MODE (elt
->exp
) == new_mode
)
6048 = subreg_lowpart_offset (new_mode
, GET_MODE (dest
));
6049 new_src
= simplify_gen_subreg (new_mode
, elt
->exp
,
6050 GET_MODE (dest
), byte
);
6053 /* The call to simplify_gen_subreg fails if the value
6054 is VOIDmode, yet we can't do any simplification, e.g.
6055 for EXPR_LISTs denoting function call results.
6056 It is invalid to construct a SUBREG with a VOIDmode
6057 SUBREG_REG, hence a zero new_src means we can't do
6058 this substitution. */
6062 src_hash
= HASH (new_src
, new_mode
);
6063 src_elt
= lookup (new_src
, src_hash
, new_mode
);
		/* Put the new source in the hash table if it isn't
		   already.  */
6069 if (insert_regs (new_src
, classp
, 0))
6071 rehash_using_reg (new_src
);
6072 src_hash
= HASH (new_src
, new_mode
);
6074 src_elt
= insert (new_src
, classp
, src_hash
, new_mode
);
6075 src_elt
->in_memory
= elt
->in_memory
;
6076 if (GET_CODE (new_src
) == ASM_OPERANDS
6077 && elt
->cost
== MAX_COST
)
6078 src_elt
->cost
= MAX_COST
;
6080 else if (classp
&& classp
!= src_elt
->first_same_value
)
6081 /* Show that two things that we've seen before are
6082 actually the same. */
6083 merge_equiv_classes (src_elt
, classp
);
6085 classp
= src_elt
->first_same_value
;
6086 /* Ignore invalid entries. */
6088 && !REG_P (classp
->exp
)
6089 && ! exp_equiv_p (classp
->exp
, classp
->exp
, 1, false))
6090 classp
= classp
->next_same_value
;
  /* Special handling for (set REG0 REG1) where REG0 is the
     "cheapest", cheaper than REG1.  After cse, REG1 will probably not
     be used in the sequel, so (if easily done) change this insn to
     (set REG1 REG0) and replace REG1 with REG0 in the previous insn
     that computed their value.  Then REG1 will become a dead store
     and won't cloud the situation for later optimizations.

     Do not make this change if REG1 is a hard register, because it will
     then be used in the sequel and we may be changing a two-operand insn
     into a three-operand insn.

     Also do not do this if we are operating on a copy of INSN.  */

  if (n_sets == 1 && sets[0].rtl)
    try_back_substitute_reg (sets[0].rtl, insn);
}
/* Remove from the hash table all expressions that reference memory.  */

static void
invalidate_memory (void)
{
  int i;
  struct table_elt *p, *next;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (p->in_memory)
	  remove_from_table (p, i);
      }
}
/* Perform invalidation on the basis of everything about INSN,
   except for invalidating the actual places that are SET in it.
   This includes the places CLOBBERed, and anything that might
   alias with something that is SET or CLOBBERed.  */

static void
invalidate_from_clobbers (rtx_insn *insn)
{
  rtx x = PATTERN (insn);

  if (GET_CODE (x) == CLOBBER)
    {
      rtx ref = XEXP (x, 0);
      if (ref)
	{
	  if (REG_P (ref) || GET_CODE (ref) == SUBREG
	      || MEM_P (ref))
	    invalidate (ref, VOIDmode);
	  else if (GET_CODE (ref) == STRICT_LOW_PART
		   || GET_CODE (ref) == ZERO_EXTRACT)
	    invalidate (XEXP (ref, 0), GET_MODE (ref));
	}
    }
  else if (GET_CODE (x) == PARALLEL)
    {
      int i;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (GET_CODE (y) == CLOBBER)
	    {
	      rtx ref = XEXP (y, 0);
	      if (REG_P (ref) || GET_CODE (ref) == SUBREG
		  || MEM_P (ref))
		invalidate (ref, VOIDmode);
	      else if (GET_CODE (ref) == STRICT_LOW_PART
		       || GET_CODE (ref) == ZERO_EXTRACT)
		invalidate (XEXP (ref, 0), GET_MODE (ref));
	    }
	}
    }
}
/* Perform invalidation on the basis of everything about INSN.
   This includes the places CLOBBERed, and anything that might
   alias with something that is SET or CLOBBERed.  */

static void
invalidate_from_sets_and_clobbers (rtx_insn *insn)
{
  rtx tem;
  rtx x = PATTERN (insn);

  if (CALL_P (insn))
    {
      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
	{
	  rtx temx = XEXP (tem, 0);
	  if (GET_CODE (temx) == CLOBBER)
	    invalidate (SET_DEST (temx), VOIDmode);
	}
    }

  /* Ensure we invalidate the destination register of a CALL insn.
     This is necessary for machines where this register is a fixed_reg,
     because no other code would invalidate it.  */
  if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
    invalidate (SET_DEST (x), VOIDmode);

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (GET_CODE (y) == CLOBBER)
	    {
	      rtx clobbered = XEXP (y, 0);

	      if (REG_P (clobbered)
		  || GET_CODE (clobbered) == SUBREG)
		invalidate (clobbered, VOIDmode);
	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
		       || GET_CODE (clobbered) == ZERO_EXTRACT)
		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
	    }
	  else if (GET_CODE (y) == SET && GET_CODE (SET_SRC (y)) == CALL)
	    invalidate (SET_DEST (y), VOIDmode);
	}
    }
}
static rtx cse_process_note (rtx);

/* A simplify_replace_fn_rtx callback for cse_process_note.  Process X,
   part of the REG_NOTES of an insn.  Replace any registers with either
   an equivalent constant or the canonical form of the register.
   Only replace addresses if the containing MEM remains valid.

   Return the replacement for X, or null if it should be simplified
   recursively.  */

static rtx
cse_process_note_1 (rtx x, const_rtx, void *)
{
  if (MEM_P (x))
    {
      validate_change (x, &XEXP (x, 0), cse_process_note (XEXP (x, 0)), false);
      return x;
    }

  if (REG_P (x))
    {
      int i = REG_QTY (REGNO (x));

      /* Return a constant or a constant register.  */
      if (REGNO_QTY_VALID_P (REGNO (x)))
	{
	  struct qty_table_elem *ent = &qty_table[i];

	  if (ent->const_rtx != NULL_RTX
	      && (CONSTANT_P (ent->const_rtx)
		  || REG_P (ent->const_rtx)))
	    {
	      rtx new_rtx = gen_lowpart (GET_MODE (x), ent->const_rtx);
	      if (new_rtx)
		return copy_rtx (new_rtx);
	    }
	}

      /* Otherwise, canonicalize this register.  */
      return canon_reg (x, NULL);
    }

  return NULL_RTX;
}

/* Process X, part of the REG_NOTES of an insn.  Replace any registers in it
   with either an equivalent constant or the canonical form of the register.
   Only replace addresses if the containing MEM remains valid.  */

static rtx
cse_process_note (rtx x)
{
  return simplify_replace_fn_rtx (x, NULL_RTX, cse_process_note_1, NULL);
}
6280 /* Find a path in the CFG, starting with FIRST_BB to perform CSE on.
6282 DATA is a pointer to a struct cse_basic_block_data, that is used to
6284 It is filled with a queue of basic blocks, starting with FIRST_BB
6285 and following a trace through the CFG.
6287 If all paths starting at FIRST_BB have been followed, or no new path
6288 starting at FIRST_BB can be constructed, this function returns FALSE.
6289 Otherwise, DATA->path is filled and the function returns TRUE indicating
6290 that a path to follow was found.
6292 If FOLLOW_JUMPS is false, the maximum path length is 1 and the only
6293 block in the path will be FIRST_BB. */
6296 cse_find_path (basic_block first_bb
, struct cse_basic_block_data
*data
,
6303 bitmap_set_bit (cse_visited_basic_blocks
, first_bb
->index
);
6305 /* See if there is a previous path. */
6306 path_size
= data
->path_size
;
6308 /* There is a previous path. Make sure it started with FIRST_BB. */
6310 gcc_assert (data
->path
[0].bb
== first_bb
);
6312 /* There was only one basic block in the last path. Clear the path and
6313 return, so that paths starting at another basic block can be tried. */
6320 /* If the path was empty from the beginning, construct a new path. */
6322 data
->path
[path_size
++].bb
= first_bb
;
6325 /* Otherwise, path_size must be equal to or greater than 2, because
6326 a previous path exists that is at least two basic blocks long.
6328 Update the previous branch path, if any. If the last branch was
6329 previously along the branch edge, take the fallthrough edge now. */
6330 while (path_size
>= 2)
6332 basic_block last_bb_in_path
, previous_bb_in_path
;
6336 last_bb_in_path
= data
->path
[path_size
].bb
;
6337 previous_bb_in_path
= data
->path
[path_size
- 1].bb
;
6339 /* If we previously followed a path along the branch edge, try
6340 the fallthru edge now. */
6341 if (EDGE_COUNT (previous_bb_in_path
->succs
) == 2
6342 && any_condjump_p (BB_END (previous_bb_in_path
))
6343 && (e
= find_edge (previous_bb_in_path
, last_bb_in_path
))
6344 && e
== BRANCH_EDGE (previous_bb_in_path
))
6346 bb
= FALLTHRU_EDGE (previous_bb_in_path
)->dest
;
6347 if (bb
!= EXIT_BLOCK_PTR_FOR_FN (cfun
)
6348 && single_pred_p (bb
)
6349 /* We used to assert here that we would only see blocks
6350 that we have not visited yet. But we may end up
6351 visiting basic blocks twice if the CFG has changed
6352 in this run of cse_main, because when the CFG changes
	       the topological sort of the CFG also changes.  A basic
	       block that previously had more than two predecessors
6355 may now have a single predecessor, and become part of
6356 a path that starts at another basic block.
6358 We still want to visit each basic block only once, so
6359 halt the path here if we have already visited BB. */
6360 && !bitmap_bit_p (cse_visited_basic_blocks
, bb
->index
))
6362 bitmap_set_bit (cse_visited_basic_blocks
, bb
->index
);
6363 data
->path
[path_size
++].bb
= bb
;
6368 data
->path
[path_size
].bb
= NULL
;
6371 /* If only one block remains in the path, bail. */
6379 /* Extend the path if possible. */
6382 bb
= data
->path
[path_size
- 1].bb
;
6383 while (bb
&& path_size
< param_max_cse_path_length
)
6385 if (single_succ_p (bb
))
6386 e
= single_succ_edge (bb
);
6387 else if (EDGE_COUNT (bb
->succs
) == 2
6388 && any_condjump_p (BB_END (bb
)))
6390 /* First try to follow the branch. If that doesn't lead
6391 to a useful path, follow the fallthru edge. */
6392 e
= BRANCH_EDGE (bb
);
6393 if (!single_pred_p (e
->dest
))
6394 e
= FALLTHRU_EDGE (bb
);
6400 && !((e
->flags
& EDGE_ABNORMAL_CALL
) && cfun
->has_nonlocal_label
)
6401 && e
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
)
6402 && single_pred_p (e
->dest
)
6403 /* Avoid visiting basic blocks twice. The large comment
6404 above explains why this can happen. */
6405 && !bitmap_bit_p (cse_visited_basic_blocks
, e
->dest
->index
))
6407 basic_block bb2
= e
->dest
;
6408 bitmap_set_bit (cse_visited_basic_blocks
, bb2
->index
);
6409 data
->path
[path_size
++].bb
= bb2
;
6418 data
->path_size
= path_size
;
6419 return path_size
!= 0;
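
/* Illustrative sketch (not part of the pass): the typical driver loop
   around cse_find_path, mirroring what cse_main does below.  Each call
   either fills DATA->path with a new extended-basic-block trace rooted
   at BB, or returns false once no further path starting at BB exists.
   Disabled with "#if 0"; it assumes only functions defined in this
   file.  */
#if 0
static void
process_all_paths_from (basic_block bb, struct cse_basic_block_data *data)
{
  while (cse_find_path (bb, data, flag_cse_follow_jumps))
    {
      cse_prescan_path (data);
      if (data->nsets != 0)
	cse_extended_basic_block (data);
    }
}
#endif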
/* Dump the path in DATA to file F.  NSETS is the number of sets
   in the path.  */

static void
cse_dump_path (struct cse_basic_block_data *data, int nsets, FILE *f)
{
  int path_entry;

  fprintf (f, ";; Following path with %d sets: ", nsets);
  for (path_entry = 0; path_entry < data->path_size; path_entry++)
    fprintf (f, "%d ", (data->path[path_entry].bb)->index);
  fputc ('\n', f);
  fflush (f);
}
/* Return true if BB has exception handling successor edges.  */

static bool
have_eh_succ_edges (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->flags & EDGE_EH)
      return true;

  return false;
}
/* Scan to the end of the path described by DATA.  Return an estimate of
   the total number of SETs of all insns in the path.  */

static void
cse_prescan_path (struct cse_basic_block_data *data)
{
  int nsets = 0;
  int path_entry;
  int path_size = data->path_size;

  /* Scan to end of each basic block in the path.  */
  for (path_entry = 0; path_entry < path_size; path_entry++)
    {
      basic_block bb;
      rtx_insn *insn;

      bb = data->path[path_entry].bb;

      FOR_BB_INSNS (bb, insn)
	{
	  if (!INSN_P (insn))
	    continue;

	  /* A PARALLEL can have lots of SETs in it,
	     especially if it is really an ASM_OPERANDS.  */
	  if (GET_CODE (PATTERN (insn)) == PARALLEL)
	    nsets += XVECLEN (PATTERN (insn), 0);
	  else
	    nsets += 1;
	}
    }

  data->nsets = nsets;
}
/* Return true if the pattern of INSN uses a LABEL_REF for which
   there isn't a REG_LABEL_OPERAND note.  */

static bool
check_for_label_ref (rtx_insn *insn)
{
  /* If this insn uses a LABEL_REF and there isn't a REG_LABEL_OPERAND
     note for it, we must rerun jump since it needs to place the note.  If
     this is a LABEL_REF for a CODE_LABEL that isn't in the insn chain,
     don't do this since no REG_LABEL_OPERAND will be added.  */
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
    {
      const_rtx x = *iter;
      if (GET_CODE (x) == LABEL_REF
	  && !LABEL_REF_NONLOCAL_P (x)
	  && (!JUMP_P (insn)
	      || !label_is_jump_target_p (label_ref_label (x), insn))
	  && LABEL_P (label_ref_label (x))
	  && INSN_UID (label_ref_label (x)) != 0
	  && !find_reg_note (insn, REG_LABEL_OPERAND, label_ref_label (x)))
	return true;
    }
  return false;
}
6515 /* Process a single extended basic block described by EBB_DATA. */
6518 cse_extended_basic_block (struct cse_basic_block_data
*ebb_data
)
6520 int path_size
= ebb_data
->path_size
;
6524 /* Allocate the space needed by qty_table. */
6525 qty_table
= XNEWVEC (struct qty_table_elem
, max_qty
);
6528 cse_ebb_live_in
= df_get_live_in (ebb_data
->path
[0].bb
);
6529 cse_ebb_live_out
= df_get_live_out (ebb_data
->path
[path_size
- 1].bb
);
6530 for (path_entry
= 0; path_entry
< path_size
; path_entry
++)
6535 bb
= ebb_data
->path
[path_entry
].bb
;
6537 /* Invalidate recorded information for eh regs if there is an EH
6538 edge pointing to that bb. */
6539 if (bb_has_eh_pred (bb
))
6543 FOR_EACH_ARTIFICIAL_DEF (def
, bb
->index
)
6544 if (DF_REF_FLAGS (def
) & DF_REF_AT_TOP
)
6545 invalidate (DF_REF_REG (def
), GET_MODE (DF_REF_REG (def
)));
6548 optimize_this_for_speed_p
= optimize_bb_for_speed_p (bb
);
6549 FOR_BB_INSNS (bb
, insn
)
6551 /* If we have processed 1,000 insns, flush the hash table to
6552 avoid extreme quadratic behavior. We must not include NOTEs
6553 in the count since there may be more of them when generating
6554 debugging information. If we clear the table at different
6555 times, code generated with -g -O might be different than code
6556 generated with -O but not -g.
6558 FIXME: This is a real kludge and needs to be done some other
6560 if (NONDEBUG_INSN_P (insn
)
6561 && num_insns
++ > param_max_cse_insns
)
6563 flush_hash_table ();
6569 /* Process notes first so we have all notes in canonical forms
6570 when looking for duplicate operations. */
6571 bool changed
= false;
6572 for (rtx note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
6573 if (REG_NOTE_KIND (note
) == REG_EQUAL
)
6575 rtx newval
= cse_process_note (XEXP (note
, 0));
6576 if (newval
!= XEXP (note
, 0))
6578 XEXP (note
, 0) = newval
;
6583 df_notes_rescan (insn
);
6587 /* If we haven't already found an insn where we added a LABEL_REF,
6589 if (INSN_P (insn
) && !recorded_label_ref
6590 && check_for_label_ref (insn
))
6591 recorded_label_ref
= true;
6593 if (HAVE_cc0
&& NONDEBUG_INSN_P (insn
))
6595 /* If the previous insn sets CC0 and this insn no
6596 longer references CC0, delete the previous insn.
6597 Here we use fact that nothing expects CC0 to be
6598 valid over an insn, which is true until the final
6600 rtx_insn
*prev_insn
;
6603 prev_insn
= prev_nonnote_nondebug_insn (insn
);
6604 if (prev_insn
&& NONJUMP_INSN_P (prev_insn
)
6605 && (tem
= single_set (prev_insn
)) != NULL_RTX
6606 && SET_DEST (tem
) == cc0_rtx
6607 && ! reg_mentioned_p (cc0_rtx
, PATTERN (insn
)))
6608 delete_insn (prev_insn
);
6610 /* If this insn is not the last insn in the basic
6611 block, it will be PREV_INSN(insn) in the next
6612 iteration. If we recorded any CC0-related
6613 information for this insn, remember it. */
6614 if (insn
!= BB_END (bb
))
6616 prev_insn_cc0
= this_insn_cc0
;
6617 prev_insn_cc0_mode
= this_insn_cc0_mode
;
6623 /* With non-call exceptions, we are not always able to update
6624 the CFG properly inside cse_insn. So clean up possibly
6625 redundant EH edges here. */
6626 if (cfun
->can_throw_non_call_exceptions
&& have_eh_succ_edges (bb
))
6627 cse_cfg_altered
|= purge_dead_edges (bb
);
6629 /* If we changed a conditional jump, we may have terminated
6630 the path we are following. Check that by verifying that
6631 the edge we would take still exists. If the edge does
6632 not exist anymore, purge the remainder of the path.
6633 Note that this will cause us to return to the caller. */
6634 if (path_entry
< path_size
- 1)
6636 basic_block next_bb
= ebb_data
->path
[path_entry
+ 1].bb
;
6637 if (!find_edge (bb
, next_bb
))
6643 /* If we truncate the path, we must also reset the
6644 visited bit on the remaining blocks in the path,
6645 or we will never visit them at all. */
6646 bitmap_clear_bit (cse_visited_basic_blocks
,
6647 ebb_data
->path
[path_size
].bb
->index
);
6648 ebb_data
->path
[path_size
].bb
= NULL
;
6650 while (path_size
- 1 != path_entry
);
6651 ebb_data
->path_size
= path_size
;
6655 /* If this is a conditional jump insn, record any known
6656 equivalences due to the condition being tested. */
6658 if (path_entry
< path_size
- 1
6659 && EDGE_COUNT (bb
->succs
) == 2
6661 && single_set (insn
)
6662 && any_condjump_p (insn
))
6664 basic_block next_bb
= ebb_data
->path
[path_entry
+ 1].bb
;
6665 bool taken
= (next_bb
== BRANCH_EDGE (bb
)->dest
);
6666 record_jump_equiv (insn
, taken
);
6669 /* Clear the CC0-tracking related insns, they can't provide
6670 useful information across basic block boundaries. */
6674 gcc_assert (next_qty
<= max_qty
);
6680 /* Perform cse on the instructions of a function.
6681 F is the first instruction.
6682 NREGS is one plus the highest pseudo-reg number used in the instruction.
6684 Return 2 if jump optimizations should be redone due to simplifications
6685 in conditional jump instructions.
6686 Return 1 if the CFG should be cleaned up because it has been modified.
6687 Return 0 otherwise. */
6690 cse_main (rtx_insn
*f ATTRIBUTE_UNUSED
, int nregs
)
6692 struct cse_basic_block_data ebb_data
;
6694 int *rc_order
= XNEWVEC (int, last_basic_block_for_fn (cfun
));
  /* CSE doesn't use dominance info but can invalidate it in different ways.
     For simplicity free dominance info here.  */
6699 free_dominance_info (CDI_DOMINATORS
);
6701 df_set_flags (DF_LR_RUN_DCE
);
6702 df_note_add_problem ();
6704 df_set_flags (DF_DEFER_INSN_RESCAN
);
6706 reg_scan (get_insns (), max_reg_num ());
6707 init_cse_reg_info (nregs
);
6709 ebb_data
.path
= XNEWVEC (struct branch_path
,
6710 param_max_cse_path_length
);
6712 cse_cfg_altered
= false;
6713 cse_jumps_altered
= false;
6714 recorded_label_ref
= false;
6715 constant_pool_entries_cost
= 0;
6716 constant_pool_entries_regcost
= 0;
6717 ebb_data
.path_size
= 0;
6719 rtl_hooks
= cse_rtl_hooks
;
6722 init_alias_analysis ();
6724 reg_eqv_table
= XNEWVEC (struct reg_eqv_elem
, nregs
);
6726 /* Set up the table of already visited basic blocks. */
6727 cse_visited_basic_blocks
= sbitmap_alloc (last_basic_block_for_fn (cfun
));
6728 bitmap_clear (cse_visited_basic_blocks
);
6730 /* Loop over basic blocks in reverse completion order (RPO),
6731 excluding the ENTRY and EXIT blocks. */
6732 n_blocks
= pre_and_rev_post_order_compute (NULL
, rc_order
, false);
6734 while (i
< n_blocks
)
6736 /* Find the first block in the RPO queue that we have not yet
6737 processed before. */
6740 bb
= BASIC_BLOCK_FOR_FN (cfun
, rc_order
[i
++]);
6742 while (bitmap_bit_p (cse_visited_basic_blocks
, bb
->index
)
6745 /* Find all paths starting with BB, and process them. */
6746 while (cse_find_path (bb
, &ebb_data
, flag_cse_follow_jumps
))
6748 /* Pre-scan the path. */
6749 cse_prescan_path (&ebb_data
);
6751 /* If this basic block has no sets, skip it. */
6752 if (ebb_data
.nsets
== 0)
6755 /* Get a reasonable estimate for the maximum number of qty's
6756 needed for this path. For this, we take the number of sets
6757 and multiply that by MAX_RECOG_OPERANDS. */
6758 max_qty
= ebb_data
.nsets
* MAX_RECOG_OPERANDS
;
6760 /* Dump the path we're about to process. */
6762 cse_dump_path (&ebb_data
, ebb_data
.nsets
, dump_file
);
6764 cse_extended_basic_block (&ebb_data
);
6769 end_alias_analysis ();
6770 free (reg_eqv_table
);
6771 free (ebb_data
.path
);
6772 sbitmap_free (cse_visited_basic_blocks
);
6774 rtl_hooks
= general_rtl_hooks
;
6776 if (cse_jumps_altered
|| recorded_label_ref
)
6778 else if (cse_cfg_altered
)
6784 /* Count the number of times registers are used (not set) in X.
6785 COUNTS is an array in which we accumulate the count, INCR is how much
6786 we count each register usage.
6788 Don't count a usage of DEST, which is the SET_DEST of a SET which
6789 contains X in its SET_SRC. This is because such a SET does not
6790 modify the liveness of DEST.
6791 DEST is set to pc_rtx for a trapping insn, or for an insn with side effects.
6792 We must then count uses of a SET_DEST regardless, because the insn can't be
6796 count_reg_usage (rtx x
, int *counts
, rtx dest
, int incr
)
6806 switch (code
= GET_CODE (x
))
6810 counts
[REGNO (x
)] += incr
;
6822 /* If we are clobbering a MEM, mark any registers inside the address
6824 if (MEM_P (XEXP (x
, 0)))
6825 count_reg_usage (XEXP (XEXP (x
, 0), 0), counts
, NULL_RTX
, incr
);
6829 /* Unless we are setting a REG, count everything in SET_DEST. */
6830 if (!REG_P (SET_DEST (x
)))
6831 count_reg_usage (SET_DEST (x
), counts
, NULL_RTX
, incr
);
6832 count_reg_usage (SET_SRC (x
), counts
,
6833 dest
? dest
: SET_DEST (x
),
6843 /* We expect dest to be NULL_RTX here. If the insn may throw,
6844 or if it cannot be deleted due to side-effects, mark this fact
6845 by setting DEST to pc_rtx. */
6846 if ((!cfun
->can_delete_dead_exceptions
&& !insn_nothrow_p (x
))
6847 || side_effects_p (PATTERN (x
)))
6849 if (code
== CALL_INSN
)
6850 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x
), counts
, dest
, incr
);
6851 count_reg_usage (PATTERN (x
), counts
, dest
, incr
);
6853 /* Things used in a REG_EQUAL note aren't dead since loop may try to
6856 note
= find_reg_equal_equiv_note (x
);
6859 rtx eqv
= XEXP (note
, 0);
6861 if (GET_CODE (eqv
) == EXPR_LIST
)
6862 /* This REG_EQUAL note describes the result of a function call.
6863 Process all the arguments. */
6866 count_reg_usage (XEXP (eqv
, 0), counts
, dest
, incr
);
6867 eqv
= XEXP (eqv
, 1);
6869 while (eqv
&& GET_CODE (eqv
) == EXPR_LIST
);
6871 count_reg_usage (eqv
, counts
, dest
, incr
);
6876 if (REG_NOTE_KIND (x
) == REG_EQUAL
6877 || (REG_NOTE_KIND (x
) != REG_NONNEG
&& GET_CODE (XEXP (x
,0)) == USE
)
6878 /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
6879 involving registers in the address. */
6880 || GET_CODE (XEXP (x
, 0)) == CLOBBER
)
6881 count_reg_usage (XEXP (x
, 0), counts
, NULL_RTX
, incr
);
6883 count_reg_usage (XEXP (x
, 1), counts
, NULL_RTX
, incr
);
6887 /* Iterate over just the inputs, not the constraints as well. */
6888 for (i
= ASM_OPERANDS_INPUT_LENGTH (x
) - 1; i
>= 0; i
--)
6889 count_reg_usage (ASM_OPERANDS_INPUT (x
, i
), counts
, dest
, incr
);
6900 fmt
= GET_RTX_FORMAT (code
);
6901 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
6904 count_reg_usage (XEXP (x
, i
), counts
, dest
, incr
);
6905 else if (fmt
[i
] == 'E')
6906 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
6907 count_reg_usage (XVECEXP (x
, i
, j
), counts
, dest
, incr
);
/* Return true if X is a dead register.  */

static bool
is_dead_reg (const_rtx x, int *counts)
{
  return (REG_P (x)
	  && REGNO (x) >= FIRST_PSEUDO_REGISTER
	  && counts[REGNO (x)] == 0);
}
/* Return true if set is live.  */

static bool
set_live_p (rtx set, rtx_insn *insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
	    int *counts)
{
  rtx_insn *tem;

  if (set_noop_p (set))
    ;

  else if (GET_CODE (SET_DEST (set)) == CC0
	   && !side_effects_p (SET_SRC (set))
	   && ((tem = next_nonnote_nondebug_insn (insn)) == NULL_RTX
	       || !INSN_P (tem)
	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
    return false;
  else if (!is_dead_reg (SET_DEST (set), counts)
	   || side_effects_p (SET_SRC (set)))
    return true;

  return false;
}
6943 /* Return true if insn is live. */
6946 insn_live_p (rtx_insn
*insn
, int *counts
)
6949 if (!cfun
->can_delete_dead_exceptions
&& !insn_nothrow_p (insn
))
6951 else if (GET_CODE (PATTERN (insn
)) == SET
)
6952 return set_live_p (PATTERN (insn
), insn
, counts
);
6953 else if (GET_CODE (PATTERN (insn
)) == PARALLEL
)
6955 for (i
= XVECLEN (PATTERN (insn
), 0) - 1; i
>= 0; i
--)
6957 rtx elt
= XVECEXP (PATTERN (insn
), 0, i
);
6959 if (GET_CODE (elt
) == SET
)
6961 if (set_live_p (elt
, insn
, counts
))
6964 else if (GET_CODE (elt
) != CLOBBER
&& GET_CODE (elt
) != USE
)
6969 else if (DEBUG_INSN_P (insn
))
6973 if (DEBUG_MARKER_INSN_P (insn
))
6976 for (next
= NEXT_INSN (insn
); next
; next
= NEXT_INSN (next
))
6979 else if (!DEBUG_INSN_P (next
))
6981 /* If we find an inspection point, such as a debug begin stmt,
6982 we want to keep the earlier debug insn. */
6983 else if (DEBUG_MARKER_INSN_P (next
))
6985 else if (INSN_VAR_LOCATION_DECL (insn
) == INSN_VAR_LOCATION_DECL (next
))
/* Count the number of stores into pseudo.  Callback for note_stores.  */

static void
count_stores (rtx x, const_rtx set ATTRIBUTE_UNUSED, void *data)
{
  int *counts = (int *) data;
  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
    counts[REGNO (x)]++;
}
/* Return true if DEBUG_INSN pattern PAT needs to be reset because some dead
   pseudo doesn't have a replacement.  COUNTS[X] is zero if register X
   is dead and REPLACEMENTS[X] is null if it has no replacement.
   Set *SEEN_REPL to true if we see a dead register that does have
   a replacement.  */

static bool
is_dead_debug_insn (const_rtx pat, int *counts, rtx *replacements,
		    bool *seen_repl)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, pat, NONCONST)
    {
      const_rtx x = *iter;
      if (is_dead_reg (x, counts))
	{
	  if (replacements && replacements[REGNO (x)] != NULL_RTX)
	    *seen_repl = true;
	  else
	    return true;
	}
    }
  return false;
}
/* Replace a dead pseudo in a DEBUG_INSN with replacement DEBUG_EXPR.
   Callback for simplify_replace_fn_rtx.  */

static rtx
replace_dead_reg (rtx x, const_rtx old_rtx ATTRIBUTE_UNUSED, void *data)
{
  rtx *replacements = (rtx *) data;

  if (REG_P (x)
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && replacements[REGNO (x)] != NULL_RTX)
    {
      if (GET_MODE (x) == GET_MODE (replacements[REGNO (x)]))
	return replacements[REGNO (x)];
      return lowpart_subreg (GET_MODE (x), replacements[REGNO (x)],
			     GET_MODE (replacements[REGNO (x)]));
    }

  return NULL_RTX;
}
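
/* Illustrative sketch (not part of the pass): replace_dead_reg is meant
   to be handed to simplify_replace_fn_rtx, which is how the debug-insn
   fixup loop in delete_trivially_dead_insns below uses it.  Disabled
   with "#if 0"; REPLACEMENTS is the same per-pseudo array built there.  */
#if 0
static void
rewrite_debug_loc (rtx_insn *insn, rtx *replacements)
{
  INSN_VAR_LOCATION_LOC (insn)
    = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn),
			       NULL_RTX, replace_dead_reg, replacements);
  df_insn_rescan (insn);
}
#endif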
7049 /* Scan all the insns and delete any that are dead; i.e., they store a register
7050 that is never used or they copy a register to itself.
7052 This is used to remove insns made obviously dead by cse, loop or other
7053 optimizations. It improves the heuristics in loop since it won't try to
7054 move dead invariants out of loops or make givs for dead quantities. The
7055 remaining passes of the compilation are also sped up. */
7058 delete_trivially_dead_insns (rtx_insn
*insns
, int nreg
)
7061 rtx_insn
*insn
, *prev
;
7062 rtx
*replacements
= NULL
;
7065 timevar_push (TV_DELETE_TRIVIALLY_DEAD
);
7066 /* First count the number of times each register is used. */
7067 if (MAY_HAVE_DEBUG_BIND_INSNS
)
7069 counts
= XCNEWVEC (int, nreg
* 3);
7070 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
7071 if (DEBUG_BIND_INSN_P (insn
))
7072 count_reg_usage (INSN_VAR_LOCATION_LOC (insn
), counts
+ nreg
,
7074 else if (INSN_P (insn
))
7076 count_reg_usage (insn
, counts
, NULL_RTX
, 1);
7077 note_stores (insn
, count_stores
, counts
+ nreg
* 2);
7079 /* If there can be debug insns, COUNTS are 3 consecutive arrays.
7080 First one counts how many times each pseudo is used outside
7081 of debug insns, second counts how many times each pseudo is
7082 used in debug insns and third counts how many times a pseudo
7087 counts
= XCNEWVEC (int, nreg
);
7088 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
7090 count_reg_usage (insn
, counts
, NULL_RTX
, 1);
7091 /* If no debug insns can be present, COUNTS is just an array
7092 which counts how many times each pseudo is used. */
7094 /* Pseudo PIC register should be considered as used due to possible
7095 new usages generated. */
7096 if (!reload_completed
7097 && pic_offset_table_rtx
7098 && REGNO (pic_offset_table_rtx
) >= FIRST_PSEUDO_REGISTER
)
7099 counts
[REGNO (pic_offset_table_rtx
)]++;
7100 /* Go from the last insn to the first and delete insns that only set unused
7101 registers or copy a register to itself. As we delete an insn, remove
7102 usage counts for registers it uses.
7104 The first jump optimization pass may leave a real insn as the last
7105 insn in the function. We must not skip that insn or we may end
7106 up deleting code that is not really dead.
7108 If some otherwise unused register is only used in DEBUG_INSNs,
7109 try to create a DEBUG_EXPR temporary and emit a DEBUG_INSN before
7110 the setter. Then go through DEBUG_INSNs and if a DEBUG_EXPR
7111 has been created for the unused register, replace it with
7112 the DEBUG_EXPR, otherwise reset the DEBUG_INSN. */
7113 for (insn
= get_last_insn (); insn
; insn
= prev
)
7117 prev
= PREV_INSN (insn
);
7121 live_insn
= insn_live_p (insn
, counts
);
7123 /* If this is a dead insn, delete it and show registers in it aren't
7126 if (! live_insn
&& dbg_cnt (delete_trivial_dead
))
7128 if (DEBUG_INSN_P (insn
))
7130 if (DEBUG_BIND_INSN_P (insn
))
7131 count_reg_usage (INSN_VAR_LOCATION_LOC (insn
), counts
+ nreg
,
7137 if (MAY_HAVE_DEBUG_BIND_INSNS
7138 && (set
= single_set (insn
)) != NULL_RTX
7139 && is_dead_reg (SET_DEST (set
), counts
)
7140 /* Used at least once in some DEBUG_INSN. */
7141 && counts
[REGNO (SET_DEST (set
)) + nreg
] > 0
7142 /* And set exactly once. */
7143 && counts
[REGNO (SET_DEST (set
)) + nreg
* 2] == 1
7144 && !side_effects_p (SET_SRC (set
))
7145 && asm_noperands (PATTERN (insn
)) < 0)
7147 rtx dval
, bind_var_loc
;
7150 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
7151 dval
= make_debug_expr_from_rtl (SET_DEST (set
));
7153 /* Emit a debug bind insn before the insn in which
7156 gen_rtx_VAR_LOCATION (GET_MODE (SET_DEST (set
)),
7157 DEBUG_EXPR_TREE_DECL (dval
),
7159 VAR_INIT_STATUS_INITIALIZED
);
7160 count_reg_usage (bind_var_loc
, counts
+ nreg
, NULL_RTX
, 1);
7162 bind
= emit_debug_insn_before (bind_var_loc
, insn
);
7163 df_insn_rescan (bind
);
7165 if (replacements
== NULL
)
7166 replacements
= XCNEWVEC (rtx
, nreg
);
7167 replacements
[REGNO (SET_DEST (set
))] = dval
;
7170 count_reg_usage (insn
, counts
, NULL_RTX
, -1);
7173 cse_cfg_altered
|= delete_insn_and_edges (insn
);
7177 if (MAY_HAVE_DEBUG_BIND_INSNS
)
7179 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
7180 if (DEBUG_BIND_INSN_P (insn
))
7182 /* If this debug insn references a dead register that wasn't replaced
7183 with an DEBUG_EXPR, reset the DEBUG_INSN. */
7184 bool seen_repl
= false;
7185 if (is_dead_debug_insn (INSN_VAR_LOCATION_LOC (insn
),
7186 counts
, replacements
, &seen_repl
))
7188 INSN_VAR_LOCATION_LOC (insn
) = gen_rtx_UNKNOWN_VAR_LOC ();
7189 df_insn_rescan (insn
);
7193 INSN_VAR_LOCATION_LOC (insn
)
7194 = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn
),
7195 NULL_RTX
, replace_dead_reg
,
7197 df_insn_rescan (insn
);
7200 free (replacements
);
7203 if (dump_file
&& ndead
)
7204 fprintf (dump_file
, "Deleted %i trivially dead insns\n",
7208 timevar_pop (TV_DELETE_TRIVIALLY_DEAD
);
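
/* Illustrative sketch (not part of the pass): the core deletion
   criterion used above, in isolation -- an insn is removed when
   insn_live_p says its results are unused and nothing else forces it
   to stay.  Disabled with "#if 0"; COUNTS is the use-count array built
   by count_reg_usage, and the dbg_cnt gating and debug-insn bookkeeping
   of the real loop are omitted here.  */
#if 0
static bool
maybe_delete_dead_insn (rtx_insn *insn, int *counts)
{
  if (!INSN_P (insn) || insn_live_p (insn, counts))
    return false;
  /* Drop the uses this insn contributed, then delete it.  */
  count_reg_usage (insn, counts, NULL_RTX, -1);
  delete_insn_and_edges (insn);
  return true;
}
#endif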
/* If LOC contains references to NEWREG in a different mode, change them
   to use NEWREG instead.  */

static void
cse_change_cc_mode (subrtx_ptr_iterator::array_type &array,
		    rtx *loc, rtx_insn *insn, rtx newreg)
{
  FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
    {
      rtx *loc = *iter;
      rtx x = *loc;
      if (x
	  && REG_P (x)
	  && REGNO (x) == REGNO (newreg)
	  && GET_MODE (x) != GET_MODE (newreg))
	{
	  validate_change (insn, loc, newreg, 1);
	  iter.skip_subrtxes ();
	}
    }
}
/* Change the mode of any reference to the register REGNO (NEWREG) to
   GET_MODE (NEWREG) in INSN.  */

static void
cse_change_cc_mode_insn (rtx_insn *insn, rtx newreg)
{
  int success;

  if (!INSN_P (insn))
    return;

  subrtx_ptr_iterator::array_type array;
  cse_change_cc_mode (array, &PATTERN (insn), insn, newreg);
  cse_change_cc_mode (array, &REG_NOTES (insn), insn, newreg);

  /* If the following assertion was triggered, there is most probably
     something wrong with the cc_modes_compatible back end function.
     CC modes only can be considered compatible if the insn - with the mode
     replaced by any of the compatible modes - can still be recognized.  */
  success = apply_change_group ();
  gcc_assert (success);
}
/* Change the mode of any reference to the register REGNO (NEWREG) to
   GET_MODE (NEWREG), starting at START.  Stop before END.  Stop at
   any instruction which modifies NEWREG.  */

static void
cse_change_cc_mode_insns (rtx_insn *start, rtx_insn *end, rtx newreg)
{
  rtx_insn *insn;

  for (insn = start; insn != end; insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
	continue;

      if (reg_set_p (newreg, insn))
	return;

      cse_change_cc_mode_insn (insn, newreg);
    }
}
7278 /* BB is a basic block which finishes with CC_REG as a condition code
7279 register which is set to CC_SRC. Look through the successors of BB
7280 to find blocks which have a single predecessor (i.e., this one),
7281 and look through those blocks for an assignment to CC_REG which is
7282 equivalent to CC_SRC. CAN_CHANGE_MODE indicates whether we are
7283 permitted to change the mode of CC_SRC to a compatible mode. This
7284 returns VOIDmode if no equivalent assignments were found.
7285 Otherwise it returns the mode which CC_SRC should wind up with.
7286 ORIG_BB should be the same as BB in the outermost cse_cc_succs call,
7287 but is passed unmodified down to recursive calls in order to prevent
7290 The main complexity in this function is handling the mode issues.
7291 We may have more than one duplicate which we can eliminate, and we
7292 try to find a mode which will work for multiple duplicates. */
7295 cse_cc_succs (basic_block bb
, basic_block orig_bb
, rtx cc_reg
, rtx cc_src
,
7296 bool can_change_mode
)
7300 unsigned int insn_count
;
7303 machine_mode modes
[2];
7304 rtx_insn
*last_insns
[2];
7309 /* We expect to have two successors. Look at both before picking
7310 the final mode for the comparison. If we have more successors
7311 (i.e., some sort of table jump, although that seems unlikely),
7312 then we require all beyond the first two to use the same
7315 found_equiv
= false;
7316 mode
= GET_MODE (cc_src
);
7318 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7323 if (e
->flags
& EDGE_COMPLEX
)
7326 if (EDGE_COUNT (e
->dest
->preds
) != 1
7327 || e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
7328 /* Avoid endless recursion on unreachable blocks. */
7329 || e
->dest
== orig_bb
)
7332 end
= NEXT_INSN (BB_END (e
->dest
));
7333 for (insn
= BB_HEAD (e
->dest
); insn
!= end
; insn
= NEXT_INSN (insn
))
7337 if (! INSN_P (insn
))
7340 /* If CC_SRC is modified, we have to stop looking for
7341 something which uses it. */
7342 if (modified_in_p (cc_src
, insn
))
7345 /* Check whether INSN sets CC_REG to CC_SRC. */
7346 set
= single_set (insn
);
7348 && REG_P (SET_DEST (set
))
7349 && REGNO (SET_DEST (set
)) == REGNO (cc_reg
))
7352 machine_mode set_mode
;
7353 machine_mode comp_mode
;
7356 set_mode
= GET_MODE (SET_SRC (set
));
7357 comp_mode
= set_mode
;
7358 if (rtx_equal_p (cc_src
, SET_SRC (set
)))
7360 else if (GET_CODE (cc_src
) == COMPARE
7361 && GET_CODE (SET_SRC (set
)) == COMPARE
7363 && rtx_equal_p (XEXP (cc_src
, 0),
7364 XEXP (SET_SRC (set
), 0))
7365 && rtx_equal_p (XEXP (cc_src
, 1),
7366 XEXP (SET_SRC (set
), 1)))
7369 comp_mode
= targetm
.cc_modes_compatible (mode
, set_mode
);
7370 if (comp_mode
!= VOIDmode
7371 && (can_change_mode
|| comp_mode
== mode
))
7378 if (insn_count
< ARRAY_SIZE (insns
))
7380 insns
[insn_count
] = insn
;
7381 modes
[insn_count
] = set_mode
;
7382 last_insns
[insn_count
] = end
;
7385 if (mode
!= comp_mode
)
7387 gcc_assert (can_change_mode
);
7390 /* The modified insn will be re-recognized later. */
7391 PUT_MODE (cc_src
, mode
);
7396 if (set_mode
!= mode
)
7398 /* We found a matching expression in the
7399 wrong mode, but we don't have room to
7400 store it in the array. Punt. This case
7404 /* INSN sets CC_REG to a value equal to CC_SRC
7405 with the right mode. We can simply delete
7410 /* We found an instruction to delete. Keep looking,
7411 in the hopes of finding a three-way jump. */
7415 /* We found an instruction which sets the condition
7416 code, so don't look any farther. */
7420 /* If INSN sets CC_REG in some other way, don't look any
7422 if (reg_set_p (cc_reg
, insn
))
7426 /* If we fell off the bottom of the block, we can keep looking
7427 through successors. We pass CAN_CHANGE_MODE as false because
7428 we aren't prepared to handle compatibility between the
7429 further blocks and this block. */
7432 machine_mode submode
;
7434 submode
= cse_cc_succs (e
->dest
, orig_bb
, cc_reg
, cc_src
, false);
7435 if (submode
!= VOIDmode
)
7437 gcc_assert (submode
== mode
);
7439 can_change_mode
= false;
7447 /* Now INSN_COUNT is the number of instructions we found which set
7448 CC_REG to a value equivalent to CC_SRC. The instructions are in
7449 INSNS. The modes used by those instructions are in MODES. */
7452 for (i
= 0; i
< insn_count
; ++i
)
7454 if (modes
[i
] != mode
)
7456 /* We need to change the mode of CC_REG in INSNS[i] and
7457 subsequent instructions. */
7460 if (GET_MODE (cc_reg
) == mode
)
7463 newreg
= gen_rtx_REG (mode
, REGNO (cc_reg
));
7465 cse_change_cc_mode_insns (NEXT_INSN (insns
[i
]), last_insns
[i
],
7469 cse_cfg_altered
|= delete_insn_and_edges (insns
[i
]);
7475 /* If we have a fixed condition code register (or two), walk through
7476 the instructions and try to eliminate duplicate assignments. */
7479 cse_condition_code_reg (void)
7481 unsigned int cc_regno_1
;
7482 unsigned int cc_regno_2
;
7487 if (! targetm
.fixed_condition_code_regs (&cc_regno_1
, &cc_regno_2
))
7490 cc_reg_1
= gen_rtx_REG (CCmode
, cc_regno_1
);
7491 if (cc_regno_2
!= INVALID_REGNUM
)
7492 cc_reg_2
= gen_rtx_REG (CCmode
, cc_regno_2
);
7494 cc_reg_2
= NULL_RTX
;
7496 FOR_EACH_BB_FN (bb
, cfun
)
7498 rtx_insn
*last_insn
;
7501 rtx_insn
*cc_src_insn
;
7504 machine_mode orig_mode
;
7506 /* Look for blocks which end with a conditional jump based on a
7507 condition code register. Then look for the instruction which
7508 sets the condition code register. Then look through the
7509 successor blocks for instructions which set the condition
7510 code register to the same value. There are other possible
7511 uses of the condition code register, but these are by far the
7512 most common and the ones which we are most likely to be able
7515 last_insn
= BB_END (bb
);
7516 if (!JUMP_P (last_insn
))
7519 if (reg_referenced_p (cc_reg_1
, PATTERN (last_insn
)))
7521 else if (cc_reg_2
&& reg_referenced_p (cc_reg_2
, PATTERN (last_insn
)))
7528 for (insn
= PREV_INSN (last_insn
);
7529 insn
&& insn
!= PREV_INSN (BB_HEAD (bb
));
7530 insn
= PREV_INSN (insn
))
7534 if (! INSN_P (insn
))
7536 set
= single_set (insn
);
7538 && REG_P (SET_DEST (set
))
7539 && REGNO (SET_DEST (set
)) == REGNO (cc_reg
))
7542 cc_src
= SET_SRC (set
);
7545 else if (reg_set_p (cc_reg
, insn
))
7552 if (modified_between_p (cc_src
, cc_src_insn
, NEXT_INSN (last_insn
)))
7555 /* Now CC_REG is a condition code register used for a
7556 conditional jump at the end of the block, and CC_SRC, in
7557 CC_SRC_INSN, is the value to which that condition code
7558 register is set, and CC_SRC is still meaningful at the end of
7561 orig_mode
= GET_MODE (cc_src
);
7562 mode
= cse_cc_succs (bb
, bb
, cc_reg
, cc_src
, true);
7563 if (mode
!= VOIDmode
)
7565 gcc_assert (mode
== GET_MODE (cc_src
));
7566 if (mode
!= orig_mode
)
7568 rtx newreg
= gen_rtx_REG (mode
, REGNO (cc_reg
));
7570 cse_change_cc_mode_insn (cc_src_insn
, newreg
);
7572 /* Do the same in the following insns that use the
7573 current value of CC_REG within BB. */
7574 cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn
),
7575 NEXT_INSN (last_insn
),
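
/* Schematic example (not taken from any particular target) of the
   redundancy cse_condition_code_reg removes.  If the single-predecessor
   successors of a conditional jump recompute the same comparison into
   the fixed CC register, say

       bb 2:  (set (reg:CCZ flags) (compare (reg:SI 100) (const_int 0)))
              (conditional jump on flags)
       bb 3:  (set (reg:CCZ flags) (compare (reg:SI 100) (const_int 0)))
       bb 4:  (set (reg:CCZ flags) (compare (reg:SI 100) (const_int 0)))

   and reg 100 is not modified in between, the assignments in bb 3 and
   bb 4 are deleted, possibly after widening the mode of the comparison
   in bb 2 through targetm.cc_modes_compatible.  The register numbers
   and CC mode above are purely illustrative.  */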
/* Perform common subexpression elimination.  Nonzero value from
   `cse_main' means that jumps were simplified and some code may now
   be unreachable, so do jump optimization again.  */
static unsigned int
rest_of_handle_cse (void)
{
  int tem;

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  tem = cse_main (get_insns (), max_reg_num ());

  /* If we are not running more CSE passes, then we are no longer
     expecting CSE to be run.  But always rerun it in a cheap mode.  */
  cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;

  if (tem == 2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cse_cfg_altered |= cleanup_cfg (CLEANUP_CFG_CHANGED);
      timevar_pop (TV_JUMP);
    }
  else if (tem == 1 || optimize > 1)
    cse_cfg_altered |= cleanup_cfg (0);

  return 0;
}

namespace {

const pass_data pass_data_cse =
{
  RTL_PASS, /* type */
  "cse1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CSE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_cse : public rtl_opt_pass
{
public:
  pass_cse (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cse, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return optimize > 0; }
  virtual unsigned int execute (function *) { return rest_of_handle_cse (); }

}; // class pass_cse

} // anon namespace

rtl_opt_pass *
make_pass_cse (gcc::context *ctxt)
{
  return new pass_cse (ctxt);
}
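
/* Note: make_pass_cse is only a factory.  The pass itself is scheduled
   by the pass manager; in GCC the RTL pipeline is declared centrally
   (see passes.def), which is where this factory gets instantiated.  A
   minimal sketch of the factory pattern, for illustration only:

     rtl_opt_pass *p = make_pass_cse (g);  // 'g' is the global gcc::context
     // the pass manager later calls p->gate (fun) and p->execute (fun)

   The same pattern applies to make_pass_cse2 and
   make_pass_cse_after_global_opts below.  */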
7650 /* Run second CSE pass after loop optimizations. */
7652 rest_of_handle_cse2 (void)
7657 dump_flow_info (dump_file
, dump_flags
);
7659 tem
= cse_main (get_insns (), max_reg_num ());
7661 /* Run a pass to eliminate duplicated assignments to condition code
7662 registers. We have to run this after bypass_jumps, because it
7663 makes it harder for that pass to determine whether a jump can be
7665 cse_condition_code_reg ();
7667 delete_trivially_dead_insns (get_insns (), max_reg_num ());
7671 timevar_push (TV_JUMP
);
7672 rebuild_jump_labels (get_insns ());
7673 cse_cfg_altered
|= cleanup_cfg (CLEANUP_CFG_CHANGED
);
7674 timevar_pop (TV_JUMP
);
7676 else if (tem
== 1 || cse_cfg_altered
)
7677 cse_cfg_altered
|= cleanup_cfg (0);
7679 cse_not_expected
= 1;
7686 const pass_data pass_data_cse2
=
7688 RTL_PASS
, /* type */
7690 OPTGROUP_NONE
, /* optinfo_flags */
7691 TV_CSE2
, /* tv_id */
7692 0, /* properties_required */
7693 0, /* properties_provided */
7694 0, /* properties_destroyed */
7695 0, /* todo_flags_start */
7696 TODO_df_finish
, /* todo_flags_finish */
7699 class pass_cse2
: public rtl_opt_pass
7702 pass_cse2 (gcc::context
*ctxt
)
7703 : rtl_opt_pass (pass_data_cse2
, ctxt
)
7706 /* opt_pass methods: */
7707 virtual bool gate (function
*)
7709 return optimize
> 0 && flag_rerun_cse_after_loop
;
7712 virtual unsigned int execute (function
*) { return rest_of_handle_cse2 (); }
7714 }; // class pass_cse2
7719 make_pass_cse2 (gcc::context
*ctxt
)
7721 return new pass_cse2 (ctxt
);
/* Run second CSE pass after global optimizations.  */
7726 rest_of_handle_cse_after_global_opts (void)
7731 /* We only want to do local CSE, so don't follow jumps. */
7732 save_cfj
= flag_cse_follow_jumps
;
7733 flag_cse_follow_jumps
= 0;
7735 rebuild_jump_labels (get_insns ());
7736 tem
= cse_main (get_insns (), max_reg_num ());
7737 cse_cfg_altered
|= purge_all_dead_edges ();
7738 delete_trivially_dead_insns (get_insns (), max_reg_num ());
7740 cse_not_expected
= !flag_rerun_cse_after_loop
;
7742 /* If cse altered any jumps, rerun jump opts to clean things up. */
7745 timevar_push (TV_JUMP
);
7746 rebuild_jump_labels (get_insns ());
7747 cse_cfg_altered
|= cleanup_cfg (CLEANUP_CFG_CHANGED
);
7748 timevar_pop (TV_JUMP
);
7750 else if (tem
== 1 || cse_cfg_altered
)
7751 cse_cfg_altered
|= cleanup_cfg (0);
7753 flag_cse_follow_jumps
= save_cfj
;
7759 const pass_data pass_data_cse_after_global_opts
=
7761 RTL_PASS
, /* type */
7762 "cse_local", /* name */
7763 OPTGROUP_NONE
, /* optinfo_flags */
7765 0, /* properties_required */
7766 0, /* properties_provided */
7767 0, /* properties_destroyed */
7768 0, /* todo_flags_start */
7769 TODO_df_finish
, /* todo_flags_finish */
7772 class pass_cse_after_global_opts
: public rtl_opt_pass
7775 pass_cse_after_global_opts (gcc::context
*ctxt
)
7776 : rtl_opt_pass (pass_data_cse_after_global_opts
, ctxt
)
7779 /* opt_pass methods: */
7780 virtual bool gate (function
*)
7782 return optimize
> 0 && flag_rerun_cse_after_global_opts
;
7785 virtual unsigned int execute (function
*)
7787 return rest_of_handle_cse_after_global_opts ();
7790 }; // class pass_cse_after_global_opts
7795 make_pass_cse_after_global_opts (gcc::context
*ctxt
)
7797 return new pass_cse_after_global_opts (ctxt
);