/* Perform simple optimizations to clean up the result of reload.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "statistics.h"
#include "double-int.h"
#include "fixed-value.h"
#include "insn-codes.h"
#include "dominance.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "diagnostic-core.h"
#include "tree-pass.h"
static int reload_cse_noop_set_p (rtx);
static bool reload_cse_simplify (rtx_insn *, rtx);
static void reload_cse_regs_1 (void);
static int reload_cse_simplify_set (rtx, rtx_insn *);
static int reload_cse_simplify_operands (rtx_insn *, rtx);

static void reload_combine (void);
static void reload_combine_note_use (rtx *, rtx_insn *, int, rtx);
static void reload_combine_note_store (rtx, const_rtx, void *);

static bool reload_cse_move2add (rtx_insn *);
static void move2add_note_store (rtx, const_rtx, void *);
/* Call cse / combine like post-reload optimization phases.
   FIRST is the first instruction.  */

static void
reload_cse_regs (rtx_insn *first ATTRIBUTE_UNUSED)
{
  bool moves_converted;
  reload_cse_regs_1 ();
  reload_combine ();
  moves_converted = reload_cse_move2add (first);
  if (flag_expensive_optimizations)
    {
      if (moves_converted)
	reload_combine ();
      reload_cse_regs_1 ();
    }
}
/* See whether a single set SET is a noop.  */
static int
reload_cse_noop_set_p (rtx set)
{
  if (cselib_reg_set_mode (SET_DEST (set)) != GET_MODE (SET_DEST (set)))
    return 0;

  return rtx_equal_for_cselib_p (SET_DEST (set), SET_SRC (set));
}
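
/* An illustrative example, not from the original sources: reload can
   leave behind a self-copy such as

       (set (reg:SI 3) (reg:SI 3))

   when two pseudos landed in the same hard register.  cselib proves both
   sides hold the same value in the same mode, so the set is a noop and
   reload_cse_simplify may delete the whole insn.  */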
/* Try to simplify INSN.  Return true if the CFG may have changed.  */
static bool
reload_cse_simplify (rtx_insn *insn, rtx testreg)
{
  rtx body = PATTERN (insn);
  basic_block insn_bb = BLOCK_FOR_INSN (insn);
  unsigned insn_bb_succs = EDGE_COUNT (insn_bb->succs);

  if (GET_CODE (body) == SET)
    {
      int count = 0;

      /* Simplify even if we may think it is a no-op.
	 We may think a memory load of a value smaller than WORD_SIZE
	 is redundant because we haven't taken into account possible
	 implicit extension.  reload_cse_simplify_set() will bring
	 this out, so it's safer to simplify before we delete.  */
      count += reload_cse_simplify_set (body, insn);

      if (!count && reload_cse_noop_set_p (body))
	{
	  rtx value = SET_DEST (body);
	  if (REG_P (value)
	      && ! REG_FUNCTION_VALUE_P (value))
	    value = 0;
	  if (check_for_inc_dec (insn))
	    delete_insn_and_edges (insn);
	  /* We're done with this insn.  */
	  goto done;
	}

      if (count > 0)
	apply_change_group ();
      else
	reload_cse_simplify_operands (insn, testreg);
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      int count = 0;
      rtx value = NULL_RTX;

      /* Registers mentioned in the clobber list for an asm cannot be reused
	 within the body of the asm.  Invalidate those registers now so that
	 we don't try to substitute values for them.  */
      if (asm_noperands (body) >= 0)
	{
	  for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	    {
	      rtx part = XVECEXP (body, 0, i);
	      if (GET_CODE (part) == CLOBBER && REG_P (XEXP (part, 0)))
		cselib_invalidate_rtx (XEXP (part, 0));
	    }
	}

      /* If every action in a PARALLEL is a noop, we can delete
	 the entire PARALLEL.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	{
	  rtx part = XVECEXP (body, 0, i);
	  if (GET_CODE (part) == SET)
	    {
	      if (! reload_cse_noop_set_p (part))
		break;
	      if (REG_P (SET_DEST (part))
		  && REG_FUNCTION_VALUE_P (SET_DEST (part)))
		{
		  if (value)
		    break;
		  value = SET_DEST (part);
		}
	    }
	  else if (GET_CODE (part) != CLOBBER)
	    break;
	}

      if (i < 0)
	{
	  if (check_for_inc_dec (insn))
	    delete_insn_and_edges (insn);
	  /* We're done with this insn.  */
	  goto done;
	}

      /* It's not a no-op, but we can try to simplify it.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	if (GET_CODE (XVECEXP (body, 0, i)) == SET)
	  count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);

      if (count > 0)
	apply_change_group ();
      else
	reload_cse_simplify_operands (insn, testreg);
    }

done:
  return (EDGE_COUNT (insn_bb->succs) != insn_bb_succs);
}
/* Do a very simple CSE pass over the hard registers.

   This function detects no-op moves where we happened to assign two
   different pseudo-registers to the same hard register, and then
   copied one to the other.  Reload will generate a useless
   instruction copying a register to itself.

   This function also detects cases where we load a value from memory
   into two different registers, and (if memory is more expensive than
   registers) changes it to simply copy the first register into the
   second register.

   Another optimization is performed that scans the operands of each
   instruction to see whether the value is already available in a
   hard register.  It then replaces the operand with the hard register
   if possible, much like an optional reload would.  */

static void
reload_cse_regs_1 (void)
{
  bool cfg_changed = false;
  basic_block bb;
  rtx_insn *insn;
  rtx testreg = gen_rtx_REG (VOIDmode, -1);

  cselib_init (CSELIB_RECORD_MEMORY);
  init_alias_analysis ();

  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
      {
	if (INSN_P (insn))
	  cfg_changed |= reload_cse_simplify (insn, testreg);

	cselib_process_insn (insn);
      }

  /* Clean up.  */
  end_alias_analysis ();
  cselib_finish ();
  if (cfg_changed)
    cleanup_cfg (0);
}
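
/* A sketch of the redundant-load case described above (hypothetical
   register numbers, not from the original sources):

       (set (reg:SI 1) (mem:SI (reg:SI 6)))
       ...
       (set (reg:SI 2) (mem:SI (reg:SI 6)))

   When a memory access is costlier than a register copy on the target,
   reload_cse_simplify_set rewrites the second load as

       (set (reg:SI 2) (reg:SI 1))

   using cselib to prove that the value is still available.  */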
/* Try to simplify a single SET instruction.  SET is the set pattern.
   INSN is the instruction it came from.
   This function only handles one case: if we set a register to a value
   which is not a register, we try to find that value in some other register
   and change the set into a register copy.  */

static int
reload_cse_simplify_set (rtx set, rtx_insn *insn)
{
  int did_change = 0;
  int dreg;
  rtx src;
  reg_class_t dclass;
  int old_cost;
  cselib_val *val;
  struct elt_loc_list *l;
#ifdef LOAD_EXTEND_OP
  enum rtx_code extend_op = UNKNOWN;
#endif
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));

  dreg = true_regnum (SET_DEST (set));
  if (dreg < 0)
    return 0;

  src = SET_SRC (set);
  if (side_effects_p (src) || true_regnum (src) >= 0)
    return 0;

  dclass = REGNO_REG_CLASS (dreg);

#ifdef LOAD_EXTEND_OP
  /* When replacing a memory with a register, we need to honor assumptions
     that combine made wrt the contents of sign bits.  We'll do this by
     generating an extend instruction instead of a reg->reg copy.  Thus
     the destination must be a register that we can widen.  */
  if (MEM_P (src)
      && GET_MODE_BITSIZE (GET_MODE (src)) < BITS_PER_WORD
      && (extend_op = LOAD_EXTEND_OP (GET_MODE (src))) != UNKNOWN
      && !REG_P (SET_DEST (set)))
    return 0;
#endif

  val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0, VOIDmode);
  if (! val)
    return 0;

  /* If memory loads are cheaper than register copies, don't change them.  */
  if (MEM_P (src))
    old_cost = memory_move_cost (GET_MODE (src), dclass, true);
  else if (REG_P (src))
    old_cost = register_move_cost (GET_MODE (src),
				   REGNO_REG_CLASS (REGNO (src)), dclass);
  else
    old_cost = set_src_cost (src, speed);

  for (l = val->locs; l; l = l->next)
    {
      rtx this_rtx = l->loc;
      int this_cost;

      if (CONSTANT_P (this_rtx) && ! references_value_p (this_rtx, 0))
	{
#ifdef LOAD_EXTEND_OP
	  if (extend_op != UNKNOWN)
	    {
	      wide_int result;

	      if (!CONST_SCALAR_INT_P (this_rtx))
		continue;

	      switch (extend_op)
		{
		case ZERO_EXTEND:
		  result = wide_int::from (std::make_pair (this_rtx,
							   GET_MODE (src)),
					   BITS_PER_WORD, UNSIGNED);
		  break;
		case SIGN_EXTEND:
		  result = wide_int::from (std::make_pair (this_rtx,
							   GET_MODE (src)),
					   BITS_PER_WORD, SIGNED);
		  break;
		default:
		  gcc_unreachable ();
		}
	      this_rtx = immed_wide_int_const (result, word_mode);
	    }
#endif
	  this_cost = set_src_cost (this_rtx, speed);
	}
      else if (REG_P (this_rtx))
	{
#ifdef LOAD_EXTEND_OP
	  if (extend_op != UNKNOWN)
	    {
	      this_rtx = gen_rtx_fmt_e (extend_op, word_mode, this_rtx);
	      this_cost = set_src_cost (this_rtx, speed);
	    }
	  else
#endif
	    this_cost = register_move_cost (GET_MODE (this_rtx),
					    REGNO_REG_CLASS (REGNO (this_rtx)),
					    dclass);
	}
      else
	continue;

      /* If equal costs, prefer registers over anything else.  That
	 tends to lead to smaller instructions on some machines.  */
      if (this_cost < old_cost
	  || (this_cost == old_cost
	      && REG_P (this_rtx)
	      && !REG_P (SET_SRC (set))))
	{
#ifdef LOAD_EXTEND_OP
	  if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) < BITS_PER_WORD
	      && extend_op != UNKNOWN
#ifdef CANNOT_CHANGE_MODE_CLASS
	      && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
					    word_mode,
					    REGNO_REG_CLASS (REGNO (SET_DEST (set))))
#endif
	      )
	    {
	      rtx wide_dest = gen_rtx_REG (word_mode, REGNO (SET_DEST (set)));
	      ORIGINAL_REGNO (wide_dest) = ORIGINAL_REGNO (SET_DEST (set));
	      validate_change (insn, &SET_DEST (set), wide_dest, 1);
	    }
#endif

	  validate_unshare_change (insn, &SET_SRC (set), this_rtx, 1);
	  old_cost = this_cost, did_change = 1;
	}
    }

  return did_change;
}
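
/* A hedged example of the LOAD_EXTEND_OP handling above (hypothetical
   modes and registers): on a target that implicitly zero-extends QImode
   loads to word_mode, a load such as

       (set (reg:QI 0) (mem:QI (reg:SI 6)))

   must not simply become (set (reg:QI 0) (reg:QI 1)), since later code
   may rely on the implicitly extended upper bits.  Instead the whole
   word is rewritten as

       (set (reg:SI 0) (zero_extend:SI (reg:QI 1)))

   which is why the destination must be a register we can widen.  */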
/* Try to replace operands in INSN with equivalent values that are already
   in registers.  This can be viewed as optional reloading.

   For each non-register operand in the insn, see if any hard regs are
   known to be equivalent to that operand.  Record the alternatives which
   can accept these hard registers.  Among all alternatives, select the
   ones which are better or equal to the one currently matching, where
   "better" is in terms of '?' and '!' constraints.  Among the remaining
   alternatives, select the one which replaces most operands with
   hard registers.  */

static int
reload_cse_simplify_operands (rtx_insn *insn, rtx testreg)
{
  int i, j;

  /* For each operand, all registers that are equivalent to it.  */
  HARD_REG_SET equiv_regs[MAX_RECOG_OPERANDS];

  const char *constraints[MAX_RECOG_OPERANDS];

  /* Vector recording how bad an alternative is.  */
  int *alternative_reject;
  /* Vector recording how many registers can be introduced by choosing
     this alternative.  */
  int *alternative_nregs;
  /* Array of vectors recording, for each operand and each alternative,
     which hard register to substitute, or -1 if the operand should be
     left as it is.  */
  int *op_alt_regno[MAX_RECOG_OPERANDS];
  /* Array of alternatives, sorted in order of decreasing desirability.  */
  int *alternative_order;

  extract_constrain_insn (insn);

  if (recog_data.n_alternatives == 0 || recog_data.n_operands == 0)
    return 0;

  alternative_reject = XALLOCAVEC (int, recog_data.n_alternatives);
  alternative_nregs = XALLOCAVEC (int, recog_data.n_alternatives);
  alternative_order = XALLOCAVEC (int, recog_data.n_alternatives);
  memset (alternative_reject, 0, recog_data.n_alternatives * sizeof (int));
  memset (alternative_nregs, 0, recog_data.n_alternatives * sizeof (int));

  /* For each operand, find out which regs are equivalent.  */
  for (i = 0; i < recog_data.n_operands; i++)
    {
      cselib_val *v;
      struct elt_loc_list *l;
      rtx op;

      CLEAR_HARD_REG_SET (equiv_regs[i]);

      /* cselib blows up on CODE_LABELs.  Trying to fix that doesn't seem
	 right, so avoid the problem here.  Likewise if we have a constant
	 and the insn pattern doesn't tell us the mode we need.  */
      if (LABEL_P (recog_data.operand[i])
	  || (CONSTANT_P (recog_data.operand[i])
	      && recog_data.operand_mode[i] == VOIDmode))
	continue;

      op = recog_data.operand[i];
#ifdef LOAD_EXTEND_OP
      if (MEM_P (op)
	  && GET_MODE_BITSIZE (GET_MODE (op)) < BITS_PER_WORD
	  && LOAD_EXTEND_OP (GET_MODE (op)) != UNKNOWN)
	{
	  rtx set = single_set (insn);

	  /* We might have multiple sets, some of which do implicit
	     extension.  Punt on this for now.  */
	  if (! set)
	    continue;
	  /* If the destination is also a MEM or a STRICT_LOW_PART, no
	     extension applies.
	     Also, if there is an explicit extension, we don't have to
	     worry about an implicit one.  */
	  else if (MEM_P (SET_DEST (set))
		   || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART
		   || GET_CODE (SET_SRC (set)) == ZERO_EXTEND
		   || GET_CODE (SET_SRC (set)) == SIGN_EXTEND)
	    ; /* Continue ordinary processing.  */
#ifdef CANNOT_CHANGE_MODE_CLASS
	  /* If the register cannot change mode to word_mode, it follows that
	     it cannot have been used in word_mode.  */
	  else if (REG_P (SET_DEST (set))
		   && CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
						word_mode,
						REGNO_REG_CLASS (REGNO (SET_DEST (set)))))
	    ; /* Continue ordinary processing.  */
#endif
	  /* If this is a straight load, make the extension explicit.  */
	  else if (REG_P (SET_DEST (set))
		   && recog_data.n_operands == 2
		   && SET_SRC (set) == op
		   && SET_DEST (set) == recog_data.operand[1-i])
	    {
	      validate_change (insn, recog_data.operand_loc[i],
			       gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (op)),
					      word_mode, op),
			       1);
	      validate_change (insn, recog_data.operand_loc[1-i],
			       gen_rtx_REG (word_mode, REGNO (SET_DEST (set))),
			       1);
	      if (! apply_change_group ())
		return 0;
	      return reload_cse_simplify_operands (insn, testreg);
	    }
	  else
	    /* ??? There might be arithmetic operations with memory that are
	       safe to optimize, but is it worth the trouble?  */
	    continue;
	}
#endif /* LOAD_EXTEND_OP */
      if (side_effects_p (op))
	continue;
      v = cselib_lookup (op, recog_data.operand_mode[i], 0, VOIDmode);
      if (! v)
	continue;

      for (l = v->locs; l; l = l->next)
	if (REG_P (l->loc))
	  SET_HARD_REG_BIT (equiv_regs[i], REGNO (l->loc));
    }

  alternative_mask preferred = get_preferred_alternatives (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      machine_mode mode;
      int regno;
      const char *p;

      op_alt_regno[i] = XALLOCAVEC (int, recog_data.n_alternatives);
      for (j = 0; j < recog_data.n_alternatives; j++)
	op_alt_regno[i][j] = -1;

      p = constraints[i] = recog_data.constraints[i];
      mode = recog_data.operand_mode[i];

      /* Add the reject values for each alternative given by the constraints
	 for this operand.  */
      j = 0;
      while (*p != '\0')
	{
	  char c = *p++;
	  if (c == ',')
	    j++;
	  else if (c == '?')
	    alternative_reject[j] += 3;
	  else if (c == '!')
	    alternative_reject[j] += 300;
	}

      /* We won't change operands which are already registers.  We
	 also don't want to modify output operands.  */
      regno = true_regnum (recog_data.operand[i]);
      if (regno >= 0
	  || constraints[i][0] == '='
	  || constraints[i][0] == '+')
	continue;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  enum reg_class rclass = NO_REGS;

	  if (! TEST_HARD_REG_BIT (equiv_regs[i], regno))
	    continue;

	  SET_REGNO_RAW (testreg, regno);
	  PUT_MODE (testreg, mode);

	  /* We found a register equal to this operand.  Now look for all
	     alternatives that can accept this register and have not been
	     assigned a register they can use yet.  */
	  j = 0;
	  p = constraints[i];
	  for (;;)
	    {
	      char c = *p;

	      switch (c)
		{
		case 'g':
		  rclass = reg_class_subunion[rclass][GENERAL_REGS];
		  break;

		default:
		  rclass
		    = (reg_class_subunion
		       [rclass]
		       [reg_class_for_constraint (lookup_constraint (p))]);
		  break;

		case ',': case '\0':
		  /* See if REGNO fits this alternative, and set it up as the
		     replacement register if we don't have one for this
		     alternative yet and the operand being replaced is not
		     a cheap CONST_INT.  */
		  if (op_alt_regno[i][j] == -1
		      && TEST_BIT (preferred, j)
		      && reg_fits_class_p (testreg, rclass, 0, mode)
		      && (!CONST_INT_P (recog_data.operand[i])
			  || (set_src_cost (recog_data.operand[i],
					    optimize_bb_for_speed_p
					     (BLOCK_FOR_INSN (insn)))
			      > set_src_cost (testreg,
					      optimize_bb_for_speed_p
					       (BLOCK_FOR_INSN (insn))))))
		    {
		      alternative_nregs[j]++;
		      op_alt_regno[i][j] = regno;
		    }
		  j++;
		  rclass = NO_REGS;
		  break;
		}
	      p += CONSTRAINT_LEN (c, p);

	      if (c == '\0')
		break;
	    }
	}
    }

  /* Record all alternatives which are better or equal to the currently
     matching one in the alternative_order array.  */
  for (i = j = 0; i < recog_data.n_alternatives; i++)
    if (alternative_reject[i] <= alternative_reject[which_alternative])
      alternative_order[j++] = i;
  recog_data.n_alternatives = j;

  /* Sort it.  Given a small number of alternatives, a dumb algorithm
     won't hurt too much.  */
  for (i = 0; i < recog_data.n_alternatives - 1; i++)
    {
      int best = i;
      int best_reject = alternative_reject[alternative_order[i]];
      int best_nregs = alternative_nregs[alternative_order[i]];

      for (j = i + 1; j < recog_data.n_alternatives; j++)
	{
	  int this_reject = alternative_reject[alternative_order[j]];
	  int this_nregs = alternative_nregs[alternative_order[j]];

	  if (this_reject < best_reject
	      || (this_reject == best_reject && this_nregs > best_nregs))
	    {
	      best = j;
	      best_reject = this_reject;
	      best_nregs = this_nregs;
	    }
	}

      std::swap (alternative_order[best], alternative_order[i]);
    }

  /* Substitute the operands as determined by op_alt_regno for the best
     alternative.  */
  j = alternative_order[0];

  for (i = 0; i < recog_data.n_operands; i++)
    {
      machine_mode mode = recog_data.operand_mode[i];
      if (op_alt_regno[i][j] == -1)
	continue;

      validate_change (insn, recog_data.operand_loc[i],
		       gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
    }

  for (i = recog_data.n_dups - 1; i >= 0; i--)
    {
      int op = recog_data.dup_num[i];
      machine_mode mode = recog_data.operand_mode[op];

      if (op_alt_regno[op][j] == -1)
	continue;

      validate_change (insn, recog_data.dup_loc[i],
		       gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
    }

  return apply_change_group ();
}
/* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
   addressing now.
   This code might also be useful when reload gave up on reg+reg addressing
   because of clashes between the return register and INDEX_REG_CLASS.  */

/* The maximum number of uses of a register we can keep track of to
   replace them with reg+reg addressing.  */
#define RELOAD_COMBINE_MAX_USES 16
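
/* An illustrative example of the rewriting this limit serves
   (hypothetical register numbers, not from the original sources):

       (set (reg:SI 1) (plus:SI (reg:SI 1) (const_int 4)))
       ... (mem:SI (reg:SI 1)) ...

   can become

       ... (mem:SI (plus:SI (reg:SI 1) (const_int 4))) ...

   but only if every use of register 1 up to its next store has been
   recorded.  Once more than RELOAD_COMBINE_MAX_USES uses are seen, the
   register is treated as used in an unknown fashion.  */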
/* Describes a recorded use of a register.  */
struct reg_use
{
  /* The insn where a register has been used.  */
  rtx_insn *insn;

  /* Points to the memory reference enclosing the use, if any, NULL_RTX
     otherwise.  */
  rtx containing_mem;

  /* Location of the register within INSN.  */
  rtx *usep;

  /* The reverse uid of the insn.  */
  int ruid;
};

/* If the register is used in some unknown fashion, USE_INDEX is negative.
   If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
   indicates where it is first set or clobbered.
   Otherwise, USE_INDEX is the index of the last encountered use of the
   register (which is first among these we have seen since we scan backwards).
   USE_RUID indicates the first encountered, i.e. last, of these uses.
   If ALL_OFFSETS_MATCH is true, all encountered uses were inside a PLUS
   with a constant offset; OFFSET contains this constant in that case.
   STORE_RUID is always meaningful if we only want to use a value in a
   register in a different place: it denotes the next insn in the insn
   stream (i.e. the last encountered) that sets or clobbers the register.
   REAL_STORE_RUID is similar, but clobbers are ignored when updating it.  */
static struct
  {
    struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
    rtx offset;
    int use_index;
    int store_ruid;
    int real_store_ruid;
    int use_ruid;
    bool all_offsets_match;
  } reg_state[FIRST_PSEUDO_REGISTER];
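
/* A short worked example of the USE_INDEX encoding above (illustrative
   only): while scanning backwards, a register not seen yet has
   USE_INDEX == RELOAD_COMBINE_MAX_USES ("dead").  Its first encountered
   use is recorded in reg_use[RELOAD_COMBINE_MAX_USES - 1] and USE_INDEX
   is decremented; each further use decrements it again.  A use we cannot
   handle, or one use too many, drives USE_INDEX negative, and the
   register is then ignored until the next store resets it.  */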
/* Reverse linear uid.  This is increased in reload_combine while scanning
   the instructions from last to first.  It is used to set last_label_ruid
   and the store_ruid / use_ruid fields in reg_state.  */
static int reload_combine_ruid;

/* The RUID of the last label we encountered in reload_combine.  */
static int last_label_ruid;

/* The RUID of the last jump we encountered in reload_combine.  */
static int last_jump_ruid;

/* The register numbers of the first and last index register.  A value of
   -1 in LAST_INDEX_REG indicates that we've previously computed these
   values and found no suitable index registers.  */
static int first_index_reg = -1;
static int last_index_reg;

#define LABEL_LIVE(LABEL) \
  (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])
/* Subroutine of reload_combine_split_ruids, called to fix up a single
   ruid pointed to by *PRUID if it is higher than SPLIT_RUID.  */

static inline void
reload_combine_split_one_ruid (int *pruid, int split_ruid)
{
  if (*pruid > split_ruid)
    (*pruid)++;
}

/* Called when we insert a new insn in a position we've already passed in
   the scan.  Examine all our state, increasing all ruids that are higher
   than SPLIT_RUID by one in order to make room for a new insn.  */

static void
reload_combine_split_ruids (int split_ruid)
{
  unsigned i;

  reload_combine_split_one_ruid (&reload_combine_ruid, split_ruid);
  reload_combine_split_one_ruid (&last_label_ruid, split_ruid);
  reload_combine_split_one_ruid (&last_jump_ruid, split_ruid);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int j, idx = reg_state[i].use_index;
      reload_combine_split_one_ruid (&reg_state[i].use_ruid, split_ruid);
      reload_combine_split_one_ruid (&reg_state[i].store_ruid, split_ruid);
      reload_combine_split_one_ruid (&reg_state[i].real_store_ruid,
				     split_ruid);
      if (idx < 0)
	continue;
      for (j = idx; j < RELOAD_COMBINE_MAX_USES; j++)
	reload_combine_split_one_ruid (&reg_state[i].reg_use[j].ruid,
				       split_ruid);
    }
}
793 reload_combine_purge_insn_uses (rtx_insn
*insn
)
797 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
799 int j
, k
, idx
= reg_state
[i
].use_index
;
802 j
= k
= RELOAD_COMBINE_MAX_USES
;
805 if (reg_state
[i
].reg_use
[j
].insn
!= insn
)
809 reg_state
[i
].reg_use
[k
] = reg_state
[i
].reg_use
[j
];
812 reg_state
[i
].use_index
= k
;
/* Called when we need to forget about all uses of REGNO after an insn
   which is identified by RUID.  */

static void
reload_combine_purge_reg_uses_after_ruid (unsigned regno, int ruid)
{
  int j, k, idx = reg_state[regno].use_index;
  if (idx < 0)
    return;
  j = k = RELOAD_COMBINE_MAX_USES;
  while (j-- > idx)
    if (reg_state[regno].reg_use[j].ruid >= ruid)
      {
	k--;
	if (k != j)
	  reg_state[regno].reg_use[k] = reg_state[regno].reg_use[j];
      }
  reg_state[regno].use_index = k;
}
/* Find the use of REGNO with the ruid that is highest among those
   lower than RUID_LIMIT, and return it if it is the only use of this
   reg in the insn.  Return NULL otherwise.  */

static struct reg_use *
reload_combine_closest_single_use (unsigned regno, int ruid_limit)
{
  int i, best_ruid = 0;
  int use_idx = reg_state[regno].use_index;
  struct reg_use *retval;

  if (use_idx < 0)
    return NULL;
  retval = NULL;
  for (i = use_idx; i < RELOAD_COMBINE_MAX_USES; i++)
    {
      struct reg_use *use = reg_state[regno].reg_use + i;
      int this_ruid = use->ruid;
      if (this_ruid >= ruid_limit)
	continue;
      if (this_ruid > best_ruid)
	{
	  best_ruid = this_ruid;
	  retval = use;
	}
      else if (this_ruid == best_ruid)
	retval = NULL;
    }
  if (last_label_ruid >= best_ruid)
    return NULL;
  return retval;
}
/* After we've moved an add insn, fix up any debug insns that occur
   between the old location of the add and the new location.  REG is
   the destination register of the add insn; REPLACEMENT is the
   SET_SRC of the add.  FROM and TO specify the range in which we
   should make this change on debug insns.  */

static void
fixup_debug_insns (rtx reg, rtx replacement, rtx_insn *from, rtx_insn *to)
{
  rtx_insn *insn;
  for (insn = from; insn != to; insn = NEXT_INSN (insn))
    {
      rtx t;

      if (!DEBUG_INSN_P (insn))
	continue;

      t = INSN_VAR_LOCATION_LOC (insn);
      t = simplify_replace_rtx (t, reg, replacement);
      validate_change (insn, &INSN_VAR_LOCATION_LOC (insn), t, 0);
    }
}
/* Subroutine of reload_combine_recognize_const_pattern.  Try to replace REG
   with SRC in the insn described by USE, taking costs into account.  Return
   true if we made the replacement.  */

static bool
try_replace_in_use (struct reg_use *use, rtx reg, rtx src)
{
  rtx_insn *use_insn = use->insn;
  rtx mem = use->containing_mem;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (use_insn));

  if (mem != NULL_RTX)
    {
      addr_space_t as = MEM_ADDR_SPACE (mem);
      rtx oldaddr = XEXP (mem, 0);
      rtx newaddr = NULL_RTX;
      int old_cost = address_cost (oldaddr, GET_MODE (mem), as, speed);
      int new_cost;

      newaddr = simplify_replace_rtx (oldaddr, reg, src);
      if (memory_address_addr_space_p (GET_MODE (mem), newaddr, as))
	{
	  XEXP (mem, 0) = newaddr;
	  new_cost = address_cost (newaddr, GET_MODE (mem), as, speed);
	  XEXP (mem, 0) = oldaddr;
	  if (new_cost <= old_cost
	      && validate_change (use_insn,
				  &XEXP (mem, 0), newaddr, 0))
	    return true;
	}
    }
  else
    {
      rtx new_set = single_set (use_insn);
      if (new_set
	  && REG_P (SET_DEST (new_set))
	  && GET_CODE (SET_SRC (new_set)) == PLUS
	  && REG_P (XEXP (SET_SRC (new_set), 0))
	  && CONSTANT_P (XEXP (SET_SRC (new_set), 1)))
	{
	  rtx new_src;
	  int old_cost = set_src_cost (SET_SRC (new_set), speed);

	  gcc_assert (rtx_equal_p (XEXP (SET_SRC (new_set), 0), reg));
	  new_src = simplify_replace_rtx (SET_SRC (new_set), reg, src);

	  if (set_src_cost (new_src, speed) <= old_cost
	      && validate_change (use_insn, &SET_SRC (new_set),
				  new_src, 0))
	    return true;
	}
    }
  return false;
}
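
/* Illustrative use of the function above (hypothetical operands): given
   a USE located inside (mem:SI (reg:SI 1)) and SRC equal to
   (plus:SI (reg:SI 2) (const_int 8)), it forms the candidate address
   (plus:SI (reg:SI 2) (const_int 8)), keeps it only if the new address
   is valid and no costlier than the old one, and validates the change
   on the use's insn.  */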
/* Called by reload_combine when scanning INSN.  This function tries to detect
   patterns where a constant is added to a register, and the result is used
   in an address.
   Return true if no further processing is needed on INSN; false if it wasn't
   recognized and should be handled normally.  */

static bool
reload_combine_recognize_const_pattern (rtx_insn *insn)
{
  int from_ruid = reload_combine_ruid;
  rtx set, pat, reg, src, addreg;
  unsigned int regno;
  struct reg_use *use;
  bool must_move_add;
  rtx_insn *add_moved_after_insn = NULL;
  int add_moved_after_ruid = 0;
  int clobbered_regno = -1;

  set = single_set (insn);
  if (set == NULL_RTX)
    return false;

  reg = SET_DEST (set);
  src = SET_SRC (set);
  if (!REG_P (reg)
      || hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] != 1
      || GET_MODE (reg) != Pmode
      || reg == stack_pointer_rtx)
    return false;

  regno = REGNO (reg);

  /* We look for a REG1 = REG2 + CONSTANT insn, followed by either
     uses of REG1 inside an address, or inside another add insn.  If
     possible and profitable, merge the addition into subsequent
     uses.  */
  if (GET_CODE (src) != PLUS
      || !REG_P (XEXP (src, 0))
      || !CONSTANT_P (XEXP (src, 1)))
    return false;

  addreg = XEXP (src, 0);
  must_move_add = rtx_equal_p (reg, addreg);

  pat = PATTERN (insn);
  if (must_move_add && set != pat)
    {
      /* We have to be careful when moving the add; apart from the
	 single_set there may also be clobbers.  Recognize one special
	 case, that of one clobber alongside the set (likely a clobber
	 of the CC register).  */
      gcc_assert (GET_CODE (PATTERN (insn)) == PARALLEL);
      if (XVECLEN (pat, 0) != 2 || XVECEXP (pat, 0, 0) != set
	  || GET_CODE (XVECEXP (pat, 0, 1)) != CLOBBER
	  || !REG_P (XEXP (XVECEXP (pat, 0, 1), 0)))
	return false;
      clobbered_regno = REGNO (XEXP (XVECEXP (pat, 0, 1), 0));
    }

  do
    {
      use = reload_combine_closest_single_use (regno, from_ruid);

      if (use)
	/* Start the search for the next use from here.  */
	from_ruid = use->ruid;

      if (use && GET_MODE (*use->usep) == Pmode)
	{
	  bool delete_add = false;
	  rtx_insn *use_insn = use->insn;
	  int use_ruid = use->ruid;

	  /* Avoid moving the add insn past a jump.  */
	  if (must_move_add && use_ruid <= last_jump_ruid)
	    break;

	  /* If the add clobbers another hard reg in parallel, don't move
	     it past a real set of this hard reg.  */
	  if (must_move_add && clobbered_regno >= 0
	      && reg_state[clobbered_regno].real_store_ruid >= use_ruid)
	    break;

	  /* Do not separate cc0 setter and cc0 user on HAVE_cc0 targets.  */
	  if (HAVE_cc0 && must_move_add && sets_cc0_p (PATTERN (use_insn)))
	    break;

	  gcc_assert (reg_state[regno].store_ruid <= use_ruid);
	  /* Avoid moving a use of ADDREG past a point where it is stored.  */
	  if (reg_state[REGNO (addreg)].store_ruid > use_ruid)
	    break;

	  /* We also must not move the addition past an insn that sets
	     the same register, unless we can combine two add insns.  */
	  if (must_move_add && reg_state[regno].store_ruid == use_ruid)
	    {
	      if (use->containing_mem == NULL_RTX)
		delete_add = true;
	      else
		break;
	    }

	  if (try_replace_in_use (use, reg, src))
	    {
	      reload_combine_purge_insn_uses (use_insn);
	      reload_combine_note_use (&PATTERN (use_insn), use_insn,
				       use_ruid, NULL_RTX);

	      if (delete_add)
		{
		  fixup_debug_insns (reg, src, insn, use_insn);
		  delete_insn (insn);
		  return true;
		}
	      if (must_move_add)
		{
		  add_moved_after_insn = use_insn;
		  add_moved_after_ruid = use_ruid;
		}
	      continue;
	    }
	}
      /* If we get here, we couldn't handle this use.  */
      if (must_move_add)
	break;
    }
  while (use);

  if (!must_move_add || add_moved_after_insn == NULL_RTX)
    /* Process the add normally.  */
    return false;

  fixup_debug_insns (reg, src, insn, add_moved_after_insn);

  reorder_insns (insn, insn, add_moved_after_insn);
  reload_combine_purge_reg_uses_after_ruid (regno, add_moved_after_ruid);
  reload_combine_split_ruids (add_moved_after_ruid - 1);
  reload_combine_note_use (&PATTERN (insn), insn,
			   add_moved_after_ruid, NULL_RTX);
  reg_state[regno].store_ruid = add_moved_after_ruid;

  return true;
}
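
/* An example of the pattern handled above (hypothetical registers):

       (set (reg:SI 1) (plus:SI (reg:SI 1) (const_int 16)))
       ... (mem:SI (reg:SI 1)) ...

   The addition is merged into the later address, giving

       ... (mem:SI (plus:SI (reg:SI 1) (const_int 16))) ...

   and if the add's result is still needed afterwards, the add insn is
   moved down past the last use that was rewritten.  */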
/* Called by reload_combine when scanning INSN.  Try to detect a pattern we
   can handle and improve.  Return true if no further processing is needed on
   INSN; false if it wasn't recognized and should be handled normally.  */

static bool
reload_combine_recognize_pattern (rtx_insn *insn)
{
  rtx set, reg, src;
  unsigned int regno;

  set = single_set (insn);
  if (set == NULL_RTX)
    return false;

  reg = SET_DEST (set);
  src = SET_SRC (set);
  if (!REG_P (reg)
      || hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] != 1)
    return false;

  regno = REGNO (reg);

  /* Look for (set (REGX) (CONST_INT))
     (set (REGX) (PLUS (REGX) (REGY)))
     ...
     ... (MEM (REGX)) ...
     and convert it to
     (set (REGZ) (CONST_INT))
     ...
     ... (MEM (PLUS (REGZ) (REGY)))... .

     First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
     and that we know all uses of REGX before it dies.
     Also, explicitly check that REGX != REGY; our life information
     does not yet show whether REGY changes in this insn.  */

  if (GET_CODE (src) == PLUS
      && reg_state[regno].all_offsets_match
      && last_index_reg != -1
      && REG_P (XEXP (src, 1))
      && rtx_equal_p (XEXP (src, 0), reg)
      && !rtx_equal_p (XEXP (src, 1), reg)
      && reg_state[regno].use_index >= 0
      && reg_state[regno].use_index < RELOAD_COMBINE_MAX_USES
      && last_label_ruid < reg_state[regno].use_ruid)
    {
      rtx base = XEXP (src, 1);
      rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
      rtx prev_set = prev ? single_set (prev) : NULL_RTX;
      rtx index_reg = NULL_RTX;
      rtx reg_sum = NULL_RTX;
      int i;

      /* Now we need to set INDEX_REG to an index register (denoted as
	 REGZ in the illustration above) and REG_SUM to the expression
	 register+register that we want to use to substitute uses of REG
	 (typically in MEMs) with.  First check REG and BASE for being
	 index registers; we can use them even if they are not dead.  */
      if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
	  || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
				REGNO (base)))
	{
	  index_reg = reg;
	  reg_sum = src;
	}
      else
	{
	  /* Otherwise, look for a free index register.  Since we have
	     checked above that neither REG nor BASE are index registers,
	     if we find anything at all, it will be different from these
	     two registers.  */
	  for (i = first_index_reg; i <= last_index_reg; i++)
	    {
	      if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i)
		  && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
		  && reg_state[i].store_ruid <= reg_state[regno].use_ruid
		  && (call_used_regs[i] || df_regs_ever_live_p (i))
		  && (!frame_pointer_needed || i != HARD_FRAME_POINTER_REGNUM)
		  && !fixed_regs[i] && !global_regs[i]
		  && hard_regno_nregs[i][GET_MODE (reg)] == 1
		  && targetm.hard_regno_scratch_ok (i))
		{
		  index_reg = gen_rtx_REG (GET_MODE (reg), i);
		  reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
		  break;
		}
	    }
	}

      /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
	 (REGY), i.e. BASE, is not clobbered before the last use we'll
	 create.  */
      if (reg_sum
	  && prev_set
	  && CONST_INT_P (SET_SRC (prev_set))
	  && rtx_equal_p (SET_DEST (prev_set), reg)
	  && (reg_state[REGNO (base)].store_ruid
	      <= reg_state[regno].use_ruid))
	{
	  /* Change destination register and, if necessary, the constant
	     value in PREV, the constant loading instruction.  */
	  validate_change (prev, &SET_DEST (prev_set), index_reg, 1);
	  if (reg_state[regno].offset != const0_rtx)
	    validate_change (prev,
			     &SET_SRC (prev_set),
			     GEN_INT (INTVAL (SET_SRC (prev_set))
				      + INTVAL (reg_state[regno].offset)),
			     1);

	  /* Now for every use of REG that we have recorded, replace REG
	     with REG_SUM.  */
	  for (i = reg_state[regno].use_index;
	       i < RELOAD_COMBINE_MAX_USES; i++)
	    validate_unshare_change (reg_state[regno].reg_use[i].insn,
				     reg_state[regno].reg_use[i].usep,
				     /* Each change must have its own
					replacement.  */
				     reg_sum, 1);

	  if (apply_change_group ())
	    {
	      struct reg_use *lowest_ruid = NULL;

	      /* For every new use of REG_SUM, we have to record the use
		 of BASE therein, i.e. operand 1.  */
	      for (i = reg_state[regno].use_index;
		   i < RELOAD_COMBINE_MAX_USES; i++)
		{
		  struct reg_use *use = reg_state[regno].reg_use + i;
		  reload_combine_note_use (&XEXP (*use->usep, 1), use->insn,
					   use->ruid, use->containing_mem);
		  if (lowest_ruid == NULL || use->ruid < lowest_ruid->ruid)
		    lowest_ruid = use;
		}

	      fixup_debug_insns (reg, reg_sum, insn, lowest_ruid->insn);

	      /* Delete the reg-reg addition.  */
	      delete_insn (insn);

	      if (reg_state[regno].offset != const0_rtx)
		/* Previous REG_EQUIV / REG_EQUAL notes for PREV
		   are now invalid.  */
		remove_reg_equal_equiv_notes (prev);

	      reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
	      return true;
	    }
	}
    }
  return false;
}

static void
reload_combine (void)
{
  rtx_insn *insn, *prev;
  basic_block bb;
  unsigned int r;
  int min_labelno, n_labels;
  HARD_REG_SET ever_live_at_start, *label_live;

  /* To avoid wasting too much time later searching for an index register,
     determine the minimum and maximum index register numbers.  */
  if (INDEX_REG_CLASS == NO_REGS)
    last_index_reg = -1;
  else if (first_index_reg == -1 && last_index_reg == 0)
    {
      for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], r))
	  {
	    if (first_index_reg == -1)
	      first_index_reg = r;

	    last_index_reg = r;
	  }

      /* If no index register is available, we can quit now.  Set LAST_INDEX_REG
	 to -1 so we'll know to quit early the next time we get here.  */
      if (first_index_reg == -1)
	{
	  last_index_reg = -1;
	  return;
	}
    }

  /* Set up LABEL_LIVE and EVER_LIVE_AT_START.  The register lifetime
     information is a bit fuzzy immediately after reload, but it's
     still good enough to determine which registers are live at a jump
     destination.  */
  min_labelno = get_first_label_num ();
  n_labels = max_label_num () - min_labelno;
  label_live = XNEWVEC (HARD_REG_SET, n_labels);
  CLEAR_HARD_REG_SET (ever_live_at_start);

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      insn = BB_HEAD (bb);
      if (LABEL_P (insn))
	{
	  HARD_REG_SET live;
	  bitmap live_in = df_get_live_in (bb);

	  REG_SET_TO_HARD_REG_SET (live, live_in);
	  compute_use_by_pseudos (&live, live_in);
	  COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
	  IOR_HARD_REG_SET (ever_live_at_start, live);
	}
    }

  /* Initialize last_label_ruid, reload_combine_ruid and reg_state.  */
  last_label_ruid = last_jump_ruid = reload_combine_ruid = 0;
  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
    {
      reg_state[r].store_ruid = 0;
      reg_state[r].real_store_ruid = 0;
      if (fixed_regs[r])
	reg_state[r].use_index = -1;
      else
	reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
    }

  for (insn = get_last_insn (); insn; insn = prev)
    {
      bool control_flow_insn;
      rtx note;

      prev = PREV_INSN (insn);

      /* We cannot do our optimization across labels.  Invalidating all the use
	 information we have would be costly, so we just note where the label
	 is and then later disable any optimization that would cross it.  */
      if (LABEL_P (insn))
	last_label_ruid = reload_combine_ruid;
      else if (BARRIER_P (insn))
	{
	  /* Crossing a barrier resets all the use information.  */
	  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	    if (! fixed_regs[r])
	      reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
	}
      else if (INSN_P (insn) && volatile_insn_p (PATTERN (insn)))
	/* Optimizations across insns being marked as volatile must be
	   prevented.  All the usage information is invalidated
	   here.  */
	for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	  if (! fixed_regs[r]
	      && reg_state[r].use_index != RELOAD_COMBINE_MAX_USES)
	    reg_state[r].use_index = -1;

      if (! NONDEBUG_INSN_P (insn))
	continue;

      reload_combine_ruid++;

      control_flow_insn = control_flow_insn_p (insn);
      if (control_flow_insn)
	last_jump_ruid = reload_combine_ruid;

      if (reload_combine_recognize_const_pattern (insn)
	  || reload_combine_recognize_pattern (insn))
	continue;

      note_stores (PATTERN (insn), reload_combine_note_store, NULL);

      if (CALL_P (insn))
	{
	  rtx link;

	  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	    if (call_used_regs[r])
	      {
		reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
		reg_state[r].store_ruid = reload_combine_ruid;
	      }

	  for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
	       link = XEXP (link, 1))
	    {
	      rtx setuse = XEXP (link, 0);
	      rtx usage_rtx = XEXP (setuse, 0);
	      if ((GET_CODE (setuse) == USE || GET_CODE (setuse) == CLOBBER)
		  && REG_P (usage_rtx))
		{
		  unsigned int i;
		  unsigned int start_reg = REGNO (usage_rtx);
		  unsigned int num_regs
		    = hard_regno_nregs[start_reg][GET_MODE (usage_rtx)];
		  unsigned int end_reg = start_reg + num_regs - 1;
		  for (i = start_reg; i <= end_reg; i++)
		    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
		      {
			reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
			reg_state[i].store_ruid = reload_combine_ruid;
		      }
		    else
		      reg_state[i].use_index = -1;
		}
	    }
	}

      if (control_flow_insn && !ANY_RETURN_P (PATTERN (insn)))
	{
	  /* Non-spill registers might be used at the call destination in
	     some unknown fashion, so we have to mark the unknown use.  */
	  HARD_REG_SET *live;

	  if ((condjump_p (insn) || condjump_in_parallel_p (insn))
	      && JUMP_LABEL (insn))
	    {
	      if (ANY_RETURN_P (JUMP_LABEL (insn)))
		live = NULL;
	      else
		live = &LABEL_LIVE (JUMP_LABEL (insn));
	    }
	  else
	    live = &ever_live_at_start;

	  if (live)
	    for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	      if (TEST_HARD_REG_BIT (*live, r))
		reg_state[r].use_index = -1;
	}

      reload_combine_note_use (&PATTERN (insn), insn, reload_combine_ruid,
			       NULL_RTX);

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	{
	  if (REG_NOTE_KIND (note) == REG_INC && REG_P (XEXP (note, 0)))
	    {
	      int regno = REGNO (XEXP (note, 0));
	      reg_state[regno].store_ruid = reload_combine_ruid;
	      reg_state[regno].real_store_ruid = reload_combine_ruid;
	      reg_state[regno].use_index = -1;
	    }
	}
    }

  free (label_live);
}
/* Check if DST is a register or a subreg of a register; if it is,
   update store_ruid, real_store_ruid and use_index in the reg_state
   structure accordingly.  Called via note_stores from reload_combine.  */

static void
reload_combine_note_store (rtx dst, const_rtx set, void *data ATTRIBUTE_UNUSED)
{
  int regno = 0;
  int i;
  machine_mode mode = GET_MODE (dst);

  if (GET_CODE (dst) == SUBREG)
    {
      regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
				   GET_MODE (SUBREG_REG (dst)),
				   SUBREG_BYTE (dst),
				   GET_MODE (dst));
      dst = SUBREG_REG (dst);
    }

  /* Some targets do argument pushes without adding REG_INC notes.  */

  if (MEM_P (dst))
    {
      dst = XEXP (dst, 0);
      if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
	  || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC
	  || GET_CODE (dst) == PRE_MODIFY || GET_CODE (dst) == POST_MODIFY)
	{
	  regno = REGNO (XEXP (dst, 0));
	  mode = GET_MODE (XEXP (dst, 0));
	  for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
	    {
	      /* We could probably do better, but for now mark the register
		 as used in an unknown fashion and set/clobbered at this
		 insn.  */
	      reg_state[i].use_index = -1;
	      reg_state[i].store_ruid = reload_combine_ruid;
	      reg_state[i].real_store_ruid = reload_combine_ruid;
	    }
	}
      else
	return;
    }

  if (!REG_P (dst))
    return;
  regno += REGNO (dst);

  /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
     careful with registers / register parts that are not full words.
     Similarly for ZERO_EXTRACT.  */
  if (GET_CODE (SET_DEST (set)) == ZERO_EXTRACT
      || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART)
    {
      for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
	{
	  reg_state[i].use_index = -1;
	  reg_state[i].store_ruid = reload_combine_ruid;
	  reg_state[i].real_store_ruid = reload_combine_ruid;
	}
    }
  else
    {
      for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
	{
	  reg_state[i].store_ruid = reload_combine_ruid;
	  if (GET_CODE (set) == SET)
	    reg_state[i].real_store_ruid = reload_combine_ruid;
	  reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
	}
    }
}
/* XP points to a piece of rtl that has to be checked for any uses of
   registers.
   *XP is the pattern of INSN, or a part of it.
   Called from reload_combine, and recursively by itself.  */

static void
reload_combine_note_use (rtx *xp, rtx_insn *insn, int ruid, rtx containing_mem)
{
  rtx x = *xp;
  enum rtx_code code = x->code;
  const char *fmt;
  int i, j;
  rtx offset = const0_rtx; /* For the REG case below.  */

  switch (code)
    {
    case SET:
      if (REG_P (SET_DEST (x)))
	{
	  reload_combine_note_use (&SET_SRC (x), insn, ruid, NULL_RTX);
	  return;
	}
      break;

    case USE:
      /* If this is the USE of a return value, we can't change it.  */
      if (REG_P (XEXP (x, 0)) && REG_FUNCTION_VALUE_P (XEXP (x, 0)))
	{
	  /* Mark the return register as used in an unknown fashion.  */
	  rtx reg = XEXP (x, 0);
	  int regno = REGNO (reg);
	  int nregs = hard_regno_nregs[regno][GET_MODE (reg)];

	  while (--nregs >= 0)
	    reg_state[regno + nregs].use_index = -1;
	  return;
	}
      break;

    case CLOBBER:
      if (REG_P (SET_DEST (x)))
	{
	  /* No spurious CLOBBERs of pseudo registers may remain.  */
	  gcc_assert (REGNO (SET_DEST (x)) < FIRST_PSEUDO_REGISTER);
	  return;
	}
      break;

    case PLUS:
      /* We are interested in (plus (reg) (const_int)) .  */
      if (!REG_P (XEXP (x, 0))
	  || !CONST_INT_P (XEXP (x, 1)))
	break;
      offset = XEXP (x, 1);
      x = XEXP (x, 0);
      /* Fall through.  */
    case REG:
      {
	int regno = REGNO (x);
	int use_index;
	int nregs;

	/* No spurious USEs of pseudo registers may remain.  */
	gcc_assert (regno < FIRST_PSEUDO_REGISTER);

	nregs = hard_regno_nregs[regno][GET_MODE (x)];

	/* We can't substitute into multi-hard-reg uses.  */
	if (nregs > 1)
	  {
	    while (--nregs >= 0)
	      reg_state[regno + nregs].use_index = -1;
	    return;
	  }

	/* We may be called to update uses in previously seen insns.
	   Don't add uses beyond the last store we saw.  */
	if (ruid < reg_state[regno].store_ruid)
	  return;

	/* If this register is already used in some unknown fashion, we
	   can't do anything.
	   If we decrement the index from zero to -1, we can't store more
	   uses, so this register becomes used in an unknown fashion.  */
	use_index = --reg_state[regno].use_index;
	if (use_index < 0)
	  return;

	if (use_index == RELOAD_COMBINE_MAX_USES - 1)
	  {
	    /* This is the first use of this register we have seen since we
	       marked it as dead.  */
	    reg_state[regno].offset = offset;
	    reg_state[regno].all_offsets_match = true;
	    reg_state[regno].use_ruid = ruid;
	  }
	else
	  {
	    if (reg_state[regno].use_ruid > ruid)
	      reg_state[regno].use_ruid = ruid;

	    if (! rtx_equal_p (offset, reg_state[regno].offset))
	      reg_state[regno].all_offsets_match = false;
	  }

	reg_state[regno].reg_use[use_index].insn = insn;
	reg_state[regno].reg_use[use_index].ruid = ruid;
	reg_state[regno].reg_use[use_index].containing_mem = containing_mem;
	reg_state[regno].reg_use[use_index].usep = xp;
	return;
      }

    case MEM:
      containing_mem = x;
      break;

    default:
      break;
    }

  /* Recursively process the components of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	reload_combine_note_use (&XEXP (x, i), insn, ruid, containing_mem);
      else if (fmt[i] == 'E')
	{
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    reload_combine_note_use (&XVECEXP (x, i, j), insn, ruid,
				     containing_mem);
	}
    }
}
/* See if we can reduce the cost of a constant by replacing a move
   with an add.  We track situations in which a register is set to a
   constant or to a register plus a constant.  */
/* We cannot do our optimization across labels.  Invalidating all the
   information about register contents we have would be costly, so we
   use move2add_last_label_luid to note where the label is and then
   later disable any optimization that would cross it.
   reg_offset[n] / reg_base_reg[n] / reg_symbol_ref[n] / reg_mode[n]
   are only valid if reg_set_luid[n] is greater than
   move2add_last_label_luid.
   For a set that established a new (potential) base register with
   non-constant value, we use move2add_luid from the place where the
   setting insn is encountered; registers based off that base then
   get the same reg_set_luid.  Constants all get
   move2add_last_label_luid + 1 as their reg_set_luid.  */
static int reg_set_luid[FIRST_PSEUDO_REGISTER];

/* If reg_base_reg[n] is negative, register n has been set to
   reg_offset[n] or reg_symbol_ref[n] + reg_offset[n] in mode reg_mode[n].
   If reg_base_reg[n] is non-negative, register n has been set to the
   sum of reg_offset[n] and the value of register reg_base_reg[n]
   before reg_set_luid[n], calculated in mode reg_mode[n].
   For multi-hard-register registers, all but the first one are
   recorded as BLKmode in reg_mode.  Setting reg_mode to VOIDmode
   marks it as invalid.  */
static HOST_WIDE_INT reg_offset[FIRST_PSEUDO_REGISTER];
static int reg_base_reg[FIRST_PSEUDO_REGISTER];
static rtx reg_symbol_ref[FIRST_PSEUDO_REGISTER];
static machine_mode reg_mode[FIRST_PSEUDO_REGISTER];

/* move2add_luid is linearly increased while scanning the instructions
   from first to last.  It is used to set reg_set_luid in
   reload_cse_move2add and move2add_note_store.  */
static int move2add_luid;

/* move2add_last_label_luid is set whenever a label is found.  Labels
   invalidate all previously collected reg_offset data.  */
static int move2add_last_label_luid;

/* ??? We don't know how zero / sign extension is handled, hence we
   can't go from a narrower to a wider mode.  */
#define MODES_OK_FOR_MOVE2ADD(OUTMODE, INMODE) \
  (GET_MODE_SIZE (OUTMODE) == GET_MODE_SIZE (INMODE) \
   || (GET_MODE_SIZE (OUTMODE) <= GET_MODE_SIZE (INMODE) \
       && TRULY_NOOP_TRUNCATION_MODES_P (OUTMODE, INMODE)))
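
/* A worked example of the macro above (hypothetical target): a value
   recorded in SImode may be reused for a QImode access when truncation
   from SImode to QImode is a no-op (the second clause), and trivially
   when both modes have the same size.  Reusing a QImode value for an
   SImode access is never allowed, since we don't know how the target
   extends narrow values.  */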
/* Record that REG is being set to a value with the mode of REG.  */

static void
move2add_record_mode (rtx reg)
{
  int regno, nregs;
  machine_mode mode = GET_MODE (reg);

  if (GET_CODE (reg) == SUBREG)
    {
      regno = subreg_regno (reg);
      nregs = subreg_nregs (reg);
    }
  else if (REG_P (reg))
    {
      regno = REGNO (reg);
      nregs = hard_regno_nregs[regno][mode];
    }
  else
    gcc_unreachable ();
  for (int i = nregs - 1; i > 0; i--)
    reg_mode[regno + i] = BLKmode;
  reg_mode[regno] = mode;
}
/* Record that REG is being set to the sum of SYM and OFF.  */

static void
move2add_record_sym_value (rtx reg, rtx sym, rtx off)
{
  int regno = REGNO (reg);

  move2add_record_mode (reg);
  reg_set_luid[regno] = move2add_luid;
  reg_base_reg[regno] = -1;
  reg_symbol_ref[regno] = sym;
  reg_offset[regno] = INTVAL (off);
}
/* Check if REGNO contains a valid value in MODE.  */

static bool
move2add_valid_value_p (int regno, machine_mode mode)
{
  if (reg_set_luid[regno] <= move2add_last_label_luid)
    return false;

  if (mode != reg_mode[regno])
    {
      if (!MODES_OK_FOR_MOVE2ADD (mode, reg_mode[regno]))
	return false;
      /* The value loaded into regno in reg_mode[regno] is also valid in
	 mode after truncation only if (REG:mode regno) is the lowpart of
	 (REG:reg_mode[regno] regno).  Now, for big endian, the starting
	 regno of the lowpart might be different.  */
      int s_off = subreg_lowpart_offset (mode, reg_mode[regno]);
      s_off = subreg_regno_offset (regno, reg_mode[regno], s_off, mode);
      if (s_off != 0)
	/* We could in principle adjust regno, check reg_mode[regno] to be
	   BLKmode, and return s_off to the caller (vs. -1 for failure),
	   but we currently have no callers that could make use of this
	   information.  */
	return false;
    }

  for (int i = hard_regno_nregs[regno][mode] - 1; i > 0; i--)
    if (reg_mode[regno + i] != BLKmode)
      return false;
  return true;
}
/* This function is called with INSN that sets REG to (SYM + OFF),
   while REG is known to already have value (SYM + offset).
   This function tries to change INSN into an add instruction
   (set (REG) (plus (REG) (OFF - offset))) using the known value.
   It also updates the information about REG's known value.
   Return true if we made a change.  */

static bool
move2add_use_add2_insn (rtx reg, rtx sym, rtx off, rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  int regno = REGNO (reg);
  rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[regno],
			      GET_MODE (reg));
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  bool changed = false;

  /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
     use (set (reg) (reg)) instead.
     We don't delete this insn, nor do we convert it into a
     note, to avoid losing register notes or the return
     value flag.  jump2 already knows how to get rid of
     no-op moves.  */
  if (new_src == const0_rtx)
    {
      /* If the constants are different, this is a
	 truncation, that, if turned into (set (reg)
	 (reg)), would be discarded.  Maybe we should
	 try a truncMN pattern?  */
      if (INTVAL (off) == reg_offset[regno])
	changed = validate_change (insn, &SET_SRC (pat), reg, 0);
    }
  else
    {
      struct full_rtx_costs oldcst, newcst;
      rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src);

      get_full_set_rtx_cost (pat, &oldcst);
      SET_SRC (pat) = tem;
      get_full_set_rtx_cost (pat, &newcst);
      SET_SRC (pat) = src;

      if (costs_lt_p (&newcst, &oldcst, speed)
	  && have_add2_insn (reg, new_src))
	changed = validate_change (insn, &SET_SRC (pat), tem, 0);
      else if (sym == NULL_RTX && GET_MODE (reg) != BImode)
	{
	  machine_mode narrow_mode;
	  for (narrow_mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       narrow_mode != VOIDmode
		 && narrow_mode != GET_MODE (reg);
	       narrow_mode = GET_MODE_WIDER_MODE (narrow_mode))
	    {
	      if (have_insn_for (STRICT_LOW_PART, narrow_mode)
		  && ((reg_offset[regno] & ~GET_MODE_MASK (narrow_mode))
		      == (INTVAL (off) & ~GET_MODE_MASK (narrow_mode))))
		{
		  rtx narrow_reg = gen_lowpart_common (narrow_mode, reg);
		  rtx narrow_src = gen_int_mode (INTVAL (off),
						 narrow_mode);
		  rtx new_set
		    = gen_rtx_SET (gen_rtx_STRICT_LOW_PART (VOIDmode,
							    narrow_reg),
				   narrow_src);
		  get_full_set_rtx_cost (new_set, &newcst);
		  if (costs_lt_p (&newcst, &oldcst, speed))
		    {
		      changed = validate_change (insn, &PATTERN (insn),
						 new_set, 0);
		      if (changed)
			break;
		    }
		}
	    }
	}
    }
  move2add_record_sym_value (reg, sym, off);
  return changed;
}
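
/* An illustrative transformation performed by the function above
   (hypothetical register and constants): if (reg:SI 1) is known to hold
   the value 100, then

       (set (reg:SI 1) (const_int 104))

   becomes

       (set (reg:SI 1) (plus:SI (reg:SI 1) (const_int 4)))

   whenever the add is cheaper than the wide constant load; setting just
   a STRICT_LOW_PART of the register is tried as a further fallback.  */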
/* This function is called with INSN that sets REG to (SYM + OFF),
   but REG doesn't have known value (SYM + offset).  This function
   tries to find another register which is known to already have
   value (SYM + offset) and change INSN into an add instruction
   (set (REG) (plus (the found register) (OFF - offset))) if such
   a register is found.  It also updates the information about
   REG's known value.
   Return true iff we made a change.  */

static bool
move2add_use_add3_insn (rtx reg, rtx sym, rtx off, rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  int regno = REGNO (reg);
  int min_regno = 0;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  int i;
  bool changed = false;
  struct full_rtx_costs oldcst, newcst, mincst;
  rtx plus_expr;

  init_costs_to_max (&mincst);
  get_full_set_rtx_cost (pat, &oldcst);

  plus_expr = gen_rtx_PLUS (GET_MODE (reg), reg, const0_rtx);
  SET_SRC (pat) = plus_expr;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (move2add_valid_value_p (i, GET_MODE (reg))
	&& reg_base_reg[i] < 0
	&& reg_symbol_ref[i] != NULL_RTX
	&& rtx_equal_p (sym, reg_symbol_ref[i]))
      {
	rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[i],
				    GET_MODE (reg));
	/* (set (reg) (plus (reg) (const_int 0))) is not canonical;
	   use (set (reg) (reg)) instead.
	   We don't delete this insn, nor do we convert it into a
	   note, to avoid losing register notes or the return
	   value flag.  jump2 already knows how to get rid of
	   no-op moves.  */
	if (new_src == const0_rtx)
	  {
	    init_costs_to_zero (&mincst);
	    min_regno = i;
	    break;
	  }
	else
	  {
	    XEXP (plus_expr, 1) = new_src;
	    get_full_set_rtx_cost (pat, &newcst);

	    if (costs_lt_p (&newcst, &mincst, speed))
	      {
		mincst = newcst;
		min_regno = i;
	      }
	  }
      }
  SET_SRC (pat) = src;

  if (costs_lt_p (&mincst, &oldcst, speed))
    {
      rtx tem;

      tem = gen_rtx_REG (GET_MODE (reg), min_regno);
      if (i != min_regno)
	{
	  rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[min_regno],
				      GET_MODE (reg));
	  tem = gen_rtx_PLUS (GET_MODE (reg), tem, new_src);
	}
      if (validate_change (insn, &SET_SRC (pat), tem, 0))
	changed = true;
    }
  reg_set_luid[regno] = move2add_luid;
  move2add_record_sym_value (reg, sym, off);
  return changed;
}
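
/* An illustrative transformation for the symbolic case (hypothetical
   registers): if (reg:SI 1) is known to hold (symbol_ref X) plus 4, then

       (set (reg:SI 2) (const (plus (symbol_ref X) (const_int 12))))

   can become

       (set (reg:SI 2) (plus:SI (reg:SI 1) (const_int 8)))

   choosing, among all registers known to hold X plus some offset, the
   one that gives the cheapest replacement.  */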
/* Convert move insns with constant inputs to additions if they are cheaper.
   Return true if any changes were made.  */

static bool
reload_cse_move2add (rtx_insn *first)
{
  int i;
  rtx_insn *insn;
  bool changed = false;

  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
    {
      reg_set_luid[i] = 0;
      reg_offset[i] = 0;
      reg_base_reg[i] = 0;
      reg_symbol_ref[i] = NULL_RTX;
      reg_mode[i] = VOIDmode;
    }

  move2add_last_label_luid = 0;
  move2add_luid = 2;
  for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
    {
      rtx pat, note;

      if (LABEL_P (insn))
	{
	  move2add_last_label_luid = move2add_luid;
	  /* We're going to increment move2add_luid twice after a
	     label, so that we can use move2add_last_label_luid + 1 as
	     the luid for constants.  */
	  move2add_luid++;
	  continue;
	}
      if (! INSN_P (insn))
	continue;
      pat = PATTERN (insn);
      /* For simplicity, we only perform this optimization on
	 straightforward SETs.  */
      if (GET_CODE (pat) == SET
	  && REG_P (SET_DEST (pat)))
	{
	  rtx reg = SET_DEST (pat);
	  int regno = REGNO (reg);
	  rtx src = SET_SRC (pat);

	  /* Check if we have valid information on the contents of this
	     register in the mode of REG.  */
	  if (move2add_valid_value_p (regno, GET_MODE (reg))
	      && dbg_cnt (cse2_move2add))
	    {
	      /* Try to transform (set (REGX) (CONST_INT A))
				  ...
				  (set (REGX) (CONST_INT B))
		 to
				  (set (REGX) (CONST_INT A))
				  ...
				  (set (REGX) (plus (REGX) (CONST_INT B-A)))
		 or
				  (set (REGX) (CONST_INT A))
				  ...
				  (set (STRICT_LOW_PART (REGX)) (CONST_INT B))
	      */

	      if (CONST_INT_P (src)
		  && reg_base_reg[regno] < 0
		  && reg_symbol_ref[regno] == NULL_RTX)
		{
		  changed |= move2add_use_add2_insn (reg, NULL_RTX, src, insn);
		  continue;
		}

	      /* Try to transform (set (REGX) (REGY))
				  (set (REGX) (PLUS (REGX) (CONST_INT A)))
				  ...
				  (set (REGX) (REGY))
				  (set (REGX) (PLUS (REGX) (CONST_INT B)))
		 to
				  (set (REGX) (REGY))
				  (set (REGX) (PLUS (REGX) (CONST_INT A)))
				  ...
				  (set (REGX) (REGY))
				  (set (REGX) (plus (REGX) (CONST_INT B-A)))  */
	      else if (REG_P (src)
		       && reg_set_luid[regno] == reg_set_luid[REGNO (src)]
		       && reg_base_reg[regno] == reg_base_reg[REGNO (src)]
		       && move2add_valid_value_p (REGNO (src), GET_MODE (reg)))
		{
		  rtx_insn *next = next_nonnote_nondebug_insn (insn);
		  rtx set = NULL_RTX;
		  if (next)
		    set = single_set (next);
		  if (set
		      && SET_DEST (set) == reg
		      && GET_CODE (SET_SRC (set)) == PLUS
		      && XEXP (SET_SRC (set), 0) == reg
		      && CONST_INT_P (XEXP (SET_SRC (set), 1)))
		    {
		      rtx src3 = XEXP (SET_SRC (set), 1);
		      unsigned HOST_WIDE_INT added_offset = UINTVAL (src3);
		      HOST_WIDE_INT base_offset = reg_offset[REGNO (src)];
		      HOST_WIDE_INT regno_offset = reg_offset[regno];
		      rtx new_src
			= gen_int_mode (added_offset
					+ base_offset
					- regno_offset,
					GET_MODE (reg));
		      bool success = false;
		      bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));

		      if (new_src == const0_rtx)
			/* See above why we create (set (reg) (reg)) here.  */
			success
			  = validate_change (next, &SET_SRC (set), reg, 0);
		      else
			{
			  rtx old_src = SET_SRC (set);
			  struct full_rtx_costs oldcst, newcst;
			  rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src);

			  get_full_set_rtx_cost (set, &oldcst);
			  SET_SRC (set) = tem;
			  get_full_set_src_cost (tem, &newcst);
			  SET_SRC (set) = old_src;
			  costs_add_n_insns (&oldcst, 1);

			  if (costs_lt_p (&newcst, &oldcst, speed)
			      && have_add2_insn (reg, new_src))
			    {
			      rtx newpat = gen_rtx_SET (reg, tem);
			      success
				= validate_change (next, &PATTERN (next),
						   newpat, 0);
			    }
			}
		      if (success)
			delete_insn (insn);
		      changed |= success;
		      insn = next;
		      move2add_record_mode (reg);
		      reg_offset[regno]
			= trunc_int_for_mode (added_offset + base_offset,
					      GET_MODE (reg));
		      continue;
		    }
		}
	    }

	  /* Try to transform
	     (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
	     ...
	     (set (REGY) (CONST (PLUS (SYMBOL_REF) (CONST_INT B))))
	     to
	     (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
	     ...
	     (set (REGY) (CONST (PLUS (REGX) (CONST_INT B-A))))  */
	  if ((GET_CODE (src) == SYMBOL_REF
	       || (GET_CODE (src) == CONST
		   && GET_CODE (XEXP (src, 0)) == PLUS
		   && GET_CODE (XEXP (XEXP (src, 0), 0)) == SYMBOL_REF
		   && CONST_INT_P (XEXP (XEXP (src, 0), 1))))
	      && dbg_cnt (cse2_move2add))
	    {
	      rtx sym, off;

	      if (GET_CODE (src) == SYMBOL_REF)
		{
		  sym = src;
		  off = const0_rtx;
		}
	      else
		{
		  sym = XEXP (XEXP (src, 0), 0);
		  off = XEXP (XEXP (src, 0), 1);
		}

	      /* If the reg already contains the value which is sum of
		 sym and some constant value, we can use an add2 insn.  */
	      if (move2add_valid_value_p (regno, GET_MODE (reg))
		  && reg_base_reg[regno] < 0
		  && reg_symbol_ref[regno] != NULL_RTX
		  && rtx_equal_p (sym, reg_symbol_ref[regno]))
		changed |= move2add_use_add2_insn (reg, sym, off, insn);

	      /* Otherwise, we have to find a register whose value is sum
		 of sym and some constant value.  */
	      else
		changed |= move2add_use_add3_insn (reg, sym, off, insn);

	      continue;
	    }
	}

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	{
	  if (REG_NOTE_KIND (note) == REG_INC
	      && REG_P (XEXP (note, 0)))
	    {
	      /* Reset the information about this register.  */
	      int regno = REGNO (XEXP (note, 0));
	      if (regno < FIRST_PSEUDO_REGISTER)
		{
		  move2add_record_mode (XEXP (note, 0));
		  reg_mode[regno] = VOIDmode;
		}
	    }
	}
      note_stores (PATTERN (insn), move2add_note_store, insn);

      /* If INSN is a conditional branch, we try to extract an
	 implicit set out of it.  */
      if (any_condjump_p (insn))
	{
	  rtx cnd = fis_get_condition (insn);

	  if (cnd != NULL_RTX
	      && GET_CODE (cnd) == NE
	      && REG_P (XEXP (cnd, 0))
	      && !reg_set_p (XEXP (cnd, 0), insn)
	      /* The following two checks, which are also in
		 move2add_note_store, are intended to reduce the
		 number of calls to gen_rtx_SET to avoid memory
		 allocation if possible.  */
	      && SCALAR_INT_MODE_P (GET_MODE (XEXP (cnd, 0)))
	      && hard_regno_nregs[REGNO (XEXP (cnd, 0))][GET_MODE (XEXP (cnd, 0))] == 1
	      && CONST_INT_P (XEXP (cnd, 1)))
	    {
	      rtx implicit_set
		= gen_rtx_SET (XEXP (cnd, 0), XEXP (cnd, 1));
	      move2add_note_store (SET_DEST (implicit_set), implicit_set, insn);
	    }
	}

      /* If this is a CALL_INSN, all call used registers are stored with
	 unknown values.  */
      if (CALL_P (insn))
	{
	  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
	    {
	      if (call_used_regs[i])
		/* Reset the information about this register.  */
		reg_mode[i] = VOIDmode;
	    }
	}
    }
  return changed;
}
/* SET is a SET or CLOBBER that sets DST.  DATA is the insn which
   contains SET.
   Update reg_set_luid, reg_offset and reg_base_reg accordingly.
   Called from reload_cse_move2add via note_stores.  */

static void
move2add_note_store (rtx dst, const_rtx set, void *data)
{
  rtx_insn *insn = (rtx_insn *) data;
  unsigned int regno = 0;
  machine_mode mode = GET_MODE (dst);

  /* Some targets do argument pushes without adding REG_INC notes.  */

  if (MEM_P (dst))
    {
      dst = XEXP (dst, 0);
      if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
	  || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC)
	reg_mode[REGNO (XEXP (dst, 0))] = VOIDmode;
      return;
    }

  if (GET_CODE (dst) == SUBREG)
    regno = subreg_regno (dst);
  else if (REG_P (dst))
    regno = REGNO (dst);
  else
    return;

  if (SCALAR_INT_MODE_P (mode)
      && GET_CODE (set) == SET)
    {
      rtx note, sym = NULL_RTX;
      rtx off;

      note = find_reg_equal_equiv_note (insn);
      if (note && GET_CODE (XEXP (note, 0)) == SYMBOL_REF)
	{
	  sym = XEXP (note, 0);
	  off = const0_rtx;
	}
      else if (note && GET_CODE (XEXP (note, 0)) == CONST
	       && GET_CODE (XEXP (XEXP (note, 0), 0)) == PLUS
	       && GET_CODE (XEXP (XEXP (XEXP (note, 0), 0), 0)) == SYMBOL_REF
	       && CONST_INT_P (XEXP (XEXP (XEXP (note, 0), 0), 1)))
	{
	  sym = XEXP (XEXP (XEXP (note, 0), 0), 0);
	  off = XEXP (XEXP (XEXP (note, 0), 0), 1);
	}

      if (sym != NULL_RTX)
	{
	  move2add_record_sym_value (dst, sym, off);
	  return;
	}
    }

  if (SCALAR_INT_MODE_P (mode)
      && GET_CODE (set) == SET
      && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
    {
      rtx src = SET_SRC (set);
      rtx base_reg;
      unsigned HOST_WIDE_INT offset;
      int base_regno;

      switch (GET_CODE (src))
	{
	case PLUS:
	  if (REG_P (XEXP (src, 0)))
	    {
	      base_reg = XEXP (src, 0);

	      if (CONST_INT_P (XEXP (src, 1)))
		offset = UINTVAL (XEXP (src, 1));
	      else if (REG_P (XEXP (src, 1))
		       && move2add_valid_value_p (REGNO (XEXP (src, 1)), mode))
		{
		  if (reg_base_reg[REGNO (XEXP (src, 1))] < 0
		      && reg_symbol_ref[REGNO (XEXP (src, 1))] == NULL_RTX)
		    offset = reg_offset[REGNO (XEXP (src, 1))];
		  /* Maybe the first register is known to be a
		     constant.  */
		  else if (move2add_valid_value_p (REGNO (base_reg), mode)
			   && reg_base_reg[REGNO (base_reg)] < 0
			   && reg_symbol_ref[REGNO (base_reg)] == NULL_RTX)
		    {
		      offset = reg_offset[REGNO (base_reg)];
		      base_reg = XEXP (src, 1);
		    }
		  else
		    goto invalidate;
		}
	      else
		goto invalidate;

	      break;
	    }

	  goto invalidate;

	case REG:
	  base_reg = src;
	  offset = 0;
	  break;

	case CONST_INT:
	  /* Start tracking the register as a constant.  */
	  reg_base_reg[regno] = -1;
	  reg_symbol_ref[regno] = NULL_RTX;
	  reg_offset[regno] = INTVAL (SET_SRC (set));
	  /* We assign the same luid to all registers set to constants.  */
	  reg_set_luid[regno] = move2add_last_label_luid + 1;
	  move2add_record_mode (dst);
	  return;

	default:
	  goto invalidate;
	}

      base_regno = REGNO (base_reg);
      /* If information about the base register is not valid, set it
	 up as a new base register, pretending its value is known
	 starting from the current insn.  */
      if (!move2add_valid_value_p (base_regno, mode))
	{
	  reg_base_reg[base_regno] = base_regno;
	  reg_symbol_ref[base_regno] = NULL_RTX;
	  reg_offset[base_regno] = 0;
	  reg_set_luid[base_regno] = move2add_luid;
	  gcc_assert (GET_MODE (base_reg) == mode);
	  move2add_record_mode (base_reg);
	}

      /* Copy base information from our base register.  */
      reg_set_luid[regno] = reg_set_luid[base_regno];
      reg_base_reg[regno] = reg_base_reg[base_regno];
      reg_symbol_ref[regno] = reg_symbol_ref[base_regno];

      /* Compute the sum of the offsets or constants.  */
      reg_offset[regno]
	= trunc_int_for_mode (offset + reg_offset[base_regno], mode);

      move2add_record_mode (dst);
    }
  else
    {
    invalidate:
      /* Invalidate the contents of the register.  */
      move2add_record_mode (dst);
      reg_mode[regno] = VOIDmode;
    }
}
namespace {

const pass_data pass_data_postreload_cse =
{
  RTL_PASS, /* type */
  "postreload", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_RELOAD_CSE_REGS, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_postreload_cse : public rtl_opt_pass
{
public:
  pass_postreload_cse (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_postreload_cse, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return (optimize > 0 && reload_completed); }

  virtual unsigned int execute (function *);

}; // class pass_postreload_cse

unsigned int
pass_postreload_cse::execute (function *fun)
{
  if (!dbg_cnt (postreload_cse))
    return 0;

  /* Do a very simple CSE pass over just the hard registers.  */
  reload_cse_regs (get_insns ());
  /* Reload_cse_regs can eliminate potentially-trapping MEMs.
     Remove any EH edges associated with them.  */
  if (fun->can_throw_non_call_exceptions
      && purge_all_dead_edges ())
    cleanup_cfg (0);

  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_postreload_cse (gcc::context *ctxt)
{
  return new pass_postreload_cse (ctxt);
}