/* Perform simple optimizations to clean up the result of reload.
   (gcc/postreload.c — source recovered from a git-blame listing.)  */
15e35479 1/* Perform simple optimizations to clean up the result of reload.
a5544970 2 Copyright (C) 1987-2019 Free Software Foundation, Inc.
15e35479
KH
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
15e35479
KH
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
15e35479
KH
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
c7131fb2 23#include "backend.h"
957060b5 24#include "target.h"
15e35479 25#include "rtl.h"
957060b5
AM
26#include "tree.h"
27#include "predict.h"
c7131fb2 28#include "df.h"
4d0cdd0c 29#include "memmodel.h"
15e35479 30#include "tm_p.h"
957060b5
AM
31#include "optabs.h"
32#include "regs.h"
33#include "emit-rtl.h"
34#include "recog.h"
957060b5 35
60393bbc
AM
36#include "cfgrtl.h"
37#include "cfgbuild.h"
38#include "cfgcleanup.h"
15e35479 39#include "reload.h"
15e35479 40#include "cselib.h"
ef330312 41#include "tree-pass.h"
6fb5fa3c 42#include "dbgcnt.h"
15e35479 43
0c20a65f 44static int reload_cse_noop_set_p (rtx);
f90af2e0 45static bool reload_cse_simplify (rtx_insn *, rtx);
3a15c2cf 46static void reload_cse_regs_1 (void);
f90af2e0
DM
47static int reload_cse_simplify_set (rtx, rtx_insn *);
48static int reload_cse_simplify_operands (rtx_insn *, rtx);
15e35479 49
0c20a65f 50static void reload_combine (void);
f90af2e0 51static void reload_combine_note_use (rtx *, rtx_insn *, int, rtx);
7bc980e1 52static void reload_combine_note_store (rtx, const_rtx, void *);
15e35479 53
f90af2e0 54static bool reload_cse_move2add (rtx_insn *);
7bc980e1 55static void move2add_note_store (rtx, const_rtx, void *);
15e35479
KH
56
57/* Call cse / combine like post-reload optimization phases.
58 FIRST is the first instruction. */
8bb91f49
SB
59
60static void
f90af2e0 61reload_cse_regs (rtx_insn *first ATTRIBUTE_UNUSED)
15e35479 62{
dc0d5a57 63 bool moves_converted;
3a15c2cf 64 reload_cse_regs_1 ();
15e35479 65 reload_combine ();
dc0d5a57 66 moves_converted = reload_cse_move2add (first);
15e35479 67 if (flag_expensive_optimizations)
dc0d5a57
BS
68 {
69 if (moves_converted)
70 reload_combine ();
3a15c2cf 71 reload_cse_regs_1 ();
dc0d5a57 72 }
15e35479
KH
73}
74
75/* See whether a single set SET is a noop. */
76static int
0c20a65f 77reload_cse_noop_set_p (rtx set)
15e35479
KH
78{
79 if (cselib_reg_set_mode (SET_DEST (set)) != GET_MODE (SET_DEST (set)))
80 return 0;
81
82 return rtx_equal_for_cselib_p (SET_DEST (set), SET_SRC (set));
83}
84
3a15c2cf
SB
85/* Try to simplify INSN. Return true if the CFG may have changed. */
86static bool
f90af2e0 87reload_cse_simplify (rtx_insn *insn, rtx testreg)
15e35479
KH
88{
89 rtx body = PATTERN (insn);
3a15c2cf
SB
90 basic_block insn_bb = BLOCK_FOR_INSN (insn);
91 unsigned insn_bb_succs = EDGE_COUNT (insn_bb->succs);
15e35479 92
d5a752eb
AV
93 /* If NO_FUNCTION_CSE has been set by the target, then we should not try
94 to cse function calls. */
95 if (NO_FUNCTION_CSE && CALL_P (insn))
96 return false;
97
15e35479
KH
98 if (GET_CODE (body) == SET)
99 {
100 int count = 0;
101
102 /* Simplify even if we may think it is a no-op.
103 We may think a memory load of a value smaller than WORD_SIZE
104 is redundant because we haven't taken into account possible
105 implicit extension. reload_cse_simplify_set() will bring
106 this out, so it's safer to simplify before we delete. */
107 count += reload_cse_simplify_set (body, insn);
108
109 if (!count && reload_cse_noop_set_p (body))
110 {
9e582b1d
JR
111 if (check_for_inc_dec (insn))
112 delete_insn_and_edges (insn);
3a15c2cf
SB
113 /* We're done with this insn. */
114 goto done;
15e35479
KH
115 }
116
117 if (count > 0)
118 apply_change_group ();
119 else
120 reload_cse_simplify_operands (insn, testreg);
121 }
122 else if (GET_CODE (body) == PARALLEL)
123 {
124 int i;
125 int count = 0;
126 rtx value = NULL_RTX;
127
0d87c765
RH
128 /* Registers mentioned in the clobber list for an asm cannot be reused
129 within the body of the asm. Invalidate those registers now so that
130 we don't try to substitute values for them. */
131 if (asm_noperands (body) >= 0)
132 {
133 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
134 {
135 rtx part = XVECEXP (body, 0, i);
8df47bdf
AH
136 /* asms can only have full clobbers, not clobber_highs. */
137 gcc_assert (GET_CODE (part) != CLOBBER_HIGH);
0d87c765
RH
138 if (GET_CODE (part) == CLOBBER && REG_P (XEXP (part, 0)))
139 cselib_invalidate_rtx (XEXP (part, 0));
140 }
141 }
142
15e35479
KH
143 /* If every action in a PARALLEL is a noop, we can delete
144 the entire PARALLEL. */
145 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
146 {
147 rtx part = XVECEXP (body, 0, i);
148 if (GET_CODE (part) == SET)
149 {
150 if (! reload_cse_noop_set_p (part))
151 break;
152 if (REG_P (SET_DEST (part))
153 && REG_FUNCTION_VALUE_P (SET_DEST (part)))
154 {
155 if (value)
156 break;
157 value = SET_DEST (part);
158 }
159 }
c6a26179 160 else if (GET_CODE (part) != CLOBBER
8df47bdf 161 && GET_CODE (part) != CLOBBER_HIGH
c6a26179 162 && GET_CODE (part) != USE)
15e35479
KH
163 break;
164 }
165
166 if (i < 0)
167 {
9e582b1d
JR
168 if (check_for_inc_dec (insn))
169 delete_insn_and_edges (insn);
15e35479 170 /* We're done with this insn. */
3a15c2cf 171 goto done;
15e35479
KH
172 }
173
174 /* It's not a no-op, but we can try to simplify it. */
175 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
176 if (GET_CODE (XVECEXP (body, 0, i)) == SET)
177 count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);
178
179 if (count > 0)
180 apply_change_group ();
181 else
182 reload_cse_simplify_operands (insn, testreg);
183 }
3a15c2cf
SB
184
185done:
186 return (EDGE_COUNT (insn_bb->succs) != insn_bb_succs);
15e35479
KH
187}
188
189/* Do a very simple CSE pass over the hard registers.
190
191 This function detects no-op moves where we happened to assign two
192 different pseudo-registers to the same hard register, and then
193 copied one to the other. Reload will generate a useless
194 instruction copying a register to itself.
195
196 This function also detects cases where we load a value from memory
197 into two different registers, and (if memory is more expensive than
198 registers) changes it to simply copy the first register into the
199 second register.
200
201 Another optimization is performed that scans the operands of each
202 instruction to see whether the value is already available in a
203 hard register. It then replaces the operand with the hard register
204 if possible, much like an optional reload would. */
205
206static void
3a15c2cf 207reload_cse_regs_1 (void)
15e35479 208{
3a15c2cf
SB
209 bool cfg_changed = false;
210 basic_block bb;
f90af2e0 211 rtx_insn *insn;
c3dc5e66 212 rtx testreg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
15e35479 213
457eeaae 214 cselib_init (CSELIB_RECORD_MEMORY);
15e35479
KH
215 init_alias_analysis ();
216
11cd3bed 217 FOR_EACH_BB_FN (bb, cfun)
3a15c2cf
SB
218 FOR_BB_INSNS (bb, insn)
219 {
220 if (INSN_P (insn))
221 cfg_changed |= reload_cse_simplify (insn, testreg);
15e35479 222
3a15c2cf
SB
223 cselib_process_insn (insn);
224 }
15e35479
KH
225
226 /* Clean up. */
227 end_alias_analysis ();
228 cselib_finish ();
3a15c2cf
SB
229 if (cfg_changed)
230 cleanup_cfg (0);
15e35479
KH
231}
232
233/* Try to simplify a single SET instruction. SET is the set pattern.
234 INSN is the instruction it came from.
235 This function only handles one case: if we set a register to a value
236 which is not a register, we try to find that value in some other register
237 and change the set into a register copy. */
238
239static int
f90af2e0 240reload_cse_simplify_set (rtx set, rtx_insn *insn)
15e35479
KH
241{
242 int did_change = 0;
243 int dreg;
244 rtx src;
6f76a878 245 reg_class_t dclass;
15e35479
KH
246 int old_cost;
247 cselib_val *val;
248 struct elt_loc_list *l;
f822d252 249 enum rtx_code extend_op = UNKNOWN;
f40751dd 250 bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
15e35479
KH
251
252 dreg = true_regnum (SET_DEST (set));
253 if (dreg < 0)
254 return 0;
255
256 src = SET_SRC (set);
257 if (side_effects_p (src) || true_regnum (src) >= 0)
258 return 0;
259
260 dclass = REGNO_REG_CLASS (dreg);
261
15e35479
KH
262 /* When replacing a memory with a register, we need to honor assumptions
263 that combine made wrt the contents of sign bits. We'll do this by
264 generating an extend instruction instead of a reg->reg copy. Thus
265 the destination must be a register that we can widen. */
3c0cb5de 266 if (MEM_P (src)
3712c7a3 267 && (extend_op = load_extend_op (GET_MODE (src))) != UNKNOWN
f8cfc6aa 268 && !REG_P (SET_DEST (set)))
15e35479 269 return 0;
15e35479 270
4deef538 271 val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0, VOIDmode);
b2948a2c
KH
272 if (! val)
273 return 0;
274
15e35479 275 /* If memory loads are cheaper than register copies, don't change them. */
3c0cb5de 276 if (MEM_P (src))
f5c21ef3 277 old_cost = memory_move_cost (GET_MODE (src), dclass, true);
f8cfc6aa 278 else if (REG_P (src))
de8f4b07 279 old_cost = register_move_cost (GET_MODE (src),
15e35479
KH
280 REGNO_REG_CLASS (REGNO (src)), dclass);
281 else
e548c9df 282 old_cost = set_src_cost (src, GET_MODE (SET_DEST (set)), speed);
15e35479 283
15e35479
KH
284 for (l = val->locs; l; l = l->next)
285 {
286 rtx this_rtx = l->loc;
287 int this_cost;
288
289 if (CONSTANT_P (this_rtx) && ! references_value_p (this_rtx, 0))
290 {
f822d252 291 if (extend_op != UNKNOWN)
15e35479 292 {
807e902e 293 wide_int result;
15e35479 294
807e902e 295 if (!CONST_SCALAR_INT_P (this_rtx))
15e35479
KH
296 continue;
297
15e35479
KH
298 switch (extend_op)
299 {
300 case ZERO_EXTEND:
f079167a
RS
301 result = wide_int::from (rtx_mode_t (this_rtx,
302 GET_MODE (src)),
807e902e 303 BITS_PER_WORD, UNSIGNED);
15e35479
KH
304 break;
305 case SIGN_EXTEND:
f079167a
RS
306 result = wide_int::from (rtx_mode_t (this_rtx,
307 GET_MODE (src)),
807e902e
KZ
308 BITS_PER_WORD, SIGNED);
309 break;
15e35479 310 default:
e16acfcd 311 gcc_unreachable ();
15e35479 312 }
807e902e 313 this_rtx = immed_wide_int_const (result, word_mode);
15e35479 314 }
f1657f05 315
e548c9df 316 this_cost = set_src_cost (this_rtx, GET_MODE (SET_DEST (set)), speed);
15e35479 317 }
f8cfc6aa 318 else if (REG_P (this_rtx))
15e35479 319 {
f822d252 320 if (extend_op != UNKNOWN)
15e35479
KH
321 {
322 this_rtx = gen_rtx_fmt_e (extend_op, word_mode, this_rtx);
e548c9df 323 this_cost = set_src_cost (this_rtx, word_mode, speed);
15e35479
KH
324 }
325 else
de8f4b07 326 this_cost = register_move_cost (GET_MODE (this_rtx),
15e35479
KH
327 REGNO_REG_CLASS (REGNO (this_rtx)),
328 dclass);
329 }
330 else
331 continue;
332
333 /* If equal costs, prefer registers over anything else. That
334 tends to lead to smaller instructions on some machines. */
335 if (this_cost < old_cost
336 || (this_cost == old_cost
f8cfc6aa
JQ
337 && REG_P (this_rtx)
338 && !REG_P (SET_SRC (set))))
15e35479 339 {
3712c7a3 340 if (extend_op != UNKNOWN
0d803030
RS
341 && REG_CAN_CHANGE_MODE_P (REGNO (SET_DEST (set)),
342 GET_MODE (SET_DEST (set)), word_mode))
15e35479
KH
343 {
344 rtx wide_dest = gen_rtx_REG (word_mode, REGNO (SET_DEST (set)));
345 ORIGINAL_REGNO (wide_dest) = ORIGINAL_REGNO (SET_DEST (set));
346 validate_change (insn, &SET_DEST (set), wide_dest, 1);
347 }
15e35479 348
95e88efd 349 validate_unshare_change (insn, &SET_SRC (set), this_rtx, 1);
15e35479
KH
350 old_cost = this_cost, did_change = 1;
351 }
352 }
353
354 return did_change;
355}
356
357/* Try to replace operands in INSN with equivalent values that are already
358 in registers. This can be viewed as optional reloading.
359
360 For each non-register operand in the insn, see if any hard regs are
361 known to be equivalent to that operand. Record the alternatives which
362 can accept these hard registers. Among all alternatives, select the
363 ones which are better or equal to the one currently matching, where
364 "better" is in terms of '?' and '!' constraints. Among the remaining
365 alternatives, select the one which replaces most operands with
366 hard registers. */
367
368static int
f90af2e0 369reload_cse_simplify_operands (rtx_insn *insn, rtx testreg)
15e35479
KH
370{
371 int i, j;
372
373 /* For each operand, all registers that are equivalent to it. */
374 HARD_REG_SET equiv_regs[MAX_RECOG_OPERANDS];
375
376 const char *constraints[MAX_RECOG_OPERANDS];
377
378 /* Vector recording how bad an alternative is. */
379 int *alternative_reject;
380 /* Vector recording how many registers can be introduced by choosing
381 this alternative. */
382 int *alternative_nregs;
383 /* Array of vectors recording, for each operand and each alternative,
384 which hard register to substitute, or -1 if the operand should be
385 left as it is. */
386 int *op_alt_regno[MAX_RECOG_OPERANDS];
387 /* Array of alternatives, sorted in order of decreasing desirability. */
388 int *alternative_order;
389
75d25a02 390 extract_constrain_insn (insn);
15e35479
KH
391
392 if (recog_data.n_alternatives == 0 || recog_data.n_operands == 0)
393 return 0;
394
d3bfe4de
KG
395 alternative_reject = XALLOCAVEC (int, recog_data.n_alternatives);
396 alternative_nregs = XALLOCAVEC (int, recog_data.n_alternatives);
397 alternative_order = XALLOCAVEC (int, recog_data.n_alternatives);
703ad42b
KG
398 memset (alternative_reject, 0, recog_data.n_alternatives * sizeof (int));
399 memset (alternative_nregs, 0, recog_data.n_alternatives * sizeof (int));
15e35479
KH
400
401 /* For each operand, find out which regs are equivalent. */
402 for (i = 0; i < recog_data.n_operands; i++)
403 {
404 cselib_val *v;
405 struct elt_loc_list *l;
115df136 406 rtx op;
15e35479
KH
407
408 CLEAR_HARD_REG_SET (equiv_regs[i]);
409
410 /* cselib blows up on CODE_LABELs. Trying to fix that doesn't seem
854dde43
JJ
411 right, so avoid the problem here. Similarly NOTE_INSN_DELETED_LABEL.
412 Likewise if we have a constant and the insn pattern doesn't tell us
413 the mode we need. */
4b4bf941 414 if (LABEL_P (recog_data.operand[i])
854dde43
JJ
415 || (NOTE_P (recog_data.operand[i])
416 && NOTE_KIND (recog_data.operand[i]) == NOTE_INSN_DELETED_LABEL)
15e35479
KH
417 || (CONSTANT_P (recog_data.operand[i])
418 && recog_data.operand_mode[i] == VOIDmode))
419 continue;
420
115df136 421 op = recog_data.operand[i];
3712c7a3 422 if (MEM_P (op) && load_extend_op (GET_MODE (op)) != UNKNOWN)
115df136
R
423 {
424 rtx set = single_set (insn);
425
1f52178b 426 /* We might have multiple sets, some of which do implicit
115df136
R
427 extension. Punt on this for now. */
428 if (! set)
429 continue;
1f838355 430 /* If the destination is also a MEM or a STRICT_LOW_PART, no
115df136
R
431 extension applies.
432 Also, if there is an explicit extension, we don't have to
433 worry about an implicit one. */
3c0cb5de 434 else if (MEM_P (SET_DEST (set))
115df136
R
435 || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART
436 || GET_CODE (SET_SRC (set)) == ZERO_EXTEND
437 || GET_CODE (SET_SRC (set)) == SIGN_EXTEND)
438 ; /* Continue ordinary processing. */
7be4d808
R
439 /* If the register cannot change mode to word_mode, it follows that
440 it cannot have been used in word_mode. */
f8cfc6aa 441 else if (REG_P (SET_DEST (set))
0d803030
RS
442 && !REG_CAN_CHANGE_MODE_P (REGNO (SET_DEST (set)),
443 GET_MODE (SET_DEST (set)),
444 word_mode))
7be4d808 445 ; /* Continue ordinary processing. */
115df136 446 /* If this is a straight load, make the extension explicit. */
f8cfc6aa 447 else if (REG_P (SET_DEST (set))
115df136
R
448 && recog_data.n_operands == 2
449 && SET_SRC (set) == op
450 && SET_DEST (set) == recog_data.operand[1-i])
451 {
452 validate_change (insn, recog_data.operand_loc[i],
3712c7a3 453 gen_rtx_fmt_e (load_extend_op (GET_MODE (op)),
115df136
R
454 word_mode, op),
455 1);
456 validate_change (insn, recog_data.operand_loc[1-i],
457 gen_rtx_REG (word_mode, REGNO (SET_DEST (set))),
458 1);
459 if (! apply_change_group ())
460 return 0;
461 return reload_cse_simplify_operands (insn, testreg);
462 }
463 else
464 /* ??? There might be arithmetic operations with memory that are
465 safe to optimize, but is it worth the trouble? */
466 continue;
467 }
f1657f05 468
3f82421f
PH
469 if (side_effects_p (op))
470 continue;
4deef538 471 v = cselib_lookup (op, recog_data.operand_mode[i], 0, VOIDmode);
15e35479
KH
472 if (! v)
473 continue;
474
475 for (l = v->locs; l; l = l->next)
f8cfc6aa 476 if (REG_P (l->loc))
15e35479
KH
477 SET_HARD_REG_BIT (equiv_regs[i], REGNO (l->loc));
478 }
479
9840b2fa 480 alternative_mask preferred = get_preferred_alternatives (insn);
15e35479
KH
481 for (i = 0; i < recog_data.n_operands; i++)
482 {
ef4bddc2 483 machine_mode mode;
15e35479
KH
484 int regno;
485 const char *p;
486
d3bfe4de 487 op_alt_regno[i] = XALLOCAVEC (int, recog_data.n_alternatives);
15e35479
KH
488 for (j = 0; j < recog_data.n_alternatives; j++)
489 op_alt_regno[i][j] = -1;
490
491 p = constraints[i] = recog_data.constraints[i];
492 mode = recog_data.operand_mode[i];
493
494 /* Add the reject values for each alternative given by the constraints
495 for this operand. */
496 j = 0;
497 while (*p != '\0')
498 {
499 char c = *p++;
500 if (c == ',')
501 j++;
502 else if (c == '?')
503 alternative_reject[j] += 3;
504 else if (c == '!')
505 alternative_reject[j] += 300;
506 }
507
508 /* We won't change operands which are already registers. We
509 also don't want to modify output operands. */
510 regno = true_regnum (recog_data.operand[i]);
511 if (regno >= 0
512 || constraints[i][0] == '='
513 || constraints[i][0] == '+')
514 continue;
515
516 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
517 {
bbbbb16a 518 enum reg_class rclass = NO_REGS;
15e35479
KH
519
520 if (! TEST_HARD_REG_BIT (equiv_regs[i], regno))
521 continue;
522
8deccbb7 523 set_mode_and_regno (testreg, mode, regno);
15e35479
KH
524
525 /* We found a register equal to this operand. Now look for all
526 alternatives that can accept this register and have not been
527 assigned a register they can use yet. */
528 j = 0;
529 p = constraints[i];
530 for (;;)
531 {
532 char c = *p;
533
534 switch (c)
535 {
8677664e
RS
536 case 'g':
537 rclass = reg_class_subunion[rclass][GENERAL_REGS];
15e35479
KH
538 break;
539
540 default:
d858f359 541 rclass
15e35479 542 = (reg_class_subunion
777e635f
RS
543 [rclass]
544 [reg_class_for_constraint (lookup_constraint (p))]);
15e35479
KH
545 break;
546
547 case ',': case '\0':
548 /* See if REGNO fits this alternative, and set it up as the
549 replacement register if we don't have one for this
550 alternative yet and the operand being replaced is not
551 a cheap CONST_INT. */
552 if (op_alt_regno[i][j] == -1
9840b2fa 553 && TEST_BIT (preferred, j)
d858f359 554 && reg_fits_class_p (testreg, rclass, 0, mode)
481683e1 555 && (!CONST_INT_P (recog_data.operand[i])
e548c9df 556 || (set_src_cost (recog_data.operand[i], mode,
5e8f01f4
RS
557 optimize_bb_for_speed_p
558 (BLOCK_FOR_INSN (insn)))
e548c9df 559 > set_src_cost (testreg, mode,
5e8f01f4
RS
560 optimize_bb_for_speed_p
561 (BLOCK_FOR_INSN (insn))))))
15e35479
KH
562 {
563 alternative_nregs[j]++;
564 op_alt_regno[i][j] = regno;
565 }
566 j++;
bbbbb16a 567 rclass = NO_REGS;
15e35479
KH
568 break;
569 }
570 p += CONSTRAINT_LEN (c, p);
571
572 if (c == '\0')
573 break;
574 }
575 }
576 }
577
578 /* Record all alternatives which are better or equal to the currently
579 matching one in the alternative_order array. */
580 for (i = j = 0; i < recog_data.n_alternatives; i++)
581 if (alternative_reject[i] <= alternative_reject[which_alternative])
582 alternative_order[j++] = i;
583 recog_data.n_alternatives = j;
584
585 /* Sort it. Given a small number of alternatives, a dumb algorithm
586 won't hurt too much. */
587 for (i = 0; i < recog_data.n_alternatives - 1; i++)
588 {
589 int best = i;
590 int best_reject = alternative_reject[alternative_order[i]];
591 int best_nregs = alternative_nregs[alternative_order[i]];
15e35479
KH
592
593 for (j = i + 1; j < recog_data.n_alternatives; j++)
594 {
595 int this_reject = alternative_reject[alternative_order[j]];
596 int this_nregs = alternative_nregs[alternative_order[j]];
597
598 if (this_reject < best_reject
8a4c09c8 599 || (this_reject == best_reject && this_nregs > best_nregs))
15e35479
KH
600 {
601 best = j;
602 best_reject = this_reject;
603 best_nregs = this_nregs;
604 }
605 }
606
fab27f52 607 std::swap (alternative_order[best], alternative_order[i]);
15e35479
KH
608 }
609
610 /* Substitute the operands as determined by op_alt_regno for the best
611 alternative. */
612 j = alternative_order[0];
613
614 for (i = 0; i < recog_data.n_operands; i++)
615 {
ef4bddc2 616 machine_mode mode = recog_data.operand_mode[i];
15e35479
KH
617 if (op_alt_regno[i][j] == -1)
618 continue;
619
620 validate_change (insn, recog_data.operand_loc[i],
621 gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
622 }
623
624 for (i = recog_data.n_dups - 1; i >= 0; i--)
625 {
626 int op = recog_data.dup_num[i];
ef4bddc2 627 machine_mode mode = recog_data.operand_mode[op];
15e35479
KH
628
629 if (op_alt_regno[op][j] == -1)
630 continue;
631
632 validate_change (insn, recog_data.dup_loc[i],
633 gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
634 }
635
636 return apply_change_group ();
637}
638\f
639/* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
640 addressing now.
641 This code might also be useful when reload gave up on reg+reg addressing
642 because of clashes between the return register and INDEX_REG_CLASS. */
643
644/* The maximum number of uses of a register we can keep track of to
645 replace them with reg+reg addressing. */
dc0d5a57 646#define RELOAD_COMBINE_MAX_USES 16
15e35479 647
dc0d5a57
BS
648/* Describes a recorded use of a register. */
649struct reg_use
650{
651 /* The insn where a register has been used. */
f90af2e0 652 rtx_insn *insn;
dc0d5a57
BS
653 /* Points to the memory reference enclosing the use, if any, NULL_RTX
654 otherwise. */
655 rtx containing_mem;
073a8998 656 /* Location of the register within INSN. */
dc0d5a57
BS
657 rtx *usep;
658 /* The reverse uid of the insn. */
659 int ruid;
660};
15e35479
KH
661
662/* If the register is used in some unknown fashion, USE_INDEX is negative.
663 If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
dc0d5a57 664 indicates where it is first set or clobbered.
15e35479 665 Otherwise, USE_INDEX is the index of the last encountered use of the
dc0d5a57
BS
666 register (which is first among these we have seen since we scan backwards).
667 USE_RUID indicates the first encountered, i.e. last, of these uses.
668 If ALL_OFFSETS_MATCH is true, all encountered uses were inside a PLUS
669 with a constant offset; OFFSET contains this constant in that case.
15e35479
KH
670 STORE_RUID is always meaningful if we only want to use a value in a
671 register in a different place: it denotes the next insn in the insn
dc0d5a57 672 stream (i.e. the last encountered) that sets or clobbers the register.
8df47bdf
AH
673 REAL_STORE_RUID is similar, but clobbers are ignored when updating it.
674 EXPR is the expression used when storing the register. */
15e35479
KH
675static struct
676 {
677 struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
15e35479 678 rtx offset;
dc0d5a57 679 int use_index;
15e35479 680 int store_ruid;
dc0d5a57 681 int real_store_ruid;
15e35479 682 int use_ruid;
dc0d5a57 683 bool all_offsets_match;
8df47bdf 684 rtx expr;
15e35479
KH
685 } reg_state[FIRST_PSEUDO_REGISTER];
686
687/* Reverse linear uid. This is increased in reload_combine while scanning
688 the instructions from last to first. It is used to set last_label_ruid
689 and the store_ruid / use_ruid fields in reg_state. */
690static int reload_combine_ruid;
691
67bb0206
BS
692/* The RUID of the last label we encountered in reload_combine. */
693static int last_label_ruid;
694
dc0d5a57
BS
695/* The RUID of the last jump we encountered in reload_combine. */
696static int last_jump_ruid;
697
67bb0206
BS
698/* The register numbers of the first and last index register. A value of
699 -1 in LAST_INDEX_REG indicates that we've previously computed these
700 values and found no suitable index registers. */
701static int first_index_reg = -1;
702static int last_index_reg;
703
15e35479
KH
704#define LABEL_LIVE(LABEL) \
705 (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])
706
dc0d5a57
BS
/* Subroutine of reload_combine_split_ruids, called to fix up a single
   ruid pointed to by *PRUID if it is higher than SPLIT_RUID.  */

static inline void
reload_combine_split_one_ruid (int *pruid, int split_ruid)
{
  if (*pruid > split_ruid)
    (*pruid)++;
}
716
717/* Called when we insert a new insn in a position we've already passed in
718 the scan. Examine all our state, increasing all ruids that are higher
719 than SPLIT_RUID by one in order to make room for a new insn. */
720
721static void
722reload_combine_split_ruids (int split_ruid)
723{
724 unsigned i;
725
726 reload_combine_split_one_ruid (&reload_combine_ruid, split_ruid);
727 reload_combine_split_one_ruid (&last_label_ruid, split_ruid);
728 reload_combine_split_one_ruid (&last_jump_ruid, split_ruid);
729
730 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
731 {
732 int j, idx = reg_state[i].use_index;
733 reload_combine_split_one_ruid (&reg_state[i].use_ruid, split_ruid);
734 reload_combine_split_one_ruid (&reg_state[i].store_ruid, split_ruid);
735 reload_combine_split_one_ruid (&reg_state[i].real_store_ruid,
736 split_ruid);
737 if (idx < 0)
738 continue;
739 for (j = idx; j < RELOAD_COMBINE_MAX_USES; j++)
740 {
741 reload_combine_split_one_ruid (&reg_state[i].reg_use[j].ruid,
742 split_ruid);
743 }
744 }
745}
746
747/* Called when we are about to rescan a previously encountered insn with
748 reload_combine_note_use after modifying some part of it. This clears all
749 information about uses in that particular insn. */
750
751static void
f90af2e0 752reload_combine_purge_insn_uses (rtx_insn *insn)
dc0d5a57
BS
753{
754 unsigned i;
755
756 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
757 {
758 int j, k, idx = reg_state[i].use_index;
759 if (idx < 0)
760 continue;
761 j = k = RELOAD_COMBINE_MAX_USES;
762 while (j-- > idx)
763 {
764 if (reg_state[i].reg_use[j].insn != insn)
765 {
766 k--;
767 if (k != j)
768 reg_state[i].reg_use[k] = reg_state[i].reg_use[j];
769 }
770 }
771 reg_state[i].use_index = k;
772 }
773}
774
775/* Called when we need to forget about all uses of REGNO after an insn
776 which is identified by RUID. */
777
778static void
779reload_combine_purge_reg_uses_after_ruid (unsigned regno, int ruid)
780{
781 int j, k, idx = reg_state[regno].use_index;
782 if (idx < 0)
783 return;
784 j = k = RELOAD_COMBINE_MAX_USES;
785 while (j-- > idx)
786 {
787 if (reg_state[regno].reg_use[j].ruid >= ruid)
788 {
789 k--;
790 if (k != j)
791 reg_state[regno].reg_use[k] = reg_state[regno].reg_use[j];
792 }
793 }
794 reg_state[regno].use_index = k;
795}
796
797/* Find the use of REGNO with the ruid that is highest among those
798 lower than RUID_LIMIT, and return it if it is the only use of this
08bd6876 799 reg in the insn. Return NULL otherwise. */
dc0d5a57
BS
800
801static struct reg_use *
802reload_combine_closest_single_use (unsigned regno, int ruid_limit)
803{
804 int i, best_ruid = 0;
805 int use_idx = reg_state[regno].use_index;
806 struct reg_use *retval;
807
808 if (use_idx < 0)
809 return NULL;
810 retval = NULL;
811 for (i = use_idx; i < RELOAD_COMBINE_MAX_USES; i++)
812 {
b1d5eee8
BS
813 struct reg_use *use = reg_state[regno].reg_use + i;
814 int this_ruid = use->ruid;
dc0d5a57
BS
815 if (this_ruid >= ruid_limit)
816 continue;
817 if (this_ruid > best_ruid)
818 {
819 best_ruid = this_ruid;
08bd6876 820 retval = use;
dc0d5a57 821 }
08bd6876 822 else if (this_ruid == best_ruid)
dc0d5a57
BS
823 retval = NULL;
824 }
825 if (last_label_ruid >= best_ruid)
826 return NULL;
827 return retval;
828}
829
caa4a250
BS
830/* After we've moved an add insn, fix up any debug insns that occur
831 between the old location of the add and the new location. REG is
832 the destination register of the add insn; REPLACEMENT is the
833 SET_SRC of the add. FROM and TO specify the range in which we
834 should make this change on debug insns. */
b1d5eee8
BS
835
836static void
f90af2e0 837fixup_debug_insns (rtx reg, rtx replacement, rtx_insn *from, rtx_insn *to)
b1d5eee8 838{
f90af2e0 839 rtx_insn *insn;
caa4a250 840 for (insn = from; insn != to; insn = NEXT_INSN (insn))
b1d5eee8
BS
841 {
842 rtx t;
caa4a250 843
65f4b875 844 if (!DEBUG_BIND_INSN_P (insn))
b1d5eee8 845 continue;
caa4a250
BS
846
847 t = INSN_VAR_LOCATION_LOC (insn);
08bd6876 848 t = simplify_replace_rtx (t, reg, replacement);
caa4a250 849 validate_change (insn, &INSN_VAR_LOCATION_LOC (insn), t, 0);
b1d5eee8
BS
850 }
851}
852
a78e242c
BS
853/* Subroutine of reload_combine_recognize_const_pattern. Try to replace REG
854 with SRC in the insn described by USE, taking costs into account. Return
855 true if we made the replacement. */
856
857static bool
858try_replace_in_use (struct reg_use *use, rtx reg, rtx src)
859{
f90af2e0 860 rtx_insn *use_insn = use->insn;
a78e242c
BS
861 rtx mem = use->containing_mem;
862 bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (use_insn));
863
864 if (mem != NULL_RTX)
865 {
866 addr_space_t as = MEM_ADDR_SPACE (mem);
867 rtx oldaddr = XEXP (mem, 0);
868 rtx newaddr = NULL_RTX;
869 int old_cost = address_cost (oldaddr, GET_MODE (mem), as, speed);
870 int new_cost;
871
872 newaddr = simplify_replace_rtx (oldaddr, reg, src);
873 if (memory_address_addr_space_p (GET_MODE (mem), newaddr, as))
874 {
875 XEXP (mem, 0) = newaddr;
876 new_cost = address_cost (newaddr, GET_MODE (mem), as, speed);
877 XEXP (mem, 0) = oldaddr;
878 if (new_cost <= old_cost
879 && validate_change (use_insn,
880 &XEXP (mem, 0), newaddr, 0))
881 return true;
882 }
883 }
884 else
885 {
886 rtx new_set = single_set (use_insn);
887 if (new_set
888 && REG_P (SET_DEST (new_set))
889 && GET_CODE (SET_SRC (new_set)) == PLUS
890 && REG_P (XEXP (SET_SRC (new_set), 0))
891 && CONSTANT_P (XEXP (SET_SRC (new_set), 1)))
892 {
893 rtx new_src;
e548c9df
AM
894 machine_mode mode = GET_MODE (SET_DEST (new_set));
895 int old_cost = set_src_cost (SET_SRC (new_set), mode, speed);
a78e242c
BS
896
897 gcc_assert (rtx_equal_p (XEXP (SET_SRC (new_set), 0), reg));
898 new_src = simplify_replace_rtx (SET_SRC (new_set), reg, src);
899
e548c9df 900 if (set_src_cost (new_src, mode, speed) <= old_cost
a78e242c
BS
901 && validate_change (use_insn, &SET_SRC (new_set),
902 new_src, 0))
903 return true;
904 }
905 }
906 return false;
907}
908
dc0d5a57
BS
909/* Called by reload_combine when scanning INSN. This function tries to detect
910 patterns where a constant is added to a register, and the result is used
911 in an address.
912 Return true if no further processing is needed on INSN; false if it wasn't
913 recognized and should be handled normally. */
914
915static bool
f90af2e0 916reload_combine_recognize_const_pattern (rtx_insn *insn)
dc0d5a57
BS
917{
918 int from_ruid = reload_combine_ruid;
919 rtx set, pat, reg, src, addreg;
920 unsigned int regno;
921 struct reg_use *use;
922 bool must_move_add;
f90af2e0 923 rtx_insn *add_moved_after_insn = NULL;
dc0d5a57
BS
924 int add_moved_after_ruid = 0;
925 int clobbered_regno = -1;
926
927 set = single_set (insn);
928 if (set == NULL_RTX)
929 return false;
930
931 reg = SET_DEST (set);
932 src = SET_SRC (set);
933 if (!REG_P (reg)
dc8afb70 934 || REG_NREGS (reg) != 1
dc0d5a57
BS
935 || GET_MODE (reg) != Pmode
936 || reg == stack_pointer_rtx)
937 return false;
938
939 regno = REGNO (reg);
940
941 /* We look for a REG1 = REG2 + CONSTANT insn, followed by either
942 uses of REG1 inside an address, or inside another add insn. If
943 possible and profitable, merge the addition into subsequent
944 uses. */
945 if (GET_CODE (src) != PLUS
946 || !REG_P (XEXP (src, 0))
947 || !CONSTANT_P (XEXP (src, 1)))
948 return false;
949
950 addreg = XEXP (src, 0);
951 must_move_add = rtx_equal_p (reg, addreg);
952
953 pat = PATTERN (insn);
954 if (must_move_add && set != pat)
955 {
956 /* We have to be careful when moving the add; apart from the
957 single_set there may also be clobbers. Recognize one special
958 case, that of one clobber alongside the set (likely a clobber
959 of the CC register). */
960 gcc_assert (GET_CODE (PATTERN (insn)) == PARALLEL);
961 if (XVECLEN (pat, 0) != 2 || XVECEXP (pat, 0, 0) != set
962 || GET_CODE (XVECEXP (pat, 0, 1)) != CLOBBER
963 || !REG_P (XEXP (XVECEXP (pat, 0, 1), 0)))
964 return false;
965 clobbered_regno = REGNO (XEXP (XVECEXP (pat, 0, 1), 0));
966 }
967
968 do
969 {
970 use = reload_combine_closest_single_use (regno, from_ruid);
971
972 if (use)
973 /* Start the search for the next use from here. */
974 from_ruid = use->ruid;
975
976 if (use && GET_MODE (*use->usep) == Pmode)
977 {
a78e242c 978 bool delete_add = false;
f90af2e0 979 rtx_insn *use_insn = use->insn;
dc0d5a57 980 int use_ruid = use->ruid;
dc0d5a57
BS
981
982 /* Avoid moving the add insn past a jump. */
b1d5eee8 983 if (must_move_add && use_ruid <= last_jump_ruid)
dc0d5a57
BS
984 break;
985
986 /* If the add clobbers another hard reg in parallel, don't move
987 it past a real set of this hard reg. */
988 if (must_move_add && clobbered_regno >= 0
989 && reg_state[clobbered_regno].real_store_ruid >= use_ruid)
990 break;
991
3b8ff89f 992 /* Do not separate cc0 setter and cc0 user on HAVE_cc0 targets. */
058eb3b0 993 if (HAVE_cc0 && must_move_add && sets_cc0_p (PATTERN (use_insn)))
3b8ff89f 994 break;
3b8ff89f 995
62036819
BS
996 gcc_assert (reg_state[regno].store_ruid <= use_ruid);
997 /* Avoid moving a use of ADDREG past a point where it is stored. */
a78e242c 998 if (reg_state[REGNO (addreg)].store_ruid > use_ruid)
dc0d5a57
BS
999 break;
1000
a78e242c
BS
1001 /* We also must not move the addition past an insn that sets
1002 the same register, unless we can combine two add insns. */
1003 if (must_move_add && reg_state[regno].store_ruid == use_ruid)
dc0d5a57 1004 {
a78e242c
BS
1005 if (use->containing_mem == NULL_RTX)
1006 delete_add = true;
1007 else
1008 break;
dc0d5a57 1009 }
dc0d5a57 1010
a78e242c
BS
1011 if (try_replace_in_use (use, reg, src))
1012 {
1013 reload_combine_purge_insn_uses (use_insn);
1014 reload_combine_note_use (&PATTERN (use_insn), use_insn,
1015 use_ruid, NULL_RTX);
dc0d5a57 1016
a78e242c
BS
1017 if (delete_add)
1018 {
1019 fixup_debug_insns (reg, src, insn, use_insn);
1020 delete_insn (insn);
1021 return true;
1022 }
1023 if (must_move_add)
1024 {
1025 add_moved_after_insn = use_insn;
1026 add_moved_after_ruid = use_ruid;
dc0d5a57 1027 }
a78e242c 1028 continue;
dc0d5a57 1029 }
dc0d5a57 1030 }
62036819
BS
1031 /* If we get here, we couldn't handle this use. */
1032 if (must_move_add)
1033 break;
dc0d5a57
BS
1034 }
1035 while (use);
1036
1037 if (!must_move_add || add_moved_after_insn == NULL_RTX)
1038 /* Process the add normally. */
1039 return false;
1040
caa4a250
BS
1041 fixup_debug_insns (reg, src, insn, add_moved_after_insn);
1042
dc0d5a57
BS
1043 reorder_insns (insn, insn, add_moved_after_insn);
1044 reload_combine_purge_reg_uses_after_ruid (regno, add_moved_after_ruid);
1045 reload_combine_split_ruids (add_moved_after_ruid - 1);
1046 reload_combine_note_use (&PATTERN (insn), insn,
1047 add_moved_after_ruid, NULL_RTX);
1048 reg_state[regno].store_ruid = add_moved_after_ruid;
1049
1050 return true;
1051}
1052
67bb0206
BS
1053/* Called by reload_combine when scanning INSN. Try to detect a pattern we
1054 can handle and improve. Return true if no further processing is needed on
1055 INSN; false if it wasn't recognized and should be handled normally. */
1056
1057static bool
f90af2e0 1058reload_combine_recognize_pattern (rtx_insn *insn)
67bb0206
BS
1059{
1060 rtx set, reg, src;
67bb0206 1061
dc0d5a57
BS
1062 set = single_set (insn);
1063 if (set == NULL_RTX)
1064 return false;
1065
1066 reg = SET_DEST (set);
1067 src = SET_SRC (set);
dc8afb70 1068 if (!REG_P (reg) || REG_NREGS (reg) != 1)
dc0d5a57
BS
1069 return false;
1070
201d49e9
BS
1071 unsigned int regno = REGNO (reg);
1072 machine_mode mode = GET_MODE (reg);
1073
1074 if (reg_state[regno].use_index < 0
1075 || reg_state[regno].use_index >= RELOAD_COMBINE_MAX_USES)
1076 return false;
1077
1078 for (int i = reg_state[regno].use_index;
1079 i < RELOAD_COMBINE_MAX_USES; i++)
1080 {
1081 struct reg_use *use = reg_state[regno].reg_use + i;
1082 if (GET_MODE (*use->usep) != mode)
1083 return false;
1084 }
dc0d5a57 1085
67bb0206
BS
1086 /* Look for (set (REGX) (CONST_INT))
1087 (set (REGX) (PLUS (REGX) (REGY)))
1088 ...
1089 ... (MEM (REGX)) ...
1090 and convert it to
1091 (set (REGZ) (CONST_INT))
1092 ...
1093 ... (MEM (PLUS (REGZ) (REGY)))... .
1094
1095 First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
1096 and that we know all uses of REGX before it dies.
1097 Also, explicitly check that REGX != REGY; our life information
1098 does not yet show whether REGY changes in this insn. */
67bb0206
BS
1099
1100 if (GET_CODE (src) == PLUS
dc0d5a57
BS
1101 && reg_state[regno].all_offsets_match
1102 && last_index_reg != -1
67bb0206
BS
1103 && REG_P (XEXP (src, 1))
1104 && rtx_equal_p (XEXP (src, 0), reg)
1105 && !rtx_equal_p (XEXP (src, 1), reg)
1106 && last_label_ruid < reg_state[regno].use_ruid)
1107 {
1108 rtx base = XEXP (src, 1);
f90af2e0 1109 rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
67bb0206
BS
1110 rtx prev_set = prev ? single_set (prev) : NULL_RTX;
1111 rtx index_reg = NULL_RTX;
1112 rtx reg_sum = NULL_RTX;
1113 int i;
1114
1115 /* Now we need to set INDEX_REG to an index register (denoted as
1116 REGZ in the illustration above) and REG_SUM to the expression
1117 register+register that we want to use to substitute uses of REG
1118 (typically in MEMs) with. First check REG and BASE for being
1119 index registers; we can use them even if they are not dead. */
1120 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
1121 || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
1122 REGNO (base)))
1123 {
1124 index_reg = reg;
1125 reg_sum = src;
1126 }
1127 else
1128 {
1129 /* Otherwise, look for a free index register. Since we have
1130 checked above that neither REG nor BASE are index registers,
1131 if we find anything at all, it will be different from these
1132 two registers. */
1133 for (i = first_index_reg; i <= last_index_reg; i++)
1134 {
1135 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i)
1136 && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
1137 && reg_state[i].store_ruid <= reg_state[regno].use_ruid
08bd6876
BS
1138 && (call_used_regs[i] || df_regs_ever_live_p (i))
1139 && (!frame_pointer_needed || i != HARD_FRAME_POINTER_REGNUM)
1140 && !fixed_regs[i] && !global_regs[i]
ad474626 1141 && hard_regno_nregs (i, GET_MODE (reg)) == 1
08bd6876 1142 && targetm.hard_regno_scratch_ok (i))
67bb0206
BS
1143 {
1144 index_reg = gen_rtx_REG (GET_MODE (reg), i);
1145 reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
1146 break;
1147 }
1148 }
1149 }
1150
1151 /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
1152 (REGY), i.e. BASE, is not clobbered before the last use we'll
1153 create. */
1154 if (reg_sum
1155 && prev_set
1156 && CONST_INT_P (SET_SRC (prev_set))
1157 && rtx_equal_p (SET_DEST (prev_set), reg)
67bb0206
BS
1158 && (reg_state[REGNO (base)].store_ruid
1159 <= reg_state[regno].use_ruid))
1160 {
1161 /* Change destination register and, if necessary, the constant
1162 value in PREV, the constant loading instruction. */
1163 validate_change (prev, &SET_DEST (prev_set), index_reg, 1);
1164 if (reg_state[regno].offset != const0_rtx)
927fb0bc
JJ
1165 {
1166 HOST_WIDE_INT c
1167 = trunc_int_for_mode (UINTVAL (SET_SRC (prev_set))
1168 + UINTVAL (reg_state[regno].offset),
1169 GET_MODE (index_reg));
1170 validate_change (prev, &SET_SRC (prev_set), GEN_INT (c), 1);
1171 }
67bb0206
BS
1172
1173 /* Now for every use of REG that we have recorded, replace REG
1174 with REG_SUM. */
1175 for (i = reg_state[regno].use_index;
1176 i < RELOAD_COMBINE_MAX_USES; i++)
1177 validate_unshare_change (reg_state[regno].reg_use[i].insn,
1178 reg_state[regno].reg_use[i].usep,
1179 /* Each change must have its own
1180 replacement. */
1181 reg_sum, 1);
1182
1183 if (apply_change_group ())
1184 {
caa4a250
BS
1185 struct reg_use *lowest_ruid = NULL;
1186
67bb0206
BS
1187 /* For every new use of REG_SUM, we have to record the use
1188 of BASE therein, i.e. operand 1. */
1189 for (i = reg_state[regno].use_index;
1190 i < RELOAD_COMBINE_MAX_USES; i++)
caa4a250
BS
1191 {
1192 struct reg_use *use = reg_state[regno].reg_use + i;
1193 reload_combine_note_use (&XEXP (*use->usep, 1), use->insn,
1194 use->ruid, use->containing_mem);
1195 if (lowest_ruid == NULL || use->ruid < lowest_ruid->ruid)
1196 lowest_ruid = use;
1197 }
1198
1199 fixup_debug_insns (reg, reg_sum, insn, lowest_ruid->insn);
67bb0206 1200
67bb0206
BS
1201 /* Delete the reg-reg addition. */
1202 delete_insn (insn);
1203
a5a4add7
JJ
1204 if (reg_state[regno].offset != const0_rtx
1205 /* Previous REG_EQUIV / REG_EQUAL notes for PREV
1206 are now invalid. */
1207 && remove_reg_equal_equiv_notes (prev))
1208 df_notes_rescan (prev);
67bb0206
BS
1209
1210 reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
67bb0206
BS
1211 return true;
1212 }
1213 }
1214 }
1215 return false;
1216}
1217
15e35479 1218static void
0c20a65f 1219reload_combine (void)
15e35479 1220{
f90af2e0 1221 rtx_insn *insn, *prev;
15e35479
KH
1222 basic_block bb;
1223 unsigned int r;
15e35479
KH
1224 int min_labelno, n_labels;
1225 HARD_REG_SET ever_live_at_start, *label_live;
1226
15e35479
KH
1227 /* To avoid wasting too much time later searching for an index register,
1228 determine the minimum and maximum index register numbers. */
67bb0206
BS
1229 if (INDEX_REG_CLASS == NO_REGS)
1230 last_index_reg = -1;
1231 else if (first_index_reg == -1 && last_index_reg == 0)
1232 {
1233 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
1234 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], r))
1235 {
1236 if (first_index_reg == -1)
1237 first_index_reg = r;
1238
1239 last_index_reg = r;
1240 }
1241
1242 /* If no index register is available, we can quit now. Set LAST_INDEX_REG
1243 to -1 so we'll know to quit early the next time we get here. */
1244 if (first_index_reg == -1)
1245 {
1246 last_index_reg = -1;
1247 return;
1248 }
1249 }
15e35479 1250
15e35479
KH
1251 /* Set up LABEL_LIVE and EVER_LIVE_AT_START. The register lifetime
1252 information is a bit fuzzy immediately after reload, but it's
1253 still good enough to determine which registers are live at a jump
1254 destination. */
1255 min_labelno = get_first_label_num ();
1256 n_labels = max_label_num () - min_labelno;
5ed6ace5 1257 label_live = XNEWVEC (HARD_REG_SET, n_labels);
15e35479
KH
1258 CLEAR_HARD_REG_SET (ever_live_at_start);
1259
4f42035e 1260 FOR_EACH_BB_REVERSE_FN (bb, cfun)
15e35479 1261 {
a813c111 1262 insn = BB_HEAD (bb);
4b4bf941 1263 if (LABEL_P (insn))
15e35479
KH
1264 {
1265 HARD_REG_SET live;
89a95777 1266 bitmap live_in = df_get_live_in (bb);
15e35479 1267
89a95777
KZ
1268 REG_SET_TO_HARD_REG_SET (live, live_in);
1269 compute_use_by_pseudos (&live, live_in);
6576d245 1270 LABEL_LIVE (insn) = live;
15e35479
KH
1271 IOR_HARD_REG_SET (ever_live_at_start, live);
1272 }
1273 }
1274
1275 /* Initialize last_label_ruid, reload_combine_ruid and reg_state. */
dc0d5a57 1276 last_label_ruid = last_jump_ruid = reload_combine_ruid = 0;
15e35479
KH
1277 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
1278 {
dc0d5a57
BS
1279 reg_state[r].store_ruid = 0;
1280 reg_state[r].real_store_ruid = 0;
15e35479
KH
1281 if (fixed_regs[r])
1282 reg_state[r].use_index = -1;
1283 else
1284 reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
1285 }
1286
dc0d5a57 1287 for (insn = get_last_insn (); insn; insn = prev)
15e35479 1288 {
7ad93142 1289 bool control_flow_insn;
15e35479
KH
1290 rtx note;
1291
dc0d5a57
BS
1292 prev = PREV_INSN (insn);
1293
15e35479
KH
1294 /* We cannot do our optimization across labels. Invalidating all the use
1295 information we have would be costly, so we just note where the label
1296 is and then later disable any optimization that would cross it. */
4b4bf941 1297 if (LABEL_P (insn))
15e35479 1298 last_label_ruid = reload_combine_ruid;
2195c9be
AK
1299 else if (BARRIER_P (insn))
1300 {
1301 /* Crossing a barrier resets all the use information. */
1302 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
1303 if (! fixed_regs[r])
15e35479 1304 reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
2195c9be
AK
1305 }
1306 else if (INSN_P (insn) && volatile_insn_p (PATTERN (insn)))
1307 /* Optimizations across insns being marked as volatile must be
1308 prevented. All the usage information is invalidated
1309 here. */
1310 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
1311 if (! fixed_regs[r]
1312 && reg_state[r].use_index != RELOAD_COMBINE_MAX_USES)
1313 reg_state[r].use_index = -1;
15e35479 1314
caa4a250 1315 if (! NONDEBUG_INSN_P (insn))
15e35479
KH
1316 continue;
1317
1318 reload_combine_ruid++;
1319
7ad93142
EB
1320 control_flow_insn = control_flow_insn_p (insn);
1321 if (control_flow_insn)
dc0d5a57
BS
1322 last_jump_ruid = reload_combine_ruid;
1323
1324 if (reload_combine_recognize_const_pattern (insn)
1325 || reload_combine_recognize_pattern (insn))
67bb0206 1326 continue;
15e35479 1327
e8448ba5 1328 note_stores (insn, reload_combine_note_store, NULL);
15e35479 1329
4b4bf941 1330 if (CALL_P (insn))
15e35479
KH
1331 {
1332 rtx link;
97ded4cd
TV
1333 HARD_REG_SET used_regs;
1334
1335 get_call_reg_set_usage (insn, &used_regs, call_used_reg_set);
15e35479
KH
1336
1337 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
97ded4cd 1338 if (TEST_HARD_REG_BIT (used_regs, r))
15e35479
KH
1339 {
1340 reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
1341 reg_state[r].store_ruid = reload_combine_ruid;
1342 }
1343
1344 for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
1345 link = XEXP (link, 1))
1346 {
e384e6b5
BS
1347 rtx setuse = XEXP (link, 0);
1348 rtx usage_rtx = XEXP (setuse, 0);
8df47bdf 1349
e8448ba5 1350 if (GET_CODE (setuse) == USE && REG_P (usage_rtx))
15e35479 1351 {
53d1bae9
RS
1352 unsigned int end_regno = END_REGNO (usage_rtx);
1353 for (unsigned int i = REGNO (usage_rtx); i < end_regno; ++i)
e8448ba5 1354 reg_state[i].use_index = -1;
15e35479
KH
1355 }
1356 }
15e35479 1357 }
18c33e03 1358
57895947 1359 if (control_flow_insn && !ANY_RETURN_P (PATTERN (insn)))
15e35479
KH
1360 {
1361 /* Non-spill registers might be used at the call destination in
1362 some unknown fashion, so we have to mark the unknown use. */
1363 HARD_REG_SET *live;
1364
1365 if ((condjump_p (insn) || condjump_in_parallel_p (insn))
1366 && JUMP_LABEL (insn))
57895947
EB
1367 {
1368 if (ANY_RETURN_P (JUMP_LABEL (insn)))
1369 live = NULL;
1370 else
1371 live = &LABEL_LIVE (JUMP_LABEL (insn));
1372 }
15e35479
KH
1373 else
1374 live = &ever_live_at_start;
1375
57895947
EB
1376 if (live)
1377 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
1378 if (TEST_HARD_REG_BIT (*live, r))
1379 reg_state[r].use_index = -1;
15e35479
KH
1380 }
1381
7ad93142
EB
1382 reload_combine_note_use (&PATTERN (insn), insn, reload_combine_ruid,
1383 NULL_RTX);
1384
15e35479
KH
1385 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1386 {
7ad93142 1387 if (REG_NOTE_KIND (note) == REG_INC && REG_P (XEXP (note, 0)))
15e35479
KH
1388 {
1389 int regno = REGNO (XEXP (note, 0));
15e35479 1390 reg_state[regno].store_ruid = reload_combine_ruid;
dc0d5a57 1391 reg_state[regno].real_store_ruid = reload_combine_ruid;
15e35479
KH
1392 reg_state[regno].use_index = -1;
1393 }
1394 }
1395 }
1396
1397 free (label_live);
1398}
1399
1400/* Check if DST is a register or a subreg of a register; if it is,
dc0d5a57
BS
1401 update store_ruid, real_store_ruid and use_index in the reg_state
1402 structure accordingly. Called via note_stores from reload_combine. */
15e35479
KH
1403
1404static void
7bc980e1 1405reload_combine_note_store (rtx dst, const_rtx set, void *data ATTRIBUTE_UNUSED)
15e35479
KH
1406{
1407 int regno = 0;
1408 int i;
ef4bddc2 1409 machine_mode mode = GET_MODE (dst);
15e35479
KH
1410
1411 if (GET_CODE (dst) == SUBREG)
1412 {
1413 regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
1414 GET_MODE (SUBREG_REG (dst)),
1415 SUBREG_BYTE (dst),
1416 GET_MODE (dst));
1417 dst = SUBREG_REG (dst);
1418 }
12c2b0ad
JL
1419
1420 /* Some targets do argument pushes without adding REG_INC notes. */
1421
1422 if (MEM_P (dst))
1423 {
1424 dst = XEXP (dst, 0);
1425 if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
96676a5d
JJ
1426 || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC
1427 || GET_CODE (dst) == PRE_MODIFY || GET_CODE (dst) == POST_MODIFY)
12c2b0ad 1428 {
53d1bae9
RS
1429 unsigned int end_regno = END_REGNO (XEXP (dst, 0));
1430 for (unsigned int i = REGNO (XEXP (dst, 0)); i < end_regno; ++i)
12c2b0ad
JL
1431 {
1432 /* We could probably do better, but for now mark the register
1433 as used in an unknown fashion and set/clobbered at this
1434 insn. */
1435 reg_state[i].use_index = -1;
1436 reg_state[i].store_ruid = reload_combine_ruid;
1437 reg_state[i].real_store_ruid = reload_combine_ruid;
1438 }
1439 }
1440 else
1441 return;
1442 }
1443
f8cfc6aa 1444 if (!REG_P (dst))
15e35479
KH
1445 return;
1446 regno += REGNO (dst);
1447
1448 /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
1449 careful with registers / register parts that are not full words.
46d096a3 1450 Similarly for ZERO_EXTRACT. */
dc0d5a57 1451 if (GET_CODE (SET_DEST (set)) == ZERO_EXTRACT
15e35479
KH
1452 || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART)
1453 {
4edd6298 1454 for (i = end_hard_regno (mode, regno) - 1; i >= regno; i--)
15e35479
KH
1455 {
1456 reg_state[i].use_index = -1;
1457 reg_state[i].store_ruid = reload_combine_ruid;
dc0d5a57 1458 reg_state[i].real_store_ruid = reload_combine_ruid;
15e35479
KH
1459 }
1460 }
1461 else
1462 {
4edd6298 1463 for (i = end_hard_regno (mode, regno) - 1; i >= regno; i--)
15e35479
KH
1464 {
1465 reg_state[i].store_ruid = reload_combine_ruid;
dc0d5a57
BS
1466 if (GET_CODE (set) == SET)
1467 reg_state[i].real_store_ruid = reload_combine_ruid;
15e35479
KH
1468 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
1469 }
1470 }
1471}
1472
1473/* XP points to a piece of rtl that has to be checked for any uses of
1474 registers.
1475 *XP is the pattern of INSN, or a part of it.
1476 Called from reload_combine, and recursively by itself. */
1477static void
f90af2e0 1478reload_combine_note_use (rtx *xp, rtx_insn *insn, int ruid, rtx containing_mem)
15e35479
KH
1479{
1480 rtx x = *xp;
1481 enum rtx_code code = x->code;
1482 const char *fmt;
1483 int i, j;
1484 rtx offset = const0_rtx; /* For the REG case below. */
1485
1486 switch (code)
1487 {
1488 case SET:
f8cfc6aa 1489 if (REG_P (SET_DEST (x)))
15e35479 1490 {
dc0d5a57 1491 reload_combine_note_use (&SET_SRC (x), insn, ruid, NULL_RTX);
15e35479
KH
1492 return;
1493 }
1494 break;
1495
1496 case USE:
1497 /* If this is the USE of a return value, we can't change it. */
f8cfc6aa 1498 if (REG_P (XEXP (x, 0)) && REG_FUNCTION_VALUE_P (XEXP (x, 0)))
15e35479 1499 {
53d1bae9 1500 /* Mark the return register as used in an unknown fashion. */
15e35479 1501 rtx reg = XEXP (x, 0);
53d1bae9
RS
1502 unsigned int end_regno = END_REGNO (reg);
1503 for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
1504 reg_state[regno].use_index = -1;
15e35479
KH
1505 return;
1506 }
1507 break;
1508
1509 case CLOBBER:
f8cfc6aa 1510 if (REG_P (SET_DEST (x)))
15e35479
KH
1511 {
1512 /* No spurious CLOBBERs of pseudo registers may remain. */
e16acfcd 1513 gcc_assert (REGNO (SET_DEST (x)) < FIRST_PSEUDO_REGISTER);
15e35479
KH
1514 return;
1515 }
1516 break;
1517
8df47bdf
AH
1518 case CLOBBER_HIGH:
1519 gcc_assert (REG_P (SET_DEST (x)));
1520 return;
1521
15e35479
KH
1522 case PLUS:
1523 /* We are interested in (plus (reg) (const_int)) . */
f8cfc6aa 1524 if (!REG_P (XEXP (x, 0))
481683e1 1525 || !CONST_INT_P (XEXP (x, 1)))
15e35479
KH
1526 break;
1527 offset = XEXP (x, 1);
1528 x = XEXP (x, 0);
1529 /* Fall through. */
1530 case REG:
1531 {
1532 int regno = REGNO (x);
1533 int use_index;
1534 int nregs;
1535
1536 /* No spurious USEs of pseudo registers may remain. */
e16acfcd 1537 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
15e35479 1538
dc8afb70 1539 nregs = REG_NREGS (x);
15e35479
KH
1540
1541 /* We can't substitute into multi-hard-reg uses. */
1542 if (nregs > 1)
1543 {
1544 while (--nregs >= 0)
1545 reg_state[regno + nregs].use_index = -1;
1546 return;
1547 }
1548
08bd6876
BS
1549 /* We may be called to update uses in previously seen insns.
1550 Don't add uses beyond the last store we saw. */
1551 if (ruid < reg_state[regno].store_ruid)
1552 return;
1553
15e35479
KH
1554 /* If this register is already used in some unknown fashion, we
1555 can't do anything.
1556 If we decrement the index from zero to -1, we can't store more
1557 uses, so this register becomes used in an unknown fashion. */
1558 use_index = --reg_state[regno].use_index;
1559 if (use_index < 0)
1560 return;
1561
dc0d5a57 1562 if (use_index == RELOAD_COMBINE_MAX_USES - 1)
15e35479
KH
1563 {
1564 /* This is the first use of this register we have seen since we
1565 marked it as dead. */
1566 reg_state[regno].offset = offset;
dc0d5a57
BS
1567 reg_state[regno].all_offsets_match = true;
1568 reg_state[regno].use_ruid = ruid;
15e35479 1569 }
b1d5eee8
BS
1570 else
1571 {
1572 if (reg_state[regno].use_ruid > ruid)
1573 reg_state[regno].use_ruid = ruid;
1574
1575 if (! rtx_equal_p (offset, reg_state[regno].offset))
1576 reg_state[regno].all_offsets_match = false;
1577 }
dc0d5a57 1578
15e35479 1579 reg_state[regno].reg_use[use_index].insn = insn;
dc0d5a57
BS
1580 reg_state[regno].reg_use[use_index].ruid = ruid;
1581 reg_state[regno].reg_use[use_index].containing_mem = containing_mem;
15e35479
KH
1582 reg_state[regno].reg_use[use_index].usep = xp;
1583 return;
1584 }
1585
dc0d5a57
BS
1586 case MEM:
1587 containing_mem = x;
1588 break;
1589
15e35479
KH
1590 default:
1591 break;
1592 }
1593
1594 /* Recursively process the components of X. */
1595 fmt = GET_RTX_FORMAT (code);
1596 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1597 {
1598 if (fmt[i] == 'e')
dc0d5a57 1599 reload_combine_note_use (&XEXP (x, i), insn, ruid, containing_mem);
15e35479
KH
1600 else if (fmt[i] == 'E')
1601 {
1602 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
dc0d5a57
BS
1603 reload_combine_note_use (&XVECEXP (x, i, j), insn, ruid,
1604 containing_mem);
15e35479
KH
1605 }
1606 }
1607}
1608\f
1609/* See if we can reduce the cost of a constant by replacing a move
1610 with an add. We track situations in which a register is set to a
1611 constant or to a register plus a constant. */
1612/* We cannot do our optimization across labels. Invalidating all the
1613 information about register contents we have would be costly, so we
1614 use move2add_last_label_luid to note where the label is and then
1615 later disable any optimization that would cross it.
7beb0596
JZ
1616 reg_offset[n] / reg_base_reg[n] / reg_symbol_ref[n] / reg_mode[n]
1617 are only valid if reg_set_luid[n] is greater than
7894bc6b
JR
1618 move2add_last_label_luid.
1619 For a set that established a new (potential) base register with
1620 non-constant value, we use move2add_luid from the place where the
1621 setting insn is encountered; registers based off that base then
1622 get the same reg_set_luid. Constants all get
1623 move2add_last_label_luid + 1 as their reg_set_luid. */
15e35479
KH
1624static int reg_set_luid[FIRST_PSEUDO_REGISTER];
1625
1626/* If reg_base_reg[n] is negative, register n has been set to
7beb0596 1627 reg_offset[n] or reg_symbol_ref[n] + reg_offset[n] in mode reg_mode[n].
15e35479
KH
1628 If reg_base_reg[n] is non-negative, register n has been set to the
1629 sum of reg_offset[n] and the value of register reg_base_reg[n]
7894bc6b
JR
1630 before reg_set_luid[n], calculated in mode reg_mode[n] .
1631 For multi-hard-register registers, all but the first one are
1632 recorded as BLKmode in reg_mode. Setting reg_mode to VOIDmode
1633 marks it as invalid. */
15e35479
KH
1634static HOST_WIDE_INT reg_offset[FIRST_PSEUDO_REGISTER];
1635static int reg_base_reg[FIRST_PSEUDO_REGISTER];
7beb0596 1636static rtx reg_symbol_ref[FIRST_PSEUDO_REGISTER];
ef4bddc2 1637static machine_mode reg_mode[FIRST_PSEUDO_REGISTER];
15e35479
KH
1638
1639/* move2add_luid is linearly increased while scanning the instructions
1640 from first to last. It is used to set reg_set_luid in
1641 reload_cse_move2add and move2add_note_store. */
1642static int move2add_luid;
1643
1644/* move2add_last_label_luid is set whenever a label is found. Labels
1645 invalidate all previously collected reg_offset data. */
1646static int move2add_last_label_luid;
1647
1648/* ??? We don't know how zero / sign extension is handled, hence we
1649 can't go from a narrower to a wider mode. */
1650#define MODES_OK_FOR_MOVE2ADD(OUTMODE, INMODE) \
1651 (GET_MODE_SIZE (OUTMODE) == GET_MODE_SIZE (INMODE) \
1652 || (GET_MODE_SIZE (OUTMODE) <= GET_MODE_SIZE (INMODE) \
d0edd768 1653 && TRULY_NOOP_TRUNCATION_MODES_P (OUTMODE, INMODE)))
15e35479 1654
7894bc6b
JR
1655/* Record that REG is being set to a value with the mode of REG. */
1656
1657static void
1658move2add_record_mode (rtx reg)
1659{
1660 int regno, nregs;
ef4bddc2 1661 machine_mode mode = GET_MODE (reg);
7894bc6b
JR
1662
1663 if (GET_CODE (reg) == SUBREG)
1664 {
1665 regno = subreg_regno (reg);
1666 nregs = subreg_nregs (reg);
1667 }
1668 else if (REG_P (reg))
1669 {
1670 regno = REGNO (reg);
dc8afb70 1671 nregs = REG_NREGS (reg);
7894bc6b
JR
1672 }
1673 else
1674 gcc_unreachable ();
1675 for (int i = nregs - 1; i > 0; i--)
1676 reg_mode[regno + i] = BLKmode;
1677 reg_mode[regno] = mode;
1678}
1679
1680/* Record that REG is being set to the sum of SYM and OFF. */
1681
1682static void
1683move2add_record_sym_value (rtx reg, rtx sym, rtx off)
1684{
1685 int regno = REGNO (reg);
1686
1687 move2add_record_mode (reg);
1688 reg_set_luid[regno] = move2add_luid;
1689 reg_base_reg[regno] = -1;
1690 reg_symbol_ref[regno] = sym;
1691 reg_offset[regno] = INTVAL (off);
1692}
1693
1694/* Check if REGNO contains a valid value in MODE. */
1695
1696static bool
1e047eed 1697move2add_valid_value_p (int regno, scalar_int_mode mode)
7894bc6b 1698{
ca035367 1699 if (reg_set_luid[regno] <= move2add_last_label_luid)
7894bc6b
JR
1700 return false;
1701
ca035367
JR
1702 if (mode != reg_mode[regno])
1703 {
6b9c3dec
RS
1704 scalar_int_mode old_mode;
1705 if (!is_a <scalar_int_mode> (reg_mode[regno], &old_mode)
1706 || !MODES_OK_FOR_MOVE2ADD (mode, old_mode))
ca035367
JR
1707 return false;
1708 /* The value loaded into regno in reg_mode[regno] is also valid in
1709 mode after truncation only if (REG:mode regno) is the lowpart of
1710 (REG:reg_mode[regno] regno). Now, for big endian, the starting
1711 regno of the lowpart might be different. */
91914e56 1712 poly_int64 s_off = subreg_lowpart_offset (mode, old_mode);
6b9c3dec 1713 s_off = subreg_regno_offset (regno, old_mode, s_off, mode);
91914e56 1714 if (maybe_ne (s_off, 0))
ca035367
JR
1715 /* We could in principle adjust regno, check reg_mode[regno] to be
1716 BLKmode, and return s_off to the caller (vs. -1 for failure),
1717 but we currently have no callers that could make use of this
1718 information. */
1719 return false;
1720 }
1721
4edd6298
RS
1722 for (int i = end_hard_regno (mode, regno) - 1; i > regno; i--)
1723 if (reg_mode[i] != BLKmode)
7894bc6b
JR
1724 return false;
1725 return true;
1726}
1727
1e047eed
RS
1728/* This function is called with INSN that sets REG (of mode MODE)
1729 to (SYM + OFF), while REG is known to already have value (SYM + offset).
7beb0596
JZ
1730 This function tries to change INSN into an add instruction
1731 (set (REG) (plus (REG) (OFF - offset))) using the known value.
dc0d5a57
BS
1732 It also updates the information about REG's known value.
1733 Return true if we made a change. */
7beb0596 1734
dc0d5a57 1735static bool
1e047eed
RS
1736move2add_use_add2_insn (scalar_int_mode mode, rtx reg, rtx sym, rtx off,
1737 rtx_insn *insn)
7beb0596
JZ
1738{
1739 rtx pat = PATTERN (insn);
1740 rtx src = SET_SRC (pat);
1741 int regno = REGNO (reg);
1e047eed 1742 rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[regno], mode);
7beb0596 1743 bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
dc0d5a57 1744 bool changed = false;
7beb0596
JZ
1745
1746 /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
1747 use (set (reg) (reg)) instead.
1748 We don't delete this insn, nor do we convert it into a
1749 note, to avoid losing register notes or the return
1750 value flag. jump2 already knows how to get rid of
1751 no-op moves. */
1752 if (new_src == const0_rtx)
1753 {
1754 /* If the constants are different, this is a
1755 truncation, that, if turned into (set (reg)
1756 (reg)), would be discarded. Maybe we should
1757 try a truncMN pattern? */
1758 if (INTVAL (off) == reg_offset [regno])
dc0d5a57 1759 changed = validate_change (insn, &SET_SRC (pat), reg, 0);
7beb0596 1760 }
22939744 1761 else
7beb0596 1762 {
22939744 1763 struct full_rtx_costs oldcst, newcst;
1e047eed 1764 rtx tem = gen_rtx_PLUS (mode, reg, new_src);
22939744 1765
d51102f3 1766 get_full_set_rtx_cost (pat, &oldcst);
22939744 1767 SET_SRC (pat) = tem;
d51102f3 1768 get_full_set_rtx_cost (pat, &newcst);
22939744
BS
1769 SET_SRC (pat) = src;
1770
1771 if (costs_lt_p (&newcst, &oldcst, speed)
1772 && have_add2_insn (reg, new_src))
1773 changed = validate_change (insn, &SET_SRC (pat), tem, 0);
1e047eed 1774 else if (sym == NULL_RTX && mode != BImode)
7beb0596 1775 {
1e047eed
RS
1776 scalar_int_mode narrow_mode;
1777 FOR_EACH_MODE_UNTIL (narrow_mode, mode)
7beb0596 1778 {
22939744
BS
1779 if (have_insn_for (STRICT_LOW_PART, narrow_mode)
1780 && ((reg_offset[regno] & ~GET_MODE_MASK (narrow_mode))
1781 == (INTVAL (off) & ~GET_MODE_MASK (narrow_mode))))
1782 {
b49eefa5 1783 rtx narrow_reg = gen_lowpart_common (narrow_mode, reg);
22939744
BS
1784 rtx narrow_src = gen_int_mode (INTVAL (off),
1785 narrow_mode);
1786 rtx new_set
f7df4a84 1787 = gen_rtx_SET (gen_rtx_STRICT_LOW_PART (VOIDmode,
22939744
BS
1788 narrow_reg),
1789 narrow_src);
a0f37b26
AS
1790 get_full_set_rtx_cost (new_set, &newcst);
1791 if (costs_lt_p (&newcst, &oldcst, speed))
1792 {
1793 changed = validate_change (insn, &PATTERN (insn),
1794 new_set, 0);
1795 if (changed)
1796 break;
1797 }
22939744 1798 }
7beb0596
JZ
1799 }
1800 }
1801 }
7894bc6b 1802 move2add_record_sym_value (reg, sym, off);
dc0d5a57 1803 return changed;
7beb0596
JZ
1804}
1805
1806
1e047eed
RS
/* This function is called with INSN that sets REG (of mode MODE) to
   (SYM + OFF), but REG doesn't have known value (SYM + offset).  This
   function tries to find another register which is known to already have
   value (SYM + offset) and change INSN into an add instruction
   (set (REG) (plus (the found register) (OFF - offset))) if such
   a register is found.  It also updates the information about
   REG's known value.
   Return true iff we made a change.  */

static bool
move2add_use_add3_insn (scalar_int_mode mode, rtx reg, rtx sym, rtx off,
			rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  int regno = REGNO (reg);
  int min_regno = 0;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  int i;
  bool changed = false;
  struct full_rtx_costs oldcst, newcst, mincst;
  rtx plus_expr;

  /* MINCST tracks the cheapest replacement found so far; start it at
     "infinitely expensive" so any real candidate beats it.  */
  init_costs_to_max (&mincst);
  get_full_set_rtx_cost (pat, &oldcst);

  /* Temporarily rewrite INSN's source as (plus (reg) (const_int 0)) so we
     can cost candidate (plus (reg) (OFF - offset)) forms in place by just
     replacing the second operand.  SET_SRC is restored below.  */
  plus_expr = gen_rtx_PLUS (GET_MODE (reg), reg, const0_rtx);
  SET_SRC (pat) = plus_expr;

  /* Scan all hard registers for one known to hold SYM + some constant,
     and remember the one yielding the cheapest add.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (move2add_valid_value_p (i, mode)
	&& reg_base_reg[i] < 0
	&& reg_symbol_ref[i] != NULL_RTX
	&& rtx_equal_p (sym, reg_symbol_ref[i]))
      {
	rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[i],
				    GET_MODE (reg));
	/* (set (reg) (plus (reg) (const_int 0))) is not canonical;
	   use (set (reg) (reg)) instead.
	   We don't delete this insn, nor do we convert it into a
	   note, to avoid losing register notes or the return
	   value flag.  jump2 already knows how to get rid of
	   no-op moves.  */
	if (new_src == const0_rtx)
	  {
	    /* A plain register copy is as cheap as it gets; stop
	       searching.  */
	    init_costs_to_zero (&mincst);
	    min_regno = i;
	    break;
	  }
	else
	  {
	    XEXP (plus_expr, 1) = new_src;
	    get_full_set_rtx_cost (pat, &newcst);

	    if (costs_lt_p (&newcst, &mincst, speed))
	      {
		mincst = newcst;
		min_regno = i;
	      }
	  }
      }
  /* Restore INSN's original source before attempting validation.  */
  SET_SRC (pat) = src;

  if (costs_lt_p (&mincst, &oldcst, speed))
    {
      rtx tem;

      tem = gen_rtx_REG (GET_MODE (reg), min_regno);
      /* I == MIN_REGNO only when we broke out early on the const0_rtx
	 case above; then the replacement is the bare register copy.
	 Otherwise I == FIRST_PSEUDO_REGISTER and we need the PLUS.  */
      if (i != min_regno)
	{
	  rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[min_regno],
				      GET_MODE (reg));
	  tem = gen_rtx_PLUS (GET_MODE (reg), tem, new_src);
	}
      if (validate_change (insn, &SET_SRC (pat), tem, 0))
	changed = true;
    }
  /* Whether or not INSN was changed, REG now holds SYM + OFF; record it.  */
  reg_set_luid[regno] = move2add_luid;
  move2add_record_sym_value (reg, sym, off);
  return changed;
}
1888
dc0d5a57
BS
/* Convert move insns with constant inputs to additions if they are cheaper.
   Scans the insn stream starting at FIRST, tracking per-hard-register
   known values (constant, base-register + offset, or symbol + offset) in
   the reg_* arrays, and rewrites later moves into adds relative to a
   register that already holds a related value.
   Return true if any changes were made.  */
static bool
reload_cse_move2add (rtx_insn *first)
{
  int i;
  rtx_insn *insn;
  bool changed = false;

  /* Start with no known values for any hard register.  */
  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
    {
      reg_set_luid[i] = 0;
      reg_offset[i] = 0;
      reg_base_reg[i] = 0;
      reg_symbol_ref[i] = NULL_RTX;
      reg_mode[i] = VOIDmode;
    }

  move2add_last_label_luid = 0;
  move2add_luid = 2;
  for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
    {
      rtx pat, note;

      if (LABEL_P (insn))
	{
	  move2add_last_label_luid = move2add_luid;
	  /* We're going to increment move2add_luid twice after a
	     label, so that we can use move2add_last_label_luid + 1 as
	     the luid for constants.  */
	  move2add_luid++;
	  continue;
	}
      if (! INSN_P (insn))
	continue;
      pat = PATTERN (insn);
      /* For simplicity, we only perform this optimization on
	 straightforward SETs.  */
      scalar_int_mode mode;
      if (GET_CODE (pat) == SET
	  && REG_P (SET_DEST (pat))
	  && is_a <scalar_int_mode> (GET_MODE (SET_DEST (pat)), &mode))
	{
	  rtx reg = SET_DEST (pat);
	  int regno = REGNO (reg);
	  rtx src = SET_SRC (pat);

	  /* Check if we have valid information on the contents of this
	     register in the mode of REG.  */
	  if (move2add_valid_value_p (regno, mode)
	      && dbg_cnt (cse2_move2add))
	    {
	      /* Try to transform (set (REGX) (CONST_INT A))
				  ...
				  (set (REGX) (CONST_INT B))
		 to
				  (set (REGX) (CONST_INT A))
				  ...
				  (set (REGX) (plus (REGX) (CONST_INT B-A)))
		 or
				  (set (REGX) (CONST_INT A))
				  ...
				  (set (STRICT_LOW_PART (REGX)) (CONST_INT B))
	      */

	      if (CONST_INT_P (src)
		  && reg_base_reg[regno] < 0
		  && reg_symbol_ref[regno] == NULL_RTX)
		{
		  changed |= move2add_use_add2_insn (mode, reg, NULL_RTX,
						     src, insn);
		  continue;
		}

	      /* Try to transform (set (REGX) (REGY))
				  (set (REGX) (PLUS (REGX) (CONST_INT A)))
				  ...
				  (set (REGX) (REGY))
				  (set (REGX) (PLUS (REGX) (CONST_INT B)))
		 to
				  (set (REGX) (REGY))
				  (set (REGX) (PLUS (REGX) (CONST_INT A)))
				  ...
				  (set (REGX) (plus (REGX) (CONST_INT B-A)))  */
	      else if (REG_P (src)
		       && reg_set_luid[regno] == reg_set_luid[REGNO (src)]
		       && reg_base_reg[regno] == reg_base_reg[REGNO (src)]
		       && move2add_valid_value_p (REGNO (src), mode))
		{
		  rtx_insn *next = next_nonnote_nondebug_insn (insn);
		  rtx set = NULL_RTX;
		  if (next)
		    set = single_set (next);
		  if (set
		      && SET_DEST (set) == reg
		      && GET_CODE (SET_SRC (set)) == PLUS
		      && XEXP (SET_SRC (set), 0) == reg
		      && CONST_INT_P (XEXP (SET_SRC (set), 1)))
		    {
		      rtx src3 = XEXP (SET_SRC (set), 1);
		      unsigned HOST_WIDE_INT added_offset = UINTVAL (src3);
		      HOST_WIDE_INT base_offset = reg_offset[REGNO (src)];
		      HOST_WIDE_INT regno_offset = reg_offset[regno];
		      rtx new_src =
			gen_int_mode (added_offset
				      + base_offset
				      - regno_offset,
				      mode);
		      bool success = false;
		      bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));

		      if (new_src == const0_rtx)
			/* See above why we create (set (reg) (reg)) here.  */
			success
			  = validate_change (next, &SET_SRC (set), reg, 0);
		      else
			{
			  /* Cost the rewritten NEXT against keeping both
			     insns (hence the extra insn added to OLDCST).  */
			  rtx old_src = SET_SRC (set);
			  struct full_rtx_costs oldcst, newcst;
			  rtx tem = gen_rtx_PLUS (mode, reg, new_src);

			  get_full_set_rtx_cost (set, &oldcst);
			  SET_SRC (set) = tem;
			  get_full_set_src_cost (tem, mode, &newcst);
			  SET_SRC (set) = old_src;
			  costs_add_n_insns (&oldcst, 1);

			  if (costs_lt_p (&newcst, &oldcst, speed)
			      && have_add2_insn (reg, new_src))
			    {
			      rtx newpat = gen_rtx_SET (reg, tem);
			      success
				= validate_change (next, &PATTERN (next),
						   newpat, 0);
			    }
			}
		      /* On success the first move is redundant; NEXT now
			 computes REG directly from its previous value.  */
		      if (success)
			delete_insn (insn);
		      changed |= success;
		      insn = next;
		      move2add_record_mode (reg);
		      reg_offset[regno]
			= trunc_int_for_mode (added_offset + base_offset,
					      mode);
		      continue;
		    }
		}
	    }

	  /* Try to transform
	     (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
	     ...
	     (set (REGY) (CONST (PLUS (SYMBOL_REF) (CONST_INT B))))
	     to
	     (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
	     ...
	     (set (REGY) (CONST (PLUS (REGX) (CONST_INT B-A))))  */
	  if ((GET_CODE (src) == SYMBOL_REF
	       || (GET_CODE (src) == CONST
		   && GET_CODE (XEXP (src, 0)) == PLUS
		   && GET_CODE (XEXP (XEXP (src, 0), 0)) == SYMBOL_REF
		   && CONST_INT_P (XEXP (XEXP (src, 0), 1))))
	      && dbg_cnt (cse2_move2add))
	    {
	      rtx sym, off;

	      if (GET_CODE (src) == SYMBOL_REF)
		{
		  sym = src;
		  off = const0_rtx;
		}
	      else
		{
		  sym = XEXP (XEXP (src, 0), 0);
		  off = XEXP (XEXP (src, 0), 1);
		}

	      /* If the reg already contains the value which is sum of
		 sym and some constant value, we can use an add2 insn.  */
	      if (move2add_valid_value_p (regno, mode)
		  && reg_base_reg[regno] < 0
		  && reg_symbol_ref[regno] != NULL_RTX
		  && rtx_equal_p (sym, reg_symbol_ref[regno]))
		changed |= move2add_use_add2_insn (mode, reg, sym, off, insn);

	      /* Otherwise, we have to find a register whose value is sum
		 of sym and some constant value.  */
	      else
		changed |= move2add_use_add3_insn (mode, reg, sym, off, insn);

	      continue;
	    }
	}

      /* REG_INC notes mark auto-increment side effects; those registers
	 no longer hold their tracked values.  */
      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	{
	  if (REG_NOTE_KIND (note) == REG_INC
	      && REG_P (XEXP (note, 0)))
	    {
	      /* Reset the information about this register.  */
	      int regno = REGNO (XEXP (note, 0));
	      if (regno < FIRST_PSEUDO_REGISTER)
		{
		  move2add_record_mode (XEXP (note, 0));
		  reg_mode[regno] = VOIDmode;
		}
	    }
	}
      note_stores (insn, move2add_note_store, insn);

      /* If INSN is a conditional branch, we try to extract an
	 implicit set out of it.  */
      if (any_condjump_p (insn))
	{
	  rtx cnd = fis_get_condition (insn);

	  if (cnd != NULL_RTX
	      && GET_CODE (cnd) == NE
	      && REG_P (XEXP (cnd, 0))
	      && !reg_set_p (XEXP (cnd, 0), insn)
	      /* The following two checks, which are also in
		 move2add_note_store, are intended to reduce the
		 number of calls to gen_rtx_SET to avoid memory
		 allocation if possible.  */
	      && SCALAR_INT_MODE_P (GET_MODE (XEXP (cnd, 0)))
	      && REG_NREGS (XEXP (cnd, 0)) == 1
	      && CONST_INT_P (XEXP (cnd, 1)))
	    {
	      /* On the fall-through edge of (reg != const) the register
		 is known to equal the constant; record that as if it
		 were an explicit set.  */
	      rtx implicit_set =
		gen_rtx_SET (XEXP (cnd, 0), XEXP (cnd, 1));
	      move2add_note_store (SET_DEST (implicit_set), implicit_set, insn);
	    }
	}

      /* If this is a CALL_INSN, all call used registers are stored with
	 unknown values.  */
      if (CALL_P (insn))
	{
	  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
	    {
	      if (call_used_regs[i])
		/* Reset the information about this register.  */
		reg_mode[i] = VOIDmode;
	    }
	}
    }
  return changed;
}
2137
7beb0596
JZ
/* SET is a SET or CLOBBER that sets DST.  DATA is the insn which
   contains SET.
   Update reg_set_luid, reg_offset and reg_base_reg accordingly.
   Called from reload_cse_move2add via note_stores.  */

static void
move2add_note_store (rtx dst, const_rtx set, void *data)
{
  rtx_insn *insn = (rtx_insn *) data;
  unsigned int regno = 0;
  scalar_int_mode mode;

  /* Some targets do argument pushes without adding REG_INC notes.  */

  if (MEM_P (dst))
    {
      /* A pre/post-modify address clobbers its address register.  */
      dst = XEXP (dst, 0);
      if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
	  || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC)
	reg_mode[REGNO (XEXP (dst, 0))] = VOIDmode;
      return;
    }

  if (GET_CODE (dst) == SUBREG)
    regno = subreg_regno (dst);
  else if (REG_P (dst))
    regno = REGNO (dst);
  else
    return;

  /* Only scalar integer destinations can be tracked; anything else
     invalidates the register.  */
  if (!is_a <scalar_int_mode> (GET_MODE (dst), &mode))
    goto invalidate;

  if (GET_CODE (set) == SET)
    {
      rtx note, sym = NULL_RTX;
      rtx off;

      /* A REG_EQUAL/REG_EQUIV note of the form SYM or (const (plus SYM
	 const_int)) lets us record a symbolic value for DST.  */
      note = find_reg_equal_equiv_note (insn);
      if (note && GET_CODE (XEXP (note, 0)) == SYMBOL_REF)
	{
	  sym = XEXP (note, 0);
	  off = const0_rtx;
	}
      else if (note && GET_CODE (XEXP (note, 0)) == CONST
	       && GET_CODE (XEXP (XEXP (note, 0), 0)) == PLUS
	       && GET_CODE (XEXP (XEXP (XEXP (note, 0), 0), 0)) == SYMBOL_REF
	       && CONST_INT_P (XEXP (XEXP (XEXP (note, 0), 0), 1)))
	{
	  sym = XEXP (XEXP (XEXP (note, 0), 0), 0);
	  off = XEXP (XEXP (XEXP (note, 0), 0), 1);
	}

      if (sym != NULL_RTX)
	{
	  move2add_record_sym_value (dst, sym, off);
	  return;
	}
    }

  if (GET_CODE (set) == SET
      && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
    {
      rtx src = SET_SRC (set);
      rtx base_reg;
      unsigned HOST_WIDE_INT offset;
      int base_regno;

      /* Classify SRC: (plus reg const), (plus reg reg-with-known-const),
	 a plain register copy, or a constant load.  */
      switch (GET_CODE (src))
	{
	case PLUS:
	  if (REG_P (XEXP (src, 0)))
	    {
	      base_reg = XEXP (src, 0);

	      if (CONST_INT_P (XEXP (src, 1)))
		offset = UINTVAL (XEXP (src, 1));
	      else if (REG_P (XEXP (src, 1))
		       && move2add_valid_value_p (REGNO (XEXP (src, 1)), mode))
		{
		  if (reg_base_reg[REGNO (XEXP (src, 1))] < 0
		      && reg_symbol_ref[REGNO (XEXP (src, 1))] == NULL_RTX)
		    offset = reg_offset[REGNO (XEXP (src, 1))];
		  /* Maybe the first register is known to be a
		     constant.  */
		  else if (move2add_valid_value_p (REGNO (base_reg), mode)
			   && reg_base_reg[REGNO (base_reg)] < 0
			   && reg_symbol_ref[REGNO (base_reg)] == NULL_RTX)
		    {
		      offset = reg_offset[REGNO (base_reg)];
		      base_reg = XEXP (src, 1);
		    }
		  else
		    goto invalidate;
		}
	      else
		goto invalidate;

	      break;
	    }

	  goto invalidate;

	case REG:
	  base_reg = src;
	  offset = 0;
	  break;

	case CONST_INT:
	  /* Start tracking the register as a constant.  */
	  reg_base_reg[regno] = -1;
	  reg_symbol_ref[regno] = NULL_RTX;
	  reg_offset[regno] = INTVAL (SET_SRC (set));
	  /* We assign the same luid to all registers set to constants.  */
	  reg_set_luid[regno] = move2add_last_label_luid + 1;
	  move2add_record_mode (dst);
	  return;

	default:
	  goto invalidate;
	}

      base_regno = REGNO (base_reg);
      /* If information about the base register is not valid, set it
	 up as a new base register, pretending its value is known
	 starting from the current insn.  */
      if (!move2add_valid_value_p (base_regno, mode))
	{
	  reg_base_reg[base_regno] = base_regno;
	  reg_symbol_ref[base_regno] = NULL_RTX;
	  reg_offset[base_regno] = 0;
	  reg_set_luid[base_regno] = move2add_luid;
	  gcc_assert (GET_MODE (base_reg) == mode);
	  move2add_record_mode (base_reg);
	}

      /* Copy base information from our base register.  */
      reg_set_luid[regno] = reg_set_luid[base_regno];
      reg_base_reg[regno] = reg_base_reg[base_regno];
      reg_symbol_ref[regno] = reg_symbol_ref[base_regno];

      /* Compute the sum of the offsets or constants.  */
      reg_offset[regno]
	= trunc_int_for_mode (offset + reg_offset[base_regno], mode);

      move2add_record_mode (dst);
    }
  else if (GET_CODE (set) == CLOBBER_HIGH)
    {
      /* Only invalidate if actually clobbered.  */
      if (reg_mode[regno] == BLKmode
	  || reg_is_clobbered_by_clobber_high (regno, reg_mode[regno], dst))
	goto invalidate;
    }
  else
    {
    invalidate:
      /* Invalidate the contents of the register.  */
      move2add_record_mode (dst);
      reg_mode[regno] = VOIDmode;
    }
}
ef330312 2301\f
27a4cd48
DM
namespace {

/* Static metadata for the "postreload" RTL pass; consumed by the pass
   manager via the rtl_opt_pass constructor below.  */
const pass_data pass_data_postreload_cse =
{
  RTL_PASS, /* type */
  "postreload", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_RELOAD_CSE_REGS, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

/* Pass object wrapping the post-reload CSE work done by
   reload_cse_regs.  */
class pass_postreload_cse : public rtl_opt_pass
{
public:
  pass_postreload_cse (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_postreload_cse, ctxt)
  {}

  /* opt_pass methods: */
  /* Run only when optimizing and after register allocation/reload.  */
  virtual bool gate (function *) { return (optimize > 0 && reload_completed); }

  virtual unsigned int execute (function *);

}; // class pass_postreload_cse

/* Main entry point of the pass: run the hard-register CSE over FUN's
   insns and clean up any EH edges made dead by it.  Always returns 0
   (no extra TODO flags).  */
unsigned int
pass_postreload_cse::execute (function *fun)
{
  /* Debug counter allows bisecting miscompilations to this pass.  */
  if (!dbg_cnt (postreload_cse))
    return 0;

  /* Do a very simple CSE pass over just the hard registers.  */
  reload_cse_regs (get_insns ());
  /* Reload_cse_regs can eliminate potentially-trapping MEMs.
     Remove any EH edges associated with them.  */
  if (fun->can_throw_non_call_exceptions
      && purge_all_dead_edges ())
    cleanup_cfg (0);

  return 0;
}

} // anon namespace

/* Factory function used by the pass manager to instantiate the pass.  */
rtl_opt_pass *
make_pass_postreload_cse (gcc::context *ctxt)
{
  return new pass_postreload_cse (ctxt);
}