/* Perform simple optimizations to clean up the result of reload.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "rtl.h"
#include "df.h"

#include "tm_p.h"
#include "obstack.h"
#include "insn-config.h"
#include "flags.h"
#include "alias.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "regs.h"
#include "cfgrtl.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "reload.h"
#include "recog.h"
#include "alloc-pool.h"
#include "cselib.h"
#include "diagnostic-core.h"
#include "except.h"
#include "target.h"
#include "tree-pass.h"
#include "dbgcnt.h"

static int reload_cse_noop_set_p (rtx);
static bool reload_cse_simplify (rtx_insn *, rtx);
static void reload_cse_regs_1 (void);
static int reload_cse_simplify_set (rtx, rtx_insn *);
static int reload_cse_simplify_operands (rtx_insn *, rtx);

static void reload_combine (void);
static void reload_combine_note_use (rtx *, rtx_insn *, int, rtx);
static void reload_combine_note_store (rtx, const_rtx, void *);

static bool reload_cse_move2add (rtx_insn *);
static void move2add_note_store (rtx, const_rtx, void *);

/* Call cse / combine like post-reload optimization phases.
   FIRST is the first instruction.  */

static void
reload_cse_regs (rtx_insn *first ATTRIBUTE_UNUSED)
{
  bool moves_converted;
  reload_cse_regs_1 ();
  reload_combine ();
  moves_converted = reload_cse_move2add (first);
  if (flag_expensive_optimizations)
    {
      if (moves_converted)
        reload_combine ();
      reload_cse_regs_1 ();
    }
}

/* See whether a single set SET is a noop.  */
static int
reload_cse_noop_set_p (rtx set)
{
  if (cselib_reg_set_mode (SET_DEST (set)) != GET_MODE (SET_DEST (set)))
    return 0;

  return rtx_equal_for_cselib_p (SET_DEST (set), SET_SRC (set));
}

/* Try to simplify INSN.  Return true if the CFG may have changed.  */
static bool
reload_cse_simplify (rtx_insn *insn, rtx testreg)
{
  rtx body = PATTERN (insn);
  basic_block insn_bb = BLOCK_FOR_INSN (insn);
  unsigned insn_bb_succs = EDGE_COUNT (insn_bb->succs);

  if (GET_CODE (body) == SET)
    {
      int count = 0;

      /* Simplify even if we may think it is a no-op.
         We may think a memory load of a value smaller than WORD_SIZE
         is redundant because we haven't taken into account possible
         implicit extension.  reload_cse_simplify_set() will bring
         this out, so it's safer to simplify before we delete.  */
      count += reload_cse_simplify_set (body, insn);

      if (!count && reload_cse_noop_set_p (body))
        {
          rtx value = SET_DEST (body);
          if (REG_P (value)
              && ! REG_FUNCTION_VALUE_P (value))
            value = 0;
          if (check_for_inc_dec (insn))
            delete_insn_and_edges (insn);
          /* We're done with this insn.  */
          goto done;
        }

      if (count > 0)
        apply_change_group ();
      else
        reload_cse_simplify_operands (insn, testreg);
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      int count = 0;
      rtx value = NULL_RTX;

      /* Registers mentioned in the clobber list for an asm cannot be reused
         within the body of the asm.  Invalidate those registers now so that
         we don't try to substitute values for them.  */
      if (asm_noperands (body) >= 0)
        {
          for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
            {
              rtx part = XVECEXP (body, 0, i);
              if (GET_CODE (part) == CLOBBER && REG_P (XEXP (part, 0)))
                cselib_invalidate_rtx (XEXP (part, 0));
            }
        }

      /* If every action in a PARALLEL is a noop, we can delete
         the entire PARALLEL.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
        {
          rtx part = XVECEXP (body, 0, i);
          if (GET_CODE (part) == SET)
            {
              if (! reload_cse_noop_set_p (part))
                break;
              if (REG_P (SET_DEST (part))
                  && REG_FUNCTION_VALUE_P (SET_DEST (part)))
                {
                  if (value)
                    break;
                  value = SET_DEST (part);
                }
            }
          else if (GET_CODE (part) != CLOBBER)
            break;
        }

      if (i < 0)
        {
          if (check_for_inc_dec (insn))
            delete_insn_and_edges (insn);
          /* We're done with this insn.  */
          goto done;
        }

      /* It's not a no-op, but we can try to simplify it.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
        if (GET_CODE (XVECEXP (body, 0, i)) == SET)
          count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);

      if (count > 0)
        apply_change_group ();
      else
        reload_cse_simplify_operands (insn, testreg);
    }

 done:
  return (EDGE_COUNT (insn_bb->succs) != insn_bb_succs);
}

/* Do a very simple CSE pass over the hard registers.

   This function detects no-op moves where we happened to assign two
   different pseudo-registers to the same hard register, and then
   copied one to the other.  Reload will generate a useless
   instruction copying a register to itself.

   This function also detects cases where we load a value from memory
   into two different registers, and (if memory is more expensive than
   registers) changes it to simply copy the first register into the
   second register.

   Another optimization is performed that scans the operands of each
   instruction to see whether the value is already available in a
   hard register.  It then replaces the operand with the hard register
   if possible, much like an optional reload would.  */

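/* As an illustrative sketch (with made-up register numbers, not taken
   from the original sources): after reload, the same value loaded twice
   from memory might appear as

       (set (reg:SI 0) (mem:SI (reg:SI 5)))
       (set (reg:SI 1) (mem:SI (reg:SI 5)))

   and, when a register copy is cheaper than the second load, this pass
   can rewrite the second insn as

       (set (reg:SI 1) (reg:SI 0))  */
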
static void
reload_cse_regs_1 (void)
{
  bool cfg_changed = false;
  basic_block bb;
  rtx_insn *insn;
  rtx testreg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);

  cselib_init (CSELIB_RECORD_MEMORY);
  init_alias_analysis ();

  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
      {
        if (INSN_P (insn))
          cfg_changed |= reload_cse_simplify (insn, testreg);

        cselib_process_insn (insn);
      }

  /* Clean up.  */
  end_alias_analysis ();
  cselib_finish ();
  if (cfg_changed)
    cleanup_cfg (0);
}

/* Try to simplify a single SET instruction.  SET is the set pattern.
   INSN is the instruction it came from.
   This function only handles one case: if we set a register to a value
   which is not a register, we try to find that value in some other register
   and change the set into a register copy.  */

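/* A hypothetical example (placeholder register numbers): if cselib knows
   that (reg:SI 3) currently holds the constant 42, then

       (set (reg:SI 4) (const_int 42))

   can become the register copy

       (set (reg:SI 4) (reg:SI 3))

   whenever the copy is no more expensive than the constant load.  */
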
static int
reload_cse_simplify_set (rtx set, rtx_insn *insn)
{
  int did_change = 0;
  int dreg;
  rtx src;
  reg_class_t dclass;
  int old_cost;
  cselib_val *val;
  struct elt_loc_list *l;
#ifdef LOAD_EXTEND_OP
  enum rtx_code extend_op = UNKNOWN;
#endif
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));

  dreg = true_regnum (SET_DEST (set));
  if (dreg < 0)
    return 0;

  src = SET_SRC (set);
  if (side_effects_p (src) || true_regnum (src) >= 0)
    return 0;

  dclass = REGNO_REG_CLASS (dreg);

#ifdef LOAD_EXTEND_OP
  /* When replacing a memory with a register, we need to honor assumptions
     that combine made wrt the contents of sign bits.  We'll do this by
     generating an extend instruction instead of a reg->reg copy.  Thus
     the destination must be a register that we can widen.  */
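  /* Illustrative sketch, with hypothetical modes and register numbers: on
     a target where LOAD_EXTEND_OP (QImode) == ZERO_EXTEND, a QImode load
     such as

         (set (reg:QI 0) (mem:QI (reg:SI 1)))

     implicitly zero-extends to word_mode, so when we substitute a register
     for the memory operand we widen the destination and make the extension
     explicit:

         (set (reg:SI 0) (zero_extend:SI (reg:QI 2)))  */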
  if (MEM_P (src)
      && GET_MODE_BITSIZE (GET_MODE (src)) < BITS_PER_WORD
      && (extend_op = LOAD_EXTEND_OP (GET_MODE (src))) != UNKNOWN
      && !REG_P (SET_DEST (set)))
    return 0;
#endif

  val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0, VOIDmode);
  if (! val)
    return 0;

  /* If memory loads are cheaper than register copies, don't change them.  */
  if (MEM_P (src))
    old_cost = memory_move_cost (GET_MODE (src), dclass, true);
  else if (REG_P (src))
    old_cost = register_move_cost (GET_MODE (src),
                                   REGNO_REG_CLASS (REGNO (src)), dclass);
  else
    old_cost = set_src_cost (src, speed);

  for (l = val->locs; l; l = l->next)
    {
      rtx this_rtx = l->loc;
      int this_cost;

      if (CONSTANT_P (this_rtx) && ! references_value_p (this_rtx, 0))
        {
#ifdef LOAD_EXTEND_OP
          if (extend_op != UNKNOWN)
            {
              wide_int result;

              if (!CONST_SCALAR_INT_P (this_rtx))
                continue;

              switch (extend_op)
                {
                case ZERO_EXTEND:
                  result = wide_int::from (std::make_pair (this_rtx,
                                                           GET_MODE (src)),
                                           BITS_PER_WORD, UNSIGNED);
                  break;
                case SIGN_EXTEND:
                  result = wide_int::from (std::make_pair (this_rtx,
                                                           GET_MODE (src)),
                                           BITS_PER_WORD, SIGNED);
                  break;
                default:
                  gcc_unreachable ();
                }
              this_rtx = immed_wide_int_const (result, word_mode);
            }
#endif
          this_cost = set_src_cost (this_rtx, speed);
        }
      else if (REG_P (this_rtx))
        {
#ifdef LOAD_EXTEND_OP
          if (extend_op != UNKNOWN)
            {
              this_rtx = gen_rtx_fmt_e (extend_op, word_mode, this_rtx);
              this_cost = set_src_cost (this_rtx, speed);
            }
          else
#endif
            this_cost = register_move_cost (GET_MODE (this_rtx),
                                            REGNO_REG_CLASS (REGNO (this_rtx)),
                                            dclass);
        }
      else
        continue;

      /* If equal costs, prefer registers over anything else.  That
         tends to lead to smaller instructions on some machines.  */
      if (this_cost < old_cost
          || (this_cost == old_cost
              && REG_P (this_rtx)
              && !REG_P (SET_SRC (set))))
        {
#ifdef LOAD_EXTEND_OP
          if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) < BITS_PER_WORD
              && extend_op != UNKNOWN
#ifdef CANNOT_CHANGE_MODE_CLASS
              && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
                                            word_mode,
                                            REGNO_REG_CLASS (REGNO (SET_DEST (set))))
#endif
              )
            {
              rtx wide_dest = gen_rtx_REG (word_mode, REGNO (SET_DEST (set)));
              ORIGINAL_REGNO (wide_dest) = ORIGINAL_REGNO (SET_DEST (set));
              validate_change (insn, &SET_DEST (set), wide_dest, 1);
            }
#endif

          validate_unshare_change (insn, &SET_SRC (set), this_rtx, 1);
          old_cost = this_cost, did_change = 1;
        }
    }

  return did_change;
}

/* Try to replace operands in INSN with equivalent values that are already
   in registers.  This can be viewed as optional reloading.

   For each non-register operand in the insn, see if any hard regs are
   known to be equivalent to that operand.  Record the alternatives which
   can accept these hard registers.  Among all alternatives, select the
   ones which are better or equal to the one currently matching, where
   "better" is in terms of '?' and '!' constraints.  Among the remaining
   alternatives, select the one which replaces most operands with
   hard registers.  */

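/* A small hedged illustration (register numbers are made up): given

       (set (reg:SI 0) (plus:SI (reg:SI 0) (mem:SI (reg:SI 6))))

   and the knowledge that (reg:SI 2) holds the same value as the memory
   operand, an alternative that accepts a register lets us substitute

       (set (reg:SI 0) (plus:SI (reg:SI 0) (reg:SI 2)))

   much as an optional reload would have done.  */
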
static int
reload_cse_simplify_operands (rtx_insn *insn, rtx testreg)
{
  int i, j;

  /* For each operand, all registers that are equivalent to it.  */
  HARD_REG_SET equiv_regs[MAX_RECOG_OPERANDS];

  const char *constraints[MAX_RECOG_OPERANDS];

  /* Vector recording how bad an alternative is.  */
  int *alternative_reject;
  /* Vector recording how many registers can be introduced by choosing
     this alternative.  */
  int *alternative_nregs;
  /* Array of vectors recording, for each operand and each alternative,
     which hard register to substitute, or -1 if the operand should be
     left as it is.  */
  int *op_alt_regno[MAX_RECOG_OPERANDS];
  /* Array of alternatives, sorted in order of decreasing desirability.  */
  int *alternative_order;

  extract_constrain_insn (insn);

  if (recog_data.n_alternatives == 0 || recog_data.n_operands == 0)
    return 0;

  alternative_reject = XALLOCAVEC (int, recog_data.n_alternatives);
  alternative_nregs = XALLOCAVEC (int, recog_data.n_alternatives);
  alternative_order = XALLOCAVEC (int, recog_data.n_alternatives);
  memset (alternative_reject, 0, recog_data.n_alternatives * sizeof (int));
  memset (alternative_nregs, 0, recog_data.n_alternatives * sizeof (int));

  /* For each operand, find out which regs are equivalent.  */
  for (i = 0; i < recog_data.n_operands; i++)
    {
      cselib_val *v;
      struct elt_loc_list *l;
      rtx op;

      CLEAR_HARD_REG_SET (equiv_regs[i]);

      /* cselib blows up on CODE_LABELs.  Trying to fix that doesn't seem
         right, so avoid the problem here.  Likewise if we have a constant
         and the insn pattern doesn't tell us the mode we need.  */
      if (LABEL_P (recog_data.operand[i])
          || (CONSTANT_P (recog_data.operand[i])
              && recog_data.operand_mode[i] == VOIDmode))
        continue;

      op = recog_data.operand[i];
#ifdef LOAD_EXTEND_OP
      if (MEM_P (op)
          && GET_MODE_BITSIZE (GET_MODE (op)) < BITS_PER_WORD
          && LOAD_EXTEND_OP (GET_MODE (op)) != UNKNOWN)
        {
          rtx set = single_set (insn);

          /* We might have multiple sets, some of which do implicit
             extension.  Punt on this for now.  */
          if (! set)
            continue;
          /* If the destination is also a MEM or a STRICT_LOW_PART, no
             extension applies.
             Also, if there is an explicit extension, we don't have to
             worry about an implicit one.  */
          else if (MEM_P (SET_DEST (set))
                   || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART
                   || GET_CODE (SET_SRC (set)) == ZERO_EXTEND
                   || GET_CODE (SET_SRC (set)) == SIGN_EXTEND)
            ; /* Continue ordinary processing.  */
#ifdef CANNOT_CHANGE_MODE_CLASS
          /* If the register cannot change mode to word_mode, it follows that
             it cannot have been used in word_mode.  */
          else if (REG_P (SET_DEST (set))
                   && CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
                                                word_mode,
                                                REGNO_REG_CLASS (REGNO (SET_DEST (set)))))
            ; /* Continue ordinary processing.  */
#endif
          /* If this is a straight load, make the extension explicit.  */
          else if (REG_P (SET_DEST (set))
                   && recog_data.n_operands == 2
                   && SET_SRC (set) == op
                   && SET_DEST (set) == recog_data.operand[1-i])
            {
              validate_change (insn, recog_data.operand_loc[i],
                               gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (op)),
                                              word_mode, op),
                               1);
              validate_change (insn, recog_data.operand_loc[1-i],
                               gen_rtx_REG (word_mode, REGNO (SET_DEST (set))),
                               1);
              if (! apply_change_group ())
                return 0;
              return reload_cse_simplify_operands (insn, testreg);
            }
          else
            /* ??? There might be arithmetic operations with memory that are
               safe to optimize, but is it worth the trouble?  */
            continue;
        }
#endif /* LOAD_EXTEND_OP */
      if (side_effects_p (op))
        continue;
      v = cselib_lookup (op, recog_data.operand_mode[i], 0, VOIDmode);
      if (! v)
        continue;

      for (l = v->locs; l; l = l->next)
        if (REG_P (l->loc))
          SET_HARD_REG_BIT (equiv_regs[i], REGNO (l->loc));
    }

  alternative_mask preferred = get_preferred_alternatives (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      machine_mode mode;
      int regno;
      const char *p;

      op_alt_regno[i] = XALLOCAVEC (int, recog_data.n_alternatives);
      for (j = 0; j < recog_data.n_alternatives; j++)
        op_alt_regno[i][j] = -1;

      p = constraints[i] = recog_data.constraints[i];
      mode = recog_data.operand_mode[i];

      /* Add the reject values for each alternative given by the constraints
         for this operand.  */
      j = 0;
      while (*p != '\0')
        {
          char c = *p++;
          if (c == ',')
            j++;
          else if (c == '?')
            alternative_reject[j] += 3;
          else if (c == '!')
            alternative_reject[j] += 300;
        }

      /* We won't change operands which are already registers.  We
         also don't want to modify output operands.  */
      regno = true_regnum (recog_data.operand[i]);
      if (regno >= 0
          || constraints[i][0] == '='
          || constraints[i][0] == '+')
        continue;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        {
          enum reg_class rclass = NO_REGS;

          if (! TEST_HARD_REG_BIT (equiv_regs[i], regno))
            continue;

          set_mode_and_regno (testreg, mode, regno);

          /* We found a register equal to this operand.  Now look for all
             alternatives that can accept this register and have not been
             assigned a register they can use yet.  */
          j = 0;
          p = constraints[i];
          for (;;)
            {
              char c = *p;

              switch (c)
                {
                case 'g':
                  rclass = reg_class_subunion[rclass][GENERAL_REGS];
                  break;

                default:
                  rclass
                    = (reg_class_subunion
                       [rclass]
                       [reg_class_for_constraint (lookup_constraint (p))]);
                  break;

                case ',': case '\0':
                  /* See if REGNO fits this alternative, and set it up as the
                     replacement register if we don't have one for this
                     alternative yet and the operand being replaced is not
                     a cheap CONST_INT.  */
                  if (op_alt_regno[i][j] == -1
                      && TEST_BIT (preferred, j)
                      && reg_fits_class_p (testreg, rclass, 0, mode)
                      && (!CONST_INT_P (recog_data.operand[i])
                          || (set_src_cost (recog_data.operand[i],
                                            optimize_bb_for_speed_p
                                             (BLOCK_FOR_INSN (insn)))
                              > set_src_cost (testreg,
                                              optimize_bb_for_speed_p
                                               (BLOCK_FOR_INSN (insn))))))
                    {
                      alternative_nregs[j]++;
                      op_alt_regno[i][j] = regno;
                    }
                  j++;
                  rclass = NO_REGS;
                  break;
                }
              p += CONSTRAINT_LEN (c, p);

              if (c == '\0')
                break;
            }
        }
    }

  /* Record all alternatives which are better or equal to the currently
     matching one in the alternative_order array.  */
  for (i = j = 0; i < recog_data.n_alternatives; i++)
    if (alternative_reject[i] <= alternative_reject[which_alternative])
      alternative_order[j++] = i;
  recog_data.n_alternatives = j;

  /* Sort it.  Given a small number of alternatives, a dumb algorithm
     won't hurt too much.  */
  for (i = 0; i < recog_data.n_alternatives - 1; i++)
    {
      int best = i;
      int best_reject = alternative_reject[alternative_order[i]];
      int best_nregs = alternative_nregs[alternative_order[i]];

      for (j = i + 1; j < recog_data.n_alternatives; j++)
        {
          int this_reject = alternative_reject[alternative_order[j]];
          int this_nregs = alternative_nregs[alternative_order[j]];

          if (this_reject < best_reject
              || (this_reject == best_reject && this_nregs > best_nregs))
            {
              best = j;
              best_reject = this_reject;
              best_nregs = this_nregs;
            }
        }

      std::swap (alternative_order[best], alternative_order[i]);
    }

  /* Substitute the operands as determined by op_alt_regno for the best
     alternative.  */
  j = alternative_order[0];

  for (i = 0; i < recog_data.n_operands; i++)
    {
      machine_mode mode = recog_data.operand_mode[i];
      if (op_alt_regno[i][j] == -1)
        continue;

      validate_change (insn, recog_data.operand_loc[i],
                       gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
    }

  for (i = recog_data.n_dups - 1; i >= 0; i--)
    {
      int op = recog_data.dup_num[i];
      machine_mode mode = recog_data.operand_mode[op];

      if (op_alt_regno[op][j] == -1)
        continue;

      validate_change (insn, recog_data.dup_loc[i],
                       gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
    }

  return apply_change_group ();
}

/* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
   addressing now.
   This code might also be useful when reload gave up on reg+reg addressing
   because of clashes between the return register and INDEX_REG_CLASS.  */

/* The maximum number of uses of a register we can keep track of to
   replace them with reg+reg addressing.  */
#define RELOAD_COMBINE_MAX_USES 16

/* Describes a recorded use of a register.  */
struct reg_use
{
  /* The insn where a register has been used.  */
  rtx_insn *insn;
  /* Points to the memory reference enclosing the use, if any, NULL_RTX
     otherwise.  */
  rtx containing_mem;
  /* Location of the register within INSN.  */
  rtx *usep;
  /* The reverse uid of the insn.  */
  int ruid;
};

/* If the register is used in some unknown fashion, USE_INDEX is negative.
   If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
   indicates where it is first set or clobbered.
   Otherwise, USE_INDEX is the index of the last encountered use of the
   register (which is first among these we have seen since we scan backwards).
   USE_RUID indicates the first encountered, i.e. last, of these uses.
   If ALL_OFFSETS_MATCH is true, all encountered uses were inside a PLUS
   with a constant offset; OFFSET contains this constant in that case.
   STORE_RUID is always meaningful if we only want to use a value in a
   register in a different place: it denotes the next insn in the insn
   stream (i.e. the last encountered) that sets or clobbers the register.
   REAL_STORE_RUID is similar, but clobbers are ignored when updating it.  */
static struct
  {
    struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
    rtx offset;
    int use_index;
    int store_ruid;
    int real_store_ruid;
    int use_ruid;
    bool all_offsets_match;
  } reg_state[FIRST_PSEUDO_REGISTER];

/* Reverse linear uid.  This is increased in reload_combine while scanning
   the instructions from last to first.  It is used to set last_label_ruid
   and the store_ruid / use_ruid fields in reg_state.  */
static int reload_combine_ruid;

/* The RUID of the last label we encountered in reload_combine.  */
static int last_label_ruid;

/* The RUID of the last jump we encountered in reload_combine.  */
static int last_jump_ruid;

/* The register numbers of the first and last index register.  A value of
   -1 in LAST_INDEX_REG indicates that we've previously computed these
   values and found no suitable index registers.  */
static int first_index_reg = -1;
static int last_index_reg;

#define LABEL_LIVE(LABEL) \
  (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])

/* Subroutine of reload_combine_split_ruids, called to fix up a single
   ruid pointed to by *PRUID if it is higher than SPLIT_RUID.  */

static inline void
reload_combine_split_one_ruid (int *pruid, int split_ruid)
{
  if (*pruid > split_ruid)
    (*pruid)++;
}

/* Called when we insert a new insn in a position we've already passed in
   the scan.  Examine all our state, increasing all ruids that are higher
   than SPLIT_RUID by one in order to make room for a new insn.  */

static void
reload_combine_split_ruids (int split_ruid)
{
  unsigned i;

  reload_combine_split_one_ruid (&reload_combine_ruid, split_ruid);
  reload_combine_split_one_ruid (&last_label_ruid, split_ruid);
  reload_combine_split_one_ruid (&last_jump_ruid, split_ruid);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int j, idx = reg_state[i].use_index;
      reload_combine_split_one_ruid (&reg_state[i].use_ruid, split_ruid);
      reload_combine_split_one_ruid (&reg_state[i].store_ruid, split_ruid);
      reload_combine_split_one_ruid (&reg_state[i].real_store_ruid,
                                     split_ruid);
      if (idx < 0)
        continue;
      for (j = idx; j < RELOAD_COMBINE_MAX_USES; j++)
        {
          reload_combine_split_one_ruid (&reg_state[i].reg_use[j].ruid,
                                         split_ruid);
        }
    }
}

/* Called when we are about to rescan a previously encountered insn with
   reload_combine_note_use after modifying some part of it.  This clears all
   information about uses in that particular insn.  */

static void
reload_combine_purge_insn_uses (rtx_insn *insn)
{
  unsigned i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int j, k, idx = reg_state[i].use_index;
      if (idx < 0)
        continue;
      j = k = RELOAD_COMBINE_MAX_USES;
      while (j-- > idx)
        {
          if (reg_state[i].reg_use[j].insn != insn)
            {
              k--;
              if (k != j)
                reg_state[i].reg_use[k] = reg_state[i].reg_use[j];
            }
        }
      reg_state[i].use_index = k;
    }
}

/* Called when we need to forget about all uses of REGNO after an insn
   which is identified by RUID.  */

static void
reload_combine_purge_reg_uses_after_ruid (unsigned regno, int ruid)
{
  int j, k, idx = reg_state[regno].use_index;
  if (idx < 0)
    return;
  j = k = RELOAD_COMBINE_MAX_USES;
  while (j-- > idx)
    {
      if (reg_state[regno].reg_use[j].ruid >= ruid)
        {
          k--;
          if (k != j)
            reg_state[regno].reg_use[k] = reg_state[regno].reg_use[j];
        }
    }
  reg_state[regno].use_index = k;
}

/* Find the use of REGNO with the ruid that is highest among those
   lower than RUID_LIMIT, and return it if it is the only use of this
   reg in the insn.  Return NULL otherwise.  */

static struct reg_use *
reload_combine_closest_single_use (unsigned regno, int ruid_limit)
{
  int i, best_ruid = 0;
  int use_idx = reg_state[regno].use_index;
  struct reg_use *retval;

  if (use_idx < 0)
    return NULL;
  retval = NULL;
  for (i = use_idx; i < RELOAD_COMBINE_MAX_USES; i++)
    {
      struct reg_use *use = reg_state[regno].reg_use + i;
      int this_ruid = use->ruid;
      if (this_ruid >= ruid_limit)
        continue;
      if (this_ruid > best_ruid)
        {
          best_ruid = this_ruid;
          retval = use;
        }
      else if (this_ruid == best_ruid)
        retval = NULL;
    }
  if (last_label_ruid >= best_ruid)
    return NULL;
  return retval;
}

/* After we've moved an add insn, fix up any debug insns that occur
   between the old location of the add and the new location.  REG is
   the destination register of the add insn; REPLACEMENT is the
   SET_SRC of the add.  FROM and TO specify the range in which we
   should make this change on debug insns.  */

static void
fixup_debug_insns (rtx reg, rtx replacement, rtx_insn *from, rtx_insn *to)
{
  rtx_insn *insn;
  for (insn = from; insn != to; insn = NEXT_INSN (insn))
    {
      rtx t;

      if (!DEBUG_INSN_P (insn))
        continue;

      t = INSN_VAR_LOCATION_LOC (insn);
      t = simplify_replace_rtx (t, reg, replacement);
      validate_change (insn, &INSN_VAR_LOCATION_LOC (insn), t, 0);
    }
}

/* Subroutine of reload_combine_recognize_const_pattern.  Try to replace REG
   with SRC in the insn described by USE, taking costs into account.  Return
   true if we made the replacement.  */

static bool
try_replace_in_use (struct reg_use *use, rtx reg, rtx src)
{
  rtx_insn *use_insn = use->insn;
  rtx mem = use->containing_mem;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (use_insn));

  if (mem != NULL_RTX)
    {
      addr_space_t as = MEM_ADDR_SPACE (mem);
      rtx oldaddr = XEXP (mem, 0);
      rtx newaddr = NULL_RTX;
      int old_cost = address_cost (oldaddr, GET_MODE (mem), as, speed);
      int new_cost;

      newaddr = simplify_replace_rtx (oldaddr, reg, src);
      if (memory_address_addr_space_p (GET_MODE (mem), newaddr, as))
        {
          XEXP (mem, 0) = newaddr;
          new_cost = address_cost (newaddr, GET_MODE (mem), as, speed);
          XEXP (mem, 0) = oldaddr;
          if (new_cost <= old_cost
              && validate_change (use_insn,
                                  &XEXP (mem, 0), newaddr, 0))
            return true;
        }
    }
  else
    {
      rtx new_set = single_set (use_insn);
      if (new_set
          && REG_P (SET_DEST (new_set))
          && GET_CODE (SET_SRC (new_set)) == PLUS
          && REG_P (XEXP (SET_SRC (new_set), 0))
          && CONSTANT_P (XEXP (SET_SRC (new_set), 1)))
        {
          rtx new_src;
          int old_cost = set_src_cost (SET_SRC (new_set), speed);

          gcc_assert (rtx_equal_p (XEXP (SET_SRC (new_set), 0), reg));
          new_src = simplify_replace_rtx (SET_SRC (new_set), reg, src);

          if (set_src_cost (new_src, speed) <= old_cost
              && validate_change (use_insn, &SET_SRC (new_set),
                                  new_src, 0))
            return true;
        }
    }
  return false;
}

/* Called by reload_combine when scanning INSN.  This function tries to detect
   patterns where a constant is added to a register, and the result is used
   in an address.
   Return true if no further processing is needed on INSN; false if it wasn't
   recognized and should be handled normally.  */

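/* For instance (an illustrative sketch, register numbers made up):

       (set (reg:SI 1) (plus:SI (reg:SI 1) (const_int 4)))
       ...
       ... (mem:SI (reg:SI 1)) ...

   can often be rewritten so that the use becomes

       ... (mem:SI (plus:SI (reg:SI 1) (const_int 4))) ...

   with the add insn moved down past the use or deleted entirely.  */
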
static bool
reload_combine_recognize_const_pattern (rtx_insn *insn)
{
  int from_ruid = reload_combine_ruid;
  rtx set, pat, reg, src, addreg;
  unsigned int regno;
  struct reg_use *use;
  bool must_move_add;
  rtx_insn *add_moved_after_insn = NULL;
  int add_moved_after_ruid = 0;
  int clobbered_regno = -1;

  set = single_set (insn);
  if (set == NULL_RTX)
    return false;

  reg = SET_DEST (set);
  src = SET_SRC (set);
  if (!REG_P (reg)
      || REG_NREGS (reg) != 1
      || GET_MODE (reg) != Pmode
      || reg == stack_pointer_rtx)
    return false;

  regno = REGNO (reg);

  /* We look for a REG1 = REG2 + CONSTANT insn, followed by either
     uses of REG1 inside an address, or inside another add insn.  If
     possible and profitable, merge the addition into subsequent
     uses.  */
  if (GET_CODE (src) != PLUS
      || !REG_P (XEXP (src, 0))
      || !CONSTANT_P (XEXP (src, 1)))
    return false;

  addreg = XEXP (src, 0);
  must_move_add = rtx_equal_p (reg, addreg);

  pat = PATTERN (insn);
  if (must_move_add && set != pat)
    {
      /* We have to be careful when moving the add; apart from the
         single_set there may also be clobbers.  Recognize one special
         case, that of one clobber alongside the set (likely a clobber
         of the CC register).  */
      gcc_assert (GET_CODE (PATTERN (insn)) == PARALLEL);
      if (XVECLEN (pat, 0) != 2 || XVECEXP (pat, 0, 0) != set
          || GET_CODE (XVECEXP (pat, 0, 1)) != CLOBBER
          || !REG_P (XEXP (XVECEXP (pat, 0, 1), 0)))
        return false;
      clobbered_regno = REGNO (XEXP (XVECEXP (pat, 0, 1), 0));
    }

  do
    {
      use = reload_combine_closest_single_use (regno, from_ruid);

      if (use)
        /* Start the search for the next use from here.  */
        from_ruid = use->ruid;

      if (use && GET_MODE (*use->usep) == Pmode)
        {
          bool delete_add = false;
          rtx_insn *use_insn = use->insn;
          int use_ruid = use->ruid;

          /* Avoid moving the add insn past a jump.  */
          if (must_move_add && use_ruid <= last_jump_ruid)
            break;

          /* If the add clobbers another hard reg in parallel, don't move
             it past a real set of this hard reg.  */
          if (must_move_add && clobbered_regno >= 0
              && reg_state[clobbered_regno].real_store_ruid >= use_ruid)
            break;

          /* Do not separate cc0 setter and cc0 user on HAVE_cc0 targets.  */
          if (HAVE_cc0 && must_move_add && sets_cc0_p (PATTERN (use_insn)))
            break;

          gcc_assert (reg_state[regno].store_ruid <= use_ruid);
          /* Avoid moving a use of ADDREG past a point where it is stored.  */
          if (reg_state[REGNO (addreg)].store_ruid > use_ruid)
            break;

          /* We also must not move the addition past an insn that sets
             the same register, unless we can combine two add insns.  */
          if (must_move_add && reg_state[regno].store_ruid == use_ruid)
            {
              if (use->containing_mem == NULL_RTX)
                delete_add = true;
              else
                break;
            }

          if (try_replace_in_use (use, reg, src))
            {
              reload_combine_purge_insn_uses (use_insn);
              reload_combine_note_use (&PATTERN (use_insn), use_insn,
                                       use_ruid, NULL_RTX);

              if (delete_add)
                {
                  fixup_debug_insns (reg, src, insn, use_insn);
                  delete_insn (insn);
                  return true;
                }
              if (must_move_add)
                {
                  add_moved_after_insn = use_insn;
                  add_moved_after_ruid = use_ruid;
                }
              continue;
            }
        }
      /* If we get here, we couldn't handle this use.  */
      if (must_move_add)
        break;
    }
  while (use);

  if (!must_move_add || add_moved_after_insn == NULL_RTX)
    /* Process the add normally.  */
    return false;

  fixup_debug_insns (reg, src, insn, add_moved_after_insn);

  reorder_insns (insn, insn, add_moved_after_insn);
  reload_combine_purge_reg_uses_after_ruid (regno, add_moved_after_ruid);
  reload_combine_split_ruids (add_moved_after_ruid - 1);
  reload_combine_note_use (&PATTERN (insn), insn,
                           add_moved_after_ruid, NULL_RTX);
  reg_state[regno].store_ruid = add_moved_after_ruid;

  return true;
}

/* Called by reload_combine when scanning INSN.  Try to detect a pattern we
   can handle and improve.  Return true if no further processing is needed on
   INSN; false if it wasn't recognized and should be handled normally.  */

static bool
reload_combine_recognize_pattern (rtx_insn *insn)
{
  rtx set, reg, src;
  unsigned int regno;

  set = single_set (insn);
  if (set == NULL_RTX)
    return false;

  reg = SET_DEST (set);
  src = SET_SRC (set);
  if (!REG_P (reg) || REG_NREGS (reg) != 1)
    return false;

  regno = REGNO (reg);

  /* Look for (set (REGX) (CONST_INT))
     (set (REGX) (PLUS (REGX) (REGY)))
     ...
     ... (MEM (REGX)) ...
     and convert it to
     (set (REGZ) (CONST_INT))
     ...
     ... (MEM (PLUS (REGZ) (REGY)))... .

     First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
     and that we know all uses of REGX before it dies.
     Also, explicitly check that REGX != REGY; our life information
     does not yet show whether REGY changes in this insn.  */

  if (GET_CODE (src) == PLUS
      && reg_state[regno].all_offsets_match
      && last_index_reg != -1
      && REG_P (XEXP (src, 1))
      && rtx_equal_p (XEXP (src, 0), reg)
      && !rtx_equal_p (XEXP (src, 1), reg)
      && reg_state[regno].use_index >= 0
      && reg_state[regno].use_index < RELOAD_COMBINE_MAX_USES
      && last_label_ruid < reg_state[regno].use_ruid)
    {
      rtx base = XEXP (src, 1);
      rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
      rtx prev_set = prev ? single_set (prev) : NULL_RTX;
      rtx index_reg = NULL_RTX;
      rtx reg_sum = NULL_RTX;
      int i;

      /* Now we need to set INDEX_REG to an index register (denoted as
         REGZ in the illustration above) and REG_SUM to the expression
         register+register that we want to use to substitute uses of REG
         (typically in MEMs) with.  First check REG and BASE for being
         index registers; we can use them even if they are not dead.  */
      if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
          || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
                                REGNO (base)))
        {
          index_reg = reg;
          reg_sum = src;
        }
      else
        {
          /* Otherwise, look for a free index register.  Since we have
             checked above that neither REG nor BASE are index registers,
             if we find anything at all, it will be different from these
             two registers.  */
          for (i = first_index_reg; i <= last_index_reg; i++)
            {
              if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i)
                  && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
                  && reg_state[i].store_ruid <= reg_state[regno].use_ruid
                  && (call_used_regs[i] || df_regs_ever_live_p (i))
                  && (!frame_pointer_needed || i != HARD_FRAME_POINTER_REGNUM)
                  && !fixed_regs[i] && !global_regs[i]
                  && hard_regno_nregs[i][GET_MODE (reg)] == 1
                  && targetm.hard_regno_scratch_ok (i))
                {
                  index_reg = gen_rtx_REG (GET_MODE (reg), i);
                  reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
                  break;
                }
            }
        }

      /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
         (REGY), i.e. BASE, is not clobbered before the last use we'll
         create.  */
      if (reg_sum
          && prev_set
          && CONST_INT_P (SET_SRC (prev_set))
          && rtx_equal_p (SET_DEST (prev_set), reg)
          && (reg_state[REGNO (base)].store_ruid
              <= reg_state[regno].use_ruid))
        {
          /* Change destination register and, if necessary, the constant
             value in PREV, the constant loading instruction.  */
          validate_change (prev, &SET_DEST (prev_set), index_reg, 1);
          if (reg_state[regno].offset != const0_rtx)
            validate_change (prev,
                             &SET_SRC (prev_set),
                             GEN_INT (INTVAL (SET_SRC (prev_set))
                                      + INTVAL (reg_state[regno].offset)),
                             1);

          /* Now for every use of REG that we have recorded, replace REG
             with REG_SUM.  */
          for (i = reg_state[regno].use_index;
               i < RELOAD_COMBINE_MAX_USES; i++)
            validate_unshare_change (reg_state[regno].reg_use[i].insn,
                                     reg_state[regno].reg_use[i].usep,
                                     /* Each change must have its own
                                        replacement.  */
                                     reg_sum, 1);

          if (apply_change_group ())
            {
              struct reg_use *lowest_ruid = NULL;

              /* For every new use of REG_SUM, we have to record the use
                 of BASE therein, i.e. operand 1.  */
              for (i = reg_state[regno].use_index;
                   i < RELOAD_COMBINE_MAX_USES; i++)
                {
                  struct reg_use *use = reg_state[regno].reg_use + i;
                  reload_combine_note_use (&XEXP (*use->usep, 1), use->insn,
                                           use->ruid, use->containing_mem);
                  if (lowest_ruid == NULL || use->ruid < lowest_ruid->ruid)
                    lowest_ruid = use;
                }

              fixup_debug_insns (reg, reg_sum, insn, lowest_ruid->insn);

              /* Delete the reg-reg addition.  */
              delete_insn (insn);

              if (reg_state[regno].offset != const0_rtx)
                /* Previous REG_EQUIV / REG_EQUAL notes for PREV
                   are now invalid.  */
                remove_reg_equal_equiv_notes (prev);

              reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
              return true;
            }
        }
    }
  return false;
}

static void
reload_combine (void)
{
  rtx_insn *insn, *prev;
  basic_block bb;
  unsigned int r;
  int min_labelno, n_labels;
  HARD_REG_SET ever_live_at_start, *label_live;

  /* To avoid wasting too much time later searching for an index register,
     determine the minimum and maximum index register numbers.  */
  if (INDEX_REG_CLASS == NO_REGS)
    last_index_reg = -1;
  else if (first_index_reg == -1 && last_index_reg == 0)
    {
      for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
        if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], r))
          {
            if (first_index_reg == -1)
              first_index_reg = r;

            last_index_reg = r;
          }

      /* If no index register is available, we can quit now.  Set LAST_INDEX_REG
         to -1 so we'll know to quit early the next time we get here.  */
      if (first_index_reg == -1)
        {
          last_index_reg = -1;
          return;
        }
    }

  /* Set up LABEL_LIVE and EVER_LIVE_AT_START.  The register lifetime
     information is a bit fuzzy immediately after reload, but it's
     still good enough to determine which registers are live at a jump
     destination.  */
  min_labelno = get_first_label_num ();
  n_labels = max_label_num () - min_labelno;
  label_live = XNEWVEC (HARD_REG_SET, n_labels);
  CLEAR_HARD_REG_SET (ever_live_at_start);

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      insn = BB_HEAD (bb);
      if (LABEL_P (insn))
        {
          HARD_REG_SET live;
          bitmap live_in = df_get_live_in (bb);

          REG_SET_TO_HARD_REG_SET (live, live_in);
          compute_use_by_pseudos (&live, live_in);
          COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
          IOR_HARD_REG_SET (ever_live_at_start, live);
        }
    }

  /* Initialize last_label_ruid, reload_combine_ruid and reg_state.  */
  last_label_ruid = last_jump_ruid = reload_combine_ruid = 0;
  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
    {
      reg_state[r].store_ruid = 0;
      reg_state[r].real_store_ruid = 0;
      if (fixed_regs[r])
        reg_state[r].use_index = -1;
      else
        reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
    }

  for (insn = get_last_insn (); insn; insn = prev)
    {
      bool control_flow_insn;
      rtx note;

      prev = PREV_INSN (insn);

      /* We cannot do our optimization across labels.  Invalidating all the use
         information we have would be costly, so we just note where the label
         is and then later disable any optimization that would cross it.  */
      if (LABEL_P (insn))
        last_label_ruid = reload_combine_ruid;
      else if (BARRIER_P (insn))
        {
          /* Crossing a barrier resets all the use information.  */
          for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
            if (! fixed_regs[r])
              reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
        }
      else if (INSN_P (insn) && volatile_insn_p (PATTERN (insn)))
        /* Optimizations across insns being marked as volatile must be
           prevented.  All the usage information is invalidated
           here.  */
        for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
          if (! fixed_regs[r]
              && reg_state[r].use_index != RELOAD_COMBINE_MAX_USES)
            reg_state[r].use_index = -1;

      if (! NONDEBUG_INSN_P (insn))
        continue;

      reload_combine_ruid++;

      control_flow_insn = control_flow_insn_p (insn);
      if (control_flow_insn)
        last_jump_ruid = reload_combine_ruid;

      if (reload_combine_recognize_const_pattern (insn)
          || reload_combine_recognize_pattern (insn))
        continue;

      note_stores (PATTERN (insn), reload_combine_note_store, NULL);

      if (CALL_P (insn))
        {
          rtx link;
          HARD_REG_SET used_regs;

          get_call_reg_set_usage (insn, &used_regs, call_used_reg_set);

          for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
            if (TEST_HARD_REG_BIT (used_regs, r))
              {
                reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
                reg_state[r].store_ruid = reload_combine_ruid;
              }

          for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
               link = XEXP (link, 1))
            {
              rtx setuse = XEXP (link, 0);
              rtx usage_rtx = XEXP (setuse, 0);
              if ((GET_CODE (setuse) == USE || GET_CODE (setuse) == CLOBBER)
                  && REG_P (usage_rtx))
                {
                  unsigned int end_regno = END_REGNO (usage_rtx);
                  for (unsigned int i = REGNO (usage_rtx); i < end_regno; ++i)
                    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
                      {
                        reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
                        reg_state[i].store_ruid = reload_combine_ruid;
                      }
                    else
                      reg_state[i].use_index = -1;
                }
            }
        }

      if (control_flow_insn && !ANY_RETURN_P (PATTERN (insn)))
        {
          /* Non-spill registers might be used at the call destination in
             some unknown fashion, so we have to mark the unknown use.  */
          HARD_REG_SET *live;

          if ((condjump_p (insn) || condjump_in_parallel_p (insn))
              && JUMP_LABEL (insn))
            {
              if (ANY_RETURN_P (JUMP_LABEL (insn)))
                live = NULL;
              else
                live = &LABEL_LIVE (JUMP_LABEL (insn));
            }
          else
            live = &ever_live_at_start;

          if (live)
            for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
              if (TEST_HARD_REG_BIT (*live, r))
                reg_state[r].use_index = -1;
        }

      reload_combine_note_use (&PATTERN (insn), insn, reload_combine_ruid,
                               NULL_RTX);

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
        {
          if (REG_NOTE_KIND (note) == REG_INC && REG_P (XEXP (note, 0)))
            {
              int regno = REGNO (XEXP (note, 0));
              reg_state[regno].store_ruid = reload_combine_ruid;
              reg_state[regno].real_store_ruid = reload_combine_ruid;
              reg_state[regno].use_index = -1;
            }
        }
    }

  free (label_live);
}

/* Check if DST is a register or a subreg of a register; if it is,
   update store_ruid, real_store_ruid and use_index in the reg_state
   structure accordingly.  Called via note_stores from reload_combine.  */

static void
reload_combine_note_store (rtx dst, const_rtx set, void *data ATTRIBUTE_UNUSED)
{
  int regno = 0;
  int i;
  machine_mode mode = GET_MODE (dst);

  if (GET_CODE (dst) == SUBREG)
    {
      regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
                                   GET_MODE (SUBREG_REG (dst)),
                                   SUBREG_BYTE (dst),
                                   GET_MODE (dst));
      dst = SUBREG_REG (dst);
    }

  /* Some targets do argument pushes without adding REG_INC notes.  */

  if (MEM_P (dst))
    {
      dst = XEXP (dst, 0);
      if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
          || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC
          || GET_CODE (dst) == PRE_MODIFY || GET_CODE (dst) == POST_MODIFY)
        {
          unsigned int end_regno = END_REGNO (XEXP (dst, 0));
          for (unsigned int i = REGNO (XEXP (dst, 0)); i < end_regno; ++i)
            {
              /* We could probably do better, but for now mark the register
                 as used in an unknown fashion and set/clobbered at this
                 insn.  */
              reg_state[i].use_index = -1;
              reg_state[i].store_ruid = reload_combine_ruid;
              reg_state[i].real_store_ruid = reload_combine_ruid;
            }
        }
      else
        return;
    }

  if (!REG_P (dst))
    return;
  regno += REGNO (dst);

  /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
     careful with registers / register parts that are not full words.
     Similarly for ZERO_EXTRACT.  */
  if (GET_CODE (SET_DEST (set)) == ZERO_EXTRACT
      || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART)
    {
      for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
        {
          reg_state[i].use_index = -1;
          reg_state[i].store_ruid = reload_combine_ruid;
          reg_state[i].real_store_ruid = reload_combine_ruid;
        }
    }
  else
    {
      for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
        {
          reg_state[i].store_ruid = reload_combine_ruid;
          if (GET_CODE (set) == SET)
            reg_state[i].real_store_ruid = reload_combine_ruid;
          reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
        }
    }
}

/* XP points to a piece of rtl that has to be checked for any uses of
   registers.
   *XP is the pattern of INSN, or a part of it.
   Called from reload_combine, and recursively by itself.  */
static void
reload_combine_note_use (rtx *xp, rtx_insn *insn, int ruid, rtx containing_mem)
{
  rtx x = *xp;
  enum rtx_code code = x->code;
  const char *fmt;
  int i, j;
  rtx offset = const0_rtx; /* For the REG case below.  */

  switch (code)
    {
    case SET:
      if (REG_P (SET_DEST (x)))
        {
          reload_combine_note_use (&SET_SRC (x), insn, ruid, NULL_RTX);
          return;
        }
      break;

    case USE:
      /* If this is the USE of a return value, we can't change it.  */
      if (REG_P (XEXP (x, 0)) && REG_FUNCTION_VALUE_P (XEXP (x, 0)))
        {
          /* Mark the return register as used in an unknown fashion.  */
          rtx reg = XEXP (x, 0);
          unsigned int end_regno = END_REGNO (reg);
          for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
            reg_state[regno].use_index = -1;
          return;
        }
      break;

    case CLOBBER:
      if (REG_P (SET_DEST (x)))
        {
          /* No spurious CLOBBERs of pseudo registers may remain.  */
          gcc_assert (REGNO (SET_DEST (x)) < FIRST_PSEUDO_REGISTER);
          return;
        }
      break;

    case PLUS:
      /* We are interested in (plus (reg) (const_int)) .  */
      if (!REG_P (XEXP (x, 0))
          || !CONST_INT_P (XEXP (x, 1)))
        break;
      offset = XEXP (x, 1);
      x = XEXP (x, 0);
      /* Fall through.  */
    case REG:
      {
        int regno = REGNO (x);
        int use_index;
        int nregs;

        /* No spurious USEs of pseudo registers may remain.  */
        gcc_assert (regno < FIRST_PSEUDO_REGISTER);

        nregs = REG_NREGS (x);

        /* We can't substitute into multi-hard-reg uses.  */
        if (nregs > 1)
          {
            while (--nregs >= 0)
              reg_state[regno + nregs].use_index = -1;
            return;
          }

        /* We may be called to update uses in previously seen insns.
           Don't add uses beyond the last store we saw.  */
        if (ruid < reg_state[regno].store_ruid)
          return;

        /* If this register is already used in some unknown fashion, we
           can't do anything.
           If we decrement the index from zero to -1, we can't store more
           uses, so this register becomes used in an unknown fashion.  */
        use_index = --reg_state[regno].use_index;
        if (use_index < 0)
          return;

        if (use_index == RELOAD_COMBINE_MAX_USES - 1)
          {
            /* This is the first use of this register we have seen since we
               marked it as dead.  */
            reg_state[regno].offset = offset;
            reg_state[regno].all_offsets_match = true;
            reg_state[regno].use_ruid = ruid;
          }
        else
          {
            if (reg_state[regno].use_ruid > ruid)
              reg_state[regno].use_ruid = ruid;

            if (! rtx_equal_p (offset, reg_state[regno].offset))
              reg_state[regno].all_offsets_match = false;
          }

        reg_state[regno].reg_use[use_index].insn = insn;
        reg_state[regno].reg_use[use_index].ruid = ruid;
        reg_state[regno].reg_use[use_index].containing_mem = containing_mem;
        reg_state[regno].reg_use[use_index].usep = xp;
        return;
      }

    case MEM:
      containing_mem = x;
      break;

    default:
      break;
    }

  /* Recursively process the components of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        reload_combine_note_use (&XEXP (x, i), insn, ruid, containing_mem);
      else if (fmt[i] == 'E')
        {
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            reload_combine_note_use (&XVECEXP (x, i, j), insn, ruid,
                                     containing_mem);
        }
    }
}

/* See if we can reduce the cost of a constant by replacing a move
   with an add.  We track situations in which a register is set to a
   constant or to a register plus a constant.  */
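/* An illustrative example (hypothetical values): if (reg:SI 1) is known
   to hold the constant 100, a later

       (set (reg:SI 1) (const_int 104))

   can be replaced by the usually cheaper

       (set (reg:SI 1) (plus:SI (reg:SI 1) (const_int 4)))  */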
1623/* We cannot do our optimization across labels. Invalidating all the
1624 information about register contents we have would be costly, so we
1625 use move2add_last_label_luid to note where the label is and then
1626 later disable any optimization that would cross it.
7beb0596
JZ
1627 reg_offset[n] / reg_base_reg[n] / reg_symbol_ref[n] / reg_mode[n]
1628 are only valid if reg_set_luid[n] is greater than
7894bc6b
JR
1629 move2add_last_label_luid.
1630 For a set that established a new (potential) base register with
1631 non-constant value, we use move2add_luid from the place where the
1632 setting insn is encountered; registers based off that base then
1633 get the same reg_set_luid. Constants all get
1634 move2add_last_label_luid + 1 as their reg_set_luid. */
static int reg_set_luid[FIRST_PSEUDO_REGISTER];

/* If reg_base_reg[n] is negative, register n has been set to
   reg_offset[n] or reg_symbol_ref[n] + reg_offset[n] in mode reg_mode[n].
   If reg_base_reg[n] is non-negative, register n has been set to the
   sum of reg_offset[n] and the value of register reg_base_reg[n]
   before reg_set_luid[n], calculated in mode reg_mode[n].
   For multi-hard-register registers, all but the first one are
   recorded as BLKmode in reg_mode.  Setting reg_mode to VOIDmode
   marks it as invalid.  */
static HOST_WIDE_INT reg_offset[FIRST_PSEUDO_REGISTER];
static int reg_base_reg[FIRST_PSEUDO_REGISTER];
static rtx reg_symbol_ref[FIRST_PSEUDO_REGISTER];
static machine_mode reg_mode[FIRST_PSEUDO_REGISTER];

/* move2add_luid is linearly increased while scanning the instructions
   from first to last.  It is used to set reg_set_luid in
   reload_cse_move2add and move2add_note_store.  */
static int move2add_luid;

/* move2add_last_label_luid is set whenever a label is found.  Labels
   invalidate all previously collected reg_offset data.  */
static int move2add_last_label_luid;

/* ??? We don't know how zero / sign extension is handled, hence we
   can't go from a narrower to a wider mode.  */
#define MODES_OK_FOR_MOVE2ADD(OUTMODE, INMODE) \
  (GET_MODE_SIZE (OUTMODE) == GET_MODE_SIZE (INMODE) \
   || (GET_MODE_SIZE (OUTMODE) <= GET_MODE_SIZE (INMODE) \
       && TRULY_NOOP_TRUNCATION_MODES_P (OUTMODE, INMODE)))
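
/* For example, a value recorded for (reg:SI n) may be reused as
   (reg:QI n) on a target where truncation from SImode to QImode is a
   no-op, but QImode knowledge is never widened to SImode, since we do
   not know how the upper bits would have been extended.  */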

/* Record that REG is being set to a value with the mode of REG.  */

static void
move2add_record_mode (rtx reg)
{
  int regno, nregs;
  machine_mode mode = GET_MODE (reg);

  if (GET_CODE (reg) == SUBREG)
    {
      regno = subreg_regno (reg);
      nregs = subreg_nregs (reg);
    }
  else if (REG_P (reg))
    {
      regno = REGNO (reg);
      nregs = REG_NREGS (reg);
    }
  else
    gcc_unreachable ();
  for (int i = nregs - 1; i > 0; i--)
    reg_mode[regno + i] = BLKmode;
  reg_mode[regno] = mode;
}
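
/* E.g. on a target where DImode occupies two hard registers, recording
   a DImode value in hard register 0 sets reg_mode[0] = DImode and
   reg_mode[1] = BLKmode; move2add_valid_value_p later relies on the
   BLKmode marker to see that register 1 holds no independent value.  */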

/* Record that REG is being set to the sum of SYM and OFF.  */

static void
move2add_record_sym_value (rtx reg, rtx sym, rtx off)
{
  int regno = REGNO (reg);

  move2add_record_mode (reg);
  reg_set_luid[regno] = move2add_luid;
  reg_base_reg[regno] = -1;
  reg_symbol_ref[regno] = sym;
  reg_offset[regno] = INTVAL (off);
}

/* Check if REGNO contains a valid value in MODE.  */

static bool
move2add_valid_value_p (int regno, machine_mode mode)
{
  if (reg_set_luid[regno] <= move2add_last_label_luid)
    return false;

  if (mode != reg_mode[regno])
    {
      if (!MODES_OK_FOR_MOVE2ADD (mode, reg_mode[regno]))
	return false;
      /* The value loaded into regno in reg_mode[regno] is also valid in
	 mode after truncation only if (REG:mode regno) is the lowpart of
	 (REG:reg_mode[regno] regno).  Now, for big endian, the starting
	 regno of the lowpart might be different.  */
      int s_off = subreg_lowpart_offset (mode, reg_mode[regno]);
      s_off = subreg_regno_offset (regno, reg_mode[regno], s_off, mode);
      if (s_off != 0)
	/* We could in principle adjust regno, check reg_mode[regno] to be
	   BLKmode, and return s_off to the caller (vs. -1 for failure),
	   but we currently have no callers that could make use of this
	   information.  */
	return false;
    }

  for (int i = hard_regno_nregs[regno][mode] - 1; i > 0; i--)
    if (reg_mode[regno + i] != BLKmode)
      return false;
  return true;
}

/* This function is called with INSN that sets REG to (SYM + OFF),
   while REG is known to already have value (SYM + offset).
   This function tries to change INSN into an add instruction
   (set (REG) (plus (REG) (OFF - offset))) using the known value.
   It also updates the information about REG's known value.
   Return true if we made a change.  */

static bool
move2add_use_add2_insn (rtx reg, rtx sym, rtx off, rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  int regno = REGNO (reg);
  rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[regno],
			      GET_MODE (reg));
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  bool changed = false;

  /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
     use (set (reg) (reg)) instead.
     We don't delete this insn, nor do we convert it into a
     note, to avoid losing register notes or the return
     value flag.  jump2 already knows how to get rid of
     no-op moves.  */
  if (new_src == const0_rtx)
    {
      /* If the constants are different, this is a
	 truncation, that, if turned into (set (reg)
	 (reg)), would be discarded.  Maybe we should
	 try a truncMN pattern?  */
      if (INTVAL (off) == reg_offset[regno])
	changed = validate_change (insn, &SET_SRC (pat), reg, 0);
    }
  else
    {
      struct full_rtx_costs oldcst, newcst;
      rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src);

      get_full_set_rtx_cost (pat, &oldcst);
      SET_SRC (pat) = tem;
      get_full_set_rtx_cost (pat, &newcst);
      SET_SRC (pat) = src;

      if (costs_lt_p (&newcst, &oldcst, speed)
	  && have_add2_insn (reg, new_src))
	changed = validate_change (insn, &SET_SRC (pat), tem, 0);
      else if (sym == NULL_RTX && GET_MODE (reg) != BImode)
	{
	  machine_mode narrow_mode;
	  for (narrow_mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       narrow_mode != VOIDmode
		 && narrow_mode != GET_MODE (reg);
	       narrow_mode = GET_MODE_WIDER_MODE (narrow_mode))
	    {
	      if (have_insn_for (STRICT_LOW_PART, narrow_mode)
		  && ((reg_offset[regno] & ~GET_MODE_MASK (narrow_mode))
		      == (INTVAL (off) & ~GET_MODE_MASK (narrow_mode))))
		{
		  rtx narrow_reg = gen_lowpart_common (narrow_mode, reg);
		  rtx narrow_src = gen_int_mode (INTVAL (off),
						 narrow_mode);
		  rtx new_set
		    = gen_rtx_SET (gen_rtx_STRICT_LOW_PART (VOIDmode,
							    narrow_reg),
				   narrow_src);
		  get_full_set_rtx_cost (new_set, &newcst);
		  if (costs_lt_p (&newcst, &oldcst, speed))
		    {
		      changed = validate_change (insn, &PATTERN (insn),
						 new_set, 0);
		      if (changed)
			break;
		    }
		}
	    }
	}
    }
  move2add_record_sym_value (reg, sym, off);
  return changed;
}
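
/* An illustrative example (register numbers and constants made up):
   if reg 0 is known to hold 100, then

       (set (reg:SI 0) (const_int 108))

   can be rewritten by the function above as

       (set (reg:SI 0) (plus:SI (reg:SI 0) (const_int 8)))

   when the add is cheaper than the move and an add2 insn exists for
   SImode; failing that, the STRICT_LOW_PART fallback may emit
   (set (strict_low_part (reg:QI 0)) (const_int 108)), since the two
   constants agree outside the low QImode bits.  */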

/* This function is called with INSN that sets REG to (SYM + OFF),
   but REG doesn't have known value (SYM + offset).  This function
   tries to find another register which is known to already have
   value (SYM + offset) and change INSN into an add instruction
   (set (REG) (plus (the found register) (OFF - offset))) if such
   a register is found.  It also updates the information about
   REG's known value.
   Return true iff we made a change.  */

static bool
move2add_use_add3_insn (rtx reg, rtx sym, rtx off, rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  int regno = REGNO (reg);
  int min_regno = 0;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  int i;
  bool changed = false;
  struct full_rtx_costs oldcst, newcst, mincst;
  rtx plus_expr;

  init_costs_to_max (&mincst);
  get_full_set_rtx_cost (pat, &oldcst);

  plus_expr = gen_rtx_PLUS (GET_MODE (reg), reg, const0_rtx);
  SET_SRC (pat) = plus_expr;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (move2add_valid_value_p (i, GET_MODE (reg))
	&& reg_base_reg[i] < 0
	&& reg_symbol_ref[i] != NULL_RTX
	&& rtx_equal_p (sym, reg_symbol_ref[i]))
      {
	rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[i],
				    GET_MODE (reg));
	/* (set (reg) (plus (reg) (const_int 0))) is not canonical;
	   use (set (reg) (reg)) instead.
	   We don't delete this insn, nor do we convert it into a
	   note, to avoid losing register notes or the return
	   value flag.  jump2 already knows how to get rid of
	   no-op moves.  */
	if (new_src == const0_rtx)
	  {
	    init_costs_to_zero (&mincst);
	    min_regno = i;
	    break;
	  }
	else
	  {
	    XEXP (plus_expr, 1) = new_src;
	    get_full_set_rtx_cost (pat, &newcst);

	    if (costs_lt_p (&newcst, &mincst, speed))
	      {
		mincst = newcst;
		min_regno = i;
	      }
	  }
      }
  SET_SRC (pat) = src;

  if (costs_lt_p (&mincst, &oldcst, speed))
    {
      rtx tem;

      tem = gen_rtx_REG (GET_MODE (reg), min_regno);
      if (i != min_regno)
	{
	  rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[min_regno],
				      GET_MODE (reg));
	  tem = gen_rtx_PLUS (GET_MODE (reg), tem, new_src);
	}
      if (validate_change (insn, &SET_SRC (pat), tem, 0))
	changed = true;
    }
  reg_set_luid[regno] = move2add_luid;
  move2add_record_sym_value (reg, sym, off);
  return changed;
}
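
/* An illustrative example (register numbers and constants made up):
   if reg 1 is known to hold (symbol_ref "x") + 4, then

       (set (reg:SI 0) (const (plus (symbol_ref "x") (const_int 12))))

   can be rewritten by the function above as

       (set (reg:SI 0) (plus:SI (reg:SI 1) (const_int 8)))

   if that beats the cost of the original constant load.  */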

/* Convert move insns with constant inputs to additions if they are cheaper.
   Return true if any changes were made.  */
static bool
reload_cse_move2add (rtx_insn *first)
{
  int i;
  rtx_insn *insn;
  bool changed = false;

  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
    {
      reg_set_luid[i] = 0;
      reg_offset[i] = 0;
      reg_base_reg[i] = 0;
      reg_symbol_ref[i] = NULL_RTX;
      reg_mode[i] = VOIDmode;
    }

  move2add_last_label_luid = 0;
  move2add_luid = 2;
  for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
    {
      rtx pat, note;

      if (LABEL_P (insn))
	{
	  move2add_last_label_luid = move2add_luid;
	  /* We're going to increment move2add_luid twice after a
	     label, so that we can use move2add_last_label_luid + 1 as
	     the luid for constants.  */
	  move2add_luid++;
	  continue;
	}
      if (! INSN_P (insn))
	continue;
      pat = PATTERN (insn);
      /* For simplicity, we only perform this optimization on
	 straightforward SETs.  */
      if (GET_CODE (pat) == SET
	  && REG_P (SET_DEST (pat)))
	{
	  rtx reg = SET_DEST (pat);
	  int regno = REGNO (reg);
	  rtx src = SET_SRC (pat);

	  /* Check if we have valid information on the contents of this
	     register in the mode of REG.  */
	  if (move2add_valid_value_p (regno, GET_MODE (reg))
	      && dbg_cnt (cse2_move2add))
	    {
	      /* Try to transform (set (REGX) (CONST_INT A))
				  ...
				  (set (REGX) (CONST_INT B))
		 to
				  (set (REGX) (CONST_INT A))
				  ...
				  (set (REGX) (plus (REGX) (CONST_INT B-A)))
		 or
				  (set (REGX) (CONST_INT A))
				  ...
				  (set (STRICT_LOW_PART (REGX)) (CONST_INT B))
	      */

	      if (CONST_INT_P (src)
		  && reg_base_reg[regno] < 0
		  && reg_symbol_ref[regno] == NULL_RTX)
		{
		  changed |= move2add_use_add2_insn (reg, NULL_RTX, src, insn);
		  continue;
		}

	      /* Try to transform (set (REGX) (REGY))
				  (set (REGX) (PLUS (REGX) (CONST_INT A)))
				  ...
				  (set (REGX) (REGY))
				  (set (REGX) (PLUS (REGX) (CONST_INT B)))
		 to
				  (set (REGX) (REGY))
				  (set (REGX) (PLUS (REGX) (CONST_INT A)))
				  ...
				  (set (REGX) (plus (REGX) (CONST_INT B-A)))  */
	      else if (REG_P (src)
		       && reg_set_luid[regno] == reg_set_luid[REGNO (src)]
		       && reg_base_reg[regno] == reg_base_reg[REGNO (src)]
		       && move2add_valid_value_p (REGNO (src), GET_MODE (reg)))
		{
		  rtx_insn *next = next_nonnote_nondebug_insn (insn);
		  rtx set = NULL_RTX;
		  if (next)
		    set = single_set (next);
		  if (set
		      && SET_DEST (set) == reg
		      && GET_CODE (SET_SRC (set)) == PLUS
		      && XEXP (SET_SRC (set), 0) == reg
		      && CONST_INT_P (XEXP (SET_SRC (set), 1)))
		    {
		      rtx src3 = XEXP (SET_SRC (set), 1);
		      unsigned HOST_WIDE_INT added_offset = UINTVAL (src3);
		      HOST_WIDE_INT base_offset = reg_offset[REGNO (src)];
		      HOST_WIDE_INT regno_offset = reg_offset[regno];
		      rtx new_src =
			gen_int_mode (added_offset
				      + base_offset
				      - regno_offset,
				      GET_MODE (reg));
		      bool success = false;
		      bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));

		      if (new_src == const0_rtx)
			/* See above why we create (set (reg) (reg)) here.  */
			success
			  = validate_change (next, &SET_SRC (set), reg, 0);
		      else
			{
			  rtx old_src = SET_SRC (set);
			  struct full_rtx_costs oldcst, newcst;
			  rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src);

			  get_full_set_rtx_cost (set, &oldcst);
			  SET_SRC (set) = tem;
			  get_full_set_src_cost (tem, &newcst);
			  SET_SRC (set) = old_src;
			  costs_add_n_insns (&oldcst, 1);

			  if (costs_lt_p (&newcst, &oldcst, speed)
			      && have_add2_insn (reg, new_src))
			    {
			      rtx newpat = gen_rtx_SET (reg, tem);
			      success
				= validate_change (next, &PATTERN (next),
						   newpat, 0);
			    }
			}
		      if (success)
			delete_insn (insn);
		      changed |= success;
		      insn = next;
		      move2add_record_mode (reg);
		      reg_offset[regno]
			= trunc_int_for_mode (added_offset + base_offset,
					      GET_MODE (reg));
		      continue;
		    }
		}
	    }

	  /* Try to transform
	     (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
	     ...
	     (set (REGY) (CONST (PLUS (SYMBOL_REF) (CONST_INT B))))
	     to
	     (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
	     ...
	     (set (REGY) (CONST (PLUS (REGX) (CONST_INT B-A))))  */
	  if ((GET_CODE (src) == SYMBOL_REF
	       || (GET_CODE (src) == CONST
		   && GET_CODE (XEXP (src, 0)) == PLUS
		   && GET_CODE (XEXP (XEXP (src, 0), 0)) == SYMBOL_REF
		   && CONST_INT_P (XEXP (XEXP (src, 0), 1))))
	      && dbg_cnt (cse2_move2add))
	    {
	      rtx sym, off;

	      if (GET_CODE (src) == SYMBOL_REF)
		{
		  sym = src;
		  off = const0_rtx;
		}
	      else
		{
		  sym = XEXP (XEXP (src, 0), 0);
		  off = XEXP (XEXP (src, 0), 1);
		}

	      /* If the reg already contains the value which is sum of
		 sym and some constant value, we can use an add2 insn.  */
	      if (move2add_valid_value_p (regno, GET_MODE (reg))
		  && reg_base_reg[regno] < 0
		  && reg_symbol_ref[regno] != NULL_RTX
		  && rtx_equal_p (sym, reg_symbol_ref[regno]))
		changed |= move2add_use_add2_insn (reg, sym, off, insn);

	      /* Otherwise, we have to find a register whose value is sum
		 of sym and some constant value.  */
	      else
		changed |= move2add_use_add3_insn (reg, sym, off, insn);

	      continue;
	    }
	}

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	{
	  if (REG_NOTE_KIND (note) == REG_INC
	      && REG_P (XEXP (note, 0)))
	    {
	      /* Reset the information about this register.  */
	      int regno = REGNO (XEXP (note, 0));
	      if (regno < FIRST_PSEUDO_REGISTER)
		{
		  move2add_record_mode (XEXP (note, 0));
		  reg_mode[regno] = VOIDmode;
		}
	    }
	}
      note_stores (PATTERN (insn), move2add_note_store, insn);

      /* If INSN is a conditional branch, we try to extract an
	 implicit set out of it.  */
      if (any_condjump_p (insn))
	{
	  rtx cnd = fis_get_condition (insn);

	  if (cnd != NULL_RTX
	      && GET_CODE (cnd) == NE
	      && REG_P (XEXP (cnd, 0))
	      && !reg_set_p (XEXP (cnd, 0), insn)
	      /* The following two checks, which are also in
		 move2add_note_store, are intended to reduce the
		 number of calls to gen_rtx_SET to avoid memory
		 allocation if possible.  */
	      && SCALAR_INT_MODE_P (GET_MODE (XEXP (cnd, 0)))
	      && REG_NREGS (XEXP (cnd, 0)) == 1
	      && CONST_INT_P (XEXP (cnd, 1)))
	    {
	      rtx implicit_set =
		gen_rtx_SET (XEXP (cnd, 0), XEXP (cnd, 1));
	      move2add_note_store (SET_DEST (implicit_set), implicit_set, insn);
	    }
	}
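
      /* An illustrative example: after a branch such as

	     (set (pc) (if_then_else (ne (reg:SI 0) (const_int 5))
				     (label_ref 23) (pc)))

	 the fall-through path is only reached when reg 0 equals 5, so
	 recording the implicit (set (reg:SI 0) (const_int 5)) is safe
	 there.  The operands shown are made up for the example.  */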

      /* If this is a CALL_INSN, all call used registers are stored with
	 unknown values.  */
      if (CALL_P (insn))
	{
	  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
	    {
	      if (call_used_regs[i])
		/* Reset the information about this register.  */
		reg_mode[i] = VOIDmode;
	    }
	}
    }
  return changed;
}

/* SET is a SET or CLOBBER that sets DST.  DATA is the insn which
   contains SET.
   Update reg_set_luid, reg_offset and reg_base_reg accordingly.
   Called from reload_cse_move2add via note_stores.  */

static void
move2add_note_store (rtx dst, const_rtx set, void *data)
{
  rtx_insn *insn = (rtx_insn *) data;
  unsigned int regno = 0;
  machine_mode mode = GET_MODE (dst);

  /* Some targets do argument pushes without adding REG_INC notes.  */

  if (MEM_P (dst))
    {
      dst = XEXP (dst, 0);
      if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
	  || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC)
	reg_mode[REGNO (XEXP (dst, 0))] = VOIDmode;
      return;
    }

  if (GET_CODE (dst) == SUBREG)
    regno = subreg_regno (dst);
  else if (REG_P (dst))
    regno = REGNO (dst);
  else
    return;

  if (SCALAR_INT_MODE_P (mode)
      && GET_CODE (set) == SET)
    {
      rtx note, sym = NULL_RTX;
      rtx off;

      note = find_reg_equal_equiv_note (insn);
      if (note && GET_CODE (XEXP (note, 0)) == SYMBOL_REF)
	{
	  sym = XEXP (note, 0);
	  off = const0_rtx;
	}
      else if (note && GET_CODE (XEXP (note, 0)) == CONST
	       && GET_CODE (XEXP (XEXP (note, 0), 0)) == PLUS
	       && GET_CODE (XEXP (XEXP (XEXP (note, 0), 0), 0)) == SYMBOL_REF
	       && CONST_INT_P (XEXP (XEXP (XEXP (note, 0), 0), 1)))
	{
	  sym = XEXP (XEXP (XEXP (note, 0), 0), 0);
	  off = XEXP (XEXP (XEXP (note, 0), 0), 1);
	}

      if (sym != NULL_RTX)
	{
	  move2add_record_sym_value (dst, sym, off);
	  return;
	}
    }

  if (SCALAR_INT_MODE_P (mode)
      && GET_CODE (set) == SET
      && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
    {
      rtx src = SET_SRC (set);
      rtx base_reg;
      unsigned HOST_WIDE_INT offset;
      int base_regno;

      switch (GET_CODE (src))
	{
	case PLUS:
	  if (REG_P (XEXP (src, 0)))
	    {
	      base_reg = XEXP (src, 0);

	      if (CONST_INT_P (XEXP (src, 1)))
		offset = UINTVAL (XEXP (src, 1));
	      else if (REG_P (XEXP (src, 1))
		       && move2add_valid_value_p (REGNO (XEXP (src, 1)), mode))
		{
		  if (reg_base_reg[REGNO (XEXP (src, 1))] < 0
		      && reg_symbol_ref[REGNO (XEXP (src, 1))] == NULL_RTX)
		    offset = reg_offset[REGNO (XEXP (src, 1))];
		  /* Maybe the first register is known to be a
		     constant.  */
		  else if (move2add_valid_value_p (REGNO (base_reg), mode)
			   && reg_base_reg[REGNO (base_reg)] < 0
			   && reg_symbol_ref[REGNO (base_reg)] == NULL_RTX)
		    {
		      offset = reg_offset[REGNO (base_reg)];
		      base_reg = XEXP (src, 1);
		    }
		  else
		    goto invalidate;
		}
	      else
		goto invalidate;

	      break;
	    }

	  goto invalidate;

	case REG:
	  base_reg = src;
	  offset = 0;
	  break;

	case CONST_INT:
	  /* Start tracking the register as a constant.  */
	  reg_base_reg[regno] = -1;
	  reg_symbol_ref[regno] = NULL_RTX;
	  reg_offset[regno] = INTVAL (SET_SRC (set));
	  /* We assign the same luid to all registers set to constants.  */
	  reg_set_luid[regno] = move2add_last_label_luid + 1;
	  move2add_record_mode (dst);
	  return;

	default:
	  goto invalidate;
	}

      base_regno = REGNO (base_reg);
      /* If information about the base register is not valid, set it
	 up as a new base register, pretending its value is known
	 starting from the current insn.  */
      if (!move2add_valid_value_p (base_regno, mode))
	{
	  reg_base_reg[base_regno] = base_regno;
	  reg_symbol_ref[base_regno] = NULL_RTX;
	  reg_offset[base_regno] = 0;
	  reg_set_luid[base_regno] = move2add_luid;
	  gcc_assert (GET_MODE (base_reg) == mode);
	  move2add_record_mode (base_reg);
	}

      /* Copy base information from our base register.  */
      reg_set_luid[regno] = reg_set_luid[base_regno];
      reg_base_reg[regno] = reg_base_reg[base_regno];
      reg_symbol_ref[regno] = reg_symbol_ref[base_regno];

      /* Compute the sum of the offsets or constants.  */
      reg_offset[regno]
	= trunc_int_for_mode (offset + reg_offset[base_regno], mode);

      move2add_record_mode (dst);
    }
  else
    {
    invalidate:
      /* Invalidate the contents of the register.  */
      move2add_record_mode (dst);
      reg_mode[regno] = VOIDmode;
    }
}
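
/* An illustrative example of the note handling above: a load such as

       (set (reg:SI 0) (mem:SI (reg:SI 2)))

   carrying a REG_EQUAL note (const (plus (symbol_ref "x") (const_int 4)))
   is recorded as reg 0 == "x" + 4; reload_cse_move2add can then turn a
   later load of "x" + 12 into (plus:SI (reg:SI 0) (const_int 8)).  */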
\f
namespace {

const pass_data pass_data_postreload_cse =
{
  RTL_PASS, /* type */
  "postreload", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_RELOAD_CSE_REGS, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_postreload_cse : public rtl_opt_pass
{
public:
  pass_postreload_cse (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_postreload_cse, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return (optimize > 0 && reload_completed); }

  virtual unsigned int execute (function *);

}; // class pass_postreload_cse

unsigned int
pass_postreload_cse::execute (function *fun)
{
  if (!dbg_cnt (postreload_cse))
    return 0;

  /* Do a very simple CSE pass over just the hard registers.  */
  reload_cse_regs (get_insns ());
  /* Reload_cse_regs can eliminate potentially-trapping MEMs.
     Remove any EH edges associated with them.  */
  if (fun->can_throw_non_call_exceptions
      && purge_all_dead_edges ())
    cleanup_cfg (0);

  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_postreload_cse (gcc::context *ctxt)
{
  return new pass_postreload_cse (ctxt);
}