]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/postreload.c
* config/i386/i386.md (*sibcall_memory): Rename from *sibcall_intern.
[thirdparty/gcc.git] / gcc / postreload.c
CommitLineData
8f8cadbc 1/* Perform simple optimizations to clean up the result of reload.
3aea1f79 2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
8f8cadbc 3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
8f8cadbc 9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
8f8cadbc 19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tm.h"
24
25#include "machmode.h"
26#include "hard-reg-set.h"
27#include "rtl.h"
28#include "tm_p.h"
29#include "obstack.h"
30#include "insn-config.h"
31#include "flags.h"
32#include "function.h"
33#include "expr.h"
34#include "optabs.h"
35#include "regs.h"
36#include "basic-block.h"
37#include "reload.h"
38#include "recog.h"
8f8cadbc 39#include "cselib.h"
0b205f4c 40#include "diagnostic-core.h"
8f8cadbc 41#include "except.h"
42#include "tree.h"
727047d0 43#include "target.h"
77fce4cd 44#include "tree-pass.h"
3072d30e 45#include "df.h"
46#include "dbgcnt.h"
8f8cadbc 47
/* Forward declarations for the helper routines of the post-reload
   CSE, reload_combine and move2add phases defined below.  */
static int reload_cse_noop_set_p (rtx);
static bool reload_cse_simplify (rtx, rtx);
static void reload_cse_regs_1 (void);
static int reload_cse_simplify_set (rtx, rtx);
static int reload_cse_simplify_operands (rtx, rtx);

static void reload_combine (void);
static void reload_combine_note_use (rtx *, rtx, int, rtx);
static void reload_combine_note_store (rtx, const_rtx, void *);

static bool reload_cse_move2add (rtx);
static void move2add_note_store (rtx, const_rtx, void *);
8f8cadbc 60
61/* Call cse / combine like post-reload optimization phases.
62 FIRST is the first instruction. */
98799adc 63
64static void
3ad4992f 65reload_cse_regs (rtx first ATTRIBUTE_UNUSED)
8f8cadbc 66{
d83ccc81 67 bool moves_converted;
26709122 68 reload_cse_regs_1 ();
8f8cadbc 69 reload_combine ();
d83ccc81 70 moves_converted = reload_cse_move2add (first);
8f8cadbc 71 if (flag_expensive_optimizations)
d83ccc81 72 {
73 if (moves_converted)
74 reload_combine ();
26709122 75 reload_cse_regs_1 ();
d83ccc81 76 }
8f8cadbc 77}
78
79/* See whether a single set SET is a noop. */
80static int
3ad4992f 81reload_cse_noop_set_p (rtx set)
8f8cadbc 82{
83 if (cselib_reg_set_mode (SET_DEST (set)) != GET_MODE (SET_DEST (set)))
84 return 0;
85
86 return rtx_equal_for_cselib_p (SET_DEST (set), SET_SRC (set));
87}
88
/* Try to simplify INSN.  TESTREG is a scratch REG rtx used when probing
   alternative operands.  Return true if the CFG may have changed (the
   edge count of INSN's block is compared before and after, since
   delete_insn_and_edges can remove edges).  */
static bool
reload_cse_simplify (rtx insn, rtx testreg)
{
  rtx body = PATTERN (insn);
  basic_block insn_bb = BLOCK_FOR_INSN (insn);
  unsigned insn_bb_succs = EDGE_COUNT (insn_bb->succs);

  if (GET_CODE (body) == SET)
    {
      int count = 0;

      /* Simplify even if we may think it is a no-op.
	 We may think a memory load of a value smaller than WORD_SIZE
	 is redundant because we haven't taken into account possible
	 implicit extension.  reload_cse_simplify_set() will bring
	 this out, so it's safer to simplify before we delete.  */
      count += reload_cse_simplify_set (body, insn);

      if (!count && reload_cse_noop_set_p (body))
	{
	  /* NOTE(review): VALUE is computed here but never used below;
	     this looks like a leftover from an earlier version that
	     preserved function-value registers — confirm before
	     removing.  */
	  rtx value = SET_DEST (body);
	  if (REG_P (value)
	      && ! REG_FUNCTION_VALUE_P (value))
	    value = 0;
	  /* Only delete if no auto-inc/dec side effect would be lost;
	     check_for_inc_dec returns false when deletion is unsafe.  */
	  if (check_for_inc_dec (insn))
	    delete_insn_and_edges (insn);
	  /* We're done with this insn.  */
	  goto done;
	}

      /* Commit the changes queued by reload_cse_simplify_set, or, if
	 none were made, try replacing operands with equivalent hard
	 registers.  */
      if (count > 0)
	apply_change_group ();
      else
	reload_cse_simplify_operands (insn, testreg);
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      int count = 0;
      rtx value = NULL_RTX;

      /* Registers mentioned in the clobber list for an asm cannot be reused
	 within the body of the asm.  Invalidate those registers now so that
	 we don't try to substitute values for them.  */
      if (asm_noperands (body) >= 0)
	{
	  for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	    {
	      rtx part = XVECEXP (body, 0, i);
	      if (GET_CODE (part) == CLOBBER && REG_P (XEXP (part, 0)))
		cselib_invalidate_rtx (XEXP (part, 0));
	    }
	}

      /* If every action in a PARALLEL is a noop, we can delete
	 the entire PARALLEL.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	{
	  rtx part = XVECEXP (body, 0, i);
	  if (GET_CODE (part) == SET)
	    {
	      if (! reload_cse_noop_set_p (part))
		break;
	      /* Allow at most one function-value register; a second one
		 makes the PARALLEL non-deletable.  */
	      if (REG_P (SET_DEST (part))
		  && REG_FUNCTION_VALUE_P (SET_DEST (part)))
		{
		  if (value)
		    break;
		  value = SET_DEST (part);
		}
	    }
	  else if (GET_CODE (part) != CLOBBER)
	    break;
	}

      /* I went negative only if every element was a redundant SET or a
	 CLOBBER, so the whole insn is dead.  */
      if (i < 0)
	{
	  if (check_for_inc_dec (insn))
	    delete_insn_and_edges (insn);
	  /* We're done with this insn.  */
	  goto done;
	}

      /* It's not a no-op, but we can try to simplify it.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	if (GET_CODE (XVECEXP (body, 0, i)) == SET)
	  count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);

      if (count > 0)
	apply_change_group ();
      else
	reload_cse_simplify_operands (insn, testreg);
    }

done:
  return (EDGE_COUNT (insn_bb->succs) != insn_bb_succs);
}
187
188/* Do a very simple CSE pass over the hard registers.
189
190 This function detects no-op moves where we happened to assign two
191 different pseudo-registers to the same hard register, and then
192 copied one to the other. Reload will generate a useless
193 instruction copying a register to itself.
194
195 This function also detects cases where we load a value from memory
196 into two different registers, and (if memory is more expensive than
197 registers) changes it to simply copy the first register into the
198 second register.
199
200 Another optimization is performed that scans the operands of each
201 instruction to see whether the value is already available in a
202 hard register. It then replaces the operand with the hard register
203 if possible, much like an optional reload would. */
204
205static void
26709122 206reload_cse_regs_1 (void)
8f8cadbc 207{
26709122 208 bool cfg_changed = false;
209 basic_block bb;
201f6961 210 rtx insn;
8f8cadbc 211 rtx testreg = gen_rtx_REG (VOIDmode, -1);
212
35af0188 213 cselib_init (CSELIB_RECORD_MEMORY);
8f8cadbc 214 init_alias_analysis ();
215
fc00614f 216 FOR_EACH_BB_FN (bb, cfun)
26709122 217 FOR_BB_INSNS (bb, insn)
218 {
219 if (INSN_P (insn))
220 cfg_changed |= reload_cse_simplify (insn, testreg);
8f8cadbc 221
26709122 222 cselib_process_insn (insn);
223 }
8f8cadbc 224
225 /* Clean up. */
226 end_alias_analysis ();
227 cselib_finish ();
26709122 228 if (cfg_changed)
229 cleanup_cfg (0);
8f8cadbc 230}
231
/* Try to simplify a single SET instruction.  SET is the set pattern.
   INSN is the instruction it came from.
   This function only handles one case: if we set a register to a value
   which is not a register, we try to find that value in some other register
   and change the set into a register copy.
   Returns nonzero if a replacement was queued via validate_change.  */

static int
reload_cse_simplify_set (rtx set, rtx insn)
{
  int did_change = 0;
  int dreg;
  rtx src;
  reg_class_t dclass;
  int old_cost;
  cselib_val *val;
  struct elt_loc_list *l;
#ifdef LOAD_EXTEND_OP
  /* Set to the implicit-extension code if replacing SRC requires an
     explicit extension; UNKNOWN otherwise.  */
  enum rtx_code extend_op = UNKNOWN;
#endif
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));

  dreg = true_regnum (SET_DEST (set));
  if (dreg < 0)
    return 0;

  /* Only handle sources that are not already registers and have no
     side effects.  */
  src = SET_SRC (set);
  if (side_effects_p (src) || true_regnum (src) >= 0)
    return 0;

  dclass = REGNO_REG_CLASS (dreg);

#ifdef LOAD_EXTEND_OP
  /* When replacing a memory with a register, we need to honor assumptions
     that combine made wrt the contents of sign bits.  We'll do this by
     generating an extend instruction instead of a reg->reg copy.  Thus
     the destination must be a register that we can widen.  */
  if (MEM_P (src)
      && GET_MODE_BITSIZE (GET_MODE (src)) < BITS_PER_WORD
      && (extend_op = LOAD_EXTEND_OP (GET_MODE (src))) != UNKNOWN
      && !REG_P (SET_DEST (set)))
    return 0;
#endif

  val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0, VOIDmode);
  if (! val)
    return 0;

  /* If memory loads are cheaper than register copies, don't change them.  */
  if (MEM_P (src))
    old_cost = memory_move_cost (GET_MODE (src), dclass, true);
  else if (REG_P (src))
    old_cost = register_move_cost (GET_MODE (src),
				   REGNO_REG_CLASS (REGNO (src)), dclass);
  else
    old_cost = set_src_cost (src, speed);

  /* Walk every location cselib knows to hold the same value and pick
     any that is cheaper than the current source.  */
  for (l = val->locs; l; l = l->next)
    {
      rtx this_rtx = l->loc;
      int this_cost;

      if (CONSTANT_P (this_rtx) && ! references_value_p (this_rtx, 0))
	{
#ifdef LOAD_EXTEND_OP
	  if (extend_op != UNKNOWN)
	    {
	      wide_int result;

	      if (!CONST_SCALAR_INT_P (this_rtx))
		continue;

	      /* Fold the implicit extension into the constant itself.  */
	      switch (extend_op)
		{
		case ZERO_EXTEND:
		  result = wide_int::from (std::make_pair (this_rtx,
							   GET_MODE (src)),
					   BITS_PER_WORD, UNSIGNED);
		  break;
		case SIGN_EXTEND:
		  result = wide_int::from (std::make_pair (this_rtx,
							   GET_MODE (src)),
					   BITS_PER_WORD, SIGNED);
		  break;
		default:
		  gcc_unreachable ();
		}
	      this_rtx = immed_wide_int_const (result, word_mode);
	    }
#endif
	  this_cost = set_src_cost (this_rtx, speed);
	}
      else if (REG_P (this_rtx))
	{
#ifdef LOAD_EXTEND_OP
	  if (extend_op != UNKNOWN)
	    {
	      this_rtx = gen_rtx_fmt_e (extend_op, word_mode, this_rtx);
	      this_cost = set_src_cost (this_rtx, speed);
	    }
	  else
#endif
	    this_cost = register_move_cost (GET_MODE (this_rtx),
					    REGNO_REG_CLASS (REGNO (this_rtx)),
					    dclass);
	}
      else
	continue;

      /* If equal costs, prefer registers over anything else.  That
	 tends to lead to smaller instructions on some machines.  */
      if (this_cost < old_cost
	  || (this_cost == old_cost
	      && REG_P (this_rtx)
	      && !REG_P (SET_SRC (set))))
	{
#ifdef LOAD_EXTEND_OP
	  /* When the replacement carries an implicit extension, widen
	     the destination to word_mode (unless the target forbids
	     the mode change for this register class).  */
	  if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) < BITS_PER_WORD
	      && extend_op != UNKNOWN
#ifdef CANNOT_CHANGE_MODE_CLASS
	      && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
					    word_mode,
					    REGNO_REG_CLASS (REGNO (SET_DEST (set))))
#endif
	      )
	    {
	      rtx wide_dest = gen_rtx_REG (word_mode, REGNO (SET_DEST (set)));
	      ORIGINAL_REGNO (wide_dest) = ORIGINAL_REGNO (SET_DEST (set));
	      validate_change (insn, &SET_DEST (set), wide_dest, 1);
	    }
#endif

	  /* Queue the replacement; the caller commits the whole group
	     with apply_change_group.  */
	  validate_unshare_change (insn, &SET_SRC (set), this_rtx, 1);
	  old_cost = this_cost, did_change = 1;
	}
    }

  return did_change;
}
370
/* Try to replace operands in INSN with equivalent values that are already
   in registers.  This can be viewed as optional reloading.

   For each non-register operand in the insn, see if any hard regs are
   known to be equivalent to that operand.  Record the alternatives which
   can accept these hard registers.  Among all alternatives, select the
   ones which are better or equal to the one currently matching, where
   "better" is in terms of '?' and '!' constraints.  Among the remaining
   alternatives, select the one which replaces most operands with
   hard registers.

   TESTREG is a scratch REG whose regno/mode are overwritten while probing
   candidates.  Returns the result of apply_change_group (nonzero on
   success), or 0 if nothing was changed.  */

static int
reload_cse_simplify_operands (rtx insn, rtx testreg)
{
  int i, j;

  /* For each operand, all registers that are equivalent to it.  */
  HARD_REG_SET equiv_regs[MAX_RECOG_OPERANDS];

  const char *constraints[MAX_RECOG_OPERANDS];

  /* Vector recording how bad an alternative is.  */
  int *alternative_reject;
  /* Vector recording how many registers can be introduced by choosing
     this alternative.  */
  int *alternative_nregs;
  /* Array of vectors recording, for each operand and each alternative,
     which hard register to substitute, or -1 if the operand should be
     left as it is.  */
  int *op_alt_regno[MAX_RECOG_OPERANDS];
  /* Array of alternatives, sorted in order of decreasing desirability.  */
  int *alternative_order;

  extract_insn (insn);

  if (recog_data.n_alternatives == 0 || recog_data.n_operands == 0)
    return 0;

  /* Figure out which alternative currently matches.  */
  if (! constrain_operands (1))
    fatal_insn_not_found (insn);

  alternative_reject = XALLOCAVEC (int, recog_data.n_alternatives);
  alternative_nregs = XALLOCAVEC (int, recog_data.n_alternatives);
  alternative_order = XALLOCAVEC (int, recog_data.n_alternatives);
  memset (alternative_reject, 0, recog_data.n_alternatives * sizeof (int));
  memset (alternative_nregs, 0, recog_data.n_alternatives * sizeof (int));

  /* For each operand, find out which regs are equivalent.  */
  for (i = 0; i < recog_data.n_operands; i++)
    {
      cselib_val *v;
      struct elt_loc_list *l;
      rtx op;

      CLEAR_HARD_REG_SET (equiv_regs[i]);

      /* cselib blows up on CODE_LABELs.  Trying to fix that doesn't seem
	 right, so avoid the problem here.  Likewise if we have a constant
	 and the insn pattern doesn't tell us the mode we need.  */
      if (LABEL_P (recog_data.operand[i])
	  || (CONSTANT_P (recog_data.operand[i])
	      && recog_data.operand_mode[i] == VOIDmode))
	continue;

      op = recog_data.operand[i];
#ifdef LOAD_EXTEND_OP
      /* A narrow memory load may be implicitly extended by the target;
	 substituting a register would lose that extension, so either
	 make it explicit or punt.  */
      if (MEM_P (op)
	  && GET_MODE_BITSIZE (GET_MODE (op)) < BITS_PER_WORD
	  && LOAD_EXTEND_OP (GET_MODE (op)) != UNKNOWN)
	{
	  rtx set = single_set (insn);

	  /* We might have multiple sets, some of which do implicit
	     extension.  Punt on this for now.  */
	  if (! set)
	    continue;
	  /* If the destination is also a MEM or a STRICT_LOW_PART, no
	     extension applies.
	     Also, if there is an explicit extension, we don't have to
	     worry about an implicit one.  */
	  else if (MEM_P (SET_DEST (set))
		   || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART
		   || GET_CODE (SET_SRC (set)) == ZERO_EXTEND
		   || GET_CODE (SET_SRC (set)) == SIGN_EXTEND)
	    ; /* Continue ordinary processing.  */
#ifdef CANNOT_CHANGE_MODE_CLASS
	  /* If the register cannot change mode to word_mode, it follows that
	     it cannot have been used in word_mode.  */
	  else if (REG_P (SET_DEST (set))
		   && CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
						word_mode,
						REGNO_REG_CLASS (REGNO (SET_DEST (set)))))
	    ; /* Continue ordinary processing.  */
#endif
	  /* If this is a straight load, make the extension explicit.  */
	  else if (REG_P (SET_DEST (set))
		   && recog_data.n_operands == 2
		   && SET_SRC (set) == op
		   && SET_DEST (set) == recog_data.operand[1-i])
	    {
	      validate_change (insn, recog_data.operand_loc[i],
			       gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (op)),
					      word_mode, op),
			       1);
	      validate_change (insn, recog_data.operand_loc[1-i],
			       gen_rtx_REG (word_mode, REGNO (SET_DEST (set))),
			       1);
	      if (! apply_change_group ())
		return 0;
	      /* Restart with the rewritten insn.  */
	      return reload_cse_simplify_operands (insn, testreg);
	    }
	  else
	    /* ??? There might be arithmetic operations with memory that are
	       safe to optimize, but is it worth the trouble?  */
	    continue;
	}
#endif /* LOAD_EXTEND_OP */
      if (side_effects_p (op))
	continue;
      v = cselib_lookup (op, recog_data.operand_mode[i], 0, VOIDmode);
      if (! v)
	continue;

      for (l = v->locs; l; l = l->next)
	if (REG_P (l->loc))
	  SET_HARD_REG_BIT (equiv_regs[i], REGNO (l->loc));
    }

  for (i = 0; i < recog_data.n_operands; i++)
    {
      enum machine_mode mode;
      int regno;
      const char *p;

      op_alt_regno[i] = XALLOCAVEC (int, recog_data.n_alternatives);
      for (j = 0; j < recog_data.n_alternatives; j++)
	op_alt_regno[i][j] = -1;

      p = constraints[i] = recog_data.constraints[i];
      mode = recog_data.operand_mode[i];

      /* Add the reject values for each alternative given by the constraints
	 for this operand.  */
      j = 0;
      while (*p != '\0')
	{
	  char c = *p++;
	  if (c == ',')
	    j++;
	  else if (c == '?')
	    alternative_reject[j] += 3;
	  else if (c == '!')
	    alternative_reject[j] += 300;
	}

      /* We won't change operands which are already registers.  We
	 also don't want to modify output operands.  */
      regno = true_regnum (recog_data.operand[i]);
      if (regno >= 0
	  || constraints[i][0] == '='
	  || constraints[i][0] == '+')
	continue;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  enum reg_class rclass = NO_REGS;

	  if (! TEST_HARD_REG_BIT (equiv_regs[i], regno))
	    continue;

	  /* Reuse TESTREG as a stand-in for hard register REGNO in MODE.  */
	  SET_REGNO_RAW (testreg, regno);
	  PUT_MODE (testreg, mode);

	  /* We found a register equal to this operand.  Now look for all
	     alternatives that can accept this register and have not been
	     assigned a register they can use yet.  */
	  j = 0;
	  p = constraints[i];
	  for (;;)
	    {
	      char c = *p;

	      /* Note the deliberate case ordering: 'g' and the default
		 case accumulate RCLASS for the current alternative; the
		 ','/'\0' case closes the alternative out.  */
	      switch (c)
		{
		case 'g':
		  rclass = reg_class_subunion[rclass][GENERAL_REGS];
		  break;

		default:
		  rclass
		    = (reg_class_subunion
		       [rclass]
		       [reg_class_for_constraint (lookup_constraint (p))]);
		  break;

		case ',': case '\0':
		  /* See if REGNO fits this alternative, and set it up as the
		     replacement register if we don't have one for this
		     alternative yet and the operand being replaced is not
		     a cheap CONST_INT.  */
		  if (op_alt_regno[i][j] == -1
		      && TEST_BIT (recog_data.enabled_alternatives, j)
		      && reg_fits_class_p (testreg, rclass, 0, mode)
		      && (!CONST_INT_P (recog_data.operand[i])
			  || (set_src_cost (recog_data.operand[i],
					    optimize_bb_for_speed_p
					     (BLOCK_FOR_INSN (insn)))
			      > set_src_cost (testreg,
					      optimize_bb_for_speed_p
					       (BLOCK_FOR_INSN (insn))))))
		    {
		      alternative_nregs[j]++;
		      op_alt_regno[i][j] = regno;
		    }
		  j++;
		  rclass = NO_REGS;
		  break;
		}
	      p += CONSTRAINT_LEN (c, p);

	      if (c == '\0')
		break;
	    }
	}
    }

  /* Record all alternatives which are better or equal to the currently
     matching one in the alternative_order array.  */
  for (i = j = 0; i < recog_data.n_alternatives; i++)
    if (alternative_reject[i] <= alternative_reject[which_alternative])
      alternative_order[j++] = i;
  recog_data.n_alternatives = j;

  /* Sort it.  Given a small number of alternatives, a dumb algorithm
     won't hurt too much.  */
  for (i = 0; i < recog_data.n_alternatives - 1; i++)
    {
      int best = i;
      int best_reject = alternative_reject[alternative_order[i]];
      int best_nregs = alternative_nregs[alternative_order[i]];
      int tmp;

      for (j = i + 1; j < recog_data.n_alternatives; j++)
	{
	  int this_reject = alternative_reject[alternative_order[j]];
	  int this_nregs = alternative_nregs[alternative_order[j]];

	  /* Lower reject wins; ties are broken by the larger number of
	     register substitutions.  */
	  if (this_reject < best_reject
	      || (this_reject == best_reject && this_nregs > best_nregs))
	    {
	      best = j;
	      best_reject = this_reject;
	      best_nregs = this_nregs;
	    }
	}

      tmp = alternative_order[best];
      alternative_order[best] = alternative_order[i];
      alternative_order[i] = tmp;
    }

  /* Substitute the operands as determined by op_alt_regno for the best
     alternative.  */
  j = alternative_order[0];

  for (i = 0; i < recog_data.n_operands; i++)
    {
      enum machine_mode mode = recog_data.operand_mode[i];
      if (op_alt_regno[i][j] == -1)
	continue;

      validate_change (insn, recog_data.operand_loc[i],
		       gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
    }

  /* Also fix up matched duplicates of the operands.  */
  for (i = recog_data.n_dups - 1; i >= 0; i--)
    {
      int op = recog_data.dup_num[i];
      enum machine_mode mode = recog_data.operand_mode[op];

      if (op_alt_regno[op][j] == -1)
	continue;

      validate_change (insn, recog_data.dup_loc[i],
		       gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
    }

  return apply_change_group ();
}
661\f
/* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
   addressing now.
   This code might also be useful when reload gave up on reg+reg addressing
   because of clashes between the return register and INDEX_REG_CLASS.  */

/* The maximum number of uses of a register we can keep track of to
   replace them with reg+reg addressing.  */
#define RELOAD_COMBINE_MAX_USES 16

/* Describes a recorded use of a register.  */
struct reg_use
{
  /* The insn where a register has been used.  */
  rtx insn;
  /* Points to the memory reference enclosing the use, if any, NULL_RTX
     otherwise.  */
  rtx containing_mem;
  /* Location of the register within INSN.  */
  rtx *usep;
  /* The reverse uid of the insn.  */
  int ruid;
};

/* Per-hard-register state for reload_combine, indexed by register number.

   If the register is used in some unknown fashion, USE_INDEX is negative.
   If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
   indicates where it is first set or clobbered.
   Otherwise, USE_INDEX is the index of the last encountered use of the
   register (which is first among these we have seen since we scan backwards).
   USE_RUID indicates the first encountered, i.e. last, of these uses.
   If ALL_OFFSETS_MATCH is true, all encountered uses were inside a PLUS
   with a constant offset; OFFSET contains this constant in that case.
   STORE_RUID is always meaningful if we only want to use a value in a
   register in a different place: it denotes the next insn in the insn
   stream (i.e. the last encountered) that sets or clobbers the register.
   REAL_STORE_RUID is similar, but clobbers are ignored when updating it.

   Note that reg_use entries are filled from the top of the array
   downwards, from index RELOAD_COMBINE_MAX_USES - 1 to USE_INDEX.  */
static struct
  {
    struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
    rtx offset;
    int use_index;
    int store_ruid;
    int real_store_ruid;
    int use_ruid;
    bool all_offsets_match;
  } reg_state[FIRST_PSEUDO_REGISTER];

/* Reverse linear uid.  This is increased in reload_combine while scanning
   the instructions from last to first.  It is used to set last_label_ruid
   and the store_ruid / use_ruid fields in reg_state.  */
static int reload_combine_ruid;

/* The RUID of the last label we encountered in reload_combine.  */
static int last_label_ruid;

/* The RUID of the last jump we encountered in reload_combine.  */
static int last_jump_ruid;

/* The register numbers of the first and last index register.  A value of
   -1 in LAST_INDEX_REG indicates that we've previously computed these
   values and found no suitable index registers.  */
static int first_index_reg = -1;
static int last_index_reg;

/* Liveness information for LABEL.  NOTE(review): label_live and
   min_labelno are defined elsewhere in this file, outside this chunk.  */
#define LABEL_LIVE(LABEL) \
  (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])
727
/* Subroutine of reload_combine_split_ruids, called to fix up a single
   ruid pointed to by *PRUID if it is higher than SPLIT_RUID.  */

static inline void
reload_combine_split_one_ruid (int *pruid, int split_ruid)
{
  int cur = *pruid;

  /* Shift the ruid up by one to make room for the newly inserted insn.  */
  if (cur > split_ruid)
    *pruid = cur + 1;
}
737
738/* Called when we insert a new insn in a position we've already passed in
739 the scan. Examine all our state, increasing all ruids that are higher
740 than SPLIT_RUID by one in order to make room for a new insn. */
741
742static void
743reload_combine_split_ruids (int split_ruid)
744{
745 unsigned i;
746
747 reload_combine_split_one_ruid (&reload_combine_ruid, split_ruid);
748 reload_combine_split_one_ruid (&last_label_ruid, split_ruid);
749 reload_combine_split_one_ruid (&last_jump_ruid, split_ruid);
750
751 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
752 {
753 int j, idx = reg_state[i].use_index;
754 reload_combine_split_one_ruid (&reg_state[i].use_ruid, split_ruid);
755 reload_combine_split_one_ruid (&reg_state[i].store_ruid, split_ruid);
756 reload_combine_split_one_ruid (&reg_state[i].real_store_ruid,
757 split_ruid);
758 if (idx < 0)
759 continue;
760 for (j = idx; j < RELOAD_COMBINE_MAX_USES; j++)
761 {
762 reload_combine_split_one_ruid (&reg_state[i].reg_use[j].ruid,
763 split_ruid);
764 }
765 }
766}
767
/* Called when we are about to rescan a previously encountered insn with
   reload_combine_note_use after modifying some part of it.  This clears all
   information about uses in that particular insn.  */

static void
reload_combine_purge_insn_uses (rtx insn)
{
  unsigned i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int j, k, idx = reg_state[i].use_index;
      if (idx < 0)
	continue;
      /* Uses occupy indices idx .. MAX-1; walk them from the top down,
	 compacting the surviving entries (those not belonging to INSN)
	 back toward the top of the array.  K tracks the new bottom.  */
      j = k = RELOAD_COMBINE_MAX_USES;
      while (j-- > idx)
	{
	  if (reg_state[i].reg_use[j].insn != insn)
	    {
	      k--;
	      if (k != j)
		reg_state[i].reg_use[k] = reg_state[i].reg_use[j];
	    }
	}
      reg_state[i].use_index = k;
    }
}
795
/* Called when we need to forget about all uses of REGNO after an insn
   which is identified by RUID.  */

static void
reload_combine_purge_reg_uses_after_ruid (unsigned regno, int ruid)
{
  int j, k, idx = reg_state[regno].use_index;
  if (idx < 0)
    return;
  /* Same top-down compaction scheme as reload_combine_purge_insn_uses:
     keep entries whose ruid is >= RUID (ruids grow as the backward scan
     moves toward earlier insns, so smaller ruids are later in the insn
     stream), dropping the rest.  */
  j = k = RELOAD_COMBINE_MAX_USES;
  while (j-- > idx)
    {
      if (reg_state[regno].reg_use[j].ruid >= ruid)
	{
	  k--;
	  if (k != j)
	    reg_state[regno].reg_use[k] = reg_state[regno].reg_use[j];
	}
    }
  reg_state[regno].use_index = k;
}
817
/* Find the use of REGNO with the ruid that is highest among those
   lower than RUID_LIMIT, and return it if it is the only use of this
   reg in the insn.  Return NULL otherwise.  */

static struct reg_use *
reload_combine_closest_single_use (unsigned regno, int ruid_limit)
{
  int i, best_ruid = 0;
  int use_idx = reg_state[regno].use_index;
  struct reg_use *retval;

  /* Negative use_index means the register is used in some unknown
     fashion; we cannot reason about its uses.  */
  if (use_idx < 0)
    return NULL;
  retval = NULL;
  for (i = use_idx; i < RELOAD_COMBINE_MAX_USES; i++)
    {
      struct reg_use *use = reg_state[regno].reg_use + i;
      int this_ruid = use->ruid;
      if (this_ruid >= ruid_limit)
	continue;
      if (this_ruid > best_ruid)
	{
	  best_ruid = this_ruid;
	  retval = use;
	}
      /* Two uses with the same ruid belong to the same insn, so the
	 use is not "single" — reject it.  */
      else if (this_ruid == best_ruid)
	retval = NULL;
    }
  /* Don't return a use separated from RUID_LIMIT by a label.  */
  if (last_label_ruid >= best_ruid)
    return NULL;
  return retval;
}
850
65069495 851/* After we've moved an add insn, fix up any debug insns that occur
852 between the old location of the add and the new location. REG is
853 the destination register of the add insn; REPLACEMENT is the
854 SET_SRC of the add. FROM and TO specify the range in which we
855 should make this change on debug insns. */
0ead6a7d 856
857static void
65069495 858fixup_debug_insns (rtx reg, rtx replacement, rtx from, rtx to)
0ead6a7d 859{
65069495 860 rtx insn;
861 for (insn = from; insn != to; insn = NEXT_INSN (insn))
0ead6a7d 862 {
863 rtx t;
65069495 864
865 if (!DEBUG_INSN_P (insn))
0ead6a7d 866 continue;
65069495 867
868 t = INSN_VAR_LOCATION_LOC (insn);
727047d0 869 t = simplify_replace_rtx (t, reg, replacement);
65069495 870 validate_change (insn, &INSN_VAR_LOCATION_LOC (insn), t, 0);
0ead6a7d 871 }
872}
873
/* Subroutine of reload_combine_recognize_const_pattern.  Try to replace REG
   with SRC in the insn described by USE, taking costs into account.  Return
   true if we made the replacement.  */

static bool
try_replace_in_use (struct reg_use *use, rtx reg, rtx src)
{
  rtx use_insn = use->insn;
  rtx mem = use->containing_mem;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (use_insn));

  if (mem != NULL_RTX)
    {
      /* The use is inside a memory reference: substitute into its
	 address, and accept only if the new address is legitimate and
	 no more expensive than the old one.  */
      addr_space_t as = MEM_ADDR_SPACE (mem);
      rtx oldaddr = XEXP (mem, 0);
      rtx newaddr = NULL_RTX;
      int old_cost = address_cost (oldaddr, GET_MODE (mem), as, speed);
      int new_cost;

      newaddr = simplify_replace_rtx (oldaddr, reg, src);
      if (memory_address_addr_space_p (GET_MODE (mem), newaddr, as))
	{
	  /* Temporarily install NEWADDR so address_cost sees it in
	     context, then restore OLDADDR before the real
	     validate_change below.  */
	  XEXP (mem, 0) = newaddr;
	  new_cost = address_cost (newaddr, GET_MODE (mem), as, speed);
	  XEXP (mem, 0) = oldaddr;
	  if (new_cost <= old_cost
	      && validate_change (use_insn,
				  &XEXP (mem, 0), newaddr, 0))
	    return true;
	}
    }
  else
    {
      /* The use is the source of another reg = reg + const insn: fold
	 SRC into that addition when the combined source is no more
	 expensive than the original.  */
      rtx new_set = single_set (use_insn);
      if (new_set
	  && REG_P (SET_DEST (new_set))
	  && GET_CODE (SET_SRC (new_set)) == PLUS
	  && REG_P (XEXP (SET_SRC (new_set), 0))
	  && CONSTANT_P (XEXP (SET_SRC (new_set), 1)))
	{
	  rtx new_src;
	  int old_cost = set_src_cost (SET_SRC (new_set), speed);

	  gcc_assert (rtx_equal_p (XEXP (SET_SRC (new_set), 0), reg));
	  new_src = simplify_replace_rtx (SET_SRC (new_set), reg, src);

	  if (set_src_cost (new_src, speed) <= old_cost
	      && validate_change (use_insn, &SET_SRC (new_set),
				  new_src, 0))
	    return true;
	}
    }
  return false;
}
928
/* Called by reload_combine when scanning INSN.  This function tries to detect
   patterns where a constant is added to a register, and the result is used
   in an address.
   Return true if no further processing is needed on INSN; false if it wasn't
   recognized and should be handled normally.  */

static bool
reload_combine_recognize_const_pattern (rtx insn)
{
  /* Walk uses of the destination register backwards-in-scan-order,
     starting from the current RUID.  */
  int from_ruid = reload_combine_ruid;
  rtx set, pat, reg, src, addreg;
  unsigned int regno;
  struct reg_use *use;
  bool must_move_add;
  rtx add_moved_after_insn = NULL_RTX;
  int add_moved_after_ruid = 0;
  int clobbered_regno = -1;

  set = single_set (insn);
  if (set == NULL_RTX)
    return false;

  reg = SET_DEST (set);
  src = SET_SRC (set);
  /* Only single-word Pmode registers other than the stack pointer are
     candidates for merging into addresses.  */
  if (!REG_P (reg)
      || hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] != 1
      || GET_MODE (reg) != Pmode
      || reg == stack_pointer_rtx)
    return false;

  regno = REGNO (reg);

  /* We look for a REG1 = REG2 + CONSTANT insn, followed by either
     uses of REG1 inside an address, or inside another add insn.  If
     possible and profitable, merge the addition into subsequent
     uses.  */
  if (GET_CODE (src) != PLUS
      || !REG_P (XEXP (src, 0))
      || !CONSTANT_P (XEXP (src, 1)))
    return false;

  addreg = XEXP (src, 0);
  /* If REG1 == REG2, we cannot simply substitute into later uses;
     the add itself must be moved down past the last use we rewrite.  */
  must_move_add = rtx_equal_p (reg, addreg);

  pat = PATTERN (insn);
  if (must_move_add && set != pat)
    {
      /* We have to be careful when moving the add; apart from the
	 single_set there may also be clobbers.  Recognize one special
	 case, that of one clobber alongside the set (likely a clobber
	 of the CC register).  */
      gcc_assert (GET_CODE (PATTERN (insn)) == PARALLEL);
      if (XVECLEN (pat, 0) != 2 || XVECEXP (pat, 0, 0) != set
	  || GET_CODE (XVECEXP (pat, 0, 1)) != CLOBBER
	  || !REG_P (XEXP (XVECEXP (pat, 0, 1), 0)))
	return false;
      clobbered_regno = REGNO (XEXP (XVECEXP (pat, 0, 1), 0));
    }

  do
    {
      use = reload_combine_closest_single_use (regno, from_ruid);

      if (use)
	/* Start the search for the next use from here.  */
	from_ruid = use->ruid;

      if (use && GET_MODE (*use->usep) == Pmode)
	{
	  bool delete_add = false;
	  rtx use_insn = use->insn;
	  int use_ruid = use->ruid;

	  /* Avoid moving the add insn past a jump.  */
	  if (must_move_add && use_ruid <= last_jump_ruid)
	    break;

	  /* If the add clobbers another hard reg in parallel, don't move
	     it past a real set of this hard reg.  */
	  if (must_move_add && clobbered_regno >= 0
	      && reg_state[clobbered_regno].real_store_ruid >= use_ruid)
	    break;

#ifdef HAVE_cc0
	  /* Do not separate cc0 setter and cc0 user on HAVE_cc0 targets.  */
	  if (must_move_add && sets_cc0_p (PATTERN (use_insn)))
	    break;
#endif

	  gcc_assert (reg_state[regno].store_ruid <= use_ruid);
	  /* Avoid moving a use of ADDREG past a point where it is stored.  */
	  if (reg_state[REGNO (addreg)].store_ruid > use_ruid)
	    break;

	  /* We also must not move the addition past an insn that sets
	     the same register, unless we can combine two add insns.  */
	  if (must_move_add && reg_state[regno].store_ruid == use_ruid)
	    {
	      if (use->containing_mem == NULL_RTX)
		delete_add = true;
	      else
		break;
	    }

	  if (try_replace_in_use (use, reg, src))
	    {
	      /* The use insn was rewritten; rescan it so the recorded
		 use information matches its new pattern.  */
	      reload_combine_purge_insn_uses (use_insn);
	      reload_combine_note_use (&PATTERN (use_insn), use_insn,
				       use_ruid, NULL_RTX);

	      if (delete_add)
		{
		  fixup_debug_insns (reg, src, insn, use_insn);
		  delete_insn (insn);
		  return true;
		}
	      if (must_move_add)
		{
		  add_moved_after_insn = use_insn;
		  add_moved_after_ruid = use_ruid;
		}
	      continue;
	    }
	}
      /* If we get here, we couldn't handle this use.  */
      if (must_move_add)
	break;
    }
  while (use);

  if (!must_move_add || add_moved_after_insn == NULL_RTX)
    /* Process the add normally.  */
    return false;

  fixup_debug_insns (reg, src, insn, add_moved_after_insn);

  /* Move the add down past the last use we rewrote and update the
     recorded use/store information to reflect its new position.  */
  reorder_insns (insn, insn, add_moved_after_insn);
  reload_combine_purge_reg_uses_after_ruid (regno, add_moved_after_ruid);
  reload_combine_split_ruids (add_moved_after_ruid - 1);
  reload_combine_note_use (&PATTERN (insn), insn,
			   add_moved_after_ruid, NULL_RTX);
  reg_state[regno].store_ruid = add_moved_after_ruid;

  return true;
}
1074
/* Called by reload_combine when scanning INSN.  Try to detect a pattern we
   can handle and improve.  Return true if no further processing is needed on
   INSN; false if it wasn't recognized and should be handled normally.  */

static bool
reload_combine_recognize_pattern (rtx insn)
{
  rtx set, reg, src;
  unsigned int regno;

  set = single_set (insn);
  if (set == NULL_RTX)
    return false;

  reg = SET_DEST (set);
  src = SET_SRC (set);
  /* Only single-hard-register destinations are handled.  */
  if (!REG_P (reg)
      || hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] != 1)
    return false;

  regno = REGNO (reg);

  /* Look for (set (REGX) (CONST_INT))
     (set (REGX) (PLUS (REGX) (REGY)))
     ...
     ... (MEM (REGX)) ...
     and convert it to
     (set (REGZ) (CONST_INT))
     ...
     ... (MEM (PLUS (REGZ) (REGY)))... .

     First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
     and that we know all uses of REGX before it dies.
     Also, explicitly check that REGX != REGY; our life information
     does not yet show whether REGY changes in this insn.  */

  if (GET_CODE (src) == PLUS
      && reg_state[regno].all_offsets_match
      && last_index_reg != -1
      && REG_P (XEXP (src, 1))
      && rtx_equal_p (XEXP (src, 0), reg)
      && !rtx_equal_p (XEXP (src, 1), reg)
      && reg_state[regno].use_index >= 0
      && reg_state[regno].use_index < RELOAD_COMBINE_MAX_USES
      && last_label_ruid < reg_state[regno].use_ruid)
    {
      rtx base = XEXP (src, 1);
      rtx prev = prev_nonnote_nondebug_insn (insn);
      rtx prev_set = prev ? single_set (prev) : NULL_RTX;
      rtx index_reg = NULL_RTX;
      rtx reg_sum = NULL_RTX;
      int i;

      /* Now we need to set INDEX_REG to an index register (denoted as
	 REGZ in the illustration above) and REG_SUM to the expression
	 register+register that we want to use to substitute uses of REG
	 (typically in MEMs) with.  First check REG and BASE for being
	 index registers; we can use them even if they are not dead.  */
      if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
	  || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
				REGNO (base)))
	{
	  index_reg = reg;
	  reg_sum = src;
	}
      else
	{
	  /* Otherwise, look for a free index register.  Since we have
	     checked above that neither REG nor BASE are index registers,
	     if we find anything at all, it will be different from these
	     two registers.  */
	  for (i = first_index_reg; i <= last_index_reg; i++)
	    {
	      if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i)
		  && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
		  && reg_state[i].store_ruid <= reg_state[regno].use_ruid
		  && (call_used_regs[i] || df_regs_ever_live_p (i))
		  && (!frame_pointer_needed || i != HARD_FRAME_POINTER_REGNUM)
		  && !fixed_regs[i] && !global_regs[i]
		  && hard_regno_nregs[i][GET_MODE (reg)] == 1
		  && targetm.hard_regno_scratch_ok (i))
		{
		  index_reg = gen_rtx_REG (GET_MODE (reg), i);
		  reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
		  break;
		}
	    }
	}

      /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
	 (REGY), i.e. BASE, is not clobbered before the last use we'll
	 create.  */
      if (reg_sum
	  && prev_set
	  && CONST_INT_P (SET_SRC (prev_set))
	  && rtx_equal_p (SET_DEST (prev_set), reg)
	  && (reg_state[REGNO (base)].store_ruid
	      <= reg_state[regno].use_ruid))
	{
	  /* Change destination register and, if necessary, the constant
	     value in PREV, the constant loading instruction.  */
	  validate_change (prev, &SET_DEST (prev_set), index_reg, 1);
	  if (reg_state[regno].offset != const0_rtx)
	    validate_change (prev,
			     &SET_SRC (prev_set),
			     GEN_INT (INTVAL (SET_SRC (prev_set))
				      + INTVAL (reg_state[regno].offset)),
			     1);

	  /* Now for every use of REG that we have recorded, replace REG
	     with REG_SUM.  */
	  for (i = reg_state[regno].use_index;
	       i < RELOAD_COMBINE_MAX_USES; i++)
	    validate_unshare_change (reg_state[regno].reg_use[i].insn,
				     reg_state[regno].reg_use[i].usep,
				     /* Each change must have its own
					replacement.  */
				     reg_sum, 1);

	  /* All queued changes are applied atomically; if any one of them
	     fails to be recognized, everything is rolled back.  */
	  if (apply_change_group ())
	    {
	      struct reg_use *lowest_ruid = NULL;

	      /* For every new use of REG_SUM, we have to record the use
		 of BASE therein, i.e. operand 1.  */
	      for (i = reg_state[regno].use_index;
		   i < RELOAD_COMBINE_MAX_USES; i++)
		{
		  struct reg_use *use = reg_state[regno].reg_use + i;
		  reload_combine_note_use (&XEXP (*use->usep, 1), use->insn,
					   use->ruid, use->containing_mem);
		  if (lowest_ruid == NULL || use->ruid < lowest_ruid->ruid)
		    lowest_ruid = use;
		}

	      fixup_debug_insns (reg, reg_sum, insn, lowest_ruid->insn);

	      /* Delete the reg-reg addition.  */
	      delete_insn (insn);

	      if (reg_state[regno].offset != const0_rtx)
		/* Previous REG_EQUIV / REG_EQUAL notes for PREV
		   are now invalid.  */
		remove_reg_equal_equiv_notes (prev);

	      reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
	      return true;
	    }
	}
    }
  return false;
}
1227
/* Main entry point for the reload_combine optimization.  Scan all insns
   from last to first, tracking for each hard register where it was last
   stored and where it is used (the reg_state array, indexed by RUID --
   reverse-scan insn numbers), and use that information to merge
   constant additions and reg-reg additions into addresses via
   reload_combine_recognize_const_pattern and
   reload_combine_recognize_pattern.  */

static void
reload_combine (void)
{
  rtx insn, prev;
  basic_block bb;
  unsigned int r;
  int min_labelno, n_labels;
  HARD_REG_SET ever_live_at_start, *label_live;

  /* To avoid wasting too much time later searching for an index register,
     determine the minimum and maximum index register numbers.  */
  if (INDEX_REG_CLASS == NO_REGS)
    last_index_reg = -1;
  else if (first_index_reg == -1 && last_index_reg == 0)
    {
      /* first_index_reg/last_index_reg are computed lazily on the first
	 call and cached across invocations.  */
      for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], r))
	  {
	    if (first_index_reg == -1)
	      first_index_reg = r;

	    last_index_reg = r;
	  }

      /* If no index register is available, we can quit now.  Set LAST_INDEX_REG
	 to -1 so we'll know to quit early the next time we get here.  */
      if (first_index_reg == -1)
	{
	  last_index_reg = -1;
	  return;
	}
    }

  /* Set up LABEL_LIVE and EVER_LIVE_AT_START.  The register lifetime
     information is a bit fuzzy immediately after reload, but it's
     still good enough to determine which registers are live at a jump
     destination.  */
  min_labelno = get_first_label_num ();
  n_labels = max_label_num () - min_labelno;
  label_live = XNEWVEC (HARD_REG_SET, n_labels);
  CLEAR_HARD_REG_SET (ever_live_at_start);

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      insn = BB_HEAD (bb);
      if (LABEL_P (insn))
	{
	  HARD_REG_SET live;
	  bitmap live_in = df_get_live_in (bb);

	  REG_SET_TO_HARD_REG_SET (live, live_in);
	  compute_use_by_pseudos (&live, live_in);
	  COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
	  IOR_HARD_REG_SET (ever_live_at_start, live);
	}
    }

  /* Initialize last_label_ruid, reload_combine_ruid and reg_state.  */
  last_label_ruid = last_jump_ruid = reload_combine_ruid = 0;
  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
    {
      reg_state[r].store_ruid = 0;
      reg_state[r].real_store_ruid = 0;
      if (fixed_regs[r])
	reg_state[r].use_index = -1;
      else
	reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
    }

  for (insn = get_last_insn (); insn; insn = prev)
    {
      bool control_flow_insn;
      rtx note;

      /* Fetch PREV up front; the recognizers may delete or move INSN.  */
      prev = PREV_INSN (insn);

      /* We cannot do our optimization across labels.  Invalidating all the use
	 information we have would be costly, so we just note where the label
	 is and then later disable any optimization that would cross it.  */
      if (LABEL_P (insn))
	last_label_ruid = reload_combine_ruid;
      else if (BARRIER_P (insn))
	{
	  /* Crossing a barrier resets all the use information.  */
	  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	    if (! fixed_regs[r])
	      reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
	}
      else if (INSN_P (insn) && volatile_insn_p (PATTERN (insn)))
	/* Optimizations across insns being marked as volatile must be
	   prevented.  All the usage information is invalidated
	   here.  */
	for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	  if (! fixed_regs[r]
	      && reg_state[r].use_index != RELOAD_COMBINE_MAX_USES)
	    reg_state[r].use_index = -1;

      if (! NONDEBUG_INSN_P (insn))
	continue;

      reload_combine_ruid++;

      control_flow_insn = control_flow_insn_p (insn);
      if (control_flow_insn)
	last_jump_ruid = reload_combine_ruid;

      if (reload_combine_recognize_const_pattern (insn)
	  || reload_combine_recognize_pattern (insn))
	continue;

      note_stores (PATTERN (insn), reload_combine_note_store, NULL);

      if (CALL_P (insn))
	{
	  rtx link;

	  /* A call clobbers all call-used registers.  */
	  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	    if (call_used_regs[r])
	      {
		reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
		reg_state[r].store_ruid = reload_combine_ruid;
	      }

	  /* Process explicit USEs and CLOBBERs attached to the call.  */
	  for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
	       link = XEXP (link, 1))
	    {
	      rtx setuse = XEXP (link, 0);
	      rtx usage_rtx = XEXP (setuse, 0);
	      if ((GET_CODE (setuse) == USE || GET_CODE (setuse) == CLOBBER)
		  && REG_P (usage_rtx))
		{
		  unsigned int i;
		  unsigned int start_reg = REGNO (usage_rtx);
		  unsigned int num_regs
		    = hard_regno_nregs[start_reg][GET_MODE (usage_rtx)];
		  unsigned int end_reg = start_reg + num_regs - 1;
		  for (i = start_reg; i <= end_reg; i++)
		    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
		      {
			reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
			reg_state[i].store_ruid = reload_combine_ruid;
		      }
		    else
		      /* A USE marks the register as used in an unknown
			 fashion.  */
		      reg_state[i].use_index = -1;
		}
	    }
	}

      if (control_flow_insn && !ANY_RETURN_P (PATTERN (insn)))
	{
	  /* Non-spill registers might be used at the call destination in
	     some unknown fashion, so we have to mark the unknown use.  */
	  HARD_REG_SET *live;

	  if ((condjump_p (insn) || condjump_in_parallel_p (insn))
	      && JUMP_LABEL (insn))
	    {
	      if (ANY_RETURN_P (JUMP_LABEL (insn)))
		live = NULL;
	      else
		live = &LABEL_LIVE (JUMP_LABEL (insn));
	    }
	  else
	    live = &ever_live_at_start;

	  if (live)
	    for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	      if (TEST_HARD_REG_BIT (*live, r))
		reg_state[r].use_index = -1;
	}

      reload_combine_note_use (&PATTERN (insn), insn, reload_combine_ruid,
			       NULL_RTX);

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	{
	  if (REG_NOTE_KIND (note) == REG_INC && REG_P (XEXP (note, 0)))
	    {
	      /* An auto-increment both stores to and uses the register
		 in an unknown fashion.  */
	      int regno = REGNO (XEXP (note, 0));
	      reg_state[regno].store_ruid = reload_combine_ruid;
	      reg_state[regno].real_store_ruid = reload_combine_ruid;
	      reg_state[regno].use_index = -1;
	    }
	}
    }

  free (label_live);
}
1416
/* Check if DST is a register or a subreg of a register; if it is,
   update store_ruid, real_store_ruid and use_index in the reg_state
   structure accordingly.  Called via note_stores from reload_combine.
   SET is the SET or CLOBBER rtx containing the store; a CLOBBER only
   updates store_ruid, not real_store_ruid.  */

static void
reload_combine_note_store (rtx dst, const_rtx set, void *data ATTRIBUTE_UNUSED)
{
  int regno = 0;
  int i;
  enum machine_mode mode = GET_MODE (dst);

  if (GET_CODE (dst) == SUBREG)
    {
      /* Translate the subreg byte offset into a hard-register offset,
	 added to REGNO (SUBREG_REG (dst)) below.  */
      regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
				   GET_MODE (SUBREG_REG (dst)),
				   SUBREG_BYTE (dst),
				   GET_MODE (dst));
      dst = SUBREG_REG (dst);
    }

  /* Some targets do argument pushes without adding REG_INC notes.  */

  if (MEM_P (dst))
    {
      dst = XEXP (dst, 0);
      if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
	  || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC
	  || GET_CODE (dst) == PRE_MODIFY || GET_CODE (dst) == POST_MODIFY)
	{
	  regno = REGNO (XEXP (dst, 0));
	  mode = GET_MODE (XEXP (dst, 0));
	  for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
	    {
	      /* We could probably do better, but for now mark the register
		 as used in an unknown fashion and set/clobbered at this
		 insn.  */
	      reg_state[i].use_index = -1;
	      reg_state[i].store_ruid = reload_combine_ruid;
	      reg_state[i].real_store_ruid = reload_combine_ruid;
	    }
	}
      else
	return;
    }

  if (!REG_P (dst))
    return;
  regno += REGNO (dst);

  /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
     careful with registers / register parts that are not full words.
     Similarly for ZERO_EXTRACT.  */
  if (GET_CODE (SET_DEST (set)) == ZERO_EXTRACT
      || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART)
    {
      /* Partial store: the old value partially survives, so treat the
	 register as used in an unknown fashion.  */
      for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
	{
	  reg_state[i].use_index = -1;
	  reg_state[i].store_ruid = reload_combine_ruid;
	  reg_state[i].real_store_ruid = reload_combine_ruid;
	}
    }
  else
    {
      /* Full store: previous uses no longer matter; start collecting
	 uses afresh.  */
      for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
	{
	  reg_state[i].store_ruid = reload_combine_ruid;
	  if (GET_CODE (set) == SET)
	    reg_state[i].real_store_ruid = reload_combine_ruid;
	  reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
	}
    }
}
1490
1491/* XP points to a piece of rtl that has to be checked for any uses of
1492 registers.
1493 *XP is the pattern of INSN, or a part of it.
1494 Called from reload_combine, and recursively by itself. */
1495static void
d83ccc81 1496reload_combine_note_use (rtx *xp, rtx insn, int ruid, rtx containing_mem)
8f8cadbc 1497{
1498 rtx x = *xp;
1499 enum rtx_code code = x->code;
1500 const char *fmt;
1501 int i, j;
1502 rtx offset = const0_rtx; /* For the REG case below. */
1503
1504 switch (code)
1505 {
1506 case SET:
8ad4c111 1507 if (REG_P (SET_DEST (x)))
8f8cadbc 1508 {
d83ccc81 1509 reload_combine_note_use (&SET_SRC (x), insn, ruid, NULL_RTX);
8f8cadbc 1510 return;
1511 }
1512 break;
1513
1514 case USE:
1515 /* If this is the USE of a return value, we can't change it. */
8ad4c111 1516 if (REG_P (XEXP (x, 0)) && REG_FUNCTION_VALUE_P (XEXP (x, 0)))
8f8cadbc 1517 {
1518 /* Mark the return register as used in an unknown fashion. */
1519 rtx reg = XEXP (x, 0);
1520 int regno = REGNO (reg);
67d6c12b 1521 int nregs = hard_regno_nregs[regno][GET_MODE (reg)];
8f8cadbc 1522
1523 while (--nregs >= 0)
1524 reg_state[regno + nregs].use_index = -1;
1525 return;
1526 }
1527 break;
1528
1529 case CLOBBER:
8ad4c111 1530 if (REG_P (SET_DEST (x)))
8f8cadbc 1531 {
1532 /* No spurious CLOBBERs of pseudo registers may remain. */
876760f6 1533 gcc_assert (REGNO (SET_DEST (x)) < FIRST_PSEUDO_REGISTER);
8f8cadbc 1534 return;
1535 }
1536 break;
1537
1538 case PLUS:
1539 /* We are interested in (plus (reg) (const_int)) . */
8ad4c111 1540 if (!REG_P (XEXP (x, 0))
971ba038 1541 || !CONST_INT_P (XEXP (x, 1)))
8f8cadbc 1542 break;
1543 offset = XEXP (x, 1);
1544 x = XEXP (x, 0);
1545 /* Fall through. */
1546 case REG:
1547 {
1548 int regno = REGNO (x);
1549 int use_index;
1550 int nregs;
1551
1552 /* No spurious USEs of pseudo registers may remain. */
876760f6 1553 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
8f8cadbc 1554
67d6c12b 1555 nregs = hard_regno_nregs[regno][GET_MODE (x)];
8f8cadbc 1556
1557 /* We can't substitute into multi-hard-reg uses. */
1558 if (nregs > 1)
1559 {
1560 while (--nregs >= 0)
1561 reg_state[regno + nregs].use_index = -1;
1562 return;
1563 }
1564
727047d0 1565 /* We may be called to update uses in previously seen insns.
1566 Don't add uses beyond the last store we saw. */
1567 if (ruid < reg_state[regno].store_ruid)
1568 return;
1569
8f8cadbc 1570 /* If this register is already used in some unknown fashion, we
1571 can't do anything.
1572 If we decrement the index from zero to -1, we can't store more
1573 uses, so this register becomes used in an unknown fashion. */
1574 use_index = --reg_state[regno].use_index;
1575 if (use_index < 0)
1576 return;
1577
d83ccc81 1578 if (use_index == RELOAD_COMBINE_MAX_USES - 1)
8f8cadbc 1579 {
1580 /* This is the first use of this register we have seen since we
1581 marked it as dead. */
1582 reg_state[regno].offset = offset;
d83ccc81 1583 reg_state[regno].all_offsets_match = true;
1584 reg_state[regno].use_ruid = ruid;
8f8cadbc 1585 }
0ead6a7d 1586 else
1587 {
1588 if (reg_state[regno].use_ruid > ruid)
1589 reg_state[regno].use_ruid = ruid;
1590
1591 if (! rtx_equal_p (offset, reg_state[regno].offset))
1592 reg_state[regno].all_offsets_match = false;
1593 }
d83ccc81 1594
8f8cadbc 1595 reg_state[regno].reg_use[use_index].insn = insn;
d83ccc81 1596 reg_state[regno].reg_use[use_index].ruid = ruid;
1597 reg_state[regno].reg_use[use_index].containing_mem = containing_mem;
8f8cadbc 1598 reg_state[regno].reg_use[use_index].usep = xp;
1599 return;
1600 }
1601
d83ccc81 1602 case MEM:
1603 containing_mem = x;
1604 break;
1605
8f8cadbc 1606 default:
1607 break;
1608 }
1609
1610 /* Recursively process the components of X. */
1611 fmt = GET_RTX_FORMAT (code);
1612 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1613 {
1614 if (fmt[i] == 'e')
d83ccc81 1615 reload_combine_note_use (&XEXP (x, i), insn, ruid, containing_mem);
8f8cadbc 1616 else if (fmt[i] == 'E')
1617 {
1618 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
d83ccc81 1619 reload_combine_note_use (&XVECEXP (x, i, j), insn, ruid,
1620 containing_mem);
8f8cadbc 1621 }
1622 }
1623}
1624\f
/* See if we can reduce the cost of a constant by replacing a move
   with an add.  We track situations in which a register is set to a
   constant or to a register plus a constant.  */
/* We cannot do our optimization across labels.  Invalidating all the
   information about register contents we have would be costly, so we
   use move2add_last_label_luid to note where the label is and then
   later disable any optimization that would cross it.
   reg_offset[n] / reg_base_reg[n] / reg_symbol_ref[n] / reg_mode[n]
   are only valid if reg_set_luid[n] is greater than
   move2add_last_label_luid.
   For a set that established a new (potential) base register with
   non-constant value, we use move2add_luid from the place where the
   setting insn is encountered; registers based off that base then
   get the same reg_set_luid.  Constants all get
   move2add_last_label_luid + 1 as their reg_set_luid.  */
static int reg_set_luid[FIRST_PSEUDO_REGISTER];

/* If reg_base_reg[n] is negative, register n has been set to
   reg_offset[n] or reg_symbol_ref[n] + reg_offset[n] in mode reg_mode[n].
   If reg_base_reg[n] is non-negative, register n has been set to the
   sum of reg_offset[n] and the value of register reg_base_reg[n]
   before reg_set_luid[n], calculated in mode reg_mode[n] .
   For multi-hard-register registers, all but the first one are
   recorded as BLKmode in reg_mode.  Setting reg_mode to VOIDmode
   marks it as invalid.  */
static HOST_WIDE_INT reg_offset[FIRST_PSEUDO_REGISTER];
static int reg_base_reg[FIRST_PSEUDO_REGISTER];
static rtx reg_symbol_ref[FIRST_PSEUDO_REGISTER];
static enum machine_mode reg_mode[FIRST_PSEUDO_REGISTER];

/* move2add_luid is linearly increased while scanning the instructions
   from first to last.  It is used to set reg_set_luid in
   reload_cse_move2add and move2add_note_store.  */
static int move2add_luid;

/* move2add_last_label_luid is set whenever a label is found.  Labels
   invalidate all previously collected reg_offset data.  */
static int move2add_last_label_luid;

/* ??? We don't know how zero / sign extension is handled, hence we
   can't go from a narrower to a wider mode.  */
#define MODES_OK_FOR_MOVE2ADD(OUTMODE, INMODE) \
  (GET_MODE_SIZE (OUTMODE) == GET_MODE_SIZE (INMODE) \
   || (GET_MODE_SIZE (OUTMODE) <= GET_MODE_SIZE (INMODE) \
       && TRULY_NOOP_TRUNCATION_MODES_P (OUTMODE, INMODE)))
8f8cadbc 1670
b6b86e87 1671/* Record that REG is being set to a value with the mode of REG. */
1672
1673static void
1674move2add_record_mode (rtx reg)
1675{
1676 int regno, nregs;
1677 enum machine_mode mode = GET_MODE (reg);
1678
1679 if (GET_CODE (reg) == SUBREG)
1680 {
1681 regno = subreg_regno (reg);
1682 nregs = subreg_nregs (reg);
1683 }
1684 else if (REG_P (reg))
1685 {
1686 regno = REGNO (reg);
1687 nregs = hard_regno_nregs[regno][mode];
1688 }
1689 else
1690 gcc_unreachable ();
1691 for (int i = nregs - 1; i > 0; i--)
1692 reg_mode[regno + i] = BLKmode;
1693 reg_mode[regno] = mode;
1694}
1695
1696/* Record that REG is being set to the sum of SYM and OFF. */
1697
1698static void
1699move2add_record_sym_value (rtx reg, rtx sym, rtx off)
1700{
1701 int regno = REGNO (reg);
1702
1703 move2add_record_mode (reg);
1704 reg_set_luid[regno] = move2add_luid;
1705 reg_base_reg[regno] = -1;
1706 reg_symbol_ref[regno] = sym;
1707 reg_offset[regno] = INTVAL (off);
1708}
1709
/* Check if REGNO contains a valid value in MODE.
   Returns true if the value recorded for REGNO (in reg_mode[regno])
   is still valid -- i.e. set after the last label -- and is usable
   when read in MODE.  */

static bool
move2add_valid_value_p (int regno, enum machine_mode mode)
{
  /* Values recorded before the last label have been invalidated.  */
  if (reg_set_luid[regno] <= move2add_last_label_luid)
    return false;

  if (mode != reg_mode[regno])
    {
      if (!MODES_OK_FOR_MOVE2ADD (mode, reg_mode[regno]))
	return false;
      /* The value loaded into regno in reg_mode[regno] is also valid in
	 mode after truncation only if (REG:mode regno) is the lowpart of
	 (REG:reg_mode[regno] regno).  Now, for big endian, the starting
	 regno of the lowpart might be different.  */
      int s_off = subreg_lowpart_offset (mode, reg_mode[regno]);
      s_off = subreg_regno_offset (regno, reg_mode[regno], s_off, mode);
      if (s_off != 0)
	/* We could in principle adjust regno, check reg_mode[regno] to be
	   BLKmode, and return s_off to the caller (vs. -1 for failure),
	   but we currently have no callers that could make use of this
	   information.  */
	return false;
    }

  /* All trailing hard registers of a multi-register value must still
     carry their BLKmode marker; anything else means a partial
     overwrite happened.  */
  for (int i = hard_regno_nregs[regno][mode] - 1; i > 0; i--)
    if (reg_mode[regno + i] != BLKmode)
      return false;
  return true;
}
1741
/* This function is called with INSN that sets REG to (SYM + OFF),
   while REG is known to already have value (SYM + offset).
   This function tries to change INSN into an add instruction
   (set (REG) (plus (REG) (OFF - offset))) using the known value.
   It also updates the information about REG's known value.
   Return true if we made a change.  */

static bool
move2add_use_add2_insn (rtx reg, rtx sym, rtx off, rtx insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  int regno = REGNO (reg);
  /* The delta between the desired and the known value, as an add
     operand in REG's mode.  */
  rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[regno],
			      GET_MODE (reg));
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  bool changed = false;

  /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
     use (set (reg) (reg)) instead.
     We don't delete this insn, nor do we convert it into a
     note, to avoid losing register notes or the return
     value flag.  jump2 already knows how to get rid of
     no-op moves.  */
  if (new_src == const0_rtx)
    {
      /* If the constants are different, this is a
	 truncation, that, if turned into (set (reg)
	 (reg)), would be discarded.  Maybe we should
	 try a truncMN pattern?  */
      if (INTVAL (off) == reg_offset [regno])
	changed = validate_change (insn, &SET_SRC (pat), reg, 0);
    }
  else
    {
      struct full_rtx_costs oldcst, newcst;
      rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src);

      /* Compare the cost of the original move against the cost of the
	 add by temporarily swapping the SET_SRC in place.  */
      get_full_set_rtx_cost (pat, &oldcst);
      SET_SRC (pat) = tem;
      get_full_set_rtx_cost (pat, &newcst);
      SET_SRC (pat) = src;

      if (costs_lt_p (&newcst, &oldcst, speed)
	  && have_add2_insn (reg, new_src))
	changed = validate_change (insn, &SET_SRC (pat), tem, 0);
      else if (sym == NULL_RTX && GET_MODE (reg) != BImode)
	{
	  /* Try a narrower mode: if only the low part of the constant
	     differs from the known value, a STRICT_LOW_PART store of
	     the narrow constant has the same effect.  */
	  enum machine_mode narrow_mode;
	  for (narrow_mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       narrow_mode != VOIDmode
		 && narrow_mode != GET_MODE (reg);
	       narrow_mode = GET_MODE_WIDER_MODE (narrow_mode))
	    {
	      if (have_insn_for (STRICT_LOW_PART, narrow_mode)
		  && ((reg_offset[regno] & ~GET_MODE_MASK (narrow_mode))
		      == (INTVAL (off) & ~GET_MODE_MASK (narrow_mode))))
		{
		  rtx narrow_reg = gen_lowpart_common (narrow_mode, reg);
		  rtx narrow_src = gen_int_mode (INTVAL (off),
						 narrow_mode);
		  rtx new_set
		    = gen_rtx_SET (VOIDmode,
				   gen_rtx_STRICT_LOW_PART (VOIDmode,
							    narrow_reg),
				   narrow_src);
		  changed = validate_change (insn, &PATTERN (insn),
					     new_set, 0);
		  if (changed)
		    break;
		}
	    }
	}
    }
  /* Whether or not INSN was changed, it still sets REG to SYM + OFF.  */
  move2add_record_sym_value (reg, sym, off);
  return changed;
}
1819
1820
/* This function is called with INSN that sets REG to (SYM + OFF),
   but REG doesn't have known value (SYM + offset).  This function
   tries to find another register which is known to already have
   value (SYM + offset) and change INSN into an add instruction
   (set (REG) (plus (the found register) (OFF - offset))) if such
   a register is found.  It also updates the information about
   REG's known value.
   Return true iff we made a change.  */

static bool
move2add_use_add3_insn (rtx reg, rtx sym, rtx off, rtx insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  int regno = REGNO (reg);
  int min_regno = 0;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  int i;
  bool changed = false;
  struct full_rtx_costs oldcst, newcst, mincst;
  rtx plus_expr;

  /* MINCST starts at "infinite" cost; any usable candidate beats it.  */
  init_costs_to_max (&mincst);
  get_full_set_rtx_cost (pat, &oldcst);

  /* Temporarily replace SET_SRC with a PLUS so each candidate's cost
     can be measured in place; the original source is restored below.  */
  plus_expr = gen_rtx_PLUS (GET_MODE (reg), reg, const0_rtx);
  SET_SRC (pat) = plus_expr;

  /* Search all hard registers known to hold SYM plus some offset for
     the one giving the cheapest add.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (move2add_valid_value_p (i, GET_MODE (reg))
	&& reg_base_reg[i] < 0
	&& reg_symbol_ref[i] != NULL_RTX
	&& rtx_equal_p (sym, reg_symbol_ref[i]))
      {
	rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[i],
				    GET_MODE (reg));
	/* (set (reg) (plus (reg) (const_int 0))) is not canonical;
	   use (set (reg) (reg)) instead.
	   We don't delete this insn, nor do we convert it into a
	   note, to avoid losing register notes or the return
	   value flag.  jump2 already knows how to get rid of
	   no-op moves.  */
	if (new_src == const0_rtx)
	  {
	    init_costs_to_zero (&mincst);
	    min_regno = i;
	    break;
	  }
	else
	  {
	    XEXP (plus_expr, 1) = new_src;
	    get_full_set_rtx_cost (pat, &newcst);

	    if (costs_lt_p (&newcst, &mincst, speed))
	      {
		mincst = newcst;
		min_regno = i;
	      }
	  }
      }
  SET_SRC (pat) = src;

  if (costs_lt_p (&mincst, &oldcst, speed))
    {
      rtx tem;

      tem = gen_rtx_REG (GET_MODE (reg), min_regno);
      /* I == FIRST_PSEUDO_REGISTER unless the loop exited via break,
	 i.e. unless the zero-offset (plain copy) case was found.  */
      if (i != min_regno)
	{
	  rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[min_regno],
				      GET_MODE (reg));
	  tem = gen_rtx_PLUS (GET_MODE (reg), tem, new_src);
	}
      if (validate_change (insn, &SET_SRC (pat), tem, 0))
	changed = true;
    }
  /* Whether or not INSN was changed, it now sets REG to SYM + OFF.  */
  reg_set_luid[regno] = move2add_luid;
  move2add_record_sym_value (reg, sym, off);
  return changed;
}
1901
d83ccc81 1902/* Convert move insns with constant inputs to additions if they are cheaper.
1903 Return true if any changes were made. */
1904static bool
3ad4992f 1905reload_cse_move2add (rtx first)
8f8cadbc 1906{
1907 int i;
1908 rtx insn;
d83ccc81 1909 bool changed = false;
8f8cadbc 1910
1911 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
6132c0d0 1912 {
1913 reg_set_luid[i] = 0;
1914 reg_offset[i] = 0;
1915 reg_base_reg[i] = 0;
1916 reg_symbol_ref[i] = NULL_RTX;
1917 reg_mode[i] = VOIDmode;
1918 }
8f8cadbc 1919
1920 move2add_last_label_luid = 0;
1921 move2add_luid = 2;
1922 for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
1923 {
1924 rtx pat, note;
1925
6d7dc5b9 1926 if (LABEL_P (insn))
8f8cadbc 1927 {
1928 move2add_last_label_luid = move2add_luid;
1929 /* We're going to increment move2add_luid twice after a
1930 label, so that we can use move2add_last_label_luid + 1 as
1931 the luid for constants. */
1932 move2add_luid++;
1933 continue;
1934 }
1935 if (! INSN_P (insn))
1936 continue;
1937 pat = PATTERN (insn);
1938 /* For simplicity, we only perform this optimization on
1939 straightforward SETs. */
1940 if (GET_CODE (pat) == SET
8ad4c111 1941 && REG_P (SET_DEST (pat)))
8f8cadbc 1942 {
1943 rtx reg = SET_DEST (pat);
1944 int regno = REGNO (reg);
1945 rtx src = SET_SRC (pat);
1946
1947 /* Check if we have valid information on the contents of this
1948 register in the mode of REG. */
b6b86e87 1949 if (move2add_valid_value_p (regno, GET_MODE (reg))
3072d30e 1950 && dbg_cnt (cse2_move2add))
8f8cadbc 1951 {
1952 /* Try to transform (set (REGX) (CONST_INT A))
1953 ...
1954 (set (REGX) (CONST_INT B))
1955 to
1956 (set (REGX) (CONST_INT A))
1957 ...
1958 (set (REGX) (plus (REGX) (CONST_INT B-A)))
1959 or
1960 (set (REGX) (CONST_INT A))
1961 ...
1962 (set (STRICT_LOW_PART (REGX)) (CONST_INT B))
1963 */
1964
6132c0d0 1965 if (CONST_INT_P (src)
1966 && reg_base_reg[regno] < 0
1967 && reg_symbol_ref[regno] == NULL_RTX)
8f8cadbc 1968 {
d83ccc81 1969 changed |= move2add_use_add2_insn (reg, NULL_RTX, src, insn);
8f8cadbc 1970 continue;
1971 }
1972
1973 /* Try to transform (set (REGX) (REGY))
1974 (set (REGX) (PLUS (REGX) (CONST_INT A)))
1975 ...
1976 (set (REGX) (REGY))
1977 (set (REGX) (PLUS (REGX) (CONST_INT B)))
1978 to
1979 (set (REGX) (REGY))
1980 (set (REGX) (PLUS (REGX) (CONST_INT A)))
1981 ...
1982 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
8ad4c111 1983 else if (REG_P (src)
8f8cadbc 1984 && reg_set_luid[regno] == reg_set_luid[REGNO (src)]
1985 && reg_base_reg[regno] == reg_base_reg[REGNO (src)]
b6b86e87 1986 && move2add_valid_value_p (REGNO (src), GET_MODE (reg)))
8f8cadbc 1987 {
5b8537a8 1988 rtx next = next_nonnote_nondebug_insn (insn);
8f8cadbc 1989 rtx set = NULL_RTX;
1990 if (next)
1991 set = single_set (next);
1992 if (set
1993 && SET_DEST (set) == reg
1994 && GET_CODE (SET_SRC (set)) == PLUS
1995 && XEXP (SET_SRC (set), 0) == reg
971ba038 1996 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
8f8cadbc 1997 {
1998 rtx src3 = XEXP (SET_SRC (set), 1);
60141df0 1999 unsigned HOST_WIDE_INT added_offset = UINTVAL (src3);
8f8cadbc 2000 HOST_WIDE_INT base_offset = reg_offset[REGNO (src)];
2001 HOST_WIDE_INT regno_offset = reg_offset[regno];
2002 rtx new_src =
69e41517 2003 gen_int_mode (added_offset
2004 + base_offset
2005 - regno_offset,
2006 GET_MODE (reg));
f529eb25 2007 bool success = false;
2008 bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
8f8cadbc 2009
2010 if (new_src == const0_rtx)
2011 /* See above why we create (set (reg) (reg)) here. */
2012 success
2013 = validate_change (next, &SET_SRC (set), reg, 0);
c9a03487 2014 else
8f8cadbc 2015 {
c9a03487 2016 rtx old_src = SET_SRC (set);
2017 struct full_rtx_costs oldcst, newcst;
2018 rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src);
2019
b72d459f 2020 get_full_set_rtx_cost (set, &oldcst);
c9a03487 2021 SET_SRC (set) = tem;
7013e87c 2022 get_full_set_src_cost (tem, &newcst);
c9a03487 2023 SET_SRC (set) = old_src;
2024 costs_add_n_insns (&oldcst, 1);
2025
2026 if (costs_lt_p (&newcst, &oldcst, speed)
2027 && have_add2_insn (reg, new_src))
2028 {
2029 rtx newpat = gen_rtx_SET (VOIDmode, reg, tem);
2030 success
2031 = validate_change (next, &PATTERN (next),
2032 newpat, 0);
2033 }
8f8cadbc 2034 }
2035 if (success)
2036 delete_insn (insn);
d83ccc81 2037 changed |= success;
8f8cadbc 2038 insn = next;
b6b86e87 2039 move2add_record_mode (reg);
2040 reg_offset[regno]
2041 = trunc_int_for_mode (added_offset + base_offset,
2042 GET_MODE (reg));
8f8cadbc 2043 continue;
2044 }
2045 }
2046 }
6132c0d0 2047
2048 /* Try to transform
2049 (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
2050 ...
2051 (set (REGY) (CONST (PLUS (SYMBOL_REF) (CONST_INT B))))
2052 to
2053 (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
2054 ...
2055 (set (REGY) (CONST (PLUS (REGX) (CONST_INT B-A)))) */
2056 if ((GET_CODE (src) == SYMBOL_REF
2057 || (GET_CODE (src) == CONST
2058 && GET_CODE (XEXP (src, 0)) == PLUS
2059 && GET_CODE (XEXP (XEXP (src, 0), 0)) == SYMBOL_REF
2060 && CONST_INT_P (XEXP (XEXP (src, 0), 1))))
2061 && dbg_cnt (cse2_move2add))
2062 {
2063 rtx sym, off;
2064
2065 if (GET_CODE (src) == SYMBOL_REF)
2066 {
2067 sym = src;
2068 off = const0_rtx;
2069 }
2070 else
2071 {
2072 sym = XEXP (XEXP (src, 0), 0);
2073 off = XEXP (XEXP (src, 0), 1);
2074 }
2075
2076 /* If the reg already contains the value which is sum of
2077 sym and some constant value, we can use an add2 insn. */
b6b86e87 2078 if (move2add_valid_value_p (regno, GET_MODE (reg))
6132c0d0 2079 && reg_base_reg[regno] < 0
2080 && reg_symbol_ref[regno] != NULL_RTX
2081 && rtx_equal_p (sym, reg_symbol_ref[regno]))
d83ccc81 2082 changed |= move2add_use_add2_insn (reg, sym, off, insn);
6132c0d0 2083
2084 /* Otherwise, we have to find a register whose value is sum
2085 of sym and some constant value. */
2086 else
d83ccc81 2087 changed |= move2add_use_add3_insn (reg, sym, off, insn);
6132c0d0 2088
2089 continue;
2090 }
8f8cadbc 2091 }
2092
2093 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
2094 {
2095 if (REG_NOTE_KIND (note) == REG_INC
8ad4c111 2096 && REG_P (XEXP (note, 0)))
8f8cadbc 2097 {
2098 /* Reset the information about this register. */
2099 int regno = REGNO (XEXP (note, 0));
2100 if (regno < FIRST_PSEUDO_REGISTER)
b6b86e87 2101 {
2102 move2add_record_mode (XEXP (note, 0));
2103 reg_mode[regno] = VOIDmode;
2104 }
8f8cadbc 2105 }
2106 }
6132c0d0 2107 note_stores (PATTERN (insn), move2add_note_store, insn);
8f8cadbc 2108
2109 /* If INSN is a conditional branch, we try to extract an
2110 implicit set out of it. */
f222bc3b 2111 if (any_condjump_p (insn))
8f8cadbc 2112 {
2113 rtx cnd = fis_get_condition (insn);
2114
2115 if (cnd != NULL_RTX
2116 && GET_CODE (cnd) == NE
8ad4c111 2117 && REG_P (XEXP (cnd, 0))
f222bc3b 2118 && !reg_set_p (XEXP (cnd, 0), insn)
8f8cadbc 2119 /* The following two checks, which are also in
2120 move2add_note_store, are intended to reduce the
2121 number of calls to gen_rtx_SET to avoid memory
2122 allocation if possible. */
2123 && SCALAR_INT_MODE_P (GET_MODE (XEXP (cnd, 0)))
67d6c12b 2124 && hard_regno_nregs[REGNO (XEXP (cnd, 0))][GET_MODE (XEXP (cnd, 0))] == 1
971ba038 2125 && CONST_INT_P (XEXP (cnd, 1)))
8f8cadbc 2126 {
2127 rtx implicit_set =
2128 gen_rtx_SET (VOIDmode, XEXP (cnd, 0), XEXP (cnd, 1));
6132c0d0 2129 move2add_note_store (SET_DEST (implicit_set), implicit_set, insn);
8f8cadbc 2130 }
2131 }
2132
2133 /* If this is a CALL_INSN, all call used registers are stored with
2134 unknown values. */
6d7dc5b9 2135 if (CALL_P (insn))
8f8cadbc 2136 {
2137 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
2138 {
2139 if (call_used_regs[i])
2140 /* Reset the information about this register. */
b6b86e87 2141 reg_mode[i] = VOIDmode;
8f8cadbc 2142 }
2143 }
2144 }
d83ccc81 2145 return changed;
8f8cadbc 2146}
2147
6132c0d0 2148/* SET is a SET or CLOBBER that sets DST. DATA is the insn which
2149 contains SET.
8f8cadbc 2150 Update reg_set_luid, reg_offset and reg_base_reg accordingly.
2151 Called from reload_cse_move2add via note_stores. */
2152
2153static void
6132c0d0 2154move2add_note_store (rtx dst, const_rtx set, void *data)
8f8cadbc 2155{
6132c0d0 2156 rtx insn = (rtx) data;
8f8cadbc 2157 unsigned int regno = 0;
8f8cadbc 2158 enum machine_mode mode = GET_MODE (dst);
2159
8f8cadbc 2160 /* Some targets do argument pushes without adding REG_INC notes. */
2161
e16ceb8e 2162 if (MEM_P (dst))
8f8cadbc 2163 {
2164 dst = XEXP (dst, 0);
2165 if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
2166 || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC)
b6b86e87 2167 reg_mode[REGNO (XEXP (dst, 0))] = VOIDmode;
8f8cadbc 2168 return;
2169 }
8f8cadbc 2170
b6b86e87 2171 if (GET_CODE (dst) == SUBREG)
2172 regno = subreg_regno (dst);
2173 else if (REG_P (dst))
2174 regno = REGNO (dst);
2175 else
2176 return;
8f8cadbc 2177
b6b86e87 2178 if (SCALAR_INT_MODE_P (mode)
2179 && GET_CODE (set) == SET)
6132c0d0 2180 {
2181 rtx note, sym = NULL_RTX;
b6b86e87 2182 rtx off;
6132c0d0 2183
2184 note = find_reg_equal_equiv_note (insn);
2185 if (note && GET_CODE (XEXP (note, 0)) == SYMBOL_REF)
2186 {
2187 sym = XEXP (note, 0);
b6b86e87 2188 off = const0_rtx;
6132c0d0 2189 }
2190 else if (note && GET_CODE (XEXP (note, 0)) == CONST
2191 && GET_CODE (XEXP (XEXP (note, 0), 0)) == PLUS
2192 && GET_CODE (XEXP (XEXP (XEXP (note, 0), 0), 0)) == SYMBOL_REF
2193 && CONST_INT_P (XEXP (XEXP (XEXP (note, 0), 0), 1)))
2194 {
2195 sym = XEXP (XEXP (XEXP (note, 0), 0), 0);
b6b86e87 2196 off = XEXP (XEXP (XEXP (note, 0), 0), 1);
6132c0d0 2197 }
2198
2199 if (sym != NULL_RTX)
2200 {
b6b86e87 2201 move2add_record_sym_value (dst, sym, off);
6132c0d0 2202 return;
2203 }
2204 }
2205
b6b86e87 2206 if (SCALAR_INT_MODE_P (mode)
2207 && GET_CODE (set) == SET
8f8cadbc 2208 && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
8f8cadbc 2209 && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
2210 {
2211 rtx src = SET_SRC (set);
2212 rtx base_reg;
60141df0 2213 unsigned HOST_WIDE_INT offset;
8f8cadbc 2214 int base_regno;
8f8cadbc 2215
2216 switch (GET_CODE (src))
2217 {
2218 case PLUS:
8ad4c111 2219 if (REG_P (XEXP (src, 0)))
8f8cadbc 2220 {
2221 base_reg = XEXP (src, 0);
2222
971ba038 2223 if (CONST_INT_P (XEXP (src, 1)))
60141df0 2224 offset = UINTVAL (XEXP (src, 1));
8ad4c111 2225 else if (REG_P (XEXP (src, 1))
b6b86e87 2226 && move2add_valid_value_p (REGNO (XEXP (src, 1)), mode))
8f8cadbc 2227 {
c389f975 2228 if (reg_base_reg[REGNO (XEXP (src, 1))] < 0
2229 && reg_symbol_ref[REGNO (XEXP (src, 1))] == NULL_RTX)
8f8cadbc 2230 offset = reg_offset[REGNO (XEXP (src, 1))];
2231 /* Maybe the first register is known to be a
2232 constant. */
b6b86e87 2233 else if (move2add_valid_value_p (REGNO (base_reg), mode)
c389f975 2234 && reg_base_reg[REGNO (base_reg)] < 0
2235 && reg_symbol_ref[REGNO (base_reg)] == NULL_RTX)
8f8cadbc 2236 {
2237 offset = reg_offset[REGNO (base_reg)];
2238 base_reg = XEXP (src, 1);
2239 }
2240 else
2241 goto invalidate;
2242 }
2243 else
2244 goto invalidate;
2245
2246 break;
2247 }
2248
2249 goto invalidate;
2250
2251 case REG:
2252 base_reg = src;
2253 offset = 0;
2254 break;
2255
2256 case CONST_INT:
2257 /* Start tracking the register as a constant. */
2258 reg_base_reg[regno] = -1;
6132c0d0 2259 reg_symbol_ref[regno] = NULL_RTX;
8f8cadbc 2260 reg_offset[regno] = INTVAL (SET_SRC (set));
2261 /* We assign the same luid to all registers set to constants. */
2262 reg_set_luid[regno] = move2add_last_label_luid + 1;
b6b86e87 2263 move2add_record_mode (dst);
8f8cadbc 2264 return;
2265
2266 default:
b6b86e87 2267 goto invalidate;
8f8cadbc 2268 }
2269
2270 base_regno = REGNO (base_reg);
2271 /* If information about the base register is not valid, set it
2272 up as a new base register, pretending its value is known
2273 starting from the current insn. */
b6b86e87 2274 if (!move2add_valid_value_p (base_regno, mode))
8f8cadbc 2275 {
2276 reg_base_reg[base_regno] = base_regno;
6132c0d0 2277 reg_symbol_ref[base_regno] = NULL_RTX;
8f8cadbc 2278 reg_offset[base_regno] = 0;
2279 reg_set_luid[base_regno] = move2add_luid;
b6b86e87 2280 gcc_assert (GET_MODE (base_reg) == mode);
2281 move2add_record_mode (base_reg);
8f8cadbc 2282 }
8f8cadbc 2283
2284 /* Copy base information from our base register. */
2285 reg_set_luid[regno] = reg_set_luid[base_regno];
2286 reg_base_reg[regno] = reg_base_reg[base_regno];
6132c0d0 2287 reg_symbol_ref[regno] = reg_symbol_ref[base_regno];
8f8cadbc 2288
2289 /* Compute the sum of the offsets or constants. */
b6b86e87 2290 reg_offset[regno]
2291 = trunc_int_for_mode (offset + reg_offset[base_regno], mode);
2292
2293 move2add_record_mode (dst);
8f8cadbc 2294 }
2295 else
2296 {
b6b86e87 2297 invalidate:
2298 /* Invalidate the contents of the register. */
2299 move2add_record_mode (dst);
2300 reg_mode[regno] = VOIDmode;
8f8cadbc 2301 }
2302}
77fce4cd 2303\f
cbe8bda8 2304namespace {
2305
2306const pass_data pass_data_postreload_cse =
77fce4cd 2307{
cbe8bda8 2308 RTL_PASS, /* type */
2309 "postreload", /* name */
2310 OPTGROUP_NONE, /* optinfo_flags */
cbe8bda8 2311 true, /* has_execute */
2312 TV_RELOAD_CSE_REGS, /* tv_id */
2313 0, /* properties_required */
2314 0, /* properties_provided */
2315 0, /* properties_destroyed */
2316 0, /* todo_flags_start */
8b88439e 2317 TODO_df_finish, /* todo_flags_finish */
77fce4cd 2318};
cbe8bda8 2319
/* RTL pass wrapper around reload_cse_regs.  Gated on optimization
   being enabled and reload having completed, since the pass operates
   on hard registers only.  */
class pass_postreload_cse : public rtl_opt_pass
{
public:
  pass_postreload_cse (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_postreload_cse, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return (optimize > 0 && reload_completed); }

  virtual unsigned int execute (function *);

}; // class pass_postreload_cse
2333
65b0537f 2334unsigned int
2335pass_postreload_cse::execute (function *fun)
2336{
2337 if (!dbg_cnt (postreload_cse))
2338 return 0;
2339
2340 /* Do a very simple CSE pass over just the hard registers. */
2341 reload_cse_regs (get_insns ());
2342 /* Reload_cse_regs can eliminate potentially-trapping MEMs.
2343 Remove any EH edges associated with them. */
2344 if (fun->can_throw_non_call_exceptions
2345 && purge_all_dead_edges ())
2346 cleanup_cfg (0);
2347
2348 return 0;
2349}
2350
cbe8bda8 2351} // anon namespace
2352
2353rtl_opt_pass *
2354make_pass_postreload_cse (gcc::context *ctxt)
2355{
2356 return new pass_postreload_cse (ctxt);
2357}