]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/postreload.c
Add a "compact" mode to print_rtx_function
[thirdparty/gcc.git] / gcc / postreload.c
CommitLineData
8f8cadbc 1/* Perform simple optimizations to clean up the result of reload.
f1717362 2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
8f8cadbc 3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
8f8cadbc 9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
8f8cadbc 19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
9ef16211 23#include "backend.h"
7c29e30e 24#include "target.h"
8f8cadbc 25#include "rtl.h"
7c29e30e 26#include "tree.h"
27#include "predict.h"
9ef16211 28#include "df.h"
8f8cadbc 29#include "tm_p.h"
7c29e30e 30#include "optabs.h"
31#include "regs.h"
32#include "emit-rtl.h"
33#include "recog.h"
7c29e30e 34
94ea8568 35#include "cfgrtl.h"
36#include "cfgbuild.h"
37#include "cfgcleanup.h"
8f8cadbc 38#include "reload.h"
8f8cadbc 39#include "cselib.h"
77fce4cd 40#include "tree-pass.h"
3072d30e 41#include "dbgcnt.h"
8f8cadbc 42
5fe18e78 43#ifndef LOAD_EXTEND_OP
44#define LOAD_EXTEND_OP(M) UNKNOWN
45#endif
46
3ad4992f 47static int reload_cse_noop_set_p (rtx);
3aeaa53f 48static bool reload_cse_simplify (rtx_insn *, rtx);
26709122 49static void reload_cse_regs_1 (void);
3aeaa53f 50static int reload_cse_simplify_set (rtx, rtx_insn *);
51static int reload_cse_simplify_operands (rtx_insn *, rtx);
8f8cadbc 52
3ad4992f 53static void reload_combine (void);
3aeaa53f 54static void reload_combine_note_use (rtx *, rtx_insn *, int, rtx);
81a410b1 55static void reload_combine_note_store (rtx, const_rtx, void *);
8f8cadbc 56
3aeaa53f 57static bool reload_cse_move2add (rtx_insn *);
81a410b1 58static void move2add_note_store (rtx, const_rtx, void *);
8f8cadbc 59
60/* Call cse / combine like post-reload optimization phases.
61 FIRST is the first instruction. */
98799adc 62
63static void
3aeaa53f 64reload_cse_regs (rtx_insn *first ATTRIBUTE_UNUSED)
8f8cadbc 65{
d83ccc81 66 bool moves_converted;
26709122 67 reload_cse_regs_1 ();
8f8cadbc 68 reload_combine ();
d83ccc81 69 moves_converted = reload_cse_move2add (first);
8f8cadbc 70 if (flag_expensive_optimizations)
d83ccc81 71 {
72 if (moves_converted)
73 reload_combine ();
26709122 74 reload_cse_regs_1 ();
d83ccc81 75 }
8f8cadbc 76}
77
78/* See whether a single set SET is a noop. */
79static int
3ad4992f 80reload_cse_noop_set_p (rtx set)
8f8cadbc 81{
82 if (cselib_reg_set_mode (SET_DEST (set)) != GET_MODE (SET_DEST (set)))
83 return 0;
84
85 return rtx_equal_for_cselib_p (SET_DEST (set), SET_SRC (set));
86}
87
/* Try to simplify INSN.  Return true if the CFG may have changed.
   TESTREG is a scratch register rtx used by the operand-replacement
   subroutine.  CFG changes are detected by comparing the successor
   edge count of INSN's block before and after simplification, since
   deleting an insn may remove edges.  */
static bool
reload_cse_simplify (rtx_insn *insn, rtx testreg)
{
  rtx body = PATTERN (insn);
  basic_block insn_bb = BLOCK_FOR_INSN (insn);
  /* Snapshot the successor count so we can report CFG changes.  */
  unsigned insn_bb_succs = EDGE_COUNT (insn_bb->succs);

  if (GET_CODE (body) == SET)
    {
      /* COUNT is the number of queued replacements made by the
	 simplification subroutines.  */
      int count = 0;

      /* Simplify even if we may think it is a no-op.
	 We may think a memory load of a value smaller than WORD_SIZE
	 is redundant because we haven't taken into account possible
	 implicit extension.  reload_cse_simplify_set() will bring
	 this out, so it's safer to simplify before we delete.  */
      count += reload_cse_simplify_set (body, insn);

      if (!count && reload_cse_noop_set_p (body))
	{
	  /* check_for_inc_dec must succeed before we may delete, as the
	     insn may carry REG_INC side effects to preserve.  */
	  if (check_for_inc_dec (insn))
	    delete_insn_and_edges (insn);
	  /* We're done with this insn.  */
	  goto done;
	}

      if (count > 0)
	apply_change_group ();
      else
	reload_cse_simplify_operands (insn, testreg);
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      int count = 0;
      rtx value = NULL_RTX;

      /* Registers mentioned in the clobber list for an asm cannot be reused
	 within the body of the asm.  Invalidate those registers now so that
	 we don't try to substitute values for them.  */
      if (asm_noperands (body) >= 0)
	{
	  for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	    {
	      rtx part = XVECEXP (body, 0, i);
	      if (GET_CODE (part) == CLOBBER && REG_P (XEXP (part, 0)))
		cselib_invalidate_rtx (XEXP (part, 0));
	    }
	}

      /* If every action in a PARALLEL is a noop, we can delete
	 the entire PARALLEL.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	{
	  rtx part = XVECEXP (body, 0, i);
	  if (GET_CODE (part) == SET)
	    {
	      if (! reload_cse_noop_set_p (part))
		break;
	      /* At most one set of a function-value register is
		 tolerated; remember it in VALUE.  */
	      if (REG_P (SET_DEST (part))
		  && REG_FUNCTION_VALUE_P (SET_DEST (part)))
		{
		  if (value)
		    break;
		  value = SET_DEST (part);
		}
	    }
	  else if (GET_CODE (part) != CLOBBER
		   && GET_CODE (part) != USE)
	    break;
	}

      /* I < 0 means the scan above ran to completion: every element
	 was a no-op set, CLOBBER or USE, so the insn can go.  */
      if (i < 0)
	{
	  if (check_for_inc_dec (insn))
	    delete_insn_and_edges (insn);
	  /* We're done with this insn.  */
	  goto done;
	}

      /* It's not a no-op, but we can try to simplify it.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	if (GET_CODE (XVECEXP (body, 0, i)) == SET)
	  count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);

      if (count > 0)
	apply_change_group ();
      else
	reload_cse_simplify_operands (insn, testreg);
    }

done:
  /* A changed successor count means an edge was removed/added, i.e.
     the CFG changed and the caller must run cleanup_cfg.  */
  return (EDGE_COUNT (insn_bb->succs) != insn_bb_succs);
}
183
184/* Do a very simple CSE pass over the hard registers.
185
186 This function detects no-op moves where we happened to assign two
187 different pseudo-registers to the same hard register, and then
188 copied one to the other. Reload will generate a useless
189 instruction copying a register to itself.
190
191 This function also detects cases where we load a value from memory
192 into two different registers, and (if memory is more expensive than
193 registers) changes it to simply copy the first register into the
194 second register.
195
196 Another optimization is performed that scans the operands of each
197 instruction to see whether the value is already available in a
198 hard register. It then replaces the operand with the hard register
199 if possible, much like an optional reload would. */
200
static void
reload_cse_regs_1 (void)
{
  bool cfg_changed = false;
  basic_block bb;
  rtx_insn *insn;
  /* Scratch REG rtx handed to reload_cse_simplify_operands, where it is
     retargeted (mode/regno) to test candidate hard registers against
     constraint alternatives.  Numbered past the virtual registers so it
     cannot collide with a real operand.  */
  rtx testreg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);

  /* Value tracking (including memory) and alias info are needed by the
     per-insn simplifications below.  */
  cselib_init (CSELIB_RECORD_MEMORY);
  init_alias_analysis ();

  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
      {
	if (INSN_P (insn))
	  cfg_changed |= reload_cse_simplify (insn, testreg);

	/* Feed every insn (even non-INSN_P ones) to cselib so its
	   value table stays in sync with the instruction stream.  */
	cselib_process_insn (insn);
      }

  /* Clean up.  */
  end_alias_analysis ();
  cselib_finish ();
  if (cfg_changed)
    cleanup_cfg (0);
}
227
/* Try to simplify a single SET instruction.  SET is the set pattern.
   INSN is the instruction it came from.
   This function only handles one case: if we set a register to a value
   which is not a register, we try to find that value in some other register
   and change the set into a register copy.
   Returns nonzero if a replacement was queued (via validate_change with
   in_group=1); the caller is responsible for apply_change_group.  */

static int
reload_cse_simplify_set (rtx set, rtx_insn *insn)
{
  int did_change = 0;
  int dreg;
  rtx src;
  reg_class_t dclass;
  int old_cost;
  cselib_val *val;
  struct elt_loc_list *l;
  enum rtx_code extend_op = UNKNOWN;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));

  dreg = true_regnum (SET_DEST (set));
  if (dreg < 0)
    return 0;

  /* Give up if the source has side effects or is itself (essentially)
     a register - there is nothing to gain from a reg->reg rewrite.  */
  src = SET_SRC (set);
  if (side_effects_p (src) || true_regnum (src) >= 0)
    return 0;

  dclass = REGNO_REG_CLASS (dreg);

  /* When replacing a memory with a register, we need to honor assumptions
     that combine made wrt the contents of sign bits.  We'll do this by
     generating an extend instruction instead of a reg->reg copy.  Thus
     the destination must be a register that we can widen.  */
  if (MEM_P (src)
      && GET_MODE_BITSIZE (GET_MODE (src)) < BITS_PER_WORD
      && (extend_op = LOAD_EXTEND_OP (GET_MODE (src))) != UNKNOWN
      && !REG_P (SET_DEST (set)))
    return 0;

  val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0, VOIDmode);
  if (! val)
    return 0;

  /* If memory loads are cheaper than register copies, don't change them.  */
  if (MEM_P (src))
    old_cost = memory_move_cost (GET_MODE (src), dclass, true);
  else if (REG_P (src))
    old_cost = register_move_cost (GET_MODE (src),
				   REGNO_REG_CLASS (REGNO (src)), dclass);
  else
    old_cost = set_src_cost (src, GET_MODE (SET_DEST (set)), speed);

  /* Walk every known location of the value and pick any that beats
     OLD_COST; later wins keep lowering OLD_COST.  */
  for (l = val->locs; l; l = l->next)
    {
      rtx this_rtx = l->loc;
      int this_cost;

      if (CONSTANT_P (this_rtx) && ! references_value_p (this_rtx, 0))
	{
	  if (extend_op != UNKNOWN)
	    {
	      wide_int result;

	      /* Only integral constants can model the implicit
		 load-extension; skip anything else.  */
	      if (!CONST_SCALAR_INT_P (this_rtx))
		continue;

	      /* Pre-compute the extended constant so the cost compare
		 below sees what would really be substituted.  */
	      switch (extend_op)
		{
		case ZERO_EXTEND:
		  result = wide_int::from (std::make_pair (this_rtx,
							   GET_MODE (src)),
					   BITS_PER_WORD, UNSIGNED);
		  break;
		case SIGN_EXTEND:
		  result = wide_int::from (std::make_pair (this_rtx,
							   GET_MODE (src)),
					   BITS_PER_WORD, SIGNED);
		  break;
		default:
		  gcc_unreachable ();
		}
	      this_rtx = immed_wide_int_const (result, word_mode);
	    }

	  this_cost = set_src_cost (this_rtx, GET_MODE (SET_DEST (set)), speed);
	}
      else if (REG_P (this_rtx))
	{
	  if (extend_op != UNKNOWN)
	    {
	      this_rtx = gen_rtx_fmt_e (extend_op, word_mode, this_rtx);
	      this_cost = set_src_cost (this_rtx, word_mode, speed);
	    }
	  else
	    this_cost = register_move_cost (GET_MODE (this_rtx),
					    REGNO_REG_CLASS (REGNO (this_rtx)),
					    dclass);
	}
      else
	continue;

      /* If equal costs, prefer registers over anything else.  That
	 tends to lead to smaller instructions on some machines.  */
      if (this_cost < old_cost
	  || (this_cost == old_cost
	      && REG_P (this_rtx)
	      && !REG_P (SET_SRC (set))))
	{
	  /* When an extension is involved the destination must be widened
	     to word_mode as well, provided the target allows the mode
	     change for its register class.  */
	  if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) < BITS_PER_WORD
	      && extend_op != UNKNOWN
#ifdef CANNOT_CHANGE_MODE_CLASS
	      && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
					    word_mode,
					    REGNO_REG_CLASS (REGNO (SET_DEST (set))))
#endif
	      )
	    {
	      rtx wide_dest = gen_rtx_REG (word_mode, REGNO (SET_DEST (set)));
	      ORIGINAL_REGNO (wide_dest) = ORIGINAL_REGNO (SET_DEST (set));
	      validate_change (insn, &SET_DEST (set), wide_dest, 1);
	    }

	  /* Queue the source replacement; changes are applied as a group
	     by the caller.  */
	  validate_unshare_change (insn, &SET_SRC (set), this_rtx, 1);
	  old_cost = this_cost, did_change = 1;
	}
    }

  return did_change;
}
357
/* Try to replace operands in INSN with equivalent values that are already
   in registers.  This can be viewed as optional reloading.

   For each non-register operand in the insn, see if any hard regs are
   known to be equivalent to that operand.  Record the alternatives which
   can accept these hard registers.  Among all alternatives, select the
   ones which are better or equal to the one currently matching, where
   "better" is in terms of '?' and '!' constraints.  Among the remaining
   alternatives, select the one which replaces most operands with
   hard registers.

   TESTREG is a scratch REG rtx whose mode/regno are rewritten in place
   to probe constraint classes.  Returns the result of
   apply_change_group, or 0 when nothing could be done.  */

static int
reload_cse_simplify_operands (rtx_insn *insn, rtx testreg)
{
  int i, j;

  /* For each operand, all registers that are equivalent to it.  */
  HARD_REG_SET equiv_regs[MAX_RECOG_OPERANDS];

  const char *constraints[MAX_RECOG_OPERANDS];

  /* Vector recording how bad an alternative is.  */
  int *alternative_reject;
  /* Vector recording how many registers can be introduced by choosing
     this alternative.  */
  int *alternative_nregs;
  /* Array of vectors recording, for each operand and each alternative,
     which hard register to substitute, or -1 if the operand should be
     left as it is.  */
  int *op_alt_regno[MAX_RECOG_OPERANDS];
  /* Array of alternatives, sorted in order of decreasing desirability.  */
  int *alternative_order;

  extract_constrain_insn (insn);

  if (recog_data.n_alternatives == 0 || recog_data.n_operands == 0)
    return 0;

  alternative_reject = XALLOCAVEC (int, recog_data.n_alternatives);
  alternative_nregs = XALLOCAVEC (int, recog_data.n_alternatives);
  alternative_order = XALLOCAVEC (int, recog_data.n_alternatives);
  memset (alternative_reject, 0, recog_data.n_alternatives * sizeof (int));
  memset (alternative_nregs, 0, recog_data.n_alternatives * sizeof (int));

  /* For each operand, find out which regs are equivalent.  */
  for (i = 0; i < recog_data.n_operands; i++)
    {
      cselib_val *v;
      struct elt_loc_list *l;
      rtx op;

      CLEAR_HARD_REG_SET (equiv_regs[i]);

      /* cselib blows up on CODE_LABELs.  Trying to fix that doesn't seem
	 right, so avoid the problem here.  Likewise if we have a constant
	 and the insn pattern doesn't tell us the mode we need.  */
      if (LABEL_P (recog_data.operand[i])
	  || (CONSTANT_P (recog_data.operand[i])
	      && recog_data.operand_mode[i] == VOIDmode))
	continue;

      /* Sub-word memory operands may carry an implicit load-extension
	 on this target; handle (or punt on) those cases first.  */
      op = recog_data.operand[i];
      if (MEM_P (op)
	  && GET_MODE_BITSIZE (GET_MODE (op)) < BITS_PER_WORD
	  && LOAD_EXTEND_OP (GET_MODE (op)) != UNKNOWN)
	{
	  rtx set = single_set (insn);

	  /* We might have multiple sets, some of which do implicit
	     extension.  Punt on this for now.  */
	  if (! set)
	    continue;
	  /* If the destination is also a MEM or a STRICT_LOW_PART, no
	     extension applies.
	     Also, if there is an explicit extension, we don't have to
	     worry about an implicit one.  */
	  else if (MEM_P (SET_DEST (set))
		   || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART
		   || GET_CODE (SET_SRC (set)) == ZERO_EXTEND
		   || GET_CODE (SET_SRC (set)) == SIGN_EXTEND)
	    ; /* Continue ordinary processing.  */
#ifdef CANNOT_CHANGE_MODE_CLASS
	  /* If the register cannot change mode to word_mode, it follows that
	     it cannot have been used in word_mode.  */
	  else if (REG_P (SET_DEST (set))
		   && CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
						word_mode,
						REGNO_REG_CLASS (REGNO (SET_DEST (set)))))
	    ; /* Continue ordinary processing.  */
#endif
	  /* If this is a straight load, make the extension explicit.  */
	  else if (REG_P (SET_DEST (set))
		   && recog_data.n_operands == 2
		   && SET_SRC (set) == op
		   && SET_DEST (set) == recog_data.operand[1-i])
	    {
	      validate_change (insn, recog_data.operand_loc[i],
			       gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (op)),
					      word_mode, op),
			       1);
	      validate_change (insn, recog_data.operand_loc[1-i],
			       gen_rtx_REG (word_mode, REGNO (SET_DEST (set))),
			       1);
	      if (! apply_change_group ())
		return 0;
	      /* Re-run on the rewritten insn; the recursion terminates
		 because the load is now an explicit extension.  */
	      return reload_cse_simplify_operands (insn, testreg);
	    }
	  else
	    /* ??? There might be arithmetic operations with memory that are
	       safe to optimize, but is it worth the trouble?  */
	    continue;
	}

      if (side_effects_p (op))
	continue;
      v = cselib_lookup (op, recog_data.operand_mode[i], 0, VOIDmode);
      if (! v)
	continue;

      for (l = v->locs; l; l = l->next)
	if (REG_P (l->loc))
	  SET_HARD_REG_BIT (equiv_regs[i], REGNO (l->loc));
    }

  alternative_mask preferred = get_preferred_alternatives (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      machine_mode mode;
      int regno;
      const char *p;

      op_alt_regno[i] = XALLOCAVEC (int, recog_data.n_alternatives);
      for (j = 0; j < recog_data.n_alternatives; j++)
	op_alt_regno[i][j] = -1;

      p = constraints[i] = recog_data.constraints[i];
      mode = recog_data.operand_mode[i];

      /* Add the reject values for each alternative given by the constraints
	 for this operand.  '?' adds a small penalty (3), '!' a large
	 one (300).  */
      j = 0;
      while (*p != '\0')
	{
	  char c = *p++;
	  if (c == ',')
	    j++;
	  else if (c == '?')
	    alternative_reject[j] += 3;
	  else if (c == '!')
	    alternative_reject[j] += 300;
	}

      /* We won't change operands which are already registers.  We
	 also don't want to modify output operands.  */
      regno = true_regnum (recog_data.operand[i]);
      if (regno >= 0
	  || constraints[i][0] == '='
	  || constraints[i][0] == '+')
	continue;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  enum reg_class rclass = NO_REGS;

	  if (! TEST_HARD_REG_BIT (equiv_regs[i], regno))
	    continue;

	  /* Retarget the scratch register to the candidate so
	     reg_fits_class_p can be queried below.  */
	  set_mode_and_regno (testreg, mode, regno);

	  /* We found a register equal to this operand.  Now look for all
	     alternatives that can accept this register and have not been
	     assigned a register they can use yet.  */
	  j = 0;
	  p = constraints[i];
	  for (;;)
	    {
	      char c = *p;

	      /* Accumulate the union register class for the current
		 alternative until ',' or NUL closes it.  */
	      switch (c)
		{
		case 'g':
		  rclass = reg_class_subunion[rclass][GENERAL_REGS];
		  break;

		default:
		  rclass
		    = (reg_class_subunion
		       [rclass]
		       [reg_class_for_constraint (lookup_constraint (p))]);
		  break;

		case ',': case '\0':
		  /* See if REGNO fits this alternative, and set it up as the
		     replacement register if we don't have one for this
		     alternative yet and the operand being replaced is not
		     a cheap CONST_INT.  */
		  if (op_alt_regno[i][j] == -1
		      && TEST_BIT (preferred, j)
		      && reg_fits_class_p (testreg, rclass, 0, mode)
		      && (!CONST_INT_P (recog_data.operand[i])
			  || (set_src_cost (recog_data.operand[i], mode,
					    optimize_bb_for_speed_p
					     (BLOCK_FOR_INSN (insn)))
			      > set_src_cost (testreg, mode,
					      optimize_bb_for_speed_p
					       (BLOCK_FOR_INSN (insn))))))
		    {
		      alternative_nregs[j]++;
		      op_alt_regno[i][j] = regno;
		    }
		  j++;
		  rclass = NO_REGS;
		  break;
		}
	      p += CONSTRAINT_LEN (c, p);

	      if (c == '\0')
		break;
	    }
	}
    }

  /* Record all alternatives which are better or equal to the currently
     matching one in the alternative_order array.  */
  for (i = j = 0; i < recog_data.n_alternatives; i++)
    if (alternative_reject[i] <= alternative_reject[which_alternative])
      alternative_order[j++] = i;
  recog_data.n_alternatives = j;

  /* Sort it.  Given a small number of alternatives, a dumb algorithm
     won't hurt too much.  (Selection sort: lowest reject first, ties
     broken by the larger register-introduction count.)  */
  for (i = 0; i < recog_data.n_alternatives - 1; i++)
    {
      int best = i;
      int best_reject = alternative_reject[alternative_order[i]];
      int best_nregs = alternative_nregs[alternative_order[i]];

      for (j = i + 1; j < recog_data.n_alternatives; j++)
	{
	  int this_reject = alternative_reject[alternative_order[j]];
	  int this_nregs = alternative_nregs[alternative_order[j]];

	  if (this_reject < best_reject
	      || (this_reject == best_reject && this_nregs > best_nregs))
	    {
	      best = j;
	      best_reject = this_reject;
	      best_nregs = this_nregs;
	    }
	}

      std::swap (alternative_order[best], alternative_order[i]);
    }

  /* Substitute the operands as determined by op_alt_regno for the best
     alternative.  */
  j = alternative_order[0];

  for (i = 0; i < recog_data.n_operands; i++)
    {
      machine_mode mode = recog_data.operand_mode[i];
      if (op_alt_regno[i][j] == -1)
	continue;

      validate_change (insn, recog_data.operand_loc[i],
		       gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
    }

  /* Duplicate operands must be kept in sync with the operand they
     mirror.  */
  for (i = recog_data.n_dups - 1; i >= 0; i--)
    {
      int op = recog_data.dup_num[i];
      machine_mode mode = recog_data.operand_mode[op];

      if (op_alt_regno[op][j] == -1)
	continue;

      validate_change (insn, recog_data.dup_loc[i],
		       gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
    }

  return apply_change_group ();
}
640\f
641/* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
642 addressing now.
643 This code might also be useful when reload gave up on reg+reg addressing
644 because of clashes between the return register and INDEX_REG_CLASS. */
645
646/* The maximum number of uses of a register we can keep track of to
647 replace them with reg+reg addressing. */
d83ccc81 648#define RELOAD_COMBINE_MAX_USES 16
8f8cadbc 649
/* Describes a recorded use of a register.  */
struct reg_use
{
  /* The insn where a register has been used.  */
  rtx_insn *insn;
  /* Points to the memory reference enclosing the use, if any, NULL_RTX
     otherwise.  */
  rtx containing_mem;
  /* Location of the register within INSN.  */
  rtx *usep;
  /* The reverse uid of the insn (see reload_combine_ruid: it grows as
     insns are scanned from last to first).  */
  int ruid;
};
8f8cadbc 663
664/* If the register is used in some unknown fashion, USE_INDEX is negative.
665 If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
d83ccc81 666 indicates where it is first set or clobbered.
8f8cadbc 667 Otherwise, USE_INDEX is the index of the last encountered use of the
d83ccc81 668 register (which is first among these we have seen since we scan backwards).
669 USE_RUID indicates the first encountered, i.e. last, of these uses.
670 If ALL_OFFSETS_MATCH is true, all encountered uses were inside a PLUS
671 with a constant offset; OFFSET contains this constant in that case.
8f8cadbc 672 STORE_RUID is always meaningful if we only want to use a value in a
673 register in a different place: it denotes the next insn in the insn
d83ccc81 674 stream (i.e. the last encountered) that sets or clobbers the register.
675 REAL_STORE_RUID is similar, but clobbers are ignored when updating it. */
8f8cadbc 676static struct
677 {
678 struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
8f8cadbc 679 rtx offset;
d83ccc81 680 int use_index;
8f8cadbc 681 int store_ruid;
d83ccc81 682 int real_store_ruid;
8f8cadbc 683 int use_ruid;
d83ccc81 684 bool all_offsets_match;
8f8cadbc 685 } reg_state[FIRST_PSEUDO_REGISTER];
686
687/* Reverse linear uid. This is increased in reload_combine while scanning
688 the instructions from last to first. It is used to set last_label_ruid
689 and the store_ruid / use_ruid fields in reg_state. */
690static int reload_combine_ruid;
691
fb79f695 692/* The RUID of the last label we encountered in reload_combine. */
693static int last_label_ruid;
694
d83ccc81 695/* The RUID of the last jump we encountered in reload_combine. */
696static int last_jump_ruid;
697
fb79f695 698/* The register numbers of the first and last index register. A value of
699 -1 in LAST_INDEX_REG indicates that we've previously computed these
700 values and found no suitable index registers. */
701static int first_index_reg = -1;
702static int last_index_reg;
703
8f8cadbc 704#define LABEL_LIVE(LABEL) \
705 (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])
706
/* Subroutine of reload_combine_split_ruids, called to fix up a single
   ruid pointed to by *PRUID if it is higher than SPLIT_RUID.  */

static inline void
reload_combine_split_one_ruid (int *pruid, int split_ruid)
{
  /* Ruids at or below the split point are unaffected.  */
  if (*pruid <= split_ruid)
    return;
  ++*pruid;
}
716
/* Called when we insert a new insn in a position we've already passed in
   the scan.  Examine all our state, increasing all ruids that are higher
   than SPLIT_RUID by one in order to make room for a new insn.  */

static void
reload_combine_split_ruids (int split_ruid)
{
  unsigned i;

  /* Global ruid counters first.  */
  reload_combine_split_one_ruid (&reload_combine_ruid, split_ruid);
  reload_combine_split_one_ruid (&last_label_ruid, split_ruid);
  reload_combine_split_one_ruid (&last_jump_ruid, split_ruid);

  /* Then every per-register ruid, including each recorded use.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int j, idx = reg_state[i].use_index;
      reload_combine_split_one_ruid (&reg_state[i].use_ruid, split_ruid);
      reload_combine_split_one_ruid (&reg_state[i].store_ruid, split_ruid);
      reload_combine_split_one_ruid (&reg_state[i].real_store_ruid,
				     split_ruid);
      /* A negative use_index means no recorded uses to adjust.  */
      if (idx < 0)
	continue;
      /* Valid use entries live at indices [use_index,
	 RELOAD_COMBINE_MAX_USES).  */
      for (j = idx; j < RELOAD_COMBINE_MAX_USES; j++)
	{
	  reload_combine_split_one_ruid (&reg_state[i].reg_use[j].ruid,
					 split_ruid);
	}
    }
}
746
/* Called when we are about to rescan a previously encountered insn with
   reload_combine_note_use after modifying some part of it.  This clears all
   information about uses in that particular insn.  */

static void
reload_combine_purge_insn_uses (rtx_insn *insn)
{
  unsigned i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int j, k, idx = reg_state[i].use_index;
      /* Negative use_index: nothing recorded for this register.  */
      if (idx < 0)
	continue;
      /* Compact the use array in place.  Entries live in
	 [use_index, RELOAD_COMBINE_MAX_USES); walk J downward over
	 them while K tracks the new bottom, keeping only entries
	 that do not belong to INSN.  */
      j = k = RELOAD_COMBINE_MAX_USES;
      while (j-- > idx)
	{
	  if (reg_state[i].reg_use[j].insn != insn)
	    {
	      k--;
	      if (k != j)
		reg_state[i].reg_use[k] = reg_state[i].reg_use[j];
	    }
	}
      reg_state[i].use_index = k;
    }
}
774
/* Called when we need to forget about all uses of REGNO after an insn
   which is identified by RUID.  (Ruids grow toward the start of the insn
   stream, so uses "after" the insn are those with a smaller ruid.)  */

static void
reload_combine_purge_reg_uses_after_ruid (unsigned regno, int ruid)
{
  int j, k, idx = reg_state[regno].use_index;
  if (idx < 0)
    return;
  /* Same in-place compaction scheme as reload_combine_purge_insn_uses:
     keep only entries whose ruid is at or above RUID.  */
  j = k = RELOAD_COMBINE_MAX_USES;
  while (j-- > idx)
    {
      if (reg_state[regno].reg_use[j].ruid >= ruid)
	{
	  k--;
	  if (k != j)
	    reg_state[regno].reg_use[k] = reg_state[regno].reg_use[j];
	}
    }
  reg_state[regno].use_index = k;
}
796
/* Find the use of REGNO with the ruid that is highest among those
   lower than RUID_LIMIT, and return it if it is the only use of this
   reg in the insn.  Return NULL otherwise.  */

static struct reg_use *
reload_combine_closest_single_use (unsigned regno, int ruid_limit)
{
  int i, best_ruid = 0;
  int use_idx = reg_state[regno].use_index;
  struct reg_use *retval;

  if (use_idx < 0)
    return NULL;
  retval = NULL;
  for (i = use_idx; i < RELOAD_COMBINE_MAX_USES; i++)
    {
      struct reg_use *use = reg_state[regno].reg_use + i;
      int this_ruid = use->ruid;
      if (this_ruid >= ruid_limit)
	continue;
      if (this_ruid > best_ruid)
	{
	  best_ruid = this_ruid;
	  retval = use;
	}
      /* Two entries with equal ruids come from the same insn, so the
	 use is not unique there - reject the candidate.  */
      else if (this_ruid == best_ruid)
	retval = NULL;
    }
  /* Don't return a use separated from the limit by a label.  */
  if (last_label_ruid >= best_ruid)
    return NULL;
  return retval;
}
829
/* After we've moved an add insn, fix up any debug insns that occur
   between the old location of the add and the new location.  REG is
   the destination register of the add insn; REPLACEMENT is the
   SET_SRC of the add.  FROM and TO specify the range in which we
   should make this change on debug insns.  */

static void
fixup_debug_insns (rtx reg, rtx replacement, rtx_insn *from, rtx_insn *to)
{
  rtx_insn *insn;
  /* TO is excluded from the walk.  */
  for (insn = from; insn != to; insn = NEXT_INSN (insn))
    {
      rtx t;

      if (!DEBUG_INSN_P (insn))
	continue;

      /* Substitute REPLACEMENT for REG in the debug insn's location
	 expression, simplifying the result where possible.  */
      t = INSN_VAR_LOCATION_LOC (insn);
      t = simplify_replace_rtx (t, reg, replacement);
      validate_change (insn, &INSN_VAR_LOCATION_LOC (insn), t, 0);
    }
}
852
/* Subroutine of reload_combine_recognize_const_pattern.  Try to replace REG
   with SRC in the insn described by USE, taking costs into account.  Return
   true if we made the replacement.  */

static bool
try_replace_in_use (struct reg_use *use, rtx reg, rtx src)
{
  rtx_insn *use_insn = use->insn;
  rtx mem = use->containing_mem;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (use_insn));

  if (mem != NULL_RTX)
    {
      /* The use is inside a memory address: try rewriting the address.  */
      addr_space_t as = MEM_ADDR_SPACE (mem);
      rtx oldaddr = XEXP (mem, 0);
      rtx newaddr = NULL_RTX;
      int old_cost = address_cost (oldaddr, GET_MODE (mem), as, speed);
      int new_cost;

      newaddr = simplify_replace_rtx (oldaddr, reg, src);
      if (memory_address_addr_space_p (GET_MODE (mem), newaddr, as))
	{
	  /* Temporarily splice in the new address just to cost it,
	     then restore the original before the real validate_change.  */
	  XEXP (mem, 0) = newaddr;
	  new_cost = address_cost (newaddr, GET_MODE (mem), as, speed);
	  XEXP (mem, 0) = oldaddr;
	  if (new_cost <= old_cost
	      && validate_change (use_insn,
				  &XEXP (mem, 0), newaddr, 0))
	    return true;
	}
    }
  else
    {
      /* Otherwise the use must be in another reg = reg + const add;
	 fold SRC into that addition if it is not more expensive.  */
      rtx new_set = single_set (use_insn);
      if (new_set
	  && REG_P (SET_DEST (new_set))
	  && GET_CODE (SET_SRC (new_set)) == PLUS
	  && REG_P (XEXP (SET_SRC (new_set), 0))
	  && CONSTANT_P (XEXP (SET_SRC (new_set), 1)))
	{
	  rtx new_src;
	  machine_mode mode = GET_MODE (SET_DEST (new_set));
	  int old_cost = set_src_cost (SET_SRC (new_set), mode, speed);

	  gcc_assert (rtx_equal_p (XEXP (SET_SRC (new_set), 0), reg));
	  new_src = simplify_replace_rtx (SET_SRC (new_set), reg, src);

	  if (set_src_cost (new_src, mode, speed) <= old_cost
	      && validate_change (use_insn, &SET_SRC (new_set),
				  new_src, 0))
	    return true;
	}
    }
  return false;
}
908
/* Called by reload_combine when scanning INSN.  This function tries to detect
   patterns where a constant is added to a register, and the result is used
   in an address.
   Return true if no further processing is needed on INSN; false if it wasn't
   recognized and should be handled normally.  */

static bool
reload_combine_recognize_const_pattern (rtx_insn *insn)
{
  int from_ruid = reload_combine_ruid;
  rtx set, pat, reg, src, addreg;
  unsigned int regno;
  struct reg_use *use;
  bool must_move_add;
  rtx_insn *add_moved_after_insn = NULL;
  int add_moved_after_ruid = 0;
  int clobbered_regno = -1;

  set = single_set (insn);
  if (set == NULL_RTX)
    return false;

  /* Only single-word Pmode registers are candidates; the stack pointer
     is excluded outright.  */
  reg = SET_DEST (set);
  src = SET_SRC (set);
  if (!REG_P (reg)
      || REG_NREGS (reg) != 1
      || GET_MODE (reg) != Pmode
      || reg == stack_pointer_rtx)
    return false;

  regno = REGNO (reg);

  /* We look for a REG1 = REG2 + CONSTANT insn, followed by either
     uses of REG1 inside an address, or inside another add insn.  If
     possible and profitable, merge the addition into subsequent
     uses.  */
  if (GET_CODE (src) != PLUS
      || !REG_P (XEXP (src, 0))
      || !CONSTANT_P (XEXP (src, 1)))
    return false;

  addreg = XEXP (src, 0);
  /* If REG1 == REG2 (i.e. REG += CONSTANT), the add cannot simply stay
     where it is when a use is rewritten; it must be moved after the
     last use we merge into.  */
  must_move_add = rtx_equal_p (reg, addreg);

  pat = PATTERN (insn);
  if (must_move_add && set != pat)
    {
      /* We have to be careful when moving the add; apart from the
	 single_set there may also be clobbers.  Recognize one special
	 case, that of one clobber alongside the set (likely a clobber
	 of the CC register).  */
      gcc_assert (GET_CODE (PATTERN (insn)) == PARALLEL);
      if (XVECLEN (pat, 0) != 2 || XVECEXP (pat, 0, 0) != set
	  || GET_CODE (XVECEXP (pat, 0, 1)) != CLOBBER
	  || !REG_P (XEXP (XVECEXP (pat, 0, 1), 0)))
	return false;
      clobbered_regno = REGNO (XEXP (XVECEXP (pat, 0, 1), 0));
    }

  /* Walk the recorded uses of REGNO from the most recent backwards,
     merging the addition into each address use where legal.  */
  do
    {
      use = reload_combine_closest_single_use (regno, from_ruid);

      if (use)
	/* Start the search for the next use from here.  */
	from_ruid = use->ruid;

      if (use && GET_MODE (*use->usep) == Pmode)
	{
	  bool delete_add = false;
	  rtx_insn *use_insn = use->insn;
	  int use_ruid = use->ruid;

	  /* Avoid moving the add insn past a jump.  */
	  if (must_move_add && use_ruid <= last_jump_ruid)
	    break;

	  /* If the add clobbers another hard reg in parallel, don't move
	     it past a real set of this hard reg.  */
	  if (must_move_add && clobbered_regno >= 0
	      && reg_state[clobbered_regno].real_store_ruid >= use_ruid)
	    break;

	  /* Do not separate cc0 setter and cc0 user on HAVE_cc0 targets.  */
	  if (HAVE_cc0 && must_move_add && sets_cc0_p (PATTERN (use_insn)))
	    break;

	  gcc_assert (reg_state[regno].store_ruid <= use_ruid);
	  /* Avoid moving a use of ADDREG past a point where it is stored.  */
	  if (reg_state[REGNO (addreg)].store_ruid > use_ruid)
	    break;

	  /* We also must not move the addition past an insn that sets
	     the same register, unless we can combine two add insns.  */
	  if (must_move_add && reg_state[regno].store_ruid == use_ruid)
	    {
	      if (use->containing_mem == NULL_RTX)
		delete_add = true;
	      else
		break;
	    }

	  if (try_replace_in_use (use, reg, src))
	    {
	      /* The substitution changed USE_INSN; rescan it so the use
		 records reflect its new pattern.  */
	      reload_combine_purge_insn_uses (use_insn);
	      reload_combine_note_use (&PATTERN (use_insn), use_insn,
				       use_ruid, NULL_RTX);

	      if (delete_add)
		{
		  fixup_debug_insns (reg, src, insn, use_insn);
		  delete_insn (insn);
		  return true;
		}
	      if (must_move_add)
		{
		  add_moved_after_insn = use_insn;
		  add_moved_after_ruid = use_ruid;
		}
	      continue;
	    }
	}
      /* If we get here, we couldn't handle this use.  */
      if (must_move_add)
	break;
    }
  while (use);

  if (!must_move_add || add_moved_after_insn == NULL_RTX)
    /* Process the add normally.  */
    return false;

  fixup_debug_insns (reg, src, insn, add_moved_after_insn);

  /* Move the add after the last merged use and update the bookkeeping
     so later scanning sees the store at its new position.  */
  reorder_insns (insn, insn, add_moved_after_insn);
  reload_combine_purge_reg_uses_after_ruid (regno, add_moved_after_ruid);
  reload_combine_split_ruids (add_moved_after_ruid - 1);
  reload_combine_note_use (&PATTERN (insn), insn,
			   add_moved_after_ruid, NULL_RTX);
  reg_state[regno].store_ruid = add_moved_after_ruid;

  return true;
}
1052
/* Called by reload_combine when scanning INSN.  Try to detect a pattern we
   can handle and improve.  Return true if no further processing is needed on
   INSN; false if it wasn't recognized and should be handled normally.  */

static bool
reload_combine_recognize_pattern (rtx_insn *insn)
{
  rtx set, reg, src;

  set = single_set (insn);
  if (set == NULL_RTX)
    return false;

  reg = SET_DEST (set);
  src = SET_SRC (set);
  if (!REG_P (reg) || REG_NREGS (reg) != 1)
    return false;

  unsigned int regno = REGNO (reg);
  machine_mode mode = GET_MODE (reg);

  /* use_index < 0 means the register is used in an unknown fashion;
     use_index == RELOAD_COMBINE_MAX_USES means no uses recorded yet.  */
  if (reg_state[regno].use_index < 0
      || reg_state[regno].use_index >= RELOAD_COMBINE_MAX_USES)
    return false;

  /* All recorded uses must be in REG's own mode, otherwise substituting
     a register+register sum for them would be invalid.  */
  for (int i = reg_state[regno].use_index;
       i < RELOAD_COMBINE_MAX_USES; i++)
    {
      struct reg_use *use = reg_state[regno].reg_use + i;
      if (GET_MODE (*use->usep) != mode)
	return false;
    }

  /* Look for (set (REGX) (CONST_INT))
     (set (REGX) (PLUS (REGX) (REGY)))
     ...
     ... (MEM (REGX)) ...
     and convert it to
     (set (REGZ) (CONST_INT))
     ...
     ... (MEM (PLUS (REGZ) (REGY)))... .

     First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
     and that we know all uses of REGX before it dies.
     Also, explicitly check that REGX != REGY; our life information
     does not yet show whether REGY changes in this insn.  */

  if (GET_CODE (src) == PLUS
      && reg_state[regno].all_offsets_match
      && last_index_reg != -1
      && REG_P (XEXP (src, 1))
      && rtx_equal_p (XEXP (src, 0), reg)
      && !rtx_equal_p (XEXP (src, 1), reg)
      && last_label_ruid < reg_state[regno].use_ruid)
    {
      rtx base = XEXP (src, 1);
      rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
      rtx prev_set = prev ? single_set (prev) : NULL_RTX;
      rtx index_reg = NULL_RTX;
      rtx reg_sum = NULL_RTX;
      int i;

      /* Now we need to set INDEX_REG to an index register (denoted as
	 REGZ in the illustration above) and REG_SUM to the expression
	 register+register that we want to use to substitute uses of REG
	 (typically in MEMs) with.  First check REG and BASE for being
	 index registers; we can use them even if they are not dead.  */
      if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
	  || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
				REGNO (base)))
	{
	  index_reg = reg;
	  reg_sum = src;
	}
      else
	{
	  /* Otherwise, look for a free index register.  Since we have
	     checked above that neither REG nor BASE are index registers,
	     if we find anything at all, it will be different from these
	     two registers.  */
	  for (i = first_index_reg; i <= last_index_reg; i++)
	    {
	      if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i)
		  && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
		  && reg_state[i].store_ruid <= reg_state[regno].use_ruid
		  && (call_used_regs[i] || df_regs_ever_live_p (i))
		  && (!frame_pointer_needed || i != HARD_FRAME_POINTER_REGNUM)
		  && !fixed_regs[i] && !global_regs[i]
		  && hard_regno_nregs[i][GET_MODE (reg)] == 1
		  && targetm.hard_regno_scratch_ok (i))
		{
		  index_reg = gen_rtx_REG (GET_MODE (reg), i);
		  reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
		  break;
		}
	    }
	}

      /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
	 (REGY), i.e. BASE, is not clobbered before the last use we'll
	 create.  */
      if (reg_sum
	  && prev_set
	  && CONST_INT_P (SET_SRC (prev_set))
	  && rtx_equal_p (SET_DEST (prev_set), reg)
	  && (reg_state[REGNO (base)].store_ruid
	      <= reg_state[regno].use_ruid))
	{
	  /* Change destination register and, if necessary, the constant
	     value in PREV, the constant loading instruction.  All changes
	     are queued (in_group == 1) and committed atomically by
	     apply_change_group below.  */
	  validate_change (prev, &SET_DEST (prev_set), index_reg, 1);
	  if (reg_state[regno].offset != const0_rtx)
	    validate_change (prev,
			     &SET_SRC (prev_set),
			     GEN_INT (INTVAL (SET_SRC (prev_set))
				      + INTVAL (reg_state[regno].offset)),
			     1);

	  /* Now for every use of REG that we have recorded, replace REG
	     with REG_SUM.  */
	  for (i = reg_state[regno].use_index;
	       i < RELOAD_COMBINE_MAX_USES; i++)
	    validate_unshare_change (reg_state[regno].reg_use[i].insn,
				     reg_state[regno].reg_use[i].usep,
				     /* Each change must have its own
					replacement.  */
				     reg_sum, 1);

	  if (apply_change_group ())
	    {
	      struct reg_use *lowest_ruid = NULL;

	      /* For every new use of REG_SUM, we have to record the use
		 of BASE therein, i.e. operand 1.  */
	      for (i = reg_state[regno].use_index;
		   i < RELOAD_COMBINE_MAX_USES; i++)
		{
		  struct reg_use *use = reg_state[regno].reg_use + i;
		  reload_combine_note_use (&XEXP (*use->usep, 1), use->insn,
					   use->ruid, use->containing_mem);
		  if (lowest_ruid == NULL || use->ruid < lowest_ruid->ruid)
		    lowest_ruid = use;
		}

	      fixup_debug_insns (reg, reg_sum, insn, lowest_ruid->insn);

	      /* Delete the reg-reg addition.  */
	      delete_insn (insn);

	      if (reg_state[regno].offset != const0_rtx)
		/* Previous REG_EQUIV / REG_EQUAL notes for PREV
		   are now invalid.  */
		remove_reg_equal_equiv_notes (prev);

	      reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
	      return true;
	    }
	}
    }
  return false;
}
1214
/* Main entry point of the reload_combine optimization.  Scan all insns
   from last to first, recording register uses, and try to combine
   register+constant additions into the addresses that use them (see
   reload_combine_recognize_const_pattern and
   reload_combine_recognize_pattern).  Labels, barriers, volatile insns,
   calls and jumps invalidate the collected use information as
   appropriate.  */

static void
reload_combine (void)
{
  rtx_insn *insn, *prev;
  basic_block bb;
  unsigned int r;
  int min_labelno, n_labels;
  HARD_REG_SET ever_live_at_start, *label_live;

  /* To avoid wasting too much time later searching for an index register,
     determine the minimum and maximum index register numbers.
     first_index_reg == -1 && last_index_reg == 0 is the "not yet
     computed" state; the result is cached across invocations.  */
  if (INDEX_REG_CLASS == NO_REGS)
    last_index_reg = -1;
  else if (first_index_reg == -1 && last_index_reg == 0)
    {
      for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], r))
	  {
	    if (first_index_reg == -1)
	      first_index_reg = r;

	    last_index_reg = r;
	  }

      /* If no index register is available, we can quit now.  Set LAST_INDEX_REG
	 to -1 so we'll know to quit early the next time we get here.  */
      if (first_index_reg == -1)
	{
	  last_index_reg = -1;
	  return;
	}
    }

  /* Set up LABEL_LIVE and EVER_LIVE_AT_START.  The register lifetime
     information is a bit fuzzy immediately after reload, but it's
     still good enough to determine which registers are live at a jump
     destination.  */
  min_labelno = get_first_label_num ();
  n_labels = max_label_num () - min_labelno;
  label_live = XNEWVEC (HARD_REG_SET, n_labels);
  CLEAR_HARD_REG_SET (ever_live_at_start);

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      insn = BB_HEAD (bb);
      if (LABEL_P (insn))
	{
	  HARD_REG_SET live;
	  bitmap live_in = df_get_live_in (bb);

	  REG_SET_TO_HARD_REG_SET (live, live_in);
	  compute_use_by_pseudos (&live, live_in);
	  COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
	  IOR_HARD_REG_SET (ever_live_at_start, live);
	}
    }

  /* Initialize last_label_ruid, reload_combine_ruid and reg_state.  */
  last_label_ruid = last_jump_ruid = reload_combine_ruid = 0;
  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
    {
      reg_state[r].store_ruid = 0;
      reg_state[r].real_store_ruid = 0;
      if (fixed_regs[r])
	reg_state[r].use_index = -1;
      else
	reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
    }

  /* Backward scan; PREV is fetched up front because INSN may be
     deleted or reordered by the recognizers below.  */
  for (insn = get_last_insn (); insn; insn = prev)
    {
      bool control_flow_insn;
      rtx note;

      prev = PREV_INSN (insn);

      /* We cannot do our optimization across labels.  Invalidating all the use
	 information we have would be costly, so we just note where the label
	 is and then later disable any optimization that would cross it.  */
      if (LABEL_P (insn))
	last_label_ruid = reload_combine_ruid;
      else if (BARRIER_P (insn))
	{
	  /* Crossing a barrier resets all the use information.  */
	  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	    if (! fixed_regs[r])
	      reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
	}
      else if (INSN_P (insn) && volatile_insn_p (PATTERN (insn)))
	/* Optimizations across insns being marked as volatile must be
	   prevented.  All the usage information is invalidated
	   here.  */
	for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	  if (! fixed_regs[r]
	      && reg_state[r].use_index != RELOAD_COMBINE_MAX_USES)
	    reg_state[r].use_index = -1;

      if (! NONDEBUG_INSN_P (insn))
	continue;

      reload_combine_ruid++;

      control_flow_insn = control_flow_insn_p (insn);
      if (control_flow_insn)
	last_jump_ruid = reload_combine_ruid;

      if (reload_combine_recognize_const_pattern (insn)
	  || reload_combine_recognize_pattern (insn))
	continue;

      note_stores (PATTERN (insn), reload_combine_note_store, NULL);

      if (CALL_P (insn))
	{
	  rtx link;
	  HARD_REG_SET used_regs;

	  get_call_reg_set_usage (insn, &used_regs, call_used_reg_set);

	  /* Registers clobbered by the call lose all recorded uses.  */
	  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	    if (TEST_HARD_REG_BIT (used_regs, r))
	      {
		reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
		reg_state[r].store_ruid = reload_combine_ruid;
	      }

	  for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
	       link = XEXP (link, 1))
	    {
	      rtx setuse = XEXP (link, 0);
	      rtx usage_rtx = XEXP (setuse, 0);
	      if ((GET_CODE (setuse) == USE || GET_CODE (setuse) == CLOBBER)
		  && REG_P (usage_rtx))
		{
		  unsigned int end_regno = END_REGNO (usage_rtx);
		  for (unsigned int i = REGNO (usage_rtx); i < end_regno; ++i)
		    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
		      {
			reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
			reg_state[i].store_ruid = reload_combine_ruid;
		      }
		    else
		      reg_state[i].use_index = -1;
		 }
	    }
	}

      if (control_flow_insn && !ANY_RETURN_P (PATTERN (insn)))
	{
	  /* Non-spill registers might be used at the call destination in
	     some unknown fashion, so we have to mark the unknown use.  */
	  HARD_REG_SET *live;

	  if ((condjump_p (insn) || condjump_in_parallel_p (insn))
	      && JUMP_LABEL (insn))
	    {
	      if (ANY_RETURN_P (JUMP_LABEL (insn)))
		live = NULL;
	      else
		live = &LABEL_LIVE (JUMP_LABEL (insn));
	    }
	  else
	    live = &ever_live_at_start;

	  if (live)
	    for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	      if (TEST_HARD_REG_BIT (*live, r))
		reg_state[r].use_index = -1;
	}

      reload_combine_note_use (&PATTERN (insn), insn, reload_combine_ruid,
			       NULL_RTX);

      /* An auto-increment side effect both stores to and invalidates
	 the register; REG_INC notes record those.  */
      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	{
	  if (REG_NOTE_KIND (note) == REG_INC && REG_P (XEXP (note, 0)))
	    {
	      int regno = REGNO (XEXP (note, 0));
	      reg_state[regno].store_ruid = reload_combine_ruid;
	      reg_state[regno].real_store_ruid = reload_combine_ruid;
	      reg_state[regno].use_index = -1;
	    }
	}
    }

  free (label_live);
}
1402
/* Check if DST is a register or a subreg of a register; if it is,
   update store_ruid, real_store_ruid and use_index in the reg_state
   structure accordingly.  Called via note_stores from reload_combine.
   SET is the SET or CLOBBER rtx containing DST; DATA is unused.  */

static void
reload_combine_note_store (rtx dst, const_rtx set, void *data ATTRIBUTE_UNUSED)
{
  int regno = 0;
  int i;
  machine_mode mode = GET_MODE (dst);

  if (GET_CODE (dst) == SUBREG)
    {
      /* Record the hard-register offset of the subreg; the base regno
	 is added in below once DST has been stripped.  */
      regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
				   GET_MODE (SUBREG_REG (dst)),
				   SUBREG_BYTE (dst),
				   GET_MODE (dst));
      dst = SUBREG_REG (dst);
    }

  /* Some targets do argument pushes without adding REG_INC notes.  */

  if (MEM_P (dst))
    {
      dst = XEXP (dst, 0);
      if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
	  || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC
	  || GET_CODE (dst) == PRE_MODIFY || GET_CODE (dst) == POST_MODIFY)
	{
	  unsigned int end_regno = END_REGNO (XEXP (dst, 0));
	  for (unsigned int i = REGNO (XEXP (dst, 0)); i < end_regno; ++i)
	    {
	      /* We could probably do better, but for now mark the register
		 as used in an unknown fashion and set/clobbered at this
		 insn.  */
	      reg_state[i].use_index = -1;
	      reg_state[i].store_ruid = reload_combine_ruid;
	      reg_state[i].real_store_ruid = reload_combine_ruid;
	    }
	}
      else
	return;
    }

  if (!REG_P (dst))
    return;
  regno += REGNO (dst);

  /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
     careful with registers / register parts that are not full words.
     Similarly for ZERO_EXTRACT.  */
  if (GET_CODE (SET_DEST (set)) == ZERO_EXTRACT
      || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART)
    {
      /* Partial stores leave the old value partly live, so the register
	 becomes used in an unknown fashion (use_index == -1).  */
      for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
	{
	  reg_state[i].use_index = -1;
	  reg_state[i].store_ruid = reload_combine_ruid;
	  reg_state[i].real_store_ruid = reload_combine_ruid;
	}
    }
  else
    {
      /* A full store kills all previously recorded uses.  real_store_ruid
	 is only advanced for a true SET, not for a CLOBBER.  */
      for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
	{
	  reg_state[i].store_ruid = reload_combine_ruid;
	  if (GET_CODE (set) == SET)
	    reg_state[i].real_store_ruid = reload_combine_ruid;
	  reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
	}
    }
}
1475
1476/* XP points to a piece of rtl that has to be checked for any uses of
1477 registers.
1478 *XP is the pattern of INSN, or a part of it.
1479 Called from reload_combine, and recursively by itself. */
1480static void
3aeaa53f 1481reload_combine_note_use (rtx *xp, rtx_insn *insn, int ruid, rtx containing_mem)
8f8cadbc 1482{
1483 rtx x = *xp;
1484 enum rtx_code code = x->code;
1485 const char *fmt;
1486 int i, j;
1487 rtx offset = const0_rtx; /* For the REG case below. */
1488
1489 switch (code)
1490 {
1491 case SET:
8ad4c111 1492 if (REG_P (SET_DEST (x)))
8f8cadbc 1493 {
d83ccc81 1494 reload_combine_note_use (&SET_SRC (x), insn, ruid, NULL_RTX);
8f8cadbc 1495 return;
1496 }
1497 break;
1498
1499 case USE:
1500 /* If this is the USE of a return value, we can't change it. */
8ad4c111 1501 if (REG_P (XEXP (x, 0)) && REG_FUNCTION_VALUE_P (XEXP (x, 0)))
8f8cadbc 1502 {
6a298741 1503 /* Mark the return register as used in an unknown fashion. */
8f8cadbc 1504 rtx reg = XEXP (x, 0);
6a298741 1505 unsigned int end_regno = END_REGNO (reg);
1506 for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
1507 reg_state[regno].use_index = -1;
8f8cadbc 1508 return;
1509 }
1510 break;
1511
1512 case CLOBBER:
8ad4c111 1513 if (REG_P (SET_DEST (x)))
8f8cadbc 1514 {
1515 /* No spurious CLOBBERs of pseudo registers may remain. */
876760f6 1516 gcc_assert (REGNO (SET_DEST (x)) < FIRST_PSEUDO_REGISTER);
8f8cadbc 1517 return;
1518 }
1519 break;
1520
1521 case PLUS:
1522 /* We are interested in (plus (reg) (const_int)) . */
8ad4c111 1523 if (!REG_P (XEXP (x, 0))
971ba038 1524 || !CONST_INT_P (XEXP (x, 1)))
8f8cadbc 1525 break;
1526 offset = XEXP (x, 1);
1527 x = XEXP (x, 0);
1528 /* Fall through. */
1529 case REG:
1530 {
1531 int regno = REGNO (x);
1532 int use_index;
1533 int nregs;
1534
1535 /* No spurious USEs of pseudo registers may remain. */
876760f6 1536 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
8f8cadbc 1537
0933f1d9 1538 nregs = REG_NREGS (x);
8f8cadbc 1539
1540 /* We can't substitute into multi-hard-reg uses. */
1541 if (nregs > 1)
1542 {
1543 while (--nregs >= 0)
1544 reg_state[regno + nregs].use_index = -1;
1545 return;
1546 }
1547
727047d0 1548 /* We may be called to update uses in previously seen insns.
1549 Don't add uses beyond the last store we saw. */
1550 if (ruid < reg_state[regno].store_ruid)
1551 return;
1552
8f8cadbc 1553 /* If this register is already used in some unknown fashion, we
1554 can't do anything.
1555 If we decrement the index from zero to -1, we can't store more
1556 uses, so this register becomes used in an unknown fashion. */
1557 use_index = --reg_state[regno].use_index;
1558 if (use_index < 0)
1559 return;
1560
d83ccc81 1561 if (use_index == RELOAD_COMBINE_MAX_USES - 1)
8f8cadbc 1562 {
1563 /* This is the first use of this register we have seen since we
1564 marked it as dead. */
1565 reg_state[regno].offset = offset;
d83ccc81 1566 reg_state[regno].all_offsets_match = true;
1567 reg_state[regno].use_ruid = ruid;
8f8cadbc 1568 }
0ead6a7d 1569 else
1570 {
1571 if (reg_state[regno].use_ruid > ruid)
1572 reg_state[regno].use_ruid = ruid;
1573
1574 if (! rtx_equal_p (offset, reg_state[regno].offset))
1575 reg_state[regno].all_offsets_match = false;
1576 }
d83ccc81 1577
8f8cadbc 1578 reg_state[regno].reg_use[use_index].insn = insn;
d83ccc81 1579 reg_state[regno].reg_use[use_index].ruid = ruid;
1580 reg_state[regno].reg_use[use_index].containing_mem = containing_mem;
8f8cadbc 1581 reg_state[regno].reg_use[use_index].usep = xp;
1582 return;
1583 }
1584
d83ccc81 1585 case MEM:
1586 containing_mem = x;
1587 break;
1588
8f8cadbc 1589 default:
1590 break;
1591 }
1592
1593 /* Recursively process the components of X. */
1594 fmt = GET_RTX_FORMAT (code);
1595 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1596 {
1597 if (fmt[i] == 'e')
d83ccc81 1598 reload_combine_note_use (&XEXP (x, i), insn, ruid, containing_mem);
8f8cadbc 1599 else if (fmt[i] == 'E')
1600 {
1601 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
d83ccc81 1602 reload_combine_note_use (&XVECEXP (x, i, j), insn, ruid,
1603 containing_mem);
8f8cadbc 1604 }
1605 }
1606}
1607\f
/* See if we can reduce the cost of a constant by replacing a move
   with an add.  We track situations in which a register is set to a
   constant or to a register plus a constant.  */
/* We cannot do our optimization across labels.  Invalidating all the
   information about register contents we have would be costly, so we
   use move2add_last_label_luid to note where the label is and then
   later disable any optimization that would cross it.
   reg_offset[n] / reg_base_reg[n] / reg_symbol_ref[n] / reg_mode[n]
   are only valid if reg_set_luid[n] is greater than
   move2add_last_label_luid.
   For a set that established a new (potential) base register with
   non-constant value, we use move2add_luid from the place where the
   setting insn is encountered; registers based off that base then
   get the same reg_set_luid.  Constants all get
   move2add_last_label_luid + 1 as their reg_set_luid.  */
static int reg_set_luid[FIRST_PSEUDO_REGISTER];

/* If reg_base_reg[n] is negative, register n has been set to
   reg_offset[n] or reg_symbol_ref[n] + reg_offset[n] in mode reg_mode[n].
   If reg_base_reg[n] is non-negative, register n has been set to the
   sum of reg_offset[n] and the value of register reg_base_reg[n]
   before reg_set_luid[n], calculated in mode reg_mode[n] .
   For multi-hard-register registers, all but the first one are
   recorded as BLKmode in reg_mode.  Setting reg_mode to VOIDmode
   marks it as invalid.  */
static HOST_WIDE_INT reg_offset[FIRST_PSEUDO_REGISTER];
static int reg_base_reg[FIRST_PSEUDO_REGISTER];
static rtx reg_symbol_ref[FIRST_PSEUDO_REGISTER];
static machine_mode reg_mode[FIRST_PSEUDO_REGISTER];

/* move2add_luid is linearly increased while scanning the instructions
   from first to last.  It is used to set reg_set_luid in
   reload_cse_move2add and move2add_note_store.  */
static int move2add_luid;

/* move2add_last_label_luid is set whenever a label is found.  Labels
   invalidate all previously collected reg_offset data.  */
static int move2add_last_label_luid;

/* ??? We don't know how zero / sign extension is handled, hence we
   can't go from a narrower to a wider mode.  */
#define MODES_OK_FOR_MOVE2ADD(OUTMODE, INMODE) \
  (GET_MODE_SIZE (OUTMODE) == GET_MODE_SIZE (INMODE) \
   || (GET_MODE_SIZE (OUTMODE) <= GET_MODE_SIZE (INMODE) \
       && TRULY_NOOP_TRUNCATION_MODES_P (OUTMODE, INMODE)))
8f8cadbc 1653
b6b86e87 1654/* Record that REG is being set to a value with the mode of REG. */
1655
1656static void
1657move2add_record_mode (rtx reg)
1658{
1659 int regno, nregs;
3754d046 1660 machine_mode mode = GET_MODE (reg);
b6b86e87 1661
1662 if (GET_CODE (reg) == SUBREG)
1663 {
1664 regno = subreg_regno (reg);
1665 nregs = subreg_nregs (reg);
1666 }
1667 else if (REG_P (reg))
1668 {
1669 regno = REGNO (reg);
0933f1d9 1670 nregs = REG_NREGS (reg);
b6b86e87 1671 }
1672 else
1673 gcc_unreachable ();
1674 for (int i = nregs - 1; i > 0; i--)
1675 reg_mode[regno + i] = BLKmode;
1676 reg_mode[regno] = mode;
1677}
1678
1679/* Record that REG is being set to the sum of SYM and OFF. */
1680
1681static void
1682move2add_record_sym_value (rtx reg, rtx sym, rtx off)
1683{
1684 int regno = REGNO (reg);
1685
1686 move2add_record_mode (reg);
1687 reg_set_luid[regno] = move2add_luid;
1688 reg_base_reg[regno] = -1;
1689 reg_symbol_ref[regno] = sym;
1690 reg_offset[regno] = INTVAL (off);
1691}
1692
/* Check if REGNO contains a valid value in MODE.  */

static bool
move2add_valid_value_p (int regno, machine_mode mode)
{
  /* Values recorded at or before the last label are invalid.  */
  if (reg_set_luid[regno] <= move2add_last_label_luid)
    return false;

  if (mode != reg_mode[regno])
    {
      if (!MODES_OK_FOR_MOVE2ADD (mode, reg_mode[regno]))
	return false;
      /* The value loaded into regno in reg_mode[regno] is also valid in
	 mode after truncation only if (REG:mode regno) is the lowpart of
	 (REG:reg_mode[regno] regno).  Now, for big endian, the starting
	 regno of the lowpart might be different.  */
      int s_off = subreg_lowpart_offset (mode, reg_mode[regno]);
      s_off = subreg_regno_offset (regno, reg_mode[regno], s_off, mode);
      if (s_off != 0)
	/* We could in principle adjust regno, check reg_mode[regno] to be
	   BLKmode, and return s_off to the caller (vs. -1 for failure),
	   but we currently have no callers that could make use of this
	   information.  */
	return false;
    }

  /* All trailing hard registers of a multi-register value must still be
     marked BLKmode, i.e. not overwritten independently since the
     recording (see move2add_record_mode).  */
  for (int i = hard_regno_nregs[regno][mode] - 1; i > 0; i--)
    if (reg_mode[regno + i] != BLKmode)
      return false;
  return true;
}
1724
/* This function is called with INSN that sets REG to (SYM + OFF),
   while REG is known to already have value (SYM + offset).
   This function tries to change INSN into an add instruction
   (set (REG) (plus (REG) (OFF - offset))) using the known value.
   It also updates the information about REG's known value.
   Return true if we made a change.  */

static bool
move2add_use_add2_insn (rtx reg, rtx sym, rtx off, rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  int regno = REGNO (reg);
  /* The delta between the desired and the known offset, in REG's mode.  */
  rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[regno],
			      GET_MODE (reg));
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  bool changed = false;

  /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
     use (set (reg) (reg)) instead.
     We don't delete this insn, nor do we convert it into a
     note, to avoid losing register notes or the return
     value flag.  jump2 already knows how to get rid of
     no-op moves.  */
  if (new_src == const0_rtx)
    {
      /* If the constants are different, this is a
	 truncation, that, if turned into (set (reg)
	 (reg)), would be discarded.  Maybe we should
	 try a truncMN pattern?  */
      if (INTVAL (off) == reg_offset [regno])
	changed = validate_change (insn, &SET_SRC (pat), reg, 0);
    }
  else
    {
      struct full_rtx_costs oldcst, newcst;
      rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src);

      /* Cost the add form by temporarily splicing it into PAT, then
	 restore the original source.  */
      get_full_set_rtx_cost (pat, &oldcst);
      SET_SRC (pat) = tem;
      get_full_set_rtx_cost (pat, &newcst);
      SET_SRC (pat) = src;

      if (costs_lt_p (&newcst, &oldcst, speed)
	  && have_add2_insn (reg, new_src))
	changed = validate_change (insn, &SET_SRC (pat), tem, 0);
      else if (sym == NULL_RTX && GET_MODE (reg) != BImode)
	{
	  /* The plain add is not profitable or not available; try a
	     STRICT_LOW_PART store of the constant in the narrowest
	     integer mode whose bits outside the mask already agree.  */
	  machine_mode narrow_mode;
	  for (narrow_mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       narrow_mode != VOIDmode
		 && narrow_mode != GET_MODE (reg);
	       narrow_mode = GET_MODE_WIDER_MODE (narrow_mode))
	    {
	      if (have_insn_for (STRICT_LOW_PART, narrow_mode)
		  && ((reg_offset[regno] & ~GET_MODE_MASK (narrow_mode))
		      == (INTVAL (off) & ~GET_MODE_MASK (narrow_mode))))
		{
		  rtx narrow_reg = gen_lowpart_common (narrow_mode, reg);
		  rtx narrow_src = gen_int_mode (INTVAL (off),
						 narrow_mode);
		  rtx new_set
		    = gen_rtx_SET (gen_rtx_STRICT_LOW_PART (VOIDmode,
							    narrow_reg),
				   narrow_src);
		  get_full_set_rtx_cost (new_set, &newcst);
		  if (costs_lt_p (&newcst, &oldcst, speed))
		    {
		      changed = validate_change (insn, &PATTERN (insn),
						 new_set, 0);
		      if (changed)
			break;
		    }
		}
	    }
	}
    }
  /* Whether or not INSN was changed, it still sets REG to SYM + OFF;
     record that.  */
  move2add_record_sym_value (reg, sym, off);
  return changed;
}
1805
1806
/* This function is called with INSN that sets REG to (SYM + OFF),
   but REG doesn't have known value (SYM + offset).  This function
   tries to find another register which is known to already have
   value (SYM + offset) and change INSN into an add instruction
   (set (REG) (plus (the found register) (OFF - offset))) if such
   a register is found.  It also updates the information about
   REG's known value.
   Return true iff we made a change.  */

static bool
move2add_use_add3_insn (rtx reg, rtx sym, rtx off, rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  int regno = REGNO (reg);
  int min_regno = 0;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  int i;
  bool changed = false;
  struct full_rtx_costs oldcst, newcst, mincst;
  rtx plus_expr;

  init_costs_to_max (&mincst);
  get_full_set_rtx_cost (pat, &oldcst);

  /* Temporarily splice a (plus reg 0) into PAT so each candidate delta
     can be costed by replacing just operand 1; SRC is restored below.  */
  plus_expr = gen_rtx_PLUS (GET_MODE (reg), reg, const0_rtx);
  SET_SRC (pat) = plus_expr;

  /* Scan all hard registers known to hold SYM plus some offset and pick
     the one giving the cheapest add.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (move2add_valid_value_p (i, GET_MODE (reg))
	&& reg_base_reg[i] < 0
	&& reg_symbol_ref[i] != NULL_RTX
	&& rtx_equal_p (sym, reg_symbol_ref[i]))
      {
	rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[i],
				    GET_MODE (reg));
	/* (set (reg) (plus (reg) (const_int 0))) is not canonical;
	   use (set (reg) (reg)) instead.
	   We don't delete this insn, nor do we convert it into a
	   note, to avoid losing register notes or the return
	   value flag.  jump2 already knows how to get rid of
	   no-op moves.  */
	if (new_src == const0_rtx)
	  {
	    init_costs_to_zero (&mincst);
	    min_regno = i;
	    break;
	  }
	else
	  {
	    XEXP (plus_expr, 1) = new_src;
	    get_full_set_rtx_cost (pat, &newcst);

	    if (costs_lt_p (&newcst, &mincst, speed))
	      {
		mincst = newcst;
		min_regno = i;
	      }
	  }
      }
  SET_SRC (pat) = src;

  if (costs_lt_p (&mincst, &oldcst, speed))
    {
      rtx tem;

      tem = gen_rtx_REG (GET_MODE (reg), min_regno);
      /* NOTE: I == MIN_REGNO only when the loop above exited via break,
	 i.e. the exact-match (zero-delta) case; otherwise I is
	 FIRST_PSEUDO_REGISTER here and an explicit add is built.  */
      if (i != min_regno)
	{
	  rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[min_regno],
				      GET_MODE (reg));
	  tem = gen_rtx_PLUS (GET_MODE (reg), tem, new_src);
	}
      if (validate_change (insn, &SET_SRC (pat), tem, 0))
	changed = true;
    }
  /* Record that REG now holds SYM + OFF regardless of whether INSN
     was rewritten.  */
  reg_set_luid[regno] = move2add_luid;
  move2add_record_sym_value (reg, sym, off);
  return changed;
}
1887
d83ccc81 1888/* Convert move insns with constant inputs to additions if they are cheaper.
1889 Return true if any changes were made. */
1890static bool
3aeaa53f 1891reload_cse_move2add (rtx_insn *first)
8f8cadbc 1892{
1893 int i;
3aeaa53f 1894 rtx_insn *insn;
d83ccc81 1895 bool changed = false;
8f8cadbc 1896
1897 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
6132c0d0 1898 {
1899 reg_set_luid[i] = 0;
1900 reg_offset[i] = 0;
1901 reg_base_reg[i] = 0;
1902 reg_symbol_ref[i] = NULL_RTX;
1903 reg_mode[i] = VOIDmode;
1904 }
8f8cadbc 1905
1906 move2add_last_label_luid = 0;
1907 move2add_luid = 2;
1908 for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
1909 {
1910 rtx pat, note;
1911
6d7dc5b9 1912 if (LABEL_P (insn))
8f8cadbc 1913 {
1914 move2add_last_label_luid = move2add_luid;
1915 /* We're going to increment move2add_luid twice after a
1916 label, so that we can use move2add_last_label_luid + 1 as
1917 the luid for constants. */
1918 move2add_luid++;
1919 continue;
1920 }
1921 if (! INSN_P (insn))
1922 continue;
1923 pat = PATTERN (insn);
1924 /* For simplicity, we only perform this optimization on
1925 straightforward SETs. */
1926 if (GET_CODE (pat) == SET
8ad4c111 1927 && REG_P (SET_DEST (pat)))
8f8cadbc 1928 {
1929 rtx reg = SET_DEST (pat);
1930 int regno = REGNO (reg);
1931 rtx src = SET_SRC (pat);
1932
1933 /* Check if we have valid information on the contents of this
1934 register in the mode of REG. */
b6b86e87 1935 if (move2add_valid_value_p (regno, GET_MODE (reg))
3072d30e 1936 && dbg_cnt (cse2_move2add))
8f8cadbc 1937 {
1938 /* Try to transform (set (REGX) (CONST_INT A))
1939 ...
1940 (set (REGX) (CONST_INT B))
1941 to
1942 (set (REGX) (CONST_INT A))
1943 ...
1944 (set (REGX) (plus (REGX) (CONST_INT B-A)))
1945 or
1946 (set (REGX) (CONST_INT A))
1947 ...
1948 (set (STRICT_LOW_PART (REGX)) (CONST_INT B))
1949 */
1950
6132c0d0 1951 if (CONST_INT_P (src)
1952 && reg_base_reg[regno] < 0
1953 && reg_symbol_ref[regno] == NULL_RTX)
8f8cadbc 1954 {
d83ccc81 1955 changed |= move2add_use_add2_insn (reg, NULL_RTX, src, insn);
8f8cadbc 1956 continue;
1957 }
1958
1959 /* Try to transform (set (REGX) (REGY))
1960 (set (REGX) (PLUS (REGX) (CONST_INT A)))
1961 ...
1962 (set (REGX) (REGY))
1963 (set (REGX) (PLUS (REGX) (CONST_INT B)))
1964 to
1965 (set (REGX) (REGY))
1966 (set (REGX) (PLUS (REGX) (CONST_INT A)))
1967 ...
1968 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
8ad4c111 1969 else if (REG_P (src)
8f8cadbc 1970 && reg_set_luid[regno] == reg_set_luid[REGNO (src)]
1971 && reg_base_reg[regno] == reg_base_reg[REGNO (src)]
b6b86e87 1972 && move2add_valid_value_p (REGNO (src), GET_MODE (reg)))
8f8cadbc 1973 {
3aeaa53f 1974 rtx_insn *next = next_nonnote_nondebug_insn (insn);
8f8cadbc 1975 rtx set = NULL_RTX;
1976 if (next)
1977 set = single_set (next);
1978 if (set
1979 && SET_DEST (set) == reg
1980 && GET_CODE (SET_SRC (set)) == PLUS
1981 && XEXP (SET_SRC (set), 0) == reg
971ba038 1982 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
8f8cadbc 1983 {
1984 rtx src3 = XEXP (SET_SRC (set), 1);
60141df0 1985 unsigned HOST_WIDE_INT added_offset = UINTVAL (src3);
8f8cadbc 1986 HOST_WIDE_INT base_offset = reg_offset[REGNO (src)];
1987 HOST_WIDE_INT regno_offset = reg_offset[regno];
1988 rtx new_src =
69e41517 1989 gen_int_mode (added_offset
1990 + base_offset
1991 - regno_offset,
1992 GET_MODE (reg));
f529eb25 1993 bool success = false;
1994 bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
8f8cadbc 1995
1996 if (new_src == const0_rtx)
1997 /* See above why we create (set (reg) (reg)) here. */
1998 success
1999 = validate_change (next, &SET_SRC (set), reg, 0);
c9a03487 2000 else
8f8cadbc 2001 {
c9a03487 2002 rtx old_src = SET_SRC (set);
2003 struct full_rtx_costs oldcst, newcst;
2004 rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src);
2005
b72d459f 2006 get_full_set_rtx_cost (set, &oldcst);
c9a03487 2007 SET_SRC (set) = tem;
5ae4887d 2008 get_full_set_src_cost (tem, GET_MODE (reg), &newcst);
c9a03487 2009 SET_SRC (set) = old_src;
2010 costs_add_n_insns (&oldcst, 1);
2011
2012 if (costs_lt_p (&newcst, &oldcst, speed)
2013 && have_add2_insn (reg, new_src))
2014 {
d1f9b275 2015 rtx newpat = gen_rtx_SET (reg, tem);
c9a03487 2016 success
2017 = validate_change (next, &PATTERN (next),
2018 newpat, 0);
2019 }
8f8cadbc 2020 }
2021 if (success)
2022 delete_insn (insn);
d83ccc81 2023 changed |= success;
8f8cadbc 2024 insn = next;
b6b86e87 2025 move2add_record_mode (reg);
2026 reg_offset[regno]
2027 = trunc_int_for_mode (added_offset + base_offset,
2028 GET_MODE (reg));
8f8cadbc 2029 continue;
2030 }
2031 }
2032 }
6132c0d0 2033
2034 /* Try to transform
2035 (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
2036 ...
2037 (set (REGY) (CONST (PLUS (SYMBOL_REF) (CONST_INT B))))
2038 to
2039 (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
2040 ...
2041 (set (REGY) (CONST (PLUS (REGX) (CONST_INT B-A)))) */
2042 if ((GET_CODE (src) == SYMBOL_REF
2043 || (GET_CODE (src) == CONST
2044 && GET_CODE (XEXP (src, 0)) == PLUS
2045 && GET_CODE (XEXP (XEXP (src, 0), 0)) == SYMBOL_REF
2046 && CONST_INT_P (XEXP (XEXP (src, 0), 1))))
2047 && dbg_cnt (cse2_move2add))
2048 {
2049 rtx sym, off;
2050
2051 if (GET_CODE (src) == SYMBOL_REF)
2052 {
2053 sym = src;
2054 off = const0_rtx;
2055 }
2056 else
2057 {
2058 sym = XEXP (XEXP (src, 0), 0);
2059 off = XEXP (XEXP (src, 0), 1);
2060 }
2061
2062 /* If the reg already contains the value which is sum of
2063 sym and some constant value, we can use an add2 insn. */
b6b86e87 2064 if (move2add_valid_value_p (regno, GET_MODE (reg))
6132c0d0 2065 && reg_base_reg[regno] < 0
2066 && reg_symbol_ref[regno] != NULL_RTX
2067 && rtx_equal_p (sym, reg_symbol_ref[regno]))
d83ccc81 2068 changed |= move2add_use_add2_insn (reg, sym, off, insn);
6132c0d0 2069
2070 /* Otherwise, we have to find a register whose value is sum
2071 of sym and some constant value. */
2072 else
d83ccc81 2073 changed |= move2add_use_add3_insn (reg, sym, off, insn);
6132c0d0 2074
2075 continue;
2076 }
8f8cadbc 2077 }
2078
2079 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
2080 {
2081 if (REG_NOTE_KIND (note) == REG_INC
8ad4c111 2082 && REG_P (XEXP (note, 0)))
8f8cadbc 2083 {
2084 /* Reset the information about this register. */
2085 int regno = REGNO (XEXP (note, 0));
2086 if (regno < FIRST_PSEUDO_REGISTER)
b6b86e87 2087 {
2088 move2add_record_mode (XEXP (note, 0));
2089 reg_mode[regno] = VOIDmode;
2090 }
8f8cadbc 2091 }
2092 }
6132c0d0 2093 note_stores (PATTERN (insn), move2add_note_store, insn);
8f8cadbc 2094
2095 /* If INSN is a conditional branch, we try to extract an
2096 implicit set out of it. */
f222bc3b 2097 if (any_condjump_p (insn))
8f8cadbc 2098 {
2099 rtx cnd = fis_get_condition (insn);
2100
2101 if (cnd != NULL_RTX
2102 && GET_CODE (cnd) == NE
8ad4c111 2103 && REG_P (XEXP (cnd, 0))
f222bc3b 2104 && !reg_set_p (XEXP (cnd, 0), insn)
8f8cadbc 2105 /* The following two checks, which are also in
2106 move2add_note_store, are intended to reduce the
2107 number of calls to gen_rtx_SET to avoid memory
2108 allocation if possible. */
2109 && SCALAR_INT_MODE_P (GET_MODE (XEXP (cnd, 0)))
0933f1d9 2110 && REG_NREGS (XEXP (cnd, 0)) == 1
971ba038 2111 && CONST_INT_P (XEXP (cnd, 1)))
8f8cadbc 2112 {
2113 rtx implicit_set =
d1f9b275 2114 gen_rtx_SET (XEXP (cnd, 0), XEXP (cnd, 1));
6132c0d0 2115 move2add_note_store (SET_DEST (implicit_set), implicit_set, insn);
8f8cadbc 2116 }
2117 }
2118
2119 /* If this is a CALL_INSN, all call used registers are stored with
2120 unknown values. */
6d7dc5b9 2121 if (CALL_P (insn))
8f8cadbc 2122 {
39bde736 2123 rtx link;
2124
8f8cadbc 2125 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
2126 {
2127 if (call_used_regs[i])
2128 /* Reset the information about this register. */
b6b86e87 2129 reg_mode[i] = VOIDmode;
8f8cadbc 2130 }
39bde736 2131
2132 for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
2133 link = XEXP (link, 1))
2134 {
2135 rtx setuse = XEXP (link, 0);
2136 rtx usage_rtx = XEXP (setuse, 0);
2137 if (GET_CODE (setuse) == CLOBBER
2138 && REG_P (usage_rtx))
2139 {
2140 unsigned int end_regno = END_REGNO (usage_rtx);
2141 for (unsigned int r = REGNO (usage_rtx); r < end_regno; ++r)
2142 /* Reset the information about this register. */
2143 reg_mode[r] = VOIDmode;
2144 }
2145 }
8f8cadbc 2146 }
2147 }
d83ccc81 2148 return changed;
8f8cadbc 2149}
2150
6132c0d0 2151/* SET is a SET or CLOBBER that sets DST. DATA is the insn which
2152 contains SET.
8f8cadbc 2153 Update reg_set_luid, reg_offset and reg_base_reg accordingly.
2154 Called from reload_cse_move2add via note_stores. */
2155
2156static void
6132c0d0 2157move2add_note_store (rtx dst, const_rtx set, void *data)
8f8cadbc 2158{
3aeaa53f 2159 rtx_insn *insn = (rtx_insn *) data;
8f8cadbc 2160 unsigned int regno = 0;
3754d046 2161 machine_mode mode = GET_MODE (dst);
8f8cadbc 2162
8f8cadbc 2163 /* Some targets do argument pushes without adding REG_INC notes. */
2164
e16ceb8e 2165 if (MEM_P (dst))
8f8cadbc 2166 {
2167 dst = XEXP (dst, 0);
2168 if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
2169 || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC)
b6b86e87 2170 reg_mode[REGNO (XEXP (dst, 0))] = VOIDmode;
8f8cadbc 2171 return;
2172 }
8f8cadbc 2173
b6b86e87 2174 if (GET_CODE (dst) == SUBREG)
2175 regno = subreg_regno (dst);
2176 else if (REG_P (dst))
2177 regno = REGNO (dst);
2178 else
2179 return;
8f8cadbc 2180
b6b86e87 2181 if (SCALAR_INT_MODE_P (mode)
2182 && GET_CODE (set) == SET)
6132c0d0 2183 {
2184 rtx note, sym = NULL_RTX;
b6b86e87 2185 rtx off;
6132c0d0 2186
2187 note = find_reg_equal_equiv_note (insn);
2188 if (note && GET_CODE (XEXP (note, 0)) == SYMBOL_REF)
2189 {
2190 sym = XEXP (note, 0);
b6b86e87 2191 off = const0_rtx;
6132c0d0 2192 }
2193 else if (note && GET_CODE (XEXP (note, 0)) == CONST
2194 && GET_CODE (XEXP (XEXP (note, 0), 0)) == PLUS
2195 && GET_CODE (XEXP (XEXP (XEXP (note, 0), 0), 0)) == SYMBOL_REF
2196 && CONST_INT_P (XEXP (XEXP (XEXP (note, 0), 0), 1)))
2197 {
2198 sym = XEXP (XEXP (XEXP (note, 0), 0), 0);
b6b86e87 2199 off = XEXP (XEXP (XEXP (note, 0), 0), 1);
6132c0d0 2200 }
2201
2202 if (sym != NULL_RTX)
2203 {
b6b86e87 2204 move2add_record_sym_value (dst, sym, off);
6132c0d0 2205 return;
2206 }
2207 }
2208
b6b86e87 2209 if (SCALAR_INT_MODE_P (mode)
2210 && GET_CODE (set) == SET
8f8cadbc 2211 && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
8f8cadbc 2212 && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
2213 {
2214 rtx src = SET_SRC (set);
2215 rtx base_reg;
60141df0 2216 unsigned HOST_WIDE_INT offset;
8f8cadbc 2217 int base_regno;
8f8cadbc 2218
2219 switch (GET_CODE (src))
2220 {
2221 case PLUS:
8ad4c111 2222 if (REG_P (XEXP (src, 0)))
8f8cadbc 2223 {
2224 base_reg = XEXP (src, 0);
2225
971ba038 2226 if (CONST_INT_P (XEXP (src, 1)))
60141df0 2227 offset = UINTVAL (XEXP (src, 1));
8ad4c111 2228 else if (REG_P (XEXP (src, 1))
b6b86e87 2229 && move2add_valid_value_p (REGNO (XEXP (src, 1)), mode))
8f8cadbc 2230 {
c389f975 2231 if (reg_base_reg[REGNO (XEXP (src, 1))] < 0
2232 && reg_symbol_ref[REGNO (XEXP (src, 1))] == NULL_RTX)
8f8cadbc 2233 offset = reg_offset[REGNO (XEXP (src, 1))];
2234 /* Maybe the first register is known to be a
2235 constant. */
b6b86e87 2236 else if (move2add_valid_value_p (REGNO (base_reg), mode)
c389f975 2237 && reg_base_reg[REGNO (base_reg)] < 0
2238 && reg_symbol_ref[REGNO (base_reg)] == NULL_RTX)
8f8cadbc 2239 {
2240 offset = reg_offset[REGNO (base_reg)];
2241 base_reg = XEXP (src, 1);
2242 }
2243 else
2244 goto invalidate;
2245 }
2246 else
2247 goto invalidate;
2248
2249 break;
2250 }
2251
2252 goto invalidate;
2253
2254 case REG:
2255 base_reg = src;
2256 offset = 0;
2257 break;
2258
2259 case CONST_INT:
2260 /* Start tracking the register as a constant. */
2261 reg_base_reg[regno] = -1;
6132c0d0 2262 reg_symbol_ref[regno] = NULL_RTX;
8f8cadbc 2263 reg_offset[regno] = INTVAL (SET_SRC (set));
2264 /* We assign the same luid to all registers set to constants. */
2265 reg_set_luid[regno] = move2add_last_label_luid + 1;
b6b86e87 2266 move2add_record_mode (dst);
8f8cadbc 2267 return;
2268
2269 default:
b6b86e87 2270 goto invalidate;
8f8cadbc 2271 }
2272
2273 base_regno = REGNO (base_reg);
2274 /* If information about the base register is not valid, set it
2275 up as a new base register, pretending its value is known
2276 starting from the current insn. */
b6b86e87 2277 if (!move2add_valid_value_p (base_regno, mode))
8f8cadbc 2278 {
2279 reg_base_reg[base_regno] = base_regno;
6132c0d0 2280 reg_symbol_ref[base_regno] = NULL_RTX;
8f8cadbc 2281 reg_offset[base_regno] = 0;
2282 reg_set_luid[base_regno] = move2add_luid;
b6b86e87 2283 gcc_assert (GET_MODE (base_reg) == mode);
2284 move2add_record_mode (base_reg);
8f8cadbc 2285 }
8f8cadbc 2286
2287 /* Copy base information from our base register. */
2288 reg_set_luid[regno] = reg_set_luid[base_regno];
2289 reg_base_reg[regno] = reg_base_reg[base_regno];
6132c0d0 2290 reg_symbol_ref[regno] = reg_symbol_ref[base_regno];
8f8cadbc 2291
2292 /* Compute the sum of the offsets or constants. */
b6b86e87 2293 reg_offset[regno]
2294 = trunc_int_for_mode (offset + reg_offset[base_regno], mode);
2295
2296 move2add_record_mode (dst);
8f8cadbc 2297 }
2298 else
2299 {
b6b86e87 2300 invalidate:
2301 /* Invalidate the contents of the register. */
2302 move2add_record_mode (dst);
2303 reg_mode[regno] = VOIDmode;
8f8cadbc 2304 }
2305}
77fce4cd 2306\f
cbe8bda8 2307namespace {
2308
2309const pass_data pass_data_postreload_cse =
77fce4cd 2310{
cbe8bda8 2311 RTL_PASS, /* type */
2312 "postreload", /* name */
2313 OPTGROUP_NONE, /* optinfo_flags */
cbe8bda8 2314 TV_RELOAD_CSE_REGS, /* tv_id */
2315 0, /* properties_required */
2316 0, /* properties_provided */
2317 0, /* properties_destroyed */
2318 0, /* todo_flags_start */
8b88439e 2319 TODO_df_finish, /* todo_flags_finish */
77fce4cd 2320};
cbe8bda8 2321
2322class pass_postreload_cse : public rtl_opt_pass
2323{
2324public:
9af5ce0c 2325 pass_postreload_cse (gcc::context *ctxt)
2326 : rtl_opt_pass (pass_data_postreload_cse, ctxt)
cbe8bda8 2327 {}
2328
2329 /* opt_pass methods: */
31315c24 2330 virtual bool gate (function *) { return (optimize > 0 && reload_completed); }
2331
65b0537f 2332 virtual unsigned int execute (function *);
cbe8bda8 2333
2334}; // class pass_postreload_cse
2335
65b0537f 2336unsigned int
2337pass_postreload_cse::execute (function *fun)
2338{
2339 if (!dbg_cnt (postreload_cse))
2340 return 0;
2341
2342 /* Do a very simple CSE pass over just the hard registers. */
2343 reload_cse_regs (get_insns ());
2344 /* Reload_cse_regs can eliminate potentially-trapping MEMs.
2345 Remove any EH edges associated with them. */
2346 if (fun->can_throw_non_call_exceptions
2347 && purge_all_dead_edges ())
2348 cleanup_cfg (0);
2349
2350 return 0;
2351}
2352
cbe8bda8 2353} // anon namespace
2354
2355rtl_opt_pass *
2356make_pass_postreload_cse (gcc::context *ctxt)
2357{
2358 return new pass_postreload_cse (ctxt);
2359}