]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/postreload.c
Fix non-standard behaviour of std::istream_iterator
[thirdparty/gcc.git] / gcc / postreload.c
CommitLineData
8f8cadbc 1/* Perform simple optimizations to clean up the result of reload.
fbd26352 2 Copyright (C) 1987-2019 Free Software Foundation, Inc.
8f8cadbc 3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
8f8cadbc 9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
8f8cadbc 19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
9ef16211 23#include "backend.h"
7c29e30e 24#include "target.h"
8f8cadbc 25#include "rtl.h"
7c29e30e 26#include "tree.h"
27#include "predict.h"
9ef16211 28#include "df.h"
ad7b10a2 29#include "memmodel.h"
8f8cadbc 30#include "tm_p.h"
7c29e30e 31#include "optabs.h"
32#include "regs.h"
33#include "emit-rtl.h"
34#include "recog.h"
7c29e30e 35
94ea8568 36#include "cfgrtl.h"
37#include "cfgbuild.h"
38#include "cfgcleanup.h"
8f8cadbc 39#include "reload.h"
8f8cadbc 40#include "cselib.h"
77fce4cd 41#include "tree-pass.h"
3072d30e 42#include "dbgcnt.h"
8f8cadbc 43
/* Forward declarations for the three post-reload subpasses implemented
   below: a simple hard-register CSE (reload_cse_*), a reg+reg address
   combiner (reload_combine*), and an add->move converter
   (reload_cse_move2add / move2add_note_store).  */
static int reload_cse_noop_set_p (rtx);
static bool reload_cse_simplify (rtx_insn *, rtx);
static void reload_cse_regs_1 (void);
static int reload_cse_simplify_set (rtx, rtx_insn *);
static int reload_cse_simplify_operands (rtx_insn *, rtx);

static void reload_combine (void);
static void reload_combine_note_use (rtx *, rtx_insn *, int, rtx);
static void reload_combine_note_store (rtx, const_rtx, void *);

static bool reload_cse_move2add (rtx_insn *);
static void move2add_note_store (rtx, const_rtx, void *);
8f8cadbc 56
57/* Call cse / combine like post-reload optimization phases.
58 FIRST is the first instruction. */
98799adc 59
60static void
3aeaa53f 61reload_cse_regs (rtx_insn *first ATTRIBUTE_UNUSED)
8f8cadbc 62{
d83ccc81 63 bool moves_converted;
26709122 64 reload_cse_regs_1 ();
8f8cadbc 65 reload_combine ();
d83ccc81 66 moves_converted = reload_cse_move2add (first);
8f8cadbc 67 if (flag_expensive_optimizations)
d83ccc81 68 {
69 if (moves_converted)
70 reload_combine ();
26709122 71 reload_cse_regs_1 ();
d83ccc81 72 }
8f8cadbc 73}
74
75/* See whether a single set SET is a noop. */
76static int
3ad4992f 77reload_cse_noop_set_p (rtx set)
8f8cadbc 78{
79 if (cselib_reg_set_mode (SET_DEST (set)) != GET_MODE (SET_DEST (set)))
80 return 0;
81
82 return rtx_equal_for_cselib_p (SET_DEST (set), SET_SRC (set));
83}
84
/* Try to simplify INSN.  TESTREG is a scratch REG rtx used by
   reload_cse_simplify_operands.  Return true if the CFG may have changed
   (deleting an insn via delete_insn_and_edges can remove edges; we detect
   this by comparing the successor count of INSN's block before/after).  */
static bool
reload_cse_simplify (rtx_insn *insn, rtx testreg)
{
  rtx body = PATTERN (insn);
  basic_block insn_bb = BLOCK_FOR_INSN (insn);
  /* Successor count before any simplification; compared at 'done'.  */
  unsigned insn_bb_succs = EDGE_COUNT (insn_bb->succs);

  /* If NO_FUNCTION_CSE has been set by the target, then we should not try
     to cse function calls.  */
  if (NO_FUNCTION_CSE && CALL_P (insn))
    return false;

  if (GET_CODE (body) == SET)
    {
      int count = 0;

      /* Simplify even if we may think it is a no-op.
	 We may think a memory load of a value smaller than WORD_SIZE
	 is redundant because we haven't taken into account possible
	 implicit extension.  reload_cse_simplify_set() will bring
	 this out, so it's safer to simplify before we delete.  */
      count += reload_cse_simplify_set (body, insn);

      if (!count && reload_cse_noop_set_p (body))
	{
	  /* check_for_inc_dec must agree before we may delete the insn.  */
	  if (check_for_inc_dec (insn))
	    delete_insn_and_edges (insn);
	  /* We're done with this insn.  */
	  goto done;
	}

      if (count > 0)
	apply_change_group ();
      else
	reload_cse_simplify_operands (insn, testreg);
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      int count = 0;
      rtx value = NULL_RTX;

      /* Registers mentioned in the clobber list for an asm cannot be reused
	 within the body of the asm.  Invalidate those registers now so that
	 we don't try to substitute values for them.  */
      if (asm_noperands (body) >= 0)
	{
	  for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	    {
	      rtx part = XVECEXP (body, 0, i);
	      /* asms can only have full clobbers, not clobber_highs.  */
	      gcc_assert (GET_CODE (part) != CLOBBER_HIGH);
	      if (GET_CODE (part) == CLOBBER && REG_P (XEXP (part, 0)))
		cselib_invalidate_rtx (XEXP (part, 0));
	    }
	}

      /* If every action in a PARALLEL is a noop, we can delete
	 the entire PARALLEL.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	{
	  rtx part = XVECEXP (body, 0, i);
	  if (GET_CODE (part) == SET)
	    {
	      if (! reload_cse_noop_set_p (part))
		break;
	      /* At most one set of the function-value register is
		 tolerated; a second one stops the scan.  */
	      if (REG_P (SET_DEST (part))
		  && REG_FUNCTION_VALUE_P (SET_DEST (part)))
		{
		  if (value)
		    break;
		  value = SET_DEST (part);
		}
	    }
	  else if (GET_CODE (part) != CLOBBER
		   && GET_CODE (part) != CLOBBER_HIGH
		   && GET_CODE (part) != USE)
	    break;
	}

      /* I < 0 means the loop above ran to completion: every element was
	 a no-op set, CLOBBER, CLOBBER_HIGH or USE.  */
      if (i < 0)
	{
	  if (check_for_inc_dec (insn))
	    delete_insn_and_edges (insn);
	  /* We're done with this insn.  */
	  goto done;
	}

      /* It's not a no-op, but we can try to simplify it.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	if (GET_CODE (XVECEXP (body, 0, i)) == SET)
	  count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);

      if (count > 0)
	apply_change_group ();
      else
	reload_cse_simplify_operands (insn, testreg);
    }

 done:
  return (EDGE_COUNT (insn_bb->succs) != insn_bb_succs);
}
188
/* Do a very simple CSE pass over the hard registers.

   This function detects no-op moves where we happened to assign two
   different pseudo-registers to the same hard register, and then
   copied one to the other.  Reload will generate a useless
   instruction copying a register to itself.

   This function also detects cases where we load a value from memory
   into two different registers, and (if memory is more expensive than
   registers) changes it to simply copy the first register into the
   second register.

   Another optimization is performed that scans the operands of each
   instruction to see whether the value is already available in a
   hard register.  It then replaces the operand with the hard register
   if possible, much like an optional reload would.  */

static void
reload_cse_regs_1 (void)
{
  bool cfg_changed = false;
  basic_block bb;
  rtx_insn *insn;
  /* Scratch REG shared by all calls to reload_cse_simplify; its mode and
     regno are rewritten on each use via set_mode_and_regno.  The regno is
     past LAST_VIRTUAL_REGISTER so it cannot clash with a real operand.  */
  rtx testreg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);

  cselib_init (CSELIB_RECORD_MEMORY);
  init_alias_analysis ();

  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
      {
	if (INSN_P (insn))
	  cfg_changed |= reload_cse_simplify (insn, testreg);

	/* Feed every insn (simplified or not) to cselib so its value
	   tables stay current.  */
	cselib_process_insn (insn);
      }

  /* Clean up.  */
  end_alias_analysis ();
  cselib_finish ();
  if (cfg_changed)
    cleanup_cfg (0);
}
232
/* Try to simplify a single SET instruction.  SET is the set pattern.
   INSN is the instruction it came from.
   This function only handles one case: if we set a register to a value
   which is not a register, we try to find that value in some other register
   and change the set into a register copy.
   Returns 1 if a change was queued (via validate_*_change with in_group=1),
   0 otherwise.  The caller is responsible for apply_change_group.  */

static int
reload_cse_simplify_set (rtx set, rtx_insn *insn)
{
  int did_change = 0;
  int dreg;
  rtx src;
  reg_class_t dclass;
  int old_cost;
  cselib_val *val;
  struct elt_loc_list *l;
  enum rtx_code extend_op = UNKNOWN;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));

  /* The destination must be (effectively) a hard register.  */
  dreg = true_regnum (SET_DEST (set));
  if (dreg < 0)
    return 0;

  /* Nothing to do if the source is already a register or has side
     effects we must not duplicate.  */
  src = SET_SRC (set);
  if (side_effects_p (src) || true_regnum (src) >= 0)
    return 0;

  dclass = REGNO_REG_CLASS (dreg);

  /* When replacing a memory with a register, we need to honor assumptions
     that combine made wrt the contents of sign bits.  We'll do this by
     generating an extend instruction instead of a reg->reg copy.  Thus
     the destination must be a register that we can widen.  */
  if (MEM_P (src)
      && (extend_op = load_extend_op (GET_MODE (src))) != UNKNOWN
      && !REG_P (SET_DEST (set)))
    return 0;

  val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0, VOIDmode);
  if (! val)
    return 0;

  /* If memory loads are cheaper than register copies, don't change them.  */
  if (MEM_P (src))
    old_cost = memory_move_cost (GET_MODE (src), dclass, true);
  else if (REG_P (src))
    old_cost = register_move_cost (GET_MODE (src),
				   REGNO_REG_CLASS (REGNO (src)), dclass);
  else
    old_cost = set_src_cost (src, GET_MODE (SET_DEST (set)), speed);

  /* Walk every location cselib knows to hold the same value and pick the
     cheapest constant or register replacement.  */
  for (l = val->locs; l; l = l->next)
    {
      rtx this_rtx = l->loc;
      int this_cost;

      if (CONSTANT_P (this_rtx) && ! references_value_p (this_rtx, 0))
	{
	  if (extend_op != UNKNOWN)
	    {
	      wide_int result;

	      /* Only integer constants can be meaningfully extended.  */
	      if (!CONST_SCALAR_INT_P (this_rtx))
		continue;

	      /* Fold the implicit load extension into the constant.  */
	      switch (extend_op)
		{
		case ZERO_EXTEND:
		  result = wide_int::from (rtx_mode_t (this_rtx,
						       GET_MODE (src)),
					   BITS_PER_WORD, UNSIGNED);
		  break;
		case SIGN_EXTEND:
		  result = wide_int::from (rtx_mode_t (this_rtx,
						       GET_MODE (src)),
					   BITS_PER_WORD, SIGNED);
		  break;
		default:
		  gcc_unreachable ();
		}
	      this_rtx = immed_wide_int_const (result, word_mode);
	    }

	  this_cost = set_src_cost (this_rtx, GET_MODE (SET_DEST (set)), speed);
	}
      else if (REG_P (this_rtx))
	{
	  if (extend_op != UNKNOWN)
	    {
	      /* Cost the explicit extension we would have to emit.  */
	      this_rtx = gen_rtx_fmt_e (extend_op, word_mode, this_rtx);
	      this_cost = set_src_cost (this_rtx, word_mode, speed);
	    }
	  else
	    this_cost = register_move_cost (GET_MODE (this_rtx),
					    REGNO_REG_CLASS (REGNO (this_rtx)),
					    dclass);
	}
      else
	continue;

      /* If equal costs, prefer registers over anything else.  That
	 tends to lead to smaller instructions on some machines.  */
      if (this_cost < old_cost
	  || (this_cost == old_cost
	      && REG_P (this_rtx)
	      && !REG_P (SET_SRC (set))))
	{
	  /* When emitting an extension, the destination must be widened
	     to word_mode — only allowed if the hard reg can change mode.  */
	  if (extend_op != UNKNOWN
	      && REG_CAN_CHANGE_MODE_P (REGNO (SET_DEST (set)),
					GET_MODE (SET_DEST (set)), word_mode))
	    {
	      rtx wide_dest = gen_rtx_REG (word_mode, REGNO (SET_DEST (set)));
	      ORIGINAL_REGNO (wide_dest) = ORIGINAL_REGNO (SET_DEST (set));
	      validate_change (insn, &SET_DEST (set), wide_dest, 1);
	    }

	  validate_unshare_change (insn, &SET_SRC (set), this_rtx, 1);
	  /* Later, cheaper locations must beat this one, not the
	     original source.  */
	  old_cost = this_cost, did_change = 1;
	}
    }

  return did_change;
}
356
/* Try to replace operands in INSN with equivalent values that are already
   in registers.  This can be viewed as optional reloading.

   For each non-register operand in the insn, see if any hard regs are
   known to be equivalent to that operand.  Record the alternatives which
   can accept these hard registers.  Among all alternatives, select the
   ones which are better or equal to the one currently matching, where
   "better" is in terms of '?' and '!' constraints.  Among the remaining
   alternatives, select the one which replaces most operands with
   hard registers.

   TESTREG is a scratch REG whose mode/regno get rewritten for each
   candidate hard register.  Returns the result of apply_change_group,
   or 0 if there was nothing to do.  */

static int
reload_cse_simplify_operands (rtx_insn *insn, rtx testreg)
{
  int i, j;

  /* For each operand, all registers that are equivalent to it.  */
  HARD_REG_SET equiv_regs[MAX_RECOG_OPERANDS];

  const char *constraints[MAX_RECOG_OPERANDS];

  /* Vector recording how bad an alternative is.  */
  int *alternative_reject;
  /* Vector recording how many registers can be introduced by choosing
     this alternative.  */
  int *alternative_nregs;
  /* Array of vectors recording, for each operand and each alternative,
     which hard register to substitute, or -1 if the operand should be
     left as it is.  */
  int *op_alt_regno[MAX_RECOG_OPERANDS];
  /* Array of alternatives, sorted in order of decreasing desirability.  */
  int *alternative_order;

  extract_constrain_insn (insn);

  if (recog_data.n_alternatives == 0 || recog_data.n_operands == 0)
    return 0;

  alternative_reject = XALLOCAVEC (int, recog_data.n_alternatives);
  alternative_nregs = XALLOCAVEC (int, recog_data.n_alternatives);
  alternative_order = XALLOCAVEC (int, recog_data.n_alternatives);
  memset (alternative_reject, 0, recog_data.n_alternatives * sizeof (int));
  memset (alternative_nregs, 0, recog_data.n_alternatives * sizeof (int));

  /* For each operand, find out which regs are equivalent.  */
  for (i = 0; i < recog_data.n_operands; i++)
    {
      cselib_val *v;
      struct elt_loc_list *l;
      rtx op;

      CLEAR_HARD_REG_SET (equiv_regs[i]);

      /* cselib blows up on CODE_LABELs.  Trying to fix that doesn't seem
	 right, so avoid the problem here.  Similarly NOTE_INSN_DELETED_LABEL.
	 Likewise if we have a constant and the insn pattern doesn't tell us
	 the mode we need.  */
      if (LABEL_P (recog_data.operand[i])
	  || (NOTE_P (recog_data.operand[i])
	      && NOTE_KIND (recog_data.operand[i]) == NOTE_INSN_DELETED_LABEL)
	  || (CONSTANT_P (recog_data.operand[i])
	      && recog_data.operand_mode[i] == VOIDmode))
	continue;

      op = recog_data.operand[i];
      /* Loads narrower than a word may be implicitly extended by the
	 target; handle that before treating OP as an ordinary value.  */
      if (MEM_P (op) && load_extend_op (GET_MODE (op)) != UNKNOWN)
	{
	  rtx set = single_set (insn);

	  /* We might have multiple sets, some of which do implicit
	     extension.  Punt on this for now.  */
	  if (! set)
	    continue;
	  /* If the destination is also a MEM or a STRICT_LOW_PART, no
	     extension applies.
	     Also, if there is an explicit extension, we don't have to
	     worry about an implicit one.  */
	  else if (MEM_P (SET_DEST (set))
		   || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART
		   || GET_CODE (SET_SRC (set)) == ZERO_EXTEND
		   || GET_CODE (SET_SRC (set)) == SIGN_EXTEND)
	    ; /* Continue ordinary processing.  */
	  /* If the register cannot change mode to word_mode, it follows that
	     it cannot have been used in word_mode.  */
	  else if (REG_P (SET_DEST (set))
		   && !REG_CAN_CHANGE_MODE_P (REGNO (SET_DEST (set)),
					      GET_MODE (SET_DEST (set)),
					      word_mode))
	    ; /* Continue ordinary processing.  */
	  /* If this is a straight load, make the extension explicit.  */
	  else if (REG_P (SET_DEST (set))
		   && recog_data.n_operands == 2
		   && SET_SRC (set) == op
		   && SET_DEST (set) == recog_data.operand[1-i])
	    {
	      validate_change (insn, recog_data.operand_loc[i],
			       gen_rtx_fmt_e (load_extend_op (GET_MODE (op)),
					      word_mode, op),
			       1);
	      validate_change (insn, recog_data.operand_loc[1-i],
			       gen_rtx_REG (word_mode, REGNO (SET_DEST (set))),
			       1);
	      if (! apply_change_group ())
		return 0;
	      /* Operands changed shape; restart the analysis from
		 scratch on the rewritten insn.  */
	      return reload_cse_simplify_operands (insn, testreg);
	    }
	  else
	    /* ??? There might be arithmetic operations with memory that are
	       safe to optimize, but is it worth the trouble?  */
	    continue;
	}

      if (side_effects_p (op))
	continue;
      v = cselib_lookup (op, recog_data.operand_mode[i], 0, VOIDmode);
      if (! v)
	continue;

      for (l = v->locs; l; l = l->next)
	if (REG_P (l->loc))
	  SET_HARD_REG_BIT (equiv_regs[i], REGNO (l->loc));
    }

  alternative_mask preferred = get_preferred_alternatives (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      machine_mode mode;
      int regno;
      const char *p;

      op_alt_regno[i] = XALLOCAVEC (int, recog_data.n_alternatives);
      for (j = 0; j < recog_data.n_alternatives; j++)
	op_alt_regno[i][j] = -1;

      p = constraints[i] = recog_data.constraints[i];
      mode = recog_data.operand_mode[i];

      /* Add the reject values for each alternative given by the constraints
	 for this operand.  */
      j = 0;
      while (*p != '\0')
	{
	  char c = *p++;
	  if (c == ',')
	    j++;
	  else if (c == '?')
	    alternative_reject[j] += 3;
	  else if (c == '!')
	    alternative_reject[j] += 300;
	}

      /* We won't change operands which are already registers.  We
	 also don't want to modify output operands.  */
      regno = true_regnum (recog_data.operand[i]);
      if (regno >= 0
	  || constraints[i][0] == '='
	  || constraints[i][0] == '+')
	continue;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  enum reg_class rclass = NO_REGS;

	  if (! TEST_HARD_REG_BIT (equiv_regs[i], regno))
	    continue;

	  /* Reuse the shared scratch REG to represent this candidate.  */
	  set_mode_and_regno (testreg, mode, regno);

	  /* We found a register equal to this operand.  Now look for all
	     alternatives that can accept this register and have not been
	     assigned a register they can use yet.  */
	  j = 0;
	  p = constraints[i];
	  for (;;)
	    {
	      char c = *p;

	      switch (c)
		{
		case 'g':
		  rclass = reg_class_subunion[rclass][GENERAL_REGS];
		  break;

		default:
		  rclass
		    = (reg_class_subunion
		       [rclass]
		       [reg_class_for_constraint (lookup_constraint (p))]);
		  break;

		case ',': case '\0':
		  /* See if REGNO fits this alternative, and set it up as the
		     replacement register if we don't have one for this
		     alternative yet and the operand being replaced is not
		     a cheap CONST_INT.  */
		  if (op_alt_regno[i][j] == -1
		      && TEST_BIT (preferred, j)
		      && reg_fits_class_p (testreg, rclass, 0, mode)
		      && (!CONST_INT_P (recog_data.operand[i])
			  || (set_src_cost (recog_data.operand[i], mode,
					    optimize_bb_for_speed_p
					     (BLOCK_FOR_INSN (insn)))
			      > set_src_cost (testreg, mode,
					      optimize_bb_for_speed_p
					       (BLOCK_FOR_INSN (insn))))))
		    {
		      alternative_nregs[j]++;
		      op_alt_regno[i][j] = regno;
		    }
		  j++;
		  rclass = NO_REGS;
		  break;
		}
	      p += CONSTRAINT_LEN (c, p);

	      if (c == '\0')
		break;
	    }
	}
    }

  /* Record all alternatives which are better or equal to the currently
     matching one in the alternative_order array.  */
  for (i = j = 0; i < recog_data.n_alternatives; i++)
    if (alternative_reject[i] <= alternative_reject[which_alternative])
      alternative_order[j++] = i;
  recog_data.n_alternatives = j;

  /* Sort it.  Given a small number of alternatives, a dumb algorithm
     won't hurt too much.  */
  for (i = 0; i < recog_data.n_alternatives - 1; i++)
    {
      int best = i;
      int best_reject = alternative_reject[alternative_order[i]];
      int best_nregs = alternative_nregs[alternative_order[i]];

      for (j = i + 1; j < recog_data.n_alternatives; j++)
	{
	  int this_reject = alternative_reject[alternative_order[j]];
	  int this_nregs = alternative_nregs[alternative_order[j]];

	  /* Lower reject wins; on a tie, more replaced registers wins.  */
	  if (this_reject < best_reject
	      || (this_reject == best_reject && this_nregs > best_nregs))
	    {
	      best = j;
	      best_reject = this_reject;
	      best_nregs = this_nregs;
	    }
	}

      std::swap (alternative_order[best], alternative_order[i]);
    }

  /* Substitute the operands as determined by op_alt_regno for the best
     alternative.  */
  j = alternative_order[0];

  for (i = 0; i < recog_data.n_operands; i++)
    {
      machine_mode mode = recog_data.operand_mode[i];
      if (op_alt_regno[i][j] == -1)
	continue;

      validate_change (insn, recog_data.operand_loc[i],
		       gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
    }

  /* Matched duplicates must be replaced consistently with their
     original operand.  */
  for (i = recog_data.n_dups - 1; i >= 0; i--)
    {
      int op = recog_data.dup_num[i];
      machine_mode mode = recog_data.operand_mode[op];

      if (op_alt_regno[op][j] == -1)
	continue;

      validate_change (insn, recog_data.dup_loc[i],
		       gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
    }

  return apply_change_group ();
}
638\f
/* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
   addressing now.
   This code might also be useful when reload gave up on reg+reg addressing
   because of clashes between the return register and INDEX_REG_CLASS.  */

/* The maximum number of uses of a register we can keep track of to
   replace them with reg+reg addressing.  */
#define RELOAD_COMBINE_MAX_USES 16

/* Describes a recorded use of a register.  */
struct reg_use
{
  /* The insn where a register has been used.  */
  rtx_insn *insn;
  /* Points to the memory reference enclosing the use, if any, NULL_RTX
     otherwise.  */
  rtx containing_mem;
  /* Location of the register within INSN.  */
  rtx *usep;
  /* The reverse uid of the insn (see reload_combine_ruid below; insns
     are numbered while scanning from last to first).  */
  int ruid;
};
8f8cadbc 661
/* Per-hard-register scan state for reload_combine.

   If the register is used in some unknown fashion, USE_INDEX is negative.
   If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
   indicates where it is first set or clobbered.
   Otherwise, USE_INDEX is the index of the last encountered use of the
   register (which is first among these we have seen since we scan backwards).
   USE_RUID indicates the first encountered, i.e. last, of these uses.
   If ALL_OFFSETS_MATCH is true, all encountered uses were inside a PLUS
   with a constant offset; OFFSET contains this constant in that case.
   STORE_RUID is always meaningful if we only want to use a value in a
   register in a different place: it denotes the next insn in the insn
   stream (i.e. the last encountered) that sets or clobbers the register.
   REAL_STORE_RUID is similar, but clobbers are ignored when updating it.
   EXPR is the expression used when storing the register.  */
static struct
  {
    struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
    rtx offset;
    int use_index;
    int store_ruid;
    int real_store_ruid;
    int use_ruid;
    bool all_offsets_match;
    rtx expr;
  } reg_state[FIRST_PSEUDO_REGISTER];

/* Reverse linear uid.  This is increased in reload_combine while scanning
   the instructions from last to first.  It is used to set last_label_ruid
   and the store_ruid / use_ruid fields in reg_state.  */
static int reload_combine_ruid;

/* The RUID of the last label we encountered in reload_combine.  */
static int last_label_ruid;

/* The RUID of the last jump we encountered in reload_combine.  */
static int last_jump_ruid;

/* The register numbers of the first and last index register.  A value of
   -1 in LAST_INDEX_REG indicates that we've previously computed these
   values and found no suitable index registers.  */
static int first_index_reg = -1;
static int last_index_reg;

/* Liveness information for LABEL, indexed into the label_live array
   computed by reload_combine.  */
#define LABEL_LIVE(LABEL) \
  (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])
706
/* Subroutine of reload_combine_split_ruids, called to fix up a single
   ruid pointed to by *PRUID if it is higher than SPLIT_RUID.  */

static inline void
reload_combine_split_one_ruid (int *pruid, int split_ruid)
{
  if (*pruid <= split_ruid)
    return;
  ++*pruid;
}
716
717/* Called when we insert a new insn in a position we've already passed in
718 the scan. Examine all our state, increasing all ruids that are higher
719 than SPLIT_RUID by one in order to make room for a new insn. */
720
721static void
722reload_combine_split_ruids (int split_ruid)
723{
724 unsigned i;
725
726 reload_combine_split_one_ruid (&reload_combine_ruid, split_ruid);
727 reload_combine_split_one_ruid (&last_label_ruid, split_ruid);
728 reload_combine_split_one_ruid (&last_jump_ruid, split_ruid);
729
730 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
731 {
732 int j, idx = reg_state[i].use_index;
733 reload_combine_split_one_ruid (&reg_state[i].use_ruid, split_ruid);
734 reload_combine_split_one_ruid (&reg_state[i].store_ruid, split_ruid);
735 reload_combine_split_one_ruid (&reg_state[i].real_store_ruid,
736 split_ruid);
737 if (idx < 0)
738 continue;
739 for (j = idx; j < RELOAD_COMBINE_MAX_USES; j++)
740 {
741 reload_combine_split_one_ruid (&reg_state[i].reg_use[j].ruid,
742 split_ruid);
743 }
744 }
745}
746
/* Called when we are about to rescan a previously encountered insn with
   reload_combine_note_use after modifying some part of it.  This clears all
   information about uses in that particular insn.  */

static void
reload_combine_purge_insn_uses (rtx_insn *insn)
{
  unsigned i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int j, k, idx = reg_state[i].use_index;
      if (idx < 0)
	continue;
      /* Compact the reg_use array in place.  Valid entries occupy
	 [idx, RELOAD_COMBINE_MAX_USES); J walks them downwards while K
	 tracks where the next surviving entry goes.  Entries belonging
	 to INSN are dropped; the rest stay packed at the top end, and K
	 becomes the new use_index.  */
      j = k = RELOAD_COMBINE_MAX_USES;
      while (j-- > idx)
	{
	  if (reg_state[i].reg_use[j].insn != insn)
	    {
	      k--;
	      if (k != j)
		reg_state[i].reg_use[k] = reg_state[i].reg_use[j];
	    }
	}
      reg_state[i].use_index = k;
    }
}
774
/* Called when we need to forget about all uses of REGNO after an insn
   which is identified by RUID.  */

static void
reload_combine_purge_reg_uses_after_ruid (unsigned regno, int ruid)
{
  int j, k, idx = reg_state[regno].use_index;
  if (idx < 0)
    return;
  /* Same in-place compaction as reload_combine_purge_insn_uses: keep
     only entries whose ruid is strictly below RUID, packed at the top
     of the array, with K as the new use_index.  */
  j = k = RELOAD_COMBINE_MAX_USES;
  while (j-- > idx)
    {
      if (reg_state[regno].reg_use[j].ruid >= ruid)
	{
	  k--;
	  if (k != j)
	    reg_state[regno].reg_use[k] = reg_state[regno].reg_use[j];
	}
    }
  reg_state[regno].use_index = k;
}
796
/* Find the use of REGNO with the ruid that is highest among those
   lower than RUID_LIMIT, and return it if it is the only use of this
   reg in the insn.  Return NULL otherwise.  */

static struct reg_use *
reload_combine_closest_single_use (unsigned regno, int ruid_limit)
{
  int i, best_ruid = 0;
  int use_idx = reg_state[regno].use_index;
  struct reg_use *retval;

  if (use_idx < 0)
    return NULL;
  retval = NULL;
  for (i = use_idx; i < RELOAD_COMBINE_MAX_USES; i++)
    {
      struct reg_use *use = reg_state[regno].reg_use + i;
      int this_ruid = use->ruid;
      if (this_ruid >= ruid_limit)
	continue;
      if (this_ruid > best_ruid)
	{
	  best_ruid = this_ruid;
	  retval = use;
	}
      /* Two uses with the same ruid are in the same insn; that insn
	 uses the reg more than once, so it doesn't qualify.  */
      else if (this_ruid == best_ruid)
	retval = NULL;
    }
  /* Don't return a use that lies on the far side of a label.  */
  if (last_label_ruid >= best_ruid)
    return NULL;
  return retval;
}
829
65069495 830/* After we've moved an add insn, fix up any debug insns that occur
831 between the old location of the add and the new location. REG is
832 the destination register of the add insn; REPLACEMENT is the
833 SET_SRC of the add. FROM and TO specify the range in which we
834 should make this change on debug insns. */
0ead6a7d 835
836static void
3aeaa53f 837fixup_debug_insns (rtx reg, rtx replacement, rtx_insn *from, rtx_insn *to)
0ead6a7d 838{
3aeaa53f 839 rtx_insn *insn;
65069495 840 for (insn = from; insn != to; insn = NEXT_INSN (insn))
0ead6a7d 841 {
842 rtx t;
65069495 843
bce107d7 844 if (!DEBUG_BIND_INSN_P (insn))
0ead6a7d 845 continue;
65069495 846
847 t = INSN_VAR_LOCATION_LOC (insn);
727047d0 848 t = simplify_replace_rtx (t, reg, replacement);
65069495 849 validate_change (insn, &INSN_VAR_LOCATION_LOC (insn), t, 0);
0ead6a7d 850 }
851}
852
/* Subroutine of reload_combine_recognize_const_pattern.  Try to replace REG
   with SRC in the insn described by USE, taking costs into account.  Return
   true if we made the replacement.  */

static bool
try_replace_in_use (struct reg_use *use, rtx reg, rtx src)
{
  rtx_insn *use_insn = use->insn;
  rtx mem = use->containing_mem;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (use_insn));

  if (mem != NULL_RTX)
    {
      /* The use is inside a memory address: substitute into the address
	 and keep the result only if it is valid and no more expensive.  */
      addr_space_t as = MEM_ADDR_SPACE (mem);
      rtx oldaddr = XEXP (mem, 0);
      rtx newaddr = NULL_RTX;
      int old_cost = address_cost (oldaddr, GET_MODE (mem), as, speed);
      int new_cost;

      newaddr = simplify_replace_rtx (oldaddr, reg, src);
      if (memory_address_addr_space_p (GET_MODE (mem), newaddr, as))
	{
	  /* Temporarily install the new address just to cost it, then
	     restore the old one; validate_change does the real swap.  */
	  XEXP (mem, 0) = newaddr;
	  new_cost = address_cost (newaddr, GET_MODE (mem), as, speed);
	  XEXP (mem, 0) = oldaddr;
	  if (new_cost <= old_cost
	      && validate_change (use_insn,
				  &XEXP (mem, 0), newaddr, 0))
	    return true;
	}
    }
  else
    {
      /* The use is in a reg = reg + const style insn; fold SRC into the
	 source if the combined expression is no more expensive.  */
      rtx new_set = single_set (use_insn);
      if (new_set
	  && REG_P (SET_DEST (new_set))
	  && GET_CODE (SET_SRC (new_set)) == PLUS
	  && REG_P (XEXP (SET_SRC (new_set), 0))
	  && CONSTANT_P (XEXP (SET_SRC (new_set), 1)))
	{
	  rtx new_src;
	  machine_mode mode = GET_MODE (SET_DEST (new_set));
	  int old_cost = set_src_cost (SET_SRC (new_set), mode, speed);

	  gcc_assert (rtx_equal_p (XEXP (SET_SRC (new_set), 0), reg));
	  new_src = simplify_replace_rtx (SET_SRC (new_set), reg, src);

	  if (set_src_cost (new_src, mode, speed) <= old_cost
	      && validate_change (use_insn, &SET_SRC (new_set),
				  new_src, 0))
	    return true;
	}
    }
  return false;
}
908
/* Called by reload_combine when scanning INSN.  This function tries to detect
   patterns where a constant is added to a register, and the result is used
   in an address.
   Return true if no further processing is needed on INSN; false if it wasn't
   recognized and should be handled normally.  */

static bool
reload_combine_recognize_const_pattern (rtx_insn *insn)
{
  int from_ruid = reload_combine_ruid;
  rtx set, pat, reg, src, addreg;
  unsigned int regno;
  struct reg_use *use;
  bool must_move_add;
  rtx_insn *add_moved_after_insn = NULL;
  int add_moved_after_ruid = 0;
  int clobbered_regno = -1;

  set = single_set (insn);
  if (set == NULL_RTX)
    return false;

  /* Only single word-mode hard registers are handled; the stack pointer
     is explicitly excluded.  */
  reg = SET_DEST (set);
  src = SET_SRC (set);
  if (!REG_P (reg)
      || REG_NREGS (reg) != 1
      || GET_MODE (reg) != Pmode
      || reg == stack_pointer_rtx)
    return false;

  regno = REGNO (reg);

  /* We look for a REG1 = REG2 + CONSTANT insn, followed by either
     uses of REG1 inside an address, or inside another add insn.  If
     possible and profitable, merge the addition into subsequent
     uses.  */
  if (GET_CODE (src) != PLUS
      || !REG_P (XEXP (src, 0))
      || !CONSTANT_P (XEXP (src, 1)))
    return false;

  /* If REG1 == REG2 (the add modifies its own source), any use we merge
     into forces the add itself to be moved below that use.  */
  addreg = XEXP (src, 0);
  must_move_add = rtx_equal_p (reg, addreg);

  pat = PATTERN (insn);
  if (must_move_add && set != pat)
    {
      /* We have to be careful when moving the add; apart from the
	 single_set there may also be clobbers.  Recognize one special
	 case, that of one clobber alongside the set (likely a clobber
	 of the CC register).  */
      gcc_assert (GET_CODE (PATTERN (insn)) == PARALLEL);
      if (XVECLEN (pat, 0) != 2 || XVECEXP (pat, 0, 0) != set
	  || GET_CODE (XVECEXP (pat, 0, 1)) != CLOBBER
	  || !REG_P (XEXP (XVECEXP (pat, 0, 1), 0)))
	return false;
      clobbered_regno = REGNO (XEXP (XVECEXP (pat, 0, 1), 0));
    }

  /* Walk the recorded uses of REGNO from most recent (highest ruid at or
     below FROM_RUID) backwards, merging the constant into each use where
     legal and profitable.  */
  do
    {
      use = reload_combine_closest_single_use (regno, from_ruid);

      if (use)
	/* Start the search for the next use from here.  */
	from_ruid = use->ruid;

      if (use && GET_MODE (*use->usep) == Pmode)
	{
	  bool delete_add = false;
	  rtx_insn *use_insn = use->insn;
	  int use_ruid = use->ruid;

	  /* Avoid moving the add insn past a jump.  */
	  if (must_move_add && use_ruid <= last_jump_ruid)
	    break;

	  /* If the add clobbers another hard reg in parallel, don't move
	     it past a real set of this hard reg.  */
	  if (must_move_add && clobbered_regno >= 0
	      && reg_state[clobbered_regno].real_store_ruid >= use_ruid)
	    break;

	  /* Do not separate cc0 setter and cc0 user on HAVE_cc0 targets.  */
	  if (HAVE_cc0 && must_move_add && sets_cc0_p (PATTERN (use_insn)))
	    break;

	  gcc_assert (reg_state[regno].store_ruid <= use_ruid);
	  /* Avoid moving a use of ADDREG past a point where it is stored.  */
	  if (reg_state[REGNO (addreg)].store_ruid > use_ruid)
	    break;

	  /* We also must not move the addition past an insn that sets
	     the same register, unless we can combine two add insns.  */
	  if (must_move_add && reg_state[regno].store_ruid == use_ruid)
	    {
	      if (use->containing_mem == NULL_RTX)
		delete_add = true;
	      else
		break;
	    }

	  if (try_replace_in_use (use, reg, src))
	    {
	      /* The substitution changed USE_INSN; rebuild its recorded
		 use information from scratch.  */
	      reload_combine_purge_insn_uses (use_insn);
	      reload_combine_note_use (&PATTERN (use_insn), use_insn,
				       use_ruid, NULL_RTX);

	      if (delete_add)
		{
		  fixup_debug_insns (reg, src, insn, use_insn);
		  delete_insn (insn);
		  return true;
		}
	      if (must_move_add)
		{
		  add_moved_after_insn = use_insn;
		  add_moved_after_ruid = use_ruid;
		}
	      continue;
	    }
	}
      /* If we get here, we couldn't handle this use.  */
      if (must_move_add)
	break;
    }
  while (use);

  if (!must_move_add || add_moved_after_insn == NULL_RTX)
    /* Process the add normally.  */
    return false;

  fixup_debug_insns (reg, src, insn, add_moved_after_insn);

  /* Sink the add below the last use we merged into and update the
     bookkeeping so later scanning sees the add at its new position.  */
  reorder_insns (insn, insn, add_moved_after_insn);
  reload_combine_purge_reg_uses_after_ruid (regno, add_moved_after_ruid);
  reload_combine_split_ruids (add_moved_after_ruid - 1);
  reload_combine_note_use (&PATTERN (insn), insn,
			   add_moved_after_ruid, NULL_RTX);
  reg_state[regno].store_ruid = add_moved_after_ruid;

  return true;
}
1052
/* Called by reload_combine when scanning INSN.  Try to detect a pattern we
   can handle and improve.  Return true if no further processing is needed on
   INSN; false if it wasn't recognized and should be handled normally.  */

static bool
reload_combine_recognize_pattern (rtx_insn *insn)
{
  rtx set, reg, src;

  set = single_set (insn);
  if (set == NULL_RTX)
    return false;

  reg = SET_DEST (set);
  src = SET_SRC (set);
  if (!REG_P (reg) || REG_NREGS (reg) != 1)
    return false;

  unsigned int regno = REGNO (reg);
  machine_mode mode = GET_MODE (reg);

  /* use_index < 0 means REG is used in some unknown fashion;
     use_index == RELOAD_COMBINE_MAX_USES means no uses recorded at all.
     Either way there is nothing we can substitute into.  */
  if (reg_state[regno].use_index < 0
      || reg_state[regno].use_index >= RELOAD_COMBINE_MAX_USES)
    return false;

  /* All recorded uses must be in REG's own mode, or the substitution
     below would change meaning.  */
  for (int i = reg_state[regno].use_index;
       i < RELOAD_COMBINE_MAX_USES; i++)
    {
      struct reg_use *use = reg_state[regno].reg_use + i;
      if (GET_MODE (*use->usep) != mode)
	return false;
    }

  /* Look for (set (REGX) (CONST_INT))
     (set (REGX) (PLUS (REGX) (REGY)))
     ...
     ... (MEM (REGX)) ...
     and convert it to
     (set (REGZ) (CONST_INT))
     ...
     ... (MEM (PLUS (REGZ) (REGY)))... .

     First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
     and that we know all uses of REGX before it dies.
     Also, explicitly check that REGX != REGY; our life information
     does not yet show whether REGY changes in this insn.  */

  if (GET_CODE (src) == PLUS
      && reg_state[regno].all_offsets_match
      && last_index_reg != -1
      && REG_P (XEXP (src, 1))
      && rtx_equal_p (XEXP (src, 0), reg)
      && !rtx_equal_p (XEXP (src, 1), reg)
      && last_label_ruid < reg_state[regno].use_ruid)
    {
      rtx base = XEXP (src, 1);
      rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
      rtx prev_set = prev ? single_set (prev) : NULL_RTX;
      rtx index_reg = NULL_RTX;
      rtx reg_sum = NULL_RTX;
      int i;

      /* Now we need to set INDEX_REG to an index register (denoted as
	 REGZ in the illustration above) and REG_SUM to the expression
	 register+register that we want to use to substitute uses of REG
	 (typically in MEMs) with.  First check REG and BASE for being
	 index registers; we can use them even if they are not dead.  */
      if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
	  || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
				REGNO (base)))
	{
	  index_reg = reg;
	  reg_sum = src;
	}
      else
	{
	  /* Otherwise, look for a free index register.  Since we have
	     checked above that neither REG nor BASE are index registers,
	     if we find anything at all, it will be different from these
	     two registers.  */
	  for (i = first_index_reg; i <= last_index_reg; i++)
	    {
	      if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i)
		  && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
		  && reg_state[i].store_ruid <= reg_state[regno].use_ruid
		  && (call_used_regs[i] || df_regs_ever_live_p (i))
		  && (!frame_pointer_needed || i != HARD_FRAME_POINTER_REGNUM)
		  && !fixed_regs[i] && !global_regs[i]
		  && hard_regno_nregs (i, GET_MODE (reg)) == 1
		  && targetm.hard_regno_scratch_ok (i))
		{
		  index_reg = gen_rtx_REG (GET_MODE (reg), i);
		  reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
		  break;
		}
	    }
	}

      /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
	 (REGY), i.e. BASE, is not clobbered before the last use we'll
	 create.  */
      if (reg_sum
	  && prev_set
	  && CONST_INT_P (SET_SRC (prev_set))
	  && rtx_equal_p (SET_DEST (prev_set), reg)
	  && (reg_state[REGNO (base)].store_ruid
	      <= reg_state[regno].use_ruid))
	{
	  /* Change destination register and, if necessary, the constant
	     value in PREV, the constant loading instruction.  */
	  validate_change (prev, &SET_DEST (prev_set), index_reg, 1);
	  if (reg_state[regno].offset != const0_rtx)
	    {
	      /* Fold the offset seen at the uses into the constant load,
		 truncating to the register's mode.  */
	      HOST_WIDE_INT c
		= trunc_int_for_mode (UINTVAL (SET_SRC (prev_set))
				      + UINTVAL (reg_state[regno].offset),
				      GET_MODE (index_reg));
	      validate_change (prev, &SET_SRC (prev_set), GEN_INT (c), 1);
	    }

	  /* Now for every use of REG that we have recorded, replace REG
	     with REG_SUM.  */
	  for (i = reg_state[regno].use_index;
	       i < RELOAD_COMBINE_MAX_USES; i++)
	    validate_unshare_change (reg_state[regno].reg_use[i].insn,
				     reg_state[regno].reg_use[i].usep,
				     /* Each change must have its own
					replacement.  */
				     reg_sum, 1);

	  if (apply_change_group ())
	    {
	      struct reg_use *lowest_ruid = NULL;

	      /* For every new use of REG_SUM, we have to record the use
		 of BASE therein, i.e. operand 1.  */
	      for (i = reg_state[regno].use_index;
		   i < RELOAD_COMBINE_MAX_USES; i++)
		{
		  struct reg_use *use = reg_state[regno].reg_use + i;
		  reload_combine_note_use (&XEXP (*use->usep, 1), use->insn,
					   use->ruid, use->containing_mem);
		  if (lowest_ruid == NULL || use->ruid < lowest_ruid->ruid)
		    lowest_ruid = use;
		}

	      fixup_debug_insns (reg, reg_sum, insn, lowest_ruid->insn);

	      /* Delete the reg-reg addition.  */
	      delete_insn (insn);

	      if (reg_state[regno].offset != const0_rtx
		  /* Previous REG_EQUIV / REG_EQUAL notes for PREV
		     are now invalid.  */
		  && remove_reg_equal_equiv_notes (prev))
		df_notes_rescan (prev);

	      reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
	      return true;
	    }
	}
    }
  return false;
}
1217
/* Main entry point of the reload_combine optimization: scan all insns
   backwards, tracking register uses and stores via RUIDs (reverse
   instruction order numbers), and try the two pattern recognizers on
   each insn.  */

static void
reload_combine (void)
{
  rtx_insn *insn, *prev;
  basic_block bb;
  unsigned int r;
  int min_labelno, n_labels;
  HARD_REG_SET ever_live_at_start, *label_live;

  /* To avoid wasting too much time later searching for an index register,
     determine the minimum and maximum index register numbers.  */
  if (INDEX_REG_CLASS == NO_REGS)
    last_index_reg = -1;
  else if (first_index_reg == -1 && last_index_reg == 0)
    {
      /* first_index_reg == -1 && last_index_reg == 0 is the initial
	 (not-yet-computed) state of these cached file-scope values.  */
      for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], r))
	  {
	    if (first_index_reg == -1)
	      first_index_reg = r;

	    last_index_reg = r;
	  }

      /* If no index register is available, we can quit now.  Set LAST_INDEX_REG
	 to -1 so we'll know to quit early the next time we get here.  */
      if (first_index_reg == -1)
	{
	  last_index_reg = -1;
	  return;
	}
    }

  /* Set up LABEL_LIVE and EVER_LIVE_AT_START.  The register lifetime
     information is a bit fuzzy immediately after reload, but it's
     still good enough to determine which registers are live at a jump
     destination.  */
  min_labelno = get_first_label_num ();
  n_labels = max_label_num () - min_labelno;
  label_live = XNEWVEC (HARD_REG_SET, n_labels);
  CLEAR_HARD_REG_SET (ever_live_at_start);

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      insn = BB_HEAD (bb);
      if (LABEL_P (insn))
	{
	  HARD_REG_SET live;
	  bitmap live_in = df_get_live_in (bb);

	  REG_SET_TO_HARD_REG_SET (live, live_in);
	  compute_use_by_pseudos (&live, live_in);
	  COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
	  IOR_HARD_REG_SET (ever_live_at_start, live);
	}
    }

  /* Initialize last_label_ruid, reload_combine_ruid and reg_state.  */
  last_label_ruid = last_jump_ruid = reload_combine_ruid = 0;
  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
    {
      reg_state[r].store_ruid = 0;
      reg_state[r].real_store_ruid = 0;
      if (fixed_regs[r])
	reg_state[r].use_index = -1;
      else
	reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
    }

  /* Scan backwards; PREV is fetched up front because INSN may be
     deleted or reordered by the recognizers.  */
  for (insn = get_last_insn (); insn; insn = prev)
    {
      bool control_flow_insn;
      rtx note;

      prev = PREV_INSN (insn);

      /* We cannot do our optimization across labels.  Invalidating all the use
	 information we have would be costly, so we just note where the label
	 is and then later disable any optimization that would cross it.  */
      if (LABEL_P (insn))
	last_label_ruid = reload_combine_ruid;
      else if (BARRIER_P (insn))
	{
	  /* Crossing a barrier resets all the use information.  */
	  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	    if (! fixed_regs[r])
	      reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
	}
      else if (INSN_P (insn) && volatile_insn_p (PATTERN (insn)))
	/* Optimizations across insns being marked as volatile must be
	   prevented.  All the usage information is invalidated
	   here.  */
	for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	  if (! fixed_regs[r]
	      && reg_state[r].use_index != RELOAD_COMBINE_MAX_USES)
	    reg_state[r].use_index = -1;

      if (! NONDEBUG_INSN_P (insn))
	continue;

      reload_combine_ruid++;

      control_flow_insn = control_flow_insn_p (insn);
      if (control_flow_insn)
	last_jump_ruid = reload_combine_ruid;

      if (reload_combine_recognize_const_pattern (insn)
	  || reload_combine_recognize_pattern (insn))
	continue;

      note_stores (PATTERN (insn), reload_combine_note_store, NULL);

      if (CALL_P (insn))
	{
	  rtx link;
	  HARD_REG_SET used_regs;

	  get_call_reg_set_usage (insn, &used_regs, call_used_reg_set);

	  /* Registers clobbered by the call lose all recorded uses.  */
	  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	    if (TEST_HARD_REG_BIT (used_regs, r))
	      {
		reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
		reg_state[r].store_ruid = reload_combine_ruid;
	      }

	  for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
	       link = XEXP (link, 1))
	    {
	      rtx setuse = XEXP (link, 0);
	      rtx usage_rtx = XEXP (setuse, 0);
	      /* We could support CLOBBER_HIGH and treat it in the same way as
		 HARD_REGNO_CALL_PART_CLOBBERED, but no port needs that yet.  */
	      gcc_assert (GET_CODE (setuse) != CLOBBER_HIGH);

	      if ((GET_CODE (setuse) == USE || GET_CODE (setuse) == CLOBBER)
		  && REG_P (usage_rtx))
		{
		  unsigned int end_regno = END_REGNO (usage_rtx);
		  for (unsigned int i = REGNO (usage_rtx); i < end_regno; ++i)
		    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
		      {
			reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
			reg_state[i].store_ruid = reload_combine_ruid;
		      }
		    else
		      /* A USE by the call: used in an unknown fashion.  */
		      reg_state[i].use_index = -1;
		}
	    }
	}

      if (control_flow_insn && !ANY_RETURN_P (PATTERN (insn)))
	{
	  /* Non-spill registers might be used at the call destination in
	     some unknown fashion, so we have to mark the unknown use.  */
	  HARD_REG_SET *live;

	  if ((condjump_p (insn) || condjump_in_parallel_p (insn))
	      && JUMP_LABEL (insn))
	    {
	      if (ANY_RETURN_P (JUMP_LABEL (insn)))
		live = NULL;
	      else
		live = &LABEL_LIVE (JUMP_LABEL (insn));
	    }
	  else
	    live = &ever_live_at_start;

	  if (live)
	    for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	      if (TEST_HARD_REG_BIT (*live, r))
		reg_state[r].use_index = -1;
	}

      reload_combine_note_use (&PATTERN (insn), insn, reload_combine_ruid,
			       NULL_RTX);

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	{
	  if (REG_NOTE_KIND (note) == REG_INC && REG_P (XEXP (note, 0)))
	    {
	      /* An auto-increment both stores and uses the register in
		 an unknown fashion.  */
	      int regno = REGNO (XEXP (note, 0));
	      reg_state[regno].store_ruid = reload_combine_ruid;
	      reg_state[regno].real_store_ruid = reload_combine_ruid;
	      reg_state[regno].use_index = -1;
	    }
	}
    }

  free (label_live);
}
1409
/* Check if DST is a register or a subreg of a register; if it is,
   update store_ruid, real_store_ruid and use_index in the reg_state
   structure accordingly.  Called via note_stores from reload_combine.  */

static void
reload_combine_note_store (rtx dst, const_rtx set, void *data ATTRIBUTE_UNUSED)
{
  int regno = 0;
  int i;
  machine_mode mode = GET_MODE (dst);

  if (GET_CODE (dst) == SUBREG)
    {
      /* Record the offset of the hard register actually written, then
	 continue with the inner register.  */
      regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
				   GET_MODE (SUBREG_REG (dst)),
				   SUBREG_BYTE (dst),
				   GET_MODE (dst));
      dst = SUBREG_REG (dst);
    }

  /* Some targets do argument pushes without adding REG_INC notes.  */

  if (MEM_P (dst))
    {
      dst = XEXP (dst, 0);
      if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
	  || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC
	  || GET_CODE (dst) == PRE_MODIFY || GET_CODE (dst) == POST_MODIFY)
	{
	  unsigned int end_regno = END_REGNO (XEXP (dst, 0));
	  for (unsigned int i = REGNO (XEXP (dst, 0)); i < end_regno; ++i)
	    {
	      /* We could probably do better, but for now mark the register
		 as used in an unknown fashion and set/clobbered at this
		 insn.  */
	      reg_state[i].use_index = -1;
	      reg_state[i].store_ruid = reload_combine_ruid;
	      reg_state[i].real_store_ruid = reload_combine_ruid;
	    }
	}
      else
	return;
    }

  if (!REG_P (dst))
    return;
  regno += REGNO (dst);

  /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
     careful with registers / register parts that are not full words.
     Similarly for ZERO_EXTRACT.  */
  if (GET_CODE (SET_DEST (set)) == ZERO_EXTRACT
      || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART)
    {
      /* Partial store: the rest of the register survives, so treat the
	 whole register as used in an unknown fashion.  */
      for (i = end_hard_regno (mode, regno) - 1; i >= regno; i--)
	{
	  reg_state[i].use_index = -1;
	  reg_state[i].store_ruid = reload_combine_ruid;
	  reg_state[i].real_store_ruid = reload_combine_ruid;
	}
    }
  else
    {
      /* Full store: previous uses become irrelevant.  real_store_ruid is
	 only advanced for a SET proper, not a CLOBBER.  */
      for (i = end_hard_regno (mode, regno) - 1; i >= regno; i--)
	{
	  reg_state[i].store_ruid = reload_combine_ruid;
	  if (GET_CODE (set) == SET)
	    reg_state[i].real_store_ruid = reload_combine_ruid;
	  reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
	}
    }
}
1482
/* XP points to a piece of rtl that has to be checked for any uses of
   registers.
   *XP is the pattern of INSN, or a part of it.
   Called from reload_combine, and recursively by itself.  */
static void
reload_combine_note_use (rtx *xp, rtx_insn *insn, int ruid, rtx containing_mem)
{
  rtx x = *xp;
  enum rtx_code code = x->code;
  const char *fmt;
  int i, j;
  rtx offset = const0_rtx; /* For the REG case below.  */

  switch (code)
    {
    case SET:
      if (REG_P (SET_DEST (x)))
	{
	  /* The destination is a store, not a use; only scan the source.  */
	  reload_combine_note_use (&SET_SRC (x), insn, ruid, NULL_RTX);
	  return;
	}
      break;

    case USE:
      /* If this is the USE of a return value, we can't change it.  */
      if (REG_P (XEXP (x, 0)) && REG_FUNCTION_VALUE_P (XEXP (x, 0)))
	{
	  /* Mark the return register as used in an unknown fashion.  */
	  rtx reg = XEXP (x, 0);
	  unsigned int end_regno = END_REGNO (reg);
	  for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
	    reg_state[regno].use_index = -1;
	  return;
	}
      break;

    case CLOBBER:
      if (REG_P (SET_DEST (x)))
	{
	  /* No spurious CLOBBERs of pseudo registers may remain.  */
	  gcc_assert (REGNO (SET_DEST (x)) < FIRST_PSEUDO_REGISTER);
	  return;
	}
      break;

    case CLOBBER_HIGH:
      gcc_assert (REG_P (SET_DEST (x)));
      return;

    case PLUS:
      /* We are interested in (plus (reg) (const_int)) .  */
      if (!REG_P (XEXP (x, 0))
	  || !CONST_INT_P (XEXP (x, 1)))
	break;
      offset = XEXP (x, 1);
      x = XEXP (x, 0);
      /* Fall through.  */
    case REG:
      {
	int regno = REGNO (x);
	int use_index;
	int nregs;

	/* No spurious USEs of pseudo registers may remain.  */
	gcc_assert (regno < FIRST_PSEUDO_REGISTER);

	nregs = REG_NREGS (x);

	/* We can't substitute into multi-hard-reg uses.  */
	if (nregs > 1)
	  {
	    while (--nregs >= 0)
	      reg_state[regno + nregs].use_index = -1;
	    return;
	  }

	/* We may be called to update uses in previously seen insns.
	   Don't add uses beyond the last store we saw.  */
	if (ruid < reg_state[regno].store_ruid)
	  return;

	/* If this register is already used in some unknown fashion, we
	   can't do anything.
	   If we decrement the index from zero to -1, we can't store more
	   uses, so this register becomes used in an unknown fashion.  */
	use_index = --reg_state[regno].use_index;
	if (use_index < 0)
	  return;

	if (use_index == RELOAD_COMBINE_MAX_USES - 1)
	  {
	    /* This is the first use of this register we have seen since we
	       marked it as dead.  */
	    reg_state[regno].offset = offset;
	    reg_state[regno].all_offsets_match = true;
	    reg_state[regno].use_ruid = ruid;
	  }
	else
	  {
	    /* Track the earliest use RUID and whether all uses share the
	       same constant offset.  */
	    if (reg_state[regno].use_ruid > ruid)
	      reg_state[regno].use_ruid = ruid;

	    if (! rtx_equal_p (offset, reg_state[regno].offset))
	      reg_state[regno].all_offsets_match = false;
	  }

	reg_state[regno].reg_use[use_index].insn = insn;
	reg_state[regno].reg_use[use_index].ruid = ruid;
	reg_state[regno].reg_use[use_index].containing_mem = containing_mem;
	reg_state[regno].reg_use[use_index].usep = xp;
	return;
      }

    case MEM:
      /* Remember the innermost enclosing MEM for the uses below it.  */
      containing_mem = x;
      break;

    default:
      break;
    }

  /* Recursively process the components of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	reload_combine_note_use (&XEXP (x, i), insn, ruid, containing_mem);
      else if (fmt[i] == 'E')
	{
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    reload_combine_note_use (&XVECEXP (x, i, j), insn, ruid,
				     containing_mem);
	}
    }
}
1618\f
/* See if we can reduce the cost of a constant by replacing a move
   with an add.  We track situations in which a register is set to a
   constant or to a register plus a constant.  */
/* We cannot do our optimization across labels.  Invalidating all the
   information about register contents we have would be costly, so we
   use move2add_last_label_luid to note where the label is and then
   later disable any optimization that would cross it.
   reg_offset[n] / reg_base_reg[n] / reg_symbol_ref[n] / reg_mode[n]
   are only valid if reg_set_luid[n] is greater than
   move2add_last_label_luid.
   For a set that established a new (potential) base register with
   non-constant value, we use move2add_luid from the place where the
   setting insn is encountered; registers based off that base then
   get the same reg_set_luid.  Constants all get
   move2add_last_label_luid + 1 as their reg_set_luid.  */
static int reg_set_luid[FIRST_PSEUDO_REGISTER];

/* If reg_base_reg[n] is negative, register n has been set to
   reg_offset[n] or reg_symbol_ref[n] + reg_offset[n] in mode reg_mode[n].
   If reg_base_reg[n] is non-negative, register n has been set to the
   sum of reg_offset[n] and the value of register reg_base_reg[n]
   before reg_set_luid[n], calculated in mode reg_mode[n] .
   For multi-hard-register registers, all but the first one are
   recorded as BLKmode in reg_mode.  Setting reg_mode to VOIDmode
   marks it as invalid.  */
static HOST_WIDE_INT reg_offset[FIRST_PSEUDO_REGISTER];
static int reg_base_reg[FIRST_PSEUDO_REGISTER];
static rtx reg_symbol_ref[FIRST_PSEUDO_REGISTER];
static machine_mode reg_mode[FIRST_PSEUDO_REGISTER];

/* move2add_luid is linearly increased while scanning the instructions
   from first to last.  It is used to set reg_set_luid in
   reload_cse_move2add and move2add_note_store.  */
static int move2add_luid;

/* move2add_last_label_luid is set whenever a label is found.  Labels
   invalidate all previously collected reg_offset data.  */
static int move2add_last_label_luid;

/* ??? We don't know how zero / sign extension is handled, hence we
   can't go from a narrower to a wider mode.  */
#define MODES_OK_FOR_MOVE2ADD(OUTMODE, INMODE) \
  (GET_MODE_SIZE (OUTMODE) == GET_MODE_SIZE (INMODE) \
   || (GET_MODE_SIZE (OUTMODE) <= GET_MODE_SIZE (INMODE) \
       && TRULY_NOOP_TRUNCATION_MODES_P (OUTMODE, INMODE)))
8f8cadbc 1664
b6b86e87 1665/* Record that REG is being set to a value with the mode of REG. */
1666
1667static void
1668move2add_record_mode (rtx reg)
1669{
1670 int regno, nregs;
3754d046 1671 machine_mode mode = GET_MODE (reg);
b6b86e87 1672
1673 if (GET_CODE (reg) == SUBREG)
1674 {
1675 regno = subreg_regno (reg);
1676 nregs = subreg_nregs (reg);
1677 }
1678 else if (REG_P (reg))
1679 {
1680 regno = REGNO (reg);
0933f1d9 1681 nregs = REG_NREGS (reg);
b6b86e87 1682 }
1683 else
1684 gcc_unreachable ();
1685 for (int i = nregs - 1; i > 0; i--)
1686 reg_mode[regno + i] = BLKmode;
1687 reg_mode[regno] = mode;
1688}
1689
1690/* Record that REG is being set to the sum of SYM and OFF. */
1691
1692static void
1693move2add_record_sym_value (rtx reg, rtx sym, rtx off)
1694{
1695 int regno = REGNO (reg);
1696
1697 move2add_record_mode (reg);
1698 reg_set_luid[regno] = move2add_luid;
1699 reg_base_reg[regno] = -1;
1700 reg_symbol_ref[regno] = sym;
1701 reg_offset[regno] = INTVAL (off);
1702}
1703
/* Check if REGNO contains a valid value in MODE.  */

static bool
move2add_valid_value_p (int regno, scalar_int_mode mode)
{
  /* Values recorded at or before the last label are invalidated.  */
  if (reg_set_luid[regno] <= move2add_last_label_luid)
    return false;

  if (mode != reg_mode[regno])
    {
      scalar_int_mode old_mode;
      if (!is_a <scalar_int_mode> (reg_mode[regno], &old_mode)
	  || !MODES_OK_FOR_MOVE2ADD (mode, old_mode))
	return false;
      /* The value loaded into regno in reg_mode[regno] is also valid in
	 mode after truncation only if (REG:mode regno) is the lowpart of
	 (REG:reg_mode[regno] regno).  Now, for big endian, the starting
	 regno of the lowpart might be different.  */
      poly_int64 s_off = subreg_lowpart_offset (mode, old_mode);
      s_off = subreg_regno_offset (regno, old_mode, s_off, mode);
      if (maybe_ne (s_off, 0))
	/* We could in principle adjust regno, check reg_mode[regno] to be
	   BLKmode, and return s_off to the caller (vs. -1 for failure),
	   but we currently have no callers that could make use of this
	   information.  */
	return false;
    }

  /* All the trailing hard registers of a multi-register value must still
     be marked BLKmode, i.e. untouched since the value was recorded.  */
  for (int i = end_hard_regno (mode, regno) - 1; i > regno; i--)
    if (reg_mode[i] != BLKmode)
      return false;
  return true;
}
1737
/* This function is called with INSN that sets REG (of mode MODE)
   to (SYM + OFF), while REG is known to already have value (SYM + offset).
   This function tries to change INSN into an add instruction
   (set (REG) (plus (REG) (OFF - offset))) using the known value.
   It also updates the information about REG's known value.
   Return true if we made a change.  */

static bool
move2add_use_add2_insn (scalar_int_mode mode, rtx reg, rtx sym, rtx off,
			rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  int regno = REGNO (reg);
  /* The delta between the desired and the known value.  */
  rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[regno], mode);
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  bool changed = false;

  /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
     use (set (reg) (reg)) instead.
     We don't delete this insn, nor do we convert it into a
     note, to avoid losing register notes or the return
     value flag.  jump2 already knows how to get rid of
     no-op moves.  */
  if (new_src == const0_rtx)
    {
      /* If the constants are different, this is a
	 truncation, that, if turned into (set (reg)
	 (reg)), would be discarded.  Maybe we should
	 try a truncMN pattern?  */
      if (INTVAL (off) == reg_offset [regno])
	changed = validate_change (insn, &SET_SRC (pat), reg, 0);
    }
  else
    {
      struct full_rtx_costs oldcst, newcst;
      rtx tem = gen_rtx_PLUS (mode, reg, new_src);

      /* Cost the add form by temporarily splicing it into PAT, then
	 restore the original source.  */
      get_full_set_rtx_cost (pat, &oldcst);
      SET_SRC (pat) = tem;
      get_full_set_rtx_cost (pat, &newcst);
      SET_SRC (pat) = src;

      if (costs_lt_p (&newcst, &oldcst, speed)
	  && have_add2_insn (reg, new_src))
	changed = validate_change (insn, &SET_SRC (pat), tem, 0);
      else if (sym == NULL_RTX && mode != BImode)
	{
	  /* Try setting only the low part of REG via STRICT_LOW_PART
	     when the high bits of the known value already match.  */
	  scalar_int_mode narrow_mode;
	  FOR_EACH_MODE_UNTIL (narrow_mode, mode)
	    {
	      if (have_insn_for (STRICT_LOW_PART, narrow_mode)
		  && ((reg_offset[regno] & ~GET_MODE_MASK (narrow_mode))
		      == (INTVAL (off) & ~GET_MODE_MASK (narrow_mode))))
		{
		  rtx narrow_reg = gen_lowpart_common (narrow_mode, reg);
		  rtx narrow_src = gen_int_mode (INTVAL (off),
						 narrow_mode);
		  rtx new_set
		    = gen_rtx_SET (gen_rtx_STRICT_LOW_PART (VOIDmode,
							    narrow_reg),
				   narrow_src);
		  get_full_set_rtx_cost (new_set, &newcst);
		  if (costs_lt_p (&newcst, &oldcst, speed))
		    {
		      changed = validate_change (insn, &PATTERN (insn),
						 new_set, 0);
		      if (changed)
			break;
		    }
		}
	    }
	}
    }
  move2add_record_sym_value (reg, sym, off);
  return changed;
}
1815
1816
/* This function is called with INSN that sets REG (of mode MODE) to
   (SYM + OFF), but REG doesn't have known value (SYM + offset).  This
   function tries to find another register which is known to already have
   value (SYM + offset) and change INSN into an add instruction
   (set (REG) (plus (the found register) (OFF - offset))) if such
   a register is found.  It also updates the information about
   REG's known value.
   Return true iff we made a change.  */

static bool
move2add_use_add3_insn (scalar_int_mode mode, rtx reg, rtx sym, rtx off,
			rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  int regno = REGNO (reg);
  int min_regno = 0;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  int i;
  bool changed = false;
  struct full_rtx_costs oldcst, newcst, mincst;
  rtx plus_expr;

  init_costs_to_max (&mincst);
  get_full_set_rtx_cost (pat, &oldcst);

  /* Temporarily splice a (plus reg 0) into PAT so each candidate delta
     can be costed by rewriting operand 1; SRC is restored below.  */
  plus_expr = gen_rtx_PLUS (GET_MODE (reg), reg, const0_rtx);
  SET_SRC (pat) = plus_expr;

  /* Search all registers known to hold SYM plus some offset for the one
     giving the cheapest add.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (move2add_valid_value_p (i, mode)
	&& reg_base_reg[i] < 0
	&& reg_symbol_ref[i] != NULL_RTX
	&& rtx_equal_p (sym, reg_symbol_ref[i]))
      {
	rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[i],
				    GET_MODE (reg));
	/* (set (reg) (plus (reg) (const_int 0))) is not canonical;
	   use (set (reg) (reg)) instead.
	   We don't delete this insn, nor do we convert it into a
	   note, to avoid losing register notes or the return
	   value flag.  jump2 already knows how to get rid of
	   no-op moves.  */
	if (new_src == const0_rtx)
	  {
	    /* Zero-cost exact match; no need to look further.  */
	    init_costs_to_zero (&mincst);
	    min_regno = i;
	    break;
	  }
	else
	  {
	    XEXP (plus_expr, 1) = new_src;
	    get_full_set_rtx_cost (pat, &newcst);

	    if (costs_lt_p (&newcst, &mincst, speed))
	      {
		mincst = newcst;
		min_regno = i;
	      }
	  }
      }
  SET_SRC (pat) = src;

  if (costs_lt_p (&mincst, &oldcst, speed))
    {
      rtx tem;

      tem = gen_rtx_REG (GET_MODE (reg), min_regno);
      /* I == MIN_REGNO only when we broke out on the exact match above,
	 in which case a plain register copy suffices.  */
      if (i != min_regno)
	{
	  rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[min_regno],
				      GET_MODE (reg));
	  tem = gen_rtx_PLUS (GET_MODE (reg), tem, new_src);
	}
      if (validate_change (insn, &SET_SRC (pat), tem, 0))
	changed = true;
    }
  reg_set_luid[regno] = move2add_luid;
  move2add_record_sym_value (reg, sym, off);
  return changed;
}
1898
d83ccc81 1899/* Convert move insns with constant inputs to additions if they are cheaper.
 1900 Return true if any changes were made. */
 1901static bool
3aeaa53f 1902reload_cse_move2add (rtx_insn *first)
8f8cadbc 1903{
 1904 int i;
3aeaa53f 1905 rtx_insn *insn;
d83ccc81 1906 bool changed = false;
8f8cadbc 1907
  /* Start with no valid information about any hard register.  */
 1908 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
6132c0d0 1909 {
 1910 reg_set_luid[i] = 0;
 1911 reg_offset[i] = 0;
 1912 reg_base_reg[i] = 0;
 1913 reg_symbol_ref[i] = NULL_RTX;
 1914 reg_mode[i] = VOIDmode;
 1915 }
8f8cadbc 1916
 1917 move2add_last_label_luid = 0;
 1918 move2add_luid = 2;
 1919 for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
 1920 {
 1921 rtx pat, note;
 1922
6d7dc5b9 1923 if (LABEL_P (insn))
8f8cadbc 1924 {
 1925 move2add_last_label_luid = move2add_luid;
 1926 /* We're going to increment move2add_luid twice after a
 1927 label, so that we can use move2add_last_label_luid + 1 as
 1928 the luid for constants. */
 1929 move2add_luid++;
 1930 continue;
 1931 }
 1932 if (! INSN_P (insn))
 1933 continue;
 1934 pat = PATTERN (insn);
 1935 /* For simplicity, we only perform this optimization on
 1936 straightforward SETs. */
18426c5b 1937 scalar_int_mode mode;
8f8cadbc 1938 if (GET_CODE (pat) == SET
18426c5b 1939 && REG_P (SET_DEST (pat))
 1940 && is_a <scalar_int_mode> (GET_MODE (SET_DEST (pat)), &mode))
8f8cadbc 1941 {
 1942 rtx reg = SET_DEST (pat);
 1943 int regno = REGNO (reg);
 1944 rtx src = SET_SRC (pat);
 1945
 1946 /* Check if we have valid information on the contents of this
 1947 register in the mode of REG. */
18426c5b 1948 if (move2add_valid_value_p (regno, mode)
3072d30e 1949 && dbg_cnt (cse2_move2add))
8f8cadbc 1950 {
 1951 /* Try to transform (set (REGX) (CONST_INT A))
 1952 ...
 1953 (set (REGX) (CONST_INT B))
 1954 to
 1955 (set (REGX) (CONST_INT A))
 1956 ...
 1957 (set (REGX) (plus (REGX) (CONST_INT B-A)))
 1958 or
 1959 (set (REGX) (CONST_INT A))
 1960 ...
 1961 (set (STRICT_LOW_PART (REGX)) (CONST_INT B))
 1962 */
 1963
6132c0d0 1964 if (CONST_INT_P (src)
 1965 && reg_base_reg[regno] < 0
 1966 && reg_symbol_ref[regno] == NULL_RTX)
8f8cadbc 1967 {
18426c5b 1968 changed |= move2add_use_add2_insn (mode, reg, NULL_RTX,
 1969 src, insn);
8f8cadbc 1970 continue;
 1971 }
 1972
 1973 /* Try to transform (set (REGX) (REGY))
 1974 (set (REGX) (PLUS (REGX) (CONST_INT A)))
 1975 ...
 1976 (set (REGX) (REGY))
 1977 (set (REGX) (PLUS (REGX) (CONST_INT B)))
 1978 to
 1979 (set (REGX) (REGY))
 1980 (set (REGX) (PLUS (REGX) (CONST_INT A)))
 1981 ...
 1982 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
8ad4c111 1983 else if (REG_P (src)
8f8cadbc 1984 && reg_set_luid[regno] == reg_set_luid[REGNO (src)]
 1985 && reg_base_reg[regno] == reg_base_reg[REGNO (src)]
18426c5b 1986 && move2add_valid_value_p (REGNO (src), mode))
8f8cadbc 1987 {
3aeaa53f 1988 rtx_insn *next = next_nonnote_nondebug_insn (insn);
8f8cadbc 1989 rtx set = NULL_RTX;
 1990 if (next)
 1991 set = single_set (next);
 1992 if (set
 1993 && SET_DEST (set) == reg
 1994 && GET_CODE (SET_SRC (set)) == PLUS
 1995 && XEXP (SET_SRC (set), 0) == reg
971ba038 1996 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
8f8cadbc 1997 {
 1998 rtx src3 = XEXP (SET_SRC (set), 1);
60141df0 1999 unsigned HOST_WIDE_INT added_offset = UINTVAL (src3);
8f8cadbc 2000 HOST_WIDE_INT base_offset = reg_offset[REGNO (src)];
 2001 HOST_WIDE_INT regno_offset = reg_offset[regno];
 2002 rtx new_src =
69e41517 2003 gen_int_mode (added_offset
 2004 + base_offset
 2005 - regno_offset,
18426c5b 2006 mode);
f529eb25 2007 bool success = false;
 2008 bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
8f8cadbc 2009
 2010 if (new_src == const0_rtx)
 2011 /* See above why we create (set (reg) (reg)) here. */
 2012 success
 2013 = validate_change (next, &SET_SRC (set), reg, 0);
c9a03487 2014 else
8f8cadbc 2015 {
  /* Cost the replacement add against the original two-insn
     sequence (move + add, hence the extra insn added to
     OLDCST) before committing to it.  */
c9a03487 2016 rtx old_src = SET_SRC (set);
 2017 struct full_rtx_costs oldcst, newcst;
18426c5b 2018 rtx tem = gen_rtx_PLUS (mode, reg, new_src);
c9a03487 2019
b72d459f 2020 get_full_set_rtx_cost (set, &oldcst);
c9a03487 2021 SET_SRC (set) = tem;
18426c5b 2022 get_full_set_src_cost (tem, mode, &newcst);
c9a03487 2023 SET_SRC (set) = old_src;
 2024 costs_add_n_insns (&oldcst, 1);
 2025
 2026 if (costs_lt_p (&newcst, &oldcst, speed)
 2027 && have_add2_insn (reg, new_src))
 2028 {
d1f9b275 2029 rtx newpat = gen_rtx_SET (reg, tem);
c9a03487 2030 success
 2031 = validate_change (next, &PATTERN (next),
 2032 newpat, 0);
 2033 }
8f8cadbc 2034 }
  /* On success the (set (REGX) (REGY)) move becomes dead;
     either way, record REG's new known offset.  */
 2035 if (success)
 2036 delete_insn (insn);
d83ccc81 2037 changed |= success;
8f8cadbc 2038 insn = next;
b6b86e87 2039 move2add_record_mode (reg);
 2040 reg_offset[regno]
 2041 = trunc_int_for_mode (added_offset + base_offset,
18426c5b 2042 mode);
8f8cadbc 2043 continue;
 2044 }
 2045 }
 2046 }
6132c0d0 2047
 2048 /* Try to transform
 2049 (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
 2050 ...
 2051 (set (REGY) (CONST (PLUS (SYMBOL_REF) (CONST_INT B))))
 2052 to
 2053 (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
 2054 ...
 2055 (set (REGY) (CONST (PLUS (REGX) (CONST_INT B-A)))) */
 2056 if ((GET_CODE (src) == SYMBOL_REF
 2057 || (GET_CODE (src) == CONST
 2058 && GET_CODE (XEXP (src, 0)) == PLUS
 2059 && GET_CODE (XEXP (XEXP (src, 0), 0)) == SYMBOL_REF
 2060 && CONST_INT_P (XEXP (XEXP (src, 0), 1))))
 2061 && dbg_cnt (cse2_move2add))
 2062 {
 2063 rtx sym, off;
 2064
 2065 if (GET_CODE (src) == SYMBOL_REF)
 2066 {
 2067 sym = src;
 2068 off = const0_rtx;
 2069 }
 2070 else
 2071 {
 2072 sym = XEXP (XEXP (src, 0), 0);
 2073 off = XEXP (XEXP (src, 0), 1);
 2074 }
 2075
 2076 /* If the reg already contains the value which is sum of
 2077 sym and some constant value, we can use an add2 insn. */
18426c5b 2078 if (move2add_valid_value_p (regno, mode)
6132c0d0 2079 && reg_base_reg[regno] < 0
 2080 && reg_symbol_ref[regno] != NULL_RTX
 2081 && rtx_equal_p (sym, reg_symbol_ref[regno]))
18426c5b 2082 changed |= move2add_use_add2_insn (mode, reg, sym, off, insn);
6132c0d0 2083
 2084 /* Otherwise, we have to find a register whose value is sum
 2085 of sym and some constant value. */
 2086 else
18426c5b 2087 changed |= move2add_use_add3_insn (mode, reg, sym, off, insn);
6132c0d0 2088
 2089 continue;
 2090 }
8f8cadbc 2091 }
 2092
  /* Auto-increment side effects modify a register without a SET, so
     drop whatever we knew about registers mentioned in REG_INC notes.  */
 2093 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
 2094 {
 2095 if (REG_NOTE_KIND (note) == REG_INC
8ad4c111 2096 && REG_P (XEXP (note, 0)))
8f8cadbc 2097 {
 2098 /* Reset the information about this register. */
 2099 int regno = REGNO (XEXP (note, 0));
 2100 if (regno < FIRST_PSEUDO_REGISTER)
b6b86e87 2101 {
 2102 move2add_record_mode (XEXP (note, 0));
 2103 reg_mode[regno] = VOIDmode;
 2104 }
8f8cadbc 2105 }
 2106 }
  /* Record the effect of every store in this insn on the tracked
     register values.  */
6132c0d0 2107 note_stores (PATTERN (insn), move2add_note_store, insn);
8f8cadbc 2108
 2109 /* If INSN is a conditional branch, we try to extract an
 2110 implicit set out of it. */
f222bc3b 2111 if (any_condjump_p (insn))
8f8cadbc 2112 {
 2113 rtx cnd = fis_get_condition (insn);
 2114
 2115 if (cnd != NULL_RTX
 2116 && GET_CODE (cnd) == NE
8ad4c111 2117 && REG_P (XEXP (cnd, 0))
f222bc3b 2118 && !reg_set_p (XEXP (cnd, 0), insn)
8f8cadbc 2119 /* The following two checks, which are also in
 2120 move2add_note_store, are intended to reduce the
 2121 number of calls to gen_rtx_SET to avoid memory
 2122 allocation if possible. */
 2123 && SCALAR_INT_MODE_P (GET_MODE (XEXP (cnd, 0)))
0933f1d9 2124 && REG_NREGS (XEXP (cnd, 0)) == 1
971ba038 2125 && CONST_INT_P (XEXP (cnd, 1)))
8f8cadbc 2126 {
 2127 rtx implicit_set =
d1f9b275 2128 gen_rtx_SET (XEXP (cnd, 0), XEXP (cnd, 1));
6132c0d0 2129 move2add_note_store (SET_DEST (implicit_set), implicit_set, insn);
8f8cadbc 2130 }
 2131 }
 2132
 2133 /* If this is a CALL_INSN, all call used registers are stored with
 2134 unknown values. */
6d7dc5b9 2135 if (CALL_P (insn))
8f8cadbc 2136 {
39bde736 2137 rtx link;
 2138
8f8cadbc 2139 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
 2140 {
 2141 if (call_used_regs[i])
 2142 /* Reset the information about this register. */
b6b86e87 2143 reg_mode[i] = VOIDmode;
8f8cadbc 2144 }
39bde736 2145
  /* Also invalidate registers the call explicitly clobbers via
     CALL_INSN_FUNCTION_USAGE.  */
 2146 for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
 2147 link = XEXP (link, 1))
 2148 {
 2149 rtx setuse = XEXP (link, 0);
 2150 rtx usage_rtx = XEXP (setuse, 0);
70bdfe23 2151 /* CALL_INSN_FUNCTION_USAGEs can only have full clobbers, not
 2152 clobber_highs. */
 2153 gcc_assert (GET_CODE (setuse) != CLOBBER_HIGH);
39bde736 2154 if (GET_CODE (setuse) == CLOBBER
 2155 && REG_P (usage_rtx))
 2156 {
 2157 unsigned int end_regno = END_REGNO (usage_rtx);
 2158 for (unsigned int r = REGNO (usage_rtx); r < end_regno; ++r)
 2159 /* Reset the information about this register. */
 2160 reg_mode[r] = VOIDmode;
 2161 }
 2162 }
8f8cadbc 2163 }
 2164 }
d83ccc81 2165 return changed;
8f8cadbc 2166}
2167
6132c0d0 2168/* SET is a SET or CLOBBER that sets DST. DATA is the insn which
 2169 contains SET.
8f8cadbc 2170 Update reg_set_luid, reg_offset and reg_base_reg accordingly.
 2171 Called from reload_cse_move2add via note_stores. */
 2172
 2173static void
6132c0d0 2174move2add_note_store (rtx dst, const_rtx set, void *data)
8f8cadbc 2175{
3aeaa53f 2176 rtx_insn *insn = (rtx_insn *) data;
8f8cadbc 2177 unsigned int regno = 0;
8974b7a3 2178 scalar_int_mode mode;
8f8cadbc 2179
8f8cadbc 2180 /* Some targets do argument pushes without adding REG_INC notes. */
 2181
e16ceb8e 2182 if (MEM_P (dst))
8f8cadbc 2183 {
 2184 dst = XEXP (dst, 0);
 2185 if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
 2186 || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC)
b6b86e87 2187 reg_mode[REGNO (XEXP (dst, 0))] = VOIDmode;
8f8cadbc 2188 return;
 2189 }
8f8cadbc 2190
  /* Work out which hard register is being set; anything that is neither
     a REG nor a SUBREG of one is not trackable here.  */
b6b86e87 2191 if (GET_CODE (dst) == SUBREG)
 2192 regno = subreg_regno (dst);
 2193 else if (REG_P (dst))
 2194 regno = REGNO (dst);
 2195 else
 2196 return;
8f8cadbc 2197
8974b7a3 2198 if (!is_a <scalar_int_mode> (GET_MODE (dst), &mode))
 2199 goto invalidate;
 2200
  /* A SET with a REG_EQUAL/REG_EQUIV note of the form SYMBOL_REF or
     (const (plus SYMBOL_REF const_int)) records DST as SYM + OFF.  */
 2201 if (GET_CODE (set) == SET)
6132c0d0 2202 {
 2203 rtx note, sym = NULL_RTX;
b6b86e87 2204 rtx off;
6132c0d0 2205
 2206 note = find_reg_equal_equiv_note (insn);
 2207 if (note && GET_CODE (XEXP (note, 0)) == SYMBOL_REF)
 2208 {
 2209 sym = XEXP (note, 0);
b6b86e87 2210 off = const0_rtx;
6132c0d0 2211 }
 2212 else if (note && GET_CODE (XEXP (note, 0)) == CONST
 2213 && GET_CODE (XEXP (XEXP (note, 0), 0)) == PLUS
 2214 && GET_CODE (XEXP (XEXP (XEXP (note, 0), 0), 0)) == SYMBOL_REF
 2215 && CONST_INT_P (XEXP (XEXP (XEXP (note, 0), 0), 1)))
 2216 {
 2217 sym = XEXP (XEXP (XEXP (note, 0), 0), 0);
b6b86e87 2218 off = XEXP (XEXP (XEXP (note, 0), 0), 1);
6132c0d0 2219 }
 2220
 2221 if (sym != NULL_RTX)
 2222 {
b6b86e87 2223 move2add_record_sym_value (dst, sym, off);
6132c0d0 2224 return;
 2225 }
 2226 }
 2227
  /* A plain full-register SET may give DST a trackable value; partial
     sets (ZERO_EXTRACT, STRICT_LOW_PART) only invalidate it.  */
8974b7a3 2228 if (GET_CODE (set) == SET
8f8cadbc 2229 && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
8f8cadbc 2230 && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
 2231 {
 2232 rtx src = SET_SRC (set);
 2233 rtx base_reg;
60141df0 2234 unsigned HOST_WIDE_INT offset;
8f8cadbc 2235 int base_regno;
8f8cadbc 2236
 2237 switch (GET_CODE (src))
 2238 {
 2239 case PLUS:
8ad4c111 2240 if (REG_P (XEXP (src, 0)))
8f8cadbc 2241 {
 2242 base_reg = XEXP (src, 0);
 2243
971ba038 2244 if (CONST_INT_P (XEXP (src, 1)))
60141df0 2245 offset = UINTVAL (XEXP (src, 1));
8ad4c111 2246 else if (REG_P (XEXP (src, 1))
b6b86e87 2247 && move2add_valid_value_p (REGNO (XEXP (src, 1)), mode))
8f8cadbc 2248 {
c389f975 2249 if (reg_base_reg[REGNO (XEXP (src, 1))] < 0
 2250 && reg_symbol_ref[REGNO (XEXP (src, 1))] == NULL_RTX)
8f8cadbc 2251 offset = reg_offset[REGNO (XEXP (src, 1))];
 2252 /* Maybe the first register is known to be a
 2253 constant. */
b6b86e87 2254 else if (move2add_valid_value_p (REGNO (base_reg), mode)
c389f975 2255 && reg_base_reg[REGNO (base_reg)] < 0
 2256 && reg_symbol_ref[REGNO (base_reg)] == NULL_RTX)
8f8cadbc 2257 {
 2258 offset = reg_offset[REGNO (base_reg)];
 2259 base_reg = XEXP (src, 1);
 2260 }
 2261 else
 2262 goto invalidate;
 2263 }
 2264 else
 2265 goto invalidate;
 2266
 2267 break;
 2268 }
 2269
 2270 goto invalidate;
 2271
 2272 case REG:
 2273 base_reg = src;
 2274 offset = 0;
 2275 break;
 2276
 2277 case CONST_INT:
 2278 /* Start tracking the register as a constant. */
 2279 reg_base_reg[regno] = -1;
6132c0d0 2280 reg_symbol_ref[regno] = NULL_RTX;
8f8cadbc 2281 reg_offset[regno] = INTVAL (SET_SRC (set));
 2282 /* We assign the same luid to all registers set to constants. */
 2283 reg_set_luid[regno] = move2add_last_label_luid + 1;
b6b86e87 2284 move2add_record_mode (dst);
8f8cadbc 2285 return;
 2286
 2287 default:
b6b86e87 2288 goto invalidate;
8f8cadbc 2289 }
 2290
 2291 base_regno = REGNO (base_reg);
 2292 /* If information about the base register is not valid, set it
 2293 up as a new base register, pretending its value is known
 2294 starting from the current insn. */
b6b86e87 2295 if (!move2add_valid_value_p (base_regno, mode))
8f8cadbc 2296 {
 2297 reg_base_reg[base_regno] = base_regno;
6132c0d0 2298 reg_symbol_ref[base_regno] = NULL_RTX;
8f8cadbc 2299 reg_offset[base_regno] = 0;
 2300 reg_set_luid[base_regno] = move2add_luid;
b6b86e87 2301 gcc_assert (GET_MODE (base_reg) == mode);
 2302 move2add_record_mode (base_reg);
8f8cadbc 2303 }
8f8cadbc 2304
 2305 /* Copy base information from our base register. */
 2306 reg_set_luid[regno] = reg_set_luid[base_regno];
 2307 reg_base_reg[regno] = reg_base_reg[base_regno];
6132c0d0 2308 reg_symbol_ref[regno] = reg_symbol_ref[base_regno];
8f8cadbc 2309
 2310 /* Compute the sum of the offsets or constants. */
b6b86e87 2311 reg_offset[regno]
 2312 = trunc_int_for_mode (offset + reg_offset[base_regno], mode);
 2313
 2314 move2add_record_mode (dst);
8f8cadbc 2315 }
70bdfe23 2316 else if (GET_CODE (set) == CLOBBER_HIGH)
 2317 {
 2318 /* Only invalidate if actually clobbered. */
 2319 if (reg_mode[regno] == BLKmode
 2320 || reg_is_clobbered_by_clobber_high (regno, reg_mode[regno], dst))
 2321 goto invalidate;
 2322 }
8f8cadbc 2323 else
 2324 {
b6b86e87 2325 invalidate:
 2326 /* Invalidate the contents of the register. */
 2327 move2add_record_mode (dst);
 2328 reg_mode[regno] = VOIDmode;
8f8cadbc 2329 }
 2330}
77fce4cd 2331\f
cbe8bda8 2332namespace {

/* Pass descriptor for the post-reload CSE pass; consumed by the pass
   manager when the pass is instantiated below.  */
 2333
 2334const pass_data pass_data_postreload_cse =
77fce4cd 2335{
cbe8bda8 2336 RTL_PASS, /* type */
 2337 "postreload", /* name */
 2338 OPTGROUP_NONE, /* optinfo_flags */
cbe8bda8 2339 TV_RELOAD_CSE_REGS, /* tv_id */
 2340 0, /* properties_required */
 2341 0, /* properties_provided */
 2342 0, /* properties_destroyed */
 2343 0, /* todo_flags_start */
8b88439e 2344 TODO_df_finish, /* todo_flags_finish */
77fce4cd 2345};
cbe8bda8 2346
 2347class pass_postreload_cse : public rtl_opt_pass
 2348{
 2349public:
9af5ce0c 2350 pass_postreload_cse (gcc::context *ctxt)
 2351 : rtl_opt_pass (pass_data_postreload_cse, ctxt)
cbe8bda8 2352 {}
 2353
 2354 /* opt_pass methods: */
  /* Run only when optimizing and after reload has completed.  */
31315c24 2355 virtual bool gate (function *) { return (optimize > 0 && reload_completed); }
 2356
65b0537f 2357 virtual unsigned int execute (function *);
cbe8bda8 2358
 2359}; // class pass_postreload_cse

/* Execute the post-reload CSE pass over FUN's insn stream.
   Always returns 0 (no extra TODO flags).  */
 2360
65b0537f 2361unsigned int
 2362pass_postreload_cse::execute (function *fun)
 2363{
 2364 if (!dbg_cnt (postreload_cse))
 2365 return 0;
 2366
 2367 /* Do a very simple CSE pass over just the hard registers. */
 2368 reload_cse_regs (get_insns ());
 2369 /* Reload_cse_regs can eliminate potentially-trapping MEMs.
 2370 Remove any EH edges associated with them. */
 2371 if (fun->can_throw_non_call_exceptions
 2372 && purge_all_dead_edges ())
 2373 cleanup_cfg (0);
 2374
 2375 return 0;
 2376}
 2377
cbe8bda8 2378} // anon namespace
 2379

/* Factory used by the pass manager to create the pass instance.  */
 2380rtl_opt_pass *
 2381make_pass_postreload_cse (gcc::context *ctxt)
 2382{
 2383 return new pass_postreload_cse (ctxt);
 2384}