1 /* Copy propagation on hard registers for the GNU compiler.
2 Copyright (C) 2000-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "df.h"
26 #include "tm_p.h"
27 #include "insn-config.h"
28 #include "regs.h"
29 #include "addresses.h"
30 #include "reload.h"
31 #include "recog.h"
32 #include "flags.h"
33 #include "diagnostic-core.h"
34 #include "tree-pass.h"
35 #include "rtl-iter.h"
36 #include "emit-rtl.h"
37
38 /* The following code does forward propagation of hard register copies.
39 The object is to eliminate as many dependencies as possible, so that
40 we have the most scheduling freedom. As a side effect, we also clean
41 up some silly register allocation decisions made by reload. This
42 code may be obsoleted by a new register allocator. */
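/* As an illustrative sketch (hard register numbers here are purely
   hypothetical), a sequence such as

     (set (reg:SI 1) (reg:SI 0))
     (set (reg:SI 2) (plus:SI (reg:SI 1) (const_int 4)))

   is rewritten so that the second insn reads the older copy:

     (set (reg:SI 1) (reg:SI 0))
     (set (reg:SI 2) (plus:SI (reg:SI 0) (const_int 4)))

   which removes the dependency of the addition on the copy insn.  */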
43
44 /* DEBUG_INSNs aren't changed right away, as doing so might extend the
45 lifetime of a register and get the DEBUG_INSN subsequently reset.
46 So they are queued instead, and updated only when the register is
47 used in some subsequent real insn before it is set. */
48 struct queued_debug_insn_change
49 {
50 struct queued_debug_insn_change *next;
51 rtx_insn *insn;
52 rtx *loc;
53 rtx new_rtx;
54
55 /* Pool allocation new operator. */
56 inline void *operator new (size_t)
57 {
58 return pool.allocate ();
59 }
60
61 /* Delete operator utilizing pool allocation. */
62 inline void operator delete (void *ptr)
63 {
64 pool.remove ((queued_debug_insn_change *) ptr);
65 }
66
67 /* Memory allocation pool. */
68 static pool_allocator<queued_debug_insn_change> pool;
69 };
70
71 /* For each register, we have a list of registers that contain the same
72 value. The OLDEST_REGNO field points to the head of the list, and
73 the NEXT_REGNO field runs through the list. The MODE field indicates
74 what mode the data is known to be in; this field is VOIDmode when the
75 register is not known to contain valid data. */
76
77 struct value_data_entry
78 {
79 machine_mode mode;
80 unsigned int oldest_regno;
81 unsigned int next_regno;
82 struct queued_debug_insn_change *debug_insn_changes;
83 };
84
85 struct value_data
86 {
87 struct value_data_entry e[FIRST_PSEUDO_REGISTER];
88 unsigned int max_value_regs;
89 unsigned int n_debug_insn_changes;
90 };
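/* For illustration (register numbers are hypothetical): after processing
   (set (reg:SI 1) (reg:SI 0)), both e[0] and e[1] record mode SImode and
   oldest_regno == 0, e[0].next_regno == 1 and e[1].next_regno ==
   INVALID_REGNUM; that is, register 0 heads the chain and register 1 is
   linked behind it as the newer copy.  */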
91
92 pool_allocator<queued_debug_insn_change> queued_debug_insn_change::pool
93 ("debug insn changes pool", 256);
94
95 static bool skip_debug_insn_p;
96
97 static void kill_value_one_regno (unsigned, struct value_data *);
98 static void kill_value_regno (unsigned, unsigned, struct value_data *);
99 static void kill_value (const_rtx, struct value_data *);
100 static void set_value_regno (unsigned, machine_mode, struct value_data *);
101 static void init_value_data (struct value_data *);
102 static void kill_clobbered_value (rtx, const_rtx, void *);
103 static void kill_set_value (rtx, const_rtx, void *);
104 static void copy_value (rtx, rtx, struct value_data *);
105 static bool mode_change_ok (machine_mode, machine_mode,
106 unsigned int);
107 static rtx maybe_mode_change (machine_mode, machine_mode,
108 machine_mode, unsigned int, unsigned int);
109 static rtx find_oldest_value_reg (enum reg_class, rtx, struct value_data *);
110 static bool replace_oldest_value_reg (rtx *, enum reg_class, rtx_insn *,
111 struct value_data *);
112 static bool replace_oldest_value_addr (rtx *, enum reg_class,
113 machine_mode, addr_space_t,
114 rtx_insn *, struct value_data *);
115 static bool replace_oldest_value_mem (rtx, rtx_insn *, struct value_data *);
116 static bool copyprop_hardreg_forward_1 (basic_block, struct value_data *);
117 extern void debug_value_data (struct value_data *);
118 #ifdef ENABLE_CHECKING
119 static void validate_value_data (struct value_data *);
120 #endif
121
122 /* Free all queued updates for DEBUG_INSNs that change some reg to
123 register REGNO. */
124
125 static void
126 free_debug_insn_changes (struct value_data *vd, unsigned int regno)
127 {
128 struct queued_debug_insn_change *cur, *next;
129 for (cur = vd->e[regno].debug_insn_changes; cur; cur = next)
130 {
131 next = cur->next;
132 --vd->n_debug_insn_changes;
133 delete cur;
134 }
135 vd->e[regno].debug_insn_changes = NULL;
136 }
137
138 /* Kill register REGNO. This involves removing it from any value
139 lists, and resetting the value mode to VOIDmode. This is only a
140 helper function; it does not handle any hard registers overlapping
141 with REGNO. */
142
143 static void
144 kill_value_one_regno (unsigned int regno, struct value_data *vd)
145 {
146 unsigned int i, next;
147
148 if (vd->e[regno].oldest_regno != regno)
149 {
150 for (i = vd->e[regno].oldest_regno;
151 vd->e[i].next_regno != regno;
152 i = vd->e[i].next_regno)
153 continue;
154 vd->e[i].next_regno = vd->e[regno].next_regno;
155 }
156 else if ((next = vd->e[regno].next_regno) != INVALID_REGNUM)
157 {
158 for (i = next; i != INVALID_REGNUM; i = vd->e[i].next_regno)
159 vd->e[i].oldest_regno = next;
160 }
161
162 vd->e[regno].mode = VOIDmode;
163 vd->e[regno].oldest_regno = regno;
164 vd->e[regno].next_regno = INVALID_REGNUM;
165 if (vd->e[regno].debug_insn_changes)
166 free_debug_insn_changes (vd, regno);
167
168 #ifdef ENABLE_CHECKING
169 validate_value_data (vd);
170 #endif
171 }
172
173 /* Kill the value in register REGNO for NREGS, and any other registers
174 whose values overlap. */
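/* E.g., on a hypothetical target with 32-bit hard registers, if register 3
   holds a DImode value (occupying registers 3 and 4), then killing
   register 4 for a single register must also kill the DImode value
   recorded for register 3, since the two overlap.  */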
175
176 static void
177 kill_value_regno (unsigned int regno, unsigned int nregs,
178 struct value_data *vd)
179 {
180 unsigned int j;
181
182 /* Kill the value we're told to kill. */
183 for (j = 0; j < nregs; ++j)
184 kill_value_one_regno (regno + j, vd);
185
186 /* Kill everything that overlapped what we're told to kill. */
187 if (regno < vd->max_value_regs)
188 j = 0;
189 else
190 j = regno - vd->max_value_regs;
191 for (; j < regno; ++j)
192 {
193 unsigned int i, n;
194 if (vd->e[j].mode == VOIDmode)
195 continue;
196 n = hard_regno_nregs[j][vd->e[j].mode];
197 if (j + n > regno)
198 for (i = 0; i < n; ++i)
199 kill_value_one_regno (j + i, vd);
200 }
201 }
202
203 /* Kill X. This is a convenience function wrapping kill_value_regno
204 so that we mind the mode the register is in. */
205
206 static void
207 kill_value (const_rtx x, struct value_data *vd)
208 {
209 if (GET_CODE (x) == SUBREG)
210 {
211 rtx tmp = simplify_subreg (GET_MODE (x), SUBREG_REG (x),
212 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
213 x = tmp ? tmp : SUBREG_REG (x);
214 }
215 if (REG_P (x))
216 kill_value_regno (REGNO (x), REG_NREGS (x), vd);
217 }
218
219 /* Remember that REGNO is valid in MODE. */
220
221 static void
222 set_value_regno (unsigned int regno, machine_mode mode,
223 struct value_data *vd)
224 {
225 unsigned int nregs;
226
227 vd->e[regno].mode = mode;
228
229 nregs = hard_regno_nregs[regno][mode];
230 if (nregs > vd->max_value_regs)
231 vd->max_value_regs = nregs;
232 }
233
234 /* Initialize VD such that there are no known relationships between regs. */
235
236 static void
237 init_value_data (struct value_data *vd)
238 {
239 int i;
240 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
241 {
242 vd->e[i].mode = VOIDmode;
243 vd->e[i].oldest_regno = i;
244 vd->e[i].next_regno = INVALID_REGNUM;
245 vd->e[i].debug_insn_changes = NULL;
246 }
247 vd->max_value_regs = 0;
248 vd->n_debug_insn_changes = 0;
249 }
250
251 /* Called through note_stores. If X is clobbered, kill its value. */
252
253 static void
254 kill_clobbered_value (rtx x, const_rtx set, void *data)
255 {
256 struct value_data *const vd = (struct value_data *) data;
257 if (GET_CODE (set) == CLOBBER)
258 kill_value (x, vd);
259 }
260
261 /* A structure passed as data to kill_set_value through note_stores. */
262 struct kill_set_value_data
263 {
264 struct value_data *vd;
265 rtx ignore_set_reg;
266 };
267
268 /* Called through note_stores. If X is set, not clobbered, kill its
269 current value and install it as the root of its own value list. */
270
271 static void
272 kill_set_value (rtx x, const_rtx set, void *data)
273 {
274 struct kill_set_value_data *ksvd = (struct kill_set_value_data *) data;
275 if (rtx_equal_p (x, ksvd->ignore_set_reg))
276 return;
277 if (GET_CODE (set) != CLOBBER)
278 {
279 kill_value (x, ksvd->vd);
280 if (REG_P (x))
281 set_value_regno (REGNO (x), GET_MODE (x), ksvd->vd);
282 }
283 }
284
285 /* Kill any register used in INSN as the base of an auto-increment expression,
286 and install that register as the root of its own value list. */
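/* For example (hypothetical register number), an address such as
   (mem:SI (post_inc:SI (reg:SI 5))) modifies register 5 as a side effect,
   so any equivalence recorded for register 5 must be dropped and the
   register becomes the root of a fresh chain.  */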
287
288 static void
289 kill_autoinc_value (rtx_insn *insn, struct value_data *vd)
290 {
291 subrtx_iterator::array_type array;
292 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
293 {
294 const_rtx x = *iter;
295 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
296 {
297 x = XEXP (x, 0);
298 kill_value (x, vd);
299 set_value_regno (REGNO (x), GET_MODE (x), vd);
300 iter.skip_subrtxes ();
301 }
302 }
303 }
304
305 /* Assert that SRC has been copied to DEST. Adjust the data structures
306 to reflect that SRC contains an older copy of the shared value. */
307
308 static void
309 copy_value (rtx dest, rtx src, struct value_data *vd)
310 {
311 unsigned int dr = REGNO (dest);
312 unsigned int sr = REGNO (src);
313 unsigned int dn, sn;
314 unsigned int i;
315
316 /* ??? At present, it's possible to see noop sets. It'd be nice if
317 this were cleaned up beforehand... */
318 if (sr == dr)
319 return;
320
321 /* Do not propagate copies to the stack pointer, as that can leave
322 memory accesses with no scheduling dependency on the stack update. */
323 if (dr == STACK_POINTER_REGNUM)
324 return;
325
326 /* Likewise with the frame pointer, if we're using one. */
327 if (frame_pointer_needed && dr == HARD_FRAME_POINTER_REGNUM)
328 return;
329
330 /* Do not propagate copies to fixed or global registers: patterns
331 may rely on seeing a particular fixed register, and users may
332 expect the chosen global register in an asm. */
333 if (fixed_regs[dr] || global_regs[dr])
334 return;
335
336 /* If SRC and DEST overlap, don't record anything. */
337 dn = REG_NREGS (dest);
338 sn = REG_NREGS (src);
339 if ((dr > sr && dr < sr + sn)
340 || (sr > dr && sr < dr + dn))
341 return;
342
343 /* If SRC had no assigned mode (i.e. we didn't know it was live)
344 assign it now and assume the value came from an input argument
345 or somesuch. */
346 if (vd->e[sr].mode == VOIDmode)
347 set_value_regno (sr, vd->e[dr].mode, vd);
348
349 /* If we are narrowing the input to a smaller number of hard regs,
350 and the value is stored big-endian, we are really extracting a high part.
351 Since we generally associate a low part of a value with the value itself,
352 we must not do the same for the high part.
353 Note we can still get low parts for the same mode combination through
354 a two-step copy involving differently sized hard regs.
355 Assume hard regs fr* are 32 bits each, while r* are 64 bits each:
356 (set (reg:DI r0) (reg:DI fr0))
357 (set (reg:SI fr2) (reg:SI r0))
358 loads the low part of (reg:DI fr0) - i.e. fr1 - into fr2, while:
359 (set (reg:SI fr2) (reg:SI fr0))
360 loads the high part of (reg:DI fr0) into fr2.
361
362 We can't properly represent the latter case in our tables, so don't
363 record anything then. */
364 else if (sn < (unsigned int) hard_regno_nregs[sr][vd->e[sr].mode]
365 && (GET_MODE_SIZE (vd->e[sr].mode) > UNITS_PER_WORD
366 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
367 return;
368
369 /* If SRC had been assigned a mode narrower than the copy, we can't
370 link DEST into the chain, because not all of the pieces of the
371 copy came from oldest_regno. */
372 else if (sn > (unsigned int) hard_regno_nregs[sr][vd->e[sr].mode])
373 return;
374
375 /* Link DR at the end of the value chain used by SR. */
376
377 vd->e[dr].oldest_regno = vd->e[sr].oldest_regno;
378
379 for (i = sr; vd->e[i].next_regno != INVALID_REGNUM; i = vd->e[i].next_regno)
380 continue;
381 vd->e[i].next_regno = dr;
382
383 #ifdef ENABLE_CHECKING
384 validate_value_data (vd);
385 #endif
386 }
387
388 /* Return true if a mode change from ORIG to NEW is allowed for REGNO. */
389
390 static bool
391 mode_change_ok (machine_mode orig_mode, machine_mode new_mode,
392 unsigned int regno ATTRIBUTE_UNUSED)
393 {
394 if (GET_MODE_SIZE (orig_mode) < GET_MODE_SIZE (new_mode))
395 return false;
396
397 #ifdef CANNOT_CHANGE_MODE_CLASS
398 return !REG_CANNOT_CHANGE_MODE_P (regno, orig_mode, new_mode);
399 #endif
400
401 return true;
402 }
403
404 /* Register REGNO was originally set in ORIG_MODE. It - or a copy of it -
405 was copied in COPY_MODE to COPY_REGNO, and then COPY_REGNO was accessed
406 in NEW_MODE.
407 Return a NEW_MODE rtx for REGNO if that's OK, otherwise return NULL_RTX. */
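/* As a sketch (assuming a hypothetical little-endian target with 32-bit
   registers): if register 0 was set in DImode (registers 0-1), copied in
   DImode to registers 4-5, and register 4 is then read in SImode, the
   SImode value lives in the low-order word, so this would return
   (reg:SI 0), provided the target allows SImode in register 0.  */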
408
409 static rtx
410 maybe_mode_change (machine_mode orig_mode, machine_mode copy_mode,
411 machine_mode new_mode, unsigned int regno,
412 unsigned int copy_regno ATTRIBUTE_UNUSED)
413 {
414 if (GET_MODE_SIZE (copy_mode) < GET_MODE_SIZE (orig_mode)
415 && GET_MODE_SIZE (copy_mode) < GET_MODE_SIZE (new_mode))
416 return NULL_RTX;
417
418 if (orig_mode == new_mode)
419 return gen_raw_REG (new_mode, regno);
420 else if (mode_change_ok (orig_mode, new_mode, regno))
421 {
422 int copy_nregs = hard_regno_nregs[copy_regno][copy_mode];
423 int use_nregs = hard_regno_nregs[copy_regno][new_mode];
424 int copy_offset
425 = GET_MODE_SIZE (copy_mode) / copy_nregs * (copy_nregs - use_nregs);
426 int offset
427 = GET_MODE_SIZE (orig_mode) - GET_MODE_SIZE (new_mode) - copy_offset;
428 int byteoffset = offset % UNITS_PER_WORD;
429 int wordoffset = offset - byteoffset;
430
431 offset = ((WORDS_BIG_ENDIAN ? wordoffset : 0)
432 + (BYTES_BIG_ENDIAN ? byteoffset : 0));
433 regno += subreg_regno_offset (regno, orig_mode, offset, new_mode);
434 if (HARD_REGNO_MODE_OK (regno, new_mode))
435 return gen_raw_REG (new_mode, regno);
436 }
437 return NULL_RTX;
438 }
439
440 /* Find the oldest copy of the value contained in REGNO that is in
441 register class CL and has mode MODE. If found, return an rtx
442 of that oldest register, otherwise return NULL. */
443
444 static rtx
445 find_oldest_value_reg (enum reg_class cl, rtx reg, struct value_data *vd)
446 {
447 unsigned int regno = REGNO (reg);
448 machine_mode mode = GET_MODE (reg);
449 unsigned int i;
450
451 /* If we are accessing REG in some mode other than what we set it in,
452 make sure that the replacement is valid. In particular, consider
453 (set (reg:DI r11) (...))
454 (set (reg:SI r9) (reg:SI r11))
455 (set (reg:SI r10) (...))
456 (set (...) (reg:DI r9))
457 Replacing r9 with r11 is invalid. */
458 if (mode != vd->e[regno].mode)
459 {
460 if (hard_regno_nregs[regno][mode]
461 > hard_regno_nregs[regno][vd->e[regno].mode])
462 return NULL_RTX;
463 }
464
465 for (i = vd->e[regno].oldest_regno; i != regno; i = vd->e[i].next_regno)
466 {
467 machine_mode oldmode = vd->e[i].mode;
468 rtx new_rtx;
469
470 if (!in_hard_reg_set_p (reg_class_contents[cl], mode, i))
471 continue;
472
473 new_rtx = maybe_mode_change (oldmode, vd->e[regno].mode, mode, i, regno);
474 if (new_rtx)
475 {
476 ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (reg);
477 REG_ATTRS (new_rtx) = REG_ATTRS (reg);
478 REG_POINTER (new_rtx) = REG_POINTER (reg);
479 return new_rtx;
480 }
481 }
482
483 return NULL_RTX;
484 }
485
486 /* If possible, replace the register at *LOC with the oldest register
487 in register class CL. Return true if successfully replaced. */
488
489 static bool
490 replace_oldest_value_reg (rtx *loc, enum reg_class cl, rtx_insn *insn,
491 struct value_data *vd)
492 {
493 rtx new_rtx = find_oldest_value_reg (cl, *loc, vd);
494 if (new_rtx && (!DEBUG_INSN_P (insn) || !skip_debug_insn_p))
495 {
496 if (DEBUG_INSN_P (insn))
497 {
498 struct queued_debug_insn_change *change;
499
500 if (dump_file)
501 fprintf (dump_file, "debug_insn %u: queued replacing reg %u with %u\n",
502 INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));
503
504 change = new queued_debug_insn_change;
505 change->next = vd->e[REGNO (new_rtx)].debug_insn_changes;
506 change->insn = insn;
507 change->loc = loc;
508 change->new_rtx = new_rtx;
509 vd->e[REGNO (new_rtx)].debug_insn_changes = change;
510 ++vd->n_debug_insn_changes;
511 return true;
512 }
513 if (dump_file)
514 fprintf (dump_file, "insn %u: replaced reg %u with %u\n",
515 INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));
516
517 validate_change (insn, loc, new_rtx, 1);
518 return true;
519 }
520 return false;
521 }
522
523 /* Similar to replace_oldest_value_reg, but *LOC contains an address.
524 Adapted from find_reloads_address_1. CL is INDEX_REG_CLASS or
525 BASE_REG_CLASS depending on how the register is being considered. */
526
527 static bool
528 replace_oldest_value_addr (rtx *loc, enum reg_class cl,
529 machine_mode mode, addr_space_t as,
530 rtx_insn *insn, struct value_data *vd)
531 {
532 rtx x = *loc;
533 RTX_CODE code = GET_CODE (x);
534 const char *fmt;
535 int i, j;
536 bool changed = false;
537
538 switch (code)
539 {
540 case PLUS:
541 if (DEBUG_INSN_P (insn))
542 break;
543
544 {
545 rtx orig_op0 = XEXP (x, 0);
546 rtx orig_op1 = XEXP (x, 1);
547 RTX_CODE code0 = GET_CODE (orig_op0);
548 RTX_CODE code1 = GET_CODE (orig_op1);
549 rtx op0 = orig_op0;
550 rtx op1 = orig_op1;
551 rtx *locI = NULL;
552 rtx *locB = NULL;
553 enum rtx_code index_code = SCRATCH;
554
555 if (GET_CODE (op0) == SUBREG)
556 {
557 op0 = SUBREG_REG (op0);
558 code0 = GET_CODE (op0);
559 }
560
561 if (GET_CODE (op1) == SUBREG)
562 {
563 op1 = SUBREG_REG (op1);
564 code1 = GET_CODE (op1);
565 }
566
567 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
568 || code0 == ZERO_EXTEND || code1 == MEM)
569 {
570 locI = &XEXP (x, 0);
571 locB = &XEXP (x, 1);
572 index_code = GET_CODE (*locI);
573 }
574 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
575 || code1 == ZERO_EXTEND || code0 == MEM)
576 {
577 locI = &XEXP (x, 1);
578 locB = &XEXP (x, 0);
579 index_code = GET_CODE (*locI);
580 }
581 else if (code0 == CONST_INT || code0 == CONST
582 || code0 == SYMBOL_REF || code0 == LABEL_REF)
583 {
584 locB = &XEXP (x, 1);
585 index_code = GET_CODE (XEXP (x, 0));
586 }
587 else if (code1 == CONST_INT || code1 == CONST
588 || code1 == SYMBOL_REF || code1 == LABEL_REF)
589 {
590 locB = &XEXP (x, 0);
591 index_code = GET_CODE (XEXP (x, 1));
592 }
593 else if (code0 == REG && code1 == REG)
594 {
595 int index_op;
596 unsigned regno0 = REGNO (op0), regno1 = REGNO (op1);
597
598 if (REGNO_OK_FOR_INDEX_P (regno1)
599 && regno_ok_for_base_p (regno0, mode, as, PLUS, REG))
600 index_op = 1;
601 else if (REGNO_OK_FOR_INDEX_P (regno0)
602 && regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
603 index_op = 0;
604 else if (regno_ok_for_base_p (regno0, mode, as, PLUS, REG)
605 || REGNO_OK_FOR_INDEX_P (regno1))
606 index_op = 1;
607 else if (regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
608 index_op = 0;
609 else
610 index_op = 1;
611
612 locI = &XEXP (x, index_op);
613 locB = &XEXP (x, !index_op);
614 index_code = GET_CODE (*locI);
615 }
616 else if (code0 == REG)
617 {
618 locI = &XEXP (x, 0);
619 locB = &XEXP (x, 1);
620 index_code = GET_CODE (*locI);
621 }
622 else if (code1 == REG)
623 {
624 locI = &XEXP (x, 1);
625 locB = &XEXP (x, 0);
626 index_code = GET_CODE (*locI);
627 }
628
629 if (locI)
630 changed |= replace_oldest_value_addr (locI, INDEX_REG_CLASS,
631 mode, as, insn, vd);
632 if (locB)
633 changed |= replace_oldest_value_addr (locB,
634 base_reg_class (mode, as, PLUS,
635 index_code),
636 mode, as, insn, vd);
637 return changed;
638 }
639
640 case POST_INC:
641 case POST_DEC:
642 case POST_MODIFY:
643 case PRE_INC:
644 case PRE_DEC:
645 case PRE_MODIFY:
646 return false;
647
648 case MEM:
649 return replace_oldest_value_mem (x, insn, vd);
650
651 case REG:
652 return replace_oldest_value_reg (loc, cl, insn, vd);
653
654 default:
655 break;
656 }
657
658 fmt = GET_RTX_FORMAT (code);
659 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
660 {
661 if (fmt[i] == 'e')
662 changed |= replace_oldest_value_addr (&XEXP (x, i), cl, mode, as,
663 insn, vd);
664 else if (fmt[i] == 'E')
665 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
666 changed |= replace_oldest_value_addr (&XVECEXP (x, i, j), cl,
667 mode, as, insn, vd);
668 }
669
670 return changed;
671 }
672
673 /* Similar to replace_oldest_value_reg, but X contains a memory. */
674
675 static bool
676 replace_oldest_value_mem (rtx x, rtx_insn *insn, struct value_data *vd)
677 {
678 enum reg_class cl;
679
680 if (DEBUG_INSN_P (insn))
681 cl = ALL_REGS;
682 else
683 cl = base_reg_class (GET_MODE (x), MEM_ADDR_SPACE (x), MEM, SCRATCH);
684
685 return replace_oldest_value_addr (&XEXP (x, 0), cl,
686 GET_MODE (x), MEM_ADDR_SPACE (x),
687 insn, vd);
688 }
689
690 /* Apply all queued updates for DEBUG_INSNs that change some reg to
691 register REGNO. */
692
693 static void
694 apply_debug_insn_changes (struct value_data *vd, unsigned int regno)
695 {
696 struct queued_debug_insn_change *change;
697 rtx_insn *last_insn = vd->e[regno].debug_insn_changes->insn;
698
699 for (change = vd->e[regno].debug_insn_changes;
700 change;
701 change = change->next)
702 {
703 if (last_insn != change->insn)
704 {
705 apply_change_group ();
706 last_insn = change->insn;
707 }
708 validate_change (change->insn, change->loc, change->new_rtx, 1);
709 }
710 apply_change_group ();
711 }
712
713 /* Called via note_uses.  For each register used in a real insn,
714 apply any queued DEBUG_INSN changes that replace some register
715 with that used register. */
716
717 static void
718 cprop_find_used_regs (rtx *loc, void *data)
719 {
720 struct value_data *const vd = (struct value_data *) data;
721 subrtx_iterator::array_type array;
722 FOR_EACH_SUBRTX (iter, array, *loc, NONCONST)
723 {
724 const_rtx x = *iter;
725 if (REG_P (x))
726 {
727 unsigned int regno = REGNO (x);
728 if (vd->e[regno].debug_insn_changes)
729 {
730 apply_debug_insn_changes (vd, regno);
731 free_debug_insn_changes (vd, regno);
732 }
733 }
734 }
735 }
736
737 /* Apply clobbers of INSN in PATTERN and C_I_F_U to value_data VD. */
738
739 static void
740 kill_clobbered_values (rtx_insn *insn, struct value_data *vd)
741 {
742 note_stores (PATTERN (insn), kill_clobbered_value, vd);
743
744 if (CALL_P (insn))
745 {
746 rtx exp;
747
748 for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
749 {
750 rtx x = XEXP (exp, 0);
751 if (GET_CODE (x) == CLOBBER)
752 kill_value (SET_DEST (x), vd);
753 }
754 }
755 }
756
757 /* Perform the forward copy propagation on basic block BB. */
758
759 static bool
760 copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd)
761 {
762 bool anything_changed = false;
763 rtx_insn *insn;
764
765 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
766 {
767 int n_ops, i, predicated;
768 bool is_asm, any_replacements;
769 rtx set;
770 rtx link;
771 bool replaced[MAX_RECOG_OPERANDS];
772 bool changed = false;
773 struct kill_set_value_data ksvd;
774
775 if (!NONDEBUG_INSN_P (insn))
776 {
777 if (DEBUG_INSN_P (insn))
778 {
779 rtx loc = INSN_VAR_LOCATION_LOC (insn);
780 if (!VAR_LOC_UNKNOWN_P (loc))
781 replace_oldest_value_addr (&INSN_VAR_LOCATION_LOC (insn),
782 ALL_REGS, GET_MODE (loc),
783 ADDR_SPACE_GENERIC, insn, vd);
784 }
785
786 if (insn == BB_END (bb))
787 break;
788 else
789 continue;
790 }
791
792 set = single_set (insn);
793 extract_constrain_insn (insn);
794 preprocess_constraints (insn);
795 const operand_alternative *op_alt = which_op_alt ();
796 n_ops = recog_data.n_operands;
797 is_asm = asm_noperands (PATTERN (insn)) >= 0;
798
799 /* Simplify the code below by promoting OP_OUT to OP_INOUT
800 in predicated instructions. */
801
802 predicated = GET_CODE (PATTERN (insn)) == COND_EXEC;
803 for (i = 0; i < n_ops; ++i)
804 {
805 int matches = op_alt[i].matches;
806 if (matches >= 0 || op_alt[i].matched >= 0
807 || (predicated && recog_data.operand_type[i] == OP_OUT))
808 recog_data.operand_type[i] = OP_INOUT;
809 }
810
811 /* Apply changes to earlier DEBUG_INSNs if possible. */
812 if (vd->n_debug_insn_changes)
813 note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
814
815 /* For each earlyclobber operand, zap the value data. */
816 for (i = 0; i < n_ops; i++)
817 if (op_alt[i].earlyclobber)
818 kill_value (recog_data.operand[i], vd);
819
820 /* Within asms, a clobber cannot overlap inputs or outputs.
821 I wouldn't think this were true for regular insns, but
822 scan_rtx treats them like that... */
823 kill_clobbered_values (insn, vd);
824
825 /* Kill all auto-incremented values. */
826 /* ??? REG_INC is useless, since stack pushes aren't done that way. */
827 kill_autoinc_value (insn, vd);
828
829 /* Kill all early-clobbered operands. */
830 for (i = 0; i < n_ops; i++)
831 if (op_alt[i].earlyclobber)
832 kill_value (recog_data.operand[i], vd);
833
834 /* If we have dead sets in the insn, then we need to note these just
835 as we would note clobbers. */
836 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
837 {
838 if (REG_NOTE_KIND (link) == REG_UNUSED)
839 {
840 kill_value (XEXP (link, 0), vd);
841 /* Furthermore, if the insn looked like a single-set,
842 but the dead store kills the source value of that
843 set, then we can no longer use the plain move
844 special case below. */
845 if (set
846 && reg_overlap_mentioned_p (XEXP (link, 0), SET_SRC (set)))
847 set = NULL;
848 }
849 }
850
851 /* Special-case plain move instructions, since we may well
852 be able to do the move from a different register class. */
853 if (set && REG_P (SET_SRC (set)))
854 {
855 rtx src = SET_SRC (set);
856 unsigned int regno = REGNO (src);
857 machine_mode mode = GET_MODE (src);
858 unsigned int i;
859 rtx new_rtx;
860
861 /* If we are accessing SRC in some mode other than what we
862 set it in, make sure that the replacement is valid. */
863 if (mode != vd->e[regno].mode)
864 {
865 if (hard_regno_nregs[regno][mode]
866 > hard_regno_nregs[regno][vd->e[regno].mode])
867 goto no_move_special_case;
868
869 /* And likewise, if we are narrowing on a big-endian target, the
870 transformation is also invalid. */
871 if (hard_regno_nregs[regno][mode]
872 < hard_regno_nregs[regno][vd->e[regno].mode]
873 && (GET_MODE_SIZE (vd->e[regno].mode) > UNITS_PER_WORD
874 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
875 goto no_move_special_case;
876 }
877
878 /* If the destination is also a register, try to find a source
879 register in the same class. */
880 if (REG_P (SET_DEST (set)))
881 {
882 new_rtx = find_oldest_value_reg (REGNO_REG_CLASS (regno), src, vd);
883 if (new_rtx && validate_change (insn, &SET_SRC (set), new_rtx, 0))
884 {
885 if (dump_file)
886 fprintf (dump_file,
887 "insn %u: replaced reg %u with %u\n",
888 INSN_UID (insn), regno, REGNO (new_rtx));
889 changed = true;
890 goto did_replacement;
891 }
892 /* We need to re-extract as validate_change clobbers
893 recog_data. */
894 extract_constrain_insn (insn);
895 preprocess_constraints (insn);
896 }
897
898 /* Otherwise, try all registers in the value chain and see if the replacement is valid. */
899 for (i = vd->e[regno].oldest_regno; i != regno;
900 i = vd->e[i].next_regno)
901 {
902 new_rtx = maybe_mode_change (vd->e[i].mode, vd->e[regno].mode,
903 mode, i, regno);
904 if (new_rtx != NULL_RTX)
905 {
906 if (validate_change (insn, &SET_SRC (set), new_rtx, 0))
907 {
908 ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (src);
909 REG_ATTRS (new_rtx) = REG_ATTRS (src);
910 REG_POINTER (new_rtx) = REG_POINTER (src);
911 if (dump_file)
912 fprintf (dump_file,
913 "insn %u: replaced reg %u with %u\n",
914 INSN_UID (insn), regno, REGNO (new_rtx));
915 changed = true;
916 goto did_replacement;
917 }
918 /* We need to re-extract as validate_change clobbers
919 recog_data. */
920 extract_constrain_insn (insn);
921 preprocess_constraints (insn);
922 }
923 }
924 }
925 no_move_special_case:
926
927 any_replacements = false;
928
929 /* For each input operand, replace a hard register with the
930 eldest live copy that's in an appropriate register class. */
931 for (i = 0; i < n_ops; i++)
932 {
933 replaced[i] = false;
934
935 /* Don't scan match_operand here, since we've no reg class
936 information to pass down. Any operands that we could
937 substitute in will be represented elsewhere. */
938 if (recog_data.constraints[i][0] == '\0')
939 continue;
940
941 /* Don't replace in asms intentionally referencing hard regs. */
942 if (is_asm && REG_P (recog_data.operand[i])
943 && (REGNO (recog_data.operand[i])
944 == ORIGINAL_REGNO (recog_data.operand[i])))
945 continue;
946
947 if (recog_data.operand_type[i] == OP_IN)
948 {
949 if (op_alt[i].is_address)
950 replaced[i]
951 = replace_oldest_value_addr (recog_data.operand_loc[i],
952 alternative_class (op_alt, i),
953 VOIDmode, ADDR_SPACE_GENERIC,
954 insn, vd);
955 else if (REG_P (recog_data.operand[i]))
956 replaced[i]
957 = replace_oldest_value_reg (recog_data.operand_loc[i],
958 alternative_class (op_alt, i),
959 insn, vd);
960 else if (MEM_P (recog_data.operand[i]))
961 replaced[i] = replace_oldest_value_mem (recog_data.operand[i],
962 insn, vd);
963 }
964 else if (MEM_P (recog_data.operand[i]))
965 replaced[i] = replace_oldest_value_mem (recog_data.operand[i],
966 insn, vd);
967
968 /* If we performed any replacement, update match_dups. */
969 if (replaced[i])
970 {
971 int j;
972 rtx new_rtx;
973
974 new_rtx = *recog_data.operand_loc[i];
975 recog_data.operand[i] = new_rtx;
976 for (j = 0; j < recog_data.n_dups; j++)
977 if (recog_data.dup_num[j] == i)
978 validate_unshare_change (insn, recog_data.dup_loc[j], new_rtx, 1);
979
980 any_replacements = true;
981 }
982 }
983
984 if (any_replacements)
985 {
986 if (! apply_change_group ())
987 {
988 for (i = 0; i < n_ops; i++)
989 if (replaced[i])
990 {
991 rtx old = *recog_data.operand_loc[i];
992 recog_data.operand[i] = old;
993 }
994
995 if (dump_file)
996 fprintf (dump_file,
997 "insn %u: reg replacements not verified\n",
998 INSN_UID (insn));
999 }
1000 else
1001 changed = true;
1002 }
1003
1004 did_replacement:
1005 if (changed)
1006 {
1007 anything_changed = true;
1008
1009 /* If something changed, perhaps further changes to earlier
1010 DEBUG_INSNs can be applied. */
1011 if (vd->n_debug_insn_changes)
1012 note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
1013 }
1014
1015 ksvd.vd = vd;
1016 ksvd.ignore_set_reg = NULL_RTX;
1017
1018 /* Clobber call-clobbered registers. */
1019 if (CALL_P (insn))
1020 {
1021 unsigned int set_regno = INVALID_REGNUM;
1022 unsigned int set_nregs = 0;
1023 unsigned int regno;
1024 rtx exp;
1025 HARD_REG_SET regs_invalidated_by_this_call;
1026
1027 for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
1028 {
1029 rtx x = XEXP (exp, 0);
1030 if (GET_CODE (x) == SET)
1031 {
1032 rtx dest = SET_DEST (x);
1033 kill_value (dest, vd);
1034 set_value_regno (REGNO (dest), GET_MODE (dest), vd);
1035 copy_value (dest, SET_SRC (x), vd);
1036 ksvd.ignore_set_reg = dest;
1037 set_regno = REGNO (dest);
1038 set_nregs = REG_NREGS (dest);
1039 break;
1040 }
1041 }
1042
1043 get_call_reg_set_usage (insn,
1044 &regs_invalidated_by_this_call,
1045 regs_invalidated_by_call);
1046 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1047 if ((TEST_HARD_REG_BIT (regs_invalidated_by_this_call, regno)
1048 || HARD_REGNO_CALL_PART_CLOBBERED (regno, vd->e[regno].mode))
1049 && (regno < set_regno || regno >= set_regno + set_nregs))
1050 kill_value_regno (regno, 1, vd);
1051
1052 /* If SET was seen in CALL_INSN_FUNCTION_USAGE, and SET_SRC
1053 of the SET isn't in regs_invalidated_by_call hard reg set,
1054 but instead among CLOBBERs on the CALL_INSN, we could wrongly
1055 assume the value in it is still live. */
1056 if (ksvd.ignore_set_reg)
1057 kill_clobbered_values (insn, vd);
1058 }
1059
1060 bool copy_p = (set
1061 && REG_P (SET_DEST (set))
1062 && REG_P (SET_SRC (set)));
1063 bool noop_p = (copy_p
1064 && rtx_equal_p (SET_DEST (set), SET_SRC (set)));
1065
1066 if (!noop_p)
1067 {
1068 /* Notice stores. */
1069 note_stores (PATTERN (insn), kill_set_value, &ksvd);
1070
1071 /* Notice copies. */
1072 if (copy_p)
1073 copy_value (SET_DEST (set), SET_SRC (set), vd);
1074 }
1075
1076 if (insn == BB_END (bb))
1077 break;
1078 }
1079
1080 return anything_changed;
1081 }
1082
1083 /* Dump the value chain data to stderr. */
1084
1085 DEBUG_FUNCTION void
1086 debug_value_data (struct value_data *vd)
1087 {
1088 HARD_REG_SET set;
1089 unsigned int i, j;
1090
1091 CLEAR_HARD_REG_SET (set);
1092
1093 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1094 if (vd->e[i].oldest_regno == i)
1095 {
1096 if (vd->e[i].mode == VOIDmode)
1097 {
1098 if (vd->e[i].next_regno != INVALID_REGNUM)
1099 fprintf (stderr, "[%u] Bad next_regno for empty chain (%u)\n",
1100 i, vd->e[i].next_regno);
1101 continue;
1102 }
1103
1104 SET_HARD_REG_BIT (set, i);
1105 fprintf (stderr, "[%u %s] ", i, GET_MODE_NAME (vd->e[i].mode));
1106
1107 for (j = vd->e[i].next_regno;
1108 j != INVALID_REGNUM;
1109 j = vd->e[j].next_regno)
1110 {
1111 if (TEST_HARD_REG_BIT (set, j))
1112 {
1113 fprintf (stderr, "[%u] Loop in regno chain\n", j);
1114 return;
1115 }
1116
1117 if (vd->e[j].oldest_regno != i)
1118 {
1119 fprintf (stderr, "[%u] Bad oldest_regno (%u)\n",
1120 j, vd->e[j].oldest_regno);
1121 return;
1122 }
1123 SET_HARD_REG_BIT (set, j);
1124 fprintf (stderr, "[%u %s] ", j, GET_MODE_NAME (vd->e[j].mode));
1125 }
1126 fputc ('\n', stderr);
1127 }
1128
1129 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1130 if (! TEST_HARD_REG_BIT (set, i)
1131 && (vd->e[i].mode != VOIDmode
1132 || vd->e[i].oldest_regno != i
1133 || vd->e[i].next_regno != INVALID_REGNUM))
1134 fprintf (stderr, "[%u] Non-empty reg in chain (%s %u %i)\n",
1135 i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1136 vd->e[i].next_regno);
1137 }
1138
1139 /* Do copyprop_hardreg_forward_1 for a single basic block BB.
1140 DEBUG_INSNs are skipped, since we do not want to involve the
1141 DF-related machinery the way pass_cprop_hardreg::execute does.
1142 
1143 NOTE: Currently this is only used for shrink-wrapping. Maybe extend
1144 it to handle DEBUG_INSNs for other uses. */
1145
1146 void
1147 copyprop_hardreg_forward_bb_without_debug_insn (basic_block bb)
1148 {
1149 struct value_data *vd;
1150 vd = XNEWVEC (struct value_data, 1);
1151 init_value_data (vd);
1152
1153 skip_debug_insn_p = true;
1154 copyprop_hardreg_forward_1 (bb, vd);
1155 free (vd);
1156 skip_debug_insn_p = false;
1157 }
1158
1159 #ifdef ENABLE_CHECKING
1160 static void
1161 validate_value_data (struct value_data *vd)
1162 {
1163 HARD_REG_SET set;
1164 unsigned int i, j;
1165
1166 CLEAR_HARD_REG_SET (set);
1167
1168 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1169 if (vd->e[i].oldest_regno == i)
1170 {
1171 if (vd->e[i].mode == VOIDmode)
1172 {
1173 if (vd->e[i].next_regno != INVALID_REGNUM)
1174 internal_error ("validate_value_data: [%u] Bad next_regno for empty chain (%u)",
1175 i, vd->e[i].next_regno);
1176 continue;
1177 }
1178
1179 SET_HARD_REG_BIT (set, i);
1180
1181 for (j = vd->e[i].next_regno;
1182 j != INVALID_REGNUM;
1183 j = vd->e[j].next_regno)
1184 {
1185 if (TEST_HARD_REG_BIT (set, j))
1186 internal_error ("validate_value_data: Loop in regno chain (%u)",
1187 j);
1188 if (vd->e[j].oldest_regno != i)
1189 internal_error ("validate_value_data: [%u] Bad oldest_regno (%u)",
1190 j, vd->e[j].oldest_regno);
1191
1192 SET_HARD_REG_BIT (set, j);
1193 }
1194 }
1195
1196 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1197 if (! TEST_HARD_REG_BIT (set, i)
1198 && (vd->e[i].mode != VOIDmode
1199 || vd->e[i].oldest_regno != i
1200 || vd->e[i].next_regno != INVALID_REGNUM))
1201 internal_error ("validate_value_data: [%u] Non-empty reg in chain (%s %u %i)",
1202 i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1203 vd->e[i].next_regno);
1204 }
1205 #endif
1206 \f
1207 namespace {
1208
1209 const pass_data pass_data_cprop_hardreg =
1210 {
1211 RTL_PASS, /* type */
1212 "cprop_hardreg", /* name */
1213 OPTGROUP_NONE, /* optinfo_flags */
1214 TV_CPROP_REGISTERS, /* tv_id */
1215 0, /* properties_required */
1216 0, /* properties_provided */
1217 0, /* properties_destroyed */
1218 0, /* todo_flags_start */
1219 TODO_df_finish, /* todo_flags_finish */
1220 };
1221
1222 class pass_cprop_hardreg : public rtl_opt_pass
1223 {
1224 public:
1225 pass_cprop_hardreg (gcc::context *ctxt)
1226 : rtl_opt_pass (pass_data_cprop_hardreg, ctxt)
1227 {}
1228
1229 /* opt_pass methods: */
1230 virtual bool gate (function *)
1231 {
1232 return (optimize > 0 && (flag_cprop_registers));
1233 }
1234
1235 virtual unsigned int execute (function *);
1236
1237 }; // class pass_cprop_hardreg
1238
1239 unsigned int
1240 pass_cprop_hardreg::execute (function *fun)
1241 {
1242 struct value_data *all_vd;
1243 basic_block bb;
1244 sbitmap visited;
1245 bool analyze_called = false;
1246
1247 all_vd = XNEWVEC (struct value_data, last_basic_block_for_fn (fun));
1248
1249 visited = sbitmap_alloc (last_basic_block_for_fn (fun));
1250 bitmap_clear (visited);
1251
1252 FOR_EACH_BB_FN (bb, fun)
1253 {
1254 bitmap_set_bit (visited, bb->index);
1255
1256 /* If a block has a single predecessor that we've already
1257 processed, begin with the value data that was live at
1258 the end of the predecessor block. */
1259 /* ??? Ought to use more intelligent queuing of blocks. */
1260 if (single_pred_p (bb)
1261 && bitmap_bit_p (visited, single_pred (bb)->index)
1262 && ! (single_pred_edge (bb)->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
1263 {
1264 all_vd[bb->index] = all_vd[single_pred (bb)->index];
1265 if (all_vd[bb->index].n_debug_insn_changes)
1266 {
1267 unsigned int regno;
1268
1269 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1270 {
1271 if (all_vd[bb->index].e[regno].debug_insn_changes)
1272 {
1273 all_vd[bb->index].e[regno].debug_insn_changes = NULL;
1274 if (--all_vd[bb->index].n_debug_insn_changes == 0)
1275 break;
1276 }
1277 }
1278 }
1279 }
1280 else
1281 init_value_data (all_vd + bb->index);
1282
1283 copyprop_hardreg_forward_1 (bb, all_vd + bb->index);
1284 }
1285
1286 if (MAY_HAVE_DEBUG_INSNS)
1287 {
1288 FOR_EACH_BB_FN (bb, fun)
1289 if (bitmap_bit_p (visited, bb->index)
1290 && all_vd[bb->index].n_debug_insn_changes)
1291 {
1292 unsigned int regno;
1293 bitmap live;
1294
1295 if (!analyze_called)
1296 {
1297 df_analyze ();
1298 analyze_called = true;
1299 }
1300 live = df_get_live_out (bb);
1301 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1302 if (all_vd[bb->index].e[regno].debug_insn_changes)
1303 {
1304 if (REGNO_REG_SET_P (live, regno))
1305 apply_debug_insn_changes (all_vd + bb->index, regno);
1306 if (all_vd[bb->index].n_debug_insn_changes == 0)
1307 break;
1308 }
1309 }
1310
1311 queued_debug_insn_change::pool.release ();
1312 }
1313
1314 sbitmap_free (visited);
1315 free (all_vd);
1316 return 0;
1317 }
1318
1319 } // anon namespace
1320
1321 rtl_opt_pass *
1322 make_pass_cprop_hardreg (gcc::context *ctxt)
1323 {
1324 return new pass_cprop_hardreg (ctxt);
1325 }