]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/regcprop.c
tree-core.h: Include symtab.h.
[thirdparty/gcc.git] / gcc / regcprop.c
1 /* Copy propagation on hard registers for the GNU compiler.
2 Copyright (C) 2000-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "df.h"
26 #include "tm_p.h"
27 #include "insn-config.h"
28 #include "regs.h"
29 #include "addresses.h"
30 #include "reload.h"
31 #include "recog.h"
32 #include "flags.h"
33 #include "diagnostic-core.h"
34 #include "obstack.h"
35 #include "tree-pass.h"
36 #include "rtl-iter.h"
37 #include "emit-rtl.h"
38
39 /* The following code does forward propagation of hard register copies.
40 The object is to eliminate as many dependencies as possible, so that
41 we have the most scheduling freedom. As a side effect, we also clean
42 up some silly register allocation decisions made by reload. This
43 code may be obsoleted by a new register allocator. */
44
/* DEBUG_INSNs aren't changed right away, as doing so might extend the
   lifetime of a register and get the DEBUG_INSN subsequently reset.
   So they are queued instead, and updated only when the register is
   used in some subsequent real insn before it is set.  */
struct queued_debug_insn_change
{
  /* Next queued change recorded against the same source register,
     or NULL at the end of the list.  */
  struct queued_debug_insn_change *next;
  /* The DEBUG_INSN to be modified.  */
  rtx_insn *insn;
  /* Location inside INSN whose contents should be replaced.  */
  rtx *loc;
  /* Replacement rtx to store at *LOC when the change is applied.  */
  rtx new_rtx;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((queued_debug_insn_change *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<queued_debug_insn_change> pool;
};
71
/* For each register, we have a list of registers that contain the same
   value.  The OLDEST_REGNO field points to the head of the list, and
   the NEXT_REGNO field runs through the list.  The MODE field indicates
   what mode the data is known to be in; this field is VOIDmode when the
   register is not known to contain valid data.  */

struct value_data_entry
{
  /* Mode the register's value was set in, or VOIDmode when the
     register is not known to hold a valid value.  */
  machine_mode mode;
  /* Head of the chain of registers sharing this value; equals the
     register's own number when it is itself the oldest copy.  */
  unsigned int oldest_regno;
  /* Next register in the chain, or INVALID_REGNUM at the tail.  */
  unsigned int next_regno;
  /* Queued DEBUG_INSN replacements keyed on this register; applied
     lazily when the register is used in a subsequent real insn.  */
  struct queued_debug_insn_change *debug_insn_changes;
};
85
/* Value-chain state for all hard registers within one basic block.  */
struct value_data
{
  /* Per-hard-register value chain entries.  */
  struct value_data_entry e[FIRST_PSEUDO_REGISTER];
  /* Widest value (in hard registers) recorded in E; bounds the
     backward overlap search in kill_value_regno.  */
  unsigned int max_value_regs;
  /* Total number of queued DEBUG_INSN changes over all registers.  */
  unsigned int n_debug_insn_changes;
};
92
/* Backing allocation pool for queued_debug_insn_change objects.  */
pool_allocator<queued_debug_insn_change> queued_debug_insn_change::pool
  ("debug insn changes pool", 256);

/* When true, replacements inside DEBUG_INSNs are suppressed.
   NOTE(review): the setter is not visible in this chunk — presumably
   configured by the pass driver; confirm against the pass setup code.  */
static bool skip_debug_insn_p;
97
98 static void kill_value_one_regno (unsigned, struct value_data *);
99 static void kill_value_regno (unsigned, unsigned, struct value_data *);
100 static void kill_value (const_rtx, struct value_data *);
101 static void set_value_regno (unsigned, machine_mode, struct value_data *);
102 static void init_value_data (struct value_data *);
103 static void kill_clobbered_value (rtx, const_rtx, void *);
104 static void kill_set_value (rtx, const_rtx, void *);
105 static void copy_value (rtx, rtx, struct value_data *);
106 static bool mode_change_ok (machine_mode, machine_mode,
107 unsigned int);
108 static rtx maybe_mode_change (machine_mode, machine_mode,
109 machine_mode, unsigned int, unsigned int);
110 static rtx find_oldest_value_reg (enum reg_class, rtx, struct value_data *);
111 static bool replace_oldest_value_reg (rtx *, enum reg_class, rtx_insn *,
112 struct value_data *);
113 static bool replace_oldest_value_addr (rtx *, enum reg_class,
114 machine_mode, addr_space_t,
115 rtx_insn *, struct value_data *);
116 static bool replace_oldest_value_mem (rtx, rtx_insn *, struct value_data *);
117 static bool copyprop_hardreg_forward_1 (basic_block, struct value_data *);
118 extern void debug_value_data (struct value_data *);
119 #ifdef ENABLE_CHECKING
120 static void validate_value_data (struct value_data *);
121 #endif
122
123 /* Free all queued updates for DEBUG_INSNs that change some reg to
124 register REGNO. */
125
126 static void
127 free_debug_insn_changes (struct value_data *vd, unsigned int regno)
128 {
129 struct queued_debug_insn_change *cur, *next;
130 for (cur = vd->e[regno].debug_insn_changes; cur; cur = next)
131 {
132 next = cur->next;
133 --vd->n_debug_insn_changes;
134 delete cur;
135 }
136 vd->e[regno].debug_insn_changes = NULL;
137 }
138
/* Kill register REGNO.  This involves removing it from any value
   lists, and resetting the value mode to VOIDmode.  This is only a
   helper function; it does not handle any hard registers overlapping
   with REGNO.  */

static void
kill_value_one_regno (unsigned int regno, struct value_data *vd)
{
  unsigned int i, next;

  /* If REGNO is not the head of its chain, find its predecessor and
     splice REGNO out of the singly-linked list.  */
  if (vd->e[regno].oldest_regno != regno)
    {
      for (i = vd->e[regno].oldest_regno;
	   vd->e[i].next_regno != regno;
	   i = vd->e[i].next_regno)
	continue;
      vd->e[i].next_regno = vd->e[regno].next_regno;
    }
  /* Otherwise REGNO heads the chain: promote its successor (if any)
     to be the new oldest register for every remaining member.  */
  else if ((next = vd->e[regno].next_regno) != INVALID_REGNUM)
    {
      for (i = next; i != INVALID_REGNUM; i = vd->e[i].next_regno)
	vd->e[i].oldest_regno = next;
    }

  /* Reset REGNO to a singleton chain holding no known value.  */
  vd->e[regno].mode = VOIDmode;
  vd->e[regno].oldest_regno = regno;
  vd->e[regno].next_regno = INVALID_REGNUM;
  /* Any DEBUG_INSN updates queued against REGNO are now stale.  */
  if (vd->e[regno].debug_insn_changes)
    free_debug_insn_changes (vd, regno);

#ifdef ENABLE_CHECKING
  validate_value_data (vd);
#endif
}
173
/* Kill the value in register REGNO for NREGS, and any other registers
   whose values overlap.  */

static void
kill_value_regno (unsigned int regno, unsigned int nregs,
		  struct value_data *vd)
{
  unsigned int j;

  /* Kill the value we're told to kill.  */
  for (j = 0; j < nregs; ++j)
    kill_value_one_regno (regno + j, vd);

  /* Kill everything that overlapped what we're told to kill.  Any
     recorded value spans at most VD->max_value_regs hard registers,
     so only registers that close below REGNO can reach into it.  */
  if (regno < vd->max_value_regs)
    j = 0;
  else
    j = regno - vd->max_value_regs;
  for (; j < regno; ++j)
    {
      unsigned int i, n;
      if (vd->e[j].mode == VOIDmode)
	continue;
      n = hard_regno_nregs[j][vd->e[j].mode];
      /* If the value starting at J extends up into REGNO, kill every
	 hard register it occupies.  */
      if (j + n > regno)
	for (i = 0; i < n; ++i)
	  kill_value_one_regno (j + i, vd);
    }
}
203
204 /* Kill X. This is a convenience function wrapping kill_value_regno
205 so that we mind the mode the register is in. */
206
207 static void
208 kill_value (const_rtx x, struct value_data *vd)
209 {
210 if (GET_CODE (x) == SUBREG)
211 {
212 rtx tmp = simplify_subreg (GET_MODE (x), SUBREG_REG (x),
213 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
214 x = tmp ? tmp : SUBREG_REG (x);
215 }
216 if (REG_P (x))
217 kill_value_regno (REGNO (x), REG_NREGS (x), vd);
218 }
219
220 /* Remember that REGNO is valid in MODE. */
221
222 static void
223 set_value_regno (unsigned int regno, machine_mode mode,
224 struct value_data *vd)
225 {
226 unsigned int nregs;
227
228 vd->e[regno].mode = mode;
229
230 nregs = hard_regno_nregs[regno][mode];
231 if (nregs > vd->max_value_regs)
232 vd->max_value_regs = nregs;
233 }
234
235 /* Initialize VD such that there are no known relationships between regs. */
236
237 static void
238 init_value_data (struct value_data *vd)
239 {
240 int i;
241 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
242 {
243 vd->e[i].mode = VOIDmode;
244 vd->e[i].oldest_regno = i;
245 vd->e[i].next_regno = INVALID_REGNUM;
246 vd->e[i].debug_insn_changes = NULL;
247 }
248 vd->max_value_regs = 0;
249 vd->n_debug_insn_changes = 0;
250 }
251
252 /* Called through note_stores. If X is clobbered, kill its value. */
253
254 static void
255 kill_clobbered_value (rtx x, const_rtx set, void *data)
256 {
257 struct value_data *const vd = (struct value_data *) data;
258 if (GET_CODE (set) == CLOBBER)
259 kill_value (x, vd);
260 }
261
/* A structure passed as data to kill_set_value through note_stores.  */
struct kill_set_value_data
{
  /* The value-tracking state to update.  */
  struct value_data *vd;
  /* A register whose SET should be left untouched, or NULL_RTX.  Used
     for the SET found in CALL_INSN_FUNCTION_USAGE (see the CALL_P
     handling in copyprop_hardreg_forward_1).  */
  rtx ignore_set_reg;
};
268
269 /* Called through note_stores. If X is set, not clobbered, kill its
270 current value and install it as the root of its own value list. */
271
272 static void
273 kill_set_value (rtx x, const_rtx set, void *data)
274 {
275 struct kill_set_value_data *ksvd = (struct kill_set_value_data *) data;
276 if (rtx_equal_p (x, ksvd->ignore_set_reg))
277 return;
278 if (GET_CODE (set) != CLOBBER)
279 {
280 kill_value (x, ksvd->vd);
281 if (REG_P (x))
282 set_value_regno (REGNO (x), GET_MODE (x), ksvd->vd);
283 }
284 }
285
/* Kill any register used in X as the base of an auto-increment expression,
   and install that register as the root of its own value list.  */

static void
kill_autoinc_value (rtx_insn *insn, struct value_data *vd)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
    {
      const_rtx x = *iter;
      if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
	{
	  /* The auto-inc modifies its base register, so its old value
	     is dead, but the register itself remains live in its own
	     mode.  */
	  x = XEXP (x, 0);
	  kill_value (x, vd);
	  set_value_regno (REGNO (x), GET_MODE (x), vd);
	  /* No need to look inside the auto-inc expression itself.  */
	  iter.skip_subrtxes ();
	}
    }
}
305
/* Assert that SRC has been copied to DEST.  Adjust the data structures
   to reflect that SRC contains an older copy of the shared value.  */

static void
copy_value (rtx dest, rtx src, struct value_data *vd)
{
  unsigned int dr = REGNO (dest);
  unsigned int sr = REGNO (src);
  unsigned int dn, sn;
  unsigned int i;

  /* ??? At present, it's possible to see noop sets.  It'd be nice if
     this were cleaned up beforehand...  */
  if (sr == dr)
    return;

  /* Do not propagate copies to the stack pointer, as that can leave
     memory accesses with no scheduling dependency on the stack update.  */
  if (dr == STACK_POINTER_REGNUM)
    return;

  /* Likewise with the frame pointer, if we're using one.  */
  if (frame_pointer_needed && dr == HARD_FRAME_POINTER_REGNUM)
    return;

  /* Do not propagate copies to fixed or global registers, patterns
     can be relying to see particular fixed register or users can
     expect the chosen global register in asm.  */
  if (fixed_regs[dr] || global_regs[dr])
    return;

  /* If SRC and DEST overlap, don't record anything.  */
  dn = REG_NREGS (dest);
  sn = REG_NREGS (src);
  if ((dr > sr && dr < sr + sn)
      || (sr > dr && sr < dr + dn))
    return;

  /* If SRC had no assigned mode (i.e. we didn't know it was live)
     assign it now and assume the value came from an input argument
     or somesuch.  */
  if (vd->e[sr].mode == VOIDmode)
    set_value_regno (sr, vd->e[dr].mode, vd);

  /* If we are narrowing the input to a smaller number of hard regs,
     and it is in big endian, we are really extracting a high part.
     Since we generally associate a low part of a value with the value itself,
     we must not do the same for the high part.
     Note we can still get low parts for the same mode combination through
     a two-step copy involving differently sized hard regs.
     Assume hard regs fr* are 32 bits bits each, while r* are 64 bits each:
     (set (reg:DI r0) (reg:DI fr0))
     (set (reg:SI fr2) (reg:SI r0))
     loads the low part of (reg:DI fr0) - i.e. fr1 - into fr2, while:
     (set (reg:SI fr2) (reg:SI fr0))
     loads the high part of (reg:DI fr0) into fr2.

     We can't properly represent the latter case in our tables, so don't
     record anything then.  */
  else if (sn < (unsigned int) hard_regno_nregs[sr][vd->e[sr].mode]
	   && (GET_MODE_SIZE (vd->e[sr].mode) > UNITS_PER_WORD
	       ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
    return;

  /* If SRC had been assigned a mode narrower than the copy, we can't
     link DEST into the chain, because not all of the pieces of the
     copy came from oldest_regno.  */
  else if (sn > (unsigned int) hard_regno_nregs[sr][vd->e[sr].mode])
    return;

  /* Link DR at the end of the value chain used by SR.  DEST inherits
     SR's oldest copy as the head of its chain.  */

  vd->e[dr].oldest_regno = vd->e[sr].oldest_regno;

  /* Walk to the current tail of SR's chain and append DR there.  */
  for (i = sr; vd->e[i].next_regno != INVALID_REGNUM; i = vd->e[i].next_regno)
    continue;
  vd->e[i].next_regno = dr;

#ifdef ENABLE_CHECKING
  validate_value_data (vd);
#endif
}
388
/* Return true if a mode change from ORIG to NEW is allowed for REGNO.  */

static bool
mode_change_ok (machine_mode orig_mode, machine_mode new_mode,
		unsigned int regno ATTRIBUTE_UNUSED)
{
  /* Never allow accessing a value in a mode wider than the one it was
     set in.  */
  if (GET_MODE_SIZE (orig_mode) < GET_MODE_SIZE (new_mode))
    return false;

#ifdef CANNOT_CHANGE_MODE_CLASS
  /* Let the target veto mode changes that are invalid for this
     register's class.  (When the macro is defined, this return is the
     function's final answer; the `return true' below is unreachable.)  */
  return !REG_CANNOT_CHANGE_MODE_P (regno, orig_mode, new_mode);
#endif

  return true;
}
404
/* Register REGNO was originally set in ORIG_MODE.  It - or a copy of it -
   was copied in COPY_MODE to COPY_REGNO, and then COPY_REGNO was accessed
   in NEW_MODE.
   Return a NEW_MODE rtx for REGNO if that's OK, otherwise return NULL_RTX.  */

static rtx
maybe_mode_change (machine_mode orig_mode, machine_mode copy_mode,
		   machine_mode new_mode, unsigned int regno,
		   unsigned int copy_regno ATTRIBUTE_UNUSED)
{
  /* If the copy was narrower than both the original value and the new
     access, part of the value was lost in the copy; give up.  */
  if (GET_MODE_SIZE (copy_mode) < GET_MODE_SIZE (orig_mode)
      && GET_MODE_SIZE (copy_mode) < GET_MODE_SIZE (new_mode))
    return NULL_RTX;

  if (orig_mode == new_mode)
    return gen_raw_REG (new_mode, regno);
  else if (mode_change_ok (orig_mode, new_mode, regno))
    {
      int copy_nregs = hard_regno_nregs[copy_regno][copy_mode];
      int use_nregs = hard_regno_nregs[copy_regno][new_mode];
      /* Offset of the used part within the copy, then translated to an
	 offset from the start of the original value; split into word
	 and sub-word parts so each can be applied only when the
	 corresponding endianness flag says it matters.  */
      int copy_offset
	= GET_MODE_SIZE (copy_mode) / copy_nregs * (copy_nregs - use_nregs);
      int offset
	= GET_MODE_SIZE (orig_mode) - GET_MODE_SIZE (new_mode) - copy_offset;
      int byteoffset = offset % UNITS_PER_WORD;
      int wordoffset = offset - byteoffset;

      offset = ((WORDS_BIG_ENDIAN ? wordoffset : 0)
		+ (BYTES_BIG_ENDIAN ? byteoffset : 0));
      /* Map the byte offset to a hard register offset and check the
	 resulting register can actually hold NEW_MODE.  */
      regno += subreg_regno_offset (regno, orig_mode, offset, new_mode);
      if (HARD_REGNO_MODE_OK (regno, new_mode))
	return gen_raw_REG (new_mode, regno);
    }
  return NULL_RTX;
}
440
/* Find the oldest copy of the value contained in REGNO that is in
   register class CL and has mode MODE.  If found, return an rtx
   of that oldest register, otherwise return NULL.  */

static rtx
find_oldest_value_reg (enum reg_class cl, rtx reg, struct value_data *vd)
{
  unsigned int regno = REGNO (reg);
  machine_mode mode = GET_MODE (reg);
  unsigned int i;

  /* If we are accessing REG in some mode other that what we set it in,
     make sure that the replacement is valid.  In particular, consider
	(set (reg:DI r11) (...))
	(set (reg:SI r9) (reg:SI r11))
	(set (reg:SI r10) (...))
	(set (...) (reg:DI r9))
     Replacing r9 with r11 is invalid.  */
  if (mode != vd->e[regno].mode)
    {
      if (hard_regno_nregs[regno][mode]
	  > hard_regno_nregs[regno][vd->e[regno].mode])
	return NULL_RTX;
    }

  /* Walk the chain from the oldest copy toward REGNO and return the
     first member that is in class CL and usable in MODE.  */
  for (i = vd->e[regno].oldest_regno; i != regno; i = vd->e[i].next_regno)
    {
      machine_mode oldmode = vd->e[i].mode;
      rtx new_rtx;

      if (!in_hard_reg_set_p (reg_class_contents[cl], mode, i))
	continue;

      new_rtx = maybe_mode_change (oldmode, vd->e[regno].mode, mode, i, regno);
      if (new_rtx)
	{
	  /* Carry the original register's identity and attributes over
	     to the replacement rtx.  */
	  ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (reg);
	  REG_ATTRS (new_rtx) = REG_ATTRS (reg);
	  REG_POINTER (new_rtx) = REG_POINTER (reg);
	  return new_rtx;
	}
    }

  return NULL_RTX;
}
486
/* If possible, replace the register at *LOC with the oldest register
   in register class CL.  Return true if successfully replaced.  */

static bool
replace_oldest_value_reg (rtx *loc, enum reg_class cl, rtx_insn *insn,
			  struct value_data *vd)
{
  rtx new_rtx = find_oldest_value_reg (cl, *loc, vd);
  if (new_rtx && (!DEBUG_INSN_P (insn) || !skip_debug_insn_p))
    {
      if (DEBUG_INSN_P (insn))
	{
	  /* Don't modify DEBUG_INSNs immediately: queue the change,
	     keyed on the replacement register, so it is only applied
	     once that register is seen used in a later real insn
	     (see cprop_find_used_regs).  */
	  struct queued_debug_insn_change *change;

	  if (dump_file)
	    fprintf (dump_file, "debug_insn %u: queued replacing reg %u with %u\n",
		     INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));

	  change = new queued_debug_insn_change;
	  change->next = vd->e[REGNO (new_rtx)].debug_insn_changes;
	  change->insn = insn;
	  change->loc = loc;
	  change->new_rtx = new_rtx;
	  vd->e[REGNO (new_rtx)].debug_insn_changes = change;
	  ++vd->n_debug_insn_changes;
	  return true;
	}
      if (dump_file)
	fprintf (dump_file, "insn %u: replaced reg %u with %u\n",
		 INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));

      /* Group the change (in_group = 1); the caller is responsible for
	 the eventual apply_change_group.  */
      validate_change (insn, loc, new_rtx, 1);
      return true;
    }
  return false;
}
523
/* Similar to replace_oldest_value_reg, but *LOC contains an address.
   Adapted from find_reloads_address_1.  CL is INDEX_REG_CLASS or
   BASE_REG_CLASS depending on how the register is being considered.  */

static bool
replace_oldest_value_addr (rtx *loc, enum reg_class cl,
			   machine_mode mode, addr_space_t as,
			   rtx_insn *insn, struct value_data *vd)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;
  int i, j;
  bool changed = false;

  switch (code)
    {
    case PLUS:
      if (DEBUG_INSN_P (insn))
	break;

      /* Classify the two addends as index (locI) and base (locB) so
	 each side can be replaced against the proper register class.
	 The decision tree mirrors find_reloads_address_1.  */
      {
	rtx orig_op0 = XEXP (x, 0);
	rtx orig_op1 = XEXP (x, 1);
	RTX_CODE code0 = GET_CODE (orig_op0);
	RTX_CODE code1 = GET_CODE (orig_op1);
	rtx op0 = orig_op0;
	rtx op1 = orig_op1;
	rtx *locI = NULL;
	rtx *locB = NULL;
	enum rtx_code index_code = SCRATCH;

	/* Look through SUBREGs when classifying the operands.  */
	if (GET_CODE (op0) == SUBREG)
	  {
	    op0 = SUBREG_REG (op0);
	    code0 = GET_CODE (op0);
	  }

	if (GET_CODE (op1) == SUBREG)
	  {
	    op1 = SUBREG_REG (op1);
	    code1 = GET_CODE (op1);
	  }

	/* A MULT/extend operand is the index; a MEM operand forces the
	   other side to be the index.  */
	if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
	    || code0 == ZERO_EXTEND || code1 == MEM)
	  {
	    locI = &XEXP (x, 0);
	    locB = &XEXP (x, 1);
	    index_code = GET_CODE (*locI);
	  }
	else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
		 || code1 == ZERO_EXTEND || code0 == MEM)
	  {
	    locI = &XEXP (x, 1);
	    locB = &XEXP (x, 0);
	    index_code = GET_CODE (*locI);
	  }
	/* A constant operand leaves only a base register to replace.  */
	else if (code0 == CONST_INT || code0 == CONST
		 || code0 == SYMBOL_REF || code0 == LABEL_REF)
	  {
	    locB = &XEXP (x, 1);
	    index_code = GET_CODE (XEXP (x, 0));
	  }
	else if (code1 == CONST_INT || code1 == CONST
		 || code1 == SYMBOL_REF || code1 == LABEL_REF)
	  {
	    locB = &XEXP (x, 0);
	    index_code = GET_CODE (XEXP (x, 1));
	  }
	/* Two registers: decide which plays the index role by asking
	   the target which register numbers suit each role.  */
	else if (code0 == REG && code1 == REG)
	  {
	    int index_op;
	    unsigned regno0 = REGNO (op0), regno1 = REGNO (op1);

	    if (REGNO_OK_FOR_INDEX_P (regno1)
		&& regno_ok_for_base_p (regno0, mode, as, PLUS, REG))
	      index_op = 1;
	    else if (REGNO_OK_FOR_INDEX_P (regno0)
		     && regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
	      index_op = 0;
	    else if (regno_ok_for_base_p (regno0, mode, as, PLUS, REG)
		     || REGNO_OK_FOR_INDEX_P (regno1))
	      index_op = 1;
	    else if (regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
	      index_op = 0;
	    else
	      index_op = 1;

	    locI = &XEXP (x, index_op);
	    locB = &XEXP (x, !index_op);
	    index_code = GET_CODE (*locI);
	  }
	else if (code0 == REG)
	  {
	    locI = &XEXP (x, 0);
	    locB = &XEXP (x, 1);
	    index_code = GET_CODE (*locI);
	  }
	else if (code1 == REG)
	  {
	    locI = &XEXP (x, 1);
	    locB = &XEXP (x, 0);
	    index_code = GET_CODE (*locI);
	  }

	/* Recurse into the index and base with their respective
	   register classes.  */
	if (locI)
	  changed |= replace_oldest_value_addr (locI, INDEX_REG_CLASS,
						mode, as, insn, vd);
	if (locB)
	  changed |= replace_oldest_value_addr (locB,
						base_reg_class (mode, as, PLUS,
								index_code),
						mode, as, insn, vd);
	return changed;
      }

    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      /* Never replace auto-modified registers.  */
      return false;

    case MEM:
      return replace_oldest_value_mem (x, insn, vd);

    case REG:
      return replace_oldest_value_reg (loc, cl, insn, vd);

    default:
      break;
    }

  /* Generic walk over the remaining rtx codes (and the PLUS inside a
     DEBUG_INSN, which breaks out of the switch above).  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	changed |= replace_oldest_value_addr (&XEXP (x, i), cl, mode, as,
					      insn, vd);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  changed |= replace_oldest_value_addr (&XVECEXP (x, i, j), cl,
						mode, as, insn, vd);
    }

  return changed;
}
673
674 /* Similar to replace_oldest_value_reg, but X contains a memory. */
675
676 static bool
677 replace_oldest_value_mem (rtx x, rtx_insn *insn, struct value_data *vd)
678 {
679 enum reg_class cl;
680
681 if (DEBUG_INSN_P (insn))
682 cl = ALL_REGS;
683 else
684 cl = base_reg_class (GET_MODE (x), MEM_ADDR_SPACE (x), MEM, SCRATCH);
685
686 return replace_oldest_value_addr (&XEXP (x, 0), cl,
687 GET_MODE (x), MEM_ADDR_SPACE (x),
688 insn, vd);
689 }
690
/* Apply all queued updates for DEBUG_INSNs that change some reg to
   register REGNO.  */

static void
apply_debug_insn_changes (struct value_data *vd, unsigned int regno)
{
  struct queued_debug_insn_change *change;
  rtx_insn *last_insn = vd->e[regno].debug_insn_changes->insn;

  /* Commit the pending group whenever we move to a different
     DEBUG_INSN, so each apply_change_group call covers one insn.
     NOTE(review): this assumes changes for the same insn sit adjacent
     in the list — true if they are always pushed consecutively while
     that insn is processed; confirm against replace_oldest_value_reg.  */
  for (change = vd->e[regno].debug_insn_changes;
       change;
       change = change->next)
    {
      if (last_insn != change->insn)
	{
	  apply_change_group ();
	  last_insn = change->insn;
	}
      validate_change (change->insn, change->loc, change->new_rtx, 1);
    }
  /* Flush the changes queued for the final insn.  */
  apply_change_group ();
}
713
/* Called via note_uses, for all used registers in a real insn
   apply DEBUG_INSN changes that change registers to the used
   registers.  */

static void
cprop_find_used_regs (rtx *loc, void *data)
{
  struct value_data *const vd = (struct value_data *) data;
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, *loc, NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x))
	{
	  unsigned int regno = REGNO (x);
	  /* The register is used by a real insn before being set
	     again, so the replacements queued against it can now be
	     committed without extending its lifetime.  */
	  if (vd->e[regno].debug_insn_changes)
	    {
	      apply_debug_insn_changes (vd, regno);
	      free_debug_insn_changes (vd, regno);
	    }
	}
    }
}
737
738 /* Apply clobbers of INSN in PATTERN and C_I_F_U to value_data VD. */
739
740 static void
741 kill_clobbered_values (rtx_insn *insn, struct value_data *vd)
742 {
743 note_stores (PATTERN (insn), kill_clobbered_value, vd);
744
745 if (CALL_P (insn))
746 {
747 rtx exp;
748
749 for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
750 {
751 rtx x = XEXP (exp, 0);
752 if (GET_CODE (x) == CLOBBER)
753 kill_value (SET_DEST (x), vd);
754 }
755 }
756 }
757
/* Perform the forward copy propagation on basic block BB.
   Walks the insns of BB in order, maintaining in VD the set of
   registers known to share values, replacing register uses with older
   copies where valid, and killing values at sets, clobbers, calls and
   auto-increments.  Returns true if any insn was changed.  */

static bool
copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd)
{
  bool anything_changed = false;
  rtx_insn *insn;

  for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
    {
      int n_ops, i, predicated;
      bool is_asm, any_replacements;
      rtx set;
      rtx link;
      bool replaced[MAX_RECOG_OPERANDS];
      bool changed = false;
      struct kill_set_value_data ksvd;

      /* Notes and DEBUG_INSNs: only try replacements inside the
	 DEBUG_INSN's location expression, then move on.  */
      if (!NONDEBUG_INSN_P (insn))
	{
	  if (DEBUG_INSN_P (insn))
	    {
	      rtx loc = INSN_VAR_LOCATION_LOC (insn);
	      if (!VAR_LOC_UNKNOWN_P (loc))
		replace_oldest_value_addr (&INSN_VAR_LOCATION_LOC (insn),
					   ALL_REGS, GET_MODE (loc),
					   ADDR_SPACE_GENERIC, insn, vd);
	    }

	  if (insn == BB_END (bb))
	    break;
	  else
	    continue;
	}

      set = single_set (insn);
      extract_constrain_insn (insn);
      preprocess_constraints (insn);
      const operand_alternative *op_alt = which_op_alt ();
      n_ops = recog_data.n_operands;
      is_asm = asm_noperands (PATTERN (insn)) >= 0;

      /* Simplify the code below by promoting OP_OUT to OP_INOUT
	 in predicated instructions.  */

      predicated = GET_CODE (PATTERN (insn)) == COND_EXEC;
      for (i = 0; i < n_ops; ++i)
	{
	  int matches = op_alt[i].matches;
	  if (matches >= 0 || op_alt[i].matched >= 0
	      || (predicated && recog_data.operand_type[i] == OP_OUT))
	    recog_data.operand_type[i] = OP_INOUT;
	}

      /* Apply changes to earlier DEBUG_INSNs if possible.  */
      if (vd->n_debug_insn_changes)
	note_uses (&PATTERN (insn), cprop_find_used_regs, vd);

      /* For each earlyclobber operand, zap the value data.
	 NOTE(review): this loop appears identical to the "Kill all
	 early-clobbered operands" loop below; since everything in
	 between only kills values, the repetition looks redundant —
	 confirm against upstream history before removing either.  */
      for (i = 0; i < n_ops; i++)
	if (op_alt[i].earlyclobber)
	  kill_value (recog_data.operand[i], vd);

      /* Within asms, a clobber cannot overlap inputs or outputs.
	 I wouldn't think this were true for regular insns, but
	 scan_rtx treats them like that...  */
      kill_clobbered_values (insn, vd);

      /* Kill all auto-incremented values.  */
      /* ??? REG_INC is useless, since stack pushes aren't done that way.  */
      kill_autoinc_value (insn, vd);

      /* Kill all early-clobbered operands.  */
      for (i = 0; i < n_ops; i++)
	if (op_alt[i].earlyclobber)
	  kill_value (recog_data.operand[i], vd);

      /* If we have dead sets in the insn, then we need to note these as we
	 would clobbers.  */
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
	{
	  if (REG_NOTE_KIND (link) == REG_UNUSED)
	    {
	      kill_value (XEXP (link, 0), vd);
	      /* Furthermore, if the insn looked like a single-set,
		 but the dead store kills the source value of that
		 set, then we can no-longer use the plain move
		 special case below.  */
	      if (set
		  && reg_overlap_mentioned_p (XEXP (link, 0), SET_SRC (set)))
		set = NULL;
	    }
	}

      /* Special-case plain move instructions, since we may well
	 be able to do the move from a different register class.  */
      if (set && REG_P (SET_SRC (set)))
	{
	  rtx src = SET_SRC (set);
	  unsigned int regno = REGNO (src);
	  machine_mode mode = GET_MODE (src);
	  unsigned int i;
	  rtx new_rtx;

	  /* If we are accessing SRC in some mode other that what we
	     set it in, make sure that the replacement is valid.  */
	  if (mode != vd->e[regno].mode)
	    {
	      if (hard_regno_nregs[regno][mode]
		  > hard_regno_nregs[regno][vd->e[regno].mode])
		goto no_move_special_case;

	      /* And likewise, if we are narrowing on big endian the transformation
		 is also invalid.  */
	      if (hard_regno_nregs[regno][mode]
		  < hard_regno_nregs[regno][vd->e[regno].mode]
		  && (GET_MODE_SIZE (vd->e[regno].mode) > UNITS_PER_WORD
		      ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
		goto no_move_special_case;
	    }

	  /* If the destination is also a register, try to find a source
	     register in the same class.  */
	  if (REG_P (SET_DEST (set)))
	    {
	      new_rtx = find_oldest_value_reg (REGNO_REG_CLASS (regno), src, vd);
	      if (new_rtx && validate_change (insn, &SET_SRC (set), new_rtx, 0))
		{
		  if (dump_file)
		    fprintf (dump_file,
			     "insn %u: replaced reg %u with %u\n",
			     INSN_UID (insn), regno, REGNO (new_rtx));
		  changed = true;
		  goto did_replacement;
		}
	      /* We need to re-extract as validate_change clobbers
		 recog_data.  */
	      extract_constrain_insn (insn);
	      preprocess_constraints (insn);
	    }

	  /* Otherwise, try all valid registers and see if its valid.  */
	  for (i = vd->e[regno].oldest_regno; i != regno;
	       i = vd->e[i].next_regno)
	    {
	      new_rtx = maybe_mode_change (vd->e[i].mode, vd->e[regno].mode,
					   mode, i, regno);
	      if (new_rtx != NULL_RTX)
		{
		  if (validate_change (insn, &SET_SRC (set), new_rtx, 0))
		    {
		      ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (src);
		      REG_ATTRS (new_rtx) = REG_ATTRS (src);
		      REG_POINTER (new_rtx) = REG_POINTER (src);
		      if (dump_file)
			fprintf (dump_file,
				 "insn %u: replaced reg %u with %u\n",
				 INSN_UID (insn), regno, REGNO (new_rtx));
		      changed = true;
		      goto did_replacement;
		    }
		  /* We need to re-extract as validate_change clobbers
		     recog_data.  */
		  extract_constrain_insn (insn);
		  preprocess_constraints (insn);
		}
	    }
	}
      no_move_special_case:

      any_replacements = false;

      /* For each input operand, replace a hard register with the
	 eldest live copy that's in an appropriate register class.  */
      for (i = 0; i < n_ops; i++)
	{
	  replaced[i] = false;

	  /* Don't scan match_operand here, since we've no reg class
	     information to pass down.  Any operands that we could
	     substitute in will be represented elsewhere.  */
	  if (recog_data.constraints[i][0] == '\0')
	    continue;

	  /* Don't replace in asms intentionally referencing hard regs.  */
	  if (is_asm && REG_P (recog_data.operand[i])
	      && (REGNO (recog_data.operand[i])
		  == ORIGINAL_REGNO (recog_data.operand[i])))
	    continue;

	  if (recog_data.operand_type[i] == OP_IN)
	    {
	      if (op_alt[i].is_address)
		replaced[i]
		  = replace_oldest_value_addr (recog_data.operand_loc[i],
					       alternative_class (op_alt, i),
					       VOIDmode, ADDR_SPACE_GENERIC,
					       insn, vd);
	      else if (REG_P (recog_data.operand[i]))
		replaced[i]
		  = replace_oldest_value_reg (recog_data.operand_loc[i],
					      alternative_class (op_alt, i),
					      insn, vd);
	      else if (MEM_P (recog_data.operand[i]))
		replaced[i] = replace_oldest_value_mem (recog_data.operand[i],
							insn, vd);
	    }
	  else if (MEM_P (recog_data.operand[i]))
	    replaced[i] = replace_oldest_value_mem (recog_data.operand[i],
						    insn, vd);

	  /* If we performed any replacement, update match_dups.  */
	  if (replaced[i])
	    {
	      int j;
	      rtx new_rtx;

	      new_rtx = *recog_data.operand_loc[i];
	      recog_data.operand[i] = new_rtx;
	      for (j = 0; j < recog_data.n_dups; j++)
		if (recog_data.dup_num[j] == i)
		  validate_unshare_change (insn, recog_data.dup_loc[j], new_rtx, 1);

	      any_replacements = true;
	    }
	}

      if (any_replacements)
	{
	  if (! apply_change_group ())
	    {
	      /* The grouped changes were rolled back; resynchronize
		 recog_data.operand with what is in the insn now.  */
	      for (i = 0; i < n_ops; i++)
		if (replaced[i])
		  {
		    rtx old = *recog_data.operand_loc[i];
		    recog_data.operand[i] = old;
		  }

	      if (dump_file)
		fprintf (dump_file,
			 "insn %u: reg replacements not verified\n",
			 INSN_UID (insn));
	    }
	  else
	    changed = true;
	}

      did_replacement:
      if (changed)
	{
	  anything_changed = true;

	  /* If something changed, perhaps further changes to earlier
	     DEBUG_INSNs can be applied.  */
	  if (vd->n_debug_insn_changes)
	    note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
	}

      ksvd.vd = vd;
      ksvd.ignore_set_reg = NULL_RTX;

      /* Clobber call-clobbered registers.  */
      if (CALL_P (insn))
	{
	  unsigned int set_regno = INVALID_REGNUM;
	  unsigned int set_nregs = 0;
	  unsigned int regno;
	  rtx exp;
	  HARD_REG_SET regs_invalidated_by_this_call;

	  /* A SET in CALL_INSN_FUNCTION_USAGE describes a register
	     that holds a known copy across the call (e.g. the return
	     value register); record it and exempt it from the
	     invalidation below.  */
	  for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
	    {
	      rtx x = XEXP (exp, 0);
	      if (GET_CODE (x) == SET)
		{
		  rtx dest = SET_DEST (x);
		  kill_value (dest, vd);
		  set_value_regno (REGNO (dest), GET_MODE (dest), vd);
		  copy_value (dest, SET_SRC (x), vd);
		  ksvd.ignore_set_reg = dest;
		  set_regno = REGNO (dest);
		  set_nregs = REG_NREGS (dest);
		  break;
		}
	    }

	  get_call_reg_set_usage (insn,
				  &regs_invalidated_by_this_call,
				  regs_invalidated_by_call);
	  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	    if ((TEST_HARD_REG_BIT (regs_invalidated_by_this_call, regno)
		 || HARD_REGNO_CALL_PART_CLOBBERED (regno, vd->e[regno].mode))
		&& (regno < set_regno || regno >= set_regno + set_nregs))
	      kill_value_regno (regno, 1, vd);

	  /* If SET was seen in CALL_INSN_FUNCTION_USAGE, and SET_SRC
	     of the SET isn't in regs_invalidated_by_call hard reg set,
	     but instead among CLOBBERs on the CALL_INSN, we could wrongly
	     assume the value in it is still live.  */
	  if (ksvd.ignore_set_reg)
	    kill_clobbered_values (insn, vd);
	}

      bool copy_p = (set
		     && REG_P (SET_DEST (set))
		     && REG_P (SET_SRC (set)));
      bool noop_p = (copy_p
		     && rtx_equal_p (SET_DEST (set), SET_SRC (set)));

      /* A no-op move leaves the value chains untouched.  */
      if (!noop_p)
	{
	  /* Notice stores.  */
	  note_stores (PATTERN (insn), kill_set_value, &ksvd);

	  /* Notice copies.  */
	  if (copy_p)
	    copy_value (SET_DEST (set), SET_SRC (set), vd);
	}

      if (insn == BB_END (bb))
	break;
    }

  return anything_changed;
}
1083
1084 /* Dump the value chain data to stderr. */
1085
1086 DEBUG_FUNCTION void
1087 debug_value_data (struct value_data *vd)
1088 {
1089 HARD_REG_SET set;
1090 unsigned int i, j;
1091
1092 CLEAR_HARD_REG_SET (set);
1093
1094 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1095 if (vd->e[i].oldest_regno == i)
1096 {
1097 if (vd->e[i].mode == VOIDmode)
1098 {
1099 if (vd->e[i].next_regno != INVALID_REGNUM)
1100 fprintf (stderr, "[%u] Bad next_regno for empty chain (%u)\n",
1101 i, vd->e[i].next_regno);
1102 continue;
1103 }
1104
1105 SET_HARD_REG_BIT (set, i);
1106 fprintf (stderr, "[%u %s] ", i, GET_MODE_NAME (vd->e[i].mode));
1107
1108 for (j = vd->e[i].next_regno;
1109 j != INVALID_REGNUM;
1110 j = vd->e[j].next_regno)
1111 {
1112 if (TEST_HARD_REG_BIT (set, j))
1113 {
1114 fprintf (stderr, "[%u] Loop in regno chain\n", j);
1115 return;
1116 }
1117
1118 if (vd->e[j].oldest_regno != i)
1119 {
1120 fprintf (stderr, "[%u] Bad oldest_regno (%u)\n",
1121 j, vd->e[j].oldest_regno);
1122 return;
1123 }
1124 SET_HARD_REG_BIT (set, j);
1125 fprintf (stderr, "[%u %s] ", j, GET_MODE_NAME (vd->e[j].mode));
1126 }
1127 fputc ('\n', stderr);
1128 }
1129
1130 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1131 if (! TEST_HARD_REG_BIT (set, i)
1132 && (vd->e[i].mode != VOIDmode
1133 || vd->e[i].oldest_regno != i
1134 || vd->e[i].next_regno != INVALID_REGNUM))
1135 fprintf (stderr, "[%u] Non-empty reg in chain (%s %u %i)\n",
1136 i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1137 vd->e[i].next_regno);
1138 }
1139
1140 /* Do copyprop_hardreg_forward_1 for a single basic block BB.
1141 DEBUG_INSN is skipped since we do not want to involve DF related
1142 staff as how it is handled in function pass_cprop_hardreg::execute.
1143
1144 NOTE: Currently it is only used for shrink-wrap. Maybe extend it
1145 to handle DEBUG_INSN for other uses. */
1146
1147 void
1148 copyprop_hardreg_forward_bb_without_debug_insn (basic_block bb)
1149 {
1150 struct value_data *vd;
1151 vd = XNEWVEC (struct value_data, 1);
1152 init_value_data (vd);
1153
1154 skip_debug_insn_p = true;
1155 copyprop_hardreg_forward_1 (bb, vd);
1156 free (vd);
1157 skip_debug_insn_p = false;
1158 }
1159
#ifdef ENABLE_CHECKING
/* Consistency-check the regno chains in VD, reporting any malformed
   chain via internal_error.  Only built with ENABLE_CHECKING.  */
static void
validate_value_data (struct value_data *vd)
{
  HARD_REG_SET seen;
  unsigned int regno, member;

  CLEAR_HARD_REG_SET (seen);

  /* Verify every chain reachable from its head register.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; ++regno)
    {
      if (vd->e[regno].oldest_regno != regno)
	continue;

      if (vd->e[regno].mode == VOIDmode)
	{
	  /* An empty chain must not have a successor.  */
	  if (vd->e[regno].next_regno != INVALID_REGNUM)
	    internal_error ("validate_value_data: [%u] Bad next_regno for empty chain (%u)",
			    regno, vd->e[regno].next_regno);
	  continue;
	}

      SET_HARD_REG_BIT (seen, regno);

      for (member = vd->e[regno].next_regno;
	   member != INVALID_REGNUM;
	   member = vd->e[member].next_regno)
	{
	  /* Revisiting a register means the chain loops.  */
	  if (TEST_HARD_REG_BIT (seen, member))
	    internal_error ("validate_value_data: Loop in regno chain (%u)",
			    member);
	  /* Every member must point back at this chain's head.  */
	  if (vd->e[member].oldest_regno != regno)
	    internal_error ("validate_value_data: [%u] Bad oldest_regno (%u)",
			    member, vd->e[member].oldest_regno);

	  SET_HARD_REG_BIT (seen, member);
	}
    }

  /* Any register not reached above must still be empty.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; ++regno)
    if (! TEST_HARD_REG_BIT (seen, regno)
	&& (vd->e[regno].mode != VOIDmode
	    || vd->e[regno].oldest_regno != regno
	    || vd->e[regno].next_regno != INVALID_REGNUM))
      internal_error ("validate_value_data: [%u] Non-empty reg in chain (%s %u %i)",
		      regno, GET_MODE_NAME (vd->e[regno].mode),
		      vd->e[regno].oldest_regno, vd->e[regno].next_regno);
}
#endif
1207 \f
1208 namespace {
1209
/* Pass descriptor for the hard-register copy propagation RTL pass;
   see tree-pass.h for the meaning of each field.  */
const pass_data pass_data_cprop_hardreg =
{
  RTL_PASS, /* type */
  "cprop_hardreg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CPROP_REGISTERS, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};
1222
1223 class pass_cprop_hardreg : public rtl_opt_pass
1224 {
1225 public:
1226 pass_cprop_hardreg (gcc::context *ctxt)
1227 : rtl_opt_pass (pass_data_cprop_hardreg, ctxt)
1228 {}
1229
1230 /* opt_pass methods: */
1231 virtual bool gate (function *)
1232 {
1233 return (optimize > 0 && (flag_cprop_registers));
1234 }
1235
1236 virtual unsigned int execute (function *);
1237
1238 }; // class pass_cprop_hardreg
1239
/* Run hard-register copy propagation over every basic block of FUN,
   then apply any queued DEBUG_INSN changes for registers still live
   at the end of their block.  Returns 0 (no extra TODO flags).  */
unsigned int
pass_cprop_hardreg::execute (function *fun)
{
  struct value_data *all_vd;	/* Per-basic-block value chain state.  */
  basic_block bb;
  sbitmap visited;		/* Blocks already processed in this walk.  */
  bool analyze_called = false;	/* Ensures df_analyze runs at most once.  */

  all_vd = XNEWVEC (struct value_data, last_basic_block_for_fn (fun));

  visited = sbitmap_alloc (last_basic_block_for_fn (fun));
  bitmap_clear (visited);

  FOR_EACH_BB_FN (bb, fun)
    {
      bitmap_set_bit (visited, bb->index);

      /* If a block has a single predecessor, that we've already
	 processed, begin with the value data that was live at
	 the end of the predecessor block.  */
      /* ??? Ought to use more intelligent queuing of blocks.  */
      if (single_pred_p (bb)
	  && bitmap_bit_p (visited, single_pred (bb)->index)
	  && ! (single_pred_edge (bb)->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
	{
	  all_vd[bb->index] = all_vd[single_pred (bb)->index];
	  if (all_vd[bb->index].n_debug_insn_changes)
	    {
	      /* The aggregate copy above duplicated the predecessor's
		 pointers to queued debug-insn change lists; detach
		 them from this block's copy (the lists stay with the
		 predecessor's entry).  */
	      unsigned int regno;

	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		{
		  if (all_vd[bb->index].e[regno].debug_insn_changes)
		    {
		      all_vd[bb->index].e[regno].debug_insn_changes = NULL;
		      /* Stop early once every queued change is cleared.  */
		      if (--all_vd[bb->index].n_debug_insn_changes == 0)
			break;
		    }
		}
	    }
	}
      else
	init_value_data (all_vd + bb->index);

      copyprop_hardreg_forward_1 (bb, all_vd + bb->index);
    }

  if (MAY_HAVE_DEBUG_INSNS)
    {
      /* Apply the DEBUG_INSN changes still queued at the end of each
	 processed block, but only for registers live out of the block;
	 the rest are discarded when the pool is released below.  */
      FOR_EACH_BB_FN (bb, fun)
	if (bitmap_bit_p (visited, bb->index)
	    && all_vd[bb->index].n_debug_insn_changes)
	  {
	    unsigned int regno;
	    bitmap live;

	    /* Liveness information is only needed here, so compute it
	       lazily and at most once.  */
	    if (!analyze_called)
	      {
		df_analyze ();
		analyze_called = true;
	      }
	    live = df_get_live_out (bb);
	    for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	      if (all_vd[bb->index].e[regno].debug_insn_changes)
		{
		  if (REGNO_REG_SET_P (live, regno))
		    apply_debug_insn_changes (all_vd + bb->index, regno);
		  /* Stop once this block's queue is exhausted.  */
		  if (all_vd[bb->index].n_debug_insn_changes == 0)
		    break;
		}
	  }

      queued_debug_insn_change::pool.release ();
    }

  sbitmap_free (visited);
  free (all_vd);
  return 0;
}
1319
1320 } // anon namespace
1321
/* Factory entry point used by the pass manager to instantiate the
   cprop_hardreg pass; the caller owns the returned object.  */
rtl_opt_pass *
make_pass_cprop_hardreg (gcc::context *ctxt)
{
  return new pass_cprop_hardreg (ctxt);
}