1 /* Copy propagation on hard registers for the GNU compiler.
2 Copyright (C) 2000-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "df.h"
26 #include "memmodel.h"
27 #include "tm_p.h"
28 #include "insn-config.h"
29 #include "regs.h"
30 #include "emit-rtl.h"
31 #include "recog.h"
32 #include "diagnostic-core.h"
33 #include "addresses.h"
34 #include "tree-pass.h"
35 #include "rtl-iter.h"
36 #include "cfgrtl.h"
37 #include "target.h"
38
39 /* The following code does forward propagation of hard register copies.
40 The object is to eliminate as many dependencies as possible, so that
41 we have the most scheduling freedom. As a side effect, we also clean
42 up some silly register allocation decisions made by reload. This
43 code may be obsoleted by a new register allocator. */
44
45 /* DEBUG_INSNs aren't changed right away, as doing so might extend the
46 lifetime of a register and get the DEBUG_INSN subsequently reset.
47 So they are queued instead, and updated only when the register is
48 used in some subsequent real insn before it is set. */
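/* For example, if a DEBUG_INSN uses (reg:SI 5) and the pass finds that
   the older copy (reg:SI 3) could be substituted, the replacement is
   queued on reg 3's entry rather than applied immediately; it is only
   committed once reg 3 is seen in a later real insn (via
   cprop_find_used_regs) or is still live at the end of the block.  */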
49 struct queued_debug_insn_change
50 {
51 struct queued_debug_insn_change *next;
52 rtx_insn *insn;
53 rtx *loc;
54 rtx new_rtx;
55 };
56
57 /* For each register, we have a list of registers that contain the same
58 value. The OLDEST_REGNO field points to the head of the list, and
59 the NEXT_REGNO field runs through the list. The MODE field indicates
60 what mode the data is known to be in; this field is VOIDmode when the
61 register is not known to contain valid data. */
62
63 struct value_data_entry
64 {
65 machine_mode mode;
66 unsigned int oldest_regno;
67 unsigned int next_regno;
68 struct queued_debug_insn_change *debug_insn_changes;
69 };
70
71 struct value_data
72 {
73 struct value_data_entry e[FIRST_PSEUDO_REGISTER];
74 unsigned int max_value_regs;
75 unsigned int n_debug_insn_changes;
76 };
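/* As a small example of the encoding: after a copy such as
     (set (reg:SI 5) (reg:SI 3))
   has been processed, e[3].oldest_regno == 3, e[3].next_regno == 5,
   e[5].oldest_regno == 3 and e[5].next_regno == INVALID_REGNUM, with
   both entries in SImode (assuming reg 3 was itself set in SImode).
   Walking next_regno from the oldest register therefore visits every
   register currently known to hold the same value.  */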
77
78 static object_allocator<queued_debug_insn_change> queued_debug_insn_change_pool
79 ("debug insn changes pool");
80
81 static bool skip_debug_insn_p;
82
83 static void kill_value_one_regno (unsigned, struct value_data *);
84 static void kill_value_regno (unsigned, unsigned, struct value_data *);
85 static void kill_value (const_rtx, struct value_data *);
86 static void set_value_regno (unsigned, machine_mode, struct value_data *);
87 static void init_value_data (struct value_data *);
88 static void kill_clobbered_value (rtx, const_rtx, void *);
89 static void kill_set_value (rtx, const_rtx, void *);
90 static void copy_value (rtx, rtx, struct value_data *);
91 static bool mode_change_ok (machine_mode, machine_mode,
92 unsigned int);
93 static rtx maybe_mode_change (machine_mode, machine_mode,
94 machine_mode, unsigned int, unsigned int);
95 static rtx find_oldest_value_reg (enum reg_class, rtx, struct value_data *);
96 static bool replace_oldest_value_reg (rtx *, enum reg_class, rtx_insn *,
97 struct value_data *);
98 static bool replace_oldest_value_addr (rtx *, enum reg_class,
99 machine_mode, addr_space_t,
100 rtx_insn *, struct value_data *);
101 static bool replace_oldest_value_mem (rtx, rtx_insn *, struct value_data *);
102 static bool copyprop_hardreg_forward_1 (basic_block, struct value_data *);
103 extern void debug_value_data (struct value_data *);
104 static void validate_value_data (struct value_data *);
105
106 /* Free all queued updates for DEBUG_INSNs that change some reg to
107 register REGNO. */
108
109 static void
110 free_debug_insn_changes (struct value_data *vd, unsigned int regno)
111 {
112 struct queued_debug_insn_change *cur, *next;
113 for (cur = vd->e[regno].debug_insn_changes; cur; cur = next)
114 {
115 next = cur->next;
116 --vd->n_debug_insn_changes;
117 queued_debug_insn_change_pool.remove (cur);
118 }
119 vd->e[regno].debug_insn_changes = NULL;
120 }
121
122 /* Kill register REGNO. This involves removing it from any value
123 lists, and resetting the value mode to VOIDmode. This is only a
124 helper function; it does not handle any hard registers overlapping
125 with REGNO. */
126
127 static void
128 kill_value_one_regno (unsigned int regno, struct value_data *vd)
129 {
130 unsigned int i, next;
131
132 if (vd->e[regno].oldest_regno != regno)
133 {
134 for (i = vd->e[regno].oldest_regno;
135 vd->e[i].next_regno != regno;
136 i = vd->e[i].next_regno)
137 continue;
138 vd->e[i].next_regno = vd->e[regno].next_regno;
139 }
140 else if ((next = vd->e[regno].next_regno) != INVALID_REGNUM)
141 {
142 for (i = next; i != INVALID_REGNUM; i = vd->e[i].next_regno)
143 vd->e[i].oldest_regno = next;
144 }
145
146 vd->e[regno].mode = VOIDmode;
147 vd->e[regno].oldest_regno = regno;
148 vd->e[regno].next_regno = INVALID_REGNUM;
149 if (vd->e[regno].debug_insn_changes)
150 free_debug_insn_changes (vd, regno);
151
152 if (flag_checking)
153 validate_value_data (vd);
154 }
155
156 /* Kill the value in register REGNO for NREGS, and any other registers
157 whose values overlap. */
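/* For instance, on a target where DImode occupies two consecutive
   32-bit hard registers: if (reg:DI 8) is recorded in regs 8-9 and we
   are asked to kill only reg 9, the backwards scan below notices that
   reg 8's DImode value overlaps reg 9 and kills reg 8 as well;
   max_value_regs bounds how far back that scan has to look.  */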
158
159 static void
160 kill_value_regno (unsigned int regno, unsigned int nregs,
161 struct value_data *vd)
162 {
163 unsigned int j;
164
165 /* Kill the value we're told to kill. */
166 for (j = 0; j < nregs; ++j)
167 kill_value_one_regno (regno + j, vd);
168
169 /* Kill everything that overlapped what we're told to kill. */
170 if (regno < vd->max_value_regs)
171 j = 0;
172 else
173 j = regno - vd->max_value_regs;
174 for (; j < regno; ++j)
175 {
176 unsigned int i, n;
177 if (vd->e[j].mode == VOIDmode)
178 continue;
179 n = hard_regno_nregs (j, vd->e[j].mode);
180 if (j + n > regno)
181 for (i = 0; i < n; ++i)
182 kill_value_one_regno (j + i, vd);
183 }
184 }
185
186 /* Kill X. This is a convenience function wrapping kill_value_regno
187 so that the mode of the register is taken into account. */
188
189 static void
190 kill_value (const_rtx x, struct value_data *vd)
191 {
192 if (GET_CODE (x) == SUBREG)
193 {
194 rtx tmp = simplify_subreg (GET_MODE (x), SUBREG_REG (x),
195 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
196 x = tmp ? tmp : SUBREG_REG (x);
197 }
198 if (REG_P (x))
199 kill_value_regno (REGNO (x), REG_NREGS (x), vd);
200 }
201
202 /* Remember that REGNO is valid in MODE. */
203
204 static void
205 set_value_regno (unsigned int regno, machine_mode mode,
206 struct value_data *vd)
207 {
208 unsigned int nregs;
209
210 vd->e[regno].mode = mode;
211
212 nregs = hard_regno_nregs (regno, mode);
213 if (nregs > vd->max_value_regs)
214 vd->max_value_regs = nregs;
215 }
216
217 /* Initialize VD such that there are no known relationships between regs. */
218
219 static void
220 init_value_data (struct value_data *vd)
221 {
222 int i;
223 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
224 {
225 vd->e[i].mode = VOIDmode;
226 vd->e[i].oldest_regno = i;
227 vd->e[i].next_regno = INVALID_REGNUM;
228 vd->e[i].debug_insn_changes = NULL;
229 }
230 vd->max_value_regs = 0;
231 vd->n_debug_insn_changes = 0;
232 }
233
234 /* Called through note_stores. If X is clobbered, kill its value. */
235
236 static void
237 kill_clobbered_value (rtx x, const_rtx set, void *data)
238 {
239 struct value_data *const vd = (struct value_data *) data;
240 if (GET_CODE (set) == CLOBBER)
241 kill_value (x, vd);
242 }
243
244 /* A structure passed as data to kill_set_value through note_stores. */
245 struct kill_set_value_data
246 {
247 struct value_data *vd;
248 rtx ignore_set_reg;
249 };
250
251 /* Called through note_stores. If X is set, not clobbered, kill its
252 current value and install it as the root of its own value list. */
253
254 static void
255 kill_set_value (rtx x, const_rtx set, void *data)
256 {
257 struct kill_set_value_data *ksvd = (struct kill_set_value_data *) data;
258 if (rtx_equal_p (x, ksvd->ignore_set_reg))
259 return;
260 if (GET_CODE (set) != CLOBBER)
261 {
262 kill_value (x, ksvd->vd);
263 if (REG_P (x))
264 set_value_regno (REGNO (x), GET_MODE (x), ksvd->vd);
265 }
266 }
267
268 /* Kill any register used in X as the base of an auto-increment expression,
269 and install that register as the root of its own value list. */
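/* For example, (mem:SI (post_inc:SI (reg:SI 4))) causes reg 4 to be
   killed, since its previous contents are gone after the side effect,
   and then re-recorded as the root of its own value in SImode.  */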
270
271 static void
272 kill_autoinc_value (rtx_insn *insn, struct value_data *vd)
273 {
274 subrtx_iterator::array_type array;
275 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
276 {
277 const_rtx x = *iter;
278 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
279 {
280 x = XEXP (x, 0);
281 kill_value (x, vd);
282 set_value_regno (REGNO (x), GET_MODE (x), vd);
283 iter.skip_subrtxes ();
284 }
285 }
286 }
287
288 /* Assert that SRC has been copied to DEST. Adjust the data structures
289 to reflect that SRC contains an older copy of the shared value. */
290
291 static void
292 copy_value (rtx dest, rtx src, struct value_data *vd)
293 {
294 unsigned int dr = REGNO (dest);
295 unsigned int sr = REGNO (src);
296 unsigned int dn, sn;
297 unsigned int i;
298
299 /* ??? At present, it's possible to see noop sets. It'd be nice if
300 this were cleaned up beforehand... */
301 if (sr == dr)
302 return;
303
304 /* Do not propagate copies to the stack pointer, as that can leave
305 memory accesses with no scheduling dependency on the stack update. */
306 if (dr == STACK_POINTER_REGNUM)
307 return;
308
309 /* Likewise with the frame pointer, if we're using one. */
310 if (frame_pointer_needed && dr == HARD_FRAME_POINTER_REGNUM)
311 return;
312
313 /* Do not propagate copies to fixed or global registers: patterns
314 may rely on seeing a particular fixed register, and users may
315 expect the chosen global register in an asm. */
316 if (fixed_regs[dr] || global_regs[dr])
317 return;
318
319 /* If SRC and DEST overlap, don't record anything. */
320 dn = REG_NREGS (dest);
321 sn = REG_NREGS (src);
322 if ((dr > sr && dr < sr + sn)
323 || (sr > dr && sr < dr + dn))
324 return;
325
326 /* If SRC had no assigned mode (i.e. we didn't know it was live)
327 assign it now and assume the value came from an input argument
328 or somesuch. */
329 if (vd->e[sr].mode == VOIDmode)
330 set_value_regno (sr, vd->e[dr].mode, vd);
331
332 /* If we are narrowing the input to a smaller number of hard regs,
333 and the target is big endian, we are really extracting a high part.
334 Since we generally associate a low part of a value with the value itself,
335 we must not do the same for the high part.
336 Note we can still get low parts for the same mode combination through
337 a two-step copy involving differently sized hard regs.
338 Assume hard regs fr* are 32 bits each, while r* are 64 bits each:
339 (set (reg:DI r0) (reg:DI fr0))
340 (set (reg:SI fr2) (reg:SI r0))
341 loads the low part of (reg:DI fr0) - i.e. fr1 - into fr2, while:
342 (set (reg:SI fr2) (reg:SI fr0))
343 loads the high part of (reg:DI fr0) into fr2.
344
345 We can't properly represent the latter case in our tables, so don't
346 record anything then. */
347 else if (sn < hard_regno_nregs (sr, vd->e[sr].mode)
348 && subreg_lowpart_offset (GET_MODE (dest), vd->e[sr].mode) != 0)
349 return;
350
351 /* If SRC had been assigned a mode narrower than the copy, we can't
352 link DEST into the chain, because not all of the pieces of the
353 copy came from oldest_regno. */
354 else if (sn > hard_regno_nregs (sr, vd->e[sr].mode))
355 return;
356
357 /* Link DR at the end of the value chain used by SR. */
358
359 vd->e[dr].oldest_regno = vd->e[sr].oldest_regno;
360
361 for (i = sr; vd->e[i].next_regno != INVALID_REGNUM; i = vd->e[i].next_regno)
362 continue;
363 vd->e[i].next_regno = dr;
364
365 if (flag_checking)
366 validate_value_data (vd);
367 }
368
369 /* Return true if a mode change from ORIG to NEW is allowed for REGNO. */
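/* For instance, a value that was set in SImode must not be read back
   in DImode, since nothing is known about the extra high-part bits, so
   widening accesses are rejected outright; other mode changes are
   allowed only if the target's REG_CAN_CHANGE_MODE_P agrees.  */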
370
371 static bool
372 mode_change_ok (machine_mode orig_mode, machine_mode new_mode,
373 unsigned int regno ATTRIBUTE_UNUSED)
374 {
375 if (partial_subreg_p (orig_mode, new_mode))
376 return false;
377
378 return REG_CAN_CHANGE_MODE_P (regno, orig_mode, new_mode);
379 }
380
381 /* Register REGNO was originally set in ORIG_MODE. It - or a copy of it -
382 was copied in COPY_MODE to COPY_REGNO, and then COPY_REGNO was accessed
383 in NEW_MODE.
384 Return a NEW_MODE rtx for REGNO if that's OK, otherwise return NULL_RTX. */
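/* As a worked example, using the fr/r register layout from the comment
   in copy_value above (fr* are 32 bits each, r* are 64 bits each):
   if the value was set as (reg:DI fr0), copied in DImode into r0, and
   r0 is now read in SImode, then copy_offset is 0 and OFFSET is
   subreg_size_lowpart_offset (4, 8) - 4 on a big-endian target, 0 on a
   little-endian one - so, assuming the target allows the mode change,
   the result is (reg:SI fr1) or (reg:SI fr0) respectively.  */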
385
386 static rtx
387 maybe_mode_change (machine_mode orig_mode, machine_mode copy_mode,
388 machine_mode new_mode, unsigned int regno,
389 unsigned int copy_regno ATTRIBUTE_UNUSED)
390 {
391 if (partial_subreg_p (copy_mode, orig_mode)
392 && partial_subreg_p (copy_mode, new_mode))
393 return NULL_RTX;
394
395 /* Avoid creating multiple copies of the stack pointer. Some ports
396 assume there is one and only one stack pointer.
397
398 It's unclear if we need to do the same for other special registers. */
399 if (regno == STACK_POINTER_REGNUM)
400 return NULL_RTX;
401
402 if (orig_mode == new_mode)
403 return gen_raw_REG (new_mode, regno);
404 else if (mode_change_ok (orig_mode, new_mode, regno))
405 {
406 int copy_nregs = hard_regno_nregs (copy_regno, copy_mode);
407 int use_nregs = hard_regno_nregs (copy_regno, new_mode);
408 int copy_offset
409 = GET_MODE_SIZE (copy_mode) / copy_nregs * (copy_nregs - use_nregs);
410 unsigned int offset
411 = subreg_size_lowpart_offset (GET_MODE_SIZE (new_mode) + copy_offset,
412 GET_MODE_SIZE (orig_mode));
413 regno += subreg_regno_offset (regno, orig_mode, offset, new_mode);
414 if (targetm.hard_regno_mode_ok (regno, new_mode))
415 return gen_raw_REG (new_mode, regno);
416 }
417 return NULL_RTX;
418 }
419
420 /* Find the oldest copy of the value contained in REGNO that is in
421 register class CL and has mode MODE. If found, return an rtx
422 of that oldest register, otherwise return NULL. */
423
424 static rtx
425 find_oldest_value_reg (enum reg_class cl, rtx reg, struct value_data *vd)
426 {
427 unsigned int regno = REGNO (reg);
428 machine_mode mode = GET_MODE (reg);
429 unsigned int i;
430
431 /* If we are accessing REG in some mode other than what we set it in,
432 make sure that the replacement is valid. In particular, consider
433 (set (reg:DI r11) (...))
434 (set (reg:SI r9) (reg:SI r11))
435 (set (reg:SI r10) (...))
436 (set (...) (reg:DI r9))
437 Replacing r9 with r11 is invalid. */
438 if (mode != vd->e[regno].mode
439 && REG_NREGS (reg) > hard_regno_nregs (regno, vd->e[regno].mode))
440 return NULL_RTX;
441
442 for (i = vd->e[regno].oldest_regno; i != regno; i = vd->e[i].next_regno)
443 {
444 machine_mode oldmode = vd->e[i].mode;
445 rtx new_rtx;
446
447 if (!in_hard_reg_set_p (reg_class_contents[cl], mode, i))
448 continue;
449
450 new_rtx = maybe_mode_change (oldmode, vd->e[regno].mode, mode, i, regno);
451 if (new_rtx)
452 {
453 ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (reg);
454 REG_ATTRS (new_rtx) = REG_ATTRS (reg);
455 REG_POINTER (new_rtx) = REG_POINTER (reg);
456 return new_rtx;
457 }
458 }
459
460 return NULL_RTX;
461 }
462
463 /* If possible, replace the register at *LOC with the oldest register
464 in register class CL. Return true if successfully replaced. */
465
466 static bool
467 replace_oldest_value_reg (rtx *loc, enum reg_class cl, rtx_insn *insn,
468 struct value_data *vd)
469 {
470 rtx new_rtx = find_oldest_value_reg (cl, *loc, vd);
471 if (new_rtx && (!DEBUG_INSN_P (insn) || !skip_debug_insn_p))
472 {
473 if (DEBUG_INSN_P (insn))
474 {
475 struct queued_debug_insn_change *change;
476
477 if (dump_file)
478 fprintf (dump_file, "debug_insn %u: queued replacing reg %u with %u\n",
479 INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));
480
481 change = queued_debug_insn_change_pool.allocate ();
482 change->next = vd->e[REGNO (new_rtx)].debug_insn_changes;
483 change->insn = insn;
484 change->loc = loc;
485 change->new_rtx = new_rtx;
486 vd->e[REGNO (new_rtx)].debug_insn_changes = change;
487 ++vd->n_debug_insn_changes;
488 return true;
489 }
490 if (dump_file)
491 fprintf (dump_file, "insn %u: replaced reg %u with %u\n",
492 INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));
493
494 validate_change (insn, loc, new_rtx, 1);
495 return true;
496 }
497 return false;
498 }
499
500 /* Similar to replace_oldest_value_reg, but *LOC contains an address.
501 Adapted from find_reloads_address_1. CL is INDEX_REG_CLASS or
502 BASE_REG_CLASS depending on how the register is being considered. */
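/* For example, in an address such as
     (plus (mult (reg 6) (const_int 4)) (reg 7))
   the MULT operand is treated as the index and reg 7 as the base, so
   reg 6 is matched against INDEX_REG_CLASS while reg 7 is matched
   against base_reg_class for a PLUS with a MULT index.  */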
503
504 static bool
505 replace_oldest_value_addr (rtx *loc, enum reg_class cl,
506 machine_mode mode, addr_space_t as,
507 rtx_insn *insn, struct value_data *vd)
508 {
509 rtx x = *loc;
510 RTX_CODE code = GET_CODE (x);
511 const char *fmt;
512 int i, j;
513 bool changed = false;
514
515 switch (code)
516 {
517 case PLUS:
518 if (DEBUG_INSN_P (insn))
519 break;
520
521 {
522 rtx orig_op0 = XEXP (x, 0);
523 rtx orig_op1 = XEXP (x, 1);
524 RTX_CODE code0 = GET_CODE (orig_op0);
525 RTX_CODE code1 = GET_CODE (orig_op1);
526 rtx op0 = orig_op0;
527 rtx op1 = orig_op1;
528 rtx *locI = NULL;
529 rtx *locB = NULL;
530 enum rtx_code index_code = SCRATCH;
531
532 if (GET_CODE (op0) == SUBREG)
533 {
534 op0 = SUBREG_REG (op0);
535 code0 = GET_CODE (op0);
536 }
537
538 if (GET_CODE (op1) == SUBREG)
539 {
540 op1 = SUBREG_REG (op1);
541 code1 = GET_CODE (op1);
542 }
543
544 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
545 || code0 == ZERO_EXTEND || code1 == MEM)
546 {
547 locI = &XEXP (x, 0);
548 locB = &XEXP (x, 1);
549 index_code = GET_CODE (*locI);
550 }
551 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
552 || code1 == ZERO_EXTEND || code0 == MEM)
553 {
554 locI = &XEXP (x, 1);
555 locB = &XEXP (x, 0);
556 index_code = GET_CODE (*locI);
557 }
558 else if (code0 == CONST_INT || code0 == CONST
559 || code0 == SYMBOL_REF || code0 == LABEL_REF)
560 {
561 locB = &XEXP (x, 1);
562 index_code = GET_CODE (XEXP (x, 0));
563 }
564 else if (code1 == CONST_INT || code1 == CONST
565 || code1 == SYMBOL_REF || code1 == LABEL_REF)
566 {
567 locB = &XEXP (x, 0);
568 index_code = GET_CODE (XEXP (x, 1));
569 }
570 else if (code0 == REG && code1 == REG)
571 {
572 int index_op;
573 unsigned regno0 = REGNO (op0), regno1 = REGNO (op1);
574
575 if (REGNO_OK_FOR_INDEX_P (regno1)
576 && regno_ok_for_base_p (regno0, mode, as, PLUS, REG))
577 index_op = 1;
578 else if (REGNO_OK_FOR_INDEX_P (regno0)
579 && regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
580 index_op = 0;
581 else if (regno_ok_for_base_p (regno0, mode, as, PLUS, REG)
582 || REGNO_OK_FOR_INDEX_P (regno1))
583 index_op = 1;
584 else if (regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
585 index_op = 0;
586 else
587 index_op = 1;
588
589 locI = &XEXP (x, index_op);
590 locB = &XEXP (x, !index_op);
591 index_code = GET_CODE (*locI);
592 }
593 else if (code0 == REG)
594 {
595 locI = &XEXP (x, 0);
596 locB = &XEXP (x, 1);
597 index_code = GET_CODE (*locI);
598 }
599 else if (code1 == REG)
600 {
601 locI = &XEXP (x, 1);
602 locB = &XEXP (x, 0);
603 index_code = GET_CODE (*locI);
604 }
605
606 if (locI)
607 changed |= replace_oldest_value_addr (locI, INDEX_REG_CLASS,
608 mode, as, insn, vd);
609 if (locB)
610 changed |= replace_oldest_value_addr (locB,
611 base_reg_class (mode, as, PLUS,
612 index_code),
613 mode, as, insn, vd);
614 return changed;
615 }
616
617 case POST_INC:
618 case POST_DEC:
619 case POST_MODIFY:
620 case PRE_INC:
621 case PRE_DEC:
622 case PRE_MODIFY:
623 return false;
624
625 case MEM:
626 return replace_oldest_value_mem (x, insn, vd);
627
628 case REG:
629 return replace_oldest_value_reg (loc, cl, insn, vd);
630
631 default:
632 break;
633 }
634
635 fmt = GET_RTX_FORMAT (code);
636 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
637 {
638 if (fmt[i] == 'e')
639 changed |= replace_oldest_value_addr (&XEXP (x, i), cl, mode, as,
640 insn, vd);
641 else if (fmt[i] == 'E')
642 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
643 changed |= replace_oldest_value_addr (&XVECEXP (x, i, j), cl,
644 mode, as, insn, vd);
645 }
646
647 return changed;
648 }
649
650 /* Similar to replace_oldest_value_reg, but X contains a memory. */
651
652 static bool
653 replace_oldest_value_mem (rtx x, rtx_insn *insn, struct value_data *vd)
654 {
655 enum reg_class cl;
656
657 if (DEBUG_INSN_P (insn))
658 cl = ALL_REGS;
659 else
660 cl = base_reg_class (GET_MODE (x), MEM_ADDR_SPACE (x), MEM, SCRATCH);
661
662 return replace_oldest_value_addr (&XEXP (x, 0), cl,
663 GET_MODE (x), MEM_ADDR_SPACE (x),
664 insn, vd);
665 }
666
667 /* Apply all queued updates for DEBUG_INSNs that change some reg to
668 register REGNO. */
669
670 static void
671 apply_debug_insn_changes (struct value_data *vd, unsigned int regno)
672 {
673 struct queued_debug_insn_change *change;
674 rtx_insn *last_insn = vd->e[regno].debug_insn_changes->insn;
675
676 for (change = vd->e[regno].debug_insn_changes;
677 change;
678 change = change->next)
679 {
680 if (last_insn != change->insn)
681 {
682 apply_change_group ();
683 last_insn = change->insn;
684 }
685 validate_change (change->insn, change->loc, change->new_rtx, 1);
686 }
687 apply_change_group ();
688 }
689
690 /* Called via note_uses, for all used registers in a real insn
691 apply DEBUG_INSN changes that change registers to the used
692 registers. */
693
694 static void
695 cprop_find_used_regs (rtx *loc, void *data)
696 {
697 struct value_data *const vd = (struct value_data *) data;
698 subrtx_iterator::array_type array;
699 FOR_EACH_SUBRTX (iter, array, *loc, NONCONST)
700 {
701 const_rtx x = *iter;
702 if (REG_P (x))
703 {
704 unsigned int regno = REGNO (x);
705 if (vd->e[regno].debug_insn_changes)
706 {
707 apply_debug_insn_changes (vd, regno);
708 free_debug_insn_changes (vd, regno);
709 }
710 }
711 }
712 }
713
714 /* Apply clobbers of INSN in PATTERN and CALL_INSN_FUNCTION_USAGE to value_data VD. */
715
716 static void
717 kill_clobbered_values (rtx_insn *insn, struct value_data *vd)
718 {
719 note_stores (PATTERN (insn), kill_clobbered_value, vd);
720
721 if (CALL_P (insn))
722 {
723 rtx exp;
724
725 for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
726 {
727 rtx x = XEXP (exp, 0);
728 if (GET_CODE (x) == CLOBBER)
729 kill_value (SET_DEST (x), vd);
730 }
731 }
732 }
733
734 /* Perform the forward copy propagation on basic block BB. */
735
736 static bool
737 copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd)
738 {
739 bool anything_changed = false;
740 rtx_insn *insn, *next;
741
742 for (insn = BB_HEAD (bb); ; insn = next)
743 {
744 int n_ops, i, predicated;
745 bool is_asm, any_replacements;
746 rtx set;
747 rtx link;
748 bool replaced[MAX_RECOG_OPERANDS];
749 bool changed = false;
750 struct kill_set_value_data ksvd;
751
752 next = NEXT_INSN (insn);
753 if (!NONDEBUG_INSN_P (insn))
754 {
755 if (DEBUG_INSN_P (insn))
756 {
757 rtx loc = INSN_VAR_LOCATION_LOC (insn);
758 if (!VAR_LOC_UNKNOWN_P (loc))
759 replace_oldest_value_addr (&INSN_VAR_LOCATION_LOC (insn),
760 ALL_REGS, GET_MODE (loc),
761 ADDR_SPACE_GENERIC, insn, vd);
762 }
763
764 if (insn == BB_END (bb))
765 break;
766 else
767 continue;
768 }
769
770 set = single_set (insn);
771
772 /* Detect noop sets and remove them before processing side effects. */
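/* For example, if r2 was copied to both r5 and r7 earlier, an insn
   (set (reg:SI 5) (reg:SI 7)) resolves both sides to the same oldest
   register r2 (assuming r2 is usable in the required class and mode),
   so the move changes nothing and can be deleted.  */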
773 if (set && REG_P (SET_DEST (set)) && REG_P (SET_SRC (set)))
774 {
775 unsigned int regno = REGNO (SET_SRC (set));
776 rtx r1 = find_oldest_value_reg (REGNO_REG_CLASS (regno),
777 SET_DEST (set), vd);
778 rtx r2 = find_oldest_value_reg (REGNO_REG_CLASS (regno),
779 SET_SRC (set), vd);
780 if (rtx_equal_p (r1 ? r1 : SET_DEST (set), r2 ? r2 : SET_SRC (set)))
781 {
782 bool last = insn == BB_END (bb);
783 delete_insn (insn);
784 if (last)
785 break;
786 continue;
787 }
788 }
789
790 extract_constrain_insn (insn);
791 preprocess_constraints (insn);
792 const operand_alternative *op_alt = which_op_alt ();
793 n_ops = recog_data.n_operands;
794 is_asm = asm_noperands (PATTERN (insn)) >= 0;
795
796 /* Simplify the code below by promoting OP_OUT to OP_INOUT
797 in predicated instructions. */
798
799 predicated = GET_CODE (PATTERN (insn)) == COND_EXEC;
800 for (i = 0; i < n_ops; ++i)
801 {
802 int matches = op_alt[i].matches;
803 if (matches >= 0 || op_alt[i].matched >= 0
804 || (predicated && recog_data.operand_type[i] == OP_OUT))
805 recog_data.operand_type[i] = OP_INOUT;
806 }
807
808 /* Apply changes to earlier DEBUG_INSNs if possible. */
809 if (vd->n_debug_insn_changes)
810 note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
811
812 /* For each earlyclobber operand, zap the value data. */
813 for (i = 0; i < n_ops; i++)
814 if (op_alt[i].earlyclobber)
815 kill_value (recog_data.operand[i], vd);
816
817 /* Within asms, a clobber cannot overlap inputs or outputs.
818 I wouldn't think this were true for regular insns, but
819 scan_rtx treats them like that... */
820 kill_clobbered_values (insn, vd);
821
822 /* Kill all auto-incremented values. */
823 /* ??? REG_INC is useless, since stack pushes aren't done that way. */
824 kill_autoinc_value (insn, vd);
825
826 /* Kill all early-clobbered operands. */
827 for (i = 0; i < n_ops; i++)
828 if (op_alt[i].earlyclobber)
829 kill_value (recog_data.operand[i], vd);
830
831 /* If we have dead sets in the insn, then we need to note these just
832 as we would clobbers. */
833 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
834 {
835 if (REG_NOTE_KIND (link) == REG_UNUSED)
836 {
837 kill_value (XEXP (link, 0), vd);
838 /* Furthermore, if the insn looked like a single-set,
839 but the dead store kills the source value of that
840 set, then we can no longer use the plain move
841 special case below. */
842 if (set
843 && reg_overlap_mentioned_p (XEXP (link, 0), SET_SRC (set)))
844 set = NULL;
845 }
846 }
847
848 /* Special-case plain move instructions, since we may well
849 be able to do the move from a different register class. */
850 if (set && REG_P (SET_SRC (set)))
851 {
852 rtx src = SET_SRC (set);
853 unsigned int regno = REGNO (src);
854 machine_mode mode = GET_MODE (src);
855 unsigned int i;
856 rtx new_rtx;
857
858 /* If we are accessing SRC in some mode other than what we
859 set it in, make sure that the replacement is valid. */
860 if (mode != vd->e[regno].mode)
861 {
862 if (REG_NREGS (src)
863 > hard_regno_nregs (regno, vd->e[regno].mode))
864 goto no_move_special_case;
865
866 /* Likewise, if we are narrowing on big endian, the transformation
867 is also invalid. */
868 if (REG_NREGS (src) < hard_regno_nregs (regno, vd->e[regno].mode)
869 && subreg_lowpart_offset (mode, vd->e[regno].mode) != 0)
870 goto no_move_special_case;
871 }
872
873 /* If the destination is also a register, try to find a source
874 register in the same class. */
875 if (REG_P (SET_DEST (set)))
876 {
877 new_rtx = find_oldest_value_reg (REGNO_REG_CLASS (regno),
878 src, vd);
879
880 if (new_rtx && validate_change (insn, &SET_SRC (set), new_rtx, 0))
881 {
882 if (dump_file)
883 fprintf (dump_file,
884 "insn %u: replaced reg %u with %u\n",
885 INSN_UID (insn), regno, REGNO (new_rtx));
886 changed = true;
887 goto did_replacement;
888 }
889 /* We need to re-extract as validate_change clobbers
890 recog_data. */
891 extract_constrain_insn (insn);
892 preprocess_constraints (insn);
893 }
894
895 /* Otherwise, try each older register in the value chain and see if the replacement is valid. */
896 for (i = vd->e[regno].oldest_regno; i != regno;
897 i = vd->e[i].next_regno)
898 {
899 new_rtx = maybe_mode_change (vd->e[i].mode, vd->e[regno].mode,
900 mode, i, regno);
901 if (new_rtx != NULL_RTX)
902 {
903 if (validate_change (insn, &SET_SRC (set), new_rtx, 0))
904 {
905 ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (src);
906 REG_ATTRS (new_rtx) = REG_ATTRS (src);
907 REG_POINTER (new_rtx) = REG_POINTER (src);
908 if (dump_file)
909 fprintf (dump_file,
910 "insn %u: replaced reg %u with %u\n",
911 INSN_UID (insn), regno, REGNO (new_rtx));
912 changed = true;
913 goto did_replacement;
914 }
915 /* We need to re-extract as validate_change clobbers
916 recog_data. */
917 extract_constrain_insn (insn);
918 preprocess_constraints (insn);
919 }
920 }
921 }
922 no_move_special_case:
923
924 any_replacements = false;
925
926 /* For each input operand, replace a hard register with the
927 eldest live copy that's in an appropriate register class. */
928 for (i = 0; i < n_ops; i++)
929 {
930 replaced[i] = false;
931
932 /* Don't scan match_operand here, since we've no reg class
933 information to pass down. Any operands that we could
934 substitute in will be represented elsewhere. */
935 if (recog_data.constraints[i][0] == '\0')
936 continue;
937
938 /* Don't replace in asms intentionally referencing hard regs. */
939 if (is_asm && REG_P (recog_data.operand[i])
940 && (REGNO (recog_data.operand[i])
941 == ORIGINAL_REGNO (recog_data.operand[i])))
942 continue;
943
944 if (recog_data.operand_type[i] == OP_IN)
945 {
946 if (op_alt[i].is_address)
947 replaced[i]
948 = replace_oldest_value_addr (recog_data.operand_loc[i],
949 alternative_class (op_alt, i),
950 VOIDmode, ADDR_SPACE_GENERIC,
951 insn, vd);
952 else if (REG_P (recog_data.operand[i]))
953 replaced[i]
954 = replace_oldest_value_reg (recog_data.operand_loc[i],
955 alternative_class (op_alt, i),
956 insn, vd);
957 else if (MEM_P (recog_data.operand[i]))
958 replaced[i] = replace_oldest_value_mem (recog_data.operand[i],
959 insn, vd);
960 }
961 else if (MEM_P (recog_data.operand[i]))
962 replaced[i] = replace_oldest_value_mem (recog_data.operand[i],
963 insn, vd);
964
965 /* If we performed any replacement, update match_dups. */
966 if (replaced[i])
967 {
968 int j;
969 rtx new_rtx;
970
971 new_rtx = *recog_data.operand_loc[i];
972 recog_data.operand[i] = new_rtx;
973 for (j = 0; j < recog_data.n_dups; j++)
974 if (recog_data.dup_num[j] == i)
975 validate_unshare_change (insn, recog_data.dup_loc[j], new_rtx, 1);
976
977 any_replacements = true;
978 }
979 }
980
981 if (any_replacements)
982 {
983 if (! apply_change_group ())
984 {
985 for (i = 0; i < n_ops; i++)
986 if (replaced[i])
987 {
988 rtx old = *recog_data.operand_loc[i];
989 recog_data.operand[i] = old;
990 }
991
992 if (dump_file)
993 fprintf (dump_file,
994 "insn %u: reg replacements not verified\n",
995 INSN_UID (insn));
996 }
997 else
998 changed = true;
999 }
1000
1001 did_replacement:
1002 if (changed)
1003 {
1004 anything_changed = true;
1005
1006 /* If something changed, perhaps further changes to earlier
1007 DEBUG_INSNs can be applied. */
1008 if (vd->n_debug_insn_changes)
1009 note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
1010 }
1011
1012 ksvd.vd = vd;
1013 ksvd.ignore_set_reg = NULL_RTX;
1014
1015 /* Clobber call-clobbered registers. */
1016 if (CALL_P (insn))
1017 {
1018 unsigned int set_regno = INVALID_REGNUM;
1019 unsigned int set_nregs = 0;
1020 unsigned int regno;
1021 rtx exp;
1022 HARD_REG_SET regs_invalidated_by_this_call;
1023
1024 for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
1025 {
1026 rtx x = XEXP (exp, 0);
1027 if (GET_CODE (x) == SET)
1028 {
1029 rtx dest = SET_DEST (x);
1030 kill_value (dest, vd);
1031 set_value_regno (REGNO (dest), GET_MODE (dest), vd);
1032 copy_value (dest, SET_SRC (x), vd);
1033 ksvd.ignore_set_reg = dest;
1034 set_regno = REGNO (dest);
1035 set_nregs = REG_NREGS (dest);
1036 break;
1037 }
1038 }
1039
1040 get_call_reg_set_usage (insn,
1041 &regs_invalidated_by_this_call,
1042 regs_invalidated_by_call);
1043 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1044 if ((TEST_HARD_REG_BIT (regs_invalidated_by_this_call, regno)
1045 || (targetm.hard_regno_call_part_clobbered
1046 (regno, vd->e[regno].mode)))
1047 && (regno < set_regno || regno >= set_regno + set_nregs))
1048 kill_value_regno (regno, 1, vd);
1049
1050 /* If SET was seen in CALL_INSN_FUNCTION_USAGE, and SET_SRC
1051 of the SET isn't in regs_invalidated_by_call hard reg set,
1052 but instead among CLOBBERs on the CALL_INSN, we could wrongly
1053 assume the value in it is still live. */
1054 if (ksvd.ignore_set_reg)
1055 kill_clobbered_values (insn, vd);
1056 }
1057
1058 bool copy_p = (set
1059 && REG_P (SET_DEST (set))
1060 && REG_P (SET_SRC (set)));
1061 bool noop_p = (copy_p
1062 && rtx_equal_p (SET_DEST (set), SET_SRC (set)));
1063
1064 /* If a noop move is using a narrower mode than we have recorded,
1065 we need to either remove the noop move, or kill_set_value. */
1066 if (noop_p
1067 && partial_subreg_p (GET_MODE (SET_DEST (set)),
1068 vd->e[REGNO (SET_DEST (set))].mode))
1069 {
1070 if (noop_move_p (insn))
1071 {
1072 bool last = insn == BB_END (bb);
1073 delete_insn (insn);
1074 if (last)
1075 break;
1076 }
1077 else
1078 noop_p = false;
1079 }
1080
1081 if (!noop_p)
1082 {
1083 /* Notice stores. */
1084 note_stores (PATTERN (insn), kill_set_value, &ksvd);
1085
1086 /* Notice copies. */
1087 if (copy_p)
1088 copy_value (SET_DEST (set), SET_SRC (set), vd);
1089 }
1090
1091 if (insn == BB_END (bb))
1092 break;
1093 }
1094
1095 return anything_changed;
1096 }
1097
1098 /* Dump the value chain data to stderr. */
1099
1100 DEBUG_FUNCTION void
1101 debug_value_data (struct value_data *vd)
1102 {
1103 HARD_REG_SET set;
1104 unsigned int i, j;
1105
1106 CLEAR_HARD_REG_SET (set);
1107
1108 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1109 if (vd->e[i].oldest_regno == i)
1110 {
1111 if (vd->e[i].mode == VOIDmode)
1112 {
1113 if (vd->e[i].next_regno != INVALID_REGNUM)
1114 fprintf (stderr, "[%u] Bad next_regno for empty chain (%u)\n",
1115 i, vd->e[i].next_regno);
1116 continue;
1117 }
1118
1119 SET_HARD_REG_BIT (set, i);
1120 fprintf (stderr, "[%u %s] ", i, GET_MODE_NAME (vd->e[i].mode));
1121
1122 for (j = vd->e[i].next_regno;
1123 j != INVALID_REGNUM;
1124 j = vd->e[j].next_regno)
1125 {
1126 if (TEST_HARD_REG_BIT (set, j))
1127 {
1128 fprintf (stderr, "[%u] Loop in regno chain\n", j);
1129 return;
1130 }
1131
1132 if (vd->e[j].oldest_regno != i)
1133 {
1134 fprintf (stderr, "[%u] Bad oldest_regno (%u)\n",
1135 j, vd->e[j].oldest_regno);
1136 return;
1137 }
1138 SET_HARD_REG_BIT (set, j);
1139 fprintf (stderr, "[%u %s] ", j, GET_MODE_NAME (vd->e[j].mode));
1140 }
1141 fputc ('\n', stderr);
1142 }
1143
1144 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1145 if (! TEST_HARD_REG_BIT (set, i)
1146 && (vd->e[i].mode != VOIDmode
1147 || vd->e[i].oldest_regno != i
1148 || vd->e[i].next_regno != INVALID_REGNUM))
1149 fprintf (stderr, "[%u] Non-empty reg in chain (%s %u %i)\n",
1150 i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1151 vd->e[i].next_regno);
1152 }
1153
1154 /* Do copyprop_hardreg_forward_1 for a single basic block BB.
1155 DEBUG_INSNs are skipped, since we do not want to involve the DF-related
1156 machinery that pass_cprop_hardreg::execute uses to handle them.
1157
1158 NOTE: Currently it is only used for shrink-wrap. Maybe extend it
1159 to handle DEBUG_INSNs for other uses. */
1160
1161 void
1162 copyprop_hardreg_forward_bb_without_debug_insn (basic_block bb)
1163 {
1164 struct value_data *vd;
1165 vd = XNEWVEC (struct value_data, 1);
1166 init_value_data (vd);
1167
1168 skip_debug_insn_p = true;
1169 copyprop_hardreg_forward_1 (bb, vd);
1170 free (vd);
1171 skip_debug_insn_p = false;
1172 }
1173
1174 static void
1175 validate_value_data (struct value_data *vd)
1176 {
1177 HARD_REG_SET set;
1178 unsigned int i, j;
1179
1180 CLEAR_HARD_REG_SET (set);
1181
1182 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1183 if (vd->e[i].oldest_regno == i)
1184 {
1185 if (vd->e[i].mode == VOIDmode)
1186 {
1187 if (vd->e[i].next_regno != INVALID_REGNUM)
1188 internal_error ("validate_value_data: [%u] Bad next_regno for empty chain (%u)",
1189 i, vd->e[i].next_regno);
1190 continue;
1191 }
1192
1193 SET_HARD_REG_BIT (set, i);
1194
1195 for (j = vd->e[i].next_regno;
1196 j != INVALID_REGNUM;
1197 j = vd->e[j].next_regno)
1198 {
1199 if (TEST_HARD_REG_BIT (set, j))
1200 internal_error ("validate_value_data: Loop in regno chain (%u)",
1201 j);
1202 if (vd->e[j].oldest_regno != i)
1203 internal_error ("validate_value_data: [%u] Bad oldest_regno (%u)",
1204 j, vd->e[j].oldest_regno);
1205
1206 SET_HARD_REG_BIT (set, j);
1207 }
1208 }
1209
1210 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1211 if (! TEST_HARD_REG_BIT (set, i)
1212 && (vd->e[i].mode != VOIDmode
1213 || vd->e[i].oldest_regno != i
1214 || vd->e[i].next_regno != INVALID_REGNUM))
1215 internal_error ("validate_value_data: [%u] Non-empty reg in chain (%s %u %i)",
1216 i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1217 vd->e[i].next_regno);
1218 }
1219
1220 \f
1221 namespace {
1222
1223 const pass_data pass_data_cprop_hardreg =
1224 {
1225 RTL_PASS, /* type */
1226 "cprop_hardreg", /* name */
1227 OPTGROUP_NONE, /* optinfo_flags */
1228 TV_CPROP_REGISTERS, /* tv_id */
1229 0, /* properties_required */
1230 0, /* properties_provided */
1231 0, /* properties_destroyed */
1232 0, /* todo_flags_start */
1233 TODO_df_finish, /* todo_flags_finish */
1234 };
1235
1236 class pass_cprop_hardreg : public rtl_opt_pass
1237 {
1238 public:
1239 pass_cprop_hardreg (gcc::context *ctxt)
1240 : rtl_opt_pass (pass_data_cprop_hardreg, ctxt)
1241 {}
1242
1243 /* opt_pass methods: */
1244 virtual bool gate (function *)
1245 {
1246 return (optimize > 0 && (flag_cprop_registers));
1247 }
1248
1249 virtual unsigned int execute (function *);
1250
1251 }; // class pass_cprop_hardreg
1252
1253 unsigned int
1254 pass_cprop_hardreg::execute (function *fun)
1255 {
1256 struct value_data *all_vd;
1257 basic_block bb;
1258 bool analyze_called = false;
1259
1260 all_vd = XNEWVEC (struct value_data, last_basic_block_for_fn (fun));
1261
1262 auto_sbitmap visited (last_basic_block_for_fn (fun));
1263 bitmap_clear (visited);
1264
1265 FOR_EACH_BB_FN (bb, fun)
1266 {
1267 bitmap_set_bit (visited, bb->index);
1268
1269 /* If a block has a single predecessor that we've already
1270 processed, begin with the value data that was live at
1271 the end of the predecessor block. */
1272 /* ??? Ought to use more intelligent queuing of blocks. */
1273 if (single_pred_p (bb)
1274 && bitmap_bit_p (visited, single_pred (bb)->index)
1275 && ! (single_pred_edge (bb)->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
1276 {
1277 all_vd[bb->index] = all_vd[single_pred (bb)->index];
1278 if (all_vd[bb->index].n_debug_insn_changes)
1279 {
1280 unsigned int regno;
1281
1282 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1283 {
1284 if (all_vd[bb->index].e[regno].debug_insn_changes)
1285 {
1286 all_vd[bb->index].e[regno].debug_insn_changes = NULL;
1287 if (--all_vd[bb->index].n_debug_insn_changes == 0)
1288 break;
1289 }
1290 }
1291 }
1292 }
1293 else
1294 init_value_data (all_vd + bb->index);
1295
1296 copyprop_hardreg_forward_1 (bb, all_vd + bb->index);
1297 }
1298
1299 if (MAY_HAVE_DEBUG_INSNS)
1300 {
1301 FOR_EACH_BB_FN (bb, fun)
1302 if (bitmap_bit_p (visited, bb->index)
1303 && all_vd[bb->index].n_debug_insn_changes)
1304 {
1305 unsigned int regno;
1306 bitmap live;
1307
1308 if (!analyze_called)
1309 {
1310 df_analyze ();
1311 analyze_called = true;
1312 }
1313 live = df_get_live_out (bb);
1314 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1315 if (all_vd[bb->index].e[regno].debug_insn_changes)
1316 {
1317 if (REGNO_REG_SET_P (live, regno))
1318 apply_debug_insn_changes (all_vd + bb->index, regno);
1319 if (all_vd[bb->index].n_debug_insn_changes == 0)
1320 break;
1321 }
1322 }
1323
1324 queued_debug_insn_change_pool.release ();
1325 }
1326
1327 free (all_vd);
1328 return 0;
1329 }
1330
1331 } // anon namespace
1332
1333 rtl_opt_pass *
1334 make_pass_cprop_hardreg (gcc::context *ctxt)
1335 {
1336 return new pass_cprop_hardreg (ctxt);
1337 }