1 /* Copy propagation on hard registers for the GNU compiler.
2 Copyright (C) 2000-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "df.h"
26 #include "memmodel.h"
27 #include "tm_p.h"
28 #include "insn-config.h"
29 #include "regs.h"
30 #include "emit-rtl.h"
31 #include "recog.h"
32 #include "diagnostic-core.h"
33 #include "addresses.h"
34 #include "tree-pass.h"
35 #include "rtl-iter.h"
36 #include "cfgrtl.h"
37 #include "target.h"
38
39 /* The following code does forward propagation of hard register copies.
40 The object is to eliminate as many dependencies as possible, so that
41 we have the most scheduling freedom. As a side effect, we also clean
42 up some silly register allocation decisions made by reload. This
43 code may be obsoleted by a new register allocator. */
44
45 /* DEBUG_INSNs aren't changed right away, as doing so might extend the
46 lifetime of a register and get the DEBUG_INSN subsequently reset.
47 So they are queued instead, and updated only when the register is
48 used in some subsequent real insn before it is set. */
49 struct queued_debug_insn_change
50 {
51 struct queued_debug_insn_change *next;
52 rtx_insn *insn;
53 rtx *loc;
54 rtx new_rtx;
55 };
56
57 /* For each register, we have a list of registers that contain the same
58 value. The OLDEST_REGNO field points to the head of the list, and
59 the NEXT_REGNO field runs through the list. The MODE field indicates
60 what mode the data is known to be in; this field is VOIDmode when the
61 register is not known to contain valid data. */
62
63 struct value_data_entry
64 {
65 machine_mode mode;
66 unsigned int oldest_regno;
67 unsigned int next_regno;
68 struct queued_debug_insn_change *debug_insn_changes;
69 };
70
71 struct value_data
72 {
73 struct value_data_entry e[FIRST_PSEUDO_REGISTER];
74 unsigned int max_value_regs;
75 unsigned int n_debug_insn_changes;
76 };
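/* As a concrete (hypothetical) illustration of these chains: after
   processing
     (set (reg:SI 3) (reg:SI 1))
     (set (reg:SI 4) (reg:SI 3))
   copy_value below links all three registers into one list headed by reg 1:
     e[1].oldest_regno = e[3].oldest_regno = e[4].oldest_regno = 1
     e[1].next_regno = 3, e[3].next_regno = 4, e[4].next_regno = INVALID_REGNUM
   with all three MODE fields set to SImode.  The register numbers are
   illustrative only.  */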
77
78 static object_allocator<queued_debug_insn_change> queued_debug_insn_change_pool
79 ("debug insn changes pool");
80
81 static bool skip_debug_insn_p;
82
83 static void kill_value_one_regno (unsigned, struct value_data *);
84 static void kill_value_regno (unsigned, unsigned, struct value_data *);
85 static void kill_value (const_rtx, struct value_data *);
86 static void set_value_regno (unsigned, machine_mode, struct value_data *);
87 static void init_value_data (struct value_data *);
88 static void kill_clobbered_value (rtx, const_rtx, void *);
89 static void kill_set_value (rtx, const_rtx, void *);
90 static void copy_value (rtx, rtx, struct value_data *);
91 static bool mode_change_ok (machine_mode, machine_mode,
92 unsigned int);
93 static rtx maybe_mode_change (machine_mode, machine_mode,
94 machine_mode, unsigned int, unsigned int);
95 static rtx find_oldest_value_reg (enum reg_class, rtx, struct value_data *);
96 static bool replace_oldest_value_reg (rtx *, enum reg_class, rtx_insn *,
97 struct value_data *);
98 static bool replace_oldest_value_addr (rtx *, enum reg_class,
99 machine_mode, addr_space_t,
100 rtx_insn *, struct value_data *);
101 static bool replace_oldest_value_mem (rtx, rtx_insn *, struct value_data *);
102 static bool copyprop_hardreg_forward_1 (basic_block, struct value_data *);
103 extern void debug_value_data (struct value_data *);
104 static void validate_value_data (struct value_data *);
105
106 /* Free all queued updates for DEBUG_INSNs that change some reg to
107 register REGNO. */
108
109 static void
110 free_debug_insn_changes (struct value_data *vd, unsigned int regno)
111 {
112 struct queued_debug_insn_change *cur, *next;
113 for (cur = vd->e[regno].debug_insn_changes; cur; cur = next)
114 {
115 next = cur->next;
116 --vd->n_debug_insn_changes;
117 queued_debug_insn_change_pool.remove (cur);
118 }
119 vd->e[regno].debug_insn_changes = NULL;
120 }
121
122 /* Kill register REGNO. This involves removing it from any value
123 lists, and resetting the value mode to VOIDmode. This is only a
124 helper function; it does not handle any hard registers overlapping
125 with REGNO. */
126
127 static void
128 kill_value_one_regno (unsigned int regno, struct value_data *vd)
129 {
130 unsigned int i, next;
131
132 if (vd->e[regno].oldest_regno != regno)
133 {
134 for (i = vd->e[regno].oldest_regno;
135 vd->e[i].next_regno != regno;
136 i = vd->e[i].next_regno)
137 continue;
138 vd->e[i].next_regno = vd->e[regno].next_regno;
139 }
140 else if ((next = vd->e[regno].next_regno) != INVALID_REGNUM)
141 {
142 for (i = next; i != INVALID_REGNUM; i = vd->e[i].next_regno)
143 vd->e[i].oldest_regno = next;
144 }
145
146 vd->e[regno].mode = VOIDmode;
147 vd->e[regno].oldest_regno = regno;
148 vd->e[regno].next_regno = INVALID_REGNUM;
149 if (vd->e[regno].debug_insn_changes)
150 free_debug_insn_changes (vd, regno);
151
152 if (flag_checking)
153 validate_value_data (vd);
154 }
155
156 /* Kill the value in register REGNO for NREGS, and any other registers
157 whose values overlap. */
158
159 static void
160 kill_value_regno (unsigned int regno, unsigned int nregs,
161 struct value_data *vd)
162 {
163 unsigned int j;
164
165 /* Kill the value we're told to kill. */
166 for (j = 0; j < nregs; ++j)
167 kill_value_one_regno (regno + j, vd);
168
169 /* Kill everything that overlapped what we're told to kill. */
170 if (regno < vd->max_value_regs)
171 j = 0;
172 else
173 j = regno - vd->max_value_regs;
174 for (; j < regno; ++j)
175 {
176 unsigned int i, n;
177 if (vd->e[j].mode == VOIDmode)
178 continue;
179 n = hard_regno_nregs[j][vd->e[j].mode];
180 if (j + n > regno)
181 for (i = 0; i < n; ++i)
182 kill_value_one_regno (j + i, vd);
183 }
184 }
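/* For example, on a hypothetical target where DImode spans two 32-bit hard
   regs: if regs 2-3 hold a DImode value and we are asked to kill reg 3, the
   backward scan above (bounded by max_value_regs) also finds the DImode
   entry rooted at reg 2 and kills both of its registers, since that value
   no longer fully exists once reg 3 is clobbered.  */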
185
186 /* Kill X. This is a convenience function wrapping kill_value_regno
187 so that we mind the mode the register is in. */
188
189 static void
190 kill_value (const_rtx x, struct value_data *vd)
191 {
192 if (GET_CODE (x) == SUBREG)
193 {
194 rtx tmp = simplify_subreg (GET_MODE (x), SUBREG_REG (x),
195 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
196 x = tmp ? tmp : SUBREG_REG (x);
197 }
198 if (REG_P (x))
199 kill_value_regno (REGNO (x), REG_NREGS (x), vd);
200 }
201
202 /* Remember that REGNO is valid in MODE. */
203
204 static void
205 set_value_regno (unsigned int regno, machine_mode mode,
206 struct value_data *vd)
207 {
208 unsigned int nregs;
209
210 vd->e[regno].mode = mode;
211
212 nregs = hard_regno_nregs[regno][mode];
213 if (nregs > vd->max_value_regs)
214 vd->max_value_regs = nregs;
215 }
216
217 /* Initialize VD such that there are no known relationships between regs. */
218
219 static void
220 init_value_data (struct value_data *vd)
221 {
222 int i;
223 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
224 {
225 vd->e[i].mode = VOIDmode;
226 vd->e[i].oldest_regno = i;
227 vd->e[i].next_regno = INVALID_REGNUM;
228 vd->e[i].debug_insn_changes = NULL;
229 }
230 vd->max_value_regs = 0;
231 vd->n_debug_insn_changes = 0;
232 }
233
234 /* Called through note_stores. If X is clobbered, kill its value. */
235
236 static void
237 kill_clobbered_value (rtx x, const_rtx set, void *data)
238 {
239 struct value_data *const vd = (struct value_data *) data;
240 if (GET_CODE (set) == CLOBBER)
241 kill_value (x, vd);
242 }
243
244 /* A structure passed as data to kill_set_value through note_stores. */
245 struct kill_set_value_data
246 {
247 struct value_data *vd;
248 rtx ignore_set_reg;
249 };
250
251 /* Called through note_stores. If X is set, not clobbered, kill its
252 current value and install it as the root of its own value list. */
253
254 static void
255 kill_set_value (rtx x, const_rtx set, void *data)
256 {
257 struct kill_set_value_data *ksvd = (struct kill_set_value_data *) data;
258 if (rtx_equal_p (x, ksvd->ignore_set_reg))
259 return;
260 if (GET_CODE (set) != CLOBBER)
261 {
262 kill_value (x, ksvd->vd);
263 if (REG_P (x))
264 set_value_regno (REGNO (x), GET_MODE (x), ksvd->vd);
265 }
266 }
267
268 /* Kill any register used in X as the base of an auto-increment expression,
269 and install that register as the root of its own value list. */
270
271 static void
272 kill_autoinc_value (rtx_insn *insn, struct value_data *vd)
273 {
274 subrtx_iterator::array_type array;
275 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
276 {
277 const_rtx x = *iter;
278 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
279 {
280 x = XEXP (x, 0);
281 kill_value (x, vd);
282 set_value_regno (REGNO (x), GET_MODE (x), vd);
283 iter.skip_subrtxes ();
284 }
285 }
286 }
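/* For instance, an insn containing (mem:SI (post_inc:SI (reg:SI 5)))
   modifies the base register as a side effect, so any equivalences recorded
   for reg 5 are killed and it becomes the root of its own SImode value list
   again.  (Register 5 is purely illustrative.)  */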
287
288 /* Assert that SRC has been copied to DEST. Adjust the data structures
289 to reflect that SRC contains an older copy of the shared value. */
290
291 static void
292 copy_value (rtx dest, rtx src, struct value_data *vd)
293 {
294 unsigned int dr = REGNO (dest);
295 unsigned int sr = REGNO (src);
296 unsigned int dn, sn;
297 unsigned int i;
298
299 /* ??? At present, it's possible to see noop sets. It'd be nice if
300 this were cleaned up beforehand... */
301 if (sr == dr)
302 return;
303
304 /* Do not propagate copies to the stack pointer, as that can leave
305 memory accesses with no scheduling dependency on the stack update. */
306 if (dr == STACK_POINTER_REGNUM)
307 return;
308
309 /* Likewise with the frame pointer, if we're using one. */
310 if (frame_pointer_needed && dr == HARD_FRAME_POINTER_REGNUM)
311 return;
312
313 /* Do not propagate copies to fixed or global registers: patterns
314 may rely on seeing a particular fixed register, and users may
315 expect the chosen global register in asm. */
316 if (fixed_regs[dr] || global_regs[dr])
317 return;
318
319 /* If SRC and DEST overlap, don't record anything. */
320 dn = REG_NREGS (dest);
321 sn = REG_NREGS (src);
322 if ((dr > sr && dr < sr + sn)
323 || (sr > dr && sr < dr + dn))
324 return;
325
326 /* If SRC had no assigned mode (i.e. we didn't know it was live)
327 assign it now and assume the value came from an input argument
328 or somesuch. */
329 if (vd->e[sr].mode == VOIDmode)
330 set_value_regno (sr, vd->e[dr].mode, vd);
331
332 /* If we are narrowing the input to a smaller number of hard regs,
333 and it is in big endian, we are really extracting a high part.
334 Since we generally associate a low part of a value with the value itself,
335 we must not do the same for the high part.
336 Note we can still get low parts for the same mode combination through
337 a two-step copy involving differently sized hard regs.
338 Assume hard regs fr* are 32 bits each, while r* are 64 bits each:
339 (set (reg:DI r0) (reg:DI fr0))
340 (set (reg:SI fr2) (reg:SI r0))
341 loads the low part of (reg:DI fr0) - i.e. fr1 - into fr2, while:
342 (set (reg:SI fr2) (reg:SI fr0))
343 loads the high part of (reg:DI fr0) into fr2.
344
345 We can't properly represent the latter case in our tables, so don't
346 record anything then. */
347 else if (sn < (unsigned int) hard_regno_nregs[sr][vd->e[sr].mode]
348 && (GET_MODE_SIZE (vd->e[sr].mode) > UNITS_PER_WORD
349 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
350 return;
351
352 /* If SRC had been assigned a mode narrower than the copy, we can't
353 link DEST into the chain, because not all of the pieces of the
354 copy came from oldest_regno. */
355 else if (sn > (unsigned int) hard_regno_nregs[sr][vd->e[sr].mode])
356 return;
357
358 /* Link DR at the end of the value chain used by SR. */
359
360 vd->e[dr].oldest_regno = vd->e[sr].oldest_regno;
361
362 for (i = sr; vd->e[i].next_regno != INVALID_REGNUM; i = vd->e[i].next_regno)
363 continue;
364 vd->e[i].next_regno = dr;
365
366 if (flag_checking)
367 validate_value_data (vd);
368 }
369
370 /* Return true if a mode change from ORIG to NEW is allowed for REGNO. */
371
372 static bool
373 mode_change_ok (machine_mode orig_mode, machine_mode new_mode,
374 unsigned int regno ATTRIBUTE_UNUSED)
375 {
376 if (partial_subreg_p (orig_mode, new_mode))
377 return false;
378
379 #ifdef CANNOT_CHANGE_MODE_CLASS
380 return !REG_CANNOT_CHANGE_MODE_P (regno, orig_mode, new_mode);
381 #endif
382
383 return true;
384 }
385
386 /* Register REGNO was originally set in ORIG_MODE. It - or a copy of it -
387 was copied in COPY_MODE to COPY_REGNO, and then COPY_REGNO was accessed
388 in NEW_MODE.
389 Return a NEW_MODE rtx for REGNO if that's OK, otherwise return NULL_RTX. */
390
391 static rtx
392 maybe_mode_change (machine_mode orig_mode, machine_mode copy_mode,
393 machine_mode new_mode, unsigned int regno,
394 unsigned int copy_regno ATTRIBUTE_UNUSED)
395 {
396 if (partial_subreg_p (copy_mode, orig_mode)
397 && partial_subreg_p (copy_mode, new_mode))
398 return NULL_RTX;
399
400 /* Avoid creating multiple copies of the stack pointer. Some ports
401 assume there is one and only one stack pointer.
402
403 It's unclear if we need to do the same for other special registers. */
404 if (regno == STACK_POINTER_REGNUM)
405 return NULL_RTX;
406
407 if (orig_mode == new_mode)
408 return gen_raw_REG (new_mode, regno);
409 else if (mode_change_ok (orig_mode, new_mode, regno))
410 {
411 int copy_nregs = hard_regno_nregs[copy_regno][copy_mode];
412 int use_nregs = hard_regno_nregs[copy_regno][new_mode];
413 int copy_offset
414 = GET_MODE_SIZE (copy_mode) / copy_nregs * (copy_nregs - use_nregs);
415 int offset
416 = GET_MODE_SIZE (orig_mode) - GET_MODE_SIZE (new_mode) - copy_offset;
417 int byteoffset = offset % UNITS_PER_WORD;
418 int wordoffset = offset - byteoffset;
419
420 offset = ((WORDS_BIG_ENDIAN ? wordoffset : 0)
421 + (BYTES_BIG_ENDIAN ? byteoffset : 0));
422 regno += subreg_regno_offset (regno, orig_mode, offset, new_mode);
423 if (targetm.hard_regno_mode_ok (regno, new_mode))
424 return gen_raw_REG (new_mode, regno);
425 }
426 return NULL_RTX;
427 }
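/* A worked example of the offset arithmetic above, assuming a hypothetical
   big-endian target with 4-byte words and 32-bit hard registers:
   (reg:DI 10) lives in regs 10-11 and was copied in DImode to regs 20-21,
   which are then accessed as (reg:SI 20).  Here copy_nregs = 2,
   use_nregs = 1, copy_offset = 8/2 * (2-1) = 4 and offset = 8 - 4 - 4 = 0,
   so subreg_regno_offset maps the access to reg 10 and we return
   (reg:SI 10): the SImode read of the copy's high word is redirected to
   the original's high word.  */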
428
429 /* Find the oldest copy of the value contained in REGNO that is in
430 register class CL and has mode MODE. If found, return an rtx
431 of that oldest register, otherwise return NULL. */
432
433 static rtx
434 find_oldest_value_reg (enum reg_class cl, rtx reg, struct value_data *vd)
435 {
436 unsigned int regno = REGNO (reg);
437 machine_mode mode = GET_MODE (reg);
438 unsigned int i;
439
440 /* If we are accessing REG in some mode other than what we set it in,
441 make sure that the replacement is valid. In particular, consider
442 (set (reg:DI r11) (...))
443 (set (reg:SI r9) (reg:SI r11))
444 (set (reg:SI r10) (...))
445 (set (...) (reg:DI r9))
446 Replacing r9 with r11 is invalid. */
447 if (mode != vd->e[regno].mode)
448 {
449 if (hard_regno_nregs[regno][mode]
450 > hard_regno_nregs[regno][vd->e[regno].mode])
451 return NULL_RTX;
452 }
453
454 for (i = vd->e[regno].oldest_regno; i != regno; i = vd->e[i].next_regno)
455 {
456 machine_mode oldmode = vd->e[i].mode;
457 rtx new_rtx;
458
459 if (!in_hard_reg_set_p (reg_class_contents[cl], mode, i))
460 continue;
461
462 new_rtx = maybe_mode_change (oldmode, vd->e[regno].mode, mode, i, regno);
463 if (new_rtx)
464 {
465 ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (reg);
466 REG_ATTRS (new_rtx) = REG_ATTRS (reg);
467 REG_POINTER (new_rtx) = REG_POINTER (reg);
468 return new_rtx;
469 }
470 }
471
472 return NULL_RTX;
473 }
474
475 /* If possible, replace the register at *LOC with the oldest register
476 in register class CL. Return true if successfully replaced. */
477
478 static bool
479 replace_oldest_value_reg (rtx *loc, enum reg_class cl, rtx_insn *insn,
480 struct value_data *vd)
481 {
482 rtx new_rtx = find_oldest_value_reg (cl, *loc, vd);
483 if (new_rtx && (!DEBUG_INSN_P (insn) || !skip_debug_insn_p))
484 {
485 if (DEBUG_INSN_P (insn))
486 {
487 struct queued_debug_insn_change *change;
488
489 if (dump_file)
490 fprintf (dump_file, "debug_insn %u: queued replacing reg %u with %u\n",
491 INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));
492
493 change = queued_debug_insn_change_pool.allocate ();
494 change->next = vd->e[REGNO (new_rtx)].debug_insn_changes;
495 change->insn = insn;
496 change->loc = loc;
497 change->new_rtx = new_rtx;
498 vd->e[REGNO (new_rtx)].debug_insn_changes = change;
499 ++vd->n_debug_insn_changes;
500 return true;
501 }
502 if (dump_file)
503 fprintf (dump_file, "insn %u: replaced reg %u with %u\n",
504 INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));
505
506 validate_change (insn, loc, new_rtx, 1);
507 return true;
508 }
509 return false;
510 }
511
512 /* Similar to replace_oldest_value_reg, but *LOC contains an address.
513 Adapted from find_reloads_address_1. CL is INDEX_REG_CLASS or
514 BASE_REG_CLASS depending on how the register is being considered. */
515
516 static bool
517 replace_oldest_value_addr (rtx *loc, enum reg_class cl,
518 machine_mode mode, addr_space_t as,
519 rtx_insn *insn, struct value_data *vd)
520 {
521 rtx x = *loc;
522 RTX_CODE code = GET_CODE (x);
523 const char *fmt;
524 int i, j;
525 bool changed = false;
526
527 switch (code)
528 {
529 case PLUS:
530 if (DEBUG_INSN_P (insn))
531 break;
532
533 {
534 rtx orig_op0 = XEXP (x, 0);
535 rtx orig_op1 = XEXP (x, 1);
536 RTX_CODE code0 = GET_CODE (orig_op0);
537 RTX_CODE code1 = GET_CODE (orig_op1);
538 rtx op0 = orig_op0;
539 rtx op1 = orig_op1;
540 rtx *locI = NULL;
541 rtx *locB = NULL;
542 enum rtx_code index_code = SCRATCH;
543
544 if (GET_CODE (op0) == SUBREG)
545 {
546 op0 = SUBREG_REG (op0);
547 code0 = GET_CODE (op0);
548 }
549
550 if (GET_CODE (op1) == SUBREG)
551 {
552 op1 = SUBREG_REG (op1);
553 code1 = GET_CODE (op1);
554 }
555
556 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
557 || code0 == ZERO_EXTEND || code1 == MEM)
558 {
559 locI = &XEXP (x, 0);
560 locB = &XEXP (x, 1);
561 index_code = GET_CODE (*locI);
562 }
563 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
564 || code1 == ZERO_EXTEND || code0 == MEM)
565 {
566 locI = &XEXP (x, 1);
567 locB = &XEXP (x, 0);
568 index_code = GET_CODE (*locI);
569 }
570 else if (code0 == CONST_INT || code0 == CONST
571 || code0 == SYMBOL_REF || code0 == LABEL_REF)
572 {
573 locB = &XEXP (x, 1);
574 index_code = GET_CODE (XEXP (x, 0));
575 }
576 else if (code1 == CONST_INT || code1 == CONST
577 || code1 == SYMBOL_REF || code1 == LABEL_REF)
578 {
579 locB = &XEXP (x, 0);
580 index_code = GET_CODE (XEXP (x, 1));
581 }
582 else if (code0 == REG && code1 == REG)
583 {
584 int index_op;
585 unsigned regno0 = REGNO (op0), regno1 = REGNO (op1);
586
587 if (REGNO_OK_FOR_INDEX_P (regno1)
588 && regno_ok_for_base_p (regno0, mode, as, PLUS, REG))
589 index_op = 1;
590 else if (REGNO_OK_FOR_INDEX_P (regno0)
591 && regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
592 index_op = 0;
593 else if (regno_ok_for_base_p (regno0, mode, as, PLUS, REG)
594 || REGNO_OK_FOR_INDEX_P (regno1))
595 index_op = 1;
596 else if (regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
597 index_op = 0;
598 else
599 index_op = 1;
600
601 locI = &XEXP (x, index_op);
602 locB = &XEXP (x, !index_op);
603 index_code = GET_CODE (*locI);
604 }
605 else if (code0 == REG)
606 {
607 locI = &XEXP (x, 0);
608 locB = &XEXP (x, 1);
609 index_code = GET_CODE (*locI);
610 }
611 else if (code1 == REG)
612 {
613 locI = &XEXP (x, 1);
614 locB = &XEXP (x, 0);
615 index_code = GET_CODE (*locI);
616 }
617
618 if (locI)
619 changed |= replace_oldest_value_addr (locI, INDEX_REG_CLASS,
620 mode, as, insn, vd);
621 if (locB)
622 changed |= replace_oldest_value_addr (locB,
623 base_reg_class (mode, as, PLUS,
624 index_code),
625 mode, as, insn, vd);
626 return changed;
627 }
628
629 case POST_INC:
630 case POST_DEC:
631 case POST_MODIFY:
632 case PRE_INC:
633 case PRE_DEC:
634 case PRE_MODIFY:
635 return false;
636
637 case MEM:
638 return replace_oldest_value_mem (x, insn, vd);
639
640 case REG:
641 return replace_oldest_value_reg (loc, cl, insn, vd);
642
643 default:
644 break;
645 }
646
647 fmt = GET_RTX_FORMAT (code);
648 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
649 {
650 if (fmt[i] == 'e')
651 changed |= replace_oldest_value_addr (&XEXP (x, i), cl, mode, as,
652 insn, vd);
653 else if (fmt[i] == 'E')
654 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
655 changed |= replace_oldest_value_addr (&XVECEXP (x, i, j), cl,
656 mode, as, insn, vd);
657 }
658
659 return changed;
660 }
661
662 /* Similar to replace_oldest_value_reg, but X contains a memory. */
663
664 static bool
665 replace_oldest_value_mem (rtx x, rtx_insn *insn, struct value_data *vd)
666 {
667 enum reg_class cl;
668
669 if (DEBUG_INSN_P (insn))
670 cl = ALL_REGS;
671 else
672 cl = base_reg_class (GET_MODE (x), MEM_ADDR_SPACE (x), MEM, SCRATCH);
673
674 return replace_oldest_value_addr (&XEXP (x, 0), cl,
675 GET_MODE (x), MEM_ADDR_SPACE (x),
676 insn, vd);
677 }
678
679 /* Apply all queued updates for DEBUG_INSNs that change some reg to
680 register REGNO. */
681
682 static void
683 apply_debug_insn_changes (struct value_data *vd, unsigned int regno)
684 {
685 struct queued_debug_insn_change *change;
686 rtx_insn *last_insn = vd->e[regno].debug_insn_changes->insn;
687
688 for (change = vd->e[regno].debug_insn_changes;
689 change;
690 change = change->next)
691 {
692 if (last_insn != change->insn)
693 {
694 apply_change_group ();
695 last_insn = change->insn;
696 }
697 validate_change (change->insn, change->loc, change->new_rtx, 1);
698 }
699 apply_change_group ();
700 }
701
702 /* Called via note_uses.  For every register used in a real insn,
703 apply any queued DEBUG_INSN changes that change some register
704 to that used register. */
705
706 static void
707 cprop_find_used_regs (rtx *loc, void *data)
708 {
709 struct value_data *const vd = (struct value_data *) data;
710 subrtx_iterator::array_type array;
711 FOR_EACH_SUBRTX (iter, array, *loc, NONCONST)
712 {
713 const_rtx x = *iter;
714 if (REG_P (x))
715 {
716 unsigned int regno = REGNO (x);
717 if (vd->e[regno].debug_insn_changes)
718 {
719 apply_debug_insn_changes (vd, regno);
720 free_debug_insn_changes (vd, regno);
721 }
722 }
723 }
724 }
725
726 /* Apply clobbers of INSN in PATTERN and CALL_INSN_FUNCTION_USAGE to value_data VD. */
727
728 static void
729 kill_clobbered_values (rtx_insn *insn, struct value_data *vd)
730 {
731 note_stores (PATTERN (insn), kill_clobbered_value, vd);
732
733 if (CALL_P (insn))
734 {
735 rtx exp;
736
737 for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
738 {
739 rtx x = XEXP (exp, 0);
740 if (GET_CODE (x) == CLOBBER)
741 kill_value (SET_DEST (x), vd);
742 }
743 }
744 }
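/* For example, if CALL_INSN_FUNCTION_USAGE of a call contains
   (clobber (reg:SI 28)) (a hypothetical register), the loop above kills any
   value recorded for reg 28, just as a CLOBBER in the insn pattern itself
   would via note_stores.  */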
745
746 /* Perform the forward copy propagation on basic block BB. */
747
748 static bool
749 copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd)
750 {
751 bool anything_changed = false;
752 rtx_insn *insn, *next;
753
754 for (insn = BB_HEAD (bb); ; insn = next)
755 {
756 int n_ops, i, predicated;
757 bool is_asm, any_replacements;
758 rtx set;
759 rtx link;
760 bool replaced[MAX_RECOG_OPERANDS];
761 bool changed = false;
762 struct kill_set_value_data ksvd;
763
764 next = NEXT_INSN (insn);
765 if (!NONDEBUG_INSN_P (insn))
766 {
767 if (DEBUG_INSN_P (insn))
768 {
769 rtx loc = INSN_VAR_LOCATION_LOC (insn);
770 if (!VAR_LOC_UNKNOWN_P (loc))
771 replace_oldest_value_addr (&INSN_VAR_LOCATION_LOC (insn),
772 ALL_REGS, GET_MODE (loc),
773 ADDR_SPACE_GENERIC, insn, vd);
774 }
775
776 if (insn == BB_END (bb))
777 break;
778 else
779 continue;
780 }
781
782 set = single_set (insn);
783
784 /* Detect noop sets and remove them before processing side effects. */
785 if (set && REG_P (SET_DEST (set)) && REG_P (SET_SRC (set)))
786 {
787 unsigned int regno = REGNO (SET_SRC (set));
788 rtx r1 = find_oldest_value_reg (REGNO_REG_CLASS (regno),
789 SET_DEST (set), vd);
790 rtx r2 = find_oldest_value_reg (REGNO_REG_CLASS (regno),
791 SET_SRC (set), vd);
792 if (rtx_equal_p (r1 ? r1 : SET_DEST (set), r2 ? r2 : SET_SRC (set)))
793 {
794 bool last = insn == BB_END (bb);
795 delete_insn (insn);
796 if (last)
797 break;
798 continue;
799 }
800 }
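/* As an illustration with hypothetical registers: after
   (set (reg:SI 3) (reg:SI 1)), a later (set (reg:SI 1) (reg:SI 3)) is
   detected as a noop here, because find_oldest_value_reg resolves both
   sides to (reg:SI 1), and the insn is deleted.  */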
801
802 extract_constrain_insn (insn);
803 preprocess_constraints (insn);
804 const operand_alternative *op_alt = which_op_alt ();
805 n_ops = recog_data.n_operands;
806 is_asm = asm_noperands (PATTERN (insn)) >= 0;
807
808 /* Simplify the code below by promoting OP_OUT to OP_INOUT
809 in predicated instructions. */
810
811 predicated = GET_CODE (PATTERN (insn)) == COND_EXEC;
812 for (i = 0; i < n_ops; ++i)
813 {
814 int matches = op_alt[i].matches;
815 if (matches >= 0 || op_alt[i].matched >= 0
816 || (predicated && recog_data.operand_type[i] == OP_OUT))
817 recog_data.operand_type[i] = OP_INOUT;
818 }
819
820 /* Apply changes to earlier DEBUG_INSNs if possible. */
821 if (vd->n_debug_insn_changes)
822 note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
823
824 /* For each earlyclobber operand, zap the value data. */
825 for (i = 0; i < n_ops; i++)
826 if (op_alt[i].earlyclobber)
827 kill_value (recog_data.operand[i], vd);
828
829 /* Within asms, a clobber cannot overlap inputs or outputs.
830 I wouldn't think this were true for regular insns, but
831 scan_rtx treats them like that... */
832 kill_clobbered_values (insn, vd);
833
834 /* Kill all auto-incremented values. */
835 /* ??? REG_INC is useless, since stack pushes aren't done that way. */
836 kill_autoinc_value (insn, vd);
837
838 /* Kill all early-clobbered operands. */
839 for (i = 0; i < n_ops; i++)
840 if (op_alt[i].earlyclobber)
841 kill_value (recog_data.operand[i], vd);
842
843 /* If we have dead sets in the insn, then we need to note these as we
844 would note clobbers. */
845 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
846 {
847 if (REG_NOTE_KIND (link) == REG_UNUSED)
848 {
849 kill_value (XEXP (link, 0), vd);
850 /* Furthermore, if the insn looked like a single-set,
851 but the dead store kills the source value of that
852 set, then we can no longer use the plain move
853 special case below. */
854 if (set
855 && reg_overlap_mentioned_p (XEXP (link, 0), SET_SRC (set)))
856 set = NULL;
857 }
858 }
859
860 /* Special-case plain move instructions, since we may well
861 be able to do the move from a different register class. */
862 if (set && REG_P (SET_SRC (set)))
863 {
864 rtx src = SET_SRC (set);
865 unsigned int regno = REGNO (src);
866 machine_mode mode = GET_MODE (src);
867 unsigned int i;
868 rtx new_rtx;
869
870 /* If we are accessing SRC in some mode other than what we
871 set it in, make sure that the replacement is valid. */
872 if (mode != vd->e[regno].mode)
873 {
874 if (hard_regno_nregs[regno][mode]
875 > hard_regno_nregs[regno][vd->e[regno].mode])
876 goto no_move_special_case;
877
878 /* Likewise, if we are narrowing on big endian, the transformation
879 is also invalid. */
880 if (hard_regno_nregs[regno][mode]
881 < hard_regno_nregs[regno][vd->e[regno].mode]
882 && (GET_MODE_SIZE (vd->e[regno].mode) > UNITS_PER_WORD
883 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
884 goto no_move_special_case;
885 }
886
887 /* If the destination is also a register, try to find a source
888 register in the same class. */
889 if (REG_P (SET_DEST (set)))
890 {
891 new_rtx = find_oldest_value_reg (REGNO_REG_CLASS (regno),
892 src, vd);
893
894 if (new_rtx && validate_change (insn, &SET_SRC (set), new_rtx, 0))
895 {
896 if (dump_file)
897 fprintf (dump_file,
898 "insn %u: replaced reg %u with %u\n",
899 INSN_UID (insn), regno, REGNO (new_rtx));
900 changed = true;
901 goto did_replacement;
902 }
903 /* We need to re-extract as validate_change clobbers
904 recog_data. */
905 extract_constrain_insn (insn);
906 preprocess_constraints (insn);
907 }
908
909 /* Otherwise, try each register in the value chain and see if the change is valid. */
910 for (i = vd->e[regno].oldest_regno; i != regno;
911 i = vd->e[i].next_regno)
912 {
913 new_rtx = maybe_mode_change (vd->e[i].mode, vd->e[regno].mode,
914 mode, i, regno);
915 if (new_rtx != NULL_RTX)
916 {
917 if (validate_change (insn, &SET_SRC (set), new_rtx, 0))
918 {
919 ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (src);
920 REG_ATTRS (new_rtx) = REG_ATTRS (src);
921 REG_POINTER (new_rtx) = REG_POINTER (src);
922 if (dump_file)
923 fprintf (dump_file,
924 "insn %u: replaced reg %u with %u\n",
925 INSN_UID (insn), regno, REGNO (new_rtx));
926 changed = true;
927 goto did_replacement;
928 }
929 /* We need to re-extract as validate_change clobbers
930 recog_data. */
931 extract_constrain_insn (insn);
932 preprocess_constraints (insn);
933 }
934 }
935 }
936 no_move_special_case:
937
938 any_replacements = false;
939
940 /* For each input operand, replace a hard register with the
941 eldest live copy that's in an appropriate register class. */
942 for (i = 0; i < n_ops; i++)
943 {
944 replaced[i] = false;
945
946 /* Don't scan match_operand here, since we've no reg class
947 information to pass down. Any operands that we could
948 substitute in will be represented elsewhere. */
949 if (recog_data.constraints[i][0] == '\0')
950 continue;
951
952 /* Don't replace in asms intentionally referencing hard regs. */
953 if (is_asm && REG_P (recog_data.operand[i])
954 && (REGNO (recog_data.operand[i])
955 == ORIGINAL_REGNO (recog_data.operand[i])))
956 continue;
957
958 if (recog_data.operand_type[i] == OP_IN)
959 {
960 if (op_alt[i].is_address)
961 replaced[i]
962 = replace_oldest_value_addr (recog_data.operand_loc[i],
963 alternative_class (op_alt, i),
964 VOIDmode, ADDR_SPACE_GENERIC,
965 insn, vd);
966 else if (REG_P (recog_data.operand[i]))
967 replaced[i]
968 = replace_oldest_value_reg (recog_data.operand_loc[i],
969 alternative_class (op_alt, i),
970 insn, vd);
971 else if (MEM_P (recog_data.operand[i]))
972 replaced[i] = replace_oldest_value_mem (recog_data.operand[i],
973 insn, vd);
974 }
975 else if (MEM_P (recog_data.operand[i]))
976 replaced[i] = replace_oldest_value_mem (recog_data.operand[i],
977 insn, vd);
978
979 /* If we performed any replacement, update match_dups. */
980 if (replaced[i])
981 {
982 int j;
983 rtx new_rtx;
984
985 new_rtx = *recog_data.operand_loc[i];
986 recog_data.operand[i] = new_rtx;
987 for (j = 0; j < recog_data.n_dups; j++)
988 if (recog_data.dup_num[j] == i)
989 validate_unshare_change (insn, recog_data.dup_loc[j], new_rtx, 1);
990
991 any_replacements = true;
992 }
993 }
994
995 if (any_replacements)
996 {
997 if (! apply_change_group ())
998 {
999 for (i = 0; i < n_ops; i++)
1000 if (replaced[i])
1001 {
1002 rtx old = *recog_data.operand_loc[i];
1003 recog_data.operand[i] = old;
1004 }
1005
1006 if (dump_file)
1007 fprintf (dump_file,
1008 "insn %u: reg replacements not verified\n",
1009 INSN_UID (insn));
1010 }
1011 else
1012 changed = true;
1013 }
1014
1015 did_replacement:
1016 if (changed)
1017 {
1018 anything_changed = true;
1019
1020 /* If something changed, perhaps further changes to earlier
1021 DEBUG_INSNs can be applied. */
1022 if (vd->n_debug_insn_changes)
1023 note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
1024 }
1025
1026 ksvd.vd = vd;
1027 ksvd.ignore_set_reg = NULL_RTX;
1028
1029 /* Clobber call-clobbered registers. */
1030 if (CALL_P (insn))
1031 {
1032 unsigned int set_regno = INVALID_REGNUM;
1033 unsigned int set_nregs = 0;
1034 unsigned int regno;
1035 rtx exp;
1036 HARD_REG_SET regs_invalidated_by_this_call;
1037
1038 for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
1039 {
1040 rtx x = XEXP (exp, 0);
1041 if (GET_CODE (x) == SET)
1042 {
1043 rtx dest = SET_DEST (x);
1044 kill_value (dest, vd);
1045 set_value_regno (REGNO (dest), GET_MODE (dest), vd);
1046 copy_value (dest, SET_SRC (x), vd);
1047 ksvd.ignore_set_reg = dest;
1048 set_regno = REGNO (dest);
1049 set_nregs = REG_NREGS (dest);
1050 break;
1051 }
1052 }
1053
1054 get_call_reg_set_usage (insn,
1055 &regs_invalidated_by_this_call,
1056 regs_invalidated_by_call);
1057 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1058 if ((TEST_HARD_REG_BIT (regs_invalidated_by_this_call, regno)
1059 || (targetm.hard_regno_call_part_clobbered
1060 (regno, vd->e[regno].mode)))
1061 && (regno < set_regno || regno >= set_regno + set_nregs))
1062 kill_value_regno (regno, 1, vd);
1063
1064 /* If SET was seen in CALL_INSN_FUNCTION_USAGE, and SET_SRC
1065 of the SET isn't in regs_invalidated_by_call hard reg set,
1066 but instead among CLOBBERs on the CALL_INSN, we could wrongly
1067 assume the value in it is still live. */
1068 if (ksvd.ignore_set_reg)
1069 kill_clobbered_values (insn, vd);
1070 }
1071
1072 bool copy_p = (set
1073 && REG_P (SET_DEST (set))
1074 && REG_P (SET_SRC (set)));
1075 bool noop_p = (copy_p
1076 && rtx_equal_p (SET_DEST (set), SET_SRC (set)));
1077
1078 /* If a noop move is using a narrower mode than we have recorded,
1079 we need to either remove the noop move, or kill_set_value. */
1080 if (noop_p
1081 && partial_subreg_p (GET_MODE (SET_DEST (set)),
1082 vd->e[REGNO (SET_DEST (set))].mode))
1083 {
1084 if (noop_move_p (insn))
1085 {
1086 bool last = insn == BB_END (bb);
1087 delete_insn (insn);
1088 if (last)
1089 break;
1090 }
1091 else
1092 noop_p = false;
1093 }
1094
1095 if (!noop_p)
1096 {
1097 /* Notice stores. */
1098 note_stores (PATTERN (insn), kill_set_value, &ksvd);
1099
1100 /* Notice copies. */
1101 if (copy_p)
1102 copy_value (SET_DEST (set), SET_SRC (set), vd);
1103 }
1104
1105 if (insn == BB_END (bb))
1106 break;
1107 }
1108
1109 return anything_changed;
1110 }
1111
1112 /* Dump the value chain data to stderr. */
1113
1114 DEBUG_FUNCTION void
1115 debug_value_data (struct value_data *vd)
1116 {
1117 HARD_REG_SET set;
1118 unsigned int i, j;
1119
1120 CLEAR_HARD_REG_SET (set);
1121
1122 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1123 if (vd->e[i].oldest_regno == i)
1124 {
1125 if (vd->e[i].mode == VOIDmode)
1126 {
1127 if (vd->e[i].next_regno != INVALID_REGNUM)
1128 fprintf (stderr, "[%u] Bad next_regno for empty chain (%u)\n",
1129 i, vd->e[i].next_regno);
1130 continue;
1131 }
1132
1133 SET_HARD_REG_BIT (set, i);
1134 fprintf (stderr, "[%u %s] ", i, GET_MODE_NAME (vd->e[i].mode));
1135
1136 for (j = vd->e[i].next_regno;
1137 j != INVALID_REGNUM;
1138 j = vd->e[j].next_regno)
1139 {
1140 if (TEST_HARD_REG_BIT (set, j))
1141 {
1142 fprintf (stderr, "[%u] Loop in regno chain\n", j);
1143 return;
1144 }
1145
1146 if (vd->e[j].oldest_regno != i)
1147 {
1148 fprintf (stderr, "[%u] Bad oldest_regno (%u)\n",
1149 j, vd->e[j].oldest_regno);
1150 return;
1151 }
1152 SET_HARD_REG_BIT (set, j);
1153 fprintf (stderr, "[%u %s] ", j, GET_MODE_NAME (vd->e[j].mode));
1154 }
1155 fputc ('\n', stderr);
1156 }
1157
1158 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1159 if (! TEST_HARD_REG_BIT (set, i)
1160 && (vd->e[i].mode != VOIDmode
1161 || vd->e[i].oldest_regno != i
1162 || vd->e[i].next_regno != INVALID_REGNUM))
1163 fprintf (stderr, "[%u] Non-empty reg in chain (%s %u %i)\n",
1164 i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1165 vd->e[i].next_regno);
1166 }
1167
1168 /* Do copyprop_hardreg_forward_1 for a single basic block BB.
1169 DEBUG_INSNs are skipped, since we do not want to involve the DF-related
1170 machinery that pass_cprop_hardreg::execute uses to handle them.
1171
1172 NOTE: Currently it is only used for shrink-wrap. Maybe extend it
1173 to handle DEBUG_INSN for other uses. */
1174
1175 void
1176 copyprop_hardreg_forward_bb_without_debug_insn (basic_block bb)
1177 {
1178 struct value_data *vd;
1179 vd = XNEWVEC (struct value_data, 1);
1180 init_value_data (vd);
1181
1182 skip_debug_insn_p = true;
1183 copyprop_hardreg_forward_1 (bb, vd);
1184 free (vd);
1185 skip_debug_insn_p = false;
1186 }
1187
1188 static void
1189 validate_value_data (struct value_data *vd)
1190 {
1191 HARD_REG_SET set;
1192 unsigned int i, j;
1193
1194 CLEAR_HARD_REG_SET (set);
1195
1196 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1197 if (vd->e[i].oldest_regno == i)
1198 {
1199 if (vd->e[i].mode == VOIDmode)
1200 {
1201 if (vd->e[i].next_regno != INVALID_REGNUM)
1202 internal_error ("validate_value_data: [%u] Bad next_regno for empty chain (%u)",
1203 i, vd->e[i].next_regno);
1204 continue;
1205 }
1206
1207 SET_HARD_REG_BIT (set, i);
1208
1209 for (j = vd->e[i].next_regno;
1210 j != INVALID_REGNUM;
1211 j = vd->e[j].next_regno)
1212 {
1213 if (TEST_HARD_REG_BIT (set, j))
1214 internal_error ("validate_value_data: Loop in regno chain (%u)",
1215 j);
1216 if (vd->e[j].oldest_regno != i)
1217 internal_error ("validate_value_data: [%u] Bad oldest_regno (%u)",
1218 j, vd->e[j].oldest_regno);
1219
1220 SET_HARD_REG_BIT (set, j);
1221 }
1222 }
1223
1224 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1225 if (! TEST_HARD_REG_BIT (set, i)
1226 && (vd->e[i].mode != VOIDmode
1227 || vd->e[i].oldest_regno != i
1228 || vd->e[i].next_regno != INVALID_REGNUM))
1229 internal_error ("validate_value_data: [%u] Non-empty reg in chain (%s %u %i)",
1230 i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1231 vd->e[i].next_regno);
1232 }
1233
1234 \f
1235 namespace {
1236
1237 const pass_data pass_data_cprop_hardreg =
1238 {
1239 RTL_PASS, /* type */
1240 "cprop_hardreg", /* name */
1241 OPTGROUP_NONE, /* optinfo_flags */
1242 TV_CPROP_REGISTERS, /* tv_id */
1243 0, /* properties_required */
1244 0, /* properties_provided */
1245 0, /* properties_destroyed */
1246 0, /* todo_flags_start */
1247 TODO_df_finish, /* todo_flags_finish */
1248 };
1249
1250 class pass_cprop_hardreg : public rtl_opt_pass
1251 {
1252 public:
1253 pass_cprop_hardreg (gcc::context *ctxt)
1254 : rtl_opt_pass (pass_data_cprop_hardreg, ctxt)
1255 {}
1256
1257 /* opt_pass methods: */
1258 virtual bool gate (function *)
1259 {
1260 return (optimize > 0 && (flag_cprop_registers));
1261 }
1262
1263 virtual unsigned int execute (function *);
1264
1265 }; // class pass_cprop_hardreg
1266
1267 unsigned int
1268 pass_cprop_hardreg::execute (function *fun)
1269 {
1270 struct value_data *all_vd;
1271 basic_block bb;
1272 bool analyze_called = false;
1273
1274 all_vd = XNEWVEC (struct value_data, last_basic_block_for_fn (fun));
1275
1276 auto_sbitmap visited (last_basic_block_for_fn (fun));
1277 bitmap_clear (visited);
1278
1279 FOR_EACH_BB_FN (bb, fun)
1280 {
1281 bitmap_set_bit (visited, bb->index);
1282
1283 /* If a block has a single predecessor, that we've already
1284 processed, begin with the value data that was live at
1285 the end of the predecessor block. */
1286 /* ??? Ought to use more intelligent queuing of blocks. */
1287 if (single_pred_p (bb)
1288 && bitmap_bit_p (visited, single_pred (bb)->index)
1289 && ! (single_pred_edge (bb)->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
1290 {
1291 all_vd[bb->index] = all_vd[single_pred (bb)->index];
1292 if (all_vd[bb->index].n_debug_insn_changes)
1293 {
1294 unsigned int regno;
1295
1296 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1297 {
1298 if (all_vd[bb->index].e[regno].debug_insn_changes)
1299 {
1300 all_vd[bb->index].e[regno].debug_insn_changes = NULL;
1301 if (--all_vd[bb->index].n_debug_insn_changes == 0)
1302 break;
1303 }
1304 }
1305 }
1306 }
1307 else
1308 init_value_data (all_vd + bb->index);
1309
1310 copyprop_hardreg_forward_1 (bb, all_vd + bb->index);
1311 }
1312
1313 if (MAY_HAVE_DEBUG_INSNS)
1314 {
1315 FOR_EACH_BB_FN (bb, fun)
1316 if (bitmap_bit_p (visited, bb->index)
1317 && all_vd[bb->index].n_debug_insn_changes)
1318 {
1319 unsigned int regno;
1320 bitmap live;
1321
1322 if (!analyze_called)
1323 {
1324 df_analyze ();
1325 analyze_called = true;
1326 }
1327 live = df_get_live_out (bb);
1328 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1329 if (all_vd[bb->index].e[regno].debug_insn_changes)
1330 {
1331 if (REGNO_REG_SET_P (live, regno))
1332 apply_debug_insn_changes (all_vd + bb->index, regno);
1333 if (all_vd[bb->index].n_debug_insn_changes == 0)
1334 break;
1335 }
1336 }
1337
1338 queued_debug_insn_change_pool.release ();
1339 }
1340
1341 free (all_vd);
1342 return 0;
1343 }
1344
1345 } // anon namespace
1346
1347 rtl_opt_pass *
1348 make_pass_cprop_hardreg (gcc::context *ctxt)
1349 {
1350 return new pass_cprop_hardreg (ctxt);
1351 }