]>
Commit | Line | Data |
---|---|---|
2d4749b6 | 1 | /* Copy propagation on hard registers for the GNU compiler. |
8e8f6434 | 2 | Copyright (C) 2000-2018 Free Software Foundation, Inc. |
2d4749b6 | 3 | |
4 | This file is part of GCC. | |
5 | ||
6 | GCC is free software; you can redistribute it and/or modify it | |
7 | under the terms of the GNU General Public License as published by | |
8 | the Free Software Foundation; either version 3, or (at your option) | |
9 | any later version. | |
10 | ||
11 | GCC is distributed in the hope that it will be useful, but WITHOUT | |
12 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY | |
13 | or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public | |
14 | License for more details. | |
15 | ||
16 | You should have received a copy of the GNU General Public License | |
17 | along with GCC; see the file COPYING3. If not see | |
18 | <http://www.gnu.org/licenses/>. */ | |
19 | ||
20 | #include "config.h" | |
21 | #include "system.h" | |
22 | #include "coretypes.h" | |
9ef16211 | 23 | #include "backend.h" |
2d4749b6 | 24 | #include "rtl.h" |
9ef16211 | 25 | #include "df.h" |
ad7b10a2 | 26 | #include "memmodel.h" |
2d4749b6 | 27 | #include "tm_p.h" |
28 | #include "insn-config.h" | |
29 | #include "regs.h" | |
7c29e30e | 30 | #include "emit-rtl.h" |
31 | #include "recog.h" | |
32 | #include "diagnostic-core.h" | |
2d4749b6 | 33 | #include "addresses.h" |
2d4749b6 | 34 | #include "tree-pass.h" |
29f9ec8f | 35 | #include "rtl-iter.h" |
f14d8d6a | 36 | #include "cfgrtl.h" |
5da94e60 | 37 | #include "target.h" |
2d4749b6 | 38 | |
39 | /* The following code does forward propagation of hard register copies. | |
40 | The object is to eliminate as many dependencies as possible, so that | |
41 | we have the most scheduling freedom. As a side effect, we also clean | |
42 | up some silly register allocation decisions made by reload. This | |
43 | code may be obsoleted by a new register allocator. */ | |
44 | ||
/* DEBUG_INSNs aren't changed right away, as doing so might extend the
   lifetime of a register and get the DEBUG_INSN subsequently reset.
   So they are queued instead, and updated only when the register is
   used in some subsequent real insn before it is set.  */
struct queued_debug_insn_change
{
  struct queued_debug_insn_change *next;  /* Next queued change, or NULL.  */
  rtx_insn *insn;			  /* The DEBUG_INSN to update.  */
  rtx *loc;				  /* Location within INSN to replace.  */
  rtx new_rtx;				  /* Replacement register rtx.  */
};
56 | ||
/* For each register, we have a list of registers that contain the same
   value.  The OLDEST_REGNO field points to the head of the list, and
   the NEXT_REGNO field runs through the list.  The MODE field indicates
   what mode the data is known to be in; this field is VOIDmode when the
   register is not known to contain valid data.  */

struct value_data_entry
{
  machine_mode mode;
  unsigned int oldest_regno;
  unsigned int next_regno;
  /* Queued DEBUG_INSN replacements that would rewrite some location to
     use this register; applied or freed when the register is used in a
     real insn or killed.  */
  struct queued_debug_insn_change *debug_insn_changes;
};

struct value_data
{
  struct value_data_entry e[FIRST_PSEUDO_REGISTER];
  /* Largest number of hard registers occupied by any value recorded so
     far; bounds the overlap scan in kill_value_regno.  */
  unsigned int max_value_regs;
  /* Total number of queued DEBUG_INSN changes over all entries.  */
  unsigned int n_debug_insn_changes;
};
77 | ||
/* Allocation pool for queued_debug_insn_change structures.  */
static object_allocator<queued_debug_insn_change> queued_debug_insn_change_pool
  ("debug insn changes pool");

/* When true, replace_oldest_value_reg neither replaces nor queues
   replacements in DEBUG_INSNs.  */
static bool skip_debug_insn_p;
2058ec71 | 82 | |
/* Local function prototypes.  */

static void kill_value_one_regno (unsigned, struct value_data *);
static void kill_value_regno (unsigned, unsigned, struct value_data *);
static void kill_value (const_rtx, struct value_data *);
static void set_value_regno (unsigned, machine_mode, struct value_data *);
static void init_value_data (struct value_data *);
static void kill_clobbered_value (rtx, const_rtx, void *);
static void kill_set_value (rtx, const_rtx, void *);
static void copy_value (rtx, rtx, struct value_data *);
static bool mode_change_ok (machine_mode, machine_mode,
			    unsigned int);
static rtx maybe_mode_change (machine_mode, machine_mode,
			      machine_mode, unsigned int, unsigned int);
static rtx find_oldest_value_reg (enum reg_class, rtx, struct value_data *);
static bool replace_oldest_value_reg (rtx *, enum reg_class, rtx_insn *,
				      struct value_data *);
static bool replace_oldest_value_addr (rtx *, enum reg_class,
				       machine_mode, addr_space_t,
				       rtx_insn *, struct value_data *);
static bool replace_oldest_value_mem (rtx, rtx_insn *, struct value_data *);
static bool copyprop_hardreg_forward_1 (basic_block, struct value_data *);
extern void debug_value_data (struct value_data *);
static void validate_value_data (struct value_data *);
2d4749b6 | 105 | |
2058ec71 | 106 | /* Free all queued updates for DEBUG_INSNs that change some reg to |
107 | register REGNO. */ | |
108 | ||
109 | static void | |
110 | free_debug_insn_changes (struct value_data *vd, unsigned int regno) | |
111 | { | |
112 | struct queued_debug_insn_change *cur, *next; | |
113 | for (cur = vd->e[regno].debug_insn_changes; cur; cur = next) | |
114 | { | |
115 | next = cur->next; | |
116 | --vd->n_debug_insn_changes; | |
e16712b1 | 117 | queued_debug_insn_change_pool.remove (cur); |
2058ec71 | 118 | } |
119 | vd->e[regno].debug_insn_changes = NULL; | |
120 | } | |
121 | ||
/* Kill register REGNO.  This involves removing it from any value
   lists, and resetting the value mode to VOIDmode.  This is only a
   helper function; it does not handle any hard registers overlapping
   with REGNO.  */

static void
kill_value_one_regno (unsigned int regno, struct value_data *vd)
{
  unsigned int i, next;

  if (vd->e[regno].oldest_regno != regno)
    {
      /* REGNO is not the head of its chain: walk from the head to the
	 entry whose NEXT_REGNO is REGNO and splice REGNO out.  */
      for (i = vd->e[regno].oldest_regno;
	   vd->e[i].next_regno != regno;
	   i = vd->e[i].next_regno)
	continue;
      vd->e[i].next_regno = vd->e[regno].next_regno;
    }
  else if ((next = vd->e[regno].next_regno) != INVALID_REGNUM)
    {
      /* REGNO is the head of a non-singleton chain: its successor NEXT
	 becomes the new oldest register for every remaining member.  */
      for (i = next; i != INVALID_REGNUM; i = vd->e[i].next_regno)
	vd->e[i].oldest_regno = next;
    }

  /* Reset REGNO to a singleton chain with no known value, and discard
     any DEBUG_INSN replacements that were waiting on it.  */
  vd->e[regno].mode = VOIDmode;
  vd->e[regno].oldest_regno = regno;
  vd->e[regno].next_regno = INVALID_REGNUM;
  if (vd->e[regno].debug_insn_changes)
    free_debug_insn_changes (vd, regno);

  if (flag_checking)
    validate_value_data (vd);
}
155 | ||
156 | /* Kill the value in register REGNO for NREGS, and any other registers | |
157 | whose values overlap. */ | |
158 | ||
159 | static void | |
160 | kill_value_regno (unsigned int regno, unsigned int nregs, | |
161 | struct value_data *vd) | |
162 | { | |
163 | unsigned int j; | |
164 | ||
165 | /* Kill the value we're told to kill. */ | |
166 | for (j = 0; j < nregs; ++j) | |
167 | kill_value_one_regno (regno + j, vd); | |
168 | ||
169 | /* Kill everything that overlapped what we're told to kill. */ | |
170 | if (regno < vd->max_value_regs) | |
171 | j = 0; | |
172 | else | |
173 | j = regno - vd->max_value_regs; | |
174 | for (; j < regno; ++j) | |
175 | { | |
176 | unsigned int i, n; | |
177 | if (vd->e[j].mode == VOIDmode) | |
178 | continue; | |
92d2aec3 | 179 | n = hard_regno_nregs (j, vd->e[j].mode); |
2d4749b6 | 180 | if (j + n > regno) |
181 | for (i = 0; i < n; ++i) | |
182 | kill_value_one_regno (j + i, vd); | |
183 | } | |
184 | } | |
185 | ||
186 | /* Kill X. This is a convenience function wrapping kill_value_regno | |
187 | so that we mind the mode the register is in. */ | |
188 | ||
189 | static void | |
29f9ec8f | 190 | kill_value (const_rtx x, struct value_data *vd) |
2d4749b6 | 191 | { |
2d4749b6 | 192 | if (GET_CODE (x) == SUBREG) |
193 | { | |
29f9ec8f | 194 | rtx tmp = simplify_subreg (GET_MODE (x), SUBREG_REG (x), |
195 | GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x)); | |
196 | x = tmp ? tmp : SUBREG_REG (x); | |
2d4749b6 | 197 | } |
198 | if (REG_P (x)) | |
0933f1d9 | 199 | kill_value_regno (REGNO (x), REG_NREGS (x), vd); |
2d4749b6 | 200 | } |
201 | ||
202 | /* Remember that REGNO is valid in MODE. */ | |
203 | ||
204 | static void | |
3754d046 | 205 | set_value_regno (unsigned int regno, machine_mode mode, |
2d4749b6 | 206 | struct value_data *vd) |
207 | { | |
208 | unsigned int nregs; | |
209 | ||
210 | vd->e[regno].mode = mode; | |
211 | ||
92d2aec3 | 212 | nregs = hard_regno_nregs (regno, mode); |
2d4749b6 | 213 | if (nregs > vd->max_value_regs) |
214 | vd->max_value_regs = nregs; | |
215 | } | |
216 | ||
217 | /* Initialize VD such that there are no known relationships between regs. */ | |
218 | ||
219 | static void | |
220 | init_value_data (struct value_data *vd) | |
221 | { | |
222 | int i; | |
223 | for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i) | |
224 | { | |
225 | vd->e[i].mode = VOIDmode; | |
226 | vd->e[i].oldest_regno = i; | |
227 | vd->e[i].next_regno = INVALID_REGNUM; | |
2058ec71 | 228 | vd->e[i].debug_insn_changes = NULL; |
2d4749b6 | 229 | } |
230 | vd->max_value_regs = 0; | |
2058ec71 | 231 | vd->n_debug_insn_changes = 0; |
2d4749b6 | 232 | } |
233 | ||
234 | /* Called through note_stores. If X is clobbered, kill its value. */ | |
235 | ||
236 | static void | |
237 | kill_clobbered_value (rtx x, const_rtx set, void *data) | |
238 | { | |
239 | struct value_data *const vd = (struct value_data *) data; | |
240 | if (GET_CODE (set) == CLOBBER) | |
241 | kill_value (x, vd); | |
242 | } | |
243 | ||
/* A structure passed as data to kill_set_value through note_stores.  */
struct kill_set_value_data
{
  struct value_data *vd;	/* Value-tracking state to update.  */
  rtx ignore_set_reg;		/* Register whose sets are to be ignored.  */
};
250 | ||
2d4749b6 | 251 | /* Called through note_stores. If X is set, not clobbered, kill its |
252 | current value and install it as the root of its own value list. */ | |
253 | ||
254 | static void | |
255 | kill_set_value (rtx x, const_rtx set, void *data) | |
256 | { | |
c8010b80 | 257 | struct kill_set_value_data *ksvd = (struct kill_set_value_data *) data; |
258 | if (rtx_equal_p (x, ksvd->ignore_set_reg)) | |
259 | return; | |
2d4749b6 | 260 | if (GET_CODE (set) != CLOBBER) |
261 | { | |
c8010b80 | 262 | kill_value (x, ksvd->vd); |
2d4749b6 | 263 | if (REG_P (x)) |
c8010b80 | 264 | set_value_regno (REGNO (x), GET_MODE (x), ksvd->vd); |
2d4749b6 | 265 | } |
266 | } | |
267 | ||
/* Kill any register used in X as the base of an auto-increment expression,
   and install that register as the root of its own value list.  */

static void
kill_autoinc_value (rtx_insn *insn, struct value_data *vd)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
    {
      const_rtx x = *iter;
      if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
	{
	  /* The operand of an auto-inc is both used and set: its old
	     value dies, but the register itself remains valid in its
	     current mode.  */
	  x = XEXP (x, 0);
	  kill_value (x, vd);
	  set_value_regno (REGNO (x), GET_MODE (x), vd);
	  /* Don't walk into the operand we just handled.  */
	  iter.skip_subrtxes ();
	}
    }
}
287 | ||
/* Assert that SRC has been copied to DEST.  Adjust the data structures
   to reflect that SRC contains an older copy of the shared value.  */

static void
copy_value (rtx dest, rtx src, struct value_data *vd)
{
  unsigned int dr = REGNO (dest);
  unsigned int sr = REGNO (src);
  unsigned int dn, sn;
  unsigned int i;

  /* ??? At present, it's possible to see noop sets.  It'd be nice if
     this were cleaned up beforehand...  */
  if (sr == dr)
    return;

  /* Do not propagate copies to the stack pointer, as that can leave
     memory accesses with no scheduling dependency on the stack update.  */
  if (dr == STACK_POINTER_REGNUM)
    return;

  /* Likewise with the frame pointer, if we're using one.  */
  if (frame_pointer_needed && dr == HARD_FRAME_POINTER_REGNUM)
    return;

  /* Do not propagate copies to fixed or global registers, patterns
     can be relying to see particular fixed register or users can
     expect the chosen global register in asm.  */
  if (fixed_regs[dr] || global_regs[dr])
    return;

  /* If SRC and DEST overlap, don't record anything.  */
  dn = REG_NREGS (dest);
  sn = REG_NREGS (src);
  if ((dr > sr && dr < sr + sn)
      || (sr > dr && sr < dr + dn))
    return;

  /* If SRC had no assigned mode (i.e. we didn't know it was live)
     assign it now and assume the value came from an input argument
     or somesuch.  */
  if (vd->e[sr].mode == VOIDmode)
    set_value_regno (sr, vd->e[dr].mode, vd);

  /* If we are narrowing the input to a smaller number of hard regs,
     and it is in big endian, we are really extracting a high part.
     Since we generally associate a low part of a value with the value itself,
     we must not do the same for the high part.
     Note we can still get low parts for the same mode combination through
     a two-step copy involving differently sized hard regs.
     Assume hard regs fr* are 32 bits each, while r* are 64 bits each:
     (set (reg:DI r0) (reg:DI fr0))
     (set (reg:SI fr2) (reg:SI r0))
     loads the low part of (reg:DI fr0) - i.e. fr1 - into fr2, while:
     (set (reg:SI fr2) (reg:SI fr0))
     loads the high part of (reg:DI fr0) into fr2.

     We can't properly represent the latter case in our tables, so don't
     record anything then.  */
  else if (sn < hard_regno_nregs (sr, vd->e[sr].mode)
	   && maybe_ne (subreg_lowpart_offset (GET_MODE (dest),
					       vd->e[sr].mode), 0U))
    return;

  /* If SRC had been assigned a mode narrower than the copy, we can't
     link DEST into the chain, because not all of the pieces of the
     copy came from oldest_regno.  */
  else if (sn > hard_regno_nregs (sr, vd->e[sr].mode))
    return;

  /* Link DR at the end of the value chain used by SR.  */

  vd->e[dr].oldest_regno = vd->e[sr].oldest_regno;

  /* Find the current tail of SR's chain and append DR there.  */
  for (i = sr; vd->e[i].next_regno != INVALID_REGNUM; i = vd->e[i].next_regno)
    continue;
  vd->e[i].next_regno = dr;

  if (flag_checking)
    validate_value_data (vd);
}
369 | ||
370 | /* Return true if a mode change from ORIG to NEW is allowed for REGNO. */ | |
371 | ||
372 | static bool | |
3754d046 | 373 | mode_change_ok (machine_mode orig_mode, machine_mode new_mode, |
2d4749b6 | 374 | unsigned int regno ATTRIBUTE_UNUSED) |
375 | { | |
974534ab | 376 | if (partial_subreg_p (orig_mode, new_mode)) |
2d4749b6 | 377 | return false; |
378 | ||
b56a9dbc | 379 | return REG_CAN_CHANGE_MODE_P (regno, orig_mode, new_mode); |
2d4749b6 | 380 | } |
381 | ||
/* Register REGNO was originally set in ORIG_MODE.  It - or a copy of it -
   was copied in COPY_MODE to COPY_REGNO, and then COPY_REGNO was accessed
   in NEW_MODE.
   Return a NEW_MODE rtx for REGNO if that's OK, otherwise return NULL_RTX.  */

static rtx
maybe_mode_change (machine_mode orig_mode, machine_mode copy_mode,
		   machine_mode new_mode, unsigned int regno,
		   unsigned int copy_regno ATTRIBUTE_UNUSED)
{
  /* If the copy was narrower than both the original value and the new
     use, we cannot tell which part of the original it carried.  */
  if (partial_subreg_p (copy_mode, orig_mode)
      && partial_subreg_p (copy_mode, new_mode))
    return NULL_RTX;

  /* Avoid creating multiple copies of the stack pointer.  Some ports
     assume there is one and only one stack pointer.

     It's unclear if we need to do the same for other special registers.  */
  if (regno == STACK_POINTER_REGNUM)
    return NULL_RTX;

  if (orig_mode == new_mode)
    return gen_raw_REG (new_mode, regno);
  else if (mode_change_ok (orig_mode, new_mode, regno))
    {
      int copy_nregs = hard_regno_nregs (copy_regno, copy_mode);
      int use_nregs = hard_regno_nregs (copy_regno, new_mode);
      poly_uint64 bytes_per_reg;
      /* Bail out if the copy does not divide evenly into its hard
	 registers; we can't name a byte offset then.  */
      if (!can_div_trunc_p (GET_MODE_SIZE (copy_mode),
			    copy_nregs, &bytes_per_reg))
	return NULL_RTX;
      /* Locate the part of REGNO (in ORIG_MODE) that corresponds to the
	 NEW_MODE access of the copy, then shift REGNO to that part.  */
      poly_uint64 copy_offset = bytes_per_reg * (copy_nregs - use_nregs);
      poly_uint64 offset
	= subreg_size_lowpart_offset (GET_MODE_SIZE (new_mode) + copy_offset,
				      GET_MODE_SIZE (orig_mode));
      regno += subreg_regno_offset (regno, orig_mode, offset, new_mode);
      if (targetm.hard_regno_mode_ok (regno, new_mode))
	return gen_raw_REG (new_mode, regno);
    }
  return NULL_RTX;
}
423 | ||
/* Find the oldest copy of the value contained in REGNO that is in
   register class CL and has mode MODE.  If found, return an rtx
   of that oldest register, otherwise return NULL.  */

static rtx
find_oldest_value_reg (enum reg_class cl, rtx reg, struct value_data *vd)
{
  unsigned int regno = REGNO (reg);
  machine_mode mode = GET_MODE (reg);
  unsigned int i;

  gcc_assert (regno < FIRST_PSEUDO_REGISTER);

  /* If we are accessing REG in some mode other that what we set it in,
     make sure that the replacement is valid.  In particular, consider
	(set (reg:DI r11) (...))
	(set (reg:SI r9) (reg:SI r11))
	(set (reg:SI r10) (...))
	(set (...) (reg:DI r9))
     Replacing r9 with r11 is invalid.  */
  if (mode != vd->e[regno].mode
      && REG_NREGS (reg) > hard_regno_nregs (regno, vd->e[regno].mode))
    return NULL_RTX;

  /* Walk the chain from the oldest member towards REGNO; the first
     member satisfying both class and mode wins.  */
  for (i = vd->e[regno].oldest_regno; i != regno; i = vd->e[i].next_regno)
    {
      machine_mode oldmode = vd->e[i].mode;
      rtx new_rtx;

      if (!in_hard_reg_set_p (reg_class_contents[cl], mode, i))
	continue;

      new_rtx = maybe_mode_change (oldmode, vd->e[regno].mode, mode, i, regno);
      if (new_rtx)
	{
	  /* Carry over the attributes of the original reference so the
	     replacement is equivalent for later passes.  */
	  ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (reg);
	  REG_ATTRS (new_rtx) = REG_ATTRS (reg);
	  REG_POINTER (new_rtx) = REG_POINTER (reg);
	  return new_rtx;
	}
    }

  return NULL_RTX;
}
468 | ||
/* If possible, replace the register at *LOC with the oldest register
   in register class CL.  Return true if successfully replaced.  */

static bool
replace_oldest_value_reg (rtx *loc, enum reg_class cl, rtx_insn *insn,
			  struct value_data *vd)
{
  rtx new_rtx = find_oldest_value_reg (cl, *loc, vd);
  if (new_rtx && (!DEBUG_INSN_P (insn) || !skip_debug_insn_p))
    {
      if (DEBUG_INSN_P (insn))
	{
	  /* Do not rewrite a DEBUG_INSN immediately; queue the change
	     against the replacement register, to be applied only when
	     that register is later used in a real insn (see
	     cprop_find_used_regs).  */
	  struct queued_debug_insn_change *change;

	  if (dump_file)
	    fprintf (dump_file, "debug_insn %u: queued replacing reg %u with %u\n",
		     INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));

	  change = queued_debug_insn_change_pool.allocate ();
	  change->next = vd->e[REGNO (new_rtx)].debug_insn_changes;
	  change->insn = insn;
	  change->loc = loc;
	  change->new_rtx = new_rtx;
	  vd->e[REGNO (new_rtx)].debug_insn_changes = change;
	  ++vd->n_debug_insn_changes;
	  return true;
	}
      if (dump_file)
	fprintf (dump_file, "insn %u: replaced reg %u with %u\n",
		 INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));

      /* Group the change; the caller is responsible for calling
	 apply_change_group.  */
      validate_change (insn, loc, new_rtx, 1);
      return true;
    }
  return false;
}
505 | ||
/* Similar to replace_oldest_value_reg, but *LOC contains an address.
   Adapted from find_reloads_address_1.  CL is INDEX_REG_CLASS or
   BASE_REG_CLASS depending on how the register is being considered.  */

static bool
replace_oldest_value_addr (rtx *loc, enum reg_class cl,
			   machine_mode mode, addr_space_t as,
			   rtx_insn *insn, struct value_data *vd)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;
  int i, j;
  bool changed = false;

  switch (code)
    {
    case PLUS:
      if (DEBUG_INSN_P (insn))
	break;

      {
	rtx orig_op0 = XEXP (x, 0);
	rtx orig_op1 = XEXP (x, 1);
	RTX_CODE code0 = GET_CODE (orig_op0);
	RTX_CODE code1 = GET_CODE (orig_op1);
	rtx op0 = orig_op0;
	rtx op1 = orig_op1;
	rtx *locI = NULL;
	rtx *locB = NULL;
	enum rtx_code index_code = SCRATCH;

	/* Look through SUBREGs when classifying the operands.  */
	if (GET_CODE (op0) == SUBREG)
	  {
	    op0 = SUBREG_REG (op0);
	    code0 = GET_CODE (op0);
	  }

	if (GET_CODE (op1) == SUBREG)
	  {
	    op1 = SUBREG_REG (op1);
	    code1 = GET_CODE (op1);
	  }

	/* Decide which operand is the index (LOCI) and which the base
	   (LOCB), mirroring the classification in find_reloads_address_1.  */
	if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
	    || code0 == ZERO_EXTEND || code1 == MEM)
	  {
	    locI = &XEXP (x, 0);
	    locB = &XEXP (x, 1);
	    index_code = GET_CODE (*locI);
	  }
	else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
		 || code1 == ZERO_EXTEND || code0 == MEM)
	  {
	    locI = &XEXP (x, 1);
	    locB = &XEXP (x, 0);
	    index_code = GET_CODE (*locI);
	  }
	else if (code0 == CONST_INT || code0 == CONST
		 || code0 == SYMBOL_REF || code0 == LABEL_REF)
	  {
	    locB = &XEXP (x, 1);
	    index_code = GET_CODE (XEXP (x, 0));
	  }
	else if (code1 == CONST_INT || code1 == CONST
		 || code1 == SYMBOL_REF || code1 == LABEL_REF)
	  {
	    locB = &XEXP (x, 0);
	    index_code = GET_CODE (XEXP (x, 1));
	  }
	else if (code0 == REG && code1 == REG)
	  {
	    /* Two registers: prefer the assignment under which both
	       operands are valid in their role; otherwise fall back in
	       the same order find_reloads_address_1 does.  */
	    int index_op;
	    unsigned regno0 = REGNO (op0), regno1 = REGNO (op1);

	    if (REGNO_OK_FOR_INDEX_P (regno1)
		&& regno_ok_for_base_p (regno0, mode, as, PLUS, REG))
	      index_op = 1;
	    else if (REGNO_OK_FOR_INDEX_P (regno0)
		     && regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
	      index_op = 0;
	    else if (regno_ok_for_base_p (regno0, mode, as, PLUS, REG)
		     || REGNO_OK_FOR_INDEX_P (regno1))
	      index_op = 1;
	    else if (regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
	      index_op = 0;
	    else
	      index_op = 1;

	    locI = &XEXP (x, index_op);
	    locB = &XEXP (x, !index_op);
	    index_code = GET_CODE (*locI);
	  }
	else if (code0 == REG)
	  {
	    locI = &XEXP (x, 0);
	    locB = &XEXP (x, 1);
	    index_code = GET_CODE (*locI);
	  }
	else if (code1 == REG)
	  {
	    locI = &XEXP (x, 1);
	    locB = &XEXP (x, 0);
	    index_code = GET_CODE (*locI);
	  }

	/* Recurse on each part with the register class matching its
	   role in the address.  */
	if (locI)
	  changed |= replace_oldest_value_addr (locI, INDEX_REG_CLASS,
						mode, as, insn, vd);
	if (locB)
	  changed |= replace_oldest_value_addr (locB,
						base_reg_class (mode, as, PLUS,
								index_code),
						mode, as, insn, vd);
	return changed;
      }

    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      /* Never replace registers inside an auto-modify address.  */
      return false;

    case MEM:
      return replace_oldest_value_mem (x, insn, vd);

    case REG:
      return replace_oldest_value_reg (loc, cl, insn, vd);

    default:
      break;
    }

  /* Generic walk over any remaining sub-expressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	changed |= replace_oldest_value_addr (&XEXP (x, i), cl, mode, as,
					      insn, vd);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  changed |= replace_oldest_value_addr (&XVECEXP (x, i, j), cl,
						mode, as, insn, vd);
    }

  return changed;
}
655 | ||
656 | /* Similar to replace_oldest_value_reg, but X contains a memory. */ | |
657 | ||
658 | static bool | |
0991de81 | 659 | replace_oldest_value_mem (rtx x, rtx_insn *insn, struct value_data *vd) |
2d4749b6 | 660 | { |
9845d120 | 661 | enum reg_class cl; |
662 | ||
663 | if (DEBUG_INSN_P (insn)) | |
664 | cl = ALL_REGS; | |
665 | else | |
f8a8fc7b | 666 | cl = base_reg_class (GET_MODE (x), MEM_ADDR_SPACE (x), MEM, SCRATCH); |
9845d120 | 667 | |
668 | return replace_oldest_value_addr (&XEXP (x, 0), cl, | |
f8a8fc7b | 669 | GET_MODE (x), MEM_ADDR_SPACE (x), |
670 | insn, vd); | |
2d4749b6 | 671 | } |
672 | ||
/* Apply all queued updates for DEBUG_INSNs that change some reg to
   register REGNO.  */

static void
apply_debug_insn_changes (struct value_data *vd, unsigned int regno)
{
  struct queued_debug_insn_change *change;
  rtx_insn *last_insn = vd->e[regno].debug_insn_changes->insn;

  /* Batch consecutive changes belonging to the same insn into a single
     apply_change_group call.  */
  for (change = vd->e[regno].debug_insn_changes;
       change;
       change = change->next)
    {
      if (last_insn != change->insn)
	{
	  apply_change_group ();
	  last_insn = change->insn;
	}
      validate_change (change->insn, change->loc, change->new_rtx, 1);
    }
  apply_change_group ();
}
695 | ||
/* Called via note_uses, for all used registers in a real insn
   apply DEBUG_INSN changes that change registers to the used
   registers.  */

static void
cprop_find_used_regs (rtx *loc, void *data)
{
  struct value_data *const vd = (struct value_data *) data;
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, *loc, NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x))
	{
	  unsigned int regno = REGNO (x);
	  if (vd->e[regno].debug_insn_changes)
	    {
	      /* The register is used by a real insn before being reset,
		 so the queued DEBUG_INSN replacements are now safe to
		 commit.  */
	      apply_debug_insn_changes (vd, regno);
	      free_debug_insn_changes (vd, regno);
	    }
	}
    }
}
719 | ||
9c4a0128 | 720 | /* Apply clobbers of INSN in PATTERN and C_I_F_U to value_data VD. */ |
721 | ||
722 | static void | |
723 | kill_clobbered_values (rtx_insn *insn, struct value_data *vd) | |
724 | { | |
725 | note_stores (PATTERN (insn), kill_clobbered_value, vd); | |
726 | ||
727 | if (CALL_P (insn)) | |
728 | { | |
729 | rtx exp; | |
730 | ||
731 | for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1)) | |
732 | { | |
733 | rtx x = XEXP (exp, 0); | |
734 | if (GET_CODE (x) == CLOBBER) | |
735 | kill_value (SET_DEST (x), vd); | |
736 | } | |
737 | } | |
738 | } | |
739 | ||
2d4749b6 | 740 | /* Perform the forward copy propagation on basic block BB. */ |
741 | ||
742 | static bool | |
743 | copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd) | |
744 | { | |
9845d120 | 745 | bool anything_changed = false; |
f14d8d6a | 746 | rtx_insn *insn, *next; |
2d4749b6 | 747 | |
f14d8d6a | 748 | for (insn = BB_HEAD (bb); ; insn = next) |
2d4749b6 | 749 | { |
757fefec | 750 | int n_ops, i, predicated; |
2d4749b6 | 751 | bool is_asm, any_replacements; |
752 | rtx set; | |
ff5a75fc | 753 | rtx link; |
2d4749b6 | 754 | bool replaced[MAX_RECOG_OPERANDS]; |
9845d120 | 755 | bool changed = false; |
c8010b80 | 756 | struct kill_set_value_data ksvd; |
2d4749b6 | 757 | |
f14d8d6a | 758 | next = NEXT_INSN (insn); |
9845d120 | 759 | if (!NONDEBUG_INSN_P (insn)) |
2d4749b6 | 760 | { |
c64f38bf | 761 | if (DEBUG_BIND_INSN_P (insn)) |
9845d120 | 762 | { |
763 | rtx loc = INSN_VAR_LOCATION_LOC (insn); | |
2058ec71 | 764 | if (!VAR_LOC_UNKNOWN_P (loc)) |
765 | replace_oldest_value_addr (&INSN_VAR_LOCATION_LOC (insn), | |
766 | ALL_REGS, GET_MODE (loc), | |
f8a8fc7b | 767 | ADDR_SPACE_GENERIC, insn, vd); |
9845d120 | 768 | } |
769 | ||
2d4749b6 | 770 | if (insn == BB_END (bb)) |
771 | break; | |
772 | else | |
773 | continue; | |
774 | } | |
775 | ||
776 | set = single_set (insn); | |
9c1ff919 | 777 | |
778 | /* Detect noop sets and remove them before processing side effects. */ | |
779 | if (set && REG_P (SET_DEST (set)) && REG_P (SET_SRC (set))) | |
780 | { | |
781 | unsigned int regno = REGNO (SET_SRC (set)); | |
782 | rtx r1 = find_oldest_value_reg (REGNO_REG_CLASS (regno), | |
783 | SET_DEST (set), vd); | |
784 | rtx r2 = find_oldest_value_reg (REGNO_REG_CLASS (regno), | |
785 | SET_SRC (set), vd); | |
786 | if (rtx_equal_p (r1 ? r1 : SET_DEST (set), r2 ? r2 : SET_SRC (set))) | |
787 | { | |
788 | bool last = insn == BB_END (bb); | |
9c1ff919 | 789 | delete_insn (insn); |
790 | if (last) | |
791 | break; | |
792 | continue; | |
793 | } | |
794 | } | |
795 | ||
835b8178 | 796 | extract_constrain_insn (insn); |
8eaaac4d | 797 | preprocess_constraints (insn); |
89a7a6a5 | 798 | const operand_alternative *op_alt = which_op_alt (); |
2d4749b6 | 799 | n_ops = recog_data.n_operands; |
800 | is_asm = asm_noperands (PATTERN (insn)) >= 0; | |
801 | ||
89a7a6a5 | 802 | /* Simplify the code below by promoting OP_OUT to OP_INOUT |
2d4749b6 | 803 | in predicated instructions. */ |
804 | ||
805 | predicated = GET_CODE (PATTERN (insn)) == COND_EXEC; | |
806 | for (i = 0; i < n_ops; ++i) | |
807 | { | |
757fefec | 808 | int matches = op_alt[i].matches; |
757fefec | 809 | if (matches >= 0 || op_alt[i].matched >= 0 |
2d4749b6 | 810 | || (predicated && recog_data.operand_type[i] == OP_OUT)) |
811 | recog_data.operand_type[i] = OP_INOUT; | |
812 | } | |
813 | ||
2058ec71 | 814 | /* Apply changes to earlier DEBUG_INSNs if possible. */ |
815 | if (vd->n_debug_insn_changes) | |
816 | note_uses (&PATTERN (insn), cprop_find_used_regs, vd); | |
817 | ||
2d4749b6 | 818 | /* For each earlyclobber operand, zap the value data. */ |
819 | for (i = 0; i < n_ops; i++) | |
757fefec | 820 | if (op_alt[i].earlyclobber) |
2d4749b6 | 821 | kill_value (recog_data.operand[i], vd); |
822 | ||
823 | /* Within asms, a clobber cannot overlap inputs or outputs. | |
824 | I wouldn't think this were true for regular insns, but | |
825 | scan_rtx treats them like that... */ | |
9c4a0128 | 826 | kill_clobbered_values (insn, vd); |
2d4749b6 | 827 | |
828 | /* Kill all auto-incremented values. */ | |
829 | /* ??? REG_INC is useless, since stack pushes aren't done that way. */ | |
29f9ec8f | 830 | kill_autoinc_value (insn, vd); |
2d4749b6 | 831 | |
832 | /* Kill all early-clobbered operands. */ | |
833 | for (i = 0; i < n_ops; i++) | |
757fefec | 834 | if (op_alt[i].earlyclobber) |
2d4749b6 | 835 | kill_value (recog_data.operand[i], vd); |
836 | ||
ff5a75fc | 837 | /* If we have dead sets in the insn, then we need to note these as we |
838 | would clobbers. */ | |
839 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) | |
840 | { | |
841 | if (REG_NOTE_KIND (link) == REG_UNUSED) | |
842 | { | |
843 | kill_value (XEXP (link, 0), vd); | |
844 | /* Furthermore, if the insn looked like a single-set, | |
845 | but the dead store kills the source value of that | |
846 | set, then we can no-longer use the plain move | |
847 | special case below. */ | |
848 | if (set | |
849 | && reg_overlap_mentioned_p (XEXP (link, 0), SET_SRC (set))) | |
850 | set = NULL; | |
851 | } | |
852 | } | |
853 | ||
2d4749b6 | 854 | /* Special-case plain move instructions, since we may well |
855 | be able to do the move from a different register class. */ | |
856 | if (set && REG_P (SET_SRC (set))) | |
857 | { | |
858 | rtx src = SET_SRC (set); | |
859 | unsigned int regno = REGNO (src); | |
3754d046 | 860 | machine_mode mode = GET_MODE (src); |
2d4749b6 | 861 | unsigned int i; |
862 | rtx new_rtx; | |
863 | ||
864 | /* If we are accessing SRC in some mode other that what we | |
865 | set it in, make sure that the replacement is valid. */ | |
866 | if (mode != vd->e[regno].mode) | |
867 | { | |
10fa8f76 | 868 | if (REG_NREGS (src) |
92d2aec3 | 869 | > hard_regno_nregs (regno, vd->e[regno].mode)) |
2d4749b6 | 870 | goto no_move_special_case; |
417491d1 | 871 | |
872 | /* And likewise, if we are narrowing on big endian the transformation | |
873 | is also invalid. */ | |
92d2aec3 | 874 | if (REG_NREGS (src) < hard_regno_nregs (regno, vd->e[regno].mode) |
9edf7ea8 | 875 | && maybe_ne (subreg_lowpart_offset (mode, |
876 | vd->e[regno].mode), 0U)) | |
417491d1 | 877 | goto no_move_special_case; |
2d4749b6 | 878 | } |
879 | ||
880 | /* If the destination is also a register, try to find a source | |
881 | register in the same class. */ | |
882 | if (REG_P (SET_DEST (set))) | |
883 | { | |
9c1ff919 | 884 | new_rtx = find_oldest_value_reg (REGNO_REG_CLASS (regno), |
885 | src, vd); | |
886 | ||
2d4749b6 | 887 | if (new_rtx && validate_change (insn, &SET_SRC (set), new_rtx, 0)) |
888 | { | |
889 | if (dump_file) | |
890 | fprintf (dump_file, | |
891 | "insn %u: replaced reg %u with %u\n", | |
892 | INSN_UID (insn), regno, REGNO (new_rtx)); | |
893 | changed = true; | |
894 | goto did_replacement; | |
895 | } | |
cc7416ff | 896 | /* We need to re-extract as validate_change clobbers |
897 | recog_data. */ | |
835b8178 | 898 | extract_constrain_insn (insn); |
8eaaac4d | 899 | preprocess_constraints (insn); |
2d4749b6 | 900 | } |
901 | ||
902 | /* Otherwise, try all valid registers and see if its valid. */ | |
903 | for (i = vd->e[regno].oldest_regno; i != regno; | |
904 | i = vd->e[i].next_regno) | |
905 | { | |
906 | new_rtx = maybe_mode_change (vd->e[i].mode, vd->e[regno].mode, | |
907 | mode, i, regno); | |
908 | if (new_rtx != NULL_RTX) | |
909 | { | |
910 | if (validate_change (insn, &SET_SRC (set), new_rtx, 0)) | |
911 | { | |
912 | ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (src); | |
913 | REG_ATTRS (new_rtx) = REG_ATTRS (src); | |
914 | REG_POINTER (new_rtx) = REG_POINTER (src); | |
915 | if (dump_file) | |
916 | fprintf (dump_file, | |
917 | "insn %u: replaced reg %u with %u\n", | |
918 | INSN_UID (insn), regno, REGNO (new_rtx)); | |
919 | changed = true; | |
920 | goto did_replacement; | |
921 | } | |
cc7416ff | 922 | /* We need to re-extract as validate_change clobbers |
923 | recog_data. */ | |
835b8178 | 924 | extract_constrain_insn (insn); |
8eaaac4d | 925 | preprocess_constraints (insn); |
2d4749b6 | 926 | } |
927 | } | |
928 | } | |
929 | no_move_special_case: | |
930 | ||
931 | any_replacements = false; | |
932 | ||
933 | /* For each input operand, replace a hard register with the | |
934 | eldest live copy that's in an appropriate register class. */ | |
935 | for (i = 0; i < n_ops; i++) | |
936 | { | |
937 | replaced[i] = false; | |
938 | ||
939 | /* Don't scan match_operand here, since we've no reg class | |
940 | information to pass down. Any operands that we could | |
941 | substitute in will be represented elsewhere. */ | |
942 | if (recog_data.constraints[i][0] == '\0') | |
943 | continue; | |
944 | ||
945 | /* Don't replace in asms intentionally referencing hard regs. */ | |
946 | if (is_asm && REG_P (recog_data.operand[i]) | |
947 | && (REGNO (recog_data.operand[i]) | |
948 | == ORIGINAL_REGNO (recog_data.operand[i]))) | |
949 | continue; | |
950 | ||
951 | if (recog_data.operand_type[i] == OP_IN) | |
952 | { | |
757fefec | 953 | if (op_alt[i].is_address) |
2d4749b6 | 954 | replaced[i] |
955 | = replace_oldest_value_addr (recog_data.operand_loc[i], | |
89a7a6a5 | 956 | alternative_class (op_alt, i), |
957 | VOIDmode, ADDR_SPACE_GENERIC, | |
958 | insn, vd); | |
2d4749b6 | 959 | else if (REG_P (recog_data.operand[i])) |
960 | replaced[i] | |
961 | = replace_oldest_value_reg (recog_data.operand_loc[i], | |
89a7a6a5 | 962 | alternative_class (op_alt, i), |
963 | insn, vd); | |
2d4749b6 | 964 | else if (MEM_P (recog_data.operand[i])) |
965 | replaced[i] = replace_oldest_value_mem (recog_data.operand[i], | |
966 | insn, vd); | |
967 | } | |
968 | else if (MEM_P (recog_data.operand[i])) | |
969 | replaced[i] = replace_oldest_value_mem (recog_data.operand[i], | |
970 | insn, vd); | |
971 | ||
972 | /* If we performed any replacement, update match_dups. */ | |
973 | if (replaced[i]) | |
974 | { | |
975 | int j; | |
976 | rtx new_rtx; | |
977 | ||
978 | new_rtx = *recog_data.operand_loc[i]; | |
979 | recog_data.operand[i] = new_rtx; | |
980 | for (j = 0; j < recog_data.n_dups; j++) | |
981 | if (recog_data.dup_num[j] == i) | |
982 | validate_unshare_change (insn, recog_data.dup_loc[j], new_rtx, 1); | |
983 | ||
984 | any_replacements = true; | |
985 | } | |
986 | } | |
987 | ||
988 | if (any_replacements) | |
989 | { | |
990 | if (! apply_change_group ()) | |
991 | { | |
992 | for (i = 0; i < n_ops; i++) | |
993 | if (replaced[i]) | |
994 | { | |
995 | rtx old = *recog_data.operand_loc[i]; | |
996 | recog_data.operand[i] = old; | |
997 | } | |
998 | ||
999 | if (dump_file) | |
1000 | fprintf (dump_file, | |
1001 | "insn %u: reg replacements not verified\n", | |
1002 | INSN_UID (insn)); | |
1003 | } | |
1004 | else | |
1005 | changed = true; | |
1006 | } | |
1007 | ||
1008 | did_replacement: | |
9845d120 | 1009 | if (changed) |
c7458ee3 | 1010 | { |
1011 | anything_changed = true; | |
1012 | ||
1013 | /* If something changed, perhaps further changes to earlier | |
1014 | DEBUG_INSNs can be applied. */ | |
1015 | if (vd->n_debug_insn_changes) | |
1016 | note_uses (&PATTERN (insn), cprop_find_used_regs, vd); | |
1017 | } | |
9845d120 | 1018 | |
c8010b80 | 1019 | ksvd.vd = vd; |
1020 | ksvd.ignore_set_reg = NULL_RTX; | |
1021 | ||
2d4749b6 | 1022 | /* Clobber call-clobbered registers. */ |
1023 | if (CALL_P (insn)) | |
c8010b80 | 1024 | { |
24ec6636 | 1025 | unsigned int set_regno = INVALID_REGNUM; |
1026 | unsigned int set_nregs = 0; | |
1027 | unsigned int regno; | |
c8010b80 | 1028 | rtx exp; |
7f73851f | 1029 | HARD_REG_SET regs_invalidated_by_this_call; |
24ec6636 | 1030 | |
c8010b80 | 1031 | for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1)) |
1032 | { | |
1033 | rtx x = XEXP (exp, 0); | |
1034 | if (GET_CODE (x) == SET) | |
1035 | { | |
1036 | rtx dest = SET_DEST (x); | |
1037 | kill_value (dest, vd); | |
1038 | set_value_regno (REGNO (dest), GET_MODE (dest), vd); | |
1039 | copy_value (dest, SET_SRC (x), vd); | |
1040 | ksvd.ignore_set_reg = dest; | |
1041 | set_regno = REGNO (dest); | |
0933f1d9 | 1042 | set_nregs = REG_NREGS (dest); |
c8010b80 | 1043 | break; |
1044 | } | |
1045 | } | |
24ec6636 | 1046 | |
7f73851f | 1047 | get_call_reg_set_usage (insn, |
1048 | ®s_invalidated_by_this_call, | |
1049 | regs_invalidated_by_call); | |
1aafbb7e | 1050 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) |
7f73851f | 1051 | if ((TEST_HARD_REG_BIT (regs_invalidated_by_this_call, regno) |
5da94e60 | 1052 | || (targetm.hard_regno_call_part_clobbered |
1053 | (regno, vd->e[regno].mode))) | |
1aafbb7e | 1054 | && (regno < set_regno || regno >= set_regno + set_nregs)) |
24ec6636 | 1055 | kill_value_regno (regno, 1, vd); |
ca719585 | 1056 | |
1057 | /* If SET was seen in CALL_INSN_FUNCTION_USAGE, and SET_SRC | |
1058 | of the SET isn't in regs_invalidated_by_call hard reg set, | |
1059 | but instead among CLOBBERs on the CALL_INSN, we could wrongly | |
1060 | assume the value in it is still live. */ | |
1061 | if (ksvd.ignore_set_reg) | |
9c4a0128 | 1062 | kill_clobbered_values (insn, vd); |
c8010b80 | 1063 | } |
2d4749b6 | 1064 | |
d4878205 | 1065 | bool copy_p = (set |
1066 | && REG_P (SET_DEST (set)) | |
1067 | && REG_P (SET_SRC (set))); | |
1068 | bool noop_p = (copy_p | |
1069 | && rtx_equal_p (SET_DEST (set), SET_SRC (set))); | |
2d4749b6 | 1070 | |
f14d8d6a | 1071 | /* If a noop move is using narrower mode than we have recorded, |
1072 | we need to either remove the noop move, or kill_set_value. */ | |
1073 | if (noop_p | |
974534ab | 1074 | && partial_subreg_p (GET_MODE (SET_DEST (set)), |
1075 | vd->e[REGNO (SET_DEST (set))].mode)) | |
f14d8d6a | 1076 | { |
1077 | if (noop_move_p (insn)) | |
1078 | { | |
1079 | bool last = insn == BB_END (bb); | |
1080 | delete_insn (insn); | |
1081 | if (last) | |
1082 | break; | |
1083 | } | |
1084 | else | |
1085 | noop_p = false; | |
1086 | } | |
1087 | ||
d4878205 | 1088 | if (!noop_p) |
1089 | { | |
1090 | /* Notice stores. */ | |
1091 | note_stores (PATTERN (insn), kill_set_value, &ksvd); | |
1092 | ||
1093 | /* Notice copies. */ | |
1094 | if (copy_p) | |
1095 | copy_value (SET_DEST (set), SET_SRC (set), vd); | |
1096 | } | |
2d4749b6 | 1097 | |
1098 | if (insn == BB_END (bb)) | |
1099 | break; | |
1100 | } | |
1101 | ||
9845d120 | 1102 | return anything_changed; |
2d4749b6 | 1103 | } |
1104 | ||
2d4749b6 | 1105 | /* Dump the value chain data to stderr. */ |
1106 | ||
4b987fac | 1107 | DEBUG_FUNCTION void |
2d4749b6 | 1108 | debug_value_data (struct value_data *vd) |
1109 | { | |
1110 | HARD_REG_SET set; | |
1111 | unsigned int i, j; | |
1112 | ||
1113 | CLEAR_HARD_REG_SET (set); | |
1114 | ||
1115 | for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i) | |
1116 | if (vd->e[i].oldest_regno == i) | |
1117 | { | |
1118 | if (vd->e[i].mode == VOIDmode) | |
1119 | { | |
1120 | if (vd->e[i].next_regno != INVALID_REGNUM) | |
1121 | fprintf (stderr, "[%u] Bad next_regno for empty chain (%u)\n", | |
1122 | i, vd->e[i].next_regno); | |
1123 | continue; | |
1124 | } | |
1125 | ||
1126 | SET_HARD_REG_BIT (set, i); | |
1127 | fprintf (stderr, "[%u %s] ", i, GET_MODE_NAME (vd->e[i].mode)); | |
1128 | ||
1129 | for (j = vd->e[i].next_regno; | |
1130 | j != INVALID_REGNUM; | |
1131 | j = vd->e[j].next_regno) | |
1132 | { | |
1133 | if (TEST_HARD_REG_BIT (set, j)) | |
1134 | { | |
1135 | fprintf (stderr, "[%u] Loop in regno chain\n", j); | |
1136 | return; | |
1137 | } | |
1138 | ||
1139 | if (vd->e[j].oldest_regno != i) | |
1140 | { | |
1141 | fprintf (stderr, "[%u] Bad oldest_regno (%u)\n", | |
1142 | j, vd->e[j].oldest_regno); | |
1143 | return; | |
1144 | } | |
1145 | SET_HARD_REG_BIT (set, j); | |
1146 | fprintf (stderr, "[%u %s] ", j, GET_MODE_NAME (vd->e[j].mode)); | |
1147 | } | |
1148 | fputc ('\n', stderr); | |
1149 | } | |
1150 | ||
1151 | for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i) | |
1152 | if (! TEST_HARD_REG_BIT (set, i) | |
1153 | && (vd->e[i].mode != VOIDmode | |
1154 | || vd->e[i].oldest_regno != i | |
1155 | || vd->e[i].next_regno != INVALID_REGNUM)) | |
1156 | fprintf (stderr, "[%u] Non-empty reg in chain (%s %u %i)\n", | |
1157 | i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno, | |
1158 | vd->e[i].next_regno); | |
1159 | } | |
1160 | ||
59483f68 | 1161 | /* Do copyprop_hardreg_forward_1 for a single basic block BB. |
1162 | DEBUG_INSN is skipped since we do not want to involve DF related | |
1163 | staff as how it is handled in function pass_cprop_hardreg::execute. | |
1164 | ||
1165 | NOTE: Currently it is only used for shrink-wrap. Maybe extend it | |
1166 | to handle DEBUG_INSN for other uses. */ | |
1167 | ||
1168 | void | |
1169 | copyprop_hardreg_forward_bb_without_debug_insn (basic_block bb) | |
1170 | { | |
1171 | struct value_data *vd; | |
1172 | vd = XNEWVEC (struct value_data, 1); | |
1173 | init_value_data (vd); | |
1174 | ||
1175 | skip_debug_insn_p = true; | |
1176 | copyprop_hardreg_forward_1 (bb, vd); | |
1177 | free (vd); | |
1178 | skip_debug_insn_p = false; | |
1179 | } | |
1180 | ||
2d4749b6 | 1181 | static void |
1182 | validate_value_data (struct value_data *vd) | |
1183 | { | |
1184 | HARD_REG_SET set; | |
1185 | unsigned int i, j; | |
1186 | ||
1187 | CLEAR_HARD_REG_SET (set); | |
1188 | ||
1189 | for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i) | |
1190 | if (vd->e[i].oldest_regno == i) | |
1191 | { | |
1192 | if (vd->e[i].mode == VOIDmode) | |
1193 | { | |
1194 | if (vd->e[i].next_regno != INVALID_REGNUM) | |
1195 | internal_error ("validate_value_data: [%u] Bad next_regno for empty chain (%u)", | |
1196 | i, vd->e[i].next_regno); | |
1197 | continue; | |
1198 | } | |
1199 | ||
1200 | SET_HARD_REG_BIT (set, i); | |
1201 | ||
1202 | for (j = vd->e[i].next_regno; | |
1203 | j != INVALID_REGNUM; | |
1204 | j = vd->e[j].next_regno) | |
1205 | { | |
1206 | if (TEST_HARD_REG_BIT (set, j)) | |
1207 | internal_error ("validate_value_data: Loop in regno chain (%u)", | |
1208 | j); | |
1209 | if (vd->e[j].oldest_regno != i) | |
1210 | internal_error ("validate_value_data: [%u] Bad oldest_regno (%u)", | |
1211 | j, vd->e[j].oldest_regno); | |
1212 | ||
1213 | SET_HARD_REG_BIT (set, j); | |
1214 | } | |
1215 | } | |
1216 | ||
1217 | for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i) | |
1218 | if (! TEST_HARD_REG_BIT (set, i) | |
1219 | && (vd->e[i].mode != VOIDmode | |
1220 | || vd->e[i].oldest_regno != i | |
1221 | || vd->e[i].next_regno != INVALID_REGNUM)) | |
1222 | internal_error ("validate_value_data: [%u] Non-empty reg in chain (%s %u %i)", | |
1223 | i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno, | |
1224 | vd->e[i].next_regno); | |
1225 | } | |
382ecba7 | 1226 | |
2d4749b6 | 1227 | \f |
cbe8bda8 | 1228 | namespace { |
1229 | ||
/* Static descriptor for the hard-register copy propagation RTL pass;
   consumed by the rtl_opt_pass constructor below.  */

const pass_data pass_data_cprop_hardreg =
{
  RTL_PASS, /* type */
  "cprop_hardreg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CPROP_REGISTERS, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};
cbe8bda8 | 1242 | |
/* Pass object wrapping the hard-register copy propagation machinery
   defined in this file.  */

class pass_cprop_hardreg : public rtl_opt_pass
{
public:
  pass_cprop_hardreg (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cprop_hardreg, ctxt)
  {}

  /* opt_pass methods: */
  /* Run only when optimizing and register copy propagation is
     enabled (-fcprop-registers).  */
  virtual bool gate (function *)
    {
      return (optimize > 0 && (flag_cprop_registers));
    }

  virtual unsigned int execute (function *);

}; // class pass_cprop_hardreg
1259 | ||
/* Run hard-register copy propagation over every basic block of FUN.
   Per-block value-chain data is kept in ALL_VD, indexed by block index,
   so that a block with a single already-visited predecessor can start
   from the predecessor's final state.  Queued DEBUG_INSN changes are
   applied afterwards, but only for registers still live out of the
   block.  Always returns 0.  */

unsigned int
pass_cprop_hardreg::execute (function *fun)
{
  struct value_data *all_vd;
  basic_block bb;
  /* df_analyze is run lazily, at most once, and only if some block has
     pending debug-insn changes.  */
  bool analyze_called = false;

  /* One value_data per basic block.  */
  all_vd = XNEWVEC (struct value_data, last_basic_block_for_fn (fun));

  auto_sbitmap visited (last_basic_block_for_fn (fun));
  bitmap_clear (visited);

  FOR_EACH_BB_FN (bb, fun)
    {
      bitmap_set_bit (visited, bb->index);

      /* If a block has a single predecessor, that we've already
	 processed, begin with the value data that was live at
	 the end of the predecessor block.  */
      /* ??? Ought to use more intelligent queuing of blocks.  */
      if (single_pred_p (bb)
	  && bitmap_bit_p (visited, single_pred (bb)->index)
	  && ! (single_pred_edge (bb)->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
	{
	  /* Structure copy of the predecessor's final state.  The copy
	     must not share the predecessor's queued debug-insn change
	     lists, so clear those pointers (and the count) here.  */
	  all_vd[bb->index] = all_vd[single_pred (bb)->index];
	  if (all_vd[bb->index].n_debug_insn_changes)
	    {
	      unsigned int regno;

	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		{
		  if (all_vd[bb->index].e[regno].debug_insn_changes)
		    {
		      all_vd[bb->index].e[regno].debug_insn_changes = NULL;
		      /* Stop early once every queued change is cleared.  */
		      if (--all_vd[bb->index].n_debug_insn_changes == 0)
			break;
		    }
		}
	    }
	}
      else
	init_value_data (all_vd + bb->index);

      copyprop_hardreg_forward_1 (bb, all_vd + bb->index);
    }

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    {
      /* Second walk: apply the debug-insn changes queued during the
	 first walk, for registers that are live out of their block.  */
      FOR_EACH_BB_FN (bb, fun)
	if (bitmap_bit_p (visited, bb->index)
	    && all_vd[bb->index].n_debug_insn_changes)
	  {
	    unsigned int regno;
	    bitmap live;

	    /* Liveness info is only needed (and computed) if at least
	       one block queued debug-insn changes.  */
	    if (!analyze_called)
	      {
		df_analyze ();
		analyze_called = true;
	      }
	    live = df_get_live_out (bb);
	    for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	      if (all_vd[bb->index].e[regno].debug_insn_changes)
		{
		  if (REGNO_REG_SET_P (live, regno))
		    apply_debug_insn_changes (all_vd + bb->index, regno);
		  if (all_vd[bb->index].n_debug_insn_changes == 0)
		    break;
		}
	  }

      /* All queued changes are dead now; release their allocation pool.  */
      queued_debug_insn_change_pool.release ();
    }

  free (all_vd);
  return 0;
}
1337 | ||
cbe8bda8 | 1338 | } // anon namespace |
1339 | ||
/* Factory returning a freshly allocated hard-register copy propagation
   pass object; the caller takes ownership.  */

rtl_opt_pass *
make_pass_cprop_hardreg (gcc::context *ctxt)
{
  return new pass_cprop_hardreg (ctxt);
}