1 /* Standard problems for dataflow support routines.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
3 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "insn-config.h"
32 #include "recog.h"
33 #include "function.h"
34 #include "regs.h"
35 #include "output.h"
36 #include "alloc-pool.h"
37 #include "flags.h"
38 #include "hard-reg-set.h"
39 #include "basic-block.h"
40 #include "sbitmap.h"
41 #include "bitmap.h"
42 #include "target.h"
43 #include "timevar.h"
44 #include "df.h"
45 #include "except.h"
46 #include "dce.h"
47 #include "vecprim.h"
48
49 /* Note that turning REG_DEAD_DEBUGGING on will cause
50 gcc.c-torture/unsorted/dump-noaddr.c to fail because it prints
51 addresses in the dumps. */
52 #if 0
53 #define REG_DEAD_DEBUGGING
54 #endif
55
56 #define DF_SPARSE_THRESHOLD 32
57
58 static bitmap_head seen_in_block;
59 static bitmap_head seen_in_insn;
60
61 \f
62 /*----------------------------------------------------------------------------
63    Public access functions for the dataflow problems.
64 ----------------------------------------------------------------------------*/
65 /* Get the live-out set for BB no matter which liveness problem happens
66    to be defined.  This function is used by the register allocators,
67    which choose different dataflow problems depending on the optimization
68    level.  */
69
70 bitmap
71 df_get_live_out (basic_block bb)
72 {
73 gcc_assert (df_lr);
74
75 if (df_live)
76 return DF_LIVE_OUT (bb);
77 else
78 return DF_LR_OUT (bb);
79 }
80
81 /* Get the live-in set for BB no matter which liveness problem happens
82    to be defined.  This function is used by the register allocators,
83    which choose different dataflow problems depending on the optimization
84    level.  */
85
86 bitmap
87 df_get_live_in (basic_block bb)
88 {
89 gcc_assert (df_lr);
90
91 if (df_live)
92 return DF_LIVE_IN (bb);
93 else
94 return DF_LR_IN (bb);
95 }
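
/* A minimal usage sketch (illustrative only, not part of this file's
   interface): a pass can query liveness through the accessors above
   without caring whether the DF_LIVE problem is active.  The function
   name and the REGNO parameter below are hypothetical.  */
#if 0
static bool
example_reg_live_on_exit (basic_block bb, unsigned int regno)
{
  /* Falls back to the LR solution when DF_LIVE has not been added,
     e.g. at -O0.  */
  return bitmap_bit_p (df_get_live_out (bb), regno) != 0;
}
#endif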
96
97 /*----------------------------------------------------------------------------
98 Utility functions.
99 ----------------------------------------------------------------------------*/
100
101 /* Generic versions to get the void* version of the block info. Only
102 used inside the problem instance vectors. */
103
104 /* Dump a def-use or use-def chain for REF to FILE. */
105
106 void
107 df_chain_dump (struct df_link *link, FILE *file)
108 {
109 fprintf (file, "{ ");
110 for (; link; link = link->next)
111 {
112 fprintf (file, "%c%d(bb %d insn %d) ",
113 DF_REF_REG_DEF_P (link->ref)
114 ? 'd'
115 : (DF_REF_FLAGS (link->ref) & DF_REF_IN_NOTE) ? 'e' : 'u',
116 DF_REF_ID (link->ref),
117 DF_REF_BBNO (link->ref),
118 DF_REF_IS_ARTIFICIAL (link->ref)
119 ? -1 : DF_REF_INSN_UID (link->ref));
120 }
121 fprintf (file, "}");
122 }
123
124
125 /* Print some basic block info as part of df_dump. */
126
127 void
128 df_print_bb_index (basic_block bb, FILE *file)
129 {
130 edge e;
131 edge_iterator ei;
132
133 fprintf (file, "\n( ");
134 FOR_EACH_EDGE (e, ei, bb->preds)
135 {
136 basic_block pred = e->src;
137 fprintf (file, "%d%s ", pred->index, e->flags & EDGE_EH ? "(EH)" : "");
138 }
139 fprintf (file, ")->[%d]->( ", bb->index);
140 FOR_EACH_EDGE (e, ei, bb->succs)
141 {
142 basic_block succ = e->dest;
143 fprintf (file, "%d%s ", succ->index, e->flags & EDGE_EH ? "(EH)" : "");
144 }
145 fprintf (file, ")\n");
146 }
147
148 \f
149 /*----------------------------------------------------------------------------
150 REACHING DEFINITIONS
151
152 Find the locations in the function where each definition site for a
153 pseudo reaches. In and out bitvectors are built for each basic
154 block. The id field in the ref is used to index into these sets.
155 See df.h for details.
156 ----------------------------------------------------------------------------*/
157
158 /* This problem plays a large number of games for the sake of
159 efficiency.
160
161 1) The order of the bits in the bitvectors. After the scanning
162 phase, all of the defs are sorted. All of the defs for the reg 0
163 are first, followed by all defs for reg 1 and so on.
164
165    2) There are two kill sets, one if the number of defs is less than
166    or equal to DF_SPARSE_THRESHOLD and another if the number of defs is
167 greater.
168
169 <= : Data is built directly in the kill set.
170
171 > : One level of indirection is used to keep from generating long
172 strings of 1 bits in the kill sets. Bitvectors that are indexed
173 by the regnum are used to represent that there is a killing def
174 for the register. The confluence and transfer functions use
175 these along with the bitmap_clear_range call to remove ranges of
176 bits without actually generating a knockout vector.
177
178    The kill and sparse_kill sets, as well as dense_invalidated_by_call
179    and sparse_invalidated_by_call, all play this game.  */
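
/* A worked example of the two kill-set representations (the numbers
   here are made up purely for illustration): suppose reg 42 has 100
   defs whose ids were sorted into the range [500, 599].  Since 100 >
   DF_SPARSE_THRESHOLD, a block that kills reg 42 just sets bit 42 in
   sparse_kill; the transfer function later expands that with
   bitmap_clear_range (in, DF_DEFS_BEGIN (42), DF_DEFS_COUNT (42)).
   A reg with only 3 defs, say ids [17, 19], is instead recorded
   densely via bitmap_set_range (&kill, 17, 3).  */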
180
181 /* Private data used to compute the solution for this problem. These
182 data structures are not accessible outside of this module. */
183 struct df_rd_problem_data
184 {
185   /* Call-invalidated defs of regs with many defs, recorded by regno.  */
186   bitmap_head sparse_invalidated_by_call;
187   /* Call-invalidated defs of the remaining regs, recorded by def id.  */
188   bitmap_head dense_invalidated_by_call;
189 /* An obstack for the bitmaps we need for this problem. */
190 bitmap_obstack rd_bitmaps;
191 };
192
193
194 /* Free basic block info. */
195
196 static void
197 df_rd_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
198 void *vbb_info)
199 {
200 struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
201 if (bb_info)
202 {
203 bitmap_clear (&bb_info->kill);
204 bitmap_clear (&bb_info->sparse_kill);
205 bitmap_clear (&bb_info->gen);
206 bitmap_clear (&bb_info->in);
207 bitmap_clear (&bb_info->out);
208 }
209 }
210
211
212 /* Allocate or reset bitmaps for DF_RD blocks. The solution bits are
213 not touched unless the block is new. */
214
215 static void
216 df_rd_alloc (bitmap all_blocks)
217 {
218 unsigned int bb_index;
219 bitmap_iterator bi;
220 struct df_rd_problem_data *problem_data;
221
222 if (df_rd->problem_data)
223 {
224 problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
225 bitmap_clear (&problem_data->sparse_invalidated_by_call);
226 bitmap_clear (&problem_data->dense_invalidated_by_call);
227 }
228 else
229 {
230 problem_data = XNEW (struct df_rd_problem_data);
231 df_rd->problem_data = problem_data;
232
233 bitmap_obstack_initialize (&problem_data->rd_bitmaps);
234 bitmap_initialize (&problem_data->sparse_invalidated_by_call,
235 &problem_data->rd_bitmaps);
236 bitmap_initialize (&problem_data->dense_invalidated_by_call,
237 &problem_data->rd_bitmaps);
238 }
239
240 df_grow_bb_info (df_rd);
241
242   /* Because of the clustering of all def sites for the same pseudo,
243 we have to process all of the blocks before doing the
244 analysis. */
245
246 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
247 {
248 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
249
250 /* When bitmaps are already initialized, just clear them. */
251 if (bb_info->kill.obstack)
252 {
253 bitmap_clear (&bb_info->kill);
254 bitmap_clear (&bb_info->sparse_kill);
255 bitmap_clear (&bb_info->gen);
256 }
257 else
258 {
259 bitmap_initialize (&bb_info->kill, &problem_data->rd_bitmaps);
260 bitmap_initialize (&bb_info->sparse_kill, &problem_data->rd_bitmaps);
261 bitmap_initialize (&bb_info->gen, &problem_data->rd_bitmaps);
262 bitmap_initialize (&bb_info->in, &problem_data->rd_bitmaps);
263 bitmap_initialize (&bb_info->out, &problem_data->rd_bitmaps);
264 }
265 }
266 df_rd->optional_p = true;
267 }
268
269
270 /* Add the effect of the top artificial defs of BB to the reaching definitions
271 bitmap LOCAL_RD. */
272
273 void
274 df_rd_simulate_artificial_defs_at_top (basic_block bb, bitmap local_rd)
275 {
276 int bb_index = bb->index;
277 df_ref *def_rec;
278 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
279 {
280 df_ref def = *def_rec;
281 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
282 {
283 unsigned int dregno = DF_REF_REGNO (def);
284 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
285 bitmap_clear_range (local_rd,
286 DF_DEFS_BEGIN (dregno),
287 DF_DEFS_COUNT (dregno));
288 bitmap_set_bit (local_rd, DF_REF_ID (def));
289 }
290 }
291 }
292
293 /* Add the effect of the defs of INSN to the reaching definitions bitmap
294 LOCAL_RD. */
295
296 void
297 df_rd_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn,
298 bitmap local_rd)
299 {
300 unsigned uid = INSN_UID (insn);
301 df_ref *def_rec;
302
303 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
304 {
305 df_ref def = *def_rec;
306 unsigned int dregno = DF_REF_REGNO (def);
307 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
308 || (dregno >= FIRST_PSEUDO_REGISTER))
309 {
310 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
311 bitmap_clear_range (local_rd,
312 DF_DEFS_BEGIN (dregno),
313 DF_DEFS_COUNT (dregno));
314 if (!(DF_REF_FLAGS (def)
315 & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
316 bitmap_set_bit (local_rd, DF_REF_ID (def));
317 }
318 }
319 }
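
/* An illustrative sketch (not used anywhere in GCC) of how a client
   might combine the two simulation hooks above to track reaching
   definitions while walking a block forwards; it assumes the RD
   problem has already been solved so the DF_RD in sets are valid.  */
#if 0
static void
example_simulate_rd_through_bb (basic_block bb, bitmap local_rd)
{
  rtx insn;

  /* Start from the solved IN set of BB, apply the artificial defs at
     the top, then each insn in order.  On exit LOCAL_RD approximates
     the RD OUT set of BB.  */
  bitmap_copy (local_rd, &df_rd_get_bb_info (bb->index)->in);
  df_rd_simulate_artificial_defs_at_top (bb, local_rd);
  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      df_rd_simulate_one_insn (bb, insn, local_rd);
}
#endif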
320
321 /* Process a list of DEFs for df_rd_bb_local_compute. This is a bit
322 more complicated than just simulating, because we must produce the
323 gen and kill sets and hence deal with the two possible representations
324 of kill sets. */
325
326 static void
327 df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info,
328 df_ref *def_rec,
329 int top_flag)
330 {
331 while (*def_rec)
332 {
333 df_ref def = *def_rec;
334 if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
335 {
336 unsigned int regno = DF_REF_REGNO (def);
337 unsigned int begin = DF_DEFS_BEGIN (regno);
338 unsigned int n_defs = DF_DEFS_COUNT (regno);
339
340 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
341 || (regno >= FIRST_PSEUDO_REGISTER))
342 {
343 /* Only the last def(s) for a regno in the block have any
344 effect. */
345 if (!bitmap_bit_p (&seen_in_block, regno))
346 {
347 /* The first def for regno in insn gets to knock out the
348 defs from other instructions. */
349 if ((!bitmap_bit_p (&seen_in_insn, regno))
350 /* If the def is to only part of the reg, it does
351 not kill the other defs that reach here. */
352 && (!(DF_REF_FLAGS (def) &
353 (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))))
354 {
355 if (n_defs > DF_SPARSE_THRESHOLD)
356 {
357 bitmap_set_bit (&bb_info->sparse_kill, regno);
358 bitmap_clear_range(&bb_info->gen, begin, n_defs);
359 }
360 else
361 {
362 bitmap_set_range (&bb_info->kill, begin, n_defs);
363 bitmap_clear_range (&bb_info->gen, begin, n_defs);
364 }
365 }
366
367 bitmap_set_bit (&seen_in_insn, regno);
368 /* All defs for regno in the instruction may be put into
369 the gen set. */
370 if (!(DF_REF_FLAGS (def)
371 & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
372 bitmap_set_bit (&bb_info->gen, DF_REF_ID (def));
373 }
374 }
375 }
376 def_rec++;
377 }
378 }
379
380 /* Compute local reaching def info for basic block BB. */
381
382 static void
383 df_rd_bb_local_compute (unsigned int bb_index)
384 {
385 basic_block bb = BASIC_BLOCK (bb_index);
386 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
387 rtx insn;
388
389 bitmap_clear (&seen_in_block);
390 bitmap_clear (&seen_in_insn);
391
392 /* Artificials are only hard regs. */
393 if (!(df->changeable_flags & DF_NO_HARD_REGS))
394 df_rd_bb_local_compute_process_def (bb_info,
395 df_get_artificial_defs (bb_index),
396 0);
397
398 FOR_BB_INSNS_REVERSE (bb, insn)
399 {
400 unsigned int uid = INSN_UID (insn);
401
402 if (!INSN_P (insn))
403 continue;
404
405 df_rd_bb_local_compute_process_def (bb_info,
406 DF_INSN_UID_DEFS (uid), 0);
407
408 /* This complex dance with the two bitmaps is required because
409 instructions can assign twice to the same pseudo. This
410 generally happens with calls that will have one def for the
411 result and another def for the clobber. If only one vector
412 is used and the clobber goes first, the result will be
413 lost. */
414 bitmap_ior_into (&seen_in_block, &seen_in_insn);
415 bitmap_clear (&seen_in_insn);
416 }
417
418 /* Process the artificial defs at the top of the block last since we
419 are going backwards through the block and these are logically at
420 the start. */
421 if (!(df->changeable_flags & DF_NO_HARD_REGS))
422 df_rd_bb_local_compute_process_def (bb_info,
423 df_get_artificial_defs (bb_index),
424 DF_REF_AT_TOP);
425 }
426
427
428 /* Compute local reaching def info for each basic block within BLOCKS. */
429
430 static void
431 df_rd_local_compute (bitmap all_blocks)
432 {
433 unsigned int bb_index;
434 bitmap_iterator bi;
435 unsigned int regno;
436 struct df_rd_problem_data *problem_data
437 = (struct df_rd_problem_data *) df_rd->problem_data;
438 bitmap sparse_invalidated = &problem_data->sparse_invalidated_by_call;
439 bitmap dense_invalidated = &problem_data->dense_invalidated_by_call;
440
441 bitmap_initialize (&seen_in_block, &df_bitmap_obstack);
442 bitmap_initialize (&seen_in_insn, &df_bitmap_obstack);
443
444 df_maybe_reorganize_def_refs (DF_REF_ORDER_BY_REG);
445
446 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
447 {
448 df_rd_bb_local_compute (bb_index);
449 }
450
451 /* Set up the knockout bit vectors to be applied across EH_EDGES. */
452 EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, regno, bi)
453 {
454 if (DF_DEFS_COUNT (regno) > DF_SPARSE_THRESHOLD)
455 bitmap_set_bit (sparse_invalidated, regno);
456 else
457 bitmap_set_range (dense_invalidated,
458 DF_DEFS_BEGIN (regno),
459 DF_DEFS_COUNT (regno));
460 }
461
462 bitmap_clear (&seen_in_block);
463 bitmap_clear (&seen_in_insn);
464 }
465
466
467 /* Initialize the solution bit vectors for problem. */
468
469 static void
470 df_rd_init_solution (bitmap all_blocks)
471 {
472 unsigned int bb_index;
473 bitmap_iterator bi;
474
475 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
476 {
477 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
478
479 bitmap_copy (&bb_info->out, &bb_info->gen);
480 bitmap_clear (&bb_info->in);
481 }
482 }
483
484 /* The IN set of the target gets the OR of the OUT set of the source.  */
485
486 static bool
487 df_rd_confluence_n (edge e)
488 {
489 bitmap op1 = &df_rd_get_bb_info (e->dest->index)->in;
490 bitmap op2 = &df_rd_get_bb_info (e->src->index)->out;
491 bool changed = false;
492
493 if (e->flags & EDGE_FAKE)
494 return false;
495
496 if (e->flags & EDGE_EH)
497 {
498 struct df_rd_problem_data *problem_data
499 = (struct df_rd_problem_data *) df_rd->problem_data;
500 bitmap sparse_invalidated = &problem_data->sparse_invalidated_by_call;
501 bitmap dense_invalidated = &problem_data->dense_invalidated_by_call;
502 bitmap_iterator bi;
503 unsigned int regno;
504 bitmap_head tmp;
505
506 bitmap_initialize (&tmp, &df_bitmap_obstack);
507 bitmap_copy (&tmp, op2);
508 bitmap_and_compl_into (&tmp, dense_invalidated);
509
510 EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
511 {
512 bitmap_clear_range (&tmp,
513 DF_DEFS_BEGIN (regno),
514 DF_DEFS_COUNT (regno));
515 }
516 changed |= bitmap_ior_into (op1, &tmp);
517 bitmap_clear (&tmp);
518 return changed;
519 }
520 else
521 return bitmap_ior_into (op1, op2);
522 }
523
524
525 /* Transfer function. */
526
527 static bool
528 df_rd_transfer_function (int bb_index)
529 {
530 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
531 unsigned int regno;
532 bitmap_iterator bi;
533 bitmap in = &bb_info->in;
534 bitmap out = &bb_info->out;
535 bitmap gen = &bb_info->gen;
536 bitmap kill = &bb_info->kill;
537 bitmap sparse_kill = &bb_info->sparse_kill;
538
539 if (bitmap_empty_p (sparse_kill))
540 return bitmap_ior_and_compl (out, gen, in, kill);
541 else
542 {
543 struct df_rd_problem_data *problem_data;
544 bool changed = false;
545 bitmap_head tmp;
546
547 /* Note that TMP is _not_ a temporary bitmap if we end up replacing
548 OUT with TMP. Therefore, allocate TMP in the RD bitmaps obstack. */
549 problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
550 bitmap_initialize (&tmp, &problem_data->rd_bitmaps);
551
552 bitmap_copy (&tmp, in);
553 EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
554 {
555 bitmap_clear_range (&tmp,
556 DF_DEFS_BEGIN (regno),
557 DF_DEFS_COUNT (regno));
558 }
559 bitmap_and_compl_into (&tmp, kill);
560 bitmap_ior_into (&tmp, gen);
561 changed = !bitmap_equal_p (&tmp, out);
562 if (changed)
563 {
564 bitmap_clear (out);
565 bb_info->out = tmp;
566 }
567 else
568 bitmap_clear (&tmp);
569 return changed;
570 }
571 }
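
/* In equation form, the transfer function above computes

     OUT(bb) = GEN(bb) U (IN(bb) - KILL(bb) - expand (SPARSE_KILL(bb)))

   where expand () turns every regno R set in SPARSE_KILL into the def
   id range [DF_DEFS_BEGIN (R), DF_DEFS_BEGIN (R) + DF_DEFS_COUNT (R)).  */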
572
573
574 /* Free all storage associated with the problem. */
575
576 static void
577 df_rd_free (void)
578 {
579 struct df_rd_problem_data *problem_data
580 = (struct df_rd_problem_data *) df_rd->problem_data;
581
582 if (problem_data)
583 {
584 bitmap_obstack_release (&problem_data->rd_bitmaps);
585
586 df_rd->block_info_size = 0;
587 free (df_rd->block_info);
588 df_rd->block_info = NULL;
589 free (df_rd->problem_data);
590 }
591 free (df_rd);
592 }
593
594
595 /* Debugging info. */
596
597 static void
598 df_rd_start_dump (FILE *file)
599 {
600 struct df_rd_problem_data *problem_data
601 = (struct df_rd_problem_data *) df_rd->problem_data;
602 unsigned int m = DF_REG_SIZE(df);
603 unsigned int regno;
604
605 if (!df_rd->block_info)
606 return;
607
608 fprintf (file, ";; Reaching defs:\n\n");
609
610 fprintf (file, " sparse invalidated \t");
611 dump_bitmap (file, &problem_data->sparse_invalidated_by_call);
612 fprintf (file, " dense invalidated \t");
613 dump_bitmap (file, &problem_data->dense_invalidated_by_call);
614
615 for (regno = 0; regno < m; regno++)
616 if (DF_DEFS_COUNT (regno))
617 fprintf (file, "%d[%d,%d] ", regno,
618 DF_DEFS_BEGIN (regno),
619 DF_DEFS_COUNT (regno));
620 fprintf (file, "\n");
621
622 }
623
624
625 /* Debugging info at top of bb. */
626
627 static void
628 df_rd_top_dump (basic_block bb, FILE *file)
629 {
630 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
631 if (!bb_info)
632 return;
633
634 fprintf (file, ";; rd in \t(%d)\n", (int) bitmap_count_bits (&bb_info->in));
635 dump_bitmap (file, &bb_info->in);
636 fprintf (file, ";; rd gen \t(%d)\n", (int) bitmap_count_bits (&bb_info->gen));
637 dump_bitmap (file, &bb_info->gen);
638 fprintf (file, ";; rd kill\t(%d)\n", (int) bitmap_count_bits (&bb_info->kill));
639 dump_bitmap (file, &bb_info->kill);
640 }
641
642
643 /* Debugging info at bottom of bb.  */
644
645 static void
646 df_rd_bottom_dump (basic_block bb, FILE *file)
647 {
648 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
649 if (!bb_info)
650 return;
651
652 fprintf (file, ";; rd out \t(%d)\n", (int) bitmap_count_bits (&bb_info->out));
653 dump_bitmap (file, &bb_info->out);
654 }
655
656 /* All of the information associated with every instance of the problem. */
657
658 static struct df_problem problem_RD =
659 {
660 DF_RD, /* Problem id. */
661 DF_FORWARD, /* Direction. */
662 df_rd_alloc, /* Allocate the problem specific data. */
663 NULL, /* Reset global information. */
664 df_rd_free_bb_info, /* Free basic block info. */
665 df_rd_local_compute, /* Local compute function. */
666 df_rd_init_solution, /* Init the solution specific data. */
667 df_worklist_dataflow, /* Worklist solver. */
668 NULL, /* Confluence operator 0. */
669 df_rd_confluence_n, /* Confluence operator n. */
670 df_rd_transfer_function, /* Transfer function. */
671 NULL, /* Finalize function. */
672 df_rd_free, /* Free all of the problem information. */
673 df_rd_free, /* Remove this problem from the stack of dataflow problems. */
674 df_rd_start_dump, /* Debugging. */
675 df_rd_top_dump, /* Debugging start block. */
676 df_rd_bottom_dump, /* Debugging end block. */
677 NULL, /* Incremental solution verify start. */
678 NULL, /* Incremental solution verify end. */
679 NULL, /* Dependent problem. */
680 sizeof (struct df_rd_bb_info),/* Size of entry of block_info array. */
681 TV_DF_RD, /* Timing variable. */
682 true /* Reset blocks on dropping out of blocks_to_analyze. */
683 };
684
685
686
687 /* Create a new RD instance and add it to the existing instance
688 of DF. */
689
690 void
691 df_rd_add_problem (void)
692 {
693 df_add_problem (&problem_RD);
694 }
695
696
697 \f
698 /*----------------------------------------------------------------------------
699 LIVE REGISTERS
700
701 Find the locations in the function where any use of a pseudo can
702 reach in the backwards direction. In and out bitvectors are built
703 for each basic block. The regno is used to index into these sets.
704 See df.h for details.
705 ----------------------------------------------------------------------------*/
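
/* Written as the usual backward dataflow equations, this problem
   computes

     IN(bb)  = USE(bb) U (OUT(bb) - DEF(bb))
     OUT(bb) = union over successors S of IN(S)

   with two adjustments made by the confluence functions below: the
   registers in df->hardware_regs_used are forced live everywhere, and
   the call-clobbered registers are removed when flowing backwards
   across an EH edge.  */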
706
707 /* Private data used to verify the solution for this problem. */
708 struct df_lr_problem_data
709 {
710 bitmap_head *in;
711 bitmap_head *out;
712 /* An obstack for the bitmaps we need for this problem. */
713 bitmap_obstack lr_bitmaps;
714 };
715
716 /* Free basic block info. */
717
718 static void
719 df_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
720 void *vbb_info)
721 {
722 struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
723 if (bb_info)
724 {
725 bitmap_clear (&bb_info->use);
726 bitmap_clear (&bb_info->def);
727 bitmap_clear (&bb_info->in);
728 bitmap_clear (&bb_info->out);
729 }
730 }
731
732
733 /* Allocate or reset bitmaps for DF_LR blocks. The solution bits are
734 not touched unless the block is new. */
735
736 static void
737 df_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
738 {
739 unsigned int bb_index;
740 bitmap_iterator bi;
741 struct df_lr_problem_data *problem_data;
742
743 df_grow_bb_info (df_lr);
744 if (df_lr->problem_data)
745 problem_data = (struct df_lr_problem_data *) df_lr->problem_data;
746 else
747 {
748 problem_data = XNEW (struct df_lr_problem_data);
749 df_lr->problem_data = problem_data;
750
751 problem_data->out = NULL;
752 problem_data->in = NULL;
753 bitmap_obstack_initialize (&problem_data->lr_bitmaps);
754 }
755
756 EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
757 {
758 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
759
760 /* When bitmaps are already initialized, just clear them. */
761 if (bb_info->use.obstack)
762 {
763 bitmap_clear (&bb_info->def);
764 bitmap_clear (&bb_info->use);
765 }
766 else
767 {
768 bitmap_initialize (&bb_info->use, &problem_data->lr_bitmaps);
769 bitmap_initialize (&bb_info->def, &problem_data->lr_bitmaps);
770 bitmap_initialize (&bb_info->in, &problem_data->lr_bitmaps);
771 bitmap_initialize (&bb_info->out, &problem_data->lr_bitmaps);
772 }
773 }
774
775 df_lr->optional_p = false;
776 }
777
778
779 /* Reset the global solution for recalculation. */
780
781 static void
782 df_lr_reset (bitmap all_blocks)
783 {
784 unsigned int bb_index;
785 bitmap_iterator bi;
786
787 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
788 {
789 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
790 gcc_assert (bb_info);
791 bitmap_clear (&bb_info->in);
792 bitmap_clear (&bb_info->out);
793 }
794 }
795
796
797 /* Compute local live register info for basic block BB. */
798
799 static void
800 df_lr_bb_local_compute (unsigned int bb_index)
801 {
802 basic_block bb = BASIC_BLOCK (bb_index);
803 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
804 rtx insn;
805 df_ref *def_rec;
806 df_ref *use_rec;
807
808 /* Process the registers set in an exception handler. */
809 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
810 {
811 df_ref def = *def_rec;
812 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
813 {
814 unsigned int dregno = DF_REF_REGNO (def);
815 bitmap_set_bit (&bb_info->def, dregno);
816 bitmap_clear_bit (&bb_info->use, dregno);
817 }
818 }
819
820 /* Process the hardware registers that are always live. */
821 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
822 {
823 df_ref use = *use_rec;
824 /* Add use to set of uses in this BB. */
825 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
826 bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
827 }
828
829 FOR_BB_INSNS_REVERSE (bb, insn)
830 {
831 unsigned int uid = INSN_UID (insn);
832
833 if (!NONDEBUG_INSN_P (insn))
834 continue;
835
836 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
837 {
838 df_ref def = *def_rec;
839 /* If the def is to only part of the reg, it does
840 not kill the other defs that reach here. */
841 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
842 {
843 unsigned int dregno = DF_REF_REGNO (def);
844 bitmap_set_bit (&bb_info->def, dregno);
845 bitmap_clear_bit (&bb_info->use, dregno);
846 }
847 }
848
849 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
850 {
851 df_ref use = *use_rec;
852 /* Add use to set of uses in this BB. */
853 bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
854 }
855 }
856
857 /* Process the registers set in an exception handler or the hard
858 frame pointer if this block is the target of a non-local
859 goto. */
860 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
861 {
862 df_ref def = *def_rec;
863 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
864 {
865 unsigned int dregno = DF_REF_REGNO (def);
866 bitmap_set_bit (&bb_info->def, dregno);
867 bitmap_clear_bit (&bb_info->use, dregno);
868 }
869 }
870
871 #ifdef EH_USES
872 /* Process the uses that are live into an exception handler. */
873 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
874 {
875 df_ref use = *use_rec;
876 /* Add use to set of uses in this BB. */
877 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
878 bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
879 }
880 #endif
881
882 /* If the df_live problem is not defined, such as at -O0 and -O1, we
883 still need to keep the luids up to date. This is normally done
884 in the df_live problem since this problem has a forwards
885 scan. */
886 if (!df_live)
887 df_recompute_luids (bb);
888 }
889
890
891 /* Compute local live register info for each basic block within BLOCKS. */
892
893 static void
894 df_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
895 {
896 unsigned int bb_index;
897 bitmap_iterator bi;
898
899 bitmap_clear (&df->hardware_regs_used);
900
901 /* The all-important stack pointer must always be live. */
902 bitmap_set_bit (&df->hardware_regs_used, STACK_POINTER_REGNUM);
903
904 /* Before reload, there are a few registers that must be forced
905 live everywhere -- which might not already be the case for
906 blocks within infinite loops. */
907 if (!reload_completed)
908 {
909 unsigned int pic_offset_table_regnum = PIC_OFFSET_TABLE_REGNUM;
910 /* Any reference to any pseudo before reload is a potential
911 reference of the frame pointer. */
912 bitmap_set_bit (&df->hardware_regs_used, FRAME_POINTER_REGNUM);
913
914 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
915 /* Pseudos with argument area equivalences may require
916 reloading via the argument pointer. */
917 if (fixed_regs[ARG_POINTER_REGNUM])
918 bitmap_set_bit (&df->hardware_regs_used, ARG_POINTER_REGNUM);
919 #endif
920
921 /* Any constant, or pseudo with constant equivalences, may
922 require reloading from memory using the pic register. */
923 if (pic_offset_table_regnum != INVALID_REGNUM
924 && fixed_regs[pic_offset_table_regnum])
925 bitmap_set_bit (&df->hardware_regs_used, pic_offset_table_regnum);
926 }
927
928 EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
929 {
930 if (bb_index == EXIT_BLOCK)
931 {
932 /* The exit block is special for this problem and its bits are
933 computed from thin air. */
934 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (EXIT_BLOCK);
935 bitmap_copy (&bb_info->use, df->exit_block_uses);
936 }
937 else
938 df_lr_bb_local_compute (bb_index);
939 }
940
941 bitmap_clear (df_lr->out_of_date_transfer_functions);
942 }
943
944
945 /* Initialize the solution vectors. */
946
947 static void
948 df_lr_init (bitmap all_blocks)
949 {
950 unsigned int bb_index;
951 bitmap_iterator bi;
952
953 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
954 {
955 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
956 bitmap_copy (&bb_info->in, &bb_info->use);
957 bitmap_clear (&bb_info->out);
958 }
959 }
960
961
962 /* Confluence function that processes infinite loops. This might be a
963 noreturn function that throws. And even if it isn't, getting the
964 unwind info right helps debugging. */
965 static void
966 df_lr_confluence_0 (basic_block bb)
967 {
968 bitmap op1 = &df_lr_get_bb_info (bb->index)->out;
969 if (bb != EXIT_BLOCK_PTR)
970 bitmap_copy (op1, &df->hardware_regs_used);
971 }
972
973
974 /* Confluence function that ignores fake edges. */
975
976 static bool
977 df_lr_confluence_n (edge e)
978 {
979 bitmap op1 = &df_lr_get_bb_info (e->src->index)->out;
980 bitmap op2 = &df_lr_get_bb_info (e->dest->index)->in;
981 bool changed = false;
982
983 /* Call-clobbered registers die across exception and call edges. */
984 /* ??? Abnormal call edges ignored for the moment, as this gets
985 confused by sibling call edges, which crashes reg-stack. */
986 if (e->flags & EDGE_EH)
987 changed = bitmap_ior_and_compl_into (op1, op2, regs_invalidated_by_call_regset);
988 else
989 changed = bitmap_ior_into (op1, op2);
990
991 changed |= bitmap_ior_into (op1, &df->hardware_regs_used);
992 return changed;
993 }
994
995
996 /* Transfer function. */
997
998 static bool
999 df_lr_transfer_function (int bb_index)
1000 {
1001 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
1002 bitmap in = &bb_info->in;
1003 bitmap out = &bb_info->out;
1004 bitmap use = &bb_info->use;
1005 bitmap def = &bb_info->def;
1006
1007 return bitmap_ior_and_compl (in, use, out, def);
1008 }
1009
1010
1011 /* Run the fast dce as a side effect of building LR. */
1012
1013 static void
1014 df_lr_finalize (bitmap all_blocks)
1015 {
1016 df_lr->solutions_dirty = false;
1017 if (df->changeable_flags & DF_LR_RUN_DCE)
1018 {
1019 run_fast_df_dce ();
1020
1021 /* If dce deletes some instructions, we need to recompute the lr
1022 solution before proceeding further. The problem is that fast
1023 dce is a pessimistic dataflow algorithm. In the case where
1024 it deletes a statement S inside of a loop, the uses inside of
1025 S may not be deleted from the dataflow solution because they
1026 were carried around the loop. While it is conservatively
1027 correct to leave these extra bits, the standards of df
1028 require that we maintain the best possible (least fixed
1029 point) solution. The only way to do that is to redo the
1030 iteration from the beginning. See PR35805 for an
1031 example. */
1032 if (df_lr->solutions_dirty)
1033 {
1034 df_clear_flags (DF_LR_RUN_DCE);
1035 df_lr_alloc (all_blocks);
1036 df_lr_local_compute (all_blocks);
1037 df_worklist_dataflow (df_lr, all_blocks, df->postorder, df->n_blocks);
1038 df_lr_finalize (all_blocks);
1039 df_set_flags (DF_LR_RUN_DCE);
1040 }
1041 }
1042 }
1043
1044
1045 /* Free all storage associated with the problem. */
1046
1047 static void
1048 df_lr_free (void)
1049 {
1050 struct df_lr_problem_data *problem_data
1051 = (struct df_lr_problem_data *) df_lr->problem_data;
1052 if (df_lr->block_info)
1053 {
1054
1055 df_lr->block_info_size = 0;
1056 free (df_lr->block_info);
1057 df_lr->block_info = NULL;
1058 bitmap_obstack_release (&problem_data->lr_bitmaps);
1059 free (df_lr->problem_data);
1060 df_lr->problem_data = NULL;
1061 }
1062
1063 BITMAP_FREE (df_lr->out_of_date_transfer_functions);
1064 free (df_lr);
1065 }
1066
1067
1068 /* Debugging info at top of bb. */
1069
1070 static void
1071 df_lr_top_dump (basic_block bb, FILE *file)
1072 {
1073 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
1074 struct df_lr_problem_data *problem_data;
1075 if (!bb_info)
1076 return;
1077
1078 fprintf (file, ";; lr in \t");
1079 df_print_regset (file, &bb_info->in);
1080 if (df_lr->problem_data)
1081 {
1082 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1083 if (problem_data->in)
1084 {
1085 fprintf (file, ";; old in \t");
1086 df_print_regset (file, &problem_data->in[bb->index]);
1087 }
1088 }
1089 fprintf (file, ";; lr use \t");
1090 df_print_regset (file, &bb_info->use);
1091 fprintf (file, ";; lr def \t");
1092 df_print_regset (file, &bb_info->def);
1093 }
1094
1095
1096 /* Debugging info at bottom of bb. */
1097
1098 static void
1099 df_lr_bottom_dump (basic_block bb, FILE *file)
1100 {
1101 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
1102 struct df_lr_problem_data *problem_data;
1103 if (!bb_info)
1104 return;
1105
1106 fprintf (file, ";; lr out \t");
1107 df_print_regset (file, &bb_info->out);
1108 if (df_lr->problem_data)
1109 {
1110 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1111 if (problem_data->out)
1112 {
1113 fprintf (file, ";; old out \t");
1114 df_print_regset (file, &problem_data->out[bb->index]);
1115 }
1116 }
1117 }
1118
1119
1120 /* Build the datastructure to verify that the solution to the dataflow
1121 equations is not dirty. */
1122
1123 static void
1124 df_lr_verify_solution_start (void)
1125 {
1126 basic_block bb;
1127 struct df_lr_problem_data *problem_data;
1128 if (df_lr->solutions_dirty)
1129 return;
1130
1131 /* Set it true so that the solution is recomputed. */
1132 df_lr->solutions_dirty = true;
1133
1134 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1135 problem_data->in = XNEWVEC (bitmap_head, last_basic_block);
1136 problem_data->out = XNEWVEC (bitmap_head, last_basic_block);
1137
1138 FOR_ALL_BB (bb)
1139 {
1140 bitmap_initialize (&problem_data->in[bb->index], &problem_data->lr_bitmaps);
1141 bitmap_initialize (&problem_data->out[bb->index], &problem_data->lr_bitmaps);
1142 bitmap_copy (&problem_data->in[bb->index], DF_LR_IN (bb));
1143 bitmap_copy (&problem_data->out[bb->index], DF_LR_OUT (bb));
1144 }
1145 }
1146
1147
1148 /* Compare the saved datastructure and the new solution to the dataflow
1149 equations. */
1150
1151 static void
1152 df_lr_verify_solution_end (void)
1153 {
1154 struct df_lr_problem_data *problem_data;
1155 basic_block bb;
1156
1157 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1158
1159 if (!problem_data->out)
1160 return;
1161
1162 if (df_lr->solutions_dirty)
1163 /* Do not check if the solution is still dirty. See the comment
1164 in df_lr_finalize for details. */
1165 df_lr->solutions_dirty = false;
1166 else
1167 FOR_ALL_BB (bb)
1168 {
1169 if ((!bitmap_equal_p (&problem_data->in[bb->index], DF_LR_IN (bb)))
1170 || (!bitmap_equal_p (&problem_data->out[bb->index], DF_LR_OUT (bb))))
1171 {
1172 /*df_dump (stderr);*/
1173 gcc_unreachable ();
1174 }
1175 }
1176
1177 /* Cannot delete them immediately because you may want to dump them
1178 if the comparison fails. */
1179 FOR_ALL_BB (bb)
1180 {
1181 bitmap_clear (&problem_data->in[bb->index]);
1182 bitmap_clear (&problem_data->out[bb->index]);
1183 }
1184
1185 free (problem_data->in);
1186 free (problem_data->out);
1187 problem_data->in = NULL;
1188 problem_data->out = NULL;
1189 }
1190
1191
1192 /* All of the information associated with every instance of the problem. */
1193
1194 static struct df_problem problem_LR =
1195 {
1196 DF_LR, /* Problem id. */
1197 DF_BACKWARD, /* Direction. */
1198 df_lr_alloc, /* Allocate the problem specific data. */
1199 df_lr_reset, /* Reset global information. */
1200 df_lr_free_bb_info, /* Free basic block info. */
1201 df_lr_local_compute, /* Local compute function. */
1202 df_lr_init, /* Init the solution specific data. */
1203 df_worklist_dataflow, /* Worklist solver. */
1204 df_lr_confluence_0, /* Confluence operator 0. */
1205 df_lr_confluence_n, /* Confluence operator n. */
1206 df_lr_transfer_function, /* Transfer function. */
1207 df_lr_finalize, /* Finalize function. */
1208 df_lr_free, /* Free all of the problem information. */
1209 NULL, /* Remove this problem from the stack of dataflow problems. */
1210 NULL, /* Debugging. */
1211 df_lr_top_dump, /* Debugging start block. */
1212 df_lr_bottom_dump, /* Debugging end block. */
1213 df_lr_verify_solution_start,/* Incremental solution verify start. */
1214 df_lr_verify_solution_end, /* Incremental solution verify end. */
1215 NULL, /* Dependent problem. */
1216 sizeof (struct df_lr_bb_info),/* Size of entry of block_info array. */
1217 TV_DF_LR, /* Timing variable. */
1218 false /* Reset blocks on dropping out of blocks_to_analyze. */
1219 };
1220
1221
1222 /* Create a new DATAFLOW instance and add it to an existing instance
1223 of DF. The returned structure is what is used to get at the
1224 solution. */
1225
1226 void
1227 df_lr_add_problem (void)
1228 {
1229 df_add_problem (&problem_LR);
1230 /* These will be initialized when df_scan_blocks processes each
1231 block. */
1232 df_lr->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
1233 }
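
/* A minimal sketch of driving the LR problem from a pass, assuming
   only the standard df.h entry points (df_analyze, DF_LR_IN); the
   function name is hypothetical and the code is illustrative only.  */
#if 0
static int
example_count_live_in_regs (void)
{
  basic_block bb;
  int n = 0;

  df_lr_add_problem ();
  df_analyze ();
  FOR_EACH_BB (bb)
    n += (int) bitmap_count_bits (DF_LR_IN (bb));
  return n;
}
#endif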
1234
1235
1236 /* Verify that all of the lr related info is consistent and
1237 correct. */
1238
1239 void
1240 df_lr_verify_transfer_functions (void)
1241 {
1242 basic_block bb;
1243 bitmap_head saved_def;
1244 bitmap_head saved_use;
1245 bitmap_head all_blocks;
1246
1247 if (!df)
1248 return;
1249
1250 bitmap_initialize (&saved_def, &bitmap_default_obstack);
1251 bitmap_initialize (&saved_use, &bitmap_default_obstack);
1252 bitmap_initialize (&all_blocks, &bitmap_default_obstack);
1253
1254 FOR_ALL_BB (bb)
1255 {
1256 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
1257 bitmap_set_bit (&all_blocks, bb->index);
1258
1259 if (bb_info)
1260 {
1261 /* Make a copy of the transfer functions and then compute
1262 new ones to see if the transfer functions have
1263 changed. */
1264 if (!bitmap_bit_p (df_lr->out_of_date_transfer_functions,
1265 bb->index))
1266 {
1267 bitmap_copy (&saved_def, &bb_info->def);
1268 bitmap_copy (&saved_use, &bb_info->use);
1269 bitmap_clear (&bb_info->def);
1270 bitmap_clear (&bb_info->use);
1271
1272 df_lr_bb_local_compute (bb->index);
1273 gcc_assert (bitmap_equal_p (&saved_def, &bb_info->def));
1274 gcc_assert (bitmap_equal_p (&saved_use, &bb_info->use));
1275 }
1276 }
1277 else
1278 {
1279 /* If we do not have basic block info, the block must be in
1280 the list of dirty blocks or else someone has added a
1281 block behind our backs. */
1282 gcc_assert (bitmap_bit_p (df_lr->out_of_date_transfer_functions,
1283 bb->index));
1284 }
1285 /* Make sure no one created a block without following
1286 procedures. */
1287 gcc_assert (df_scan_get_bb_info (bb->index));
1288 }
1289
1290 /* Make sure there are no dirty bits in blocks that have been deleted. */
1291 gcc_assert (!bitmap_intersect_compl_p (df_lr->out_of_date_transfer_functions,
1292 &all_blocks));
1293
1294 bitmap_clear (&saved_def);
1295 bitmap_clear (&saved_use);
1296 bitmap_clear (&all_blocks);
1297 }
1298
1299
1300 \f
1301 /*----------------------------------------------------------------------------
1302 LIVE AND MUST-INITIALIZED REGISTERS.
1303
1304 This problem first computes the IN and OUT bitvectors for the
1305 must-initialized registers problem, which is a forward problem.
1306 It gives the set of registers for which we MUST have an available
1307 definition on any path from the entry block to the entry/exit of
1308 a basic block. Sets generate a definition, while clobbers kill
1309 a definition.
1310
1311 In and out bitvectors are built for each basic block and are indexed by
1312 regnum (see df.h for details). In and out bitvectors in struct
1313 df_live_bb_info actually refer to the must-initialized problem.
1314
1315 Then, the in and out sets for the LIVE problem itself are computed.
1316 These are the logical AND of the IN and OUT sets from the LR problem
1317 and the must-initialized problem.
1318 ----------------------------------------------------------------------------*/
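
/* Schematically, with MUSTINIT denoting the forward problem solved
   here and LR the backward problem above:

     MUSTINIT_OUT(bb) = GEN(bb) U (MUSTINIT_IN(bb) - KILL(bb))
     LIVE_IN(bb)      = LR_IN(bb)  & MUSTINIT_IN(bb)
     LIVE_OUT(bb)     = LR_OUT(bb) & MUSTINIT_OUT(bb)

   (see df_live_transfer_function and df_live_finalize below, which
   also intersect the intermediate sets with the LR solution to keep
   the bitmaps small).  */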
1319
1320 /* Private data used to verify the solution for this problem. */
1321 struct df_live_problem_data
1322 {
1323 bitmap_head *in;
1324 bitmap_head *out;
1325 /* An obstack for the bitmaps we need for this problem. */
1326 bitmap_obstack live_bitmaps;
1327 };
1328
1329 /* Scratch var used by transfer functions. This is used to implement
1330 an optimization to reduce the amount of space used to compute the
1331 combined lr and live analysis. */
1332 static bitmap_head df_live_scratch;
1333
1334
1335 /* Free basic block info. */
1336
1337 static void
1338 df_live_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
1339 void *vbb_info)
1340 {
1341 struct df_live_bb_info *bb_info = (struct df_live_bb_info *) vbb_info;
1342 if (bb_info)
1343 {
1344 bitmap_clear (&bb_info->gen);
1345 bitmap_clear (&bb_info->kill);
1346 bitmap_clear (&bb_info->in);
1347 bitmap_clear (&bb_info->out);
1348 }
1349 }
1350
1351
1352 /* Allocate or reset bitmaps for DF_LIVE blocks. The solution bits are
1353 not touched unless the block is new. */
1354
1355 static void
1356 df_live_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
1357 {
1358 unsigned int bb_index;
1359 bitmap_iterator bi;
1360 struct df_live_problem_data *problem_data;
1361
1362 if (df_live->problem_data)
1363 problem_data = (struct df_live_problem_data *) df_live->problem_data;
1364 else
1365 {
1366 problem_data = XNEW (struct df_live_problem_data);
1367 df_live->problem_data = problem_data;
1368
1369 problem_data->out = NULL;
1370 problem_data->in = NULL;
1371 bitmap_obstack_initialize (&problem_data->live_bitmaps);
1372 bitmap_initialize (&df_live_scratch, &problem_data->live_bitmaps);
1373 }
1374
1375 df_grow_bb_info (df_live);
1376
1377 EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions, 0, bb_index, bi)
1378 {
1379 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1380
1381 /* When bitmaps are already initialized, just clear them. */
1382 if (bb_info->kill.obstack)
1383 {
1384 bitmap_clear (&bb_info->kill);
1385 bitmap_clear (&bb_info->gen);
1386 }
1387 else
1388 {
1389 bitmap_initialize (&bb_info->kill, &problem_data->live_bitmaps);
1390 bitmap_initialize (&bb_info->gen, &problem_data->live_bitmaps);
1391 bitmap_initialize (&bb_info->in, &problem_data->live_bitmaps);
1392 bitmap_initialize (&bb_info->out, &problem_data->live_bitmaps);
1393 }
1394 }
1395 df_live->optional_p = (optimize <= 1);
1396 }
1397
1398
1399 /* Reset the global solution for recalculation. */
1400
1401 static void
1402 df_live_reset (bitmap all_blocks)
1403 {
1404 unsigned int bb_index;
1405 bitmap_iterator bi;
1406
1407 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1408 {
1409 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1410 gcc_assert (bb_info);
1411 bitmap_clear (&bb_info->in);
1412 bitmap_clear (&bb_info->out);
1413 }
1414 }
1415
1416
1417 /* Compute local uninitialized register info for basic block BB. */
1418
1419 static void
1420 df_live_bb_local_compute (unsigned int bb_index)
1421 {
1422 basic_block bb = BASIC_BLOCK (bb_index);
1423 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1424 rtx insn;
1425 df_ref *def_rec;
1426 int luid = 0;
1427
1428 FOR_BB_INSNS (bb, insn)
1429 {
1430 unsigned int uid = INSN_UID (insn);
1431 struct df_insn_info *insn_info = DF_INSN_UID_GET (uid);
1432
1433 /* Inserting labels does not always trigger the incremental
1434 rescanning. */
1435 if (!insn_info)
1436 {
1437 gcc_assert (!INSN_P (insn));
1438 insn_info = df_insn_create_insn_record (insn);
1439 }
1440
1441 DF_INSN_INFO_LUID (insn_info) = luid;
1442 if (!INSN_P (insn))
1443 continue;
1444
1445 luid++;
1446 for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++)
1447 {
1448 df_ref def = *def_rec;
1449 unsigned int regno = DF_REF_REGNO (def);
1450
1451 if (DF_REF_FLAGS_IS_SET (def,
1452 DF_REF_PARTIAL | DF_REF_CONDITIONAL))
1453 /* All partial or conditional defs
1454 seen are included in the gen set. */
1455 bitmap_set_bit (&bb_info->gen, regno);
1456 else if (DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER))
1457 /* Only must clobbers for the entire reg destroy the
1458 value. */
1459 bitmap_set_bit (&bb_info->kill, regno);
1460 else if (! DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
1461 bitmap_set_bit (&bb_info->gen, regno);
1462 }
1463 }
1464
1465 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
1466 {
1467 df_ref def = *def_rec;
1468 bitmap_set_bit (&bb_info->gen, DF_REF_REGNO (def));
1469 }
1470 }
1471
1472
1473 /* Compute local uninitialized register info. */
1474
1475 static void
1476 df_live_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
1477 {
1478 unsigned int bb_index;
1479 bitmap_iterator bi;
1480
1481 df_grow_insn_info ();
1482
1483 EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions,
1484 0, bb_index, bi)
1485 {
1486 df_live_bb_local_compute (bb_index);
1487 }
1488
1489 bitmap_clear (df_live->out_of_date_transfer_functions);
1490 }
1491
1492
1493 /* Initialize the solution vectors. */
1494
1495 static void
1496 df_live_init (bitmap all_blocks)
1497 {
1498 unsigned int bb_index;
1499 bitmap_iterator bi;
1500
1501 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1502 {
1503 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1504 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
1505
1506 /* No register may reach a location where it is not used. Thus
1507 we trim the must-initialized result to the places where it is used. */
1508 bitmap_and (&bb_info->out, &bb_info->gen, &bb_lr_info->out);
1509 bitmap_clear (&bb_info->in);
1510 }
1511 }
1512
1513 /* Forward confluence function that ignores fake edges. */
1514
1515 static bool
1516 df_live_confluence_n (edge e)
1517 {
1518 bitmap op1 = &df_live_get_bb_info (e->dest->index)->in;
1519 bitmap op2 = &df_live_get_bb_info (e->src->index)->out;
1520
1521 if (e->flags & EDGE_FAKE)
1522 return false;
1523
1524 return bitmap_ior_into (op1, op2);
1525 }
1526
1527
1528 /* Transfer function for the forwards must-initialized problem. */
1529
1530 static bool
1531 df_live_transfer_function (int bb_index)
1532 {
1533 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1534 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
1535 bitmap in = &bb_info->in;
1536 bitmap out = &bb_info->out;
1537 bitmap gen = &bb_info->gen;
1538 bitmap kill = &bb_info->kill;
1539
1540 /* We need to use a scratch set here so that the value returned from this
1541 function invocation properly reflects whether the sets changed in a
1542 significant way; i.e. not just because the lr set was anded in. */
1543 bitmap_and (&df_live_scratch, gen, &bb_lr_info->out);
1544 /* No register may reach a location where it is not used. Thus
1545 we trim the must-initialized result to the places where it is used. */
1546 bitmap_and_into (in, &bb_lr_info->in);
1547
1548 return bitmap_ior_and_compl (out, &df_live_scratch, in, kill);
1549 }
1550
1551
1552 /* And the LR info with the must-initialized registers, to produce the LIVE info. */
1553
1554 static void
1555 df_live_finalize (bitmap all_blocks)
1556 {
1557
1558 if (df_live->solutions_dirty)
1559 {
1560 bitmap_iterator bi;
1561 unsigned int bb_index;
1562
1563 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1564 {
1565 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
1566 struct df_live_bb_info *bb_live_info = df_live_get_bb_info (bb_index);
1567
1568 /* No register may reach a location where it is not used. Thus
1569 we trim the must-initialized result to the places where it is used. */
1570 bitmap_and_into (&bb_live_info->in, &bb_lr_info->in);
1571 bitmap_and_into (&bb_live_info->out, &bb_lr_info->out);
1572 }
1573
1574 df_live->solutions_dirty = false;
1575 }
1576 }
1577
1578
1579 /* Free all storage associated with the problem. */
1580
1581 static void
1582 df_live_free (void)
1583 {
1584 struct df_live_problem_data *problem_data
1585 = (struct df_live_problem_data *) df_live->problem_data;
1586 if (df_live->block_info)
1587 {
1588 df_live->block_info_size = 0;
1589 free (df_live->block_info);
1590 df_live->block_info = NULL;
1591 bitmap_clear (&df_live_scratch);
1592 bitmap_obstack_release (&problem_data->live_bitmaps);
1593 free (problem_data);
1594 df_live->problem_data = NULL;
1595 }
1596 BITMAP_FREE (df_live->out_of_date_transfer_functions);
1597 free (df_live);
1598 }
1599
1600
1601 /* Debugging info at top of bb. */
1602
1603 static void
1604 df_live_top_dump (basic_block bb, FILE *file)
1605 {
1606 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
1607 struct df_live_problem_data *problem_data;
1608
1609 if (!bb_info)
1610 return;
1611
1612 fprintf (file, ";; live in \t");
1613 df_print_regset (file, &bb_info->in);
1614 if (df_live->problem_data)
1615 {
1616 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1617 if (problem_data->in)
1618 {
1619 fprintf (file, ";; old in \t");
1620 df_print_regset (file, &problem_data->in[bb->index]);
1621 }
1622 }
1623 fprintf (file, ";; live gen \t");
1624 df_print_regset (file, &bb_info->gen);
1625 fprintf (file, ";; live kill\t");
1626 df_print_regset (file, &bb_info->kill);
1627 }
1628
1629
1630 /* Debugging info at bottom of bb. */
1631
1632 static void
1633 df_live_bottom_dump (basic_block bb, FILE *file)
1634 {
1635 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
1636 struct df_live_problem_data *problem_data;
1637
1638 if (!bb_info)
1639 return;
1640
1641 fprintf (file, ";; live out \t");
1642 df_print_regset (file, &bb_info->out);
1643 if (df_live->problem_data)
1644 {
1645 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1646 if (problem_data->out)
1647 {
1648 fprintf (file, ";; old out \t");
1649 df_print_regset (file, &problem_data->out[bb->index]);
1650 }
1651 }
1652 }
1653
1654
1655 /* Build the datastructure to verify that the solution to the dataflow
1656 equations is not dirty. */
1657
1658 static void
1659 df_live_verify_solution_start (void)
1660 {
1661 basic_block bb;
1662 struct df_live_problem_data *problem_data;
1663 if (df_live->solutions_dirty)
1664 return;
1665
1666 /* Set it true so that the solution is recomputed. */
1667 df_live->solutions_dirty = true;
1668
1669 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1670 problem_data->in = XNEWVEC (bitmap_head, last_basic_block);
1671 problem_data->out = XNEWVEC (bitmap_head, last_basic_block);
1672
1673 FOR_ALL_BB (bb)
1674 {
1675 bitmap_initialize (&problem_data->in[bb->index], &problem_data->live_bitmaps);
1676 bitmap_initialize (&problem_data->out[bb->index], &problem_data->live_bitmaps);
1677 bitmap_copy (&problem_data->in[bb->index], DF_LIVE_IN (bb));
1678 bitmap_copy (&problem_data->out[bb->index], DF_LIVE_OUT (bb));
1679 }
1680 }
1681
1682
1683 /* Compare the saved datastructure and the new solution to the dataflow
1684 equations. */
1685
1686 static void
1687 df_live_verify_solution_end (void)
1688 {
1689 struct df_live_problem_data *problem_data;
1690 basic_block bb;
1691
1692 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1693 if (!problem_data->out)
1694 return;
1695
1696 FOR_ALL_BB (bb)
1697 {
1698 if ((!bitmap_equal_p (&problem_data->in[bb->index], DF_LIVE_IN (bb)))
1699 || (!bitmap_equal_p (&problem_data->out[bb->index], DF_LIVE_OUT (bb))))
1700 {
1701 /*df_dump (stderr);*/
1702 gcc_unreachable ();
1703 }
1704 }
1705
1706 /* Cannot delete them immediately because you may want to dump them
1707 if the comparison fails. */
1708 FOR_ALL_BB (bb)
1709 {
1710 bitmap_clear (&problem_data->in[bb->index]);
1711 bitmap_clear (&problem_data->out[bb->index]);
1712 }
1713
1714 free (problem_data->in);
1715 free (problem_data->out);
1716 free (problem_data);
1717 df_live->problem_data = NULL;
1718 }
1719
1720
1721 /* All of the information associated with every instance of the problem. */
1722
1723 static struct df_problem problem_LIVE =
1724 {
1725 DF_LIVE, /* Problem id. */
1726 DF_FORWARD, /* Direction. */
1727 df_live_alloc, /* Allocate the problem specific data. */
1728 df_live_reset, /* Reset global information. */
1729 df_live_free_bb_info, /* Free basic block info. */
1730 df_live_local_compute, /* Local compute function. */
1731 df_live_init, /* Init the solution specific data. */
1732 df_worklist_dataflow, /* Worklist solver. */
1733 NULL, /* Confluence operator 0. */
1734 df_live_confluence_n, /* Confluence operator n. */
1735 df_live_transfer_function, /* Transfer function. */
1736 df_live_finalize, /* Finalize function. */
1737 df_live_free, /* Free all of the problem information. */
1738 df_live_free, /* Remove this problem from the stack of dataflow problems. */
1739 NULL, /* Debugging. */
1740 df_live_top_dump, /* Debugging start block. */
1741 df_live_bottom_dump, /* Debugging end block. */
1742 df_live_verify_solution_start,/* Incremental solution verify start. */
1743 df_live_verify_solution_end, /* Incremental solution verify end. */
1744 &problem_LR, /* Dependent problem. */
1745 sizeof (struct df_live_bb_info),/* Size of entry of block_info array. */
1746 TV_DF_LIVE, /* Timing variable. */
1747 false /* Reset blocks on dropping out of blocks_to_analyze. */
1748 };
1749
1750
1751 /* Create a new DATAFLOW instance and add it to an existing instance
1752 of DF. The returned structure is what is used to get at the
1753 solution. */
1754
1755 void
1756 df_live_add_problem (void)
1757 {
1758 df_add_problem (&problem_LIVE);
1759 /* These will be initialized when df_scan_blocks processes each
1760 block. */
1761 df_live->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
1762 }
1763
1764
1765 /* Set all of the blocks as dirty. This needs to be done if this
1766 problem is added after all of the insns have been scanned. */
1767
1768 void
1769 df_live_set_all_dirty (void)
1770 {
1771 basic_block bb;
1772 FOR_ALL_BB (bb)
1773 bitmap_set_bit (df_live->out_of_date_transfer_functions,
1774 bb->index);
1775 }
1776
1777
1778 /* Verify that all of the live related info is consistent and
1779 correct. */
1780
1781 void
1782 df_live_verify_transfer_functions (void)
1783 {
1784 basic_block bb;
1785 bitmap_head saved_gen;
1786 bitmap_head saved_kill;
1787 bitmap_head all_blocks;
1788
1789 if (!df)
1790 return;
1791
1792 bitmap_initialize (&saved_gen, &bitmap_default_obstack);
1793 bitmap_initialize (&saved_kill, &bitmap_default_obstack);
1794 bitmap_initialize (&all_blocks, &bitmap_default_obstack);
1795
1796 df_grow_insn_info ();
1797
1798 FOR_ALL_BB (bb)
1799 {
1800 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
1801 bitmap_set_bit (&all_blocks, bb->index);
1802
1803 if (bb_info)
1804 {
1805 /* Make a copy of the transfer functions and then compute
1806 new ones to see if the transfer functions have
1807 changed. */
1808 if (!bitmap_bit_p (df_live->out_of_date_transfer_functions,
1809 bb->index))
1810 {
1811 bitmap_copy (&saved_gen, &bb_info->gen);
1812 bitmap_copy (&saved_kill, &bb_info->kill);
1813 bitmap_clear (&bb_info->gen);
1814 bitmap_clear (&bb_info->kill);
1815
1816 df_live_bb_local_compute (bb->index);
1817 gcc_assert (bitmap_equal_p (&saved_gen, &bb_info->gen));
1818 gcc_assert (bitmap_equal_p (&saved_kill, &bb_info->kill));
1819 }
1820 }
1821 else
1822 {
1823 /* If we do not have basic block info, the block must be in
1824 the list of dirty blocks or else someone has added a
1825 block behind our backs. */
1826 gcc_assert (bitmap_bit_p (df_live->out_of_date_transfer_functions,
1827 bb->index));
1828 }
1829 /* Make sure no one created a block without following
1830 procedures. */
1831 gcc_assert (df_scan_get_bb_info (bb->index));
1832 }
1833
1834 /* Make sure there are no dirty bits in blocks that have been deleted. */
1835 gcc_assert (!bitmap_intersect_compl_p (df_live->out_of_date_transfer_functions,
1836 &all_blocks));
1837 bitmap_clear (&saved_gen);
1838 bitmap_clear (&saved_kill);
1839 bitmap_clear (&all_blocks);
1840 }
1841 \f
1842 /*----------------------------------------------------------------------------
1843 CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS
1844
1845 Link either the defs to the uses and / or the uses to the defs.
1846
1847 These problems are set up like the other dataflow problems so that
1848 they nicely fit into the framework. They are much simpler and only
1849 involve a single traversal of instructions and an examination of
1850 the reaching defs information (the dependent problem).
1851 ----------------------------------------------------------------------------*/
1852
1853 #define df_chain_problem_p(FLAG) (((enum df_chain_flags)df_chain->local_flags)&(FLAG))
1854
1855 /* Create a du or ud chain from SRC to DST and link it into SRC. */
1856
1857 struct df_link *
1858 df_chain_create (df_ref src, df_ref dst)
1859 {
1860 struct df_link *head = DF_REF_CHAIN (src);
1861 struct df_link *link = (struct df_link *) pool_alloc (df_chain->block_pool);
1862
1863 DF_REF_CHAIN (src) = link;
1864 link->next = head;
1865 link->ref = dst;
1866 return link;
1867 }
1868
1869
1870 /* Delete any du or ud chains that start at REF and point to
1871 TARGET. */
1872 static void
1873 df_chain_unlink_1 (df_ref ref, df_ref target)
1874 {
1875 struct df_link *chain = DF_REF_CHAIN (ref);
1876 struct df_link *prev = NULL;
1877
1878 while (chain)
1879 {
1880 if (chain->ref == target)
1881 {
1882 if (prev)
1883 prev->next = chain->next;
1884 else
1885 DF_REF_CHAIN (ref) = chain->next;
1886 pool_free (df_chain->block_pool, chain);
1887 return;
1888 }
1889 prev = chain;
1890 chain = chain->next;
1891 }
1892 }
1893
1894
1895 /* Delete any du or ud chains that leave or point to REF. */
1896
1897 void
1898 df_chain_unlink (df_ref ref)
1899 {
1900 struct df_link *chain = DF_REF_CHAIN (ref);
1901 while (chain)
1902 {
1903 struct df_link *next = chain->next;
1904 /* Delete the other side if it exists. */
1905 df_chain_unlink_1 (chain->ref, ref);
1906 pool_free (df_chain->block_pool, chain);
1907 chain = next;
1908 }
1909 DF_REF_CHAIN (ref) = NULL;
1910 }
1911
1912
1913 /* Copy the du or ud chain starting at FROM_REF and attach it to
1914 TO_REF. */
1915
1916 void
1917 df_chain_copy (df_ref to_ref,
1918 struct df_link *from_ref)
1919 {
1920 while (from_ref)
1921 {
1922 df_chain_create (to_ref, from_ref->ref);
1923 from_ref = from_ref->next;
1924 }
1925 }
1926
1927
1928 /* Remove this problem from the stack of dataflow problems. */
1929
1930 static void
1931 df_chain_remove_problem (void)
1932 {
1933 bitmap_iterator bi;
1934 unsigned int bb_index;
1935
1936 /* Wholesale destruction of the old chains. */
1937 if (df_chain->block_pool)
1938 free_alloc_pool (df_chain->block_pool);
1939
1940 EXECUTE_IF_SET_IN_BITMAP (df_chain->out_of_date_transfer_functions, 0, bb_index, bi)
1941 {
1942 rtx insn;
1943 df_ref *def_rec;
1944 df_ref *use_rec;
1945 basic_block bb = BASIC_BLOCK (bb_index);
1946
1947 if (df_chain_problem_p (DF_DU_CHAIN))
1948 for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++)
1949 DF_REF_CHAIN (*def_rec) = NULL;
1950 if (df_chain_problem_p (DF_UD_CHAIN))
1951 for (use_rec = df_get_artificial_uses (bb->index); *use_rec; use_rec++)
1952 DF_REF_CHAIN (*use_rec) = NULL;
1953
1954 FOR_BB_INSNS (bb, insn)
1955 {
1956 unsigned int uid = INSN_UID (insn);
1957
1958 if (INSN_P (insn))
1959 {
1960 if (df_chain_problem_p (DF_DU_CHAIN))
1961 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
1962 DF_REF_CHAIN (*def_rec) = NULL;
1963 if (df_chain_problem_p (DF_UD_CHAIN))
1964 {
1965 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
1966 DF_REF_CHAIN (*use_rec) = NULL;
1967 for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
1968 DF_REF_CHAIN (*use_rec) = NULL;
1969 }
1970 }
1971 }
1972 }
1973
1974 bitmap_clear (df_chain->out_of_date_transfer_functions);
1975 df_chain->block_pool = NULL;
1976 }
1977
1978
1979 /* Remove the chain problem completely. */
1980
1981 static void
1982 df_chain_fully_remove_problem (void)
1983 {
1984 df_chain_remove_problem ();
1985 BITMAP_FREE (df_chain->out_of_date_transfer_functions);
1986 free (df_chain);
1987 }
1988
1989
1990 /* Create def-use or use-def chains. */
1991
1992 static void
1993 df_chain_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
1994 {
1995 df_chain_remove_problem ();
1996 df_chain->block_pool = create_alloc_pool ("df_chain_block pool",
1997 sizeof (struct df_link), 50);
1998 df_chain->optional_p = true;
1999 }
2000
2001
2002 /* Reset all of the chains when the set of basic blocks changes. */
2003
2004 static void
2005 df_chain_reset (bitmap blocks_to_clear ATTRIBUTE_UNUSED)
2006 {
2007 df_chain_remove_problem ();
2008 }
2009
2010
2011 /* Create the chains for a list of USEs. */
2012
2013 static void
2014 df_chain_create_bb_process_use (bitmap local_rd,
2015 df_ref *use_rec,
2016 int top_flag)
2017 {
2018 bitmap_iterator bi;
2019 unsigned int def_index;
2020
2021 while (*use_rec)
2022 {
2023 df_ref use = *use_rec;
2024 unsigned int uregno = DF_REF_REGNO (use);
2025 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
2026 || (uregno >= FIRST_PSEUDO_REGISTER))
2027 {
2028 /* We do not want to go through this for an uninitialized var. */
2029 int count = DF_DEFS_COUNT (uregno);
2030 if (count)
2031 {
2032 if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
2033 {
2034 unsigned int first_index = DF_DEFS_BEGIN (uregno);
2035 unsigned int last_index = first_index + count - 1;
2036
2037 EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
2038 {
2039 df_ref def;
2040 if (def_index > last_index)
2041 break;
2042
2043 def = DF_DEFS_GET (def_index);
2044 if (df_chain_problem_p (DF_DU_CHAIN))
2045 df_chain_create (def, use);
2046 if (df_chain_problem_p (DF_UD_CHAIN))
2047 df_chain_create (use, def);
2048 }
2049 }
2050 }
2051 }
2052
2053 use_rec++;
2054 }
2055 }
2056
2057
2058 /* Create chains from reaching defs bitmaps for basic block BB. */
2059
2060 static void
2061 df_chain_create_bb (unsigned int bb_index)
2062 {
2063 basic_block bb = BASIC_BLOCK (bb_index);
2064 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
2065 rtx insn;
2066 bitmap_head cpy;
2067
2068 bitmap_initialize (&cpy, &bitmap_default_obstack);
2069 bitmap_copy (&cpy, &bb_info->in);
2070 bitmap_set_bit (df_chain->out_of_date_transfer_functions, bb_index);
2071
2072 /* Since we are going forwards, process the artificial uses first,
2073 then the artificial defs. */
2074
2075 #ifdef EH_USES
2076 /* Create the chains for the artificial uses from the EH_USES at the
2077 beginning of the block. */
2078
2079 /* Artificials are only hard regs. */
2080 if (!(df->changeable_flags & DF_NO_HARD_REGS))
2081 df_chain_create_bb_process_use (&cpy,
2082 df_get_artificial_uses (bb->index),
2083 DF_REF_AT_TOP);
2084 #endif
2085
2086 df_rd_simulate_artificial_defs_at_top (bb, &cpy);
2087
2088 /* Process the regular instructions next. */
2089 FOR_BB_INSNS (bb, insn)
2090 if (INSN_P (insn))
2091 {
2092 unsigned int uid = INSN_UID (insn);
2093
2094 /* First scan the uses and link them up with the defs that remain
2095 in the cpy vector. */
2096 df_chain_create_bb_process_use (&cpy, DF_INSN_UID_USES (uid), 0);
2097 if (df->changeable_flags & DF_EQ_NOTES)
2098 df_chain_create_bb_process_use (&cpy, DF_INSN_UID_EQ_USES (uid), 0);
2099
2100 /* Since we are going forwards, process the defs second. */
2101 df_rd_simulate_one_insn (bb, insn, &cpy);
2102 }
2103
2104 /* Create the chains for the artificial uses of the hard registers
2105 at the end of the block. */
2106 if (!(df->changeable_flags & DF_NO_HARD_REGS))
2107 df_chain_create_bb_process_use (&cpy,
2108 df_get_artificial_uses (bb->index),
2109 0);
2110
2111 bitmap_clear (&cpy);
2112 }
2113
2114 /* Create def-use and/or use-def chains from the reaching-defs bitmaps
2115 for the basic blocks in ALL_BLOCKS. */
2116
2117 static void
2118 df_chain_finalize (bitmap all_blocks)
2119 {
2120 unsigned int bb_index;
2121 bitmap_iterator bi;
2122
2123 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2124 {
2125 df_chain_create_bb (bb_index);
2126 }
2127 }
2128
2129
2130 /* Free all storage associated with the problem. */
2131
2132 static void
2133 df_chain_free (void)
2134 {
2135 free_alloc_pool (df_chain->block_pool);
2136 BITMAP_FREE (df_chain->out_of_date_transfer_functions);
2137 free (df_chain);
2138 }
2139
2140
2141 /* Debugging info. */
2142
2143 static void
2144 df_chain_top_dump (basic_block bb, FILE *file)
2145 {
2146 if (df_chain_problem_p (DF_DU_CHAIN))
2147 {
2148 rtx insn;
2149 df_ref *def_rec = df_get_artificial_defs (bb->index);
2150 if (*def_rec)
2151 {
2152
2153 fprintf (file, ";; DU chains for artificial defs\n");
2154 while (*def_rec)
2155 {
2156 df_ref def = *def_rec;
2157 fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
2158 df_chain_dump (DF_REF_CHAIN (def), file);
2159 fprintf (file, "\n");
2160 def_rec++;
2161 }
2162 }
2163
2164 FOR_BB_INSNS (bb, insn)
2165 {
2166 if (INSN_P (insn))
2167 {
2168 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
2169 def_rec = DF_INSN_INFO_DEFS (insn_info);
2170 if (*def_rec)
2171 {
2172 fprintf (file, ";; DU chains for insn luid %d uid %d\n",
2173 DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
2174
2175 while (*def_rec)
2176 {
2177 df_ref def = *def_rec;
2178 fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
2179 if (DF_REF_FLAGS (def) & DF_REF_READ_WRITE)
2180 fprintf (file, "read/write ");
2181 df_chain_dump (DF_REF_CHAIN (def), file);
2182 fprintf (file, "\n");
2183 def_rec++;
2184 }
2185 }
2186 }
2187 }
2188 }
2189 }
2190
2191
2192 static void
2193 df_chain_bottom_dump (basic_block bb, FILE *file)
2194 {
2195 if (df_chain_problem_p (DF_UD_CHAIN))
2196 {
2197 rtx insn;
2198 df_ref *use_rec = df_get_artificial_uses (bb->index);
2199
2200 if (*use_rec)
2201 {
2202 fprintf (file, ";; UD chains for artificial uses\n");
2203 while (*use_rec)
2204 {
2205 df_ref use = *use_rec;
2206 fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
2207 df_chain_dump (DF_REF_CHAIN (use), file);
2208 fprintf (file, "\n");
2209 use_rec++;
2210 }
2211 }
2212
2213 FOR_BB_INSNS (bb, insn)
2214 {
2215 if (INSN_P (insn))
2216 {
2217 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
2218 df_ref *eq_use_rec = DF_INSN_INFO_EQ_USES (insn_info);
2219 use_rec = DF_INSN_INFO_USES (insn_info);
2220 if (*use_rec || *eq_use_rec)
2221 {
2222 fprintf (file, ";; UD chains for insn luid %d uid %d\n",
2223 DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
2224
2225 while (*use_rec)
2226 {
2227 df_ref use = *use_rec;
2228 fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
2229 if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
2230 fprintf (file, "read/write ");
2231 df_chain_dump (DF_REF_CHAIN (use), file);
2232 fprintf (file, "\n");
2233 use_rec++;
2234 }
2235 while (*eq_use_rec)
2236 {
2237 df_ref use = *eq_use_rec;
2238 fprintf (file, ";; eq_note reg %d ", DF_REF_REGNO (use));
2239 df_chain_dump (DF_REF_CHAIN (use), file);
2240 fprintf (file, "\n");
2241 eq_use_rec++;
2242 }
2243 }
2244 }
2245 }
2246 }
2247 }
2248
2249
2250 static struct df_problem problem_CHAIN =
2251 {
2252 DF_CHAIN, /* Problem id. */
2253 DF_NONE, /* Direction. */
2254 df_chain_alloc, /* Allocate the problem specific data. */
2255 df_chain_reset, /* Reset global information. */
2256 NULL, /* Free basic block info. */
2257 NULL, /* Local compute function. */
2258 NULL, /* Init the solution specific data. */
2259 NULL, /* Iterative solver. */
2260 NULL, /* Confluence operator 0. */
2261 NULL, /* Confluence operator n. */
2262 NULL, /* Transfer function. */
2263 df_chain_finalize, /* Finalize function. */
2264 df_chain_free, /* Free all of the problem information. */
2265 df_chain_fully_remove_problem,/* Remove this problem from the stack of dataflow problems. */
2266 NULL, /* Debugging. */
2267 df_chain_top_dump, /* Debugging start block. */
2268 df_chain_bottom_dump, /* Debugging end block. */
2269 NULL, /* Incremental solution verify start. */
2270 NULL, /* Incremental solution verify end. */
2271 &problem_RD, /* Dependent problem. */
2272 sizeof (struct df_scan_bb_info),/* Size of entry of block_info array. */
2273 TV_DF_CHAIN, /* Timing variable. */
2274 false /* Reset blocks on dropping out of blocks_to_analyze. */
2275 };
2276
2277
2278 /* Create a new DATAFLOW instance and add it to an existing instance
2279 of DF. The global df_chain instance is then used to get at the
2280 solution. */
2281
2282 void
2283 df_chain_add_problem (unsigned int chain_flags)
2284 {
2285 df_add_problem (&problem_CHAIN);
2286 df_chain->local_flags = chain_flags;
2287 df_chain->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
2288 }
2289
2290 #undef df_chain_problem_p
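/* A minimal usage sketch, kept under #if 0 so it is not compiled: build
   DU chains and print, for every def in INSN, the insns whose uses it
   reaches.  DF_INSN_UID_DEFS, DF_REF_CHAIN, DF_REF_INSN and DF_REF_REGNO
   are the df.h accessors already used in this file; the function name
   and dump format are only illustrative.  */
#if 0
static void
example_dump_du_chains (FILE *file, rtx insn)
{
  df_ref *def_rec;
  unsigned int uid = INSN_UID (insn);

  /* Normally done once per pass rather than per insn.  */
  df_chain_add_problem (DF_DU_CHAIN);
  df_analyze ();

  for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
    {
      df_ref def = *def_rec;
      struct df_link *link;

      fprintf (file, "def of reg %d reaches uses in:", DF_REF_REGNO (def));
      for (link = DF_REF_CHAIN (def); link; link = link->next)
        fprintf (file, " insn %d", INSN_UID (DF_REF_INSN (link->ref)));
      fprintf (file, "\n");
    }
}
#endif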
2291
2292 \f
2293 /*----------------------------------------------------------------------------
2294 WORD LEVEL LIVE REGISTERS
2295
2296 Find the locations in the function where any use of a pseudo can
2297 reach in the backwards direction. In and out bitvectors are built
2298 for each basic block. We only track pseudo registers that have a
2299 size of 2 * UNITS_PER_WORD; bitmaps are indexed by 2 * regno and
2300 contain two bits corresponding to each of the subwords.
2301
2302 ----------------------------------------------------------------------------*/
2303
2304 /* Private data used to verify the solution for this problem. */
2305 struct df_word_lr_problem_data
2306 {
2307 /* An obstack for the bitmaps we need for this problem. */
2308 bitmap_obstack word_lr_bitmaps;
2309 };
2310
2311
2312 /* Free basic block info. */
2313
2314 static void
2315 df_word_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
2316 void *vbb_info)
2317 {
2318 struct df_word_lr_bb_info *bb_info = (struct df_word_lr_bb_info *) vbb_info;
2319 if (bb_info)
2320 {
2321 bitmap_clear (&bb_info->use);
2322 bitmap_clear (&bb_info->def);
2323 bitmap_clear (&bb_info->in);
2324 bitmap_clear (&bb_info->out);
2325 }
2326 }
2327
2328
2329 /* Allocate or reset bitmaps for DF_WORD_LR blocks. The solution bits are
2330 not touched unless the block is new. */
2331
2332 static void
2333 df_word_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
2334 {
2335 unsigned int bb_index;
2336 bitmap_iterator bi;
2337 basic_block bb;
2338 struct df_word_lr_problem_data *problem_data
2339 = XNEW (struct df_word_lr_problem_data);
2340
2341 df_word_lr->problem_data = problem_data;
2342
2343 df_grow_bb_info (df_word_lr);
2344
2345 /* Create the mapping from regnos to slots. This does not change
2346 unless the problem is destroyed and recreated. In particular, if
2347 we end up deleting the only insn that used a subreg, we do not
2348 want to redo the mapping because this would invalidate everything
2349 else. */
2350
2351 bitmap_obstack_initialize (&problem_data->word_lr_bitmaps);
2352
2353 FOR_EACH_BB (bb)
2354 bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, bb->index);
2355
2356 bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, ENTRY_BLOCK);
2357 bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, EXIT_BLOCK);
2358
2359 EXECUTE_IF_SET_IN_BITMAP (df_word_lr->out_of_date_transfer_functions, 0, bb_index, bi)
2360 {
2361 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2362
2363 /* When bitmaps are already initialized, just clear them. */
2364 if (bb_info->use.obstack)
2365 {
2366 bitmap_clear (&bb_info->def);
2367 bitmap_clear (&bb_info->use);
2368 }
2369 else
2370 {
2371 bitmap_initialize (&bb_info->use, &problem_data->word_lr_bitmaps);
2372 bitmap_initialize (&bb_info->def, &problem_data->word_lr_bitmaps);
2373 bitmap_initialize (&bb_info->in, &problem_data->word_lr_bitmaps);
2374 bitmap_initialize (&bb_info->out, &problem_data->word_lr_bitmaps);
2375 }
2376 }
2377
2378 df_word_lr->optional_p = true;
2379 }
2380
2381
2382 /* Reset the global solution for recalculation. */
2383
2384 static void
2385 df_word_lr_reset (bitmap all_blocks)
2386 {
2387 unsigned int bb_index;
2388 bitmap_iterator bi;
2389
2390 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2391 {
2392 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2393 gcc_assert (bb_info);
2394 bitmap_clear (&bb_info->in);
2395 bitmap_clear (&bb_info->out);
2396 }
2397 }
2398
2399 /* Examine REF, and if it is for a reg we're interested in, set or
2400 clear the bits corresponding to its subwords from the bitmap
2401 according to IS_SET. LIVE is the bitmap we should update. We do
2402 not track hard regs or pseudos of any size other than 2 *
2403 UNITS_PER_WORD.
2404 We return true if we changed the bitmap, or if we encountered a register
2405 we're not tracking. */
2406
2407 bool
2408 df_word_lr_mark_ref (df_ref ref, bool is_set, regset live)
2409 {
2410 rtx orig_reg = DF_REF_REG (ref);
2411 rtx reg = orig_reg;
2412 enum machine_mode reg_mode;
2413 unsigned regno;
2414 /* Left at -1 for whole accesses. */
2415 int which_subword = -1;
2416 bool changed = false;
2417
2418 if (GET_CODE (reg) == SUBREG)
2419 reg = SUBREG_REG (orig_reg);
2420 regno = REGNO (reg);
2421 reg_mode = GET_MODE (reg);
2422 if (regno < FIRST_PSEUDO_REGISTER
2423 || GET_MODE_SIZE (reg_mode) != 2 * UNITS_PER_WORD)
2424 return true;
2425
2426 if (GET_CODE (orig_reg) == SUBREG
2427 && df_read_modify_subreg_p (orig_reg))
2428 {
2429 gcc_assert (DF_REF_FLAGS_IS_SET (ref, DF_REF_PARTIAL));
2430 if (subreg_lowpart_p (orig_reg))
2431 which_subword = 0;
2432 else
2433 which_subword = 1;
2434 }
2435 if (is_set)
2436 {
2437 if (which_subword != 1)
2438 changed |= bitmap_set_bit (live, regno * 2);
2439 if (which_subword != 0)
2440 changed |= bitmap_set_bit (live, regno * 2 + 1);
2441 }
2442 else
2443 {
2444 if (which_subword != 1)
2445 changed |= bitmap_clear_bit (live, regno * 2);
2446 if (which_subword != 0)
2447 changed |= bitmap_clear_bit (live, regno * 2 + 1);
2448 }
2449 return changed;
2450 }
2451
2452 /* Compute local live register info for basic block BB. */
2453
2454 static void
2455 df_word_lr_bb_local_compute (unsigned int bb_index)
2456 {
2457 basic_block bb = BASIC_BLOCK (bb_index);
2458 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2459 rtx insn;
2460 df_ref *def_rec;
2461 df_ref *use_rec;
2462
2463 /* Ensure that artificial refs don't contain references to pseudos. */
2464 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
2465 {
2466 df_ref def = *def_rec;
2467 gcc_assert (DF_REF_REGNO (def) < FIRST_PSEUDO_REGISTER);
2468 }
2469
2470 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
2471 {
2472 df_ref use = *use_rec;
2473 gcc_assert (DF_REF_REGNO (use) < FIRST_PSEUDO_REGISTER);
2474 }
2475
2476 FOR_BB_INSNS_REVERSE (bb, insn)
2477 {
2478 unsigned int uid = INSN_UID (insn);
2479
2480 if (!NONDEBUG_INSN_P (insn))
2481 continue;
2482 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
2483 {
2484 df_ref def = *def_rec;
2485 /* If the def is to only part of the reg, it does
2486 not kill the other defs that reach here. */
2487 if (!(DF_REF_FLAGS (def) & (DF_REF_CONDITIONAL)))
2488 {
2489 df_word_lr_mark_ref (def, true, &bb_info->def);
2490 df_word_lr_mark_ref (def, false, &bb_info->use);
2491 }
2492 }
2493 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
2494 {
2495 df_ref use = *use_rec;
2496 df_word_lr_mark_ref (use, true, &bb_info->use);
2497 }
2498 }
2499 }
2500
2501
2502 /* Compute local live register info for each basic block within BLOCKS. */
2503
2504 static void
2505 df_word_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
2506 {
2507 unsigned int bb_index;
2508 bitmap_iterator bi;
2509
2510 EXECUTE_IF_SET_IN_BITMAP (df_word_lr->out_of_date_transfer_functions, 0, bb_index, bi)
2511 {
2512 if (bb_index == EXIT_BLOCK)
2513 {
2514 unsigned regno;
2515 bitmap_iterator bi;
2516 EXECUTE_IF_SET_IN_BITMAP (df->exit_block_uses, FIRST_PSEUDO_REGISTER,
2517 regno, bi)
2518 gcc_unreachable ();
2519 }
2520 else
2521 df_word_lr_bb_local_compute (bb_index);
2522 }
2523
2524 bitmap_clear (df_word_lr->out_of_date_transfer_functions);
2525 }
2526
2527
2528 /* Initialize the solution vectors. */
2529
2530 static void
2531 df_word_lr_init (bitmap all_blocks)
2532 {
2533 unsigned int bb_index;
2534 bitmap_iterator bi;
2535
2536 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2537 {
2538 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2539 bitmap_copy (&bb_info->in, &bb_info->use);
2540 bitmap_clear (&bb_info->out);
2541 }
2542 }
2543
2544
2545 /* Confluence function: OR the live-in set of the destination of edge E into the live-out set of its source. */
2546
2547 static bool
2548 df_word_lr_confluence_n (edge e)
2549 {
2550 bitmap op1 = &df_word_lr_get_bb_info (e->src->index)->out;
2551 bitmap op2 = &df_word_lr_get_bb_info (e->dest->index)->in;
2552
2553 return bitmap_ior_into (op1, op2);
2554 }
2555
2556
2557 /* Transfer function. */
2558
2559 static bool
2560 df_word_lr_transfer_function (int bb_index)
2561 {
2562 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2563 bitmap in = &bb_info->in;
2564 bitmap out = &bb_info->out;
2565 bitmap use = &bb_info->use;
2566 bitmap def = &bb_info->def;
2567
2568 return bitmap_ior_and_compl (in, use, out, def);
2569 }
2570
2571
2572 /* Free all storage associated with the problem. */
2573
2574 static void
2575 df_word_lr_free (void)
2576 {
2577 struct df_word_lr_problem_data *problem_data
2578 = (struct df_word_lr_problem_data *)df_word_lr->problem_data;
2579
2580 if (df_word_lr->block_info)
2581 {
2582 df_word_lr->block_info_size = 0;
2583 free (df_word_lr->block_info);
2584 df_word_lr->block_info = NULL;
2585 }
2586
2587 BITMAP_FREE (df_word_lr->out_of_date_transfer_functions);
2588 bitmap_obstack_release (&problem_data->word_lr_bitmaps);
2589 free (problem_data);
2590 free (df_word_lr);
2591 }
2592
2593
2594 /* Debugging info at top of bb. */
2595
2596 static void
2597 df_word_lr_top_dump (basic_block bb, FILE *file)
2598 {
2599 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb->index);
2600 if (!bb_info)
2601 return;
2602
2603 fprintf (file, ";; blr in \t");
2604 df_print_word_regset (file, &bb_info->in);
2605 fprintf (file, ";; blr use \t");
2606 df_print_word_regset (file, &bb_info->use);
2607 fprintf (file, ";; blr def \t");
2608 df_print_word_regset (file, &bb_info->def);
2609 }
2610
2611
2612 /* Debugging info at bottom of bb. */
2613
2614 static void
2615 df_word_lr_bottom_dump (basic_block bb, FILE *file)
2616 {
2617 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb->index);
2618 if (!bb_info)
2619 return;
2620
2621 fprintf (file, ";; blr out \t");
2622 df_print_word_regset (file, &bb_info->out);
2623 }
2624
2625
2626 /* All of the information associated with every instance of the problem. */
2627
2628 static struct df_problem problem_WORD_LR =
2629 {
2630 DF_WORD_LR, /* Problem id. */
2631 DF_BACKWARD, /* Direction. */
2632 df_word_lr_alloc, /* Allocate the problem specific data. */
2633 df_word_lr_reset, /* Reset global information. */
2634 df_word_lr_free_bb_info, /* Free basic block info. */
2635 df_word_lr_local_compute, /* Local compute function. */
2636 df_word_lr_init, /* Init the solution specific data. */
2637 df_worklist_dataflow, /* Worklist solver. */
2638 NULL, /* Confluence operator 0. */
2639 df_word_lr_confluence_n, /* Confluence operator n. */
2640 df_word_lr_transfer_function, /* Transfer function. */
2641 NULL, /* Finalize function. */
2642 df_word_lr_free, /* Free all of the problem information. */
2643 df_word_lr_free, /* Remove this problem from the stack of dataflow problems. */
2644 NULL, /* Debugging. */
2645 df_word_lr_top_dump, /* Debugging start block. */
2646 df_word_lr_bottom_dump, /* Debugging end block. */
2647 NULL, /* Incremental solution verify start. */
2648 NULL, /* Incremental solution verify end. */
2649 NULL, /* Dependent problem. */
2650 sizeof (struct df_word_lr_bb_info),/* Size of entry of block_info array. */
2651 TV_DF_WORD_LR, /* Timing variable. */
2652 false /* Reset blocks on dropping out of blocks_to_analyze. */
2653 };
2654
2655
2656 /* Create a new DATAFLOW instance and add it to an existing instance
2657 of DF. The global df_word_lr instance is then used to get at the
2658 solution. */
2659
2660 void
2661 df_word_lr_add_problem (void)
2662 {
2663 df_add_problem (&problem_WORD_LR);
2664 /* These will be initialized when df_scan_blocks processes each
2665 block. */
2666 df_word_lr->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
2667 }
2668
2669
2670 /* Simulate the effects of the defs of INSN on LIVE. Return true if we changed
2671 any bits, which is used by the caller to determine whether a set is
2672 necessary. We also return true if there are other reasons not to delete
2673 an insn. */
2674
2675 bool
2676 df_word_lr_simulate_defs (rtx insn, bitmap live)
2677 {
2678 bool changed = false;
2679 df_ref *def_rec;
2680 unsigned int uid = INSN_UID (insn);
2681
2682 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
2683 {
2684 df_ref def = *def_rec;
2685 if (DF_REF_FLAGS (def) & DF_REF_CONDITIONAL)
2686 changed = true;
2687 else
2688 changed |= df_word_lr_mark_ref (*def_rec, false, live);
2689 }
2690 return changed;
2691 }
2692
2693
2694 /* Simulate the effects of the uses of INSN on LIVE. */
2695
2696 void
2697 df_word_lr_simulate_uses (rtx insn, bitmap live)
2698 {
2699 df_ref *use_rec;
2700 unsigned int uid = INSN_UID (insn);
2701
2702 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
2703 df_word_lr_mark_ref (*use_rec, true, live);
2704 }
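/* A minimal sketch, kept under #if 0 so it is not compiled, of how the
   two simulate routines above might be driven over a block in the style
   of a word-level dead code pass: start from the live-out solution and
   walk the insns backwards.  DF_WORD_LR_OUT is assumed to be the df.h
   accessor for the block solution; the "keep the insn" policy shown is
   only illustrative.  */
#if 0
static void
example_word_level_scan (basic_block bb)
{
  bitmap_head local_live;
  rtx insn;

  bitmap_initialize (&local_live, &bitmap_default_obstack);
  bitmap_copy (&local_live, DF_WORD_LR_OUT (bb));

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (NONDEBUG_INSN_P (insn))
      {
        /* Returns true if some def of INSN is still (partly) live or
           cannot safely be removed.  */
        if (df_word_lr_simulate_defs (insn, &local_live))
          /* The insn would be kept, so its inputs become live.  */
          df_word_lr_simulate_uses (insn, &local_live);
      }

  bitmap_clear (&local_live);
}
#endif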
2705 \f
2706 /*----------------------------------------------------------------------------
2707 This problem computes REG_DEAD and REG_UNUSED notes.
2708 ----------------------------------------------------------------------------*/
2709
2710 static void
2711 df_note_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
2712 {
2713 df_note->optional_p = true;
2714 }
2715
2716 #ifdef REG_DEAD_DEBUGGING
2717 static void
2718 df_print_note (const char *prefix, rtx insn, rtx note)
2719 {
2720 if (dump_file)
2721 {
2722 fprintf (dump_file, "%s %d ", prefix, INSN_UID (insn));
2723 print_rtl (dump_file, note);
2724 fprintf (dump_file, "\n");
2725 }
2726 }
2727 #endif
2728
2729
2730 /* After reg-stack, the x86 floating point stack regs are difficult to
2731 analyze because of all of the pushes, pops and rotations. Thus, we
2732 just leave the notes alone. */
2733
2734 #ifdef STACK_REGS
2735 static inline bool
2736 df_ignore_stack_reg (int regno)
2737 {
2738 return regstack_completed
2739 && IN_RANGE (regno, FIRST_STACK_REG, LAST_STACK_REG);
2740 }
2741 #else
2742 static inline bool
2743 df_ignore_stack_reg (int regno ATTRIBUTE_UNUSED)
2744 {
2745 return false;
2746 }
2747 #endif
2748
2749
2750 /* Remove all of the REG_DEAD and REG_UNUSED notes from INSN and free
2751 them. Notes for the stack registers are left alone after reg-stack. */
2752
2753 static void
2754 df_kill_notes (rtx insn)
2755 {
2756 rtx *pprev = &REG_NOTES (insn);
2757 rtx link = *pprev;
2758
2759 while (link)
2760 {
2761 switch (REG_NOTE_KIND (link))
2762 {
2763 case REG_DEAD:
2764 /* After reg-stack, we need to ignore any death notes
2765 for the stack registers. */
2766 if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
2767 {
2768 pprev = &XEXP (link, 1);
2769 link = *pprev;
2770 }
2771 else
2772 {
2773 rtx next = XEXP (link, 1);
2774 #ifdef REG_DEAD_DEBUGGING
2775 df_print_note ("deleting: ", insn, link);
2776 #endif
2777 free_EXPR_LIST_node (link);
2778 *pprev = link = next;
2779 }
2780 break;
2781
2782 case REG_UNUSED:
2783 /* After reg-stack, we need to ignore any unused notes
2784 for the stack registers. */
2785 if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
2786 {
2787 pprev = &XEXP (link, 1);
2788 link = *pprev;
2789 }
2790 else
2791 {
2792 rtx next = XEXP (link, 1);
2793 #ifdef REG_DEAD_DEBUGGING
2794 df_print_note ("deleting: ", insn, link);
2795 #endif
2796 free_EXPR_LIST_node (link);
2797 *pprev = link = next;
2798 }
2799 break;
2800
2801 default:
2802 pprev = &XEXP (link, 1);
2803 link = *pprev;
2804 break;
2805 }
2806 }
2807 }
2808
2809
2810 /* Set a NOTE_TYPE note for REG in INSN. */
2811
2812 static inline void
2813 df_set_note (enum reg_note note_type, rtx insn, rtx reg)
2814 {
2815 gcc_checking_assert (!DEBUG_INSN_P (insn));
2816 add_reg_note (insn, note_type, reg);
2817 }
2818
2819 /* A subroutine of df_set_unused_notes_for_mw, with a selection of its
2820 arguments. Return true if the register value described by MWS's
2821 mw_reg is known to be completely unused, and if mw_reg can therefore
2822 be used in a REG_UNUSED note. */
2823
2824 static bool
2825 df_whole_mw_reg_unused_p (struct df_mw_hardreg *mws,
2826 bitmap live, bitmap artificial_uses)
2827 {
2828 unsigned int r;
2829
2830 /* If MWS describes a partial reference, create REG_UNUSED notes for
2831 individual hard registers. */
2832 if (mws->flags & DF_REF_PARTIAL)
2833 return false;
2834
2835 /* Likewise if some part of the register is used. */
2836 for (r = mws->start_regno; r <= mws->end_regno; r++)
2837 if (bitmap_bit_p (live, r)
2838 || bitmap_bit_p (artificial_uses, r))
2839 return false;
2840
2841 gcc_assert (REG_P (mws->mw_reg));
2842 return true;
2843 }
2844
2845
2846 /* Node of a linked list of uses of dead REGs in debug insns. */
2847 struct dead_debug_use
2848 {
2849 df_ref use;
2850 struct dead_debug_use *next;
2851 };
2852
2853 /* Linked list of the above, with a bitmap of the REGs in the
2854 list. */
2855 struct dead_debug
2856 {
2857 struct dead_debug_use *head;
2858 bitmap used;
2859 bitmap to_rescan;
2860 };
2861
2862 static void dead_debug_reset (struct dead_debug *, unsigned int);
2863
2864
2865 /* Set the REG_UNUSED notes for the multiword hardreg defs in INSN
2866 based on the bits in LIVE. Do not generate notes for registers in
2867 artificial uses. DO_NOT_GEN is updated so that REG_DEAD notes are
2868 not generated if the reg is both read and written by the
2869 instruction.
2870 */
2871
2872 static void
2873 df_set_unused_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
2874 bitmap live, bitmap do_not_gen,
2875 bitmap artificial_uses,
2876 struct dead_debug *debug)
2877 {
2878 unsigned int r;
2879
2880 #ifdef REG_DEAD_DEBUGGING
2881 if (dump_file)
2882 fprintf (dump_file, "mw_set_unused looking at mws[%d..%d]\n",
2883 mws->start_regno, mws->end_regno);
2884 #endif
2885
2886 if (df_whole_mw_reg_unused_p (mws, live, artificial_uses))
2887 {
2888 unsigned int regno = mws->start_regno;
2889 df_set_note (REG_UNUSED, insn, mws->mw_reg);
2890 dead_debug_reset (debug, regno);
2891
2892 #ifdef REG_DEAD_DEBUGGING
2893 df_print_note ("adding 1: ", insn, REG_NOTES (insn));
2894 #endif
2895 bitmap_set_bit (do_not_gen, regno);
2896 /* Only do this if the value is totally dead. */
2897 }
2898 else
2899 for (r = mws->start_regno; r <= mws->end_regno; r++)
2900 {
2901 if (!bitmap_bit_p (live, r)
2902 && !bitmap_bit_p (artificial_uses, r))
2903 {
2904 df_set_note (REG_UNUSED, insn, regno_reg_rtx[r]);
2905 dead_debug_reset (debug, r);
2906 #ifdef REG_DEAD_DEBUGGING
2907 df_print_note ("adding 2: ", insn, REG_NOTES (insn));
2908 #endif
2909 }
2910 bitmap_set_bit (do_not_gen, r);
2911 }
2912 }
2913
2914
2915 /* A subroutine of df_set_dead_notes_for_mw, with a selection of its
2916 arguments. Return true if the register value described by MWS's
2917 mw_reg is known to be completely dead, and if mw_reg can therefore
2918 be used in a REG_DEAD note. */
2919
2920 static bool
2921 df_whole_mw_reg_dead_p (struct df_mw_hardreg *mws,
2922 bitmap live, bitmap artificial_uses,
2923 bitmap do_not_gen)
2924 {
2925 unsigned int r;
2926
2927 /* If MWS describes a partial reference, create REG_DEAD notes for
2928 individual hard registers. */
2929 if (mws->flags & DF_REF_PARTIAL)
2930 return false;
2931
2932 /* Likewise if some part of the register is not dead. */
2933 for (r = mws->start_regno; r <= mws->end_regno; r++)
2934 if (bitmap_bit_p (live, r)
2935 || bitmap_bit_p (artificial_uses, r)
2936 || bitmap_bit_p (do_not_gen, r))
2937 return false;
2938
2939 gcc_assert (REG_P (mws->mw_reg));
2940 return true;
2941 }
2942
2943 /* Set the REG_DEAD notes for the multiword hardreg use in INSN based
2944 on the bits in LIVE. DO_NOT_GEN is used to keep REG_DEAD notes
2945 from being set if the instruction both reads and writes the
2946 register. */
2947
2948 static void
2949 df_set_dead_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
2950 bitmap live, bitmap do_not_gen,
2951 bitmap artificial_uses, bool *added_notes_p)
2952 {
2953 unsigned int r;
2954 bool is_debug = *added_notes_p;
2955
2956 *added_notes_p = false;
2957
2958 #ifdef REG_DEAD_DEBUGGING
2959 if (dump_file)
2960 {
2961 fprintf (dump_file, "mw_set_dead looking at mws[%d..%d]\n do_not_gen =",
2962 mws->start_regno, mws->end_regno);
2963 df_print_regset (dump_file, do_not_gen);
2964 fprintf (dump_file, " live =");
2965 df_print_regset (dump_file, live);
2966 fprintf (dump_file, " artificial uses =");
2967 df_print_regset (dump_file, artificial_uses);
2968 }
2969 #endif
2970
2971 if (df_whole_mw_reg_dead_p (mws, live, artificial_uses, do_not_gen))
2972 {
2973 /* Add a dead note for the entire multi word register. */
2974 if (is_debug)
2975 {
2976 *added_notes_p = true;
2977 return;
2978 }
2979 df_set_note (REG_DEAD, insn, mws->mw_reg);
2980 #ifdef REG_DEAD_DEBUGGING
2981 df_print_note ("adding 1: ", insn, REG_NOTES (insn));
2982 #endif
2983 }
2984 else
2985 {
2986 for (r = mws->start_regno; r <= mws->end_regno; r++)
2987 if (!bitmap_bit_p (live, r)
2988 && !bitmap_bit_p (artificial_uses, r)
2989 && !bitmap_bit_p (do_not_gen, r))
2990 {
2991 if (is_debug)
2992 {
2993 *added_notes_p = true;
2994 return;
2995 }
2996 df_set_note (REG_DEAD, insn, regno_reg_rtx[r]);
2997 #ifdef REG_DEAD_DEBUGGING
2998 df_print_note ("adding 2: ", insn, REG_NOTES (insn));
2999 #endif
3000 }
3001 }
3002 return;
3003 }
3004
3005
3006 /* Create a REG_UNUSED note if necessary for DEF in INSN updating
3007 LIVE. Do not generate notes for registers in ARTIFICIAL_USES. */
3008
3009 static void
3010 df_create_unused_note (rtx insn, df_ref def,
3011 bitmap live, bitmap artificial_uses,
3012 struct dead_debug *debug)
3013 {
3014 unsigned int dregno = DF_REF_REGNO (def);
3015
3016 #ifdef REG_DEAD_DEBUGGING
3017 if (dump_file)
3018 {
3019 fprintf (dump_file, " regular looking at def ");
3020 df_ref_debug (def, dump_file);
3021 }
3022 #endif
3023
3024 if (!((DF_REF_FLAGS (def) & DF_REF_MW_HARDREG)
3025 || bitmap_bit_p (live, dregno)
3026 || bitmap_bit_p (artificial_uses, dregno)
3027 || df_ignore_stack_reg (dregno)))
3028 {
3029 rtx reg = (DF_REF_LOC (def))
3030 ? *DF_REF_REAL_LOC (def): DF_REF_REG (def);
3031 df_set_note (REG_UNUSED, insn, reg);
3032 dead_debug_reset (debug, dregno);
3033 #ifdef REG_DEAD_DEBUGGING
3034 df_print_note ("adding 3: ", insn, REG_NOTES (insn));
3035 #endif
3036 }
3037
3038 return;
3039 }
3040
3041
3042 /* Initialize DEBUG to an empty list, and clear USED, if given. */
3043 static inline void
3044 dead_debug_init (struct dead_debug *debug, bitmap used)
3045 {
3046 debug->head = NULL;
3047 debug->used = used;
3048 debug->to_rescan = NULL;
3049 if (used)
3050 bitmap_clear (used);
3051 }
3052
3053 /* Reset all debug insns with pending uses. Release DEBUG's used
3054 bitmap unless it is USED. USED must be the same bitmap passed to
3055 dead_debug_init. */
3056 static inline void
3057 dead_debug_finish (struct dead_debug *debug, bitmap used)
3058 {
3059 struct dead_debug_use *head;
3060 rtx insn = NULL;
3061
3062 if (debug->used != used)
3063 BITMAP_FREE (debug->used);
3064
3065 while ((head = debug->head))
3066 {
3067 insn = DF_REF_INSN (head->use);
3068 if (!head->next || DF_REF_INSN (head->next->use) != insn)
3069 {
3070 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
3071 df_insn_rescan_debug_internal (insn);
3072 if (debug->to_rescan)
3073 bitmap_clear_bit (debug->to_rescan, INSN_UID (insn));
3074 }
3075 debug->head = head->next;
3076 XDELETE (head);
3077 }
3078
3079 if (debug->to_rescan)
3080 {
3081 bitmap_iterator bi;
3082 unsigned int uid;
3083
3084 EXECUTE_IF_SET_IN_BITMAP (debug->to_rescan, 0, uid, bi)
3085 {
3086 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
3087 if (insn_info)
3088 df_insn_rescan (insn_info->insn);
3089 }
3090 BITMAP_FREE (debug->to_rescan);
3091 }
3092 }
3093
3094 /* Reset DEBUG_INSNs with pending uses of DREGNO. */
3095 static void
3096 dead_debug_reset (struct dead_debug *debug, unsigned int dregno)
3097 {
3098 struct dead_debug_use **tailp = &debug->head;
3099 struct dead_debug_use *cur;
3100 rtx insn;
3101
3102 if (!debug->used || !bitmap_clear_bit (debug->used, dregno))
3103 return;
3104
3105 while ((cur = *tailp))
3106 {
3107 if (DF_REF_REGNO (cur->use) == dregno)
3108 {
3109 *tailp = cur->next;
3110 insn = DF_REF_INSN (cur->use);
3111 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
3112 if (debug->to_rescan == NULL)
3113 debug->to_rescan = BITMAP_ALLOC (NULL);
3114 bitmap_set_bit (debug->to_rescan, INSN_UID (insn));
3115 XDELETE (cur);
3116 }
3117 else
3118 tailp = &(*tailp)->next;
3119 }
3120 }
3121
3122 /* Add USE to DEBUG. It must be a dead reference to UREGNO in a debug
3123 insn. Create a bitmap for DEBUG as needed. */
3124 static inline void
3125 dead_debug_add (struct dead_debug *debug, df_ref use, unsigned int uregno)
3126 {
3127 struct dead_debug_use *newddu = XNEW (struct dead_debug_use);
3128
3129 newddu->use = use;
3130 newddu->next = debug->head;
3131 debug->head = newddu;
3132
3133 if (!debug->used)
3134 debug->used = BITMAP_ALLOC (NULL);
3135
3136 bitmap_set_bit (debug->used, uregno);
3137 }
3138
3139 /* If UREGNO is referenced by any entry in DEBUG, emit a debug insn
3140 before INSN that binds the REG to a debug temp, and replace all
3141 uses of UREGNO in DEBUG with uses of the debug temp. INSN must be
3142 the insn where UREGNO dies. */
3143 static inline void
3144 dead_debug_insert_before (struct dead_debug *debug, unsigned int uregno,
3145 rtx insn)
3146 {
3147 struct dead_debug_use **tailp = &debug->head;
3148 struct dead_debug_use *cur;
3149 struct dead_debug_use *uses = NULL;
3150 struct dead_debug_use **usesp = &uses;
3151 rtx reg = NULL;
3152 rtx dval;
3153 rtx bind;
3154
3155 if (!debug->used || !bitmap_clear_bit (debug->used, uregno))
3156 return;
3157
3158 /* Move all uses of uregno from debug->head to uses, remembering in
3159 reg the use with the widest referenced mode. */
3160 while ((cur = *tailp))
3161 {
3162 if (DF_REF_REGNO (cur->use) == uregno)
3163 {
3164 *usesp = cur;
3165 usesp = &cur->next;
3166 *tailp = cur->next;
3167 cur->next = NULL;
3168 if (!reg
3169 || (GET_MODE_BITSIZE (GET_MODE (reg))
3170 < GET_MODE_BITSIZE (GET_MODE (*DF_REF_REAL_LOC (cur->use)))))
3171 reg = *DF_REF_REAL_LOC (cur->use);
3172 }
3173 else
3174 tailp = &(*tailp)->next;
3175 }
3176
3177 gcc_assert (reg);
3178
3179 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
3180 dval = make_debug_expr_from_rtl (reg);
3181
3182 /* Emit a debug bind insn before the insn in which reg dies. */
3183 bind = gen_rtx_VAR_LOCATION (GET_MODE (reg),
3184 DEBUG_EXPR_TREE_DECL (dval), reg,
3185 VAR_INIT_STATUS_INITIALIZED);
3186
3187 bind = emit_debug_insn_before (bind, insn);
3188 df_insn_rescan (bind);
3189
3190 /* Adjust all uses. */
3191 while ((cur = uses))
3192 {
3193 if (GET_MODE (*DF_REF_REAL_LOC (cur->use)) == GET_MODE (reg))
3194 *DF_REF_REAL_LOC (cur->use) = dval;
3195 else
3196 *DF_REF_REAL_LOC (cur->use)
3197 = gen_lowpart_SUBREG (GET_MODE (*DF_REF_REAL_LOC (cur->use)), dval);
3198 /* ??? Should we simplify subreg of subreg? */
3199 if (debug->to_rescan == NULL)
3200 debug->to_rescan = BITMAP_ALLOC (NULL);
3201 bitmap_set_bit (debug->to_rescan, INSN_UID (DF_REF_INSN (cur->use)));
3202 uses = cur->next;
3203 XDELETE (cur);
3204 }
3205 }
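/* Taken together, the dead_debug routines above are used by
   df_note_bb_compute below roughly as follows: dead_debug_init starts an
   empty list for the block; dead_debug_add records a use of a currently
   dead register seen in a DEBUG_INSN while walking backwards; when a
   nondebug insn is later found to set that register without it being
   used, dead_debug_reset invalidates the recorded debug insns, and when
   the register is instead found to die at a nondebug use,
   dead_debug_insert_before binds the dying value to a debug temp and
   redirects the recorded uses to it; finally dead_debug_finish resets
   anything still pending and rescans the affected debug insns.  */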
3206
3207 /* Recompute the REG_DEAD and REG_UNUSED notes for the insns in basic
3208 block BB. LIVE, DO_NOT_GEN and ARTIFICIAL_USES are scratch
3209 bitmaps used here. */
3210
3211 static void
3212 df_note_bb_compute (unsigned int bb_index,
3213 bitmap live, bitmap do_not_gen, bitmap artificial_uses)
3214 {
3215 basic_block bb = BASIC_BLOCK (bb_index);
3216 rtx insn;
3217 df_ref *def_rec;
3218 df_ref *use_rec;
3219 struct dead_debug debug;
3220
3221 dead_debug_init (&debug, NULL);
3222
3223 bitmap_copy (live, df_get_live_out (bb));
3224 bitmap_clear (artificial_uses);
3225
3226 #ifdef REG_DEAD_DEBUGGING
3227 if (dump_file)
3228 {
3229 fprintf (dump_file, "live at bottom ");
3230 df_print_regset (dump_file, live);
3231 }
3232 #endif
3233
3234 /* Process the artificial defs and uses at the bottom of the block
3235 to begin processing. */
3236 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3237 {
3238 df_ref def = *def_rec;
3239 #ifdef REG_DEAD_DEBUGGING
3240 if (dump_file)
3241 fprintf (dump_file, "artificial def %d\n", DF_REF_REGNO (def));
3242 #endif
3243
3244 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
3245 bitmap_clear_bit (live, DF_REF_REGNO (def));
3246 }
3247
3248 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
3249 {
3250 df_ref use = *use_rec;
3251 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
3252 {
3253 unsigned int regno = DF_REF_REGNO (use);
3254 bitmap_set_bit (live, regno);
3255
3256 /* Notes are not generated for any of the artificial registers
3257 at the bottom of the block. */
3258 bitmap_set_bit (artificial_uses, regno);
3259 }
3260 }
3261
3262 #ifdef REG_DEAD_DEBUGGING
3263 if (dump_file)
3264 {
3265 fprintf (dump_file, "live before artificials out ");
3266 df_print_regset (dump_file, live);
3267 }
3268 #endif
3269
3270 FOR_BB_INSNS_REVERSE (bb, insn)
3271 {
3272 unsigned int uid = INSN_UID (insn);
3273 struct df_mw_hardreg **mws_rec;
3274 int debug_insn;
3275
3276 if (!INSN_P (insn))
3277 continue;
3278
3279 debug_insn = DEBUG_INSN_P (insn);
3280
3281 bitmap_clear (do_not_gen);
3282 df_kill_notes (insn);
3283
3284 /* Process the defs. */
3285 if (CALL_P (insn))
3286 {
3287 #ifdef REG_DEAD_DEBUGGING
3288 if (dump_file)
3289 {
3290 fprintf (dump_file, "processing call %d\n live =", INSN_UID (insn));
3291 df_print_regset (dump_file, live);
3292 }
3293 #endif
3294 /* We only care about real sets for calls. Clobbers cannot
3295 be depended on to really die. */
3296 mws_rec = DF_INSN_UID_MWS (uid);
3297 while (*mws_rec)
3298 {
3299 struct df_mw_hardreg *mws = *mws_rec;
3300 if ((DF_MWS_REG_DEF_P (mws))
3301 && !df_ignore_stack_reg (mws->start_regno))
3302 df_set_unused_notes_for_mw (insn,
3303 mws, live, do_not_gen,
3304 artificial_uses, &debug);
3305 mws_rec++;
3306 }
3307
3308 /* All of the defs except the return value are some sort of
3309 clobber. This code is for the return. */
3310 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3311 {
3312 df_ref def = *def_rec;
3313 unsigned int dregno = DF_REF_REGNO (def);
3314 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
3315 {
3316 df_create_unused_note (insn,
3317 def, live, artificial_uses, &debug);
3318 bitmap_set_bit (do_not_gen, dregno);
3319 }
3320
3321 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL | DF_REF_CONDITIONAL))
3322 bitmap_clear_bit (live, dregno);
3323 }
3324 }
3325 else
3326 {
3327 /* Regular insn. */
3328 mws_rec = DF_INSN_UID_MWS (uid);
3329 while (*mws_rec)
3330 {
3331 struct df_mw_hardreg *mws = *mws_rec;
3332 if (DF_MWS_REG_DEF_P (mws))
3333 df_set_unused_notes_for_mw (insn,
3334 mws, live, do_not_gen,
3335 artificial_uses, &debug);
3336 mws_rec++;
3337 }
3338
3339 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3340 {
3341 df_ref def = *def_rec;
3342 unsigned int dregno = DF_REF_REGNO (def);
3343 df_create_unused_note (insn,
3344 def, live, artificial_uses, &debug);
3345
3346 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
3347 bitmap_set_bit (do_not_gen, dregno);
3348
3349 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL | DF_REF_CONDITIONAL))
3350 bitmap_clear_bit (live, dregno);
3351 }
3352 }
3353
3354 /* Process the uses. */
3355 mws_rec = DF_INSN_UID_MWS (uid);
3356 while (*mws_rec)
3357 {
3358 struct df_mw_hardreg *mws = *mws_rec;
3359 if ((DF_MWS_REG_DEF_P (mws))
3360 && !df_ignore_stack_reg (mws->start_regno))
3361 {
3362 bool really_add_notes = debug_insn != 0;
3363
3364 df_set_dead_notes_for_mw (insn,
3365 mws, live, do_not_gen,
3366 artificial_uses,
3367 &really_add_notes);
3368
3369 if (really_add_notes)
3370 debug_insn = -1;
3371 }
3372 mws_rec++;
3373 }
3374
3375 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
3376 {
3377 df_ref use = *use_rec;
3378 unsigned int uregno = DF_REF_REGNO (use);
3379
3380 #ifdef REG_DEAD_DEBUGGING
3381 if (dump_file && !debug_insn)
3382 {
3383 fprintf (dump_file, " regular looking at use ");
3384 df_ref_debug (use, dump_file);
3385 }
3386 #endif
3387 if (!bitmap_bit_p (live, uregno))
3388 {
3389 if (debug_insn)
3390 {
3391 if (debug_insn > 0)
3392 {
3393 dead_debug_add (&debug, use, uregno);
3394 continue;
3395 }
3396 break;
3397 }
3398 else
3399 dead_debug_insert_before (&debug, uregno, insn);
3400
3401 if ( (!(DF_REF_FLAGS (use)
3402 & (DF_REF_MW_HARDREG | DF_REF_READ_WRITE)))
3403 && (!bitmap_bit_p (do_not_gen, uregno))
3404 && (!bitmap_bit_p (artificial_uses, uregno))
3405 && (!df_ignore_stack_reg (uregno)))
3406 {
3407 rtx reg = (DF_REF_LOC (use))
3408 ? *DF_REF_REAL_LOC (use) : DF_REF_REG (use);
3409 df_set_note (REG_DEAD, insn, reg);
3410
3411 #ifdef REG_DEAD_DEBUGGING
3412 df_print_note ("adding 4: ", insn, REG_NOTES (insn));
3413 #endif
3414 }
3415 /* This register is now live. */
3416 bitmap_set_bit (live, uregno);
3417 }
3418 }
3419
3420 if (debug_insn == -1)
3421 {
3422 /* ??? We could probably do better here, replacing dead
3423 registers with their definitions. */
3424 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
3425 df_insn_rescan_debug_internal (insn);
3426 }
3427 }
3428
3429 dead_debug_finish (&debug, NULL);
3430 }
3431
3432
3433 /* Recompute the REG_DEAD and REG_UNUSED notes for every block in ALL_BLOCKS. */
3434 static void
3435 df_note_compute (bitmap all_blocks)
3436 {
3437 unsigned int bb_index;
3438 bitmap_iterator bi;
3439 bitmap_head live, do_not_gen, artificial_uses;
3440
3441 bitmap_initialize (&live, &df_bitmap_obstack);
3442 bitmap_initialize (&do_not_gen, &df_bitmap_obstack);
3443 bitmap_initialize (&artificial_uses, &df_bitmap_obstack);
3444
3445 #ifdef REG_DEAD_DEBUGGING
3446 if (dump_file)
3447 print_rtl_with_bb (dump_file, get_insns());
3448 #endif
3449
3450 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
3451 {
3452 df_note_bb_compute (bb_index, &live, &do_not_gen, &artificial_uses);
3453 }
3454
3455 bitmap_clear (&live);
3456 bitmap_clear (&do_not_gen);
3457 bitmap_clear (&artificial_uses);
3458 }
3459
3460
3461 /* Free all storage associated with the problem. */
3462
3463 static void
3464 df_note_free (void)
3465 {
3466 free (df_note);
3467 }
3468
3469
3470 /* All of the information associated with every instance of the problem. */
3471
3472 static struct df_problem problem_NOTE =
3473 {
3474 DF_NOTE, /* Problem id. */
3475 DF_NONE, /* Direction. */
3476 df_note_alloc, /* Allocate the problem specific data. */
3477 NULL, /* Reset global information. */
3478 NULL, /* Free basic block info. */
3479 df_note_compute, /* Local compute function. */
3480 NULL, /* Init the solution specific data. */
3481 NULL, /* Iterative solver. */
3482 NULL, /* Confluence operator 0. */
3483 NULL, /* Confluence operator n. */
3484 NULL, /* Transfer function. */
3485 NULL, /* Finalize function. */
3486 df_note_free, /* Free all of the problem information. */
3487 df_note_free, /* Remove this problem from the stack of dataflow problems. */
3488 NULL, /* Debugging. */
3489 NULL, /* Debugging start block. */
3490 NULL, /* Debugging end block. */
3491 NULL, /* Incremental solution verify start. */
3492 NULL, /* Incremental solution verify end. */
3493 &problem_LR, /* Dependent problem. */
3494 sizeof (struct df_scan_bb_info),/* Size of entry of block_info array. */
3495 TV_DF_NOTE, /* Timing variable. */
3496 false /* Reset blocks on dropping out of blocks_to_analyze. */
3497 };
3498
3499
3500 /* Create a new DATAFLOW instance and add it to an existing instance
3501 of DF. The recomputed REG_DEAD and REG_UNUSED notes are then found
3502 attached to the insns themselves. */
3503
3504 void
3505 df_note_add_problem (void)
3506 {
3507 df_add_problem (&problem_NOTE);
3508 }
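/* A minimal usage sketch, kept under #if 0 so it is not compiled: after
   the NOTE problem has been computed, the notes can be queried with the
   usual rtl.h helpers.  find_reg_note and NULL_RTX are assumed from
   rtl.h; the function name is only illustrative.  */
#if 0
static bool
example_reg_dies_in_insn_p (rtx insn, rtx reg)
{
  /* Normally done once per pass rather than per query.  */
  df_note_add_problem ();
  df_analyze ();

  return find_reg_note (insn, REG_DEAD, reg) != NULL_RTX;
}
#endif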
3509
3510
3511
3512 \f
3513 /*----------------------------------------------------------------------------
3514 Functions for simulating the effects of single insns.
3515
3516 You can simulate either in the forwards direction, starting from
3517 the top of a block, or in the backwards direction, starting from the
3518 end of the block. If you go backwards, defs are examined first to clear bits,
3519 then uses are examined to set bits. If you go forwards, defs are
3520 examined first to set bits, then REG_DEAD and REG_UNUSED notes
3521 are examined to clear bits. In either case, the result of examining
3522 a def can be undone (respectively by a use or a REG_UNUSED note).
3523
3524 If you start at the top of the block, use one of DF_LIVE_IN or
3525 DF_LR_IN. If you start at the bottom of the block use one of
3526 DF_LIVE_OUT or DF_LR_OUT. BE SURE TO PASS A COPY OF THESE SETS,
3527 THEY WILL BE DESTROYED.
3528 ----------------------------------------------------------------------------*/
3529
3530
3531 /* Find the set of DEFs for INSN. */
3532
3533 void
3534 df_simulate_find_defs (rtx insn, bitmap defs)
3535 {
3536 df_ref *def_rec;
3537 unsigned int uid = INSN_UID (insn);
3538
3539 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3540 {
3541 df_ref def = *def_rec;
3542 bitmap_set_bit (defs, DF_REF_REGNO (def));
3543 }
3544 }
3545
3546 /* Find the set of uses for INSN. This includes partial defs. */
3547
3548 static void
3549 df_simulate_find_uses (rtx insn, bitmap uses)
3550 {
3551 df_ref *rec;
3552 unsigned int uid = INSN_UID (insn);
3553
3554 for (rec = DF_INSN_UID_DEFS (uid); *rec; rec++)
3555 {
3556 df_ref def = *rec;
3557 if (DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))
3558 bitmap_set_bit (uses, DF_REF_REGNO (def));
3559 }
3560 for (rec = DF_INSN_UID_USES (uid); *rec; rec++)
3561 {
3562 df_ref use = *rec;
3563 bitmap_set_bit (uses, DF_REF_REGNO (use));
3564 }
3565 }
3566
3567 /* Find the set of real DEFs, which are not clobbers, for INSN. */
3568
3569 void
3570 df_simulate_find_noclobber_defs (rtx insn, bitmap defs)
3571 {
3572 df_ref *def_rec;
3573 unsigned int uid = INSN_UID (insn);
3574
3575 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3576 {
3577 df_ref def = *def_rec;
3578 if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
3579 bitmap_set_bit (defs, DF_REF_REGNO (def));
3580 }
3581 }
3582
3583
3584 /* Simulate the effects of the defs of INSN on LIVE. */
3585
3586 void
3587 df_simulate_defs (rtx insn, bitmap live)
3588 {
3589 df_ref *def_rec;
3590 unsigned int uid = INSN_UID (insn);
3591
3592 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3593 {
3594 df_ref def = *def_rec;
3595 unsigned int dregno = DF_REF_REGNO (def);
3596
3597 /* If the def is to only part of the reg, it does
3598 not kill the other defs that reach here. */
3599 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
3600 bitmap_clear_bit (live, dregno);
3601 }
3602 }
3603
3604
3605 /* Simulate the effects of the uses of INSN on LIVE. */
3606
3607 void
3608 df_simulate_uses (rtx insn, bitmap live)
3609 {
3610 df_ref *use_rec;
3611 unsigned int uid = INSN_UID (insn);
3612
3613 if (DEBUG_INSN_P (insn))
3614 return;
3615
3616 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
3617 {
3618 df_ref use = *use_rec;
3619 /* Add use to set of uses in this BB. */
3620 bitmap_set_bit (live, DF_REF_REGNO (use));
3621 }
3622 }
3623
3624
3625 /* Add back the always live regs in BB to LIVE. */
3626
3627 static inline void
3628 df_simulate_fixup_sets (basic_block bb, bitmap live)
3629 {
3630 /* These regs are considered always live, so if they end up dying
3631 because of some def, we need to bring them back again. */
3632 if (bb_has_eh_pred (bb))
3633 bitmap_ior_into (live, &df->eh_block_artificial_uses);
3634 else
3635 bitmap_ior_into (live, &df->regular_block_artificial_uses);
3636 }
3637
3638
3639 /*----------------------------------------------------------------------------
3640 The following three functions are used only for BACKWARDS scanning:
3641 i.e. they process the defs before the uses.
3642
3643 df_simulate_initialize_backwards should be called first with a
3644 bitvector copied from DF_LIVE_OUT or DF_LR_OUT. Then
3645 df_simulate_one_insn_backwards should be called for each insn in
3646 the block, starting with the last one. Finally,
3647 df_simulate_finalize_backwards can be called to get a new value
3648 of the sets at the top of the block (this is rarely used).
3649 ----------------------------------------------------------------------------*/
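/* A minimal sketch of the protocol described above, kept under #if 0 so
   it is not compiled: recover the registers live at the top of BB by
   simulating the whole block backwards.  Everything used here is defined
   in this file or in df.h; the function name is only illustrative.  */
#if 0
static void
example_backwards_scan (basic_block bb, bitmap live)
{
  rtx insn;

  /* LIVE must be a private copy; the simulation destroys it.  */
  bitmap_copy (live, df_get_live_out (bb));
  df_simulate_initialize_backwards (bb, live);

  FOR_BB_INSNS_REVERSE (bb, insn)
    df_simulate_one_insn_backwards (bb, insn, live);

  df_simulate_finalize_backwards (bb, live);
  /* LIVE now holds the registers live at the top of BB.  */
}
#endif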
3650
3651 /* Apply the artificial uses and defs at the end of BB in a backwards
3652 direction. */
3653
3654 void
3655 df_simulate_initialize_backwards (basic_block bb, bitmap live)
3656 {
3657 df_ref *def_rec;
3658 df_ref *use_rec;
3659 int bb_index = bb->index;
3660
3661 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3662 {
3663 df_ref def = *def_rec;
3664 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
3665 bitmap_clear_bit (live, DF_REF_REGNO (def));
3666 }
3667
3668 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
3669 {
3670 df_ref use = *use_rec;
3671 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
3672 bitmap_set_bit (live, DF_REF_REGNO (use));
3673 }
3674 }
3675
3676
3677 /* Simulate the backwards effects of INSN on the bitmap LIVE. */
3678
3679 void
3680 df_simulate_one_insn_backwards (basic_block bb, rtx insn, bitmap live)
3681 {
3682 if (!NONDEBUG_INSN_P (insn))
3683 return;
3684
3685 df_simulate_defs (insn, live);
3686 df_simulate_uses (insn, live);
3687 df_simulate_fixup_sets (bb, live);
3688 }
3689
3690
3691 /* Apply the artificial uses and defs at the top of BB in a backwards
3692 direction. */
3693
3694 void
3695 df_simulate_finalize_backwards (basic_block bb, bitmap live)
3696 {
3697 df_ref *def_rec;
3698 #ifdef EH_USES
3699 df_ref *use_rec;
3700 #endif
3701 int bb_index = bb->index;
3702
3703 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3704 {
3705 df_ref def = *def_rec;
3706 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
3707 bitmap_clear_bit (live, DF_REF_REGNO (def));
3708 }
3709
3710 #ifdef EH_USES
3711 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
3712 {
3713 df_ref use = *use_rec;
3714 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
3715 bitmap_set_bit (live, DF_REF_REGNO (use));
3716 }
3717 #endif
3718 }
3719 /*----------------------------------------------------------------------------
3720 The following functions are used only for FORWARDS scanning:
3721 i.e. they process the defs and the REG_DEAD and REG_UNUSED notes.
3722 Thus it is important to add the DF_NOTE problem to the stack of
3723 problems computed before using these functions.
3724
3725 df_simulate_initialize_forwards should be called first with a
3726 bitvector copied from DF_LIVE_IN or DF_LR_IN. Then
3727 df_simulate_one_insn_forwards should be called for each insn in
3728 the block, starting with the first one.
3729 ----------------------------------------------------------------------------*/
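/* A minimal sketch of the protocol described above, kept under #if 0 so
   it is not compiled: walk BB forwards, keeping LIVE up to date after
   each insn.  DF_LR_IN is assumed from df.h, and the DF_NOTE problem
   must already have been computed as explained above.  */
#if 0
static void
example_forwards_scan (basic_block bb, bitmap live)
{
  rtx insn;

  /* LIVE must be a private copy; the simulation destroys it.  */
  bitmap_copy (live, DF_LR_IN (bb));
  df_simulate_initialize_forwards (bb, live);

  FOR_BB_INSNS (bb, insn)
    {
      df_simulate_one_insn_forwards (bb, insn, live);
      /* LIVE is now the set of registers live just after INSN.  */
    }
}
#endif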
3730
3731 /* Initialize the LIVE bitmap, which should be copied from DF_LIVE_IN or
3732 DF_LR_IN for basic block BB, for forward scanning by marking artificial
3733 defs live. */
3734
3735 void
3736 df_simulate_initialize_forwards (basic_block bb, bitmap live)
3737 {
3738 df_ref *def_rec;
3739 int bb_index = bb->index;
3740
3741 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3742 {
3743 df_ref def = *def_rec;
3744 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
3745 bitmap_set_bit (live, DF_REF_REGNO (def));
3746 }
3747 }
3748
3749 /* Simulate the forwards effects of INSN on the bitmap LIVE. */
3750
3751 void
3752 df_simulate_one_insn_forwards (basic_block bb, rtx insn, bitmap live)
3753 {
3754 rtx link;
3755 if (! INSN_P (insn))
3756 return;
3757
3758 /* Make sure that DF_NOTE really is an active df problem. */
3759 gcc_assert (df_note);
3760
3761 /* Note that this is the opposite of how the problem is defined, because
3762 in the LR problem defs _kill_ liveness. However, they do so backwards,
3763 while here the scan is performed forwards! So, first assume that the
3764 def is live, and if this is not true, REG_UNUSED notes will rectify the
3765 situation. */
3766 df_simulate_find_noclobber_defs (insn, live);
3767
3768 /* Clear all of the registers that go dead. */
3769 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
3770 {
3771 switch (REG_NOTE_KIND (link))
3772 {
3773 case REG_DEAD:
3774 case REG_UNUSED:
3775 {
3776 rtx reg = XEXP (link, 0);
3777 int regno = REGNO (reg);
3778 if (HARD_REGISTER_NUM_P (regno))
3779 bitmap_clear_range (live, regno,
3780 hard_regno_nregs[regno][GET_MODE (reg)]);
3781 else
3782 bitmap_clear_bit (live, regno);
3783 }
3784 break;
3785 default:
3786 break;
3787 }
3788 }
3789 df_simulate_fixup_sets (bb, live);
3790 }
3791 \f
3792 /* Used by the next two functions to encode information about the
3793 memory references we found. */
3794 #define MEMREF_NORMAL 1
3795 #define MEMREF_VOLATILE 2
3796
3797 /* A subroutine of can_move_insns_across called through for_each_rtx.
3798 Return either MEMREF_NORMAL or MEMREF_VOLATILE if a memory reference is found. */
3799
3800 static int
3801 find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
3802 {
3803 rtx x = *px;
3804
3805 if (GET_CODE (x) == ASM_OPERANDS && MEM_VOLATILE_P (x))
3806 return MEMREF_VOLATILE;
3807
3808 if (!MEM_P (x))
3809 return 0;
3810 if (MEM_VOLATILE_P (x))
3811 return MEMREF_VOLATILE;
3812 if (MEM_READONLY_P (x))
3813 return 0;
3814
3815 return MEMREF_NORMAL;
3816 }
3817
3818 /* A subroutine of can_move_insns_across called through note_stores.
3819 DATA points to an integer in which we set either the bit for
3820 MEMREF_NORMAL or the bit for MEMREF_VOLATILE if we find a MEM
3821 of either kind. */
3822
3823 static void
3824 find_memory_stores (rtx x, const_rtx pat ATTRIBUTE_UNUSED,
3825 void *data ATTRIBUTE_UNUSED)
3826 {
3827 int *pflags = (int *)data;
3828 if (GET_CODE (x) == SUBREG)
3829 x = XEXP (x, 0);
3830 /* Treat stores to SP as stores to memory; this prevents problems
3831 when there are references to the stack frame. */
3832 if (x == stack_pointer_rtx)
3833 *pflags |= MEMREF_VOLATILE;
3834 if (!MEM_P (x))
3835 return;
3836 *pflags |= MEM_VOLATILE_P (x) ? MEMREF_VOLATILE : MEMREF_NORMAL;
3837 }
3838
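/* Editorial sketch, not part of GCC: how the two helpers above combine
   to classify the memory behaviour of one insn, producing the same
   MEMREF_* flag words that can_move_insns_across works with below.
   The out-parameters PREADS and PWRITES are illustrative names.  */
#if 0
static void
example_classify_insn_memory (rtx insn, int *preads, int *pwrites)
{
  *preads = 0;
  *pwrites = 0;
  if (!NONDEBUG_INSN_P (insn))
    return;

  /* Result of walking the whole pattern looking for MEMs and volatile
     asms; note that for_each_rtx stops at the first one found.  */
  *preads = for_each_rtx (&PATTERN (insn), find_memory, NULL);

  /* Stores, including stores to the stack pointer.  */
  note_stores (PATTERN (insn), find_memory_stores, pwrites);
}
#endif
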
3839 /* Scan BB backwards, using df_simulate functions to keep track of
3840 lifetimes, up to insn POINT. The result is stored in LIVE. */
3841
3842 void
3843 simulate_backwards_to_point (basic_block bb, regset live, rtx point)
3844 {
3845 rtx insn;
3846 bitmap_copy (live, df_get_live_out (bb));
3847 df_simulate_initialize_backwards (bb, live);
3848
3849 /* Scan and update life information until we reach the point we're
3850 interested in. */
3851 for (insn = BB_END (bb); insn != point; insn = PREV_INSN (insn))
3852 df_simulate_one_insn_backwards (bb, insn, live);
3853 }
3854
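/* Editorial sketch, not part of GCC: use simulate_backwards_to_point
   to ask whether register REG is live immediately after insn POINT in
   BB.  POINT itself is not simulated by the function above, and for a
   multi-word hard register only its first regno is checked here.  */
#if 0
static bool
example_reg_live_after_point (basic_block bb, rtx point, rtx reg)
{
  bool result;
  regset live = BITMAP_ALLOC (&reg_obstack);

  simulate_backwards_to_point (bb, live, point);
  result = bitmap_bit_p (live, REGNO (reg)) != 0;

  BITMAP_FREE (live);
  return result;
}
#endif
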
3855 /* Return true if it is safe to move a group of insns, described by
3856 the range FROM to TO, backwards across another group of insns,
3857 described by ACROSS_FROM to ACROSS_TO. It is assumed that there
3858 are no insns between ACROSS_TO and FROM, but they may be in
3859 different basic blocks; MERGE_BB is the block from which the
3860 insns will be moved. The caller must pass in a regset MERGE_LIVE
3861 which specifies the registers live after TO.
3862
3863 This function may be called in one of two cases: either we try to
3864 move identical instructions from all successor blocks into their
3865 predecessor, or we try to move from only one successor block. If
3866 OTHER_BRANCH_LIVE is nonnull, it indicates that we're dealing with
3867 the second case. It should contain a set of registers live at the
3868 end of ACROSS_TO which must not be clobbered by moving the insns.
3869 In that case, we're also more careful about moving memory references
3870 and trapping insns.
3871
3872 We return false if it is not safe to move the entire group, but it
3873 may still be possible to move a subgroup. PMOVE_UPTO, if nonnull,
3874 is set to point at the last moveable insn in such a case. */
3875
3876 bool
3877 can_move_insns_across (rtx from, rtx to, rtx across_from, rtx across_to,
3878 basic_block merge_bb, regset merge_live,
3879 regset other_branch_live, rtx *pmove_upto)
3880 {
3881 rtx insn, next, max_to;
3882 bitmap merge_set, merge_use, local_merge_live;
3883 bitmap test_set, test_use;
3884 unsigned i, fail = 0;
3885 bitmap_iterator bi;
3886 int memrefs_in_across = 0;
3887 int mem_sets_in_across = 0;
3888 bool trapping_insns_in_across = false;
3889
3890 if (pmove_upto != NULL)
3891 *pmove_upto = NULL_RTX;
3892
3893 /* Find real bounds, ignoring debug insns. */
3894 while (!NONDEBUG_INSN_P (from) && from != to)
3895 from = NEXT_INSN (from);
3896 while (!NONDEBUG_INSN_P (to) && from != to)
3897 to = PREV_INSN (to);
3898
3899 for (insn = across_to; ; insn = next)
3900 {
3901 if (NONDEBUG_INSN_P (insn))
3902 {
3903 memrefs_in_across |= for_each_rtx (&PATTERN (insn), find_memory,
3904 NULL);
3905 note_stores (PATTERN (insn), find_memory_stores,
3906 &mem_sets_in_across);
3907 /* This is used just to find sets of the stack pointer. */
3908 memrefs_in_across |= mem_sets_in_across;
3909 trapping_insns_in_across |= may_trap_p (PATTERN (insn));
3910 }
3911 next = PREV_INSN (insn);
3912 if (insn == across_from)
3913 break;
3914 }
3915
3916 /* Collect:
3917 MERGE_SET = set of registers set in MERGE_BB
3918 MERGE_USE = set of registers used in MERGE_BB and live at its top
3919 MERGE_LIVE = set of registers live at the point inside the MERGE
3920 range that we've reached during scanning
3921 TEST_SET = set of registers set between ACROSS_FROM and ACROSS_TO.
3922 TEST_USE = set of registers used between ACROSS_FROM and ACROSS_TO,
3923 and live before ACROSS_FROM. */
3924
3925 merge_set = BITMAP_ALLOC (&reg_obstack);
3926 merge_use = BITMAP_ALLOC (&reg_obstack);
3927 local_merge_live = BITMAP_ALLOC (&reg_obstack);
3928 test_set = BITMAP_ALLOC (&reg_obstack);
3929 test_use = BITMAP_ALLOC (&reg_obstack);
3930
3931 /* Compute the set of registers set and used in the ACROSS range. */
3932 if (other_branch_live != NULL)
3933 bitmap_copy (test_use, other_branch_live);
3934 df_simulate_initialize_backwards (merge_bb, test_use);
3935 for (insn = across_to; ; insn = next)
3936 {
3937 if (NONDEBUG_INSN_P (insn))
3938 {
3939 df_simulate_find_defs (insn, test_set);
3940 df_simulate_defs (insn, test_use);
3941 df_simulate_uses (insn, test_use);
3942 }
3943 next = PREV_INSN (insn);
3944 if (insn == across_from)
3945 break;
3946 }
3947
3948 /* Compute an upper bound for the number of insns moved, by finding
3949 the first insn in MERGE that sets a register in TEST_USE, or uses
3950 a register in TEST_SET. We also check for calls, trapping operations,
3951 and memory references. */
3952 max_to = NULL_RTX;
3953 for (insn = from; ; insn = next)
3954 {
3955 if (CALL_P (insn))
3956 break;
3957 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
3958 break;
3959 if (NONDEBUG_INSN_P (insn))
3960 {
3961 if (may_trap_or_fault_p (PATTERN (insn))
3962 && (trapping_insns_in_across || other_branch_live != NULL))
3963 break;
3964
3965 /* We cannot move memory stores past each other, or move memory
3966 reads past stores, at least not without tracking them and
3967 calling true_dependence on every pair.
3968
3969 If there is no other branch and no memory references or
3970 sets in the ACROSS range, we can move memory references
3971 freely, even volatile ones.
3972
3973 Otherwise, the rules are as follows: volatile memory
3974 references and stores can't be moved at all, and any type
3975 of memory reference can't be moved if there are volatile
3976 accesses or stores in the ACROSS range. That leaves
3977 normal reads, which can be moved, as the trapping case is
3978 dealt with elsewhere. */
3979 if (other_branch_live != NULL || memrefs_in_across != 0)
3980 {
3981 int mem_ref_flags = 0;
3982 int mem_set_flags = 0;
3983 note_stores (PATTERN (insn), find_memory_stores, &mem_set_flags);
3984 mem_ref_flags = for_each_rtx (&PATTERN (insn), find_memory,
3985 NULL);
3986 /* Catch sets of the stack pointer. */
3987 mem_ref_flags |= mem_set_flags;
3988
3989 if ((mem_ref_flags | mem_set_flags) & MEMREF_VOLATILE)
3990 break;
3991 if ((memrefs_in_across & MEMREF_VOLATILE) && mem_ref_flags != 0)
3992 break;
3993 if (mem_set_flags != 0
3994 || (mem_sets_in_across != 0 && mem_ref_flags != 0))
3995 break;
3996 }
3997 df_simulate_find_uses (insn, merge_use);
3998 /* We're only interested in uses which use a value live at
3999 the top, not one previously set in this block. */
4000 bitmap_and_compl_into (merge_use, merge_set);
4001 df_simulate_find_defs (insn, merge_set);
4002 if (bitmap_intersect_p (merge_set, test_use)
4003 || bitmap_intersect_p (merge_use, test_set))
4004 break;
4005 #ifdef HAVE_cc0
4006 if (!sets_cc0_p (insn))
4007 #endif
4008 max_to = insn;
4009 }
4010 next = NEXT_INSN (insn);
4011 if (insn == to)
4012 break;
4013 }
4014 if (max_to != to)
4015 fail = 1;
4016
4017 if (max_to == NULL_RTX || (fail && pmove_upto == NULL))
4018 goto out;
4019
4020 /* Now, lower this upper bound by also taking into account that
4021 a range of insns moved across ACROSS must not leave a register
4022 live at the end that will be clobbered in ACROSS. We need to
4023 find a point where TEST_SET & LIVE == 0.
4024
4025 Insns in the MERGE range that set registers which are also set
4026 in the ACROSS range may still be moved as long as we also move
4027 later insns which use the results of the set, and make the
4028 register dead again. This is verified by the condition stated
4029 above. We only need to test it for registers that are set in
4030 the moved region.
4031
4032 MERGE_LIVE is provided by the caller and holds live registers after
4033 TO. */
4034 bitmap_copy (local_merge_live, merge_live);
4035 for (insn = to; insn != max_to; insn = PREV_INSN (insn))
4036 df_simulate_one_insn_backwards (merge_bb, insn, local_merge_live);
4037
4038 /* We're not interested in registers that aren't set in the moved
4039 region at all. */
4040 bitmap_and_into (local_merge_live, merge_set);
4041 for (;;)
4042 {
4043 if (NONDEBUG_INSN_P (insn))
4044 {
4045 if (!bitmap_intersect_p (test_set, local_merge_live)
4046 #ifdef HAVE_cc0
4047 && !sets_cc0_p (insn)
4048 #endif
4049 )
4050 {
4051 max_to = insn;
4052 break;
4053 }
4054
4055 df_simulate_one_insn_backwards (merge_bb, insn,
4056 local_merge_live);
4057 }
4058 if (insn == from)
4059 {
4060 fail = 1;
4061 goto out;
4062 }
4063 insn = PREV_INSN (insn);
4064 }
4065
4066 if (max_to != to)
4067 fail = 1;
4068
4069 if (pmove_upto)
4070 *pmove_upto = max_to;
4071
4072 /* For small register class machines, don't lengthen lifetimes of
4073 hard registers before reload. */
4074 if (! reload_completed
4075 && targetm.small_register_classes_for_mode_p (VOIDmode))
4076 {
4077 EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
4078 {
4079 if (i < FIRST_PSEUDO_REGISTER
4080 && ! fixed_regs[i]
4081 && ! global_regs[i])
4082 fail = 1;
4083 }
4084 }
4085
4086 out:
4087 BITMAP_FREE (merge_set);
4088 BITMAP_FREE (merge_use);
4089 BITMAP_FREE (local_merge_live);
4090 BITMAP_FREE (test_set);
4091 BITMAP_FREE (test_use);
4092
4093 return !fail;
4094 }
4095
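/* Editorial sketch, not part of GCC: a minimal caller of
   can_move_insns_across, asking whether all insns of MERGE_BB can be
   hoisted backwards across the final insn of a predecessor PRED_BB
   (assumed to end in a real insn).  The choice of boundaries is
   hypothetical; real callers compute FROM, TO and the ACROSS range
   from their own analysis and may pass a non-NULL OTHER_BRANCH_LIVE.  */
#if 0
static bool
example_can_hoist_whole_block (basic_block merge_bb, basic_block pred_bb)
{
  rtx across = BB_END (pred_bb);
  rtx move_upto;
  bool ok;
  regset merge_live = BITMAP_ALLOC (&reg_obstack);

  /* Registers live after the last insn of MERGE_BB.  */
  bitmap_copy (merge_live, df_get_live_out (merge_bb));

  ok = can_move_insns_across (BB_HEAD (merge_bb), BB_END (merge_bb),
			      across, across, merge_bb, merge_live,
			      NULL, &move_upto);

  BITMAP_FREE (merge_live);
  return ok;
}
#endif
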
4096 \f
4097 /*----------------------------------------------------------------------------
4098 MULTIPLE DEFINITIONS
4099
4100 Find the locations in the function reached by multiple definition sites
4101 for a live pseudo. In and out bitvectors are built for each basic
4102 block. They are restricted for efficiency to live registers.
4103
4104 The gen and kill sets for the problem are obvious. Together they
4105 include all defined registers in a basic block; the gen set includes
4106 registers where a partial or conditional or may-clobber definition is
4107 last in the BB, while the kill set includes registers with a complete
4108 definition coming last. However, the computation of the dataflow
4109 itself is interesting.
4110
4111 The idea behind it comes from SSA form's iterated dominance frontier
4112 criterion for inserting PHI functions. Just like in that case, we can use
4113 the dominance frontier to find places where multiple definitions meet;
4114 a register X defined in a basic block BB1 has multiple definitions in
4115 basic blocks in BB1's dominance frontier.
4116
4117 So, the in-set of a basic block BB2 is not just the union of the
4118 out-sets of BB2's predecessors, but also includes bits that come
4119 from the basic blocks in whose dominance frontier BB2 lies (BB1 in
4120 the previous paragraph). We call this set the init-set of BB2.
4121
4122 (Note: the kill-set is only used to build the init-set; the gen bits
4123 are propagated from BB1 to BB2 by the dataflow iteration anyway.)
4124
4125 For example, if you have
4126
4127 BB1 : r10 = 0
4128 r11 = 0
4129 if <...> goto BB2 else goto BB3;
4130
4131 BB2 : r10 = 1
4132 r12 = 1
4133 goto BB3;
4134
4135 BB3 :
4136
4137 you have BB3 in BB2's dominance frontier but not in BB1's, so that the
4138 init-set of BB3 includes r10 and r12, but not r11. Note that we do
4139 not need to iterate the dominance frontier, because we do not insert
4140 anything like PHI functions there! Instead, dataflow will take care of
4141 propagating the information to BB3's successors.
4142 ---------------------------------------------------------------------------*/
4143
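/* Editorial note, not part of the original sources: with LIVEIN and
   LIVEOUT taken from df_get_live_in/df_get_live_out, the problem
   described above is solved with the equations

     init(BB) = union over every BB1 with BB in DF(BB1) of
                  (kill(BB1) & livein(BB))
     in(BB)   = init(BB) | union over predecessors P of out(P)
     out(BB)  = (gen(BB) & liveout(BB)) | (in(BB) & livein(BB) & ~kill(BB))

   which is what df_md_local_compute, df_md_confluence_0/_n and
   df_md_transfer_function below implement (EH edges additionally mask
   out regs_invalidated_by_call_regset).  */
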
4144 /* Private data used to verify the solution for this problem. */
4145 struct df_md_problem_data
4146 {
4147 /* An obstack for the bitmaps we need for this problem. */
4148 bitmap_obstack md_bitmaps;
4149 };
4150
4151 /* Scratch var used by transfer functions. This is used to do md analysis
4152 only for live registers. */
4153 static bitmap_head df_md_scratch;
4154
4155
4156 static void
4157 df_md_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
4158 void *vbb_info)
4159 {
4160 struct df_md_bb_info *bb_info = (struct df_md_bb_info *) vbb_info;
4161 if (bb_info)
4162 {
4163 bitmap_clear (&bb_info->kill);
4164 bitmap_clear (&bb_info->gen);
4165 bitmap_clear (&bb_info->init);
4166 bitmap_clear (&bb_info->in);
4167 bitmap_clear (&bb_info->out);
4168 }
4169 }
4170
4171
4172 /* Allocate or reset bitmaps for DF_MD. The solution bits are
4173 not touched unless the block is new. */
4174
4175 static void
4176 df_md_alloc (bitmap all_blocks)
4177 {
4178 unsigned int bb_index;
4179 bitmap_iterator bi;
4180 struct df_md_problem_data *problem_data;
4181
4182 df_grow_bb_info (df_md);
4183 if (df_md->problem_data)
4184 problem_data = (struct df_md_problem_data *) df_md->problem_data;
4185 else
4186 {
4187 problem_data = XNEW (struct df_md_problem_data);
4188 df_md->problem_data = problem_data;
4189 bitmap_obstack_initialize (&problem_data->md_bitmaps);
4190 }
4191 bitmap_initialize (&df_md_scratch, &problem_data->md_bitmaps);
4192
4193 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
4194 {
4195 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4196 /* When bitmaps are already initialized, just clear them. */
4197 if (bb_info->init.obstack)
4198 {
4199 bitmap_clear (&bb_info->init);
4200 bitmap_clear (&bb_info->gen);
4201 bitmap_clear (&bb_info->kill);
4202 bitmap_clear (&bb_info->in);
4203 bitmap_clear (&bb_info->out);
4204 }
4205 else
4206 {
4207 bitmap_initialize (&bb_info->init, &problem_data->md_bitmaps);
4208 bitmap_initialize (&bb_info->gen, &problem_data->md_bitmaps);
4209 bitmap_initialize (&bb_info->kill, &problem_data->md_bitmaps);
4210 bitmap_initialize (&bb_info->in, &problem_data->md_bitmaps);
4211 bitmap_initialize (&bb_info->out, &problem_data->md_bitmaps);
4212 }
4213 }
4214
4215 df_md->optional_p = true;
4216 }
4217
4218 /* Add the effect of the top artificial defs of BB to the multiple definitions
4219 bitmap LOCAL_MD. */
4220
4221 void
4222 df_md_simulate_artificial_defs_at_top (basic_block bb, bitmap local_md)
4223 {
4224 int bb_index = bb->index;
4225 df_ref *def_rec;
4226 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
4227 {
4228 df_ref def = *def_rec;
4229 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
4230 {
4231 unsigned int dregno = DF_REF_REGNO (def);
4232 if (DF_REF_FLAGS (def)
4233 & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
4234 bitmap_set_bit (local_md, dregno);
4235 else
4236 bitmap_clear_bit (local_md, dregno);
4237 }
4238 }
4239 }
4240
4241
4242 /* Add the effect of the defs of INSN to the multiple definitions bitmap
4243 LOCAL_MD. */
4244
4245 void
4246 df_md_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn,
4247 bitmap local_md)
4248 {
4249 unsigned uid = INSN_UID (insn);
4250 df_ref *def_rec;
4251
4252 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
4253 {
4254 df_ref def = *def_rec;
4255 unsigned int dregno = DF_REF_REGNO (def);
4256 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
4257 || (dregno >= FIRST_PSEUDO_REGISTER))
4258 {
4259 if (DF_REF_FLAGS (def)
4260 & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
4261 bitmap_set_bit (local_md, dregno);
4262 else
4263 bitmap_clear_bit (local_md, dregno);
4264 }
4265 }
4266 }
4267
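/* Editorial sketch, not part of GCC: a forward walk over BB that keeps
   LOCAL_MD up to date, so that before each insn it holds the live
   registers reached by more than one definition at that point.  This
   is roughly the shape of the walk done by consumers of this problem
   (for example the single def-use analysis in fwprop.c); the per-use
   check is only illustrative.  */
#if 0
static void
example_walk_multiple_defs (basic_block bb)
{
  rtx insn;
  bitmap_head local_md;

  bitmap_initialize (&local_md, &bitmap_default_obstack);
  bitmap_copy (&local_md, &df_md_get_bb_info (bb->index)->in);
  df_md_simulate_artificial_defs_at_top (bb, &local_md);

  FOR_BB_INSNS (bb, insn)
    if (NONDEBUG_INSN_P (insn))
      {
	df_ref *use_rec;
	for (use_rec = DF_INSN_UID_USES (INSN_UID (insn)); *use_rec; use_rec++)
	  if (bitmap_bit_p (&local_md, DF_REF_REGNO (*use_rec)))
	    {
	      /* This use may be reached by more than one definition.  */
	    }
	df_md_simulate_one_insn (bb, insn, &local_md);
      }

  bitmap_clear (&local_md);
}
#endif
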
4268 static void
4269 df_md_bb_local_compute_process_def (struct df_md_bb_info *bb_info,
4270 df_ref *def_rec,
4271 int top_flag)
4272 {
4273 df_ref def;
4274 bitmap_clear (&seen_in_insn);
4275
4276 while ((def = *def_rec++) != NULL)
4277 {
4278 unsigned int dregno = DF_REF_REGNO (def);
4279 if (((!(df->changeable_flags & DF_NO_HARD_REGS))
4280 || (dregno >= FIRST_PSEUDO_REGISTER))
4281 && top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
4282 {
4283 if (!bitmap_bit_p (&seen_in_insn, dregno))
4284 {
4285 if (DF_REF_FLAGS (def)
4286 & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
4287 {
4288 bitmap_set_bit (&bb_info->gen, dregno);
4289 bitmap_clear_bit (&bb_info->kill, dregno);
4290 }
4291 else
4292 {
4293 /* When we find a clobber and a regular def,
4294 make sure the regular def wins. */
4295 bitmap_set_bit (&seen_in_insn, dregno);
4296 bitmap_set_bit (&bb_info->kill, dregno);
4297 bitmap_clear_bit (&bb_info->gen, dregno);
4298 }
4299 }
4300 }
4301 }
4302 }
4303
4304
4305 /* Compute local multiple def info for basic block BB. */
4306
4307 static void
4308 df_md_bb_local_compute (unsigned int bb_index)
4309 {
4310 basic_block bb = BASIC_BLOCK (bb_index);
4311 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4312 rtx insn;
4313
4314 /* Artificials are only hard regs. */
4315 if (!(df->changeable_flags & DF_NO_HARD_REGS))
4316 df_md_bb_local_compute_process_def (bb_info,
4317 df_get_artificial_defs (bb_index),
4318 DF_REF_AT_TOP);
4319
4320 FOR_BB_INSNS (bb, insn)
4321 {
4322 unsigned int uid = INSN_UID (insn);
4323 if (!INSN_P (insn))
4324 continue;
4325
4326 df_md_bb_local_compute_process_def (bb_info, DF_INSN_UID_DEFS (uid), 0);
4327 }
4328
4329 if (!(df->changeable_flags & DF_NO_HARD_REGS))
4330 df_md_bb_local_compute_process_def (bb_info,
4331 df_get_artificial_defs (bb_index),
4332 0);
4333 }
4334
4335 /* Compute local multiple def info for each basic block within ALL_BLOCKS. */
4336
4337 static void
4338 df_md_local_compute (bitmap all_blocks)
4339 {
4340 unsigned int bb_index, df_bb_index;
4341 bitmap_iterator bi1, bi2;
4342 basic_block bb;
4343 bitmap_head *frontiers;
4344
4345 bitmap_initialize (&seen_in_insn, &bitmap_default_obstack);
4346
4347 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi1)
4348 {
4349 df_md_bb_local_compute (bb_index);
4350 }
4351
4352 bitmap_clear (&seen_in_insn);
4353
4354 frontiers = XNEWVEC (bitmap_head, last_basic_block);
4355 FOR_ALL_BB (bb)
4356 bitmap_initialize (&frontiers[bb->index], &bitmap_default_obstack);
4357
4358 compute_dominance_frontiers (frontiers);
4359
4360 /* Add each basic block's kills to the nodes in the frontier of the BB. */
4361 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi1)
4362 {
4363 bitmap kill = &df_md_get_bb_info (bb_index)->kill;
4364 EXECUTE_IF_SET_IN_BITMAP (&frontiers[bb_index], 0, df_bb_index, bi2)
4365 {
4366 basic_block bb = BASIC_BLOCK (df_bb_index);
4367 if (bitmap_bit_p (all_blocks, df_bb_index))
4368 bitmap_ior_and_into (&df_md_get_bb_info (df_bb_index)->init, kill,
4369 df_get_live_in (bb));
4370 }
4371 }
4372
4373 FOR_ALL_BB (bb)
4374 bitmap_clear (&frontiers[bb->index]);
4375 free (frontiers);
4376 }
4377
4378
4379 /* Reset the global solution for recalculation. */
4380
4381 static void
4382 df_md_reset (bitmap all_blocks)
4383 {
4384 unsigned int bb_index;
4385 bitmap_iterator bi;
4386
4387 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
4388 {
4389 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4390 gcc_assert (bb_info);
4391 bitmap_clear (&bb_info->in);
4392 bitmap_clear (&bb_info->out);
4393 }
4394 }
4395
4396 static bool
4397 df_md_transfer_function (int bb_index)
4398 {
4399 basic_block bb = BASIC_BLOCK (bb_index);
4400 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4401 bitmap in = &bb_info->in;
4402 bitmap out = &bb_info->out;
4403 bitmap gen = &bb_info->gen;
4404 bitmap kill = &bb_info->kill;
4405
4406 /* We need to use a scratch set here so that the value returned from this
4407 function invocation properly reflects whether the sets changed in a
4408 significant way; i.e. not just because the live set was anded in. */
4409 bitmap_and (&df_md_scratch, gen, df_get_live_out (bb));
4410
4411 /* Multiple definitions of a register are not relevant if it is not
4412 live. Thus we trim the result to the places where it is live. */
4413 bitmap_and_into (in, df_get_live_in (bb));
4414
4415 return bitmap_ior_and_compl (out, &df_md_scratch, in, kill);
4416 }
4417
4418 /* Initialize the solution bit vectors for the problem. */
4419
4420 static void
4421 df_md_init (bitmap all_blocks)
4422 {
4423 unsigned int bb_index;
4424 bitmap_iterator bi;
4425
4426 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
4427 {
4428 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4429
4430 bitmap_copy (&bb_info->in, &bb_info->init);
4431 df_md_transfer_function (bb_index);
4432 }
4433 }
4434
4435 static void
4436 df_md_confluence_0 (basic_block bb)
4437 {
4438 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
4439 bitmap_copy (&bb_info->in, &bb_info->init);
4440 }
4441
4442 /* The IN set of the target gets the union (OR) of the OUT sets of the sources. */
4443
4444 static bool
4445 df_md_confluence_n (edge e)
4446 {
4447 bitmap op1 = &df_md_get_bb_info (e->dest->index)->in;
4448 bitmap op2 = &df_md_get_bb_info (e->src->index)->out;
4449
4450 if (e->flags & EDGE_FAKE)
4451 return false;
4452
4453 if (e->flags & EDGE_EH)
4454 return bitmap_ior_and_compl_into (op1, op2,
4455 regs_invalidated_by_call_regset);
4456 else
4457 return bitmap_ior_into (op1, op2);
4458 }
4459
4460 /* Free all storage associated with the problem. */
4461
4462 static void
4463 df_md_free (void)
4464 {
4465 struct df_md_problem_data *problem_data
4466 = (struct df_md_problem_data *) df_md->problem_data;
4467
4468 bitmap_obstack_release (&problem_data->md_bitmaps);
4469 free (problem_data);
4470 df_md->problem_data = NULL;
4471
4472 df_md->block_info_size = 0;
4473 free (df_md->block_info);
4474 df_md->block_info = NULL;
4475 free (df_md);
4476 }
4477
4478
4479 /* Debugging info at top of bb. */
4480
4481 static void
4482 df_md_top_dump (basic_block bb, FILE *file)
4483 {
4484 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
4485 if (!bb_info)
4486 return;
4487
4488 fprintf (file, ";; md in \t");
4489 df_print_regset (file, &bb_info->in);
4490 fprintf (file, ";; md init \t");
4491 df_print_regset (file, &bb_info->init);
4492 fprintf (file, ";; md gen \t");
4493 df_print_regset (file, &bb_info->gen);
4494 fprintf (file, ";; md kill \t");
4495 df_print_regset (file, &bb_info->kill);
4496 }
4497
4498 /* Debugging info at bottom of bb. */
4499
4500 static void
4501 df_md_bottom_dump (basic_block bb, FILE *file)
4502 {
4503 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
4504 if (!bb_info)
4505 return;
4506
4507 fprintf (file, ";; md out \t");
4508 df_print_regset (file, &bb_info->out);
4509 }
4510
4511 static struct df_problem problem_MD =
4512 {
4513 DF_MD, /* Problem id. */
4514 DF_FORWARD, /* Direction. */
4515 df_md_alloc, /* Allocate the problem specific data. */
4516 df_md_reset, /* Reset global information. */
4517 df_md_free_bb_info, /* Free basic block info. */
4518 df_md_local_compute, /* Local compute function. */
4519 df_md_init, /* Init the solution specific data. */
4520 df_worklist_dataflow, /* Worklist solver. */
4521 df_md_confluence_0, /* Confluence operator 0. */
4522 df_md_confluence_n, /* Confluence operator n. */
4523 df_md_transfer_function, /* Transfer function. */
4524 NULL, /* Finalize function. */
4525 df_md_free, /* Free all of the problem information. */
4526 df_md_free, /* Remove this problem from the stack of dataflow problems. */
4527 NULL, /* Debugging. */
4528 df_md_top_dump, /* Debugging start block. */
4529 df_md_bottom_dump, /* Debugging end block. */
4530 NULL, /* Incremental solution verify start. */
4531 NULL, /* Incremental solution verify end. */
4532 NULL, /* Dependent problem. */
4533 sizeof (struct df_md_bb_info),/* Size of entry of block_info array. */
4534 TV_DF_MD, /* Timing variable. */
4535 false /* Reset blocks on dropping out of blocks_to_analyze. */
4536 };
4537
4538 /* Create a new MD instance and add it to the existing instance
4539 of DF. */
4540
4541 void
4542 df_md_add_problem (void)
4543 {
4544 df_add_problem (&problem_MD);
4545 }
4546
4547
4548