1/* Standard problems for dataflow support routines.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
3 Free Software Foundation, Inc.
4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
8
9This file is part of GCC.
10
11GCC is free software; you can redistribute it and/or modify it under
12the terms of the GNU General Public License as published by the Free
13Software Foundation; either version 2, or (at your option) any later
14version.
15
16GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17WARRANTY; without even the implied warranty of MERCHANTABILITY or
18FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19for more details.
20
21You should have received a copy of the GNU General Public License
22along with GCC; see the file COPYING. If not, write to the Free
23Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
2402110-1301, USA. */
25
26#include "config.h"
27#include "system.h"
28#include "coretypes.h"
29#include "tm.h"
30#include "rtl.h"
31#include "tm_p.h"
32#include "insn-config.h"
33#include "recog.h"
34#include "function.h"
35#include "regs.h"
36#include "output.h"
37#include "alloc-pool.h"
38#include "flags.h"
39#include "hard-reg-set.h"
40#include "basic-block.h"
41#include "sbitmap.h"
42#include "bitmap.h"
43#include "timevar.h"
44#include "df.h"
45
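/* A register whose number of uses or defs exceeds DF_SPARSE_THRESHOLD is
   handled in "sparse" form by the reaching-uses and reaching-defs
   problems below: rather than adding every one of its ref ids to a
   block's kill bitmap, only the regno is recorded (in sparse_kill) and
   the whole contiguous id range for that register is cleared when the
   transfer function runs.  */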
46#define DF_SPARSE_THRESHOLD 32
47
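/* Scratch bitmaps shared by the local-compute routines below.  While a
   block is being scanned, seen_in_insn holds the regnos defined by the
   instruction currently being processed and seen_in_block accumulates
   the regnos defined by the instructions of the block scanned so far.
   They are allocated by df_set_seen and freed by df_unset_seen.  */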
48static bitmap seen_in_block = NULL;
49static bitmap seen_in_insn = NULL;
50
51\f
52/*----------------------------------------------------------------------------
53 Public access functions for the dataflow problems.
54----------------------------------------------------------------------------*/
55
56/* Get the instance of the problem that DFLOW is dependent on. */
57
58struct dataflow *
59df_get_dependent_problem (struct dataflow *dflow)
60{
61 struct df *df = dflow->df;
62 struct df_problem *dependent_problem = dflow->problem->dependent_problem;
63
64 gcc_assert (dependent_problem);
65 return df->problems_by_index[dependent_problem->id];
66}
67
68
69/* Create a du or ud chain from SRC to DST and link it into SRC. */
70
71struct df_link *
72df_chain_create (struct dataflow *dflow, struct df_ref *src, struct df_ref *dst)
73{
74 struct df_link *head = DF_REF_CHAIN (src);
75 struct df_link *link = pool_alloc (dflow->block_pool);
76
77 DF_REF_CHAIN (src) = link;
78 link->next = head;
79 link->ref = dst;
80 return link;
81}
82
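/* Illustrative sketch only, not part of GCC: once du or ud chains have
   been built with df_chain_create, a client can walk the links hanging
   off a ref as below.  The helper name is hypothetical.  */
#if 0
static void
example_print_chain_ids (struct df_ref *ref, FILE *file)
{
  struct df_link *link;

  /* DF_REF_CHAIN is the head of the list built by df_chain_create;
     each link points at the ref on the other end of the chain.  */
  for (link = DF_REF_CHAIN (ref); link; link = link->next)
    fprintf (file, "%d ", DF_REF_ID (link->ref));
}
#endif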
83
84/* Delete a du or ud chain for REF. If LINK is NULL, delete all
85 chains for ref and check to see if the reverse chains can also be
86 deleted. If LINK is not NULL it must be a link off of ref. In
87 this case, the other end is not deleted. */
88
89void
90df_chain_unlink (struct dataflow *dflow, struct df_ref *ref, struct df_link *link)
91{
92 struct df_link *chain = DF_REF_CHAIN (ref);
93 if (link)
94 {
95 /* Link was the first element in the chain. */
96 if (chain == link)
97 DF_REF_CHAIN (ref) = link->next;
98 else
99 {
100 /* Link is an internal element in the chain. */
101 struct df_link *prev = chain;
102 while (chain)
103 {
104 if (chain == link)
105 {
106 prev->next = chain->next;
107 break;
108 }
109 prev = chain;
110 chain = chain->next;
111 }
112 }
113 pool_free (dflow->block_pool, link);
114 }
115 else
116 {
117 /* If chain is NULL here, it was because of a recursive call
118 when the other flavor of chains was not built. Just run thru
119 the entire chain calling the other side and then deleting the
120 link. */
121 while (chain)
122 {
123 struct df_link *next = chain->next;
124 /* Delete the other side if it exists. */
125 df_chain_unlink (dflow, chain->ref, chain);
126 chain = next;
127 }
128 }
129}
130
131
132/* Copy the du or ud chain starting at FROM_REF and attach it to
133 TO_REF. */
134
135void
136df_chain_copy (struct dataflow *dflow,
137 struct df_ref *to_ref,
138 struct df_link *from_ref)
139{
140 while (from_ref)
141 {
142 df_chain_create (dflow, to_ref, from_ref->ref);
143 from_ref = from_ref->next;
144 }
145}
146
147
148/* Get the live in set for BB no matter what problem happens to be
149 defined. */
150
151bitmap
152df_get_live_in (struct df *df, basic_block bb)
153{
154 gcc_assert (df->problems_by_index[DF_LR]);
155
156 if (df->problems_by_index[DF_UREC])
157 return DF_RA_LIVE_IN (df, bb);
158 else if (df->problems_by_index[DF_UR])
159 return DF_LIVE_IN (df, bb);
160 else
161 return DF_UPWARD_LIVE_IN (df, bb);
162}
163
164
165/* Get the live out set for BB no matter what problem happens to be
166 defined. */
167
168bitmap
169df_get_live_out (struct df *df, basic_block bb)
170{
171 gcc_assert (df->problems_by_index[DF_LR]);
172
173 if (df->problems_by_index[DF_UREC])
174 return DF_RA_LIVE_OUT (df, bb);
175 else if (df->problems_by_index[DF_UR])
176 return DF_LIVE_OUT (df, bb);
177 else
178 return DF_UPWARD_LIVE_OUT (df, bb);
179}
180
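/* Illustrative sketch only, not part of GCC: a client that wants the
   most refined live sets available, whichever of the UREC, UR or LR
   problems have been added (LR must be present), can go through the
   two accessors above.  The function name is hypothetical.  */
#if 0
static void
example_dump_live_sets (struct df *df, FILE *file)
{
  basic_block bb;

  FOR_ALL_BB (bb)
    {
      fprintf (file, "live in of bb %d:\n", bb->index);
      dump_bitmap (file, df_get_live_in (df, bb));
      fprintf (file, "live out of bb %d:\n", bb->index);
      dump_bitmap (file, df_get_live_out (df, bb));
    }
}
#endif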
181
182/*----------------------------------------------------------------------------
183 Utility functions.
184----------------------------------------------------------------------------*/
185
186/* Generic versions to get the void* version of the block info. Only
187 used inside the problem instance vectors. */
188
189/* Grow the bb_info array. */
190
191void
192df_grow_bb_info (struct dataflow *dflow)
193{
194 unsigned int new_size = last_basic_block + 1;
195 if (dflow->block_info_size < new_size)
196 {
197 new_size += new_size / 4;
198 dflow->block_info = xrealloc (dflow->block_info,
199 new_size *sizeof (void*));
200 memset (dflow->block_info + dflow->block_info_size, 0,
201 (new_size - dflow->block_info_size) *sizeof (void *));
202 dflow->block_info_size = new_size;
203 }
204}
205
206/* Dump a def-use or use-def chain for REF to FILE. */
207
208void
209df_chain_dump (struct df *df ATTRIBUTE_UNUSED, struct df_link *link, FILE *file)
210{
211 fprintf (file, "{ ");
212 for (; link; link = link->next)
213 {
214 fprintf (file, "%c%d(bb %d insn %d) ",
215 DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
216 DF_REF_ID (link->ref),
217 DF_REF_BBNO (link->ref),
218 DF_REF_INSN (link->ref) ? DF_REF_INSN_UID (link->ref) : -1);
219 }
220 fprintf (file, "}");
221}
222
223
224/* Print some basic block info as part of df_dump. */
225
226void
227df_print_bb_index (basic_block bb, FILE *file)
228{
229 edge e;
230 edge_iterator ei;
231
232 fprintf (file, "( ");
233 FOR_EACH_EDGE (e, ei, bb->preds)
234 {
235 basic_block pred = e->src;
236 fprintf (file, "%d ", pred->index);
237 }
238 fprintf (file, ")->[%d]->( ", bb->index);
239 FOR_EACH_EDGE (e, ei, bb->succs)
240 {
241 basic_block succ = e->dest;
242 fprintf (file, "%d ", succ->index);
243 }
244 fprintf (file, ")\n");
245}
246
247
248/* Return the set of reference ids in the range [START, START + COUNT) for REGNO, caching the result in MAPS[REGNO]. */
249
250static inline bitmap
251df_ref_bitmap (bitmap *maps, unsigned int regno, int start, int count)
252{
253 bitmap ids = maps[regno];
254 if (!ids)
255 {
256 unsigned int i;
257 unsigned int end = start + count;
258 ids = BITMAP_ALLOC (NULL);
259 maps[regno] = ids;
260 for (i = start; i < end; i++)
261 bitmap_set_bit (ids, i);
262 }
263 return ids;
264}
265
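/* Note on the encoding used by df_ref_bitmap: after df_reorganize_refs,
   the ref ids for a given register are contiguous, occupying the range
   [begin, begin + n_refs) recorded in its df_reg_info.  The function
   simply materializes that range as a bitmap, one register at a time,
   and caches it in MAPS so repeated queries for the same regno are
   cheap.  */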
266
267/* Make sure that the seen_in_insn and seen_in_block bitmaps are set
268 up correctly. */
269
270static void
271df_set_seen (void)
272{
273 seen_in_block = BITMAP_ALLOC (NULL);
274 seen_in_insn = BITMAP_ALLOC (NULL);
275}
276
277
278static void
279df_unset_seen (void)
280{
281 BITMAP_FREE (seen_in_block);
282 BITMAP_FREE (seen_in_insn);
283}
284
285
286\f
287/*----------------------------------------------------------------------------
288 REACHING USES
289
290 Find the locations in the function where each use site for a pseudo
291 can reach backwards.
292
293----------------------------------------------------------------------------*/
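/* In the usual gen/kill notation, this is the backward problem

       in(bb)  = gen(bb) U (out(bb) - kill(bb))
       out(bb) = U in(succ)  over the successors of bb

   where the bit vectors are indexed by use id, gen(bb) holds the upward
   exposed uses of bb, and kill(bb) holds the use ids of registers
   defined in bb (possibly represented sparsely; see
   DF_SPARSE_THRESHOLD).  EH edges additionally knock out the uses
   invalidated by calls.  */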
294
295struct df_ru_problem_data
296{
297 bitmap *use_sites; /* Bitmap of uses for each pseudo. */
298 unsigned int use_sites_size; /* Size of use_sites. */
299 /* The regnos whose uses are invalidated by call (sparse form). */
300 bitmap sparse_invalidated_by_call;
301 /* The set of use ids invalidated by call (dense form) for ru. */
302 bitmap dense_invalidated_by_call;
303};
304
305/* Get basic block info. */
306
307struct df_ru_bb_info *
308df_ru_get_bb_info (struct dataflow *dflow, unsigned int index)
309{
310 return (struct df_ru_bb_info *) dflow->block_info[index];
311}
312
313
314/* Set basic block info. */
315
316static void
317df_ru_set_bb_info (struct dataflow *dflow, unsigned int index,
318 struct df_ru_bb_info *bb_info)
319{
320 dflow->block_info[index] = bb_info;
321}
322
323
324/* Free basic block info. */
325
326static void
327df_ru_free_bb_info (struct dataflow *dflow, void *vbb_info)
328{
329 struct df_ru_bb_info *bb_info = (struct df_ru_bb_info *) vbb_info;
330 if (bb_info)
331 {
332 BITMAP_FREE (bb_info->kill);
333 BITMAP_FREE (bb_info->sparse_kill);
334 BITMAP_FREE (bb_info->gen);
335 BITMAP_FREE (bb_info->in);
336 BITMAP_FREE (bb_info->out);
337 pool_free (dflow->block_pool, bb_info);
338 }
339}
340
341
342/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
343 not touched unless the block is new. */
344
345static void
346df_ru_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
347{
348 unsigned int bb_index;
349 bitmap_iterator bi;
350 unsigned int reg_size = max_reg_num ();
351
352 if (! dflow->block_pool)
353 dflow->block_pool = create_alloc_pool ("df_ru_block pool",
354 sizeof (struct df_ru_bb_info), 50);
355
356 if (dflow->problem_data)
357 {
358 unsigned int i;
359 struct df_ru_problem_data *problem_data =
360 (struct df_ru_problem_data *) dflow->problem_data;
361
362 for (i = 0; i < problem_data->use_sites_size; i++)
363 {
364 bitmap bm = problem_data->use_sites[i];
365 if (bm)
366 {
367 BITMAP_FREE (bm);
368 problem_data->use_sites[i] = NULL;
369 }
370 }
371
372 if (problem_data->use_sites_size < reg_size)
373 {
374 problem_data->use_sites
375 = xrealloc (problem_data->use_sites, reg_size * sizeof (bitmap));
376 memset (problem_data->use_sites + problem_data->use_sites_size, 0,
377 (reg_size - problem_data->use_sites_size) * sizeof (bitmap));
378 problem_data->use_sites_size = reg_size;
379 }
380
381 bitmap_clear (problem_data->sparse_invalidated_by_call);
382 bitmap_clear (problem_data->dense_invalidated_by_call);
383 }
384 else
385 {
386 struct df_ru_problem_data *problem_data =
387 xmalloc (sizeof (struct df_ru_problem_data));
388 dflow->problem_data = problem_data;
389
390 problem_data->use_sites = xcalloc (reg_size, sizeof (bitmap));
391 problem_data->use_sites_size = reg_size;
392 problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
393 problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);
394 }
395
396 df_grow_bb_info (dflow);
397
398 /* Because of the clustering of all use sites for the same pseudo,
399 we have to process all of the blocks before doing the
400 analysis. */
401
402 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
403 {
404 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
405 if (bb_info)
406 {
407 bitmap_clear (bb_info->kill);
408 bitmap_clear (bb_info->sparse_kill);
409 bitmap_clear (bb_info->gen);
410 }
411 else
412 {
413 bb_info = (struct df_ru_bb_info *) pool_alloc (dflow->block_pool);
414 df_ru_set_bb_info (dflow, bb_index, bb_info);
415 bb_info->kill = BITMAP_ALLOC (NULL);
416 bb_info->sparse_kill = BITMAP_ALLOC (NULL);
417 bb_info->gen = BITMAP_ALLOC (NULL);
418 bb_info->in = BITMAP_ALLOC (NULL);
419 bb_info->out = BITMAP_ALLOC (NULL);
420 }
421 }
422}
423
424
425/* Process a list of DEFs for df_ru_bb_local_compute. */
426
427static void
428df_ru_bb_local_compute_process_def (struct dataflow *dflow,
429 struct df_ru_bb_info *bb_info,
430 struct df_ref *def)
431{
432 struct df *df = dflow->df;
433 while (def)
434 {
435 unsigned int regno = DF_REF_REGNO (def);
436 unsigned int begin = DF_REG_USE_GET (df, regno)->begin;
437 unsigned int n_uses = DF_REG_USE_GET (df, regno)->n_refs;
438 if (!bitmap_bit_p (seen_in_block, regno))
439 {
440 /* The first def for regno causes the kill info to be
441 generated and the gen information to be cleared. */
442 if (!bitmap_bit_p (seen_in_insn, regno))
443 {
444 if (n_uses > DF_SPARSE_THRESHOLD)
445 {
446 bitmap_set_bit (bb_info->sparse_kill, regno);
447 bitmap_clear_range (bb_info->gen, begin, n_uses);
448 }
449 else
450 {
451 struct df_ru_problem_data *problem_data =
452 (struct df_ru_problem_data *) dflow->problem_data;
453 bitmap uses =
454 df_ref_bitmap (problem_data->use_sites, regno,
455 begin, n_uses);
456 bitmap_ior_into (bb_info->kill, uses);
457 bitmap_and_compl_into (bb_info->gen, uses);
458 }
459 }
460 bitmap_set_bit (seen_in_insn, regno);
461 }
462 def = def->next_ref;
463 }
464}
465
466
467/* Process a list of USEs for df_ru_bb_local_compute. */
468
469static void
470df_ru_bb_local_compute_process_use (struct df_ru_bb_info *bb_info,
471 struct df_ref *use,
472 enum df_ref_flags top_flag)
473{
474 while (use)
475 {
476 if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
477 {
478 /* Add use to set of gens in this BB unless we have seen a
479 def in a previous instruction. */
480 unsigned int regno = DF_REF_REGNO (use);
481 if (!bitmap_bit_p (seen_in_block, regno))
482 bitmap_set_bit (bb_info->gen, DF_REF_ID (use));
483 }
484 use = use->next_ref;
485 }
486}
487
488/* Compute local reaching use (upward exposed use) info for basic
489 block BB. The use ids for register R are cached in use_sites[R]. */
490static void
491df_ru_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
492{
493 struct df *df = dflow->df;
494 basic_block bb = BASIC_BLOCK (bb_index);
495 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
496 rtx insn;
497
498 /* Set when a def for regno is seen. */
499 bitmap_clear (seen_in_block);
500 bitmap_clear (seen_in_insn);
501
502#ifdef EH_USES
503 /* Variables defined in the prolog that are used by the exception
504 handler. */
505 df_ru_bb_local_compute_process_use (bb_info,
506 df_get_artificial_uses (df, bb_index),
507 DF_REF_AT_TOP);
508#endif
509
510 /* Process the artificial defs first since these are at the top of
511 the block. */
512 df_ru_bb_local_compute_process_def (dflow, bb_info,
513 df_get_artificial_defs (df, bb_index));
514
515 FOR_BB_INSNS (bb, insn)
516 {
517 unsigned int uid = INSN_UID (insn);
518 if (! INSN_P (insn))
519 continue;
520
521 df_ru_bb_local_compute_process_def (dflow, bb_info,
522 DF_INSN_UID_GET (df, uid)->defs);
523
524 /* The use processing must happen after the defs processing even
525 though the uses logically happen first since the defs clear
526 the gen set. Otherwise, a use for regno occurring in the same
527 instruction as a def for regno would be cleared. */
528 df_ru_bb_local_compute_process_use (bb_info,
529 DF_INSN_UID_GET (df, uid)->uses, 0);
530
531 bitmap_ior_into (seen_in_block, seen_in_insn);
532 bitmap_clear (seen_in_insn);
533 }
534
535 /* Process the hardware registers that are always live. */
536 df_ru_bb_local_compute_process_use (bb_info,
537 df_get_artificial_uses (df, bb_index), 0);
538}
539
540
541/* Compute local reaching use (upward exposed use) info for each basic
542 block within BLOCKS. */
543static void
544df_ru_local_compute (struct dataflow *dflow,
545 bitmap all_blocks,
546 bitmap rescan_blocks ATTRIBUTE_UNUSED)
547{
548 struct df *df = dflow->df;
549 unsigned int bb_index;
550 bitmap_iterator bi;
551 unsigned int regno;
552 struct df_ru_problem_data *problem_data =
553 (struct df_ru_problem_data *) dflow->problem_data;
554 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
555 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
556
557 df_set_seen ();
558
559 if (!df->use_info.refs_organized)
560 df_reorganize_refs (&df->use_info);
561
562 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
563 {
564 df_ru_bb_local_compute (dflow, bb_index);
565 }
566
567 /* Set up the knockout bit vectors to be applied across EH_EDGES. */
568 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
569 {
570 struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
571 if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
572 bitmap_set_bit (sparse_invalidated, regno);
573 else
574 {
575 bitmap defs = df_ref_bitmap (problem_data->use_sites, regno,
576 reg_info->begin, reg_info->n_refs);
577 bitmap_ior_into (dense_invalidated, defs);
578 }
579 }
580
581 df_unset_seen ();
582}
583
584
585/* Initialize the solution bit vectors for problem. */
586
587static void
588df_ru_init_solution (struct dataflow *dflow, bitmap all_blocks)
589{
590 unsigned int bb_index;
591 bitmap_iterator bi;
592
593 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
594 {
595 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
596 bitmap_copy (bb_info->in, bb_info->gen);
597 bitmap_clear (bb_info->out);
598 }
599}
600
601
602/* Confluence function: OUT of the edge's source gets or of IN of the edge's destination (backward problem). */
603
604static void
605df_ru_confluence_n (struct dataflow *dflow, edge e)
606{
607 bitmap op1 = df_ru_get_bb_info (dflow, e->src->index)->out;
608 bitmap op2 = df_ru_get_bb_info (dflow, e->dest->index)->in;
609
610 if (e->flags & EDGE_EH)
611 {
612 struct df_ru_problem_data *problem_data =
613 (struct df_ru_problem_data *) dflow->problem_data;
614 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
615 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
616 struct df *df = dflow->df;
617 bitmap_iterator bi;
618 unsigned int regno;
619 bitmap_ior_and_compl_into (op1, op2, dense_invalidated);
620 EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
621 {
622 bitmap_clear_range (op1,
623 DF_REG_USE_GET (df, regno)->begin,
624 DF_REG_USE_GET (df, regno)->n_refs);
625 }
626 }
627 else
628 bitmap_ior_into (op1, op2);
629}
630
631
632/* Transfer function. */
633
634static bool
635df_ru_transfer_function (struct dataflow *dflow, int bb_index)
636{
637 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
638 unsigned int regno;
639 bitmap_iterator bi;
640 bitmap in = bb_info->in;
641 bitmap out = bb_info->out;
642 bitmap gen = bb_info->gen;
643 bitmap kill = bb_info->kill;
644 bitmap sparse_kill = bb_info->sparse_kill;
645
646 if (bitmap_empty_p (sparse_kill))
647 return bitmap_ior_and_compl (in, gen, out, kill);
648 else
649 {
650 struct df *df = dflow->df;
651 bool changed = false;
652 bitmap tmp = BITMAP_ALLOC (NULL);
653 bitmap_copy (tmp, out);
654 EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
655 {
656 bitmap_clear_range (tmp,
657 DF_REG_USE_GET (df, regno)->begin,
658 DF_REG_USE_GET (df, regno)->n_refs);
659 }
660 bitmap_and_compl_into (tmp, kill);
661 bitmap_ior_into (tmp, gen);
662 changed = !bitmap_equal_p (tmp, in);
663 if (changed)
664 {
665 BITMAP_FREE (in);
666 bb_info->in = tmp;
667 }
668 else
669 BITMAP_FREE (tmp);
670 return changed;
671 }
672}
673
674
675/* Free all storage associated with the problem. */
676
677static void
678df_ru_free (struct dataflow *dflow)
679{
680 unsigned int i;
681 struct df_ru_problem_data *problem_data =
682 (struct df_ru_problem_data *) dflow->problem_data;
683
684 for (i = 0; i < dflow->block_info_size; i++)
685 {
686 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, i);
687 if (bb_info)
688 {
689 BITMAP_FREE (bb_info->kill);
690 BITMAP_FREE (bb_info->sparse_kill);
691 BITMAP_FREE (bb_info->gen);
692 BITMAP_FREE (bb_info->in);
693 BITMAP_FREE (bb_info->out);
694 }
695 }
696
697 free_alloc_pool (dflow->block_pool);
698
699 for (i = 0; i < problem_data->use_sites_size; i++)
700 {
701 bitmap bm = problem_data->use_sites[i];
702 if (bm)
703 BITMAP_FREE (bm);
704 }
705
706 free (problem_data->use_sites);
707 BITMAP_FREE (problem_data->sparse_invalidated_by_call);
708 BITMAP_FREE (problem_data->dense_invalidated_by_call);
709
710 dflow->block_info_size = 0;
711 free (dflow->block_info);
712 free (dflow->problem_data);
713 free (dflow);
714}
715
716
717/* Debugging info. */
718
719static void
720df_ru_dump (struct dataflow *dflow, FILE *file)
721{
722 basic_block bb;
723 struct df *df = dflow->df;
724 struct df_ru_problem_data *problem_data =
725 (struct df_ru_problem_data *) dflow->problem_data;
726 unsigned int m = max_reg_num ();
727 unsigned int regno;
728
729 fprintf (file, "Reaching uses:\n");
730
731 fprintf (file, " sparse invalidated \t");
732 dump_bitmap (file, problem_data->sparse_invalidated_by_call);
733 fprintf (file, " dense invalidated \t");
734 dump_bitmap (file, problem_data->dense_invalidated_by_call);
735
736 for (regno = 0; regno < m; regno++)
737 if (DF_REG_USE_GET (df, regno)->n_refs)
738 fprintf (file, "%d[%d,%d] ", regno,
739 DF_REG_USE_GET (df, regno)->begin,
740 DF_REG_USE_GET (df, regno)->n_refs);
741 fprintf (file, "\n");
742
743 FOR_ALL_BB (bb)
744 {
745 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb->index);
746 df_print_bb_index (bb, file);
747
748 if (! bb_info->in)
749 continue;
750
751 fprintf (file, " in \t");
752 dump_bitmap (file, bb_info->in);
753 fprintf (file, " gen \t");
754 dump_bitmap (file, bb_info->gen);
755 fprintf (file, " kill\t");
756 dump_bitmap (file, bb_info->kill);
757 fprintf (file, " out \t");
758 dump_bitmap (file, bb_info->out);
759 }
760}
761
762/* All of the information associated with every instance of the problem. */
763
764static struct df_problem problem_RU =
765{
766 DF_RU, /* Problem id. */
767 DF_BACKWARD, /* Direction. */
768 df_ru_alloc, /* Allocate the problem specific data. */
769 df_ru_free_bb_info, /* Free basic block info. */
770 df_ru_local_compute, /* Local compute function. */
771 df_ru_init_solution, /* Init the solution specific data. */
772 df_iterative_dataflow, /* Iterative solver. */
773 NULL, /* Confluence operator 0. */
774 df_ru_confluence_n, /* Confluence operator n. */
775 df_ru_transfer_function, /* Transfer function. */
776 NULL, /* Finalize function. */
777 df_ru_free, /* Free all of the problem information. */
778 df_ru_dump, /* Debugging. */
779 NULL /* Dependent problem. */
780};
781
782
783
784/* Create a new DATAFLOW instance and add it to an existing instance
785 of DF. The returned structure is what is used to get at the
786 solution. */
787
788struct dataflow *
789df_ru_add_problem (struct df *df)
790{
791 return df_add_problem (df, &problem_RU);
792}
793
794\f
795/*----------------------------------------------------------------------------
796 REACHING DEFINITIONS
797
798 Find the locations in the function where each definition site for a
799 pseudo reaches.
800----------------------------------------------------------------------------*/
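/* In the usual gen/kill notation, this is the forward problem

       out(bb) = gen(bb) U (in(bb) - kill(bb))
       in(bb)  = U out(pred)  over the predecessors of bb

   where the bit vectors are indexed by def id, gen(bb) holds the defs
   of bb that reach its end, and kill(bb) holds the def ids of registers
   redefined in bb (possibly represented sparsely).  EH edges
   additionally knock out the defs invalidated by calls.  */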
801
802struct df_rd_problem_data
803{
804 bitmap *def_sites; /* Bitmap of defs for each pseudo. */
805 unsigned int def_sites_size; /* Size of def_sites. */
806 /* The regnos whose defs are invalidated by call (sparse form). */
807 bitmap sparse_invalidated_by_call;
808 /* The set of def ids invalidated by call (dense form) for rd. */
809 bitmap dense_invalidated_by_call;
810};
811
812/* Get basic block info. */
813
814struct df_rd_bb_info *
815df_rd_get_bb_info (struct dataflow *dflow, unsigned int index)
816{
817 return (struct df_rd_bb_info *) dflow->block_info[index];
818}
819
820
821/* Set basic block info. */
822
823static void
824df_rd_set_bb_info (struct dataflow *dflow, unsigned int index,
825 struct df_rd_bb_info *bb_info)
826{
827 dflow->block_info[index] = bb_info;
828}
829
830
831/* Free basic block info. */
832
833static void
834df_rd_free_bb_info (struct dataflow *dflow, void *vbb_info)
835{
836 struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
837 if (bb_info)
838 {
839 BITMAP_FREE (bb_info->kill);
840 BITMAP_FREE (bb_info->sparse_kill);
841 BITMAP_FREE (bb_info->gen);
842 BITMAP_FREE (bb_info->in);
843 BITMAP_FREE (bb_info->out);
844 pool_free (dflow->block_pool, bb_info);
845 }
846}
847
848
849/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
850 not touched unless the block is new. */
851
852static void
853df_rd_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
854{
855 unsigned int bb_index;
856 bitmap_iterator bi;
857 unsigned int reg_size = max_reg_num ();
858
859 if (! dflow->block_pool)
860 dflow->block_pool = create_alloc_pool ("df_rd_block pool",
861 sizeof (struct df_rd_bb_info), 50);
862
863 if (dflow->problem_data)
864 {
865 unsigned int i;
866 struct df_rd_problem_data *problem_data =
867 (struct df_rd_problem_data *) dflow->problem_data;
868
869 for (i = 0; i < problem_data->def_sites_size; i++)
870 {
871 bitmap bm = problem_data->def_sites[i];
872 if (bm)
873 {
874 BITMAP_FREE (bm);
875 problem_data->def_sites[i] = NULL;
876 }
877 }
878
879 if (problem_data->def_sites_size < reg_size)
880 {
881 problem_data->def_sites
882 = xrealloc (problem_data->def_sites, reg_size * sizeof (bitmap));
883 memset (problem_data->def_sites + problem_data->def_sites_size, 0,
884 (reg_size - problem_data->def_sites_size) * sizeof (bitmap));
885 problem_data->def_sites_size = reg_size;
886 }
887
888 bitmap_clear (problem_data->sparse_invalidated_by_call);
889 bitmap_clear (problem_data->dense_invalidated_by_call);
890 }
891 else
892 {
893 struct df_rd_problem_data *problem_data =
894 xmalloc (sizeof (struct df_rd_problem_data));
895 dflow->problem_data = problem_data;
896
897 problem_data->def_sites = xcalloc (reg_size, sizeof (bitmap));
898 problem_data->def_sites_size = reg_size;
899 problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
900 problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);
901 }
902
903 df_grow_bb_info (dflow);
904
905 /* Because of the clustering of all def sites for the same pseudo,
906 we have to process all of the blocks before doing the
907 analysis. */
908
909 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
910 {
911 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
912 if (bb_info)
913 {
914 bitmap_clear (bb_info->kill);
915 bitmap_clear (bb_info->sparse_kill);
916 bitmap_clear (bb_info->gen);
917 }
918 else
919 {
920 bb_info = (struct df_rd_bb_info *) pool_alloc (dflow->block_pool);
921 df_rd_set_bb_info (dflow, bb_index, bb_info);
922 bb_info->kill = BITMAP_ALLOC (NULL);
923 bb_info->sparse_kill = BITMAP_ALLOC (NULL);
924 bb_info->gen = BITMAP_ALLOC (NULL);
925 bb_info->in = BITMAP_ALLOC (NULL);
926 bb_info->out = BITMAP_ALLOC (NULL);
927 }
928 }
929}
930
931
932/* Process a list of DEFs for df_rd_bb_local_compute. */
933
934static void
935df_rd_bb_local_compute_process_def (struct dataflow *dflow,
936 struct df_rd_bb_info *bb_info,
937 struct df_ref *def)
938{
939 struct df *df = dflow->df;
940 while (def)
941 {
942 unsigned int regno = DF_REF_REGNO (def);
943 unsigned int begin = DF_REG_DEF_GET (df, regno)->begin;
944 unsigned int n_defs = DF_REG_DEF_GET (df, regno)->n_refs;
945
946 /* Only the last def(s) for a regno in the block have any
947 effect. */
948 if (!bitmap_bit_p (seen_in_block, regno))
949 {
950 /* The first def for regno in insn gets to knock out the
951 defs from other instructions. */
952 if (!bitmap_bit_p (seen_in_insn, regno))
953 {
954 if (n_defs > DF_SPARSE_THRESHOLD)
955 {
956 bitmap_set_bit (bb_info->sparse_kill, regno);
957 bitmap_clear_range (bb_info->gen, begin, n_defs);
958 }
959 else
960 {
961 struct df_rd_problem_data *problem_data =
962 (struct df_rd_problem_data *) dflow->problem_data;
963 bitmap defs =
964 df_ref_bitmap (problem_data->def_sites, regno,
965 begin, n_defs);
966 bitmap_ior_into (bb_info->kill, defs);
967 bitmap_and_compl_into (bb_info->gen, defs);
968 }
969 }
970
971 bitmap_set_bit (seen_in_insn, regno);
972 /* All non-clobber defs for regno in the instruction may be
973 put into the gen set. */
974 if (! (DF_REF_FLAGS (def) & DF_REF_CLOBBER))
975 bitmap_set_bit (bb_info->gen, DF_REF_ID (def));
976 }
977 def = def->next_ref;
978 }
979}
980
981/* Compute local reaching def info for basic block BB. */
982
983static void
984df_rd_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
985{
986 struct df *df = dflow->df;
987 basic_block bb = BASIC_BLOCK (bb_index);
988 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
989 rtx insn;
990
991 bitmap_clear (seen_in_block);
992 bitmap_clear (seen_in_insn);
993
994 FOR_BB_INSNS_REVERSE (bb, insn)
995 {
996 unsigned int uid = INSN_UID (insn);
997
998 if (! INSN_P (insn))
999 continue;
1000
1001 df_rd_bb_local_compute_process_def (dflow, bb_info,
1002 DF_INSN_UID_GET (df, uid)->defs);
1003
1004 /* This complex dance with the two bitmaps is required because
1005 instructions can assign twice to the same pseudo. This
1006 generally happens with calls that will have one def for the
1007 result and another def for the clobber. If only one vector
1008 is used and the clobber goes first, the result will be
1009 lost. */
1010 bitmap_ior_into (seen_in_block, seen_in_insn);
1011 bitmap_clear (seen_in_insn);
1012 }
1013
1014 /* Process the artificial defs last since we are going backwards
1015 through the block and these are logically at the start. */
1016 df_rd_bb_local_compute_process_def (dflow, bb_info,
1017 df_get_artificial_defs (df, bb_index));
1018}
1019
1020
1021/* Compute local reaching def info for each basic block within BLOCKS. */
1022
1023static void
1024df_rd_local_compute (struct dataflow *dflow,
1025 bitmap all_blocks,
1026 bitmap rescan_blocks ATTRIBUTE_UNUSED)
1027{
1028 struct df *df = dflow->df;
1029 unsigned int bb_index;
1030 bitmap_iterator bi;
1031 unsigned int regno;
1032 struct df_rd_problem_data *problem_data =
1033 (struct df_rd_problem_data *) dflow->problem_data;
1034 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
1035 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
1036
1037 df_set_seen ();
1038
1039 if (!df->def_info.refs_organized)
1040 df_reorganize_refs (&df->def_info);
1041
1042 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1043 {
1044 df_rd_bb_local_compute (dflow, bb_index);
1045 }
1046
1047 /* Set up the knockout bit vectors to be applied across EH_EDGES. */
1048 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
1049 {
1050 struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
1051 if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
1052 {
1053 bitmap_set_bit (sparse_invalidated, regno);
1054 }
1055 else
1056 {
1057 bitmap defs = df_ref_bitmap (problem_data->def_sites, regno,
1058 reg_info->begin, reg_info->n_refs);
1059 bitmap_ior_into (dense_invalidated, defs);
1060 }
1061 }
1062 df_unset_seen ();
1063}
1064
1065
1066/* Initialize the solution bit vectors for problem. */
1067
1068static void
1069df_rd_init_solution (struct dataflow *dflow, bitmap all_blocks)
1070{
1071 unsigned int bb_index;
1072 bitmap_iterator bi;
1073
1074 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1075 {
1076 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
1077
1078 bitmap_copy (bb_info->out, bb_info->gen);
1079 bitmap_clear (bb_info->in);
1080 }
1081}
1082
1083/* Confluence function: IN of the edge's destination gets or of OUT of the edge's source (forward problem). */
1084
1085static void
1086df_rd_confluence_n (struct dataflow *dflow, edge e)
1087{
1088 bitmap op1 = df_rd_get_bb_info (dflow, e->dest->index)->in;
1089 bitmap op2 = df_rd_get_bb_info (dflow, e->src->index)->out;
1090
1091 if (e->flags & EDGE_EH)
1092 {
1093 struct df_rd_problem_data *problem_data =
1094 (struct df_rd_problem_data *) dflow->problem_data;
1095 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
1096 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
1097 struct df *df = dflow->df;
1098 bitmap_iterator bi;
1099 unsigned int regno;
1100 bitmap_ior_and_compl_into (op1, op2, dense_invalidated);
1101 EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
1102 {
1103 bitmap_clear_range (op1,
1104 DF_REG_DEF_GET (df, regno)->begin,
1105 DF_REG_DEF_GET (df, regno)->n_refs);
1106 }
1107 }
1108 else
1109 bitmap_ior_into (op1, op2);
1110}
1111
1112
1113/* Transfer function. */
1114
1115static bool
1116df_rd_transfer_function (struct dataflow *dflow, int bb_index)
1117{
1118 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
1119 unsigned int regno;
1120 bitmap_iterator bi;
1121 bitmap in = bb_info->in;
1122 bitmap out = bb_info->out;
1123 bitmap gen = bb_info->gen;
1124 bitmap kill = bb_info->kill;
1125 bitmap sparse_kill = bb_info->sparse_kill;
1126
1127 if (bitmap_empty_p (sparse_kill))
1128 return bitmap_ior_and_compl (out, gen, in, kill);
1129 else
1130 {
1131 struct df *df = dflow->df;
1132 bool changed = false;
1133 bitmap tmp = BITMAP_ALLOC (NULL);
1134 bitmap_copy (tmp, in);
1135 EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
1136 {
1137 bitmap_clear_range (tmp,
1138 DF_REG_DEF_GET (df, regno)->begin,
1139 DF_REG_DEF_GET (df, regno)->n_refs);
1140 }
1141 bitmap_and_compl_into (tmp, kill);
1142 bitmap_ior_into (tmp, gen);
1143 changed = !bitmap_equal_p (tmp, out);
1144 if (changed)
1145 {
1146 BITMAP_FREE (out);
1147 bb_info->out = tmp;
1148 }
1149 else
1150 BITMAP_FREE (tmp);
1151 return changed;
1152 }
1153}
1154
1155
1156/* Free all storage associated with the problem. */
1157
1158static void
1159df_rd_free (struct dataflow *dflow)
1160{
1161 unsigned int i;
1162 struct df_rd_problem_data *problem_data =
1163 (struct df_rd_problem_data *) dflow->problem_data;
1164
1165 for (i = 0; i < dflow->block_info_size; i++)
1166 {
1167 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, i);
1168 if (bb_info)
1169 {
1170 BITMAP_FREE (bb_info->kill);
1171 BITMAP_FREE (bb_info->sparse_kill);
1172 BITMAP_FREE (bb_info->gen);
1173 BITMAP_FREE (bb_info->in);
1174 BITMAP_FREE (bb_info->out);
1175 }
1176 }
1177
1178 free_alloc_pool (dflow->block_pool);
1179
1180 for (i = 0; i < problem_data->def_sites_size; i++)
1181 {
1182 bitmap bm = problem_data->def_sites[i];
1183 if (bm)
1184 BITMAP_FREE (bm);
1185 }
1186
1187 free (problem_data->def_sites);
1188 BITMAP_FREE (problem_data->sparse_invalidated_by_call);
1189 BITMAP_FREE (problem_data->dense_invalidated_by_call);
1190
1191 dflow->block_info_size = 0;
1192 free (dflow->block_info);
1193 free (dflow->problem_data);
1194 free (dflow);
1195}
1196
1197
1198/* Debugging info. */
1199
1200static void
1201df_rd_dump (struct dataflow *dflow, FILE *file)
1202{
1203 struct df *df = dflow->df;
1204 basic_block bb;
1205 struct df_rd_problem_data *problem_data =
1206 (struct df_rd_problem_data *) dflow->problem_data;
1207 unsigned int m = max_reg_num ();
1208 unsigned int regno;
1209
1210 fprintf (file, "Reaching defs:\n\n");
1211
1212 fprintf (file, " sparse invalidated \t");
1213 dump_bitmap (file, problem_data->sparse_invalidated_by_call);
1214 fprintf (file, " dense invalidated \t");
1215 dump_bitmap (file, problem_data->dense_invalidated_by_call);
1216
1217 for (regno = 0; regno < m; regno++)
1218 if (DF_REG_DEF_GET (df, regno)->n_refs)
1219 fprintf (file, "%d[%d,%d] ", regno,
1220 DF_REG_DEF_GET (df, regno)->begin,
1221 DF_REG_DEF_GET (df, regno)->n_refs);
1222 fprintf (file, "\n");
1223
1224 FOR_ALL_BB (bb)
1225 {
1226 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb->index);
1227 df_print_bb_index (bb, file);
1228
1229 if (! bb_info->in)
1230 continue;
1231
1232 fprintf (file, " in\t(%d)\n", (int) bitmap_count_bits (bb_info->in));
1233 dump_bitmap (file, bb_info->in);
1234 fprintf (file, " gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
1235 dump_bitmap (file, bb_info->gen);
1236 fprintf (file, " kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
1237 dump_bitmap (file, bb_info->kill);
1238 fprintf (file, " out\t(%d)\n", (int) bitmap_count_bits (bb_info->out));
1239 dump_bitmap (file, bb_info->out);
1240 }
1241}
1242
1243/* All of the information associated with every instance of the problem. */
1244
1245static struct df_problem problem_RD =
1246{
1247 DF_RD, /* Problem id. */
1248 DF_FORWARD, /* Direction. */
1249 df_rd_alloc, /* Allocate the problem specific data. */
1250 df_rd_free_bb_info, /* Free basic block info. */
1251 df_rd_local_compute, /* Local compute function. */
1252 df_rd_init_solution, /* Init the solution specific data. */
1253 df_iterative_dataflow, /* Iterative solver. */
1254 NULL, /* Confluence operator 0. */
1255 df_rd_confluence_n, /* Confluence operator n. */
1256 df_rd_transfer_function, /* Transfer function. */
1257 NULL, /* Finalize function. */
1258 df_rd_free, /* Free all of the problem information. */
1259 df_rd_dump, /* Debugging. */
1260 NULL /* Dependent problem. */
1261};
1262
1263
1264
1265/* Create a new DATAFLOW instance and add it to an existing instance
1266 of DF. The returned structure is what is used to get at the
1267 solution. */
1268
1269struct dataflow *
1270df_rd_add_problem (struct df *df)
1271{
1272 return df_add_problem (df, &problem_RD);
1273}
1274
1275
1276\f
1277/*----------------------------------------------------------------------------
1278 LIVE REGISTERS
1279
1280 Find the locations in the function where any use of a pseudo can reach
1281 in the backwards direction.
1282----------------------------------------------------------------------------*/
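/* This is classic liveness, a backward problem over register numbers:

       in(bb)  = use(bb) U (out(bb) - def(bb))
       out(bb) = U in(succ)  over the successors of bb

   where use(bb) holds the registers with an upward exposed use in bb
   and def(bb) the registers defined in bb.  The hard registers in
   df->hardware_regs_used are forced live everywhere, and call-clobbered
   registers die across EH edges.  */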
1283
1284/* Get basic block info. */
1285
1286struct df_lr_bb_info *
1287df_lr_get_bb_info (struct dataflow *dflow, unsigned int index)
1288{
1289 return (struct df_lr_bb_info *) dflow->block_info[index];
1290}
1291
1292
1293/* Set basic block info. */
1294
1295static void
1296df_lr_set_bb_info (struct dataflow *dflow, unsigned int index,
1297 struct df_lr_bb_info *bb_info)
1298{
1299 dflow->block_info[index] = bb_info;
1300}
1301
1302
1303/* Free basic block info. */
1304
1305static void
1306df_lr_free_bb_info (struct dataflow *dflow, void *vbb_info)
1307{
1308 struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
1309 if (bb_info)
1310 {
1311 BITMAP_FREE (bb_info->use);
1312 BITMAP_FREE (bb_info->def);
1313 BITMAP_FREE (bb_info->in);
1314 BITMAP_FREE (bb_info->out);
1315 pool_free (dflow->block_pool, bb_info);
1316 }
1317}
1318
1319
1320/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
1321 not touched unless the block is new. */
1322
1323static void
1324df_lr_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
1325{
1326 unsigned int bb_index;
1327 bitmap_iterator bi;
1328
1329 if (! dflow->block_pool)
1330 dflow->block_pool = create_alloc_pool ("df_lr_block pool",
1331 sizeof (struct df_lr_bb_info), 50);
1332
1333 df_grow_bb_info (dflow);
1334
1335 /* Because of the clustering of all def sites for the same pseudo,
1336 we have to process all of the blocks before doing the
1337 analysis. */
1338
1339 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
1340 {
1341 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1342 if (bb_info)
1343 {
1344 bitmap_clear (bb_info->def);
1345 bitmap_clear (bb_info->use);
1346 }
1347 else
1348 {
1349 bb_info = (struct df_lr_bb_info *) pool_alloc (dflow->block_pool);
1350 df_lr_set_bb_info (dflow, bb_index, bb_info);
1351 bb_info->use = BITMAP_ALLOC (NULL);
1352 bb_info->def = BITMAP_ALLOC (NULL);
1353 bb_info->in = BITMAP_ALLOC (NULL);
1354 bb_info->out = BITMAP_ALLOC (NULL);
1355 }
1356 }
1357}
1358
1359
1360/* Compute local live register info for basic block BB. */
1361
1362static void
1363df_lr_bb_local_compute (struct dataflow *dflow,
1364 struct df *df, unsigned int bb_index)
1365{
1366 basic_block bb = BASIC_BLOCK (bb_index);
1367 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1368 rtx insn;
1369 struct df_ref *def;
1370 struct df_ref *use;
1371
1372 /* Process the hardware registers that are always live. */
1373 for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
1374 /* Add use to set of uses in this BB. */
1375 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
1376 bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
1377
1378 FOR_BB_INSNS_REVERSE (bb, insn)
1379 {
1380 unsigned int uid = INSN_UID (insn);
1381
1382 if (! INSN_P (insn))
1383 continue;
1384
1385 if (CALL_P (insn))
1386 {
1387 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
1388 {
1389 unsigned int dregno = DF_REF_REGNO (def);
1390
1391 if (dregno >= FIRST_PSEUDO_REGISTER
1392 || !(SIBLING_CALL_P (insn)
1393 && bitmap_bit_p (df->exit_block_uses, dregno)
1394 && !refers_to_regno_p (dregno, dregno+1,
1395 current_function_return_rtx,
1396 (rtx *)0)))
1397 {
1398 /* Add def to set of defs in this BB. */
1399 bitmap_set_bit (bb_info->def, dregno);
1400 bitmap_clear_bit (bb_info->use, dregno);
1401 }
1402 }
1403 }
1404 else
1405 {
1406 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
1407 {
1408 unsigned int dregno = DF_REF_REGNO (def);
1409
1410 if (DF_INSN_CONTAINS_ASM (df, insn)
1411 && dregno < FIRST_PSEUDO_REGISTER)
1412 {
1413 unsigned int i;
1414 unsigned int end =
1415 dregno + hard_regno_nregs[dregno][GET_MODE (DF_REF_REG (def))] - 1;
1416 for (i = dregno; i <= end; ++i)
1417 regs_asm_clobbered[i] = 1;
1418 }
1419 /* Add def to set of defs in this BB. */
1420 bitmap_set_bit (bb_info->def, dregno);
1421 bitmap_clear_bit (bb_info->use, dregno);
1422 }
1423 }
1424
1425 for (use = DF_INSN_UID_GET (df, uid)->uses; use; use = use->next_ref)
1426 /* Add use to set of uses in this BB. */
1427 bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
1428 }
1429
1430 /* Process the registers set in an exception handler. */
1431 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
1432 {
1433 unsigned int dregno = DF_REF_REGNO (def);
1434 bitmap_set_bit (bb_info->def, dregno);
1435 bitmap_clear_bit (bb_info->use, dregno);
1436 }
1437
1438#ifdef EH_USES
1439 /* Process the uses that are live into an exception handler. */
1440 for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
1441 /* Add use to set of uses in this BB. */
1442 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
1443 bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
1444#endif
1445}
1446
1447/* Compute local live register info for each basic block within BLOCKS. */
1448
1449static void
1450df_lr_local_compute (struct dataflow *dflow,
1451 bitmap all_blocks,
1452 bitmap rescan_blocks)
1453{
1454 struct df *df = dflow->df;
1455 unsigned int bb_index;
1456 bitmap_iterator bi;
1457
1458 /* If we are recomputing the whole function, start with a clean
1459 slate for the registers clobbered by asm statements. */
1460 if (bitmap_equal_p (all_blocks, rescan_blocks))
1461 memset (regs_asm_clobbered, 0, sizeof (regs_asm_clobbered));
1462
1463 bitmap_clear (df->hardware_regs_used);
1464
1465 /* The all-important stack pointer must always be live. */
1466 bitmap_set_bit (df->hardware_regs_used, STACK_POINTER_REGNUM);
1467
1468 /* Before reload, there are a few registers that must be forced
1469 live everywhere -- which might not already be the case for
1470 blocks within infinite loops. */
1471 if (! reload_completed)
1472 {
1473 /* Any reference to any pseudo before reload is a potential
1474 reference of the frame pointer. */
1475 bitmap_set_bit (df->hardware_regs_used, FRAME_POINTER_REGNUM);
1476
1477#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1478 /* Pseudos with argument area equivalences may require
1479 reloading via the argument pointer. */
1480 if (fixed_regs[ARG_POINTER_REGNUM])
1481 bitmap_set_bit (df->hardware_regs_used, ARG_POINTER_REGNUM);
1482#endif
1483
1484 /* Any constant, or pseudo with constant equivalences, may
1485 require reloading from memory using the pic register. */
1486 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1487 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1488 bitmap_set_bit (df->hardware_regs_used, PIC_OFFSET_TABLE_REGNUM);
1489 }
1490
1491 if (bitmap_bit_p (rescan_blocks, EXIT_BLOCK))
1492 {
1493 /* The exit block is special for this problem and its bits are
1494 computed from thin air. */
1495 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, EXIT_BLOCK);
1496 bitmap_copy (bb_info->use, df->exit_block_uses);
1497 }
1498
1499 EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
1500 {
1501 if (bb_index == EXIT_BLOCK)
1502 continue;
1503 df_lr_bb_local_compute (dflow, df, bb_index);
1504 }
1505}
1506
1507
1508/* Initialize the solution vectors. */
1509
1510static void
1511df_lr_init (struct dataflow *dflow, bitmap all_blocks)
1512{
1513 unsigned int bb_index;
1514 bitmap_iterator bi;
1515
1516 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1517 {
1518 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1519 bitmap_copy (bb_info->in, bb_info->use);
1520 bitmap_clear (bb_info->out);
1521 }
1522}
1523
1524
1525/* Confluence function that processes infinite loops. This might be a
1526 noreturn function that throws. And even if it isn't, getting the
1527 unwind info right helps debugging. */
1528static void
1529df_lr_confluence_0 (struct dataflow *dflow, basic_block bb)
1530{
1531 struct df *df = dflow->df;
1532
1533 bitmap op1 = df_lr_get_bb_info (dflow, bb->index)->out;
1534 if (bb != EXIT_BLOCK_PTR)
1535 bitmap_copy (op1, df->hardware_regs_used);
1536}
1537
1538
1539/* Confluence function that ignores fake edges. */
1540static void
1541df_lr_confluence_n (struct dataflow *dflow, edge e)
1542{
1543 bitmap op1 = df_lr_get_bb_info (dflow, e->src->index)->out;
1544 bitmap op2 = df_lr_get_bb_info (dflow, e->dest->index)->in;
1545
1546 /* Call-clobbered registers die across exception and call edges. */
1547 /* ??? Abnormal call edges ignored for the moment, as this gets
1548 confused by sibling call edges, which crashes reg-stack. */
1549 if (e->flags & EDGE_EH)
1550 bitmap_ior_and_compl_into (op1, op2, df_invalidated_by_call);
1551 else
1552 bitmap_ior_into (op1, op2);
1553
1554 bitmap_ior_into (op1, dflow->df->hardware_regs_used);
1555}
1556
1557
1558/* Transfer function. */
1559static bool
1560df_lr_transfer_function (struct dataflow *dflow, int bb_index)
1561{
1562 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1563 bitmap in = bb_info->in;
1564 bitmap out = bb_info->out;
1565 bitmap use = bb_info->use;
1566 bitmap def = bb_info->def;
1567
1568 return bitmap_ior_and_compl (in, use, out, def);
1569}
1570
1571
1572/* Free all storage associated with the problem. */
1573
1574static void
1575df_lr_free (struct dataflow *dflow)
1576{
1577 unsigned int i;
1578 for (i = 0; i < dflow->block_info_size; i++)
1579 {
1580 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, i);
1581 if (bb_info)
1582 {
1583 BITMAP_FREE (bb_info->use);
1584 BITMAP_FREE (bb_info->def);
1585 BITMAP_FREE (bb_info->in);
1586 BITMAP_FREE (bb_info->out);
1587 }
1588 }
1589 free_alloc_pool (dflow->block_pool);
1590
1591 dflow->block_info_size = 0;
1592 free (dflow->block_info);
1593 free (dflow);
1594}
1595
1596
1597/* Debugging info. */
1598
1599static void
1600df_lr_dump (struct dataflow *dflow, FILE *file)
1601{
1602 basic_block bb;
1603
1604 fprintf (file, "Live Registers:\n");
1605 FOR_ALL_BB (bb)
1606 {
1607 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb->index);
1608 df_print_bb_index (bb, file);
1609
1610 if (!bb_info->in)
1611 continue;
1612
1613 fprintf (file, " in \t");
1614 dump_bitmap (file, bb_info->in);
1615 fprintf (file, " use \t");
1616 dump_bitmap (file, bb_info->use);
1617 fprintf (file, " def \t");
1618 dump_bitmap (file, bb_info->def);
1619 fprintf (file, " out \t");
1620 dump_bitmap (file, bb_info->out);
1621 }
1622}
1623
1624/* All of the information associated with every instance of the problem. */
1625
1626static struct df_problem problem_LR =
1627{
1628 DF_LR, /* Problem id. */
1629 DF_BACKWARD, /* Direction. */
1630 df_lr_alloc, /* Allocate the problem specific data. */
1631 df_lr_free_bb_info, /* Free basic block info. */
1632 df_lr_local_compute, /* Local compute function. */
1633 df_lr_init, /* Init the solution specific data. */
1634 df_iterative_dataflow, /* Iterative solver. */
1635 df_lr_confluence_0, /* Confluence operator 0. */
1636 df_lr_confluence_n, /* Confluence operator n. */
1637 df_lr_transfer_function, /* Transfer function. */
1638 NULL, /* Finalize function. */
1639 df_lr_free, /* Free all of the problem information. */
1640 df_lr_dump, /* Debugging. */
1641 NULL /* Dependent problem. */
1642};
1643
1644
1645/* Create a new DATAFLOW instance and add it to an existing instance
1646 of DF. The returned structure is what is used to get at the
1647 solution. */
1648
1649struct dataflow *
1650df_lr_add_problem (struct df *df)
1651{
1652 return df_add_problem (df, &problem_LR);
1653}
1654
1655
1656\f
1657/*----------------------------------------------------------------------------
1658 UNINITIALIZED REGISTERS
1659
1660 Find the set of uses for registers that are reachable from the entry
1661 block without passing thru a definition.
1662----------------------------------------------------------------------------*/
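/* In gen/kill terms this is a forward problem over register numbers:

       out(bb) = gen(bb) U (in(bb) - kill(bb))
       in(bb)  = U out(pred)  over the non-fake predecessors of bb

   where gen(bb) holds the registers whose last def in bb is a real def
   and kill(bb) those whose last def in bb is a clobber.  The finalize
   step then trims the result against the LR solution.  */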
1663
1664/* Get basic block info. */
1665
1666struct df_ur_bb_info *
1667df_ur_get_bb_info (struct dataflow *dflow, unsigned int index)
1668{
1669 return (struct df_ur_bb_info *) dflow->block_info[index];
1670}
1671
1672
1673/* Set basic block info. */
1674
1675static void
1676df_ur_set_bb_info (struct dataflow *dflow, unsigned int index,
1677 struct df_ur_bb_info *bb_info)
1678{
1679 dflow->block_info[index] = bb_info;
1680}
1681
1682
1683/* Free basic block info. */
1684
1685static void
1686df_ur_free_bb_info (struct dataflow *dflow, void *vbb_info)
1687{
1688 struct df_ur_bb_info *bb_info = (struct df_ur_bb_info *) vbb_info;
1689 if (bb_info)
1690 {
1691 BITMAP_FREE (bb_info->gen);
1692 BITMAP_FREE (bb_info->kill);
1693 BITMAP_FREE (bb_info->in);
1694 BITMAP_FREE (bb_info->out);
1695 pool_free (dflow->block_pool, bb_info);
1696 }
1697}
1698
1699
1700/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
1701 not touched unless the block is new. */
1702
1703static void
1704df_ur_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
1705{
1706 unsigned int bb_index;
1707 bitmap_iterator bi;
1708
1709 if (! dflow->block_pool)
1710 dflow->block_pool = create_alloc_pool ("df_ur_block pool",
1711 sizeof (struct df_ur_bb_info), 100);
1712
1713 df_grow_bb_info (dflow);
1714
1715 /* Because of the clustering of all def sites for the same pseudo,
1716 we have to process all of the blocks before doing the
1717 analysis. */
1718
1719 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
1720 {
1721 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1722 if (bb_info)
1723 {
1724 bitmap_clear (bb_info->kill);
1725 bitmap_clear (bb_info->gen);
1726 }
1727 else
1728 {
1729 bb_info = (struct df_ur_bb_info *) pool_alloc (dflow->block_pool);
1730 df_ur_set_bb_info (dflow, bb_index, bb_info);
1731 bb_info->kill = BITMAP_ALLOC (NULL);
1732 bb_info->gen = BITMAP_ALLOC (NULL);
1733 bb_info->in = BITMAP_ALLOC (NULL);
1734 bb_info->out = BITMAP_ALLOC (NULL);
1735 }
1736 }
1737}
1738
1739
1740/* Compute local uninitialized register info for basic block BB. */
1741
1742static void
1743df_ur_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
1744{
1745 struct df *df = dflow->df;
1746 basic_block bb = BASIC_BLOCK (bb_index);
1747 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1748 rtx insn;
1749 struct df_ref *def;
1750
1751 bitmap_clear (seen_in_block);
1752 bitmap_clear (seen_in_insn);
1753
1754 FOR_BB_INSNS_REVERSE (bb, insn)
1755 {
1756 unsigned int uid = INSN_UID (insn);
1757 if (!INSN_P (insn))
1758 continue;
1759
1760 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
1761 {
1762 unsigned int regno = DF_REF_REGNO (def);
1763 /* Only the last def counts. */
1764 if (!bitmap_bit_p (seen_in_block, regno))
1765 {
1766 bitmap_set_bit (seen_in_insn, regno);
1767
1768 if (DF_REF_FLAGS (def) & DF_REF_CLOBBER)
1769 bitmap_set_bit (bb_info->kill, regno);
1770 else
1771 bitmap_set_bit (bb_info->gen, regno);
1772 }
1773 }
1774 bitmap_ior_into (seen_in_block, seen_in_insn);
1775 bitmap_clear (seen_in_insn);
1776 }
1777
1778 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
1779 {
1780 unsigned int regno = DF_REF_REGNO (def);
1781 if (!bitmap_bit_p (seen_in_block, regno))
1782 {
1783 bitmap_set_bit (seen_in_block, regno);
1784 bitmap_set_bit (bb_info->gen, regno);
1785 }
1786 }
1787}
1788
1789
1790/* Compute local uninitialized register info. */
1791
1792static void
1793df_ur_local_compute (struct dataflow *dflow,
1794 bitmap all_blocks ATTRIBUTE_UNUSED,
1795 bitmap rescan_blocks)
1796{
1797 unsigned int bb_index;
1798 bitmap_iterator bi;
1799
1800 df_set_seen ();
1801
1802 EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
1803 {
1804 df_ur_bb_local_compute (dflow, bb_index);
1805 }
1806
1807 df_unset_seen ();
1808}
1809
1810
1811/* Initialize the solution vectors. */
1812
1813static void
1814df_ur_init (struct dataflow *dflow, bitmap all_blocks)
1815{
1816 unsigned int bb_index;
1817 bitmap_iterator bi;
1818
1819 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1820 {
1821 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1822
1823 bitmap_copy (bb_info->out, bb_info->gen);
1824 bitmap_clear (bb_info->in);
1825 }
1826}
1827
1828
1829/* Or in the stack regs, hard regs and early clobber regs into the
1830 ur_in sets of all of the blocks. */
1831
1832static void
1833df_ur_local_finalize (struct dataflow *dflow, bitmap all_blocks)
1834{
1835 struct df *df = dflow->df;
1836 struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
1837 bitmap tmp = BITMAP_ALLOC (NULL);
1838 bitmap_iterator bi;
1839 unsigned int bb_index;
1840
1841 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1842 {
1843 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1844 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);
1845
1846 bitmap_ior_into (bb_info->in, df_all_hard_regs);
1847 bitmap_ior_into (bb_info->out, df_all_hard_regs);
1848
1849 /* No register may reach a location where it is not used. Thus
1850 we trim the rr result to the places where it is used. */
1851 bitmap_and_into (bb_info->in, bb_lr_info->in);
1852 bitmap_and_into (bb_info->out, bb_lr_info->out);
1853
1854#if 1
1855 /* Hard registers may still stick in the ur_out set, but not
1856 be in the ur_in set, if their only mention was in a call
1857 in this block. This is because a call kills in the lr
1858 problem but does not kill in the ur problem. To clean
1859 this up, we execute the transfer function on the lr_in
1860 set and then use that to knock bits out of ur_out. */
1861 bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
1862 bb_info->kill);
1863 bitmap_and_into (bb_info->out, tmp);
1864#endif
1865 }
1866
1867 BITMAP_FREE (tmp);
1868}
1869
1870
1871/* Confluence function that ignores fake edges. */
1872
1873static void
1874df_ur_confluence_n (struct dataflow *dflow, edge e)
1875{
1876 bitmap op1 = df_ur_get_bb_info (dflow, e->dest->index)->in;
1877 bitmap op2 = df_ur_get_bb_info (dflow, e->src->index)->out;
1878
1879 if (e->flags & EDGE_FAKE)
1880 return;
1881
1882 bitmap_ior_into (op1, op2);
1883}
1884
1885
1886/* Transfer function. */
1887
1888static bool
1889df_ur_transfer_function (struct dataflow *dflow, int bb_index)
1890{
1891 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1892 bitmap in = bb_info->in;
1893 bitmap out = bb_info->out;
1894 bitmap gen = bb_info->gen;
1895 bitmap kill = bb_info->kill;
1896
1897 return bitmap_ior_and_compl (out, gen, in, kill);
1898}
1899
1900
1901/* Free all storage associated with the problem. */
1902
1903static void
1904df_ur_free (struct dataflow *dflow)
1905{
1906 unsigned int i;
1907
1908 for (i = 0; i < dflow->block_info_size; i++)
1909 {
1910 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, i);
1911 if (bb_info)
1912 {
1913 BITMAP_FREE (bb_info->gen);
1914 BITMAP_FREE (bb_info->kill);
1915 BITMAP_FREE (bb_info->in);
1916 BITMAP_FREE (bb_info->out);
1917 }
1918 }
1919
1920 free_alloc_pool (dflow->block_pool);
1921 dflow->block_info_size = 0;
1922 free (dflow->block_info);
1923 free (dflow);
1924}
1925
1926
1927/* Debugging info. */
1928
1929static void
1930df_ur_dump (struct dataflow *dflow, FILE *file)
1931{
1932 basic_block bb;
1933
1934 fprintf (file, "Undefined regs:\n");
1935
1936 FOR_ALL_BB (bb)
1937 {
1938 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb->index);
1939 df_print_bb_index (bb, file);
1940
1941 if (! bb_info->in)
1942 continue;
1943
1944 fprintf (file, " in \t");
1945 dump_bitmap (file, bb_info->in);
1946 fprintf (file, " gen \t");
1947 dump_bitmap (file, bb_info->gen);
1948 fprintf (file, " kill\t");
1949 dump_bitmap (file, bb_info->kill);
1950 fprintf (file, " out \t");
1951 dump_bitmap (file, bb_info->out);
1952 }
1953}
1954
1955/* All of the information associated with every instance of the problem. */
1956
1957static struct df_problem problem_UR =
1958{
1959 DF_UR, /* Problem id. */
1960 DF_FORWARD, /* Direction. */
1961 df_ur_alloc, /* Allocate the problem specific data. */
1962 df_ur_free_bb_info, /* Free basic block info. */
1963 df_ur_local_compute, /* Local compute function. */
1964 df_ur_init, /* Init the solution specific data. */
1965 df_iterative_dataflow, /* Iterative solver. */
1966 NULL, /* Confluence operator 0. */
1967 df_ur_confluence_n, /* Confluence operator n. */
1968 df_ur_transfer_function, /* Transfer function. */
1969 df_ur_local_finalize, /* Finalize function. */
1970 df_ur_free, /* Free all of the problem information. */
1971 df_ur_dump, /* Debugging. */
1972 &problem_LR /* Dependent problem. */
1973};
1974
1975
1976/* Create a new DATAFLOW instance and add it to an existing instance
1977 of DF. The returned structure is what is used to get at the
1978 solution. */
1979
1980struct dataflow *
1981df_ur_add_problem (struct df *df)
1982{
1983 return df_add_problem (df, &problem_UR);
1984}
1985
1986
1987\f
1988/*----------------------------------------------------------------------------
1989 UNINITIALIZED REGISTERS WITH EARLYCLOBBER
1990
1991 Find the set of uses for registers that are reachable from the entry
1992   block without passing through a definition.
1993
1994 This is a variant of the UR problem above that has a lot of special
1995 features just for the register allocation phase.
1996----------------------------------------------------------------------------*/
1997
1998struct df_urec_problem_data
1999{
2000 bool earlyclobbers_found; /* True if any instruction contains an
2001 earlyclobber. */
2002#ifdef STACK_REGS
2003  bitmap stack_regs;		/* Registers that may be allocated to a stack register.  */
2004#endif
2005};
2006
2007
2008/* Get basic block info. */
2009
2010struct df_urec_bb_info *
2011df_urec_get_bb_info (struct dataflow *dflow, unsigned int index)
2012{
2013 return (struct df_urec_bb_info *) dflow->block_info[index];
2014}
2015
2016
2017/* Set basic block info. */
2018
2019static void
2020df_urec_set_bb_info (struct dataflow *dflow, unsigned int index,
2021 struct df_urec_bb_info *bb_info)
2022{
2023 dflow->block_info[index] = bb_info;
2024}
2025
2026
2027/* Free basic block info. */
2028
2029static void
2030df_urec_free_bb_info (struct dataflow *dflow, void *vbb_info)
2031{
2032 struct df_urec_bb_info *bb_info = (struct df_urec_bb_info *) vbb_info;
2033 if (bb_info)
2034 {
2035 BITMAP_FREE (bb_info->gen);
2036 BITMAP_FREE (bb_info->kill);
2037 BITMAP_FREE (bb_info->in);
2038 BITMAP_FREE (bb_info->out);
2039 BITMAP_FREE (bb_info->earlyclobber);
2040 pool_free (dflow->block_pool, bb_info);
2041 }
2042}
2043
2044
2045/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
2046 not touched unless the block is new. */
2047
2048static void
2049df_urec_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
2050{
2051 unsigned int bb_index;
2052 bitmap_iterator bi;
2053 struct df_urec_problem_data *problem_data =
2054 (struct df_urec_problem_data *) dflow->problem_data;
2055
2056 if (! dflow->block_pool)
2057 dflow->block_pool = create_alloc_pool ("df_urec_block pool",
2058 sizeof (struct df_urec_bb_info), 50);
2059
2060 if (!dflow->problem_data)
2061 {
2062 problem_data = xmalloc (sizeof (struct df_urec_problem_data));
2063 dflow->problem_data = problem_data;
2064 }
2065 problem_data->earlyclobbers_found = false;
2066
2067 df_grow_bb_info (dflow);
2068
2069 /* Because of the clustering of all def sites for the same pseudo,
2070 we have to process all of the blocks before doing the
2071 analysis. */
2072
2073 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
2074 {
2075 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2076 if (bb_info)
2077 {
2078 bitmap_clear (bb_info->kill);
2079 bitmap_clear (bb_info->gen);
2080 bitmap_clear (bb_info->earlyclobber);
2081 }
2082 else
2083 {
2084 bb_info = (struct df_urec_bb_info *) pool_alloc (dflow->block_pool);
2085 df_urec_set_bb_info (dflow, bb_index, bb_info);
2086 bb_info->kill = BITMAP_ALLOC (NULL);
2087 bb_info->gen = BITMAP_ALLOC (NULL);
2088 bb_info->in = BITMAP_ALLOC (NULL);
2089 bb_info->out = BITMAP_ALLOC (NULL);
2090 bb_info->earlyclobber = BITMAP_ALLOC (NULL);
2091 }
2092 }
2093}
2094
2095
2096/* The function modifies local info for register REG being changed in
2097 SETTER. DATA is used to pass the current basic block info. */
2098
2099static void
2100df_urec_mark_reg_change (rtx reg, rtx setter, void *data)
2101{
2102 int regno;
2103 int endregno;
2104 int i;
2105 struct df_urec_bb_info *bb_info = (struct df_urec_bb_info*) data;
2106
2107 if (GET_CODE (reg) == SUBREG)
2108 reg = SUBREG_REG (reg);
2109
2110 if (!REG_P (reg))
2111 return;
2112
2113
2114 endregno = regno = REGNO (reg);
2115 if (regno < FIRST_PSEUDO_REGISTER)
2116 {
2117      endregno += hard_regno_nregs[regno][GET_MODE (reg)];
2118
2119 for (i = regno; i < endregno; i++)
2120 {
2121 bitmap_set_bit (bb_info->kill, i);
2122
2123 if (GET_CODE (setter) != CLOBBER)
2124 bitmap_set_bit (bb_info->gen, i);
2125 else
2126 bitmap_clear_bit (bb_info->gen, i);
2127 }
2128 }
2129 else
2130 {
2131 bitmap_set_bit (bb_info->kill, regno);
2132
2133 if (GET_CODE (setter) != CLOBBER)
2134 bitmap_set_bit (bb_info->gen, regno);
2135 else
2136 bitmap_clear_bit (bb_info->gen, regno);
2137 }
2138}
2139/* Classes of registers which could be early clobbered in the current
2140 insn. */
2141
2142DEF_VEC_I(int);
2143DEF_VEC_ALLOC_I(int,heap);
2144
2145static VEC(int,heap) *earlyclobber_regclass;
2146
2147/* This function finds and stores register classes that could be early
2148   clobbered in INSN.  If any earlyclobber classes are found, the function
2149   returns TRUE; otherwise it returns FALSE.  */
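
/* For illustration (an invented example, not taken from any real
   pattern): a constraint string such as "=&r" makes AMP_P true and sets
   CLASS to GENERAL_REGS, so GENERAL_REGS is recorded in
   earlyclobber_regclass and the function returns true, while a plain
   "=r" operand never sees '&' and contributes nothing.  */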
2150
2151static bool
2152df_urec_check_earlyclobber (rtx insn)
2153{
2154 int opno;
2155 bool found = false;
2156
2157 extract_insn (insn);
2158
2159 VEC_truncate (int, earlyclobber_regclass, 0);
2160 for (opno = 0; opno < recog_data.n_operands; opno++)
2161 {
2162 char c;
2163 bool amp_p;
2164 int i;
2165 enum reg_class class;
2166 const char *p = recog_data.constraints[opno];
2167
2168 class = NO_REGS;
2169 amp_p = false;
2170 for (;;)
2171 {
2172 c = *p;
2173 switch (c)
2174 {
2175 case '=': case '+': case '?':
2176 case '#': case '!':
2177 case '*': case '%':
2178 case 'm': case '<': case '>': case 'V': case 'o':
2179 case 'E': case 'F': case 'G': case 'H':
2180 case 's': case 'i': case 'n':
2181 case 'I': case 'J': case 'K': case 'L':
2182 case 'M': case 'N': case 'O': case 'P':
2183 case 'X':
2184 case '0': case '1': case '2': case '3': case '4':
2185 case '5': case '6': case '7': case '8': case '9':
2186 /* These don't say anything we care about. */
2187 break;
2188
2189 case '&':
2190 amp_p = true;
2191 break;
2192 case '\0':
2193 case ',':
2194 if (amp_p && class != NO_REGS)
2195 {
2196 int rc;
2197
2198 found = true;
2199 for (i = 0;
2200 VEC_iterate (int, earlyclobber_regclass, i, rc);
2201 i++)
2202 {
2203 if (rc == (int) class)
2204 goto found_rc;
2205 }
2206
2207 /* We use VEC_quick_push here because
2208 earlyclobber_regclass holds no more than
2209 N_REG_CLASSES elements. */
2210 VEC_quick_push (int, earlyclobber_regclass, (int) class);
2211 found_rc:
2212 ;
2213 }
2214
2215 amp_p = false;
2216 class = NO_REGS;
2217 break;
2218
2219 case 'r':
2220 class = GENERAL_REGS;
2221 break;
2222
2223 default:
2224 class = REG_CLASS_FROM_CONSTRAINT (c, p);
2225 break;
2226 }
2227 if (c == '\0')
2228 break;
2229 p += CONSTRAINT_LEN (c, p);
2230 }
2231 }
2232
2233 return found;
2234}
2235
2236/* The function checks that pseudo-register *X has a class
2237   intersecting with the class of a pseudo-register that could be early
2238   clobbered in the same insn.
2239
2240   This function is a no-op if earlyclobber_regclass is empty.
2241
2242   Reload can assign the same hard register to an uninitialized
2243   pseudo-register and an early clobbered pseudo-register in an insn if
2244   the pseudo-register is used for the first time in the given BB and is
2245   not live at the BB start.  To prevent this we don't change life
2246   information for such pseudo-registers.  */
2247
2248static int
2249df_urec_mark_reg_use_for_earlyclobber (rtx *x, void *data)
2250{
2251 enum reg_class pref_class, alt_class;
2252 int i, regno;
2253 struct df_urec_bb_info *bb_info = (struct df_urec_bb_info*) data;
2254
2255 if (REG_P (*x) && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
2256 {
2257 int rc;
2258
2259 regno = REGNO (*x);
2260 if (bitmap_bit_p (bb_info->kill, regno)
2261 || bitmap_bit_p (bb_info->gen, regno))
2262 return 0;
2263 pref_class = reg_preferred_class (regno);
2264 alt_class = reg_alternate_class (regno);
2265 for (i = 0; VEC_iterate (int, earlyclobber_regclass, i, rc); i++)
2266 {
2267 if (reg_classes_intersect_p (rc, pref_class)
2268 || (rc != NO_REGS
2269 && reg_classes_intersect_p (rc, alt_class)))
2270 {
2271 bitmap_set_bit (bb_info->earlyclobber, regno);
2272 break;
2273 }
2274 }
2275 }
2276 return 0;
2277}
2278
2279/* The function processes all pseudo-registers in *X with the aid of
2280   the previous function.  */
2281
2282static void
2283df_urec_mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
2284{
2285 for_each_rtx (x, df_urec_mark_reg_use_for_earlyclobber, data);
2286}
2287
2288
2289/* Compute local uninitialized register info for basic block BB. */
2290
2291static void
2292df_urec_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
2293{
2294 struct df *df = dflow->df;
2295 basic_block bb = BASIC_BLOCK (bb_index);
2296 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2297 rtx insn;
2298 struct df_ref *def;
2299
2300 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
2301 {
2302 unsigned int regno = DF_REF_REGNO (def);
2303 bitmap_set_bit (bb_info->gen, regno);
2304 }
2305
2306 FOR_BB_INSNS (bb, insn)
2307 {
2308 if (INSN_P (insn))
2309 {
2310 note_stores (PATTERN (insn), df_urec_mark_reg_change, bb_info);
2311 if (df_state & (DF_SCAN_GLOBAL | DF_SCAN_POST_ALLOC)
2312 && df_urec_check_earlyclobber (insn))
2313 {
2314 struct df_urec_problem_data *problem_data =
2315 (struct df_urec_problem_data *) dflow->problem_data;
2316 problem_data->earlyclobbers_found = true;
2317 note_uses (&PATTERN (insn),
2318 df_urec_mark_reg_use_for_earlyclobber_1, bb_info);
2319 }
2320 }
2321 }
2322}
2323
2324
2325/* Compute local uninitialized register info. */
2326
2327static void
2328df_urec_local_compute (struct dataflow *dflow,
2329 bitmap all_blocks ATTRIBUTE_UNUSED,
2330 bitmap rescan_blocks)
2331{
2332 unsigned int bb_index;
2333 bitmap_iterator bi;
2334#ifdef STACK_REGS
2335 int i;
2336 HARD_REG_SET zero, stack_hard_regs, used;
2337 struct df_urec_problem_data *problem_data =
2338 (struct df_urec_problem_data *) dflow->problem_data;
2339
2340 /* Any register that MAY be allocated to a register stack (like the
2341 387) is treated poorly. Each such register is marked as being
2342 live everywhere. This keeps the register allocator and the
2343 subsequent passes from doing anything useful with these values.
2344
2345 FIXME: This seems like an incredibly poor idea. */
2346
2347 CLEAR_HARD_REG_SET (zero);
2348 CLEAR_HARD_REG_SET (stack_hard_regs);
2349 for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
2350 SET_HARD_REG_BIT (stack_hard_regs, i);
2351 problem_data->stack_regs = BITMAP_ALLOC (NULL);
2352 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2353 {
2354 COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
2355 IOR_HARD_REG_SET (used, reg_class_contents[reg_alternate_class (i)]);
2356 AND_HARD_REG_SET (used, stack_hard_regs);
2357 GO_IF_HARD_REG_EQUAL (used, zero, skip);
2358 bitmap_set_bit (problem_data->stack_regs, i);
2359 skip:
2360 ;
2361 }
2362#endif
2363
2364 /* We know that earlyclobber_regclass holds no more than
2365 N_REG_CLASSES elements. See df_urec_check_earlyclobber. */
2366 earlyclobber_regclass = VEC_alloc (int, heap, N_REG_CLASSES);
2367
2368 EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
2369 {
2370 df_urec_bb_local_compute (dflow, bb_index);
2371 }
2372
2373 VEC_free (int, heap, earlyclobber_regclass);
2374}
2375
2376
2377/* Initialize the solution vectors. */
2378
2379static void
2380df_urec_init (struct dataflow *dflow, bitmap all_blocks)
2381{
2382 unsigned int bb_index;
2383 bitmap_iterator bi;
2384
2385 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2386 {
2387 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2388
2389 /* FIXME: This is a hack, it has been copied over from
2390 make_accurate_live_analysis by Vlad. Most likely it is necessary
2391 because the generation of gen and kill information for hardware
2392 registers in ur is a subset of what is really necessary and what
2393 is done for the lr problem. */
2394
2395 /* Inside the register allocator, partial availability is only
2396	 allowed for the pseudo registers.  To implement this, the rr is
2397	 initially iored with a mask of ones for the hard registers and zeros
2398 for the pseudos before being iterated. This means that each
2399 hardware register will be live unless explicitly killed by some
2400	 statement.  Eventually most of these bits will die because the
2401 results of rr are anded with the results of lr before being used.
2402 Outside of register allocation, a more conservative strategy of
2403	 completely ignoring the uninitialized registers is employed in the
2404 finalizer function. */
2405 if (df_state & DF_SCAN_GLOBAL)
2406 {
2407 bitmap_ior (bb_info->out, bb_info->gen, df_all_hard_regs);
2408 bitmap_copy (bb_info->in, df_all_hard_regs);
2409 }
2410 else
2411 {
2412 bitmap_copy (bb_info->out, bb_info->gen);
2413 bitmap_clear (bb_info->in);
2414 }
2415 }
2416}
2417
2418
2419/* Or in the stack regs, hard regs and early clobber regs into the
2420 ur_in sets of all of the blocks. */
2421
2422static void
2423df_urec_local_finalize (struct dataflow *dflow, bitmap all_blocks)
2424{
2425 struct df *df = dflow->df;
2426 struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
2427 bitmap tmp = BITMAP_ALLOC (NULL);
2428 bitmap_iterator bi;
2429 unsigned int bb_index;
2430 struct df_urec_problem_data *problem_data =
2431 (struct df_urec_problem_data *) dflow->problem_data;
2432
2433 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2434 {
2435 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2436 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);
2437
2438 if (bb_index != ENTRY_BLOCK && bb_index != EXIT_BLOCK)
2439 {
2440 if (problem_data->earlyclobbers_found)
2441 bitmap_ior_into (bb_info->in, bb_info->earlyclobber);
2442
2443#ifdef STACK_REGS
2444	  /* We cannot use the same stack register for an uninitialized
2445	     pseudo-register and another live pseudo-register,
2446	     because if the uninitialized pseudo-register dies, the
2447	     subsequent reg-stack pass will be confused (it will
2448	     believe that the other register dies).  */
2449 bitmap_ior_into (bb_info->in, problem_data->stack_regs);
2450 bitmap_ior_into (bb_info->out, problem_data->stack_regs);
2451#endif
2452 }
2453
2454 if (!(df_state & DF_SCAN_GLOBAL))
2455 {
2456 bitmap_ior_into (bb_info->in, df_all_hard_regs);
2457 bitmap_ior_into (bb_info->out, df_all_hard_regs);
2458 }
2459
2460 /* No register may reach a location where it is not used. Thus
2461 we trim the rr result to the places where it is used. */
2462 bitmap_and_into (bb_info->in, bb_lr_info->in);
2463 bitmap_and_into (bb_info->out, bb_lr_info->out);
2464
2465#if 1
2466 /* Hard registers may still stick in the ur_out set, but not
2467 be in the ur_in set, if their only mention was in a call
2468 in this block. This is because a call kills in the lr
2469 problem but does not kill in the rr problem. To clean
2470 this up, we execute the transfer function on the lr_in
2471 set and then use that to knock bits out of ur_out. */
2472 bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
2473 bb_info->kill);
2474 bitmap_and_into (bb_info->out, tmp);
2475#endif
2476 }
2477
2478#ifdef STACK_REGS
2479 BITMAP_FREE (problem_data->stack_regs);
2480#endif
2481 BITMAP_FREE (tmp);
2482}
2483
2484
2485/* Confluence function that ignores fake edges. */
2486
2487static void
2488df_urec_confluence_n (struct dataflow *dflow, edge e)
2489{
2490 bitmap op1 = df_urec_get_bb_info (dflow, e->dest->index)->in;
2491 bitmap op2 = df_urec_get_bb_info (dflow, e->src->index)->out;
2492
2493 if (e->flags & EDGE_FAKE)
2494 return;
2495
2496 bitmap_ior_into (op1, op2);
2497}
2498
2499
2500/* Transfer function. */
2501
2502static bool
2503df_urec_transfer_function (struct dataflow *dflow, int bb_index)
2504{
2505 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2506 bitmap in = bb_info->in;
2507 bitmap out = bb_info->out;
2508 bitmap gen = bb_info->gen;
2509 bitmap kill = bb_info->kill;
2510
2511 return bitmap_ior_and_compl (out, gen, in, kill);
2512}
2513
2514
2515/* Free all storage associated with the problem. */
2516
2517static void
2518df_urec_free (struct dataflow *dflow)
2519{
2520 unsigned int i;
2521
2522 for (i = 0; i < dflow->block_info_size; i++)
2523 {
2524 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, i);
2525 if (bb_info)
2526 {
2527 BITMAP_FREE (bb_info->gen);
2528 BITMAP_FREE (bb_info->kill);
2529 BITMAP_FREE (bb_info->in);
2530 BITMAP_FREE (bb_info->out);
2531 BITMAP_FREE (bb_info->earlyclobber);
2532 }
2533 }
2534
2535 free_alloc_pool (dflow->block_pool);
2536
2537 dflow->block_info_size = 0;
2538 free (dflow->block_info);
2539 free (dflow->problem_data);
2540 free (dflow);
2541}
2542
2543
2544/* Debugging info. */
2545
2546static void
2547df_urec_dump (struct dataflow *dflow, FILE *file)
2548{
2549 basic_block bb;
2550
2551 fprintf (file, "Undefined regs:\n");
2552
2553 FOR_ALL_BB (bb)
2554 {
2555 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb->index);
2556 df_print_bb_index (bb, file);
2557
2558 if (! bb_info->in)
2559 continue;
2560
2561 fprintf (file, " in \t");
2562 dump_bitmap (file, bb_info->in);
2563 fprintf (file, " gen \t");
2564 dump_bitmap (file, bb_info->gen);
2565 fprintf (file, " kill\t");
2566 dump_bitmap (file, bb_info->kill);
2567 fprintf (file, " ec\t");
2568 dump_bitmap (file, bb_info->earlyclobber);
2569 fprintf (file, " out \t");
2570 dump_bitmap (file, bb_info->out);
2571 }
2572}
2573
2574/* All of the information associated with every instance of the problem. */
2575
2576static struct df_problem problem_UREC =
2577{
2578 DF_UREC, /* Problem id. */
2579 DF_FORWARD, /* Direction. */
2580 df_urec_alloc, /* Allocate the problem specific data. */
2581 df_urec_free_bb_info, /* Free basic block info. */
2582 df_urec_local_compute, /* Local compute function. */
2583 df_urec_init, /* Init the solution specific data. */
2584 df_iterative_dataflow, /* Iterative solver. */
2585 NULL, /* Confluence operator 0. */
2586 df_urec_confluence_n, /* Confluence operator n. */
2587 df_urec_transfer_function, /* Transfer function. */
2588 df_urec_local_finalize, /* Finalize function. */
2589 df_urec_free, /* Free all of the problem information. */
2590 df_urec_dump, /* Debugging. */
2591 &problem_LR /* Dependent problem. */
2592};
2593
2594
2595/* Create a new DATAFLOW instance and add it to an existing instance
2596 of DF. The returned structure is what is used to get at the
2597 solution. */
2598
2599struct dataflow *
2600df_urec_add_problem (struct df *df)
2601{
2602 return df_add_problem (df, &problem_UREC);
2603}
2604
2605
2606\f
2607/*----------------------------------------------------------------------------
2608 CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS
2609
2610 Link either the defs to the uses and / or the uses to the defs.
2611
2612 These problems are set up like the other dataflow problems so that
2613 they nicely fit into the framework. They are much simpler and only
2614 involve a single traversal of instructions and an examination of
2615 the reaching defs information (the dependent problem).
2616----------------------------------------------------------------------------*/
2617
2618struct df_chain_problem_data
2619{
2620 int flags;
2621};
2622
2623
2624/* Create def-use or use-def chains. */
2625
2626static void
2627df_chain_alloc (struct dataflow *dflow,
2628 bitmap blocks_to_rescan ATTRIBUTE_UNUSED)
2629{
2630 struct df *df = dflow->df;
2631 unsigned int i;
2632 struct df_chain_problem_data *problem_data =
2633 (struct df_chain_problem_data *) dflow->problem_data;
2634
2635 /* Wholesale destruction of the old chains. */
2636 if (dflow->block_pool)
2637 free_alloc_pool (dflow->block_pool);
2638
2639 dflow->block_pool = create_alloc_pool ("df_chain_chain_block pool",
2640 sizeof (struct df_link), 100);
2641
2642 if (problem_data->flags & DF_DU_CHAIN)
2643 {
2644 if (!df->def_info.refs_organized)
2645 df_reorganize_refs (&df->def_info);
2646
2647 /* Clear out the pointers from the refs. */
2648 for (i = 0; i < DF_DEFS_SIZE (df); i++)
2649 {
2650 struct df_ref *ref = df->def_info.refs[i];
2651 DF_REF_CHAIN (ref) = NULL;
2652 }
2653 }
2654
2655 if (problem_data->flags & DF_UD_CHAIN)
2656 {
2657 if (!df->use_info.refs_organized)
2658 df_reorganize_refs (&df->use_info);
2659 for (i = 0; i < DF_USES_SIZE (df); i++)
2660 {
2661 struct df_ref *ref = df->use_info.refs[i];
2662 DF_REF_CHAIN (ref) = NULL;
2663 }
2664 }
2665}
2666
2667
2668/* Create the chains for a list of USEs. */
2669
2670static void
2671df_chain_create_bb_process_use (struct dataflow *dflow,
2672 struct df_chain_problem_data *problem_data,
2673 bitmap local_rd,
2674 struct df_ref *use,
2675 enum df_ref_flags top_flag)
2676{
2677 struct df *df = dflow->df;
2678 bitmap_iterator bi;
2679 unsigned int def_index;
2680
2681 while (use)
2682 {
2683      /* We do not want to go through this for an uninitialized var.  */
2684 unsigned int uregno = DF_REF_REGNO (use);
2685 int count = DF_REG_DEF_GET (df, uregno)->n_refs;
2686 if (count)
2687 {
2688 if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
2689 {
2690 unsigned int first_index = DF_REG_DEF_GET (df, uregno)->begin;
2691 unsigned int last_index = first_index + count - 1;
2692
2693 EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
2694 {
2695 struct df_ref *def;
2696 if (def_index > last_index)
2697 break;
2698
2699 def = DF_DEFS_GET (df, def_index);
2700 if (problem_data->flags & DF_DU_CHAIN)
2701 df_chain_create (dflow, def, use);
2702 if (problem_data->flags & DF_UD_CHAIN)
2703 df_chain_create (dflow, use, def);
2704 }
2705 }
2706 }
2707 use = use->next_ref;
2708 }
2709}
2710
2711/* Reset the storage pool that the def-use or use-def chains have been
2712   allocated in.  We do not need to readjust the pointers in the refs;
2713   these have already been cleaned out.  */
2714
2715/* Create chains from reaching defs bitmaps for basic block BB. */
2716static void
2717df_chain_create_bb (struct dataflow *dflow,
2718 struct dataflow *rd_dflow,
2719 unsigned int bb_index)
2720{
2721 basic_block bb = BASIC_BLOCK (bb_index);
2722 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (rd_dflow, bb_index);
2723 rtx insn;
2724 bitmap cpy = BITMAP_ALLOC (NULL);
2725 struct df *df = dflow->df;
2726 struct df_chain_problem_data *problem_data =
2727 (struct df_chain_problem_data *) dflow->problem_data;
2728 struct df_ref *def;
2729
2730 bitmap_copy (cpy, bb_info->in);
2731
2732 /* Since we are going forwards, process the artificial uses first
2733 then the artificial defs second. */
2734
2735#ifdef EH_USES
2736 /* Create the chains for the artificial uses from the EH_USES at the
2737 beginning of the block. */
2738 df_chain_create_bb_process_use (dflow, problem_data, cpy,
2739 df_get_artificial_uses (df, bb->index),
2740 DF_REF_AT_TOP);
2741#endif
2742
2743 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
2744 {
2745 unsigned int dregno = DF_REF_REGNO (def);
2746 bitmap_clear_range (cpy,
2747 DF_REG_DEF_GET (df, dregno)->begin,
2748 DF_REG_DEF_GET (df, dregno)->n_refs);
2749 if (! (DF_REF_FLAGS (def) & DF_REF_CLOBBER))
2750 bitmap_set_bit (cpy, DF_REF_ID (def));
2751 }
2752
2753 /* Process the regular instructions next. */
2754 FOR_BB_INSNS (bb, insn)
2755 {
2756 struct df_ref *def;
2757 unsigned int uid = INSN_UID (insn);
2758
2759 if (! INSN_P (insn))
2760 continue;
2761
2762 /* Now scan the uses and link them up with the defs that remain
2763 in the cpy vector. */
2764
2765 df_chain_create_bb_process_use (dflow, problem_data, cpy,
2766 DF_INSN_UID_GET (df, uid)->uses, 0);
2767
2768 /* Since we are going forwards, process the defs second. This
2769 pass only changes the bits in cpy. */
2770 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
2771 {
2772 unsigned int dregno = DF_REF_REGNO (def);
2773 bitmap_clear_range (cpy,
2774 DF_REG_DEF_GET (df, dregno)->begin,
2775 DF_REG_DEF_GET (df, dregno)->n_refs);
2776 if (! (DF_REF_FLAGS (def) & DF_REF_CLOBBER))
2777 bitmap_set_bit (cpy, DF_REF_ID (def));
2778 }
2779 }
2780
2781 /* Create the chains for the artificial uses of the hard registers
2782 at the end of the block. */
2783 df_chain_create_bb_process_use (dflow, problem_data, cpy,
2784 df_get_artificial_uses (df, bb->index), 0);
2785}
2786
2787/* Create def-use or use-def chains from the reaching defs bitmaps for
2788   the basic blocks in ALL_BLOCKS.  */
2789
2790static void
2791df_chain_finalize (struct dataflow *dflow, bitmap all_blocks)
2792{
2793 unsigned int bb_index;
2794 bitmap_iterator bi;
2795 struct df *df = dflow->df;
2796 struct dataflow *rd_dflow = df->problems_by_index [DF_RD];
2797
2798 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2799 {
2800 df_chain_create_bb (dflow, rd_dflow, bb_index);
2801 }
2802}
2803
2804
2805/* Free all storage associated with the problem. */
2806
2807static void
2808df_chain_free (struct dataflow *dflow)
2809{
2810 free_alloc_pool (dflow->block_pool);
2811 free (dflow->problem_data);
2812 free (dflow);
2813}
2814
2815
2816/* Debugging info. */
2817
2818static void
2819df_chains_dump (struct dataflow *dflow, FILE *file)
2820{
2821 struct df *df = dflow->df;
2822 unsigned int j;
2823 struct df_chain_problem_data *problem_data =
2824 (struct df_chain_problem_data *) dflow->problem_data;
2825
2826 if (problem_data->flags & DF_DU_CHAIN)
2827 {
2828 fprintf (file, "Def-use chains:\n");
2829 for (j = 0; j < df->def_info.bitmap_size; j++)
2830 {
2831 struct df_ref *def = DF_DEFS_GET (df, j);
2832 if (def)
2833 {
2834 fprintf (file, "d%d bb %d luid %d insn %d reg %d ",
2835 j, DF_REF_BBNO (def),
2836		       DF_REF_INSN (def) ? DF_INSN_LUID (df, DF_REF_INSN (def)) : -1,
2837 DF_REF_INSN (def) ? DF_REF_INSN_UID (def) : -1,
2838 DF_REF_REGNO (def));
2839 if (def->flags & DF_REF_READ_WRITE)
2840 fprintf (file, "read/write ");
2841 df_chain_dump (df, DF_REF_CHAIN (def), file);
2842 fprintf (file, "\n");
2843 }
2844 }
2845 }
2846
2847 if (problem_data->flags & DF_UD_CHAIN)
2848 {
2849 fprintf (file, "Use-def chains:\n");
2850 for (j = 0; j < df->use_info.bitmap_size; j++)
2851 {
2852 struct df_ref *use = DF_USES_GET (df, j);
2853 if (use)
2854 {
2855 fprintf (file, "u%d bb %d luid %d insn %d reg %d ",
2856 j, DF_REF_BBNO (use),
2857 DF_REF_INSN (use) ?
2858 DF_INSN_LUID (df, DF_REF_INSN (use))
2859 : -1,
2860		       DF_REF_INSN (use) ?
2861		       DF_REF_INSN_UID (use)
2862		       : -1,
2863 DF_REF_REGNO (use));
2864 if (use->flags & DF_REF_READ_WRITE)
2865 fprintf (file, "read/write ");
2866 if (use->flags & DF_REF_STRIPPED)
2867 fprintf (file, "stripped ");
2868 if (use->flags & DF_REF_IN_NOTE)
2869 fprintf (file, "note ");
2870 df_chain_dump (df, DF_REF_CHAIN (use), file);
2871 fprintf (file, "\n");
2872 }
2873 }
2874 }
2875}
2876
2877
2878static struct df_problem problem_CHAIN =
2879{
2880 DF_CHAIN, /* Problem id. */
2881 DF_NONE, /* Direction. */
2882 df_chain_alloc, /* Allocate the problem specific data. */
2883 NULL, /* Free basic block info. */
2884 NULL, /* Local compute function. */
2885 NULL, /* Init the solution specific data. */
2886 NULL, /* Iterative solver. */
2887 NULL, /* Confluence operator 0. */
2888 NULL, /* Confluence operator n. */
2889 NULL, /* Transfer function. */
2890 df_chain_finalize, /* Finalize function. */
2891 df_chain_free, /* Free all of the problem information. */
2892 df_chains_dump, /* Debugging. */
2893 &problem_RD /* Dependent problem. */
2894};
2895
2896
2897/* Create a new DATAFLOW instance and add it to an existing instance
2898 of DF. The returned structure is what is used to get at the
2899 solution. */
2900
2901struct dataflow *
2902df_chain_add_problem (struct df *df, int flags)
2903{
2904 struct df_chain_problem_data *problem_data =
2905 xmalloc (sizeof (struct df_chain_problem_data));
2906 struct dataflow *dflow = df_add_problem (df, &problem_CHAIN);
2907
2908 dflow->problem_data = problem_data;
2909 problem_data->flags = flags;
2910
2911 return dflow;
2912}
2913
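
/* A hedged sketch, not used anywhere in the compiler: once DF_DU_CHAIN
   information has been built by this problem, a client could walk from
   a def to every use it reaches through the df_link list hung off the
   ref.  The function name and the FILE argument exist only for this
   example.  */

static void ATTRIBUTE_UNUSED
df_chain_walk_sketch (struct df_ref *def, FILE *file)
{
  struct df_link *link;

  /* DF_REF_CHAIN points at the head of the chain built by
     df_chain_create; for a du chain each link->ref is a use that DEF
     reaches.  */
  for (link = DF_REF_CHAIN (def); link; link = link->next)
    fprintf (file, "def of reg %d reaches use in insn %d\n",
	     DF_REF_REGNO (def),
	     DF_REF_INSN (link->ref) ? DF_REF_INSN_UID (link->ref) : -1);
}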
2914
2915/*----------------------------------------------------------------------------
2916 REGISTER INFORMATION
2917
2918 Currently this consists of only lifetime information. But the plan is
2919 to enhance it so that it produces all of the register information needed
2920 by the register allocators.
2921----------------------------------------------------------------------------*/
2922
2923
2924struct df_ri_problem_data
2925{
2926 int *lifetime;
2927};
2928
2929
2930/* Allocate the lifetime information. */
2931
2932static void
2933df_ri_alloc (struct dataflow *dflow, bitmap blocks_to_rescan ATTRIBUTE_UNUSED)
2934{
2935 struct df_ri_problem_data *problem_data =
2936 (struct df_ri_problem_data *) dflow->problem_data;
2937
2938  if (!dflow->problem_data)
2939    {
2940      problem_data = xmalloc (sizeof (struct df_ri_problem_data));
2941      problem_data->lifetime = NULL;	/* So the xrealloc below acts as malloc.  */
2942      dflow->problem_data = problem_data;
2943    }
2944
2945 problem_data->lifetime = xrealloc (problem_data->lifetime,
2946				     max_reg_num () * sizeof (int));
2947  memset (problem_data->lifetime, 0, max_reg_num () * sizeof (int));
2948}
2949
2950/* Compute register info: lifetime, bb, and number of defs and uses
2951 for basic block BB. */
2952
2953static void
2954df_ri_bb_compute (struct dataflow *dflow, unsigned int bb_index, bitmap live)
2955{
2956 struct df *df = dflow->df;
2957  struct df_ur_bb_info *bb_info =
    df_ur_get_bb_info (df_get_dependent_problem (dflow), bb_index);
2958 struct df_ri_problem_data *problem_data =
2959 (struct df_ri_problem_data *) dflow->problem_data;
2960 basic_block bb = BASIC_BLOCK (bb_index);
2961 rtx insn;
2962
2963 bitmap_copy (live, bb_info->out);
2964
2965 FOR_BB_INSNS_REVERSE (bb, insn)
2966 {
2967 unsigned int uid = INSN_UID (insn);
2968 unsigned int regno;
2969 bitmap_iterator bi;
2970 struct df_ref *def;
2971 struct df_ref *use;
2972
2973 if (! INSN_P (insn))
2974 continue;
2975
2976 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
2977 {
2978 unsigned int dregno = DF_REF_REGNO (def);
2979
2980 /* Kill this register. */
2981 bitmap_clear_bit (live, dregno);
2982 }
2983
2984 for (use = DF_INSN_UID_GET (df, uid)->uses; use; use = use->next_ref)
2985 {
2986 unsigned int uregno = DF_REF_REGNO (use);
2987
2988 /* This register is now live. */
2989 bitmap_set_bit (live, uregno);
2990 }
2991
2992 /* Increment lifetimes of all live registers. */
2993 EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
2994 {
2995 problem_data->lifetime[regno]++;
2996 }
2997 }
2998}
2999
3000
3001/* Compute register info: lifetime, bb, and number of defs and uses. */
3002static void
3003df_ri_compute (struct dataflow *dflow, bitmap all_blocks ATTRIBUTE_UNUSED,
3004 bitmap blocks_to_scan)
3005{
3006 unsigned int bb_index;
3007 bitmap_iterator bi;
3008 bitmap live;
3009
3010 live = BITMAP_ALLOC (NULL);
3011
3012 EXECUTE_IF_SET_IN_BITMAP (blocks_to_scan, 0, bb_index, bi)
3013 {
3014 df_ri_bb_compute (dflow, bb_index, live);
3015 }
3016
3017 BITMAP_FREE (live);
3018}
3019
3020
3021/* Free all storage associated with the problem. */
3022
3023static void
3024df_ri_free (struct dataflow *dflow)
3025{
3026 struct df_ri_problem_data *problem_data =
3027 (struct df_ri_problem_data *) dflow->problem_data;
3028
3029 free (problem_data->lifetime);
3030 free (dflow->problem_data);
3031 free (dflow);
3032}
3033
3034
3035/* Debugging info. */
3036
3037static void
3038df_ri_dump (struct dataflow *dflow, FILE *file)
3039{
3040 struct df_ri_problem_data *problem_data =
3041 (struct df_ri_problem_data *) dflow->problem_data;
3042 int j;
3043
3044 fprintf (file, "Register info:\n");
3045 for (j = 0; j < max_reg_num (); j++)
3046 {
3047 fprintf (file, "reg %d life %d\n", j, problem_data->lifetime[j]);
3048 }
3049}
3050
3051/* All of the information associated with every instance of the problem.  */
3052
3053static struct df_problem problem_RI =
3054{
3055 DF_RI, /* Problem id. */
3056 DF_NONE, /* Direction. */
3057 df_ri_alloc, /* Allocate the problem specific data. */
3058 NULL, /* Free basic block info. */
3059 df_ri_compute, /* Local compute function. */
3060 NULL, /* Init the solution specific data. */
3061 NULL, /* Iterative solver. */
3062 NULL, /* Confluence operator 0. */
3063 NULL, /* Confluence operator n. */
3064 NULL, /* Transfer function. */
3065 NULL, /* Finalize function. */
3066 df_ri_free, /* Free all of the problem information. */
3067 df_ri_dump, /* Debugging. */
3068 &problem_UR /* Dependent problem. */
3069};
3070
3071
3072/* Create a new DATAFLOW instance and add it to an existing instance
3073 of DF. The returned structure is what is used to get at the
3074 solution. */
3075
3076struct dataflow *
3077df_ri_add_problem (struct df *df)
3078{
3079 return df_add_problem (df, &problem_RI);
3080}
3081
3082
3083/* Return total lifetime (in insns) of REG. */
3084int
3085df_reg_lifetime (struct df *df, rtx reg)
3086{
3087 struct dataflow *dflow = df->problems_by_index[DF_RI];
3088 struct df_ri_problem_data *problem_data =
3089 (struct df_ri_problem_data *) dflow->problem_data;
3090 return problem_data->lifetime[REGNO (reg)];
3091}
3092
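/* A minimal usage sketch (the function and its heuristic are invented
   for illustration, not part of any pass): with the RI problem
   computed, a caller holding two REG rtxes could, for example, prefer
   to spill the one with the longer accumulated lifetime.  */

static bool ATTRIBUTE_UNUSED
df_prefer_spill_sketch (struct df *df, rtx reg1, rtx reg2)
{
  return df_reg_lifetime (df, reg1) > df_reg_lifetime (df, reg2);
}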
3093