/* FIXME: We need to go back and add the warning messages about code
   moved across setjmp.  */


/* Scanning of rtl for dataflow analysis.
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
   Free Software Foundation, Inc.
   Originally contributed by Michael P. Hayes
   (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
   Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
   and Kenneth Zadeck (zadeck@naturalbridge.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "regs.h"
#include "output.h"
#include "alloc-pool.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "sbitmap.h"
#include "bitmap.h"
#include "timevar.h"
#include "df.h"

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO)  0
#endif

/* Indicates where we are in the compilation.  */
int df_state;

/* The bitmap_obstack is used to hold some static variables that
   should not be reset after each function is compiled.  */

static bitmap_obstack persistent_obstack;

/* The set of hard registers in eliminables[i].from.  */

static HARD_REG_SET elim_reg_set;

/* This is a bitmap copy of regs_invalidated_by_call so that we can
   easily add it into bitmaps, etc.  */

bitmap df_invalidated_by_call = NULL;

/* Initialize ur_in and ur_out as if all hard registers were partially
   available.  */

bitmap df_all_hard_regs = NULL;

static void df_ref_record (struct dataflow *, rtx, rtx *,
                           basic_block, rtx, enum df_ref_type,
                           enum df_ref_flags, bool record_live);
static void df_def_record_1 (struct dataflow *, rtx, basic_block, rtx,
                             enum df_ref_flags, bool record_live);
static void df_defs_record (struct dataflow *, rtx, basic_block, rtx);
static void df_uses_record (struct dataflow *, rtx *, enum df_ref_type,
                            basic_block, rtx, enum df_ref_flags);

static void df_insn_refs_record (struct dataflow *, basic_block, rtx);
static void df_bb_refs_record (struct dataflow *, basic_block);
static void df_refs_record (struct dataflow *, bitmap);
static struct df_ref *df_ref_create_structure (struct dataflow *, rtx, rtx *,
                                               basic_block, rtx,
                                               enum df_ref_type,
                                               enum df_ref_flags);
static void df_record_exit_block_uses (struct dataflow *);
static void df_grow_reg_info (struct dataflow *, struct df_ref_info *);
static void df_grow_ref_info (struct df_ref_info *, unsigned int);
static void df_grow_insn_info (struct df *);
\f
/*----------------------------------------------------------------------------
   SCANNING DATAFLOW PROBLEM

   There are several ways in which scanning looks just like the other
   dataflow problems.  It shares all the mechanisms for local info
   as well as basic block info.  Where it differs is when and how often
   it gets run.  It also has no need for the iterative solver.
----------------------------------------------------------------------------*/

/* Problem data for the scanning dataflow function.  */
struct df_scan_problem_data
{
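  /* Allocation pools for the df_ref, df_insn_info and df_reg_info
     structures created while scanning.  */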
  alloc_pool ref_pool;
  alloc_pool insn_pool;
  alloc_pool reg_pool;
};

typedef struct df_scan_bb_info *df_scan_bb_info_t;

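/* Free the storage that is internal to the scanning problem: the def
   and use ref tables, the insn table, the per-block info, and the
   allocation pools everything was taken from.  */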
static void
df_scan_free_internal (struct dataflow *dflow)
{
  struct df *df = dflow->df;
  struct df_scan_problem_data *problem_data =
    (struct df_scan_problem_data *) dflow->problem_data;

  free (df->def_info.regs);
  free (df->def_info.refs);
  memset (&df->def_info, 0, (sizeof (struct df_ref_info)));

  free (df->use_info.regs);
  free (df->use_info.refs);
  memset (&df->use_info, 0, (sizeof (struct df_ref_info)));

  free (df->insns);
  df->insns = NULL;
  df->insns_size = 0;

  free (dflow->block_info);
  dflow->block_info = NULL;
  dflow->block_info_size = 0;

  BITMAP_FREE (df->hardware_regs_used);
  BITMAP_FREE (df->exit_block_uses);

  free_alloc_pool (dflow->block_pool);
  free_alloc_pool (problem_data->ref_pool);
  free_alloc_pool (problem_data->insn_pool);
  free_alloc_pool (problem_data->reg_pool);
}


/* Get basic block info.  */

struct df_scan_bb_info *
df_scan_get_bb_info (struct dataflow *dflow, unsigned int index)
{
  gcc_assert (index < dflow->block_info_size);
  return (struct df_scan_bb_info *) dflow->block_info[index];
}


/* Set basic block info.  */

static void
df_scan_set_bb_info (struct dataflow *dflow, unsigned int index,
                     struct df_scan_bb_info *bb_info)
{
  gcc_assert (index < dflow->block_info_size);
  dflow->block_info[index] = (void *) bb_info;
}


/* Free basic block info.  */

static void
df_scan_free_bb_info (struct dataflow *dflow, basic_block bb, void *vbb_info)
{
  struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
  if (bb_info)
    {
      df_bb_refs_delete (dflow, bb->index);
      pool_free (dflow->block_pool, bb_info);
    }
}


/* Allocate the problem data for the scanning problem.  This should be
   called when the problem is created or when the entire function is to
   be rescanned.  */

static void
df_scan_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
{
  struct df *df = dflow->df;
  struct df_scan_problem_data *problem_data;
  unsigned int insn_num = get_max_uid () + 1;
  unsigned int block_size = 50;
  unsigned int bb_index;
  bitmap_iterator bi;

  /* Given the number of pools, this is really faster than tearing
     everything apart.  */
  if (dflow->problem_data)
    df_scan_free_internal (dflow);

  dflow->block_pool
    = create_alloc_pool ("df_scan_block pool",
                         sizeof (struct df_scan_bb_info),
                         block_size);

  problem_data = xmalloc (sizeof (struct df_scan_problem_data));
  dflow->problem_data = problem_data;

  problem_data->ref_pool
    = create_alloc_pool ("df_scan_ref pool",
                         sizeof (struct df_ref), block_size);
  problem_data->insn_pool
    = create_alloc_pool ("df_scan_insn pool",
                         sizeof (struct df_insn_info), block_size);
  problem_data->reg_pool
    = create_alloc_pool ("df_scan_reg pool",
                         sizeof (struct df_reg_info), block_size);

  insn_num += insn_num / 4;
  df_grow_reg_info (dflow, &df->def_info);
  df_grow_ref_info (&df->def_info, insn_num);

  df_grow_reg_info (dflow, &df->use_info);
  df_grow_ref_info (&df->use_info, insn_num * 2);

  df_grow_insn_info (df);
  df_grow_bb_info (dflow);

  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
    {
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (dflow, bb_index);
      if (!bb_info)
        {
          bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
          df_scan_set_bb_info (dflow, bb_index, bb_info);
        }
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  df->hardware_regs_used = BITMAP_ALLOC (NULL);
  df->exit_block_uses = BITMAP_ALLOC (NULL);
}


/* Free all of the data associated with the scan problem.  */

static void
df_scan_free (struct dataflow *dflow)
{
  struct df *df = dflow->df;

  if (dflow->problem_data)
    {
      df_scan_free_internal (dflow);
      free (dflow->problem_data);
    }

  if (df->blocks_to_scan)
    BITMAP_FREE (df->blocks_to_scan);

  if (df->blocks_to_analyze)
    BITMAP_FREE (df->blocks_to_analyze);

  free (dflow);
}

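/* Debugging support: dump the global register sets maintained by
   scanning (df_all_hard_regs, df_invalidated_by_call, the hardware
   regs used, the exit block uses and regs_ever_live) to FILE.  */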
static void
df_scan_dump (struct dataflow *dflow ATTRIBUTE_UNUSED, FILE *file ATTRIBUTE_UNUSED)
{
  struct df *df = dflow->df;
  int i;

  fprintf (file, "  all hard regs \t");
  dump_bitmap (file, df_all_hard_regs);
  fprintf (file, "  invalidated by call \t");
  dump_bitmap (file, df_invalidated_by_call);
  fprintf (file, "  hardware regs used \t");
  dump_bitmap (file, df->hardware_regs_used);
  fprintf (file, "  exit block uses \t");
  dump_bitmap (file, df->exit_block_uses);
  fprintf (file, "  regs ever live \t");
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (regs_ever_live[i])
      fprintf (file, "%d ", i);
  fprintf (file, "\n");
}

static struct df_problem problem_SCAN =
{
  DF_SCAN,                /* Problem id.  */
  DF_NONE,                /* Direction.  */
  df_scan_alloc,          /* Allocate the problem specific data.  */
  NULL,                   /* Reset global information.  */
  df_scan_free_bb_info,   /* Free basic block info.  */
  NULL,                   /* Local compute function.  */
  NULL,                   /* Init the solution specific data.  */
  NULL,                   /* Iterative solver.  */
  NULL,                   /* Confluence operator 0.  */
  NULL,                   /* Confluence operator n.  */
  NULL,                   /* Transfer function.  */
  NULL,                   /* Finalize function.  */
  df_scan_free,           /* Free all of the problem information.  */
  df_scan_dump,           /* Debugging.  */
  NULL                    /* Dependent problem.  */
};


/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

struct dataflow *
df_scan_add_problem (struct df *df)
{
  return df_add_problem (df, &problem_SCAN);
}

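/* A sketch of a typical client, for orientation only; the exact
   initialization protocol lives in df-core.c and df.h, and the flags
   and call order vary by client:

       struct df *df = df_init (DF_HARD_REGS);
       df_scan_add_problem (df);
       ... add the other problems that depend on scanning ...
       df_rescan_blocks (df, NULL);     scan every basic block
       ... consult the def/use information ...  */
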
/*----------------------------------------------------------------------------
   Storage Allocation Utilities
----------------------------------------------------------------------------*/


/* First, grow the reg_info information.  If the current size is less than
   the number of pseudos, grow to 25% more than the number of
   pseudos.

   Second, assure that all of the slots up to max_reg_num have been
   filled with reg_info structures.  */

static void
df_grow_reg_info (struct dataflow *dflow, struct df_ref_info *ref_info)
{
  unsigned int max_reg = max_reg_num ();
  unsigned int new_size = max_reg;
  struct df_scan_problem_data *problem_data =
    (struct df_scan_problem_data *) dflow->problem_data;
  unsigned int i;

  if (ref_info->regs_size < new_size)
    {
      new_size += new_size / 4;
      ref_info->regs = xrealloc (ref_info->regs,
                                 new_size * sizeof (struct df_reg_info *));
      ref_info->regs_size = new_size;
    }

  for (i = ref_info->regs_inited; i < max_reg; i++)
    {
      struct df_reg_info *reg_info = pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      ref_info->regs[i] = reg_info;
    }

  ref_info->regs_inited = max_reg;
}


/* Grow the ref information.  */

static void
df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
{
  if (ref_info->refs_size < new_size)
    {
      ref_info->refs = xrealloc (ref_info->refs,
                                 new_size * sizeof (struct df_ref *));
      memset (ref_info->refs + ref_info->refs_size, 0,
              (new_size - ref_info->refs_size) * sizeof (struct df_ref *));
      ref_info->refs_size = new_size;
    }
}


/* Grow the insn information.  If the current size is less than the
   number of instructions, grow to 25% more than the number of
   instructions.  */

static void
df_grow_insn_info (struct df *df)
{
  unsigned int new_size = get_max_uid () + 1;
  if (df->insns_size < new_size)
    {
      new_size += new_size / 4;
      df->insns = xrealloc (df->insns,
                            new_size * sizeof (struct df_insn_info *));
      memset (df->insns + df->insns_size, 0,
              (new_size - df->insns_size) * sizeof (struct df_insn_info *));
      df->insns_size = new_size;
    }
}


\f
/*----------------------------------------------------------------------------
   PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
----------------------------------------------------------------------------*/

/* Rescan some BLOCKS, or all the blocks defined by the last call to
   df_set_blocks (if BLOCKS is NULL).  */

void
df_rescan_blocks (struct df *df, bitmap blocks)
{
  bitmap local_blocks_to_scan = BITMAP_ALLOC (NULL);

  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  basic_block bb;

  df->def_info.refs_organized = false;
  df->use_info.refs_organized = false;

  if (blocks)
    {
      int i;

      /* Need to assure that there is space in all of the tables.  */
      unsigned int insn_num = get_max_uid () + 1;
      insn_num += insn_num / 4;

      df_grow_reg_info (dflow, &df->def_info);
      df_grow_ref_info (&df->def_info, insn_num);

      df_grow_reg_info (dflow, &df->use_info);
      df_grow_ref_info (&df->use_info, insn_num * 2);

      df_grow_insn_info (df);
      df_grow_bb_info (dflow);

      bitmap_copy (local_blocks_to_scan, blocks);
      df->def_info.add_refs_inline = true;
      df->use_info.add_refs_inline = true;

      for (i = df->num_problems_defined; i; i--)
        {
          bitmap blocks_to_reset = NULL;
          if (*dflow->problem->reset_fun)
            {
              if (!blocks_to_reset)
                {
                  blocks_to_reset = BITMAP_ALLOC (NULL);
                  bitmap_copy (blocks_to_reset, local_blocks_to_scan);
                  if (df->blocks_to_scan)
                    bitmap_ior_into (blocks_to_reset, df->blocks_to_scan);
                }
              (*dflow->problem->reset_fun) (dflow, blocks_to_reset);
            }
          if (blocks_to_reset)
            BITMAP_FREE (blocks_to_reset);
        }

      df_refs_delete (dflow, local_blocks_to_scan);

      /* This may be a mistake, but if an explicit set of blocks is
         passed in and the set of blocks to analyze has been
         explicitly set, add the extra blocks to blocks_to_analyze.
         The alternative is to put an assert here.  We do not want
         this to just go by silently or else we may get storage
         leaks.  */
      if (df->blocks_to_analyze)
        bitmap_ior_into (df->blocks_to_analyze, blocks);
    }
  else
    {
      /* If we are going to do everything, just reallocate everything.
         Most stuff is allocated in pools so this is faster than
         walking it.  */
      if (df->blocks_to_analyze)
        bitmap_copy (local_blocks_to_scan, df->blocks_to_analyze);
      else
        FOR_ALL_BB (bb)
          {
            bitmap_set_bit (local_blocks_to_scan, bb->index);
          }
      df_scan_alloc (dflow, local_blocks_to_scan);

      df->def_info.add_refs_inline = false;
      df->use_info.add_refs_inline = false;
    }

  df_refs_record (dflow, local_blocks_to_scan);
#if 0
  bitmap_print (stderr, local_blocks_to_scan, "scanning: ", "\n");
#endif

  if (!df->blocks_to_scan)
    df->blocks_to_scan = BITMAP_ALLOC (NULL);

  bitmap_ior_into (df->blocks_to_scan, local_blocks_to_scan);
  BITMAP_FREE (local_blocks_to_scan);
}

/* Create a new ref of type DF_REF_TYPE for register REG at address
   LOC within INSN of BB.  */

struct df_ref *
df_ref_create (struct df *df, rtx reg, rtx *loc, rtx insn,
               basic_block bb,
               enum df_ref_type ref_type,
               enum df_ref_flags ref_flags)
{
  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  struct df_scan_bb_info *bb_info;

  df_grow_reg_info (dflow, &df->use_info);
  df_grow_reg_info (dflow, &df->def_info);
  df_grow_bb_info (dflow);

  /* Make sure there is bb_info for this block.  */
  bb_info = df_scan_get_bb_info (dflow, bb->index);
  if (!bb_info)
    {
      bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
      df_scan_set_bb_info (dflow, bb->index, bb_info);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  if (ref_type == DF_REF_REG_DEF)
    df->def_info.add_refs_inline = true;
  else
    df->use_info.add_refs_inline = true;

  return df_ref_create_structure (dflow, reg, loc, bb, insn, ref_type, ref_flags);
}
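
/* Note: df_ref_create is the small-grain way to add a single ref;
   rescanning whole blocks goes through df_rescan_blocks above.  */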


\f
/*----------------------------------------------------------------------------
   UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
----------------------------------------------------------------------------*/


/* Get the artificial defs for a basic block.  */

struct df_ref *
df_get_artificial_defs (struct df *df, unsigned int bb_index)
{
  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  return df_scan_get_bb_info (dflow, bb_index)->artificial_defs;
}


/* Get the artificial uses for a basic block.  */

struct df_ref *
df_get_artificial_uses (struct df *df, unsigned int bb_index)
{
  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  return df_scan_get_bb_info (dflow, bb_index)->artificial_uses;
}


/* Link REF at the front of reg_use or reg_def chain for REGNO.  */

void
df_reg_chain_create (struct df_reg_info *reg_info,
                     struct df_ref *ref)
{
  struct df_ref *head = reg_info->reg_chain;
  reg_info->reg_chain = ref;

  DF_REF_NEXT_REG (ref) = head;

  /* We cannot actually link to the head of the chain.  */
  DF_REF_PREV_REG (ref) = NULL;

  if (head)
    DF_REF_PREV_REG (head) = ref;
}


/* Remove REF from the CHAIN.  Return the head of the chain.  This
   will be CHAIN unless the REF was at the beginning of the chain.  */

static struct df_ref *
df_ref_unlink (struct df_ref *chain, struct df_ref *ref)
{
  struct df_ref *orig_chain = chain;
  struct df_ref *prev = NULL;
  while (chain)
    {
      if (chain == ref)
        {
          if (prev)
            {
              prev->next_ref = ref->next_ref;
              ref->next_ref = NULL;
              return orig_chain;
            }
          else
            {
              chain = ref->next_ref;
              ref->next_ref = NULL;
              return chain;
            }
        }

      prev = chain;
      chain = chain->next_ref;
    }

  /* Someone passed in a ref that was not in the chain.  */
  gcc_unreachable ();
  return NULL;
}


/* Unlink and delete REF from the reg_use or reg_def chain.  Also delete
   the def-use or use-def chain if it exists.  Returns the next ref in
   the uses or defs chain.  */

struct df_ref *
df_reg_chain_unlink (struct dataflow *dflow, struct df_ref *ref)
{
  struct df *df = dflow->df;
  struct df_ref *next = DF_REF_NEXT_REG (ref);
  struct df_ref *prev = DF_REF_PREV_REG (ref);
  struct df_scan_problem_data *problem_data =
    (struct df_scan_problem_data *) dflow->problem_data;
  struct df_reg_info *reg_info;
  struct df_ref *next_ref = ref->next_ref;
  unsigned int id = DF_REF_ID (ref);

  if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
    {
      reg_info = DF_REG_DEF_GET (df, DF_REF_REGNO (ref));
      df->def_info.bitmap_size--;
      if (df->def_info.refs && (id < df->def_info.refs_size))
        DF_DEFS_SET (df, id, NULL);
    }
  else
    {
      reg_info = DF_REG_USE_GET (df, DF_REF_REGNO (ref));
      df->use_info.bitmap_size--;
      if (df->use_info.refs && (id < df->use_info.refs_size))
        DF_USES_SET (df, id, NULL);
    }

  /* Delete any def-use or use-def chains that start here.  */
  if (DF_REF_CHAIN (ref))
    df_chain_unlink (df->problems_by_index[DF_CHAIN], ref, NULL);

  reg_info->n_refs--;

  /* Unlink from the reg chain.  If there is no prev, this is the
     first of the list.  If not, just join the next and prev.  */
  if (prev)
    {
      DF_REF_NEXT_REG (prev) = next;
      if (next)
        DF_REF_PREV_REG (next) = prev;
    }
  else
    {
      reg_info->reg_chain = next;
      if (next)
        DF_REF_PREV_REG (next) = NULL;
    }

  pool_free (problem_data->ref_pool, ref);
  return next_ref;
}


/* Unlink REF from all def-use/use-def chains, etc.  */

void
df_ref_remove (struct df *df, struct df_ref *ref)
{
  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  if (DF_REF_REG_DEF_P (ref))
    {
      if (DF_REF_FLAGS (ref) & DF_REF_ARTIFICIAL)
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (dflow, DF_REF_BB (ref)->index);
          bb_info->artificial_defs
            = df_ref_unlink (bb_info->artificial_defs, ref);
        }
      else
        DF_INSN_UID_DEFS (df, DF_REF_INSN_UID (ref)) =
          df_ref_unlink (DF_INSN_UID_DEFS (df, DF_REF_INSN_UID (ref)), ref);

      if (df->def_info.add_refs_inline)
        DF_DEFS_SET (df, DF_REF_ID (ref), NULL);
    }
  else
    {
      if (DF_REF_FLAGS (ref) & DF_REF_ARTIFICIAL)
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (dflow, DF_REF_BB (ref)->index);
          bb_info->artificial_uses
            = df_ref_unlink (bb_info->artificial_uses, ref);
        }
      else
        DF_INSN_UID_USES (df, DF_REF_INSN_UID (ref)) =
          df_ref_unlink (DF_INSN_UID_USES (df, DF_REF_INSN_UID (ref)), ref);

      if (df->use_info.add_refs_inline)
        DF_USES_SET (df, DF_REF_ID (ref), NULL);
    }

  df_reg_chain_unlink (dflow, ref);
}


/* Create the insn record for INSN.  If there was one there, zero it out.  */

static struct df_insn_info *
df_insn_create_insn_record (struct dataflow *dflow, rtx insn)
{
  struct df *df = dflow->df;
  struct df_scan_problem_data *problem_data =
    (struct df_scan_problem_data *) dflow->problem_data;

  struct df_insn_info *insn_rec = DF_INSN_GET (df, insn);
  if (!insn_rec)
    {
      insn_rec = pool_alloc (problem_data->insn_pool);
      DF_INSN_SET (df, insn, insn_rec);
    }
  memset (insn_rec, 0, sizeof (struct df_insn_info));

  return insn_rec;
}


/* Delete all of the refs information from INSN.  */

void
df_insn_refs_delete (struct dataflow *dflow, rtx insn)
{
  struct df *df = dflow->df;
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = NULL;
  struct df_ref *ref;
  struct df_scan_problem_data *problem_data =
    (struct df_scan_problem_data *) dflow->problem_data;

  if (uid < df->insns_size)
    insn_info = DF_INSN_UID_GET (df, uid);

  if (insn_info)
    {
      ref = insn_info->defs;
      while (ref)
        ref = df_reg_chain_unlink (dflow, ref);

      ref = insn_info->uses;
      while (ref)
        ref = df_reg_chain_unlink (dflow, ref);

      pool_free (problem_data->insn_pool, insn_info);
      DF_INSN_SET (df, insn, NULL);
    }
}


/* Delete all of the refs information from basic_block with BB_INDEX.  */

void
df_bb_refs_delete (struct dataflow *dflow, int bb_index)
{
  struct df_ref *def;
  struct df_ref *use;

  struct df_scan_bb_info *bb_info
    = df_scan_get_bb_info (dflow, bb_index);
  rtx insn;
  basic_block bb = BASIC_BLOCK (bb_index);
  FOR_BB_INSNS (bb, insn)
    {
      if (INSN_P (insn))
        {
          /* Delete the refs within INSN.  */
          df_insn_refs_delete (dflow, insn);
        }
    }

  /* Get rid of any artificial uses or defs.  */
  if (bb_info)
    {
      def = bb_info->artificial_defs;
      while (def)
        def = df_reg_chain_unlink (dflow, def);
      bb_info->artificial_defs = NULL;
      use = bb_info->artificial_uses;
      while (use)
        use = df_reg_chain_unlink (dflow, use);
      bb_info->artificial_uses = NULL;
    }
}


/* Delete all of the refs information from BLOCKS.  */

void
df_refs_delete (struct dataflow *dflow, bitmap blocks)
{
  bitmap_iterator bi;
  unsigned int bb_index;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
    {
      df_bb_refs_delete (dflow, bb_index);
    }
}


/* Build the ref table for either the uses or the defs from the
   reg-use or reg-def chains.  */

void
df_reorganize_refs (struct df_ref_info *ref_info)
{
  unsigned int m = ref_info->regs_inited;
  unsigned int regno;
  unsigned int offset = 0;
  unsigned int size = 0;

  if (ref_info->refs_organized)
    return;

  if (ref_info->refs_size < ref_info->bitmap_size)
    {
      int new_size = ref_info->bitmap_size + ref_info->bitmap_size / 4;
      df_grow_ref_info (ref_info, new_size);
    }

  for (regno = 0; regno < m; regno++)
    {
      struct df_reg_info *reg_info = ref_info->regs[regno];
      int count = 0;
      if (reg_info)
        {
          struct df_ref *ref = reg_info->reg_chain;
          reg_info->begin = offset;
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              ref = DF_REF_NEXT_REG (ref);
              count++;
              size++;
            }
          reg_info->n_refs = count;
        }
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */
  ref_info->bitmap_size = size;
  ref_info->refs_organized = true;
  ref_info->add_refs_inline = true;
}

\f
/* Local miscellaneous routines.  */

/* Local routines for recording refs.  */

/* Set where we are in the compilation.  */

void
df_set_state (int state)
{
  df_state = state;
}


\f
/*----------------------------------------------------------------------------
   Hard core instruction scanning code.  No external interfaces here,
   just a lot of routines that look inside insns.
----------------------------------------------------------------------------*/

/* Create a ref and add it to the reg-def or reg-use chains.  */

static struct df_ref *
df_ref_create_structure (struct dataflow *dflow, rtx reg, rtx *loc,
                         basic_block bb, rtx insn,
                         enum df_ref_type ref_type,
                         enum df_ref_flags ref_flags)
{
  struct df_ref *this_ref;
  struct df *df = dflow->df;
  int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  struct df_scan_problem_data *problem_data =
    (struct df_scan_problem_data *) dflow->problem_data;

  this_ref = pool_alloc (problem_data->ref_pool);
  DF_REF_REG (this_ref) = reg;
  DF_REF_REGNO (this_ref) = regno;
  DF_REF_LOC (this_ref) = loc;
  DF_REF_INSN (this_ref) = insn;
  DF_REF_CHAIN (this_ref) = NULL;
  DF_REF_TYPE (this_ref) = ref_type;
  DF_REF_FLAGS (this_ref) = ref_flags;
  DF_REF_DATA (this_ref) = NULL;
  DF_REF_BB (this_ref) = bb;

  /* Link the ref into the reg_def and reg_use chains and keep a count
     of the instances.  */
  if (ref_type == DF_REF_REG_DEF)
    {
      struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
      reg_info->n_refs++;

      /* Add the ref to the reg_def chain.  */
      df_reg_chain_create (reg_info, this_ref);
      DF_REF_ID (this_ref) = df->def_info.bitmap_size;
      if (df->def_info.add_refs_inline)
        {
          if (DF_DEFS_SIZE (df) >= df->def_info.refs_size)
            {
              int new_size = df->def_info.bitmap_size
                + df->def_info.bitmap_size / 4;
              df_grow_ref_info (&df->def_info, new_size);
            }
          /* Add the ref to the big array of defs.  */
          DF_DEFS_SET (df, df->def_info.bitmap_size, this_ref);
          df->def_info.refs_organized = false;
        }

      df->def_info.bitmap_size++;

      if (DF_REF_FLAGS (this_ref) & DF_REF_ARTIFICIAL)
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (dflow, bb->index);
          this_ref->next_ref = bb_info->artificial_defs;
          bb_info->artificial_defs = this_ref;
        }
      else
        {
          this_ref->next_ref = DF_INSN_GET (df, insn)->defs;
          DF_INSN_GET (df, insn)->defs = this_ref;
        }
    }
  else
    {
      struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
      reg_info->n_refs++;

      /* Add the ref to the reg_use chain.  */
      df_reg_chain_create (reg_info, this_ref);
      DF_REF_ID (this_ref) = df->use_info.bitmap_size;
      if (df->use_info.add_refs_inline)
        {
          if (DF_USES_SIZE (df) >= df->use_info.refs_size)
            {
              int new_size = df->use_info.bitmap_size
                + df->use_info.bitmap_size / 4;
              df_grow_ref_info (&df->use_info, new_size);
            }
          /* Add the ref to the big array of uses.  */
          DF_USES_SET (df, df->use_info.bitmap_size, this_ref);
          df->use_info.refs_organized = false;
        }

      df->use_info.bitmap_size++;
      if (DF_REF_FLAGS (this_ref) & DF_REF_ARTIFICIAL)
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (dflow, bb->index);
          this_ref->next_ref = bb_info->artificial_uses;
          bb_info->artificial_uses = this_ref;
        }
      else
        {
          this_ref->next_ref = DF_INSN_GET (df, insn)->uses;
          DF_INSN_GET (df, insn)->uses = this_ref;
        }
    }
  return this_ref;
}


/* Create new references of type DF_REF_TYPE for each part of register REG
   at address LOC within INSN of BB.  */

static void
df_ref_record (struct dataflow *dflow, rtx reg, rtx *loc,
               basic_block bb, rtx insn,
               enum df_ref_type ref_type,
               enum df_ref_flags ref_flags,
               bool record_live)
{
  unsigned int regno;
  struct df *df = dflow->df;

  gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);

  /* For the reg allocator we are interested in some SUBREG rtx's, but not
     all.  Notably only those representing a word extraction from a multi-word
     reg.  As written in the documentation those should have the form
     (subreg:SI (reg:M A) N), with size(SImode) > size(Mmode).
     XXX Is that true?  We could also use the global word_mode variable.  */
  if ((df->flags & DF_SUBREGS) == 0
      && GET_CODE (reg) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (reg)) < GET_MODE_SIZE (word_mode)
          || GET_MODE_SIZE (GET_MODE (reg))
               >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (reg)))))
    {
      loc = &SUBREG_REG (reg);
      reg = *loc;
      ref_flags |= DF_REF_STRIPPED;
    }

  regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      int i;
      int endregno;

      if (! (df->flags & DF_HARD_REGS))
        return;

      /* GET_MODE (reg) is correct here.  We do not want to go into a SUBREG
         for the mode, because we only want to add references to regs, which
         are really referenced.  E.g., a (subreg:SI (reg:DI 0) 0) does _not_
         reference the whole reg 0 in DI mode (which would also include
         reg 1, at least, if 0 and 1 are SImode registers).  */
      endregno = hard_regno_nregs[regno][GET_MODE (reg)];
      if (GET_CODE (reg) == SUBREG)
        regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
                                      SUBREG_BYTE (reg), GET_MODE (reg));
      endregno += regno;

      for (i = regno; i < endregno; i++)
        {
          /* Calls are handled at call site because regs_ever_live
             doesn't include clobbered regs, only used ones.  */
          if (ref_type == DF_REF_REG_DEF && record_live)
            regs_ever_live[i] = 1;
          else if ((ref_type == DF_REF_REG_USE
                    || ref_type == DF_REF_REG_MEM_STORE
                    || ref_type == DF_REF_REG_MEM_LOAD)
                   && ((ref_flags & DF_REF_ARTIFICIAL) == 0))
            {
              /* Set regs_ever_live on uses of non-eliminable frame
                 pointers and arg pointers.  */
              if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
                     && (regno == FRAME_POINTER_REGNUM
                         || regno == ARG_POINTER_REGNUM)))
                regs_ever_live[i] = 1;
            }

          df_ref_create_structure (dflow, regno_reg_rtx[i], loc,
                                   bb, insn, ref_type, ref_flags);
        }
    }
  else
    {
      df_ref_create_structure (dflow, reg, loc,
                               bb, insn, ref_type, ref_flags);
    }
}


/* A set to a non-paradoxical SUBREG for which the number of word_mode units
   covered by the outer mode is smaller than that covered by the inner mode
   is a read-modify-write operation.
   This function returns true iff the SUBREG X is such a SUBREG.  */
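/* For example, on a 32-bit target a set of (subreg:SI (reg:DI X) 0)
   writes only the low word of X and leaves the high word alone, so
   the SET effectively both reads and writes X.  */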

bool
df_read_modify_subreg_p (rtx x)
{
  unsigned int isize, osize;
  if (GET_CODE (x) != SUBREG)
    return false;
  isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
  osize = GET_MODE_SIZE (GET_MODE (x));
  return (isize > osize && isize > UNITS_PER_WORD);
}


/* Process all the registers defined in the rtx, X.
   Autoincrement/decrement definitions will be picked up by
   df_uses_record.  */

static void
df_def_record_1 (struct dataflow *dflow, rtx x,
                 basic_block bb, rtx insn,
                 enum df_ref_flags flags, bool record_live)
{
  rtx *loc;
  rtx dst;

  /* We may recursively call ourselves on EXPR_LIST when dealing with a
     PARALLEL construct.  */
  if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
    loc = &XEXP (x, 0);
  else
    loc = &SET_DEST (x);
  dst = *loc;

  /* Some targets place small structures in registers for
     return values of functions.  */
  if (GET_CODE (dst) == PARALLEL && GET_MODE (dst) == BLKmode)
    {
      int i;

      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
        {
          rtx temp = XVECEXP (dst, 0, i);
          if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
              || GET_CODE (temp) == SET)
            df_def_record_1 (dflow, temp, bb, insn,
                             GET_CODE (temp) == CLOBBER
                             ? flags | DF_REF_CLOBBER : flags,
                             record_live);
        }
      return;
    }

  /* Maybe, we should flag the use of STRICT_LOW_PART somehow.  It might
     be handy for the reg allocator.  */
  while (GET_CODE (dst) == STRICT_LOW_PART
         || GET_CODE (dst) == ZERO_EXTRACT
         || df_read_modify_subreg_p (dst))
    {
#if 0
      /* Strict low part always contains SUBREG, but we do not want to make
         it appear outside, as whole register is always considered.  */
      if (GET_CODE (dst) == STRICT_LOW_PART)
        {
          loc = &XEXP (dst, 0);
          dst = *loc;
        }
#endif
      loc = &XEXP (dst, 0);
      dst = *loc;
      flags |= DF_REF_READ_WRITE;
    }

  if (REG_P (dst)
      || (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))))
    df_ref_record (dflow, dst, loc, bb, insn,
                   DF_REF_REG_DEF, flags, record_live);
}


/* Process all the registers defined in the pattern rtx, X.  */

static void
df_defs_record (struct dataflow *dflow, rtx x, basic_block bb, rtx insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == SET || code == CLOBBER)
    {
      /* Mark the single def within the pattern.  */
      df_def_record_1 (dflow, x, bb, insn,
                       code == CLOBBER ? DF_REF_CLOBBER : 0, true);
    }
  else if (code == COND_EXEC)
    {
      df_defs_record (dflow, COND_EXEC_CODE (x), bb, insn);
    }
  else if (code == PARALLEL)
    {
      int i;

      /* Mark the multiple defs within the pattern.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        df_defs_record (dflow, XVECEXP (x, 0, i), bb, insn);
    }
}


/* Process all the registers used in the rtx at address LOC.  */

static void
df_uses_record (struct dataflow *dflow, rtx *loc, enum df_ref_type ref_type,
                basic_block bb, rtx insn, enum df_ref_flags flags)
{
  RTX_CODE code;
  rtx x;
 retry:
  x = *loc;
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
         as being used.  */
      if (MEM_P (XEXP (x, 0)))
        df_uses_record (dflow, &XEXP (XEXP (x, 0), 0),
                        DF_REF_REG_MEM_STORE, bb, insn, flags);

      /* If we're clobbering a REG then we have a def so ignore.  */
      return;

    case MEM:
      df_uses_record (dflow, &XEXP (x, 0), DF_REF_REG_MEM_LOAD, bb, insn,
                      flags & DF_REF_IN_NOTE);
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */

      /* In case the SUBREG is not of a REG, do not optimize.  */
      if (!REG_P (SUBREG_REG (x)))
        {
          loc = &SUBREG_REG (x);
          df_uses_record (dflow, loc, ref_type, bb, insn, flags);
          return;
        }
      /* ... Fall through ...  */

    case REG:
      df_ref_record (dflow, x, loc, bb, insn, ref_type, flags, true);
      return;

    case SET:
      {
        rtx dst = SET_DEST (x);
        gcc_assert (!(flags & DF_REF_IN_NOTE));
        df_uses_record (dflow, &SET_SRC (x), DF_REF_REG_USE, bb, insn, 0);

        switch (GET_CODE (dst))
          {
          case SUBREG:
            if (df_read_modify_subreg_p (dst))
              {
                df_uses_record (dflow, &SUBREG_REG (dst),
                                DF_REF_REG_USE, bb,
                                insn, DF_REF_READ_WRITE);
                break;
              }
            /* Fall through.  */
          case REG:
          case PARALLEL:
          case SCRATCH:
          case PC:
          case CC0:
            break;
          case MEM:
            df_uses_record (dflow, &XEXP (dst, 0),
                            DF_REF_REG_MEM_STORE,
                            bb, insn, 0);
            break;
          case STRICT_LOW_PART:
            {
              rtx *temp = &XEXP (dst, 0);
              /* A strict_low_part uses the whole REG and not just the
                 SUBREG.  */
              dst = XEXP (dst, 0);
              df_uses_record (dflow,
                              (GET_CODE (dst) == SUBREG)
                              ? &SUBREG_REG (dst) : temp,
                              DF_REF_REG_USE, bb,
                              insn, DF_REF_READ_WRITE);
            }
            break;
          case ZERO_EXTRACT:
          case SIGN_EXTRACT:
            df_uses_record (dflow, &XEXP (dst, 0),
                            DF_REF_REG_USE, bb, insn,
                            DF_REF_READ_WRITE);
            df_uses_record (dflow, &XEXP (dst, 1),
                            DF_REF_REG_USE, bb, insn, 0);
            df_uses_record (dflow, &XEXP (dst, 2),
                            DF_REF_REG_USE, bb, insn, 0);
            dst = XEXP (dst, 0);
            break;
          default:
            gcc_unreachable ();
          }
        return;
      }

    case RETURN:
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
        /* Traditional and volatile asm instructions must be
           considered to use and clobber all hard registers, all
           pseudo-registers and all of memory.  So must TRAP_IF and
           UNSPEC_VOLATILE operations.

           Consider for instance a volatile asm that changes the fpu
           rounding mode.  An insn should not be moved across this
           even if it only uses pseudo-regs because it might give an
           incorrectly rounded result.

           However, flow.c's liveness computation did *not* do this,
           giving the reasoning as " ?!? Unfortunately, marking all
           hard registers as live causes massive problems for the
           register allocator and marking all pseudos as live creates
           mountains of uninitialized variable warnings."

           In order to maintain the status quo with regard to liveness
           and uses, we do what flow.c did and just mark any regs we
           can find in ASM_OPERANDS as used.  Later on, when liveness
           is computed, asm insns are scanned and regs_asm_clobbered
           is filled out.

           For all ASM_OPERANDS, we must traverse the vector of input
           operands.  We can not just fall through here since then we
           would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
           which do not indicate traditional asms unlike their normal
           usage.  */
        if (code == ASM_OPERANDS)
          {
            int j;

            for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
              df_uses_record (dflow, &ASM_OPERANDS_INPUT (x, j),
                              DF_REF_REG_USE, bb, insn, 0);
            return;
          }
        break;
      }

    case PRE_DEC:
    case POST_DEC:
    case PRE_INC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* Catch the def of the register being modified.  */
      df_ref_record (dflow, XEXP (x, 0), &XEXP (x, 0), bb, insn,
                     DF_REF_REG_DEF, DF_REF_READ_WRITE, true);

      /* ... Fall through to handle uses ...  */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */
  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            /* Tail recursive case: save a function call level.  */
            if (i == 0)
              {
                loc = &XEXP (x, 0);
                goto retry;
              }
            df_uses_record (dflow, &XEXP (x, i), ref_type, bb, insn, flags);
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              df_uses_record (dflow, &XVECEXP (x, i, j), ref_type,
                              bb, insn, flags);
          }
      }
  }
}

/* Return true if *LOC contains an asm.  */

static int
df_insn_contains_asm_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  if (!*loc)
    return 0;
  if (GET_CODE (*loc) == ASM_OPERANDS)
    return 1;
  return 0;
}


/* Return true if INSN contains an ASM.  */

static int
df_insn_contains_asm (rtx insn)
{
  return for_each_rtx (&insn, df_insn_contains_asm_1, NULL);
}


/* Record all the refs for DF within INSN of basic block BB.  */

static void
df_insn_refs_record (struct dataflow *dflow, basic_block bb, rtx insn)
{
  int i;
  struct df *df = dflow->df;

  if (INSN_P (insn))
    {
      rtx note;

      if (df_insn_contains_asm (insn))
        DF_INSN_CONTAINS_ASM (df, insn) = true;

      /* Record register defs.  */
      df_defs_record (dflow, PATTERN (insn), bb, insn);

      if (df->flags & DF_EQUIV_NOTES)
        for (note = REG_NOTES (insn); note;
             note = XEXP (note, 1))
          {
            switch (REG_NOTE_KIND (note))
              {
              case REG_EQUIV:
              case REG_EQUAL:
                df_uses_record (dflow, &XEXP (note, 0), DF_REF_REG_USE,
                                bb, insn, DF_REF_IN_NOTE);
              default:
                break;
              }
          }

      if (CALL_P (insn))
        {
          rtx note;

          /* Record the registers used to pass arguments, and the
             registers explicitly noted as clobbered.  */
          for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
               note = XEXP (note, 1))
            {
              if (GET_CODE (XEXP (note, 0)) == USE)
                df_uses_record (dflow, &XEXP (XEXP (note, 0), 0),
                                DF_REF_REG_USE,
                                bb, insn, 0);
              else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
                {
                  df_defs_record (dflow, XEXP (note, 0), bb, insn);
                  if (REG_P (XEXP (XEXP (note, 0), 0)))
                    {
                      rtx reg = XEXP (XEXP (note, 0), 0);
                      int regno_last;
                      int regno_first;
                      int i;

                      regno_last = regno_first = REGNO (reg);
                      if (regno_first < FIRST_PSEUDO_REGISTER)
                        regno_last
                          += hard_regno_nregs[regno_first][GET_MODE (reg)] - 1;
                      for (i = regno_first; i <= regno_last; i++)
                        regs_ever_live[i] = 1;
                    }
                }
            }

          /* The stack ptr is used (honorarily) by a CALL insn.  */
          df_uses_record (dflow, &regno_reg_rtx[STACK_POINTER_REGNUM],
                          DF_REF_REG_USE, bb, insn,
                          0);

          if (df->flags & DF_HARD_REGS)
            {
              bitmap_iterator bi;
              unsigned int ui;
              /* Calls may also reference any of the global registers,
                 so they are recorded as used.  */
              for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                if (global_regs[i])
                  df_uses_record (dflow, &regno_reg_rtx[i],
                                  DF_REF_REG_USE, bb, insn,
                                  0);
              EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
                df_ref_record (dflow, regno_reg_rtx[ui], &regno_reg_rtx[ui],
                               bb, insn,
                               DF_REF_REG_DEF, DF_REF_CLOBBER, false);
            }
        }

      /* Record the register uses.  */
      df_uses_record (dflow, &PATTERN (insn),
                      DF_REF_REG_USE, bb, insn, 0);

    }
}

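/* Return true if BB has at least one incoming EH edge.  Note that the
   edges into a block are either all EH edges or none of them are.  */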
static bool
df_has_eh_preds (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e->flags & EDGE_EH)
        return true;
    }
  return false;
}

/* Record all the refs within the basic block BB.  */

static void
df_bb_refs_record (struct dataflow *dflow, basic_block bb)
{
  struct df *df = dflow->df;
  rtx insn;
  int luid = 0;
  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (dflow, bb->index);

  /* Need to make sure that there is a record in the basic block info.  */
  if (!bb_info)
    {
      bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
      df_scan_set_bb_info (dflow, bb->index, bb_info);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  /* Scan the block an insn at a time from beginning to end.  */
  FOR_BB_INSNS (bb, insn)
    {
      df_insn_create_insn_record (dflow, insn);
      if (INSN_P (insn))
        {
          /* Record the refs within INSN.  */
          DF_INSN_LUID (df, insn) = luid++;
          df_insn_refs_record (dflow, bb, insn);
        }
      DF_INSN_LUID (df, insn) = luid;
    }

#ifdef EH_RETURN_DATA_REGNO
  if ((df->flags & DF_HARD_REGS)
      && df_has_eh_preds (bb))
    {
      unsigned int i;
      /* Mark the registers that will contain data for the handler.  */
      if (current_function_calls_eh_return)
        for (i = 0; ; ++i)
          {
            unsigned regno = EH_RETURN_DATA_REGNO (i);
            if (regno == INVALID_REGNUM)
              break;
            df_ref_record (dflow, regno_reg_rtx[regno], &regno_reg_rtx[regno],
                           bb, NULL, DF_REF_REG_DEF,
                           DF_REF_ARTIFICIAL | DF_REF_AT_TOP, false);
          }
    }
#endif

#ifdef EH_USES
  /* This code is putting in an artificial ref for the use at the TOP
     of the block that receives the exception.  It is too cumbersome
     to actually put the ref on the edge.  We could either model this
     at the top of the receiver block or the bottom of the sender
     block.

     The bottom of the sender block is problematic because not all
     out-edges of a block are eh-edges.  However, it is true that
     all edges into a block are either eh-edges or none of them are
     eh-edges.  Thus, we can model this at the top of the eh-receiver
     for all of the edges at once.  */
  if ((df->flags & DF_HARD_REGS)
      && df_has_eh_preds (bb))
    {
      unsigned int i;
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (EH_USES (i))
          df_uses_record (dflow, &regno_reg_rtx[i],
                          DF_REF_REG_USE, bb, NULL,
                          DF_REF_ARTIFICIAL | DF_REF_AT_TOP);
    }
#endif

  if ((df->flags & DF_HARD_REGS)
      && bb->index >= NUM_FIXED_BLOCKS)
    {
      /* Before reload, there are a few registers that must be forced
         live everywhere -- which might not already be the case for
         blocks within infinite loops.  */
      if (! reload_completed)
        {
          /* Any reference to any pseudo before reload is a potential
             reference of the frame pointer.  */
          df_uses_record (dflow, &regno_reg_rtx[FRAME_POINTER_REGNUM],
                          DF_REF_REG_USE, bb, NULL, DF_REF_ARTIFICIAL);

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
          /* Pseudos with argument area equivalences may require
             reloading via the argument pointer.  */
          if (fixed_regs[ARG_POINTER_REGNUM])
            df_uses_record (dflow, &regno_reg_rtx[ARG_POINTER_REGNUM],
                            DF_REF_REG_USE, bb, NULL,
                            DF_REF_ARTIFICIAL);
#endif

          /* Any constant, or pseudo with constant equivalences, may
             require reloading from memory using the pic register.  */
          if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
              && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
            df_uses_record (dflow, &regno_reg_rtx[PIC_OFFSET_TABLE_REGNUM],
                            DF_REF_REG_USE, bb, NULL,
                            DF_REF_ARTIFICIAL);
        }
      /* The all-important stack pointer must always be live.  */
      df_uses_record (dflow, &regno_reg_rtx[STACK_POINTER_REGNUM],
                      DF_REF_REG_USE, bb, NULL, DF_REF_ARTIFICIAL);
    }
}


/* Record all the refs in the basic blocks specified by BLOCKS.  */

static void
df_refs_record (struct dataflow *dflow, bitmap blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      df_bb_refs_record (dflow, bb);
    }

  if (bitmap_bit_p (blocks, EXIT_BLOCK))
    df_record_exit_block_uses (dflow);
}


/*----------------------------------------------------------------------------
   Specialized hard register scanning functions.
----------------------------------------------------------------------------*/

/* Mark a register in SET.  Hard registers in large modes get all
   of their component registers set as well.  */

static void
df_mark_reg (rtx reg, void *vset)
{
  bitmap set = (bitmap) vset;
  int regno = REGNO (reg);

  gcc_assert (GET_MODE (reg) != BLKmode);

  bitmap_set_bit (set, regno);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      int n = hard_regno_nregs[regno][GET_MODE (reg)];
      while (--n > 0)
        bitmap_set_bit (set, regno + n);
    }
}

/* Record the set of hard registers that are used in the exit block.  */

static void
df_record_exit_block_uses (struct dataflow *dflow)
{
  unsigned int i;
  bitmap_iterator bi;
  struct df *df = dflow->df;

  bitmap_clear (df->exit_block_uses);

  if (! (df->flags & DF_HARD_REGS))
    return;

  /* If exiting needs the right stack value, consider the stack
     pointer live at the end of the function.  */
  if ((HAVE_epilogue && epilogue_completed)
      || ! EXIT_IGNORE_STACK
      || (! FRAME_POINTER_REQUIRED
          && ! current_function_calls_alloca
          && flag_omit_frame_pointer)
      || current_function_sp_is_unchanging)
    {
      bitmap_set_bit (df->exit_block_uses, STACK_POINTER_REGNUM);
    }

  /* Mark the frame pointer if needed at the end of the function.
     If we end up eliminating it, it will be removed from the live
     list of each basic block by reload.  */

  if (! reload_completed || frame_pointer_needed)
    {
      bitmap_set_bit (df->exit_block_uses, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
        bitmap_set_bit (df->exit_block_uses, HARD_FRAME_POINTER_REGNUM);
#endif
    }

#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
    bitmap_set_bit (df->exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
#endif

  /* Mark all global registers, and all registers used by the
     epilogue as being live at the end of the function since they
     may be referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      bitmap_set_bit (df->exit_block_uses, i);

  if (HAVE_epilogue && epilogue_completed)
    {
      /* Mark all call-saved registers that we actually used.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (regs_ever_live[i] && ! LOCAL_REGNO (i)
            && ! TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
          bitmap_set_bit (df->exit_block_uses, i);
    }

#ifdef EH_RETURN_DATA_REGNO
  /* Mark the registers that will contain data for the handler.  */
  if (reload_completed && current_function_calls_eh_return)
    for (i = 0; ; ++i)
      {
        unsigned regno = EH_RETURN_DATA_REGNO (i);
        if (regno == INVALID_REGNUM)
          break;
        bitmap_set_bit (df->exit_block_uses, regno);
      }
#endif

#ifdef EH_RETURN_STACKADJ_RTX
  if ((! HAVE_epilogue || ! epilogue_completed)
      && current_function_calls_eh_return)
    {
      rtx tmp = EH_RETURN_STACKADJ_RTX;
      if (tmp && REG_P (tmp))
        df_mark_reg (tmp, df->exit_block_uses);
    }
#endif

#ifdef EH_RETURN_HANDLER_RTX
  if ((! HAVE_epilogue || ! epilogue_completed)
      && current_function_calls_eh_return)
    {
      rtx tmp = EH_RETURN_HANDLER_RTX;
      if (tmp && REG_P (tmp))
        df_mark_reg (tmp, df->exit_block_uses);
    }
#endif

  /* Mark function return value.  */
  diddle_return_value (df_mark_reg, (void *) df->exit_block_uses);

  if (df->flags & DF_HARD_REGS)
    EXECUTE_IF_SET_IN_BITMAP (df->exit_block_uses, 0, i, bi)
      df_uses_record (dflow, &regno_reg_rtx[i],
                      DF_REF_REG_USE, EXIT_BLOCK_PTR, NULL,
                      DF_REF_ARTIFICIAL);
}

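/* True once the persistent data structures (elim_reg_set,
   df_invalidated_by_call and df_all_hard_regs) have been set up;
   df_hard_reg_init may be called many times, but they must only be
   allocated once.  */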
static bool initialized = false;

/* Initialize some platform specific structures.  */

void
df_hard_reg_init (void)
{
  int i;
#ifdef ELIMINABLE_REGS
  static const struct { const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
  /* After reload, some ports add certain bits to regs_ever_live so
     this cannot be reset.  */

  if (!reload_completed)
    memset (regs_ever_live, 0, sizeof (regs_ever_live));

  if (initialized)
    return;

  bitmap_obstack_initialize (&persistent_obstack);

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */
  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif

  df_invalidated_by_call = BITMAP_ALLOC (&persistent_obstack);

  /* Inconveniently, this is only readily available in hard reg set
     form.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
      bitmap_set_bit (df_invalidated_by_call, i);

  df_all_hard_regs = BITMAP_ALLOC (&persistent_obstack);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    bitmap_set_bit (df_all_hard_regs, i);

  initialized = true;
}