/* FIXME: We need to go back and add the warning messages about code
   moved across setjmp.  */


/* Scanning of rtl for dataflow analysis.
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
   Free Software Foundation, Inc.
   Originally contributed by Michael P. Hayes
   (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
   Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
   and Kenneth Zadeck (zadeck@naturalbridge.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
*/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "regs.h"
#include "output.h"
#include "alloc-pool.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "sbitmap.h"
#include "bitmap.h"
#include "timevar.h"
#include "tree.h"
#include "target.h"
#include "target-def.h"
#include "df.h"

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO)  0
#endif

/* Indicates where we are in the compilation.  */
int df_state;

/* The bitmap_obstack is used to hold some static variables that
   should not be reset after each function is compiled.  */

static bitmap_obstack persistent_obstack;

/* The set of hard registers in eliminables[i].from.  */

static HARD_REG_SET elim_reg_set;

/* This is a bitmap copy of regs_invalidated_by_call so that we can
   easily add it into bitmaps, etc.  */

bitmap df_invalidated_by_call = NULL;

/* Initialize ur_in and ur_out as if all hard registers were partially
   available.  */

static void df_ref_record (struct dataflow *, rtx, rtx *,
                           basic_block, rtx, enum df_ref_type,
                           enum df_ref_flags, bool record_live);
static void df_def_record_1 (struct dataflow *, rtx, basic_block, rtx,
                             enum df_ref_flags, bool record_live);
static void df_defs_record (struct dataflow *, rtx, basic_block, rtx);
static void df_uses_record (struct dataflow *, rtx *, enum df_ref_type,
                            basic_block, rtx, enum df_ref_flags);

static void df_insn_refs_record (struct dataflow *, basic_block, rtx);
static void df_bb_refs_record (struct dataflow *, basic_block);
static void df_refs_record (struct dataflow *, bitmap);
static struct df_ref *df_ref_create_structure (struct dataflow *, rtx, rtx *,
                                               basic_block, rtx,
                                               enum df_ref_type,
                                               enum df_ref_flags);
static void df_record_entry_block_defs (struct dataflow *);
static void df_record_exit_block_uses (struct dataflow *);
static void df_grow_reg_info (struct dataflow *, struct df_ref_info *);
static void df_grow_ref_info (struct df_ref_info *, unsigned int);
static void df_grow_insn_info (struct df *);


/*----------------------------------------------------------------------------
   SCANNING DATAFLOW PROBLEM

   There are several ways in which scanning looks just like the other
   dataflow problems.  It shares all the mechanisms for local info
   as well as basic block info.  Where it differs is when and how often
   it gets run.  It also has no need for the iterative solver.
----------------------------------------------------------------------------*/

/* Problem data for the scanning dataflow problem.  */
struct df_scan_problem_data
{
  alloc_pool ref_pool;
  alloc_pool insn_pool;
  alloc_pool reg_pool;
};

typedef struct df_scan_bb_info *df_scan_bb_info_t;

static void
df_scan_free_internal (struct dataflow *dflow)
{
  struct df *df = dflow->df;
  struct df_scan_problem_data *problem_data =
    (struct df_scan_problem_data *) dflow->problem_data;

  free (df->def_info.regs);
  free (df->def_info.refs);
  memset (&df->def_info, 0, (sizeof (struct df_ref_info)));

  free (df->use_info.regs);
  free (df->use_info.refs);
  memset (&df->use_info, 0, (sizeof (struct df_ref_info)));

  free (df->insns);
  df->insns = NULL;
  df->insns_size = 0;

  free (dflow->block_info);
  dflow->block_info = NULL;
  dflow->block_info_size = 0;

  BITMAP_FREE (df->hardware_regs_used);
  BITMAP_FREE (df->entry_block_defs);
  BITMAP_FREE (df->exit_block_uses);

  free_alloc_pool (dflow->block_pool);
  free_alloc_pool (problem_data->ref_pool);
  free_alloc_pool (problem_data->insn_pool);
  free_alloc_pool (problem_data->reg_pool);
}


/* Get basic block info.  */

struct df_scan_bb_info *
df_scan_get_bb_info (struct dataflow *dflow, unsigned int index)
{
  gcc_assert (index < dflow->block_info_size);
  return (struct df_scan_bb_info *) dflow->block_info[index];
}


/* Set basic block info.  */

static void
df_scan_set_bb_info (struct dataflow *dflow, unsigned int index,
                     struct df_scan_bb_info *bb_info)
{
  gcc_assert (index < dflow->block_info_size);
  dflow->block_info[index] = (void *) bb_info;
}


/* Free basic block info.  */

static void
df_scan_free_bb_info (struct dataflow *dflow, basic_block bb, void *vbb_info)
{
  struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
  if (bb_info)
    {
      df_bb_refs_delete (dflow, bb->index);
      pool_free (dflow->block_pool, bb_info);
    }
}


/* Allocate the problem data for the scanning problem.  This should be
   called when the problem is created or when the entire function is to
   be rescanned.  */

static void
df_scan_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
{
  struct df *df = dflow->df;
  struct df_scan_problem_data *problem_data;
  unsigned int insn_num = get_max_uid () + 1;
  unsigned int block_size = 50;
  unsigned int bb_index;
  bitmap_iterator bi;

  /* Given the number of pools, this is really faster than tearing
     everything apart.  */
  if (dflow->problem_data)
    df_scan_free_internal (dflow);

  dflow->block_pool
    = create_alloc_pool ("df_scan_block pool",
                         sizeof (struct df_scan_bb_info),
                         block_size);

  problem_data = xmalloc (sizeof (struct df_scan_problem_data));
  dflow->problem_data = problem_data;

  problem_data->ref_pool
    = create_alloc_pool ("df_scan_ref pool",
                         sizeof (struct df_ref), block_size);
  problem_data->insn_pool
    = create_alloc_pool ("df_scan_insn pool",
                         sizeof (struct df_insn_info), block_size);
  problem_data->reg_pool
    = create_alloc_pool ("df_scan_reg pool",
                         sizeof (struct df_reg_info), block_size);

  insn_num += insn_num / 4;
  df_grow_reg_info (dflow, &df->def_info);
  df_grow_ref_info (&df->def_info, insn_num);

  df_grow_reg_info (dflow, &df->use_info);
  df_grow_ref_info (&df->use_info, insn_num * 2);

  df_grow_insn_info (df);
  df_grow_bb_info (dflow);

  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
    {
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (dflow, bb_index);
      if (!bb_info)
        {
          bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
          df_scan_set_bb_info (dflow, bb_index, bb_info);
        }
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  df->hardware_regs_used = BITMAP_ALLOC (NULL);
  df->entry_block_defs = BITMAP_ALLOC (NULL);
  df->exit_block_uses = BITMAP_ALLOC (NULL);
}


/* Free all of the data associated with the scan problem.  */

static void
df_scan_free (struct dataflow *dflow)
{
  struct df *df = dflow->df;

  if (dflow->problem_data)
    {
      df_scan_free_internal (dflow);
      free (dflow->problem_data);
    }

  if (df->blocks_to_scan)
    BITMAP_FREE (df->blocks_to_scan);

  if (df->blocks_to_analyze)
    BITMAP_FREE (df->blocks_to_analyze);

  free (dflow);
}

static void
df_scan_dump (struct dataflow *dflow ATTRIBUTE_UNUSED, FILE *file ATTRIBUTE_UNUSED)
{
  struct df *df = dflow->df;
  int i;

  fprintf (file, "  invalidated by call \t");
  dump_bitmap (file, df_invalidated_by_call);
  fprintf (file, "  hardware regs used \t");
  dump_bitmap (file, df->hardware_regs_used);
  fprintf (file, "  entry block defs \t");
  dump_bitmap (file, df->entry_block_defs);
  fprintf (file, "  exit block uses \t");
  dump_bitmap (file, df->exit_block_uses);
  fprintf (file, "  regs ever live \t");
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (regs_ever_live[i])
      fprintf (file, "%d ", i);
  fprintf (file, "\n");
}

static struct df_problem problem_SCAN =
{
  DF_SCAN,                    /* Problem id.  */
  DF_NONE,                    /* Direction.  */
  df_scan_alloc,              /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_scan_free_bb_info,       /* Free basic block info.  */
  NULL,                       /* Local compute function.  */
  NULL,                       /* Init the solution specific data.  */
  NULL,                       /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  NULL,                       /* Confluence operator n.  */
  NULL,                       /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_scan_free,               /* Free all of the problem information.  */
  df_scan_dump,               /* Debugging.  */
  NULL                        /* Dependent problem.  */
};


/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

struct dataflow *
df_scan_add_problem (struct df *df)
{
  return df_add_problem (df, &problem_SCAN);
}
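
/* Illustrative usage sketch.  The entry points df_init and
   df_analyze live in df-core.c; their exact signatures here are an
   assumption of this comment, not something this file defines:

     struct df *df = df_init (DF_HARD_REGS);
     df_scan_add_problem (df);
     df_rescan_blocks (df, NULL);

   Passing NULL for the block set makes df_rescan_blocks reallocate
   everything and scan every basic block of the current function.  */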

/*----------------------------------------------------------------------------
   Storage Allocation Utilities
----------------------------------------------------------------------------*/


/* First, grow the reg_info information.  If the current size is less than
   the number of pseudos, grow to 25% more than the number of
   pseudos.

   Second, assure that all of the slots up to max_reg_num have been
   filled with reg_info structures.  */

static void
df_grow_reg_info (struct dataflow *dflow, struct df_ref_info *ref_info)
{
  unsigned int max_reg = max_reg_num ();
  unsigned int new_size = max_reg;
  struct df_scan_problem_data *problem_data =
    (struct df_scan_problem_data *) dflow->problem_data;
  unsigned int i;

  if (ref_info->regs_size < new_size)
    {
      new_size += new_size / 4;
      ref_info->regs = xrealloc (ref_info->regs,
                                 new_size * sizeof (struct df_reg_info *));
      ref_info->regs_size = new_size;
    }

  for (i = ref_info->regs_inited; i < max_reg; i++)
    {
      struct df_reg_info *reg_info = pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      ref_info->regs[i] = reg_info;
    }

  ref_info->regs_inited = max_reg;
}


/* Grow the ref information.  */

static void
df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
{
  if (ref_info->refs_size < new_size)
    {
      ref_info->refs = xrealloc (ref_info->refs,
                                 new_size * sizeof (struct df_ref *));
      memset (ref_info->refs + ref_info->refs_size, 0,
              (new_size - ref_info->refs_size) * sizeof (struct df_ref *));
      ref_info->refs_size = new_size;
    }
}


/* Grow the insn information.  If the current size is less than the
   number of instructions, grow to 25% more than the number of
   instructions.  */

static void
df_grow_insn_info (struct df *df)
{
  unsigned int new_size = get_max_uid () + 1;
  if (df->insns_size < new_size)
    {
      new_size += new_size / 4;
      df->insns = xrealloc (df->insns,
                            new_size * sizeof (struct df_insn_info *));
      memset (df->insns + df->insns_size, 0,
              (new_size - df->insns_size) * sizeof (struct df_insn_info *));
      df->insns_size = new_size;
    }
}



/*----------------------------------------------------------------------------
   PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
----------------------------------------------------------------------------*/

/* Rescan some BLOCKS, or all the blocks defined by the last call to
   df_set_blocks (if BLOCKS is NULL).  */

void
df_rescan_blocks (struct df *df, bitmap blocks)
{
  bitmap local_blocks_to_scan = BITMAP_ALLOC (NULL);

  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  basic_block bb;

  df->def_info.refs_organized = false;
  df->use_info.refs_organized = false;

  if (blocks)
    {
      int i;

      /* Make sure that there is space in all of the tables.  */
      unsigned int insn_num = get_max_uid () + 1;
      insn_num += insn_num / 4;

      df_grow_reg_info (dflow, &df->def_info);
      df_grow_ref_info (&df->def_info, insn_num);

      df_grow_reg_info (dflow, &df->use_info);
      df_grow_ref_info (&df->use_info, insn_num * 2);

      df_grow_insn_info (df);
      df_grow_bb_info (dflow);

      bitmap_copy (local_blocks_to_scan, blocks);
      df->def_info.add_refs_inline = true;
      df->use_info.add_refs_inline = true;

      for (i = df->num_problems_defined; i; i--)
        {
          bitmap blocks_to_reset = NULL;
          if (*dflow->problem->reset_fun)
            {
              if (!blocks_to_reset)
                {
                  blocks_to_reset = BITMAP_ALLOC (NULL);
                  bitmap_copy (blocks_to_reset, local_blocks_to_scan);
                  if (df->blocks_to_scan)
                    bitmap_ior_into (blocks_to_reset, df->blocks_to_scan);
                }
              (*dflow->problem->reset_fun) (dflow, blocks_to_reset);
            }
          if (blocks_to_reset)
            BITMAP_FREE (blocks_to_reset);
        }

      df_refs_delete (dflow, local_blocks_to_scan);

      /* This may be a mistake, but if an explicit set of blocks is
         passed in and the set of blocks to analyze has been
         explicitly set, add the extra blocks to blocks_to_analyze.
         The alternative is to put an assert here.  We do not want
         this to just go by silently or else we may get storage
         leaks.  */
      if (df->blocks_to_analyze)
        bitmap_ior_into (df->blocks_to_analyze, blocks);
    }
  else
    {
      /* If we are going to do everything, just reallocate everything.
         Most stuff is allocated in pools so this is faster than
         walking it.  */
      if (df->blocks_to_analyze)
        bitmap_copy (local_blocks_to_scan, df->blocks_to_analyze);
      else
        FOR_ALL_BB (bb)
          {
            bitmap_set_bit (local_blocks_to_scan, bb->index);
          }
      df_scan_alloc (dflow, local_blocks_to_scan);

      df->def_info.add_refs_inline = false;
      df->use_info.add_refs_inline = false;
    }

  df_refs_record (dflow, local_blocks_to_scan);
#if 0
  bitmap_print (stderr, local_blocks_to_scan, "scanning: ", "\n");
#endif

  if (!df->blocks_to_scan)
    df->blocks_to_scan = BITMAP_ALLOC (NULL);

  bitmap_ior_into (df->blocks_to_scan, local_blocks_to_scan);
  BITMAP_FREE (local_blocks_to_scan);
}
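
/* Illustrative sketch: a pass that has just rewritten the insns in a
   single basic block BB (a caller-provided block, not a name defined
   here) can rescan only that block:

     bitmap changed = BITMAP_ALLOC (NULL);
     bitmap_set_bit (changed, bb->index);
     df_rescan_blocks (df, changed);
     BITMAP_FREE (changed);

   With a non-NULL BLOCKS argument the refs of the listed blocks are
   deleted and rebuilt, and add_refs_inline is turned on so that
   later single-ref changes can also be made incrementally.  */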

/* Create a new ref of type DF_REF_TYPE for register REG at address
   LOC within INSN of BB.  */

struct df_ref *
df_ref_create (struct df *df, rtx reg, rtx *loc, rtx insn,
               basic_block bb,
               enum df_ref_type ref_type,
               enum df_ref_flags ref_flags)
{
  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  struct df_scan_bb_info *bb_info;

  df_grow_reg_info (dflow, &df->use_info);
  df_grow_reg_info (dflow, &df->def_info);
  df_grow_bb_info (dflow);

  /* Make sure there is a bb_info for this block.  */
  bb_info = df_scan_get_bb_info (dflow, bb->index);
  if (!bb_info)
    {
      bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
      df_scan_set_bb_info (dflow, bb->index, bb_info);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  if (ref_type == DF_REF_REG_DEF)
    df->def_info.add_refs_inline = true;
  else
    df->use_info.add_refs_inline = true;

  return df_ref_create_structure (dflow, reg, loc, bb, insn, ref_type, ref_flags);
}
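
/* Illustrative sketch: a pass that has inserted a new use of a
   register into an existing insn could register the ref by hand.
   Here my_reg, pat, insn and bb are the caller's values, used only
   for illustration:

     df_ref_create (df, my_reg, &XEXP (pat, 1), insn, bb,
                    DF_REF_REG_USE, 0);

   This grows the reg and block tables as needed and switches the
   use table to inline (unorganized) mode.  */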


/*----------------------------------------------------------------------------
   UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
----------------------------------------------------------------------------*/


/* Get the artificial defs for a basic block.  */

struct df_ref *
df_get_artificial_defs (struct df *df, unsigned int bb_index)
{
  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  return df_scan_get_bb_info (dflow, bb_index)->artificial_defs;
}


/* Get the artificial uses for a basic block.  */

struct df_ref *
df_get_artificial_uses (struct df *df, unsigned int bb_index)
{
  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  return df_scan_get_bb_info (dflow, bb_index)->artificial_uses;
}


/* Link REF at the front of the reg_use or reg_def chain for REGNO.  */

void
df_reg_chain_create (struct df_reg_info *reg_info,
                     struct df_ref *ref)
{
  struct df_ref *head = reg_info->reg_chain;
  reg_info->reg_chain = ref;

  DF_REF_NEXT_REG (ref) = head;

  /* We cannot actually link to the head of the chain.  */
  DF_REF_PREV_REG (ref) = NULL;

  if (head)
    DF_REF_PREV_REG (head) = ref;
}
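
/* The chain is doubly linked and grows at the front.  After inserting
   r1 then r2 then r3 for the same register, the layout is:

     reg_info->reg_chain -> r3 <-> r2 <-> r1 -> NULL

   with DF_REF_PREV_REG (r3) == NULL, since the head is reachable only
   through reg_info->reg_chain, which is not itself a ref.  */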


/* Remove REF from the CHAIN.  Return the head of the chain.  This
   will be CHAIN unless the REF was at the beginning of the chain.  */

static struct df_ref *
df_ref_unlink (struct df_ref *chain, struct df_ref *ref)
{
  struct df_ref *orig_chain = chain;
  struct df_ref *prev = NULL;
  while (chain)
    {
      if (chain == ref)
        {
          if (prev)
            {
              prev->next_ref = ref->next_ref;
              ref->next_ref = NULL;
              return orig_chain;
            }
          else
            {
              chain = ref->next_ref;
              ref->next_ref = NULL;
              return chain;
            }
        }

      prev = chain;
      chain = chain->next_ref;
    }

  /* Someone passed in a ref that was not in the chain.  */
  gcc_unreachable ();
  return NULL;
}


/* Unlink and delete REF from the reg_use or reg_def chain.  Also
   delete the def-use or use-def chain if it exists.  Returns the
   next ref in the uses or defs chain.  */

struct df_ref *
df_reg_chain_unlink (struct dataflow *dflow, struct df_ref *ref)
{
  struct df *df = dflow->df;
  struct df_ref *next = DF_REF_NEXT_REG (ref);
  struct df_ref *prev = DF_REF_PREV_REG (ref);
  struct df_scan_problem_data *problem_data =
    (struct df_scan_problem_data *) dflow->problem_data;
  struct df_reg_info *reg_info;
  struct df_ref *next_ref = ref->next_ref;
  unsigned int id = DF_REF_ID (ref);

  if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
    {
      reg_info = DF_REG_DEF_GET (df, DF_REF_REGNO (ref));
      df->def_info.bitmap_size--;
      if (df->def_info.refs && (id < df->def_info.refs_size))
        DF_DEFS_SET (df, id, NULL);
    }
  else
    {
      reg_info = DF_REG_USE_GET (df, DF_REF_REGNO (ref));
      df->use_info.bitmap_size--;
      if (df->use_info.refs && (id < df->use_info.refs_size))
        DF_USES_SET (df, id, NULL);
    }

  /* Delete any def-use or use-def chains that start here.  */
  if (DF_REF_CHAIN (ref))
    df_chain_unlink (df->problems_by_index[DF_CHAIN], ref, NULL);

  reg_info->n_refs--;

  /* Unlink from the reg chain.  If there is no prev, this is the
     first of the list.  If not, just join the next and prev.  */
  if (prev)
    {
      DF_REF_NEXT_REG (prev) = next;
      if (next)
        DF_REF_PREV_REG (next) = prev;
    }
  else
    {
      reg_info->reg_chain = next;
      if (next)
        DF_REF_PREV_REG (next) = NULL;
    }

  pool_free (problem_data->ref_pool, ref);
  return next_ref;
}


/* Unlink REF from all def-use/use-def chains, etc.  */

void
df_ref_remove (struct df *df, struct df_ref *ref)
{
  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  if (DF_REF_REG_DEF_P (ref))
    {
      if (DF_REF_FLAGS (ref) & DF_REF_ARTIFICIAL)
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (dflow, DF_REF_BB (ref)->index);
          bb_info->artificial_defs
            = df_ref_unlink (bb_info->artificial_defs, ref);
        }
      else
        DF_INSN_UID_DEFS (df, DF_REF_INSN_UID (ref)) =
          df_ref_unlink (DF_INSN_UID_DEFS (df, DF_REF_INSN_UID (ref)), ref);

      if (df->def_info.add_refs_inline)
        DF_DEFS_SET (df, DF_REF_ID (ref), NULL);
    }
  else
    {
      if (DF_REF_FLAGS (ref) & DF_REF_ARTIFICIAL)
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (dflow, DF_REF_BB (ref)->index);
          bb_info->artificial_uses
            = df_ref_unlink (bb_info->artificial_uses, ref);
        }
      else
        DF_INSN_UID_USES (df, DF_REF_INSN_UID (ref)) =
          df_ref_unlink (DF_INSN_UID_USES (df, DF_REF_INSN_UID (ref)), ref);

      if (df->use_info.add_refs_inline)
        DF_USES_SET (df, DF_REF_ID (ref), NULL);
    }

  df_reg_chain_unlink (dflow, ref);
}


/* Create the insn record for INSN.  If there was one there, zero it out.  */

static struct df_insn_info *
df_insn_create_insn_record (struct dataflow *dflow, rtx insn)
{
  struct df *df = dflow->df;
  struct df_scan_problem_data *problem_data =
    (struct df_scan_problem_data *) dflow->problem_data;

  struct df_insn_info *insn_rec = DF_INSN_GET (df, insn);
  if (!insn_rec)
    {
      insn_rec = pool_alloc (problem_data->insn_pool);
      DF_INSN_SET (df, insn, insn_rec);
    }
  memset (insn_rec, 0, sizeof (struct df_insn_info));

  return insn_rec;
}


/* Delete all of the refs information from INSN.  */

void
df_insn_refs_delete (struct dataflow *dflow, rtx insn)
{
  struct df *df = dflow->df;
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = NULL;
  struct df_ref *ref;
  struct df_scan_problem_data *problem_data =
    (struct df_scan_problem_data *) dflow->problem_data;

  if (uid < df->insns_size)
    insn_info = DF_INSN_UID_GET (df, uid);

  if (insn_info)
    {
      ref = insn_info->defs;
      while (ref)
        ref = df_reg_chain_unlink (dflow, ref);

      ref = insn_info->uses;
      while (ref)
        ref = df_reg_chain_unlink (dflow, ref);

      pool_free (problem_data->insn_pool, insn_info);
      DF_INSN_SET (df, insn, NULL);
    }
}


/* Delete all of the refs information from basic_block with BB_INDEX.  */

void
df_bb_refs_delete (struct dataflow *dflow, int bb_index)
{
  struct df_ref *def;
  struct df_ref *use;

  struct df_scan_bb_info *bb_info
    = df_scan_get_bb_info (dflow, bb_index);
  rtx insn;
  basic_block bb = BASIC_BLOCK (bb_index);
  FOR_BB_INSNS (bb, insn)
    {
      if (INSN_P (insn))
        {
          /* Delete the refs recorded for INSN.  */
          df_insn_refs_delete (dflow, insn);
        }
    }

  /* Get rid of any artificial uses or defs.  */
  if (bb_info)
    {
      def = bb_info->artificial_defs;
      while (def)
        def = df_reg_chain_unlink (dflow, def);
      bb_info->artificial_defs = NULL;
      use = bb_info->artificial_uses;
      while (use)
        use = df_reg_chain_unlink (dflow, use);
      bb_info->artificial_uses = NULL;
    }
}


/* Delete all of the refs information from BLOCKS.  */

void
df_refs_delete (struct dataflow *dflow, bitmap blocks)
{
  bitmap_iterator bi;
  unsigned int bb_index;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
    {
      df_bb_refs_delete (dflow, bb_index);
    }
}


/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  */

void
df_reorganize_refs (struct df_ref_info *ref_info)
{
  unsigned int m = ref_info->regs_inited;
  unsigned int regno;
  unsigned int offset = 0;
  unsigned int size = 0;

  if (ref_info->refs_organized)
    return;

  if (ref_info->refs_size < ref_info->bitmap_size)
    {
      int new_size = ref_info->bitmap_size + ref_info->bitmap_size / 4;
      df_grow_ref_info (ref_info, new_size);
    }

  for (regno = 0; regno < m; regno++)
    {
      struct df_reg_info *reg_info = ref_info->regs[regno];
      int count = 0;
      if (reg_info)
        {
          struct df_ref *ref = reg_info->reg_chain;
          reg_info->begin = offset;
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              ref = DF_REF_NEXT_REG (ref);
              count++;
              size++;
            }
          reg_info->n_refs = count;
        }
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */
  ref_info->bitmap_size = size;
  ref_info->refs_organized = true;
  ref_info->add_refs_inline = true;
}
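
/* A small illustration of the compaction: if register 2 has refs a
   and b on its chain and register 5 has ref c, then afterwards

     refs[] = { a, b, c }
     regs[2]->begin == 0, regs[2]->n_refs == 2
     regs[5]->begin == 2, regs[5]->n_refs == 1

   and DF_REF_ID of each ref equals its slot, so a register's refs
   can be walked as a dense slice of the table.  */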

/* Local miscellaneous routines.  */

/* Local routines for recording refs.  */

/* Set where we are in the compilation.  */

void
df_set_state (int state)
{
  df_state = state;
}


/*----------------------------------------------------------------------------
   Hard core instruction scanning code.  No external interfaces here,
   just a lot of routines that look inside insns.
----------------------------------------------------------------------------*/

/* Create a ref and add it to the reg-def or reg-use chains.  */

static struct df_ref *
df_ref_create_structure (struct dataflow *dflow, rtx reg, rtx *loc,
                         basic_block bb, rtx insn,
                         enum df_ref_type ref_type,
                         enum df_ref_flags ref_flags)
{
  struct df_ref *this_ref;
  struct df *df = dflow->df;
  int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  struct df_scan_problem_data *problem_data =
    (struct df_scan_problem_data *) dflow->problem_data;

  this_ref = pool_alloc (problem_data->ref_pool);
  DF_REF_REG (this_ref) = reg;
  DF_REF_REGNO (this_ref) = regno;
  DF_REF_LOC (this_ref) = loc;
  DF_REF_INSN (this_ref) = insn;
  DF_REF_CHAIN (this_ref) = NULL;
  DF_REF_TYPE (this_ref) = ref_type;
  DF_REF_FLAGS (this_ref) = ref_flags;
  DF_REF_DATA (this_ref) = NULL;
  DF_REF_BB (this_ref) = bb;

  /* Link the ref into the reg_def and reg_use chains and keep a count
     of the instances.  */
  if (ref_type == DF_REF_REG_DEF)
    {
      struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
      reg_info->n_refs++;

      /* Add the ref to the reg_def chain.  */
      df_reg_chain_create (reg_info, this_ref);
      DF_REF_ID (this_ref) = df->def_info.bitmap_size;
      if (df->def_info.add_refs_inline)
        {
          if (DF_DEFS_SIZE (df) >= df->def_info.refs_size)
            {
              int new_size = df->def_info.bitmap_size
                + df->def_info.bitmap_size / 4;
              df_grow_ref_info (&df->def_info, new_size);
            }
          /* Add the ref to the big array of defs.  */
          DF_DEFS_SET (df, df->def_info.bitmap_size, this_ref);
          df->def_info.refs_organized = false;
        }

      df->def_info.bitmap_size++;

      if (DF_REF_FLAGS (this_ref) & DF_REF_ARTIFICIAL)
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (dflow, bb->index);
          this_ref->next_ref = bb_info->artificial_defs;
          bb_info->artificial_defs = this_ref;
        }
      else
        {
          this_ref->next_ref = DF_INSN_GET (df, insn)->defs;
          DF_INSN_GET (df, insn)->defs = this_ref;
        }
    }
  else
    {
      struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
      reg_info->n_refs++;

      /* Add the ref to the reg_use chain.  */
      df_reg_chain_create (reg_info, this_ref);
      DF_REF_ID (this_ref) = df->use_info.bitmap_size;
      if (df->use_info.add_refs_inline)
        {
          if (DF_USES_SIZE (df) >= df->use_info.refs_size)
            {
              int new_size = df->use_info.bitmap_size
                + df->use_info.bitmap_size / 4;
              df_grow_ref_info (&df->use_info, new_size);
            }
          /* Add the ref to the big array of uses.  */
          DF_USES_SET (df, df->use_info.bitmap_size, this_ref);
          df->use_info.refs_organized = false;
        }

      df->use_info.bitmap_size++;
      if (DF_REF_FLAGS (this_ref) & DF_REF_ARTIFICIAL)
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (dflow, bb->index);
          this_ref->next_ref = bb_info->artificial_uses;
          bb_info->artificial_uses = this_ref;
        }
      else
        {
          this_ref->next_ref = DF_INSN_GET (df, insn)->uses;
          DF_INSN_GET (df, insn)->uses = this_ref;
        }
    }
  return this_ref;
}


/* Create new references of type DF_REF_TYPE for each part of register REG
   at address LOC within INSN of BB.  */

static void
df_ref_record (struct dataflow *dflow, rtx reg, rtx *loc,
               basic_block bb, rtx insn,
               enum df_ref_type ref_type,
               enum df_ref_flags ref_flags,
               bool record_live)
{
  unsigned int regno;
  struct df *df = dflow->df;

  gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);

  /* For the reg allocator we are interested in some SUBREG rtx's, but not
     all.  Notably only those representing a word extraction from a multi-word
     reg.  As written in the documentation those should have the form
     (subreg:SI (reg:M A) N), with size(SImode) > size(Mmode).
     XXX Is that true?  We could also use the global word_mode variable.  */
  if ((df->flags & DF_SUBREGS) == 0
      && GET_CODE (reg) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (reg)) < GET_MODE_SIZE (word_mode)
          || GET_MODE_SIZE (GET_MODE (reg))
               >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (reg)))))
    {
      loc = &SUBREG_REG (reg);
      reg = *loc;
      ref_flags |= DF_REF_STRIPPED;
    }

  regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      int i;
      int endregno;

      if (! (df->flags & DF_HARD_REGS))
        return;

      /* GET_MODE (reg) is correct here.  We do not want to go into a SUBREG
         for the mode, because we only want to add references to regs, which
         are really referenced.  E.g., a (subreg:SI (reg:DI 0) 0) does _not_
         reference the whole reg 0 in DI mode (which would also include
         reg 1, at least, if 0 and 1 are SImode registers).  */
      endregno = hard_regno_nregs[regno][GET_MODE (reg)];
      if (GET_CODE (reg) == SUBREG)
        regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
                                      SUBREG_BYTE (reg), GET_MODE (reg));
      endregno += regno;

      for (i = regno; i < endregno; i++)
        {
          /* Calls are handled at call site because regs_ever_live
             doesn't include clobbered regs, only used ones.  */
          if (ref_type == DF_REF_REG_DEF && record_live)
            regs_ever_live[i] = 1;
          else if ((ref_type == DF_REF_REG_USE
                    || ref_type == DF_REF_REG_MEM_STORE
                    || ref_type == DF_REF_REG_MEM_LOAD)
                   && ((ref_flags & DF_REF_ARTIFICIAL) == 0))
            {
              /* Set regs_ever_live on uses of non-eliminable frame
                 pointers and arg pointers.  */
              if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
                     && (regno == FRAME_POINTER_REGNUM
                         || regno == ARG_POINTER_REGNUM)))
                regs_ever_live[i] = 1;
            }

          df_ref_create_structure (dflow, regno_reg_rtx[i], loc,
                                   bb, insn, ref_type, ref_flags);
        }
    }
  else
    {
      df_ref_create_structure (dflow, reg, loc,
                               bb, insn, ref_type, ref_flags);
    }
}


/* A set to a non-paradoxical SUBREG for which the number of word_mode units
   covered by the outer mode is smaller than that covered by the inner mode,
   is a read-modify-write operation.
   This function returns true iff the SUBREG X is such a SUBREG.  */

bool
df_read_modify_subreg_p (rtx x)
{
  unsigned int isize, osize;
  if (GET_CODE (x) != SUBREG)
    return false;
  isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
  osize = GET_MODE_SIZE (GET_MODE (x));
  return (isize > osize && isize > UNITS_PER_WORD);
}
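
/* Worked example, assuming a 32-bit target where UNITS_PER_WORD is 4:
   for (subreg:SI (reg:DI 0) 0), isize == 8 and osize == 4, so both
   tests hold and the result is true -- writing the low word leaves
   the high word of the DImode register intact, a read-modify-write.
   For (subreg:QI (reg:HI 2) 0), isize == 2 is not greater than
   UNITS_PER_WORD, the whole inner register fits in one word, and the
   result is false.  */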


/* Process all the registers defined in the rtx, X.
   Autoincrement/decrement definitions will be picked up by
   df_uses_record.  */

static void
df_def_record_1 (struct dataflow *dflow, rtx x,
                 basic_block bb, rtx insn,
                 enum df_ref_flags flags, bool record_live)
{
  rtx *loc;
  rtx dst;

  /* We may recursively call ourselves on EXPR_LIST when dealing with a
     PARALLEL construct.  */
  if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
    loc = &XEXP (x, 0);
  else
    loc = &SET_DEST (x);
  dst = *loc;

  /* Some targets place small structures in registers for
     return values of functions.  */
  if (GET_CODE (dst) == PARALLEL && GET_MODE (dst) == BLKmode)
    {
      int i;

      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
        {
          rtx temp = XVECEXP (dst, 0, i);
          if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
              || GET_CODE (temp) == SET)
            df_def_record_1 (dflow, temp, bb, insn,
                             GET_CODE (temp) == CLOBBER
                             ? flags | DF_REF_CLOBBER : flags,
                             record_live);
        }
      return;
    }

  /* Maybe we should flag the use of STRICT_LOW_PART somehow.  It might
     be handy for the reg allocator.  */
  while (GET_CODE (dst) == STRICT_LOW_PART
         || GET_CODE (dst) == ZERO_EXTRACT
         || df_read_modify_subreg_p (dst))
    {
#if 0
      /* Strict low part always contains SUBREG, but we do not want to make
         it appear outside, as the whole register is always considered.  */
      if (GET_CODE (dst) == STRICT_LOW_PART)
        {
          loc = &XEXP (dst, 0);
          dst = *loc;
        }
#endif
      loc = &XEXP (dst, 0);
      dst = *loc;
      flags |= DF_REF_READ_WRITE;
    }

  if (REG_P (dst)
      || (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))))
    df_ref_record (dflow, dst, loc, bb, insn,
                   DF_REF_REG_DEF, flags, record_live);
}


/* Process all the registers defined in the pattern rtx, X.  */

static void
df_defs_record (struct dataflow *dflow, rtx x, basic_block bb, rtx insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == SET || code == CLOBBER)
    {
      /* Mark the single def within the pattern.  */
      df_def_record_1 (dflow, x, bb, insn,
                       code == CLOBBER ? DF_REF_CLOBBER : 0, true);
    }
  else if (code == COND_EXEC)
    {
      df_defs_record (dflow, COND_EXEC_CODE (x), bb, insn);
    }
  else if (code == PARALLEL)
    {
      int i;

      /* Mark the multiple defs within the pattern.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        df_defs_record (dflow, XVECEXP (x, 0, i), bb, insn);
    }
}
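
/* For instance, an illustrative (not target-specific) pattern like

     (parallel [(set (reg:SI 60) (plus:SI (reg:SI 61) (reg:SI 62)))
                (clobber (reg:CC 17))])

   is walked as a PARALLEL: the SET yields a plain def of reg 60 and
   the CLOBBER yields a def of reg 17 marked DF_REF_CLOBBER.  The
   uses of regs 61 and 62 are found separately by df_uses_record.  */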


/* Process all the registers used in the rtx at address LOC.  */

static void
df_uses_record (struct dataflow *dflow, rtx *loc, enum df_ref_type ref_type,
                basic_block bb, rtx insn, enum df_ref_flags flags)
{
  RTX_CODE code;
  rtx x;
 retry:
  x = *loc;
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
         as being used.  */
      if (MEM_P (XEXP (x, 0)))
        df_uses_record (dflow, &XEXP (XEXP (x, 0), 0),
                        DF_REF_REG_MEM_STORE, bb, insn, flags);

      /* If we're clobbering a REG then we have a def so ignore.  */
      return;

    case MEM:
      df_uses_record (dflow, &XEXP (x, 0), DF_REF_REG_MEM_LOAD, bb, insn,
                      flags & DF_REF_IN_NOTE);
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */

      /* In case the SUBREG is not of a REG, do not optimize.  */
      if (!REG_P (SUBREG_REG (x)))
        {
          loc = &SUBREG_REG (x);
          df_uses_record (dflow, loc, ref_type, bb, insn, flags);
          return;
        }
      /* ... Fall through ...  */

    case REG:
      df_ref_record (dflow, x, loc, bb, insn, ref_type, flags, true);
      return;

    case SET:
      {
        rtx dst = SET_DEST (x);
        gcc_assert (!(flags & DF_REF_IN_NOTE));
        df_uses_record (dflow, &SET_SRC (x), DF_REF_REG_USE, bb, insn, flags);

        switch (GET_CODE (dst))
          {
          case SUBREG:
            if (df_read_modify_subreg_p (dst))
              {
                df_uses_record (dflow, &SUBREG_REG (dst),
                                DF_REF_REG_USE, bb,
                                insn, flags | DF_REF_READ_WRITE);
                break;
              }
            /* Fall through.  */
          case REG:
          case PARALLEL:
          case SCRATCH:
          case PC:
          case CC0:
            break;
          case MEM:
            df_uses_record (dflow, &XEXP (dst, 0),
                            DF_REF_REG_MEM_STORE,
                            bb, insn, flags);
            break;
          case STRICT_LOW_PART:
            {
              rtx *temp = &XEXP (dst, 0);
              /* A strict_low_part uses the whole REG and not just the
                 SUBREG.  */
              dst = XEXP (dst, 0);
              df_uses_record (dflow,
                              (GET_CODE (dst) == SUBREG)
                              ? &SUBREG_REG (dst) : temp,
                              DF_REF_REG_USE, bb,
                              insn, DF_REF_READ_WRITE);
            }
            break;
          case ZERO_EXTRACT:
          case SIGN_EXTRACT:
            df_uses_record (dflow, &XEXP (dst, 0),
                            DF_REF_REG_USE, bb, insn,
                            DF_REF_READ_WRITE);
            df_uses_record (dflow, &XEXP (dst, 1),
                            DF_REF_REG_USE, bb, insn, flags);
            df_uses_record (dflow, &XEXP (dst, 2),
                            DF_REF_REG_USE, bb, insn, flags);
            dst = XEXP (dst, 0);
            break;
          default:
            gcc_unreachable ();
          }
        return;
      }

    case RETURN:
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
        /* Traditional and volatile asm instructions must be
           considered to use and clobber all hard registers, all
           pseudo-registers and all of memory.  So must TRAP_IF and
           UNSPEC_VOLATILE operations.

           Consider for instance a volatile asm that changes the fpu
           rounding mode.  An insn should not be moved across this
           even if it only uses pseudo-regs because it might give an
           incorrectly rounded result.

           However, flow.c's liveness computation did *not* do this,
           giving the reasoning as " ?!? Unfortunately, marking all
           hard registers as live causes massive problems for the
           register allocator and marking all pseudos as live creates
           mountains of uninitialized variable warnings."

           In order to maintain the status quo with regard to liveness
           and uses, we do what flow.c did and just mark any regs we
           can find in ASM_OPERANDS as used.  Later on, when liveness
           is computed, asm insns are scanned and regs_asm_clobbered
           is filled out.

           For all ASM_OPERANDS, we must traverse the vector of input
           operands.  We cannot just fall through here since then we
           would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
           which do not indicate traditional asms unlike their normal
           usage.  */
        if (code == ASM_OPERANDS)
          {
            int j;

            for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
              df_uses_record (dflow, &ASM_OPERANDS_INPUT (x, j),
                              DF_REF_REG_USE, bb, insn, flags);
            return;
          }
        break;
      }

    case PRE_DEC:
    case POST_DEC:
    case PRE_INC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* Catch the def of the register being modified.  */
      flags |= DF_REF_READ_WRITE;
      df_ref_record (dflow, XEXP (x, 0), &XEXP (x, 0), bb, insn,
                     DF_REF_REG_DEF, flags, true);

      /* ... Fall through to handle uses ...  */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */
  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            /* Tail recursive case: save a function call level.  */
            if (i == 0)
              {
                loc = &XEXP (x, 0);
                goto retry;
              }
            df_uses_record (dflow, &XEXP (x, i), ref_type, bb, insn, flags);
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              df_uses_record (dflow, &XVECEXP (x, i, j), ref_type,
                              bb, insn, flags);
          }
      }
  }
}

/* Return nonzero if *LOC contains an ASM_OPERANDS.  */

static int
df_insn_contains_asm_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  if (!*loc)
    return 0;
  if (GET_CODE (*loc) == ASM_OPERANDS)
    return 1;
  return 0;
}


/* Return true if INSN contains an ASM.  */

static int
df_insn_contains_asm (rtx insn)
{
  return for_each_rtx (&insn, df_insn_contains_asm_1, NULL);
}



/* Record all the refs for DF within INSN of basic block BB.  */

static void
df_insn_refs_record (struct dataflow *dflow, basic_block bb, rtx insn)
{
  int i;
  struct df *df = dflow->df;

  if (INSN_P (insn))
    {
      rtx note;

      if (df_insn_contains_asm (insn))
        DF_INSN_CONTAINS_ASM (df, insn) = true;

      /* Record register defs.  */
      df_defs_record (dflow, PATTERN (insn), bb, insn);

      if (df->flags & DF_EQUIV_NOTES)
        for (note = REG_NOTES (insn); note;
             note = XEXP (note, 1))
          {
            switch (REG_NOTE_KIND (note))
              {
              case REG_EQUIV:
              case REG_EQUAL:
                df_uses_record (dflow, &XEXP (note, 0), DF_REF_REG_USE,
                                bb, insn, DF_REF_IN_NOTE);
              default:
                break;
              }
          }

      if (CALL_P (insn))
        {
          rtx note;

          /* Record the registers used to pass arguments, and explicitly
             noted as clobbered.  */
          for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
               note = XEXP (note, 1))
            {
              if (GET_CODE (XEXP (note, 0)) == USE)
                df_uses_record (dflow, &XEXP (XEXP (note, 0), 0),
                                DF_REF_REG_USE,
                                bb, insn, 0);
              else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
                {
                  df_defs_record (dflow, XEXP (note, 0), bb, insn);
                  if (REG_P (XEXP (XEXP (note, 0), 0)))
                    {
                      rtx reg = XEXP (XEXP (note, 0), 0);
                      int regno_last;
                      int regno_first;
                      int i;

                      regno_last = regno_first = REGNO (reg);
                      if (regno_first < FIRST_PSEUDO_REGISTER)
                        regno_last
                          += hard_regno_nregs[regno_first][GET_MODE (reg)] - 1;
                      for (i = regno_first; i <= regno_last; i++)
                        regs_ever_live[i] = 1;
                    }
                }
            }

          /* The stack ptr is used (honorarily) by a CALL insn.  */
          df_uses_record (dflow, &regno_reg_rtx[STACK_POINTER_REGNUM],
                          DF_REF_REG_USE, bb, insn,
                          0);

          if (df->flags & DF_HARD_REGS)
            {
              bitmap_iterator bi;
              unsigned int ui;
              /* Calls may also reference any of the global registers,
                 so they are recorded as used.  */
              for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                if (global_regs[i])
                  df_uses_record (dflow, &regno_reg_rtx[i],
                                  DF_REF_REG_USE, bb, insn,
                                  0);
              EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
                df_ref_record (dflow, regno_reg_rtx[ui], &regno_reg_rtx[ui],
                               bb, insn,
                               DF_REF_REG_DEF, DF_REF_CLOBBER, false);
            }
        }

      /* Record the register uses.  */
      df_uses_record (dflow, &PATTERN (insn),
                      DF_REF_REG_USE, bb, insn, 0);

    }
}

/* Return true if any predecessor edge of BB is an EH edge.  */

static bool
df_has_eh_preds (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e->flags & EDGE_EH)
        return true;
    }
  return false;
}

/* Record all the refs within the basic block BB.  */

static void
df_bb_refs_record (struct dataflow *dflow, basic_block bb)
{
  struct df *df = dflow->df;
  rtx insn;
  int luid = 0;
  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (dflow, bb->index);

  /* Need to make sure that there is a record in the basic block info.  */
  if (!bb_info)
    {
      bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
      df_scan_set_bb_info (dflow, bb->index, bb_info);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  /* Scan the block an insn at a time from beginning to end.  */
  FOR_BB_INSNS (bb, insn)
    {
      df_insn_create_insn_record (dflow, insn);
      if (INSN_P (insn))
        {
          /* Record the refs within INSN.  */
          DF_INSN_LUID (df, insn) = luid++;
          df_insn_refs_record (dflow, bb, insn);
        }
      DF_INSN_LUID (df, insn) = luid;
    }

#ifdef EH_RETURN_DATA_REGNO
  if ((df->flags & DF_HARD_REGS)
      && df_has_eh_preds (bb))
    {
      unsigned int i;
      /* Mark the registers that will contain data for the handler.  */
      for (i = 0; ; ++i)
        {
          unsigned regno = EH_RETURN_DATA_REGNO (i);
          if (regno == INVALID_REGNUM)
            break;
          df_ref_record (dflow, regno_reg_rtx[regno], &regno_reg_rtx[regno],
                         bb, NULL,
                         DF_REF_REG_DEF, DF_REF_ARTIFICIAL | DF_REF_AT_TOP,
                         false);
        }
    }
#endif


  if ((df->flags & DF_HARD_REGS)
      && df_has_eh_preds (bb))
    {
#ifdef EH_USES
      unsigned int i;
      /* This code is putting in an artificial ref for the use at the
         TOP of the block that receives the exception.  It is too
         cumbersome to actually put the ref on the edge.  We could
         either model this at the top of the receiver block or the
         bottom of the sender block.

         The bottom of the sender block is problematic because not all
         out-edges of a block are eh-edges.  However, it is true
         that all edges into a block are either eh-edges or none of
         them are eh-edges.  Thus, we can model this at the top of the
         eh-receiver for all of the edges at once.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (EH_USES (i))
          df_uses_record (dflow, &regno_reg_rtx[i],
                          DF_REF_REG_USE, bb, NULL,
                          DF_REF_ARTIFICIAL | DF_REF_AT_TOP);
#endif

      /* The following code (down through the arg_pointer setting)
         appears to be necessary because there is nothing that
         actually describes what the exception handling code may
         actually need to keep alive.  */
      if (reload_completed)
        {
          if (frame_pointer_needed)
            {
              df_uses_record (dflow, &regno_reg_rtx[FRAME_POINTER_REGNUM],
                              DF_REF_REG_USE, bb, NULL, DF_REF_ARTIFICIAL);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
              df_uses_record (dflow, &regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
                              DF_REF_REG_USE, bb, NULL, DF_REF_ARTIFICIAL);
#endif
            }
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
          if (fixed_regs[ARG_POINTER_REGNUM])
            df_uses_record (dflow, &regno_reg_rtx[ARG_POINTER_REGNUM],
                            DF_REF_REG_USE, bb, NULL,
                            DF_REF_ARTIFICIAL);
#endif
        }
    }

  if ((df->flags & DF_HARD_REGS)
      && bb->index >= NUM_FIXED_BLOCKS)
    {
      /* Before reload, there are a few registers that must be forced
         live everywhere -- which might not already be the case for
         blocks within infinite loops.  */
      if (! reload_completed)
        {
          /* Any reference to any pseudo before reload is a potential
             reference of the frame pointer.  */
          df_uses_record (dflow, &regno_reg_rtx[FRAME_POINTER_REGNUM],
                          DF_REF_REG_USE, bb, NULL, DF_REF_ARTIFICIAL);

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
          /* Pseudos with argument area equivalences may require
             reloading via the argument pointer.  */
          if (fixed_regs[ARG_POINTER_REGNUM])
            df_uses_record (dflow, &regno_reg_rtx[ARG_POINTER_REGNUM],
                            DF_REF_REG_USE, bb, NULL,
                            DF_REF_ARTIFICIAL);
#endif

          /* Any constant, or pseudo with constant equivalences, may
             require reloading from memory using the pic register.  */
          if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
              && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
            df_uses_record (dflow, &regno_reg_rtx[PIC_OFFSET_TABLE_REGNUM],
                            DF_REF_REG_USE, bb, NULL,
                            DF_REF_ARTIFICIAL);
        }
      /* The all-important stack pointer must always be live.  */
      df_uses_record (dflow, &regno_reg_rtx[STACK_POINTER_REGNUM],
                      DF_REF_REG_USE, bb, NULL, DF_REF_ARTIFICIAL);
    }
}


/* Record all the refs in the basic blocks specified by BLOCKS.  */

static void
df_refs_record (struct dataflow *dflow, bitmap blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      df_bb_refs_record (dflow, bb);
    }

  if (bitmap_bit_p (blocks, EXIT_BLOCK))
    df_record_exit_block_uses (dflow);

  if (bitmap_bit_p (blocks, ENTRY_BLOCK))
    df_record_entry_block_defs (dflow);
}


/*----------------------------------------------------------------------------
   Specialized hard register scanning functions.
----------------------------------------------------------------------------*/

/* Mark a register in SET.  Hard registers in large modes get all
   of their component registers set as well.  */

static void
df_mark_reg (rtx reg, void *vset)
{
  bitmap set = (bitmap) vset;
  int regno = REGNO (reg);

  gcc_assert (GET_MODE (reg) != BLKmode);

  bitmap_set_bit (set, regno);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      int n = hard_regno_nregs[regno][GET_MODE (reg)];
      while (--n > 0)
        bitmap_set_bit (set, regno + n);
    }
}
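
/* For example, on a 32-bit target where a DImode value occupies two
   hard registers, df_mark_reg on (reg:DI 4) sets bits 4 and 5, since
   hard_regno_nregs[4][DImode] is 2 there.  */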


/* Record the (conservative) set of hard registers that are defined on
   entry to the function.  */

static void
df_record_entry_block_defs (struct dataflow *dflow)
{
  unsigned int i;
  bitmap_iterator bi;
  rtx r;
  struct df *df = dflow->df;

  bitmap_clear (df->entry_block_defs);

  if (! (df->flags & DF_HARD_REGS))
    return;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (FUNCTION_ARG_REGNO_P (i))
#ifdef INCOMING_REGNO
        bitmap_set_bit (df->entry_block_defs, INCOMING_REGNO (i));
#else
        bitmap_set_bit (df->entry_block_defs, i);
#endif
    }

  /* Once the prologue has been generated, all of these registers
     should just show up in the first regular block.  */
  if (HAVE_prologue && epilogue_completed)
    {
      /* Defs for the callee saved registers are inserted so that the
         pushes have some defining location.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if ((call_used_regs[i] == 0) && (regs_ever_live[i]))
          bitmap_set_bit (df->entry_block_defs, i);
    }
  else
    {
#ifdef INCOMING_RETURN_ADDR_RTX
      if (REG_P (INCOMING_RETURN_ADDR_RTX))
        bitmap_set_bit (df->entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
#endif

      /* If STATIC_CHAIN_INCOMING_REGNUM == STATIC_CHAIN_REGNUM
         only STATIC_CHAIN_REGNUM is defined.  If they are different,
         we only care about the STATIC_CHAIN_INCOMING_REGNUM.  */
#ifdef STATIC_CHAIN_INCOMING_REGNUM
      bitmap_set_bit (df->entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
#else
#ifdef STATIC_CHAIN_REGNUM
      bitmap_set_bit (df->entry_block_defs, STATIC_CHAIN_REGNUM);
#endif
#endif

      r = TARGET_STRUCT_VALUE_RTX (current_function_decl, true);
      if (r && REG_P (r))
        bitmap_set_bit (df->entry_block_defs, REGNO (r));
    }

  /* These registers are live everywhere.  */
  if (!reload_completed)
    {
      /* Any reference to any pseudo before reload is a potential
         reference of the frame pointer.  */
      bitmap_set_bit (df->entry_block_defs, FRAME_POINTER_REGNUM);

#ifdef EH_USES
      /* The ia-64, the only machine that uses this, does not define these
         until after reload.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (EH_USES (i))
          {
            bitmap_set_bit (df->entry_block_defs, i);
          }
#endif

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      /* Pseudos with argument area equivalences may require
         reloading via the argument pointer.  */
      if (fixed_regs[ARG_POINTER_REGNUM])
        bitmap_set_bit (df->entry_block_defs, ARG_POINTER_REGNUM);
#endif

#ifdef PIC_OFFSET_TABLE_REGNUM
      /* Any constant, or pseudo with constant equivalences, may
         require reloading from memory using the pic register.  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        bitmap_set_bit (df->entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
#endif
    }

  (*targetm.live_on_entry) (df->entry_block_defs);

  EXECUTE_IF_SET_IN_BITMAP (df->entry_block_defs, 0, i, bi)
    {
      df_ref_record (dflow, regno_reg_rtx[i], &regno_reg_rtx[i],
                     ENTRY_BLOCK_PTR, NULL,
                     DF_REF_REG_DEF, DF_REF_ARTIFICIAL, false);
    }
}


/* Record the set of hard registers that are used in the exit block.  */

static void
df_record_exit_block_uses (struct dataflow *dflow)
{
  unsigned int i;
  bitmap_iterator bi;
  struct df *df = dflow->df;

  bitmap_clear (df->exit_block_uses);

  if (! (df->flags & DF_HARD_REGS))
    return;

  /* If exiting needs the right stack value, consider the stack
     pointer live at the end of the function.  */
  if ((HAVE_epilogue && epilogue_completed)
      || ! EXIT_IGNORE_STACK
      || (! FRAME_POINTER_REQUIRED
          && ! current_function_calls_alloca
          && flag_omit_frame_pointer)
      || current_function_sp_is_unchanging)
    {
      bitmap_set_bit (df->exit_block_uses, STACK_POINTER_REGNUM);
    }

  /* Mark the frame pointer if needed at the end of the function.
     If we end up eliminating it, it will be removed from the live
     list of each basic block by reload.  */

  if (! reload_completed || frame_pointer_needed)
    {
      bitmap_set_bit (df->exit_block_uses, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
        bitmap_set_bit (df->exit_block_uses, HARD_FRAME_POINTER_REGNUM);
#endif
    }

#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
    bitmap_set_bit (df->exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
#endif

  /* Mark all global registers, and all registers used by the
     epilogue as being live at the end of the function since they
     may be referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      bitmap_set_bit (df->exit_block_uses, i);

  if (HAVE_epilogue && epilogue_completed)
    {
      /* Mark all call-saved registers that we actually used.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (regs_ever_live[i] && ! LOCAL_REGNO (i)
            && ! TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
          bitmap_set_bit (df->exit_block_uses, i);
    }

#ifdef EH_RETURN_DATA_REGNO
  /* Mark the registers that will contain data for the handler.  */
  if (reload_completed && current_function_calls_eh_return)
    for (i = 0; ; ++i)
      {
        unsigned regno = EH_RETURN_DATA_REGNO (i);
        if (regno == INVALID_REGNUM)
          break;
        bitmap_set_bit (df->exit_block_uses, regno);
      }
#endif

#ifdef EH_RETURN_STACKADJ_RTX
  if ((! HAVE_epilogue || ! epilogue_completed)
      && current_function_calls_eh_return)
    {
      rtx tmp = EH_RETURN_STACKADJ_RTX;
      if (tmp && REG_P (tmp))
        df_mark_reg (tmp, df->exit_block_uses);
    }
#endif

#ifdef EH_RETURN_HANDLER_RTX
  if ((! HAVE_epilogue || ! epilogue_completed)
      && current_function_calls_eh_return)
    {
      rtx tmp = EH_RETURN_HANDLER_RTX;
      if (tmp && REG_P (tmp))
        df_mark_reg (tmp, df->exit_block_uses);
    }
#endif

  /* Mark the function return value.  */
  diddle_return_value (df_mark_reg, (void*) df->exit_block_uses);

  if (df->flags & DF_HARD_REGS)
    EXECUTE_IF_SET_IN_BITMAP (df->exit_block_uses, 0, i, bi)
      df_uses_record (dflow, &regno_reg_rtx[i],
                      DF_REF_REG_USE, EXIT_BLOCK_PTR, NULL,
                      DF_REF_ARTIFICIAL);
}

static bool initialized = false;

/* Initialize some platform specific structures.  */

void
df_hard_reg_init (void)
{
  int i;
#ifdef ELIMINABLE_REGS
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
  /* After reload, some ports add certain bits to regs_ever_live so
     this cannot be reset.  */

  if (!reload_completed)
    memset (regs_ever_live, 0, sizeof (regs_ever_live));

  if (initialized)
    return;

  bitmap_obstack_initialize (&persistent_obstack);

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */
  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif

  df_invalidated_by_call = BITMAP_ALLOC (&persistent_obstack);

  /* Inconveniently, this is only readily available in hard reg set
     form.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
      bitmap_set_bit (df_invalidated_by_call, i);

  initialized = true;
}