/* Scanning of rtl for dataflow analysis.
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.
   Originally contributed by Michael P. Hayes
             (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
   Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
             and Kenneth Zadeck (zadeck@naturalbridge.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "regs.h"
#include "output.h"
#include "alloc-pool.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "sbitmap.h"
#include "bitmap.h"
#include "timevar.h"
#include "tree.h"
#include "target.h"
#include "target-def.h"
#include "df.h"
#include "tree-pass.h"

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO)  0
#endif

/* The bitmap_obstack is used to hold some static variables that
   should not be reset after each function is compiled.  */

static bitmap_obstack persistent_obstack;

/* The set of hard registers in eliminables[i].from.  */

static HARD_REG_SET elim_reg_set;

/* This is a bitmap copy of regs_invalidated_by_call so that we can
   easily add it into bitmaps, etc.  */

bitmap df_invalidated_by_call = NULL;

/* Temporary vectors used to collect the refs found in an insn or
   basic block before they are installed in the permanent chains.  */

struct df_collection_rec
{
  struct df_ref **def_vec;
  unsigned int next_def;
  struct df_ref **use_vec;
  unsigned int next_use;
  struct df_ref **eq_use_vec;
  unsigned int next_eq_use;
  struct df_mw_hardreg **mw_vec;
  unsigned int next_mw;
};

static struct df_ref *df_null_ref_rec[1];
static struct df_mw_hardreg *df_null_mw_rec[1];

static void df_ref_record (struct df_collection_rec *,
                           rtx, rtx *,
                           basic_block, rtx, enum df_ref_type,
                           enum df_ref_flags);
static void df_def_record_1 (struct df_collection_rec *,
                             rtx, basic_block, rtx,
                             enum df_ref_flags);
static void df_defs_record (struct df_collection_rec *,
                            rtx, basic_block, rtx,
                            enum df_ref_flags);
static void df_uses_record (struct df_collection_rec *,
                            rtx *, enum df_ref_type,
                            basic_block, rtx, enum df_ref_flags);

static struct df_ref *df_ref_create_structure (struct df_collection_rec *, rtx, rtx *,
                                               basic_block, rtx, enum df_ref_type,
                                               enum df_ref_flags);

static void df_insn_refs_collect (struct df_collection_rec *,
                                  basic_block, rtx);
static void df_canonize_collection_rec (struct df_collection_rec *);

static void df_get_regular_block_artificial_uses (bitmap);
static void df_get_eh_block_artificial_uses (bitmap);

static void df_record_entry_block_defs (bitmap);
static void df_record_exit_block_uses (bitmap);
static void df_get_exit_block_use_set (bitmap);
static void df_get_entry_block_def_set (bitmap);
static void df_grow_ref_info (struct df_ref_info *, unsigned int);
static void df_ref_chain_delete_du_chain (struct df_ref **);
static void df_ref_chain_delete (struct df_ref **);

static void df_refs_add_to_chains (struct df_collection_rec *,
                                   basic_block, rtx);

static bool df_insn_refs_verify (struct df_collection_rec *, basic_block, rtx, bool);
static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
static void df_install_ref (struct df_ref *, struct df_reg_info *,
                            struct df_ref_info *, bool);

static int df_ref_compare (const void *, const void *);
static int df_mw_compare (const void *, const void *);

/* Indexed by hardware reg number, is true if that register is ever
   used in the current function.

   In df-scan.c, this is set up to record the hard regs used
   explicitly.  Reload adds in the hard regs used for holding pseudo
   regs.  Final uses it to generate the code in the function prologue
   and epilogue to save and restore registers as needed.  */

static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
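/* Illustrative sketch (not part of the original source): passes never
   touch the regs_ever_live array directly; they go through the
   accessors defined later in this file, roughly

     if (df_regs_ever_live_p (REGNO (reg)))     query a hard reg
       ...
     df_set_regs_ever_live (REGNO (reg), true); mark a hard reg live

   The accessor names are the real df API; the surrounding fragment is
   hypothetical.  */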
\f
/*----------------------------------------------------------------------------
   SCANNING DATAFLOW PROBLEM

   There are several ways in which scanning looks just like the other
   dataflow problems.  It shares all the mechanisms for local info
   as well as basic block info.  Where it differs is when and how often
   it gets run.  It also has no need for the iterative solver.
----------------------------------------------------------------------------*/

/* Problem data for the scanning dataflow function.  */
struct df_scan_problem_data
{
  alloc_pool ref_pool;
  alloc_pool insn_pool;
  alloc_pool reg_pool;
  alloc_pool mw_reg_pool;
  alloc_pool mw_link_pool;
  bitmap_obstack reg_bitmaps;
  bitmap_obstack insn_bitmaps;
};

typedef struct df_scan_bb_info *df_scan_bb_info_t;

static void
df_scan_free_internal (void)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  free (df->def_info.refs);
  free (df->def_info.begin);
  free (df->def_info.count);
  memset (&df->def_info, 0, (sizeof (struct df_ref_info)));

  free (df->use_info.refs);
  free (df->use_info.begin);
  free (df->use_info.count);
  memset (&df->use_info, 0, (sizeof (struct df_ref_info)));

  free (df->def_regs);
  df->def_regs = NULL;
  free (df->use_regs);
  df->use_regs = NULL;
  free (df->eq_use_regs);
  df->eq_use_regs = NULL;
  df->regs_size = 0;
  DF_REG_SIZE(df) = 0;

  free (df->insns);
  df->insns = NULL;
  DF_INSN_SIZE () = 0;

  free (df_scan->block_info);
  df_scan->block_info = NULL;
  df_scan->block_info_size = 0;

  BITMAP_FREE (df->hardware_regs_used);
  BITMAP_FREE (df->regular_block_artificial_uses);
  BITMAP_FREE (df->eh_block_artificial_uses);
  BITMAP_FREE (df->entry_block_defs);
  BITMAP_FREE (df->exit_block_uses);
  BITMAP_FREE (df->insns_to_delete);
  BITMAP_FREE (df->insns_to_rescan);
  BITMAP_FREE (df->insns_to_notes_rescan);

  free_alloc_pool (df_scan->block_pool);
  free_alloc_pool (problem_data->ref_pool);
  free_alloc_pool (problem_data->insn_pool);
  free_alloc_pool (problem_data->reg_pool);
  free_alloc_pool (problem_data->mw_reg_pool);
  free_alloc_pool (problem_data->mw_link_pool);
  bitmap_obstack_release (&problem_data->reg_bitmaps);
  bitmap_obstack_release (&problem_data->insn_bitmaps);
  free (df_scan->problem_data);
}


/* Set basic block info.  */

static void
df_scan_set_bb_info (unsigned int index,
                     struct df_scan_bb_info *bb_info)
{
  gcc_assert (df_scan);
  df_grow_bb_info (df_scan);
  df_scan->block_info[index] = (void *) bb_info;
}


/* Free basic block info.  */

static void
df_scan_free_bb_info (basic_block bb, void *vbb_info)
{
  struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
  unsigned int bb_index = bb->index;
  if (bb_info)
    {
      rtx insn;
      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            /* Delete the refs recorded for INSN.  */
            df_insn_delete (bb, INSN_UID (insn));
        }

      if (bb_index < df_scan->block_info_size)
        bb_info = df_scan_get_bb_info (bb_index);

      /* Get rid of any artificial uses or defs.  */
      df_ref_chain_delete_du_chain (bb_info->artificial_defs);
      df_ref_chain_delete_du_chain (bb_info->artificial_uses);
      df_ref_chain_delete (bb_info->artificial_defs);
      df_ref_chain_delete (bb_info->artificial_uses);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
      pool_free (df_scan->block_pool, bb_info);
    }
}


/* Allocate the problem data for the scanning problem.  This should be
   called when the problem is created or when the entire function is to
   be rescanned.  */
void
df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
  struct df_scan_problem_data *problem_data;
  unsigned int insn_num = get_max_uid () + 1;
  unsigned int block_size = 400;
  basic_block bb;

  /* Given the number of pools, this is really faster than tearing
     everything apart.  */
  if (df_scan->problem_data)
    df_scan_free_internal ();

  df_scan->block_pool
    = create_alloc_pool ("df_scan_block pool",
                         sizeof (struct df_scan_bb_info),
                         block_size);

  problem_data = XNEW (struct df_scan_problem_data);
  df_scan->problem_data = problem_data;
  df_scan->computed = true;

  problem_data->ref_pool
    = create_alloc_pool ("df_scan_ref pool",
                         sizeof (struct df_ref), block_size);
  problem_data->insn_pool
    = create_alloc_pool ("df_scan_insn pool",
                         sizeof (struct df_insn_info), block_size);
  problem_data->reg_pool
    = create_alloc_pool ("df_scan_reg pool",
                         sizeof (struct df_reg_info), block_size);
  problem_data->mw_reg_pool
    = create_alloc_pool ("df_scan_mw_reg pool",
                         sizeof (struct df_mw_hardreg), block_size);
  problem_data->mw_link_pool
    = create_alloc_pool ("df_scan_mw_link pool",
                         sizeof (struct df_link), block_size);

  bitmap_obstack_initialize (&problem_data->reg_bitmaps);
  bitmap_obstack_initialize (&problem_data->insn_bitmaps);

  insn_num += insn_num / 4;
  df_grow_reg_info ();

  df_grow_insn_info ();
  df_grow_bb_info (df_scan);

  FOR_ALL_BB (bb)
    {
      unsigned int bb_index = bb->index;
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
      if (!bb_info)
        {
          bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
          df_scan_set_bb_info (bb_index, bb_info);
        }
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  df->hardware_regs_used = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->regular_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->eh_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->insns_to_delete = BITMAP_ALLOC (&problem_data->insn_bitmaps);
  df->insns_to_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
  df->insns_to_notes_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
  df_scan->optional_p = false;
}


/* Free all of the data associated with the scan problem.  */

static void
df_scan_free (void)
{
  if (df_scan->problem_data)
    df_scan_free_internal ();

  if (df->blocks_to_analyze)
    {
      BITMAP_FREE (df->blocks_to_analyze);
      df->blocks_to_analyze = NULL;
    }

  free (df_scan);
}

/* Dump the preamble for DF_SCAN dump.  */
static void
df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
{
  int i;

  fprintf (file, ";;  invalidated by call \t");
  df_print_regset (file, df_invalidated_by_call);
  fprintf (file, ";;  hardware regs used \t");
  df_print_regset (file, df->hardware_regs_used);
  fprintf (file, ";;  regular block artificial uses \t");
  df_print_regset (file, df->regular_block_artificial_uses);
  fprintf (file, ";;  eh block artificial uses \t");
  df_print_regset (file, df->eh_block_artificial_uses);
  fprintf (file, ";;  entry block defs \t");
  df_print_regset (file, df->entry_block_defs);
  fprintf (file, ";;  exit block uses \t");
  df_print_regset (file, df->exit_block_uses);
  fprintf (file, ";;  regs ever live \t");
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (df_regs_ever_live_p (i))
      fprintf (file, " %d[%s]", i, reg_names[i]);

  fprintf (file, "\n");
}

/* Dump the bb_info for a given basic block.  */
static void
df_scan_start_block (basic_block bb, FILE *file)
{
  struct df_scan_bb_info *bb_info
    = df_scan_get_bb_info (bb->index);

  if (bb_info)
    {
      fprintf (file, ";; bb %d artificial_defs: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_defs, true, file);
      fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_uses, true, file);
      fprintf (file, "\n");
    }
#if 0
  {
    rtx insn;
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
        df_insn_debug (insn, false, file);
  }
#endif
}

static struct df_problem problem_SCAN =
{
  DF_SCAN,                    /* Problem id.  */
  DF_NONE,                    /* Direction.  */
  df_scan_alloc,              /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_scan_free_bb_info,       /* Free basic block info.  */
  NULL,                       /* Local compute function.  */
  NULL,                       /* Init the solution specific data.  */
  NULL,                       /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  NULL,                       /* Confluence operator n.  */
  NULL,                       /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_scan_free,               /* Free all of the problem information.  */
  NULL,                       /* Remove this problem from the stack of dataflow problems.  */
  df_scan_start_dump,         /* Debugging.  */
  df_scan_start_block,        /* Debugging start block.  */
  NULL,                       /* Debugging end block.  */
  NULL,                       /* Incremental solution verify start.  */
  NULL,                       /* Incremental solution verify end.  */
  NULL,                       /* Dependent problem.  */
  TV_DF_SCAN,                 /* Timing variable.  */
  false                       /* Reset blocks on dropping out of blocks_to_analyze.  */
};


/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

void
df_scan_add_problem (void)
{
  df_add_problem (&problem_SCAN);
}

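/* Illustrative sketch (not part of the original source): the scan
   problem is registered once at df initialization, before any other
   problem, since every other problem consumes the refs it builds:

     df_scan_add_problem ();
     df_scan_alloc (NULL);
     df_scan_blocks ();

   This mirrors the call order used by rest_of_handle_df_initialize in
   df-core.c; treat it as a sketch of the sequencing, not a drop-in
   pass body.  */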
\f
/*----------------------------------------------------------------------------
   Storage Allocation Utilities
----------------------------------------------------------------------------*/


/* First, grow the reg_info information.  If the current size is less than
   the number of pseudos, grow to 25% more than the number of
   pseudos.

   Second, assure that all of the slots up to max_reg_num have been
   filled with reg_info structures.  */

void
df_grow_reg_info (void)
{
  unsigned int max_reg = max_reg_num ();
  unsigned int new_size = max_reg;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;

  if (df->regs_size < new_size)
    {
      new_size += new_size / 4;
      df->def_regs = xrealloc (df->def_regs,
                               new_size * sizeof (struct df_reg_info*));
      df->use_regs = xrealloc (df->use_regs,
                               new_size * sizeof (struct df_reg_info*));
      df->eq_use_regs = xrealloc (df->eq_use_regs,
                                  new_size * sizeof (struct df_reg_info*));
      df->def_info.begin = xrealloc (df->def_info.begin,
                                     new_size * sizeof (int));
      df->def_info.count = xrealloc (df->def_info.count,
                                     new_size * sizeof (int));
      df->use_info.begin = xrealloc (df->use_info.begin,
                                     new_size * sizeof (int));
      df->use_info.count = xrealloc (df->use_info.count,
                                     new_size * sizeof (int));
      df->regs_size = new_size;
    }

  for (i = df->regs_inited; i < max_reg; i++)
    {
      struct df_reg_info *reg_info;

      reg_info = pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->def_regs[i] = reg_info;
      reg_info = pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->use_regs[i] = reg_info;
      reg_info = pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->eq_use_regs[i] = reg_info;
      df->def_info.begin[i] = 0;
      df->def_info.count[i] = 0;
      df->use_info.begin[i] = 0;
      df->use_info.count[i] = 0;
    }

  df->regs_inited = max_reg;
}


/* Grow the ref information.  */

static void
df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
{
  if (ref_info->refs_size < new_size)
    {
      ref_info->refs = xrealloc (ref_info->refs,
                                 new_size * sizeof (struct df_ref *));
      memset (ref_info->refs + ref_info->refs_size, 0,
              (new_size - ref_info->refs_size) * sizeof (struct df_ref *));
      ref_info->refs_size = new_size;
    }
}


/* Check and grow the ref information if necessary.  This routine
   guarantees total_size + BITMAP_ADDEND amount of entries in refs
   array.  It updates ref_info->refs_size only and does not change
   ref_info->total_size.  */

static void
df_check_and_grow_ref_info (struct df_ref_info *ref_info,
                            unsigned bitmap_addend)
{
  if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
    {
      int new_size = ref_info->total_size + bitmap_addend;
      new_size += ref_info->total_size / 4;
      df_grow_ref_info (ref_info, new_size);
    }
}


/* Grow the ref information.  If the current size is less than the
   number of instructions, grow to 25% more than the number of
   instructions.  */

void
df_grow_insn_info (void)
{
  unsigned int new_size = get_max_uid () + 1;
  if (DF_INSN_SIZE () < new_size)
    {
      new_size += new_size / 4;
      df->insns = xrealloc (df->insns,
                            new_size * sizeof (struct df_insn_info *));
      memset (df->insns + df->insns_size, 0,
              (new_size - DF_INSN_SIZE ()) * sizeof (struct df_insn_info *));
      DF_INSN_SIZE () = new_size;
    }
}



\f
/*----------------------------------------------------------------------------
   PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
----------------------------------------------------------------------------*/

/* Rescan all of the blocks_to_analyze, or all of the blocks in the
   function if blocks_to_analyze is NULL.  */

void
df_scan_blocks (void)
{
  basic_block bb;

  df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
  df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;

  df_get_regular_block_artificial_uses (df->regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (df->eh_block_artificial_uses);

  bitmap_ior_into (df->eh_block_artificial_uses,
                   df->regular_block_artificial_uses);

  /* ENTRY and EXIT blocks have special defs/uses.  */
  df_get_entry_block_def_set (df->entry_block_defs);
  df_record_entry_block_defs (df->entry_block_defs);
  df_get_exit_block_use_set (df->exit_block_uses);
  df_record_exit_block_uses (df->exit_block_uses);
  df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
  df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));

  /* Regular blocks */
  FOR_EACH_BB (bb)
    {
      unsigned int bb_index = bb->index;
      df_bb_refs_record (bb_index, true);
    }
}

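/* Illustrative sketch (not part of the original source): a full rescan
   of the current function is a rebuild of the problem data followed by
   the call above, e.g.

     df_scan_alloc (NULL);
     df_scan_blocks ();

   df_scan_alloc tears down and rebuilds the pools; df_scan_blocks then
   re-records every ref.  This is a sketch of the calling convention;
   ordinary passes should rely on the incremental rescanning interfaces
   below instead.  */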

/* Create a new ref of type DF_REF_TYPE for register REG at address
   LOC within INSN of BB.  */

struct df_ref *
df_ref_create (rtx reg, rtx *loc, rtx insn,
               basic_block bb,
               enum df_ref_type ref_type,
               enum df_ref_flags ref_flags)
{
  struct df_ref *ref;
  struct df_reg_info **reg_info;
  struct df_ref_info *ref_info;
  struct df_ref **ref_rec;
  struct df_ref ***ref_rec_ptr;
  unsigned int count = 0;
  bool add_to_table;

  df_grow_reg_info ();

  /* You cannot hack artificial refs.  */
  gcc_assert (insn);
  ref = df_ref_create_structure (NULL, reg, loc, bb, insn,
                                 ref_type, ref_flags);

  if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
    {
      reg_info = df->def_regs;
      ref_info = &df->def_info;
      ref_rec_ptr = &DF_INSN_DEFS (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }
  else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
    {
      reg_info = df->eq_use_regs;
      ref_info = &df->use_info;
      ref_rec_ptr = &DF_INSN_EQ_USES (insn);
      switch (ref_info->ref_order)
        {
        case DF_REF_ORDER_UNORDERED_WITH_NOTES:
        case DF_REF_ORDER_BY_REG_WITH_NOTES:
        case DF_REF_ORDER_BY_INSN_WITH_NOTES:
          add_to_table = true;
          break;
        default:
          add_to_table = false;
          break;
        }
    }
  else
    {
      reg_info = df->use_regs;
      ref_info = &df->use_info;
      ref_rec_ptr = &DF_INSN_USES (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }

  /* Do not add if ref is not in the right blocks.  */
  if (add_to_table && df->analyze_subset)
    add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

  df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);

  if (add_to_table)
    switch (ref_info->ref_order)
      {
      case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      case DF_REF_ORDER_BY_REG_WITH_NOTES:
      case DF_REF_ORDER_BY_INSN_WITH_NOTES:
        ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
        break;
      default:
        ref_info->ref_order = DF_REF_ORDER_UNORDERED;
        break;
      }

  ref_rec = *ref_rec_ptr;
  while (*ref_rec)
    {
      count++;
      ref_rec++;
    }

  ref_rec = *ref_rec_ptr;
  if (count)
    {
      ref_rec = xrealloc (ref_rec, (count+2) * sizeof (struct df_ref*));
      *ref_rec_ptr = ref_rec;
      ref_rec[count] = ref;
      ref_rec[count+1] = NULL;
      qsort (ref_rec, count + 1, sizeof (struct df_ref *), df_ref_compare);
    }
  else
    {
      struct df_ref **ref_rec = XNEWVEC (struct df_ref*, 2);
      ref_rec[0] = ref;
      ref_rec[1] = NULL;
      *ref_rec_ptr = ref_rec;
    }

#if 0
  if (dump_file)
    {
      fprintf (dump_file, "adding ref ");
      df_ref_debug (ref, dump_file);
    }
#endif
  /* By adding the ref directly, df_insn_rescan may not find any
     differences even though the block will have changed.  So we need
     to mark the block dirty ourselves.  */
  df_set_bb_dirty (bb);

  return ref;
}

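/* Illustrative sketch (not part of the original source): a pass that
   introduces a new use of register REG inside the pattern of INSN
   would register it with something like

     df_ref_create (reg, loc, insn, BLOCK_FOR_INSN (insn),
                    DF_REF_REG_USE, 0);

   where LOC points at the slot in the pattern holding REG.  The
   function, type and flag names are the real df API; REG, LOC and
   INSN are hypothetical.  */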

\f
/*----------------------------------------------------------------------------
   UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
----------------------------------------------------------------------------*/


/* Unlink and delete REF at the reg_use, reg_eq_use or reg_def chain.
   Also delete the def-use or use-def chain if it exists.  */

static void
df_reg_chain_unlink (struct df_ref *ref)
{
  struct df_ref *next = DF_REF_NEXT_REG (ref);
  struct df_ref *prev = DF_REF_PREV_REG (ref);
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  int id = DF_REF_ID (ref);
  struct df_reg_info *reg_info;
  struct df_ref **refs = NULL;

  if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
    {
      reg_info = DF_REG_DEF_GET (DF_REF_REGNO (ref));
      refs = df->def_info.refs;
    }
  else
    {
      if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
        {
          reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
          switch (df->use_info.ref_order)
            {
            case DF_REF_ORDER_UNORDERED_WITH_NOTES:
            case DF_REF_ORDER_BY_REG_WITH_NOTES:
            case DF_REF_ORDER_BY_INSN_WITH_NOTES:
              refs = df->use_info.refs;
              break;
            default:
              break;
            }
        }
      else
        {
          reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
          refs = df->use_info.refs;
        }
    }

  if (refs)
    {
      if (df->analyze_subset)
        {
          if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BB (ref)->index))
            refs[id] = NULL;
        }
      else
        refs[id] = NULL;
    }

  /* Delete any def-use or use-def chains that start here.  It is
     possible that there is trash in this field.  This happens for
     insns that have been deleted when rescanning has been deferred
     and the chain problem has also been deleted.  The chain tear down
     code skips deleted insns.  */
  if (df_chain && DF_REF_CHAIN (ref))
    df_chain_unlink (ref);

  reg_info->n_refs--;
  if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
    {
      gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
    }

  /* Unlink from the reg chain.  If there is no prev, this is the
     first of the list.  If not, just join the next and prev.  */
  if (prev)
    DF_REF_NEXT_REG (prev) = next;
  else
    {
      gcc_assert (reg_info->reg_chain == ref);
      reg_info->reg_chain = next;
    }
  if (next)
    DF_REF_PREV_REG (next) = prev;

  pool_free (problem_data->ref_pool, ref);
}


/* Remove REF from VEC.  */

static void
df_ref_compress_rec (struct df_ref ***vec_ptr, struct df_ref *ref)
{
  struct df_ref **vec = *vec_ptr;

  if (vec[1])
    {
      while (*vec && *vec != ref)
        vec++;

      while (*vec)
        {
          *vec = *(vec+1);
          vec++;
        }
    }
  else
    {
      free (vec);
      *vec_ptr = df_null_ref_rec;
    }
}


/* Unlink REF from all def-use/use-def chains, etc.  */

void
df_ref_remove (struct df_ref *ref)
{
#if 0
  if (dump_file)
    {
      fprintf (dump_file, "removing ref ");
      df_ref_debug (ref, dump_file);
    }
#endif

  if (DF_REF_REG_DEF_P (ref))
    {
      if (DF_REF_IS_ARTIFICIAL (ref))
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (DF_REF_BB (ref)->index);
          df_ref_compress_rec (&bb_info->artificial_defs, ref);
        }
      else
        {
          unsigned int uid = DF_REF_INSN_UID (ref);
          struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
          df_ref_compress_rec (&insn_rec->defs, ref);
        }
    }
  else
    {
      if (DF_REF_IS_ARTIFICIAL (ref))
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (DF_REF_BB (ref)->index);
          df_ref_compress_rec (&bb_info->artificial_uses, ref);
        }
      else
        {
          unsigned int uid = DF_REF_INSN_UID (ref);
          struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);

          if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
            df_ref_compress_rec (&insn_rec->eq_uses, ref);
          else
            df_ref_compress_rec (&insn_rec->uses, ref);
        }
    }

  /* By deleting the ref directly, df_insn_rescan may not find any
     differences even though the block will have changed.  So we need
     to mark the block dirty ourselves.  */
  df_set_bb_dirty (DF_REF_BB (ref));
  df_reg_chain_unlink (ref);
}


/* Create the insn record for INSN.  If there was one there, zero it
   out.  */

struct df_insn_info *
df_insn_create_insn_record (rtx insn)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  struct df_insn_info *insn_rec;

  df_grow_insn_info ();
  insn_rec = DF_INSN_GET (insn);
  if (!insn_rec)
    {
      insn_rec = pool_alloc (problem_data->insn_pool);
      DF_INSN_SET (insn, insn_rec);
    }
  memset (insn_rec, 0, sizeof (struct df_insn_info));
  insn_rec->insn = insn;
  return insn_rec;
}


/* Delete all du chain (DF_REF_CHAIN()) of all refs in the ref chain.  */

static void
df_ref_chain_delete_du_chain (struct df_ref **ref_rec)
{
  while (*ref_rec)
    {
      struct df_ref *ref = *ref_rec;
      /* CHAIN is allocated by DF_CHAIN.  So make sure to
         pass df_scan instance for the problem.  */
      if (DF_REF_CHAIN (ref))
        df_chain_unlink (ref);
      ref_rec++;
    }
}


/* Delete all refs in the ref chain.  */

static void
df_ref_chain_delete (struct df_ref **ref_rec)
{
  struct df_ref **start = ref_rec;
  while (*ref_rec)
    {
      df_reg_chain_unlink (*ref_rec);
      ref_rec++;
    }

  /* If the list is empty, it has a special shared element that is not
     to be deleted.  */
  if (*start)
    free (start);
}


/* Delete the hardreg chain.  */

static void
df_mw_hardreg_chain_delete (struct df_mw_hardreg **hardregs)
{
  struct df_scan_problem_data *problem_data;

  if (!hardregs)
    return;

  problem_data = (struct df_scan_problem_data *) df_scan->problem_data;

  while (*hardregs)
    {
      pool_free (problem_data->mw_reg_pool, *hardregs);
      hardregs++;
    }
}

/* Delete all of the refs information from INSN, and mark its block
   dirty.  BB must be passed in except when this is called from
   df_process_deferred_rescans, which passes NULL.  */

void
df_insn_delete (basic_block bb, unsigned int uid)
{
  struct df_insn_info *insn_info = NULL;
  if (!df)
    return;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  /* The block must be marked as dirty now, rather than later as in
     df_insn_rescan and df_notes_rescan because it may not be there at
     rescanning time and the mark would blow up.  */
  if (bb)
    df_set_bb_dirty (bb);

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (insn_info)
        {
          bitmap_clear_bit (df->insns_to_rescan, uid);
          bitmap_clear_bit (df->insns_to_notes_rescan, uid);
          bitmap_set_bit (df->insns_to_delete, uid);
        }
      if (dump_file)
        fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "deleting insn with uid = %d.\n", uid);

  bitmap_clear_bit (df->insns_to_delete, uid);
  bitmap_clear_bit (df->insns_to_rescan, uid);
  bitmap_clear_bit (df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      struct df_scan_problem_data *problem_data
        = (struct df_scan_problem_data *) df_scan->problem_data;

      /* In general, notes do not have the insn_info fields
         initialized.  However, combine deletes insns by changing them
         to notes.  How clever.  So we cannot just check if it is a
         valid insn before short circuiting this code, we need to see
         if we actually initialized it.  */
      if (insn_info->defs)
        {
          df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

          if (df_chain)
            {
              df_ref_chain_delete_du_chain (insn_info->defs);
              df_ref_chain_delete_du_chain (insn_info->uses);
              df_ref_chain_delete_du_chain (insn_info->eq_uses);
            }

          df_ref_chain_delete (insn_info->defs);
          df_ref_chain_delete (insn_info->uses);
          df_ref_chain_delete (insn_info->eq_uses);
        }
      pool_free (problem_data->insn_pool, insn_info);
      DF_INSN_UID_SET (uid, NULL);
    }
}

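/* Illustrative sketch (not part of the original source): code that
   takes an insn out of the stream lets df know while the insn's block
   is still known, along the lines of

     df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
     ... then unlink INSN from the insn chain ...

   In practice the rtl-level deletion routines arrange for this call
   themselves; the ordering shown is the convention being described,
   not a quote from a specific caller.  */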

/* Free all of the refs and the mw_hardregs in COLLECTION_REC.  */

static void
df_free_collection_rec (struct df_collection_rec *collection_rec)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  struct df_ref **ref;
  struct df_mw_hardreg **mw;

  if (collection_rec->def_vec)
    for (ref = collection_rec->def_vec; *ref; ref++)
      pool_free (problem_data->ref_pool, *ref);
  if (collection_rec->use_vec)
    for (ref = collection_rec->use_vec; *ref; ref++)
      pool_free (problem_data->ref_pool, *ref);
  if (collection_rec->eq_use_vec)
    for (ref = collection_rec->eq_use_vec; *ref; ref++)
      pool_free (problem_data->ref_pool, *ref);
  if (collection_rec->mw_vec)
    for (mw = collection_rec->mw_vec; *mw; mw++)
      pool_free (problem_data->mw_reg_pool, *mw);
}


/* Rescan INSN.  Return TRUE if the rescanning produced any changes.  */

bool
df_insn_rescan (rtx insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = NULL;
  basic_block bb = BLOCK_FOR_INSN (insn);
  struct df_collection_rec collection_rec;
  collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
  collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
  collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
  collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);

  if ((!df) || (!INSN_P (insn)))
    return false;

  if (!bb)
    {
      if (dump_file)
        fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
      return false;
    }

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return false;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
        {
          insn_info = df_insn_create_insn_record (insn);
          insn_info->defs = df_null_ref_rec;
          insn_info->uses = df_null_ref_rec;
          insn_info->eq_uses = df_null_ref_rec;
          insn_info->mw_hardregs = df_null_mw_rec;
        }
      if (dump_file)
        fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);

      bitmap_clear_bit (df->insns_to_delete, uid);
      bitmap_clear_bit (df->insns_to_notes_rescan, uid);
      bitmap_set_bit (df->insns_to_rescan, INSN_UID (insn));
      return false;
    }

  bitmap_clear_bit (df->insns_to_delete, uid);
  bitmap_clear_bit (df->insns_to_rescan, uid);
  bitmap_clear_bit (df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
      /* If there's no change, return false.  */
      if (the_same)
        {
          df_free_collection_rec (&collection_rec);
          if (dump_file)
            fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
          return false;
        }
      if (dump_file)
        fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);

      /* There's change - we need to delete the existing info.  */
      df_insn_delete (NULL, uid);
      df_insn_create_insn_record (insn);
    }
  else
    {
      df_insn_create_insn_record (insn);
      df_insn_refs_collect (&collection_rec, bb, insn);
      if (dump_file)
        fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
    }

  df_refs_add_to_chains (&collection_rec, bb, insn);
  df_set_bb_dirty (bb);
  return true;
}

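/* Illustrative sketch (not part of the original source): after changing
   the pattern of an insn in place, a pass brings the refs back in sync
   with one call, e.g.

     PATTERN (insn) = new_pat;
     df_insn_rescan (insn);

   new_pat is hypothetical; df_insn_rescan is the real entry point and
   honors the DF_NO_INSN_RESCAN / DF_DEFER_INSN_RESCAN flags checked
   above.  */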

/* Rescan all of the insns in the function.  Note that the artificial
   uses and defs are not touched.  This function will destroy def-use
   or use-def chains.  */

void
df_insn_rescan_all (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  basic_block bb;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);

  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  bitmap_copy (tmp, df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_delete (NULL, uid);
    }

  BITMAP_FREE (tmp);
  bitmap_clear (df->insns_to_delete);
  bitmap_clear (df->insns_to_rescan);
  bitmap_clear (df->insns_to_notes_rescan);

  FOR_EACH_BB (bb)
    {
      rtx insn;
      FOR_BB_INSNS (bb, insn)
        {
          df_insn_rescan (insn);
        }
    }

  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);
}


/* Process all of the deferred rescans or deletions.  */

void
df_process_deferred_rescans (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);

  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  if (dump_file)
    fprintf (dump_file, "starting the processing of deferred insns\n");

  bitmap_copy (tmp, df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_delete (NULL, uid);
    }

  bitmap_copy (tmp, df->insns_to_rescan);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_rescan (insn_info->insn);
    }

  bitmap_copy (tmp, df->insns_to_notes_rescan);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_notes_rescan (insn_info->insn);
    }

  if (dump_file)
    fprintf (dump_file, "ending the processing of deferred insns\n");

  BITMAP_FREE (tmp);
  bitmap_clear (df->insns_to_delete);
  bitmap_clear (df->insns_to_rescan);
  bitmap_clear (df->insns_to_notes_rescan);

  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);

  /* If someone changed regs_ever_live during this pass, fix up the
     entry and exit blocks.  */
  if (df->redo_entry_and_exit)
    {
      df_update_entry_exit_and_calls ();
      df->redo_entry_and_exit = false;
    }
}

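/* Illustrative sketch (not part of the original source): the deferred
   protocol seen from a pass's point of view is

     df_set_flags (DF_DEFER_INSN_RESCAN);
     ... make many insn changes; each df_insn_rescan/df_insn_delete
         call above just queues the uid in a bitmap ...
     df_process_deferred_rescans ();
     df_clear_flags (DF_DEFER_INSN_RESCAN);

   The flag and function names are the real df API; the surrounding
   structure is a hypothetical fragment.  */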

/* Count the number of refs.  Include the defs if INCLUDE_DEFS.  Include
   the uses if INCLUDE_USES.  Include the eq_uses if
   INCLUDE_EQ_USES.  */

static unsigned int
df_count_refs (bool include_defs, bool include_uses,
               bool include_eq_uses)
{
  unsigned int regno;
  int size = 0;
  unsigned int m = df->regs_inited;

  for (regno = 0; regno < m; regno++)
    {
      if (include_defs)
        size += DF_REG_DEF_COUNT (regno);
      if (include_uses)
        size += DF_REG_USE_COUNT (regno);
      if (include_eq_uses)
        size += DF_REG_EQ_USE_COUNT (regno);
    }
  return size;
}

/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  This version processes the refs in reg order
   which is likely to be best if processing the whole function.  */

static void
df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
                                  bool include_defs,
                                  bool include_uses,
                                  bool include_eq_uses)
{
  unsigned int m = df->regs_inited;
  unsigned int regno;
  unsigned int offset = 0;
  unsigned int start;

  if (df->changeable_flags & DF_NO_HARD_REGS)
    {
      start = FIRST_PSEUDO_REGISTER;
      memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
      memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
    }
  else
    start = 0;

  ref_info->total_size
    = df_count_refs (include_defs, include_uses, include_eq_uses);

  df_check_and_grow_ref_info (ref_info, 1);

  for (regno = start; regno < m; regno++)
    {
      int count = 0;
      ref_info->begin[regno] = offset;
      if (include_defs)
        {
          struct df_ref *ref = DF_REG_DEF_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_assert (offset < ref_info->refs_size);
            }
        }
      if (include_uses)
        {
          struct df_ref *ref = DF_REG_USE_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_assert (offset < ref_info->refs_size);
            }
        }
      if (include_eq_uses)
        {
          struct df_ref *ref = DF_REG_EQ_USE_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_assert (offset < ref_info->refs_size);
            }
        }
      ref_info->count[regno] = count;
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */
  ref_info->table_size = offset;
}

/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  This version processes the refs in insn order
   which is likely to be best if processing some segment of the
   function.  */

static void
df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
                                   bool include_defs,
                                   bool include_uses,
                                   bool include_eq_uses)
{
  bitmap_iterator bi;
  unsigned int bb_index;
  unsigned int m = df->regs_inited;
  unsigned int offset = 0;
  unsigned int r;
  unsigned int start
    = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;

  memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
  memset (ref_info->count, 0, sizeof (int) * df->regs_inited);

  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
  df_check_and_grow_ref_info (ref_info, 1);

  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      rtx insn;
      struct df_ref **ref_rec;

      if (include_defs)
        for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
          {
            unsigned int regno = DF_REF_REGNO (*ref_rec);
            ref_info->count[regno]++;
          }
      if (include_uses)
        for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
          {
            unsigned int regno = DF_REF_REGNO (*ref_rec);
            ref_info->count[regno]++;
          }

      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            {
              unsigned int uid = INSN_UID (insn);

              if (include_defs)
                for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
                  {
                    unsigned int regno = DF_REF_REGNO (*ref_rec);
                    ref_info->count[regno]++;
                  }
              if (include_uses)
                for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
                  {
                    unsigned int regno = DF_REF_REGNO (*ref_rec);
                    ref_info->count[regno]++;
                  }
              if (include_eq_uses)
                for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
                  {
                    unsigned int regno = DF_REF_REGNO (*ref_rec);
                    ref_info->count[regno]++;
                  }
            }
        }
    }

  for (r = start; r < m; r++)
    {
      ref_info->begin[r] = offset;
      offset += ref_info->count[r];
      ref_info->count[r] = 0;
    }

  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      rtx insn;
      struct df_ref **ref_rec;

      if (include_defs)
        for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
          {
            struct df_ref *ref = *ref_rec;
            unsigned int regno = DF_REF_REGNO (ref);
            if (regno >= start)
              {
                unsigned int id
                  = ref_info->begin[regno] + ref_info->count[regno]++;
                DF_REF_ID (ref) = id;
                ref_info->refs[id] = ref;
              }
          }
      if (include_uses)
        for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
          {
            struct df_ref *ref = *ref_rec;
            unsigned int regno = DF_REF_REGNO (ref);
            if (regno >= start)
              {
                unsigned int id
                  = ref_info->begin[regno] + ref_info->count[regno]++;
                DF_REF_ID (ref) = id;
                ref_info->refs[id] = ref;
              }
          }

      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            {
              unsigned int uid = INSN_UID (insn);

              if (include_defs)
                for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
                  {
                    struct df_ref *ref = *ref_rec;
                    unsigned int regno = DF_REF_REGNO (ref);
                    if (regno >= start)
                      {
                        unsigned int id
                          = ref_info->begin[regno] + ref_info->count[regno]++;
                        DF_REF_ID (ref) = id;
                        ref_info->refs[id] = ref;
                      }
                  }
              if (include_uses)
                for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
                  {
                    struct df_ref *ref = *ref_rec;
                    unsigned int regno = DF_REF_REGNO (ref);
                    if (regno >= start)
                      {
                        unsigned int id
                          = ref_info->begin[regno] + ref_info->count[regno]++;
                        DF_REF_ID (ref) = id;
                        ref_info->refs[id] = ref;
                      }
                  }
              if (include_eq_uses)
                for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
                  {
                    struct df_ref *ref = *ref_rec;
                    unsigned int regno = DF_REF_REGNO (ref);
                    if (regno >= start)
                      {
                        unsigned int id
                          = ref_info->begin[regno] + ref_info->count[regno]++;
                        DF_REF_ID (ref) = id;
                        ref_info->refs[id] = ref;
                      }
                  }
            }
        }
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */

  ref_info->table_size = offset;
}

/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  */

static void
df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
                           bool include_defs,
                           bool include_uses,
                           bool include_eq_uses)
{
  if (df->analyze_subset)
    df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
                                       include_uses, include_eq_uses);
  else
    df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
                                      include_uses, include_eq_uses);
}


/* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET.  */
static unsigned int
df_add_refs_to_table (unsigned int offset,
                      struct df_ref_info *ref_info,
                      struct df_ref **ref_vec)
{
  while (*ref_vec)
    {
      struct df_ref *ref = *ref_vec;
      if ((!(df->changeable_flags & DF_NO_HARD_REGS))
          || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
        {
          ref_info->refs[offset] = ref;
          DF_REF_ID (*ref_vec) = offset++;
        }
      ref_vec++;
    }
  return offset;
}


/* Count the number of refs in all of the insns of BB.  Include the
   defs if INCLUDE_DEFS.  Include the uses if INCLUDE_USES.  Include the
   eq_uses if INCLUDE_EQ_USES.  */

static unsigned int
df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
                               struct df_ref_info *ref_info,
                               bool include_defs, bool include_uses,
                               bool include_eq_uses)
{
  rtx insn;

  if (include_defs)
    offset = df_add_refs_to_table (offset, ref_info,
                                   df_get_artificial_defs (bb->index));
  if (include_uses)
    offset = df_add_refs_to_table (offset, ref_info,
                                   df_get_artificial_uses (bb->index));

  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      {
        unsigned int uid = INSN_UID (insn);
        if (include_defs)
          offset = df_add_refs_to_table (offset, ref_info,
                                         DF_INSN_UID_DEFS (uid));
        if (include_uses)
          offset = df_add_refs_to_table (offset, ref_info,
                                         DF_INSN_UID_USES (uid));
        if (include_eq_uses)
          offset = df_add_refs_to_table (offset, ref_info,
                                         DF_INSN_UID_EQ_USES (uid));
      }
  return offset;
}


/* Organize the refs by insn into the table in REF_INFO.  If
   blocks_to_analyze is defined, use that set, otherwise the entire
   program.  Include the defs if INCLUDE_DEFS.  Include the uses if
   INCLUDE_USES.  Include the eq_uses if INCLUDE_EQ_USES.  */

static void
df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
                            bool include_defs, bool include_uses,
                            bool include_eq_uses)
{
  basic_block bb;
  unsigned int offset = 0;

  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
  df_check_and_grow_ref_info (ref_info, 1);
  if (df->blocks_to_analyze)
    {
      bitmap_iterator bi;
      unsigned int index;

      EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
        {
          offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK (index), offset, ref_info,
                                                  include_defs, include_uses,
                                                  include_eq_uses);
        }

      ref_info->table_size = offset;
    }
  else
    {
      FOR_ALL_BB (bb)
        offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
                                                include_defs, include_uses,
                                                include_eq_uses);
      ref_info->table_size = offset;
    }
}


/* If the use refs in DF are not organized, reorganize them.  */

void
df_maybe_reorganize_use_refs (enum df_ref_order order)
{
  if (order == df->use_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_REG_WITH_NOTES:
      df_reorganize_refs_by_reg (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
      df_reorganize_refs_by_insn (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_NO_TABLE:
      free (df->use_info.refs);
      df->use_info.refs = NULL;
      df->use_info.refs_size = 0;
      break;

    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      gcc_unreachable ();
      break;
    }

  df->use_info.ref_order = order;
}

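/* Illustrative sketch (not part of the original source): a pass that
   wants to walk the flat use table asks for an ordering first, e.g.

     df_maybe_reorganize_use_refs (DF_REF_ORDER_BY_INSN);
     for (i = 0; i < DF_USES_TABLE_SIZE (); i++)
       {
         struct df_ref *use = DF_USES_GET (i);
         if (use)
           ...;
       }

   The enum value and function name are real; DF_USES_TABLE_SIZE and
   DF_USES_GET are the df.h accessors as recalled here, so double-check
   those macros before relying on this fragment.  */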

/* If the def refs in DF are not organized, reorganize them.  */

void
df_maybe_reorganize_def_refs (enum df_ref_order order)
{
  if (order == df->def_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_NO_TABLE:
      free (df->def_info.refs);
      df->def_info.refs = NULL;
      df->def_info.refs_size = 0;
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
    case DF_REF_ORDER_BY_REG_WITH_NOTES:
    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      gcc_unreachable ();
      break;
    }

  df->def_info.ref_order = order;
}


/* Change the BB of all refs in the ref chain to NEW_BB.
   Assumes that all refs in the chain have the same BB.
   If changed, return the original bb the chain belonged to
   (or NULL if the chain was empty).
   If no change, return NEW_BB.  */

static basic_block
df_ref_chain_change_bb (struct df_ref **ref_rec,
                        basic_block old_bb,
                        basic_block new_bb)
{
  while (*ref_rec)
    {
      struct df_ref *ref = *ref_rec;

      if (DF_REF_BB (ref) == new_bb)
        return new_bb;
      else
        {
          gcc_assert (old_bb == NULL || DF_REF_BB (ref) == old_bb);
          old_bb = DF_REF_BB (ref);
          DF_REF_BB (ref) = new_bb;
        }
      ref_rec++;
    }

  return old_bb;
}


/* Change all of the basic block references in INSN to use the insn's
   current basic block.  This function is called from routines that move
   instructions from one block to another.  */

void
df_insn_change_bb (rtx insn)
{
  basic_block new_bb = BLOCK_FOR_INSN (insn);
  basic_block old_bb = NULL;
  struct df_insn_info *insn_info;
  unsigned int uid = INSN_UID (insn);

  if (!df)
    return;

  if (dump_file)
    fprintf (dump_file, "changing bb of uid %d\n", uid);

  insn_info = DF_INSN_UID_SAFE_GET (uid);
  if (insn_info == NULL)
    {
      if (dump_file)
        fprintf (dump_file, "  unscanned insn\n");
      df_insn_rescan (insn);
      return;
    }

  if (!INSN_P (insn))
    return;

  old_bb = df_ref_chain_change_bb (insn_info->defs, old_bb, new_bb);
  if (old_bb == new_bb)
    return;

  old_bb = df_ref_chain_change_bb (insn_info->uses, old_bb, new_bb);
  if (old_bb == new_bb)
    return;

  old_bb = df_ref_chain_change_bb (insn_info->eq_uses, old_bb, new_bb);
  if (old_bb == new_bb)
    return;

  df_set_bb_dirty (new_bb);
  if (old_bb)
    {
      if (dump_file)
        fprintf (dump_file, "  from %d to %d\n",
                 old_bb->index, new_bb->index);
      df_set_bb_dirty (old_bb);
    }
  else
    if (dump_file)
      fprintf (dump_file, "  to %d\n", new_bb->index);
}

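/* Illustrative sketch (not part of the original source): cfg routines
   that move an insn update BLOCK_FOR_INSN first and then let df catch
   up, along the lines of

     set_block_for_insn (insn, new_bb);
     df_insn_change_bb (insn);

   set_block_for_insn is the real rtl helper; the pairing shown is the
   convention described in the comment above, not a quote from a
   specific caller.  */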

/* Helper function for df_ref_change_reg_with_loc.  */

static void
df_ref_change_reg_with_loc_1 (struct df_reg_info *old, struct df_reg_info *new,
                              int new_regno, rtx loc)
{
  struct df_ref *the_ref = old->reg_chain;

  while (the_ref)
    {
      if (DF_REF_LOC (the_ref) && (*DF_REF_LOC (the_ref) == loc))
        {
          struct df_ref *next_ref = the_ref->next_reg;
          struct df_ref *prev_ref = the_ref->prev_reg;
          struct df_ref **ref_vec, **ref_vec_t;
          unsigned int count = 0;

          DF_REF_REGNO (the_ref) = new_regno;
          DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];

          /* Pull the_ref out of the old regno chain.  */
          if (prev_ref)
            prev_ref->next_reg = next_ref;
          else
            old->reg_chain = next_ref;
          if (next_ref)
            next_ref->prev_reg = prev_ref;
          old->n_refs--;

          /* Put the ref into the new regno chain.  */
          the_ref->prev_reg = NULL;
          the_ref->next_reg = new->reg_chain;
          if (new->reg_chain)
            new->reg_chain->prev_reg = the_ref;
          new->reg_chain = the_ref;
          new->n_refs++;
          df_set_bb_dirty (DF_REF_BB (the_ref));

          /* Need to resort the record that the ref was in because the
             regno is a sorting key.  First, find the right record.  */
          if (DF_REF_IS_ARTIFICIAL (the_ref))
            {
              unsigned int bb_index = DF_REF_BB (the_ref)->index;
              if (DF_REF_REG_DEF_P (the_ref))
                ref_vec = df_get_artificial_defs (bb_index);
              else
                ref_vec = df_get_artificial_uses (bb_index);
            }
          else
            {
              struct df_insn_info *insn_info
                = DF_INSN_GET (DF_REF_INSN (the_ref));
              if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
                ref_vec = insn_info->eq_uses;
              else
                ref_vec = insn_info->uses;
              if (dump_file)
                fprintf (dump_file, "changing reg in insn %d\n",
                         INSN_UID (DF_REF_INSN (the_ref)));
            }
          ref_vec_t = ref_vec;

          /* Find the length.  */
          while (*ref_vec_t)
            {
              count++;
              ref_vec_t++;
            }
          qsort (ref_vec, count, sizeof (struct df_ref *), df_ref_compare);

          the_ref = next_ref;
        }
      else
        the_ref = the_ref->next_reg;
    }
}


/* Change the regno of all refs that contained LOC from OLD_REGNO to
   NEW_REGNO.  Refs that do not match LOC are not changed.  This call
   is to support the SET_REGNO macro.  */

void
df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
{
  if ((!df) || (old_regno == -1) || (old_regno == new_regno))
    return;

  df_grow_reg_info ();

  df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
                                DF_REG_DEF_GET (new_regno), new_regno, loc);
  df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
                                DF_REG_USE_GET (new_regno), new_regno, loc);
  df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
                                DF_REG_EQ_USE_GET (new_regno), new_regno, loc);
}

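/* Illustrative sketch (not part of the original source): SET_REGNO is
   the intended entry point; in the rtl.h of this era it expands to
   roughly

     #define SET_REGNO(RTX, N)                                \
       (df_ref_change_reg_with_loc (REGNO (RTX), N, RTX),     \
        XCUINT (RTX, 0, REG) = N)

   i.e. the df side tables are updated before the regno field itself is
   rewritten.  This is a paraphrase from memory; consult rtl.h for the
   authoritative definition.  */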
1941
1942 /* Delete the mw_hardregs that point into the eq_notes. */
1943
1944 static unsigned int
1945 df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
1946 {
1947 struct df_mw_hardreg **mw_vec = insn_info->mw_hardregs;
1948 unsigned int deleted = 0;
1949 unsigned int count = 0;
1950 struct df_scan_problem_data *problem_data
1951 = (struct df_scan_problem_data *) df_scan->problem_data;
1952
1953 if (!*mw_vec)
1954 return 0;
1955
1956 while (*mw_vec)
1957 {
1958 if ((*mw_vec)->flags & DF_REF_IN_NOTE)
1959 {
1960 struct df_mw_hardreg **temp_vec = mw_vec;
1961
1962 pool_free (problem_data->mw_reg_pool, *mw_vec);
1964 /* Shove the remaining ones down one to fill the gap. While
1965 this looks O(n**2), it is highly unusual to have any mw regs
1966 in eq_notes and the chances of more than one are almost
1967 nonexistent. */
1968 while (*temp_vec)
1969 {
1970 *temp_vec = *(temp_vec + 1);
1971 temp_vec++;
1972 }
1973 deleted++;
1974 }
1975 else
1976 {
1977 mw_vec++;
1978 count++;
1979 }
1980 }
1981
1982 if (count == 0)
1983 {
1984 free (insn_info->mw_hardregs);
1985 insn_info->mw_hardregs = df_null_mw_rec;
1986 return 0;
1987 }
1988 return deleted;
1989 }
1990
1991
1992 /* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN. */
1993
1994 void
1995 df_notes_rescan (rtx insn)
1996 {
1997 struct df_insn_info *insn_info;
1998 unsigned int uid = INSN_UID (insn);
1999
2000 if (!df)
2001 return;
2002
2003 /* The client has disabled rescanning and plans to do it itself. */
2004 if (df->changeable_flags & DF_NO_INSN_RESCAN)
2005 return;
2006
2007 /* Do nothing if the insn hasn't been emitted yet. */
2008 if (!BLOCK_FOR_INSN (insn))
2009 return;
2010
2011 df_grow_bb_info (df_scan);
2012 df_grow_reg_info ();
2013
2014 insn_info = DF_INSN_UID_SAFE_GET (uid);
2015
2016 /* The client has deferred rescanning. */
2017 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
2018 {
2019 if (!insn_info)
2020 {
2021 insn_info = df_insn_create_insn_record (insn);
2022 insn_info->defs = df_null_ref_rec;
2023 insn_info->uses = df_null_ref_rec;
2024 insn_info->eq_uses = df_null_ref_rec;
2025 insn_info->mw_hardregs = df_null_mw_rec;
2026 }
2027
2028 bitmap_clear_bit (df->insns_to_delete, uid);
2029 /* If the insn is set to be rescanned, it does not need to also
2030 be notes rescanned. */
2031 if (!bitmap_bit_p (df->insns_to_rescan, uid))
2032 bitmap_set_bit (df->insns_to_notes_rescan, uid);
2033 return;
2034 }
2035
2036 bitmap_clear_bit (df->insns_to_delete, uid);
2037 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
2038
2039 if (insn_info)
2040 {
2041 basic_block bb = BLOCK_FOR_INSN (insn);
2042 rtx note;
2043 struct df_collection_rec collection_rec;
2044 unsigned int num_deleted;
2045
2046 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
2047 collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
2048 collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 1000);
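/* Stack scratch space for the refs found in the notes; 1000 entries
   is assumed to exceed what any one insn's notes can generate (the
   same convention as the allocas in df_bb_refs_record). */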
2049
2050 num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
2051 df_ref_chain_delete (insn_info->eq_uses);
2052 insn_info->eq_uses = NULL;
2053
2054 /* Process REG_EQUIV/REG_EQUAL notes */
2055 for (note = REG_NOTES (insn); note;
2056 note = XEXP (note, 1))
2057 {
2058 switch (REG_NOTE_KIND (note))
2059 {
2060 case REG_EQUIV:
2061 case REG_EQUAL:
2062 df_uses_record (&collection_rec,
2063 &XEXP (note, 0), DF_REF_REG_USE,
2064 bb, insn, DF_REF_IN_NOTE);
2065 default:
2066 break;
2067 }
2068 }
2069
2070 /* Find some place to put any new mw_hardregs. */
2071 df_canonize_collection_rec (&collection_rec);
2072 if (collection_rec.next_mw)
2073 {
2074 unsigned int count = 0;
2075 struct df_mw_hardreg **mw_rec = insn_info->mw_hardregs;
2076 while (*mw_rec)
2077 {
2078 count++;
2079 mw_rec++;
2080 }
2081
2082 if (count)
2083 {
2084 /* Append to the end of the existing record after
2085 expanding it if necessary. */
2086 if (collection_rec.next_mw > num_deleted)
2087 {
2088 insn_info->mw_hardregs =
2089 xrealloc (insn_info->mw_hardregs,
2090 (count + 1 + collection_rec.next_mw)
2091 * sizeof (struct df_mw_hardreg *));
2092 }
2093 memcpy (&insn_info->mw_hardregs[count], collection_rec.mw_vec,
2094 (collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
2095 qsort (insn_info->mw_hardregs, count + collection_rec.next_mw,
2096 sizeof (struct df_mw_hardreg *), df_mw_compare);
2097 }
2098 else
2099 {
2100 /* No vector there. */
2101 insn_info->mw_hardregs
2102 = XNEWVEC (struct df_mw_hardreg*,
2103 count + 1 + collection_rec.next_mw);
2104 memcpy (insn_info->mw_hardregs, collection_rec.mw_vec,
2105 (collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
2106 }
2107 }
2108 /* Get rid of the mw_rec so that df_refs_add_to_chains will
2109 ignore it. */
2110 collection_rec.mw_vec = NULL;
2111 collection_rec.next_mw = 0;
2112 df_refs_add_to_chains (&collection_rec, bb, insn);
2113 }
2114 else
2115 df_insn_rescan (insn);
2116
2117 }
2118
2119 \f
2120 /*----------------------------------------------------------------------------
2121 Hard core instruction scanning code. No external interfaces here,
2122 just a lot of routines that look inside insns.
2123 ----------------------------------------------------------------------------*/
2124
2125
2126 /* Return true if the contents of two df_ref's are identical.
2127 It ignores DF_REF_REG_MARKER and DF_REF_MW_HARDREG. */
2128
2129 static bool
2130 df_ref_equal_p (struct df_ref *ref1, struct df_ref *ref2)
2131 {
2132 if (!ref2)
2133 return false;
2134 return (ref1 == ref2) ||
2135 (DF_REF_REG (ref1) == DF_REF_REG (ref2)
2136 && DF_REF_REGNO (ref1) == DF_REF_REGNO (ref2)
2137 && DF_REF_LOC (ref1) == DF_REF_LOC (ref2)
2138 && DF_REF_INSN (ref1) == DF_REF_INSN (ref2)
2139 && DF_REF_TYPE (ref1) == DF_REF_TYPE (ref2)
2140 && ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER | DF_REF_MW_HARDREG))
2141 == (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER | DF_REF_MW_HARDREG)))
2142 && DF_REF_BB (ref1) == DF_REF_BB (ref2));
2143 }
2144
2145
2146 /* Compare REF1 and REF2 for sorting. This is only called from places
2147 where all of the refs are of the same type, in the same insn, and
2148 have the same bb. So these fields are not checked. */
2149
2150 static int
2151 df_ref_compare (const void *r1, const void *r2)
2152 {
2153 const struct df_ref *const ref1 = *(const struct df_ref *const*)r1;
2154 const struct df_ref *const ref2 = *(const struct df_ref *const*)r2;
2155
2156 if (ref1 == ref2)
2157 return 0;
2158
2159 if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
2160 return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);
2161
2162 if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
2163 return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);
2164
2165 if ((DF_REF_REG (ref1) != DF_REF_REG (ref2))
2166 || (DF_REF_LOC (ref1) != DF_REF_LOC (ref2)))
2167 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2168
2169 if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
2170 {
2171 /* If two refs are identical except that one of them is from
2172 a mw and one is not, we need to have the one with the mw
2173 first. */
2174 if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
2175 DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
2176 return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
2177 else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
2178 return -1;
2179 else
2180 return 1;
2181 }
2182 return 0;
2183 }
2184
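/* Swap the refs at positions I and J in REF_VEC. */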
2185 static void
2186 df_swap_refs (struct df_ref **ref_vec, int i, int j)
2187 {
2188 struct df_ref *tmp = ref_vec[i];
2189 ref_vec[i] = ref_vec[j];
2190 ref_vec[j] = tmp;
2191 }
2192
2193 /* Sort and compress a set of refs. */
2194
2195 static unsigned int
2196 df_sort_and_compress_refs (struct df_ref **ref_vec, unsigned int count)
2197 {
2198 struct df_scan_problem_data *problem_data
2199 = (struct df_scan_problem_data *) df_scan->problem_data;
2200 unsigned int i;
2201 unsigned int dist = 0;
2202
2203 ref_vec[count] = NULL;
2204 /* If there are 1 or 0 elements, there is nothing to do. */
2205 if (count < 2)
2206 return count;
2207 else if (count == 2)
2208 {
2209 if (df_ref_compare (&ref_vec[0], &ref_vec[1]) > 0)
2210 df_swap_refs (ref_vec, 0, 1);
2211 }
2212 else
2213 {
2214 for (i = 0; i < count - 1; i++)
2215 if (df_ref_compare (&ref_vec[i], &ref_vec[i+1]) >= 0)
2216 break;
2217 /* If the array is already strictly ordered, which is the most
2218 common case for a large COUNT (as happens for CALL insns),
2219 there is no need to sort and filter out duplicates; simply
2220 return the count. Make sure DF_GET_ADD_REFS adds refs in the
2221 increasing order of DF_REF_COMPARE. */
2224 if (i == count - 1)
2225 return count;
2226 qsort (ref_vec, count, sizeof (struct df_ref *), df_ref_compare);
2227 }
2228
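/* Compress out duplicates: DIST counts how many entries have been
   freed so far, so the next candidate is always at I + DIST + 1 and
   each distinct ref slides down to position I + 1. */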
2229 for (i = 0; i < count - dist; i++)
2230 {
2231 /* Find the next ref that is not equal to the current ref. */
2232 while (df_ref_equal_p (ref_vec[i], ref_vec[i + dist + 1]))
2233 {
2234 pool_free (problem_data->ref_pool, ref_vec[i + dist + 1]);
2235 dist++;
2236 }
2237 /* Copy it down to the next position. */
2238 if (dist)
2239 ref_vec[i+1] = ref_vec[i + dist + 1];
2240 }
2241
2242 count -= dist;
2243 ref_vec[count] = NULL;
2244 return count;
2245 }
2246
2247
2248 /* Return true if the contents of two df_mw_hardreg's are
2249 identical. */
2250
2251 static bool
2252 df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2253 {
2254 if (!mw2)
2255 return false;
2256 return (mw1 == mw2) ||
2257 (mw1->mw_reg == mw2->mw_reg
2258 && mw1->type == mw2->type
2259 && mw1->flags == mw2->flags
2260 && mw1->start_regno == mw2->start_regno
2261 && mw1->end_regno == mw2->end_regno);
2262 }
2263
2264
2265 /* Compare MW1 and MW2 for sorting. */
2266
2267 static int
2268 df_mw_compare (const void *m1, const void *m2)
2269 {
2270 const struct df_mw_hardreg *const mw1 = *(const struct df_mw_hardreg *const*)m1;
2271 const struct df_mw_hardreg *const mw2 = *(const struct df_mw_hardreg *const*)m2;
2272
2273 if (mw1 == mw2)
2274 return 0;
2275
2276 if (mw1->type != mw2->type)
2277 return mw1->type - mw2->type;
2278
2279 if (mw1->flags != mw2->flags)
2280 return mw1->flags - mw2->flags;
2281
2282 if (mw1->start_regno != mw2->start_regno)
2283 return mw1->start_regno - mw2->start_regno;
2284
2285 if (mw1->end_regno != mw2->end_regno)
2286 return mw1->end_regno - mw2->end_regno;
2287
2288 if (mw1->mw_reg != mw2->mw_reg)
2289 return mw1->mw_order - mw2->mw_order;
2290
2291 return 0;
2292 }
2293
2294
2295 /* Sort and compress a set of mws. */
2296
2297 static unsigned int
2298 df_sort_and_compress_mws (struct df_mw_hardreg **mw_vec, unsigned int count)
2299 {
2300 struct df_scan_problem_data *problem_data
2301 = (struct df_scan_problem_data *) df_scan->problem_data;
2302 unsigned int i;
2303 unsigned int dist = 0;
2304 mw_vec[count] = NULL;
2305
2306 if (count < 2)
2307 return count;
2308 else if (count == 2)
2309 {
2310 if (df_mw_compare (&mw_vec[0], &mw_vec[1]) > 0)
2311 {
2312 struct df_mw_hardreg *tmp = mw_vec[0];
2313 mw_vec[0] = mw_vec[1];
2314 mw_vec[1] = tmp;
2315 }
2316 }
2317 else
2318 qsort (mw_vec, count, sizeof (struct df_mw_hardreg *), df_mw_compare);
2319
2320 for (i = 0; i < count - dist; i++)
2321 {
2322 /* Find the next ref that is not equal to the current ref. */
2323 while (df_mw_equal_p (mw_vec[i], mw_vec[i + dist + 1]))
2324 {
2325 pool_free (problem_data->mw_reg_pool, mw_vec[i + dist + 1]);
2326 dist++;
2327 }
2328 /* Copy it down to the next position. */
2329 if (dist)
2330 mw_vec[i+1] = mw_vec[i + dist + 1];
2331 }
2332
2333 count -= dist;
2334 mw_vec[count] = NULL;
2335 return count;
2336 }
2337
2338
2339 /* Sort and remove duplicates from the COLLECTION_REC. */
2340
2341 static void
2342 df_canonize_collection_rec (struct df_collection_rec *collection_rec)
2343 {
2344 if (collection_rec->def_vec)
2345 collection_rec->next_def
2346 = df_sort_and_compress_refs (collection_rec->def_vec,
2347 collection_rec->next_def);
2348 if (collection_rec->use_vec)
2349 collection_rec->next_use
2350 = df_sort_and_compress_refs (collection_rec->use_vec,
2351 collection_rec->next_use);
2352 if (collection_rec->eq_use_vec)
2353 collection_rec->next_eq_use
2354 = df_sort_and_compress_refs (collection_rec->eq_use_vec,
2355 collection_rec->next_eq_use);
2356 if (collection_rec->mw_vec)
2357 collection_rec->next_mw
2358 = df_sort_and_compress_mws (collection_rec->mw_vec,
2359 collection_rec->next_mw);
2360 }
2361
2362
2363 /* Add the new df_ref to appropriate reg_info/ref_info chains. */
2364
2365 static void
2366 df_install_ref (struct df_ref *this_ref,
2367 struct df_reg_info *reg_info,
2368 struct df_ref_info *ref_info,
2369 bool add_to_table)
2370 {
2371 unsigned int regno = DF_REF_REGNO (this_ref);
2372 /* Add the ref to the reg_{def,use,eq_use} chain. */
2373 struct df_ref *head = reg_info->reg_chain;
2374
2375 reg_info->reg_chain = this_ref;
2376 reg_info->n_refs++;
2377
2378 if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
2379 {
2380 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
2381 df->hard_regs_live_count[regno]++;
2382 }
2383
2384 gcc_assert (DF_REF_NEXT_REG (this_ref) == NULL);
2385 gcc_assert (DF_REF_PREV_REG (this_ref) == NULL);
2386
2387 DF_REF_NEXT_REG (this_ref) = head;
2388
2389 /* This ref is now the head of the chain, so it has no prev. */
2390 DF_REF_PREV_REG (this_ref) = NULL;
2391
2392 if (head)
2393 DF_REF_PREV_REG (head) = this_ref;
2394
2395 if (add_to_table)
2396 {
2397 gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
2398 df_check_and_grow_ref_info (ref_info, 1);
2399 DF_REF_ID (this_ref) = ref_info->table_size;
2400 /* Add the ref to the big array of defs. */
2401 ref_info->refs[ref_info->table_size] = this_ref;
2402 ref_info->table_size++;
2403 }
2404 else
2405 DF_REF_ID (this_ref) = -1;
2406
2407 ref_info->total_size++;
2408 }
2409
2410
2411 /* This function takes one of the groups of refs (defs, uses or
2412 eq_uses) and installs the entire group into the insn. It also adds
2413 each of these refs into the appropriate chains. */
2414
2415 static struct df_ref **
2416 df_install_refs (basic_block bb,
2417 struct df_ref **old_vec, unsigned int count,
2418 struct df_reg_info **reg_info,
2419 struct df_ref_info *ref_info,
2420 bool is_notes)
2421 {
2422 if (count)
2423 {
2424 unsigned int i;
2425 struct df_ref **new_vec = XNEWVEC (struct df_ref*, count + 1);
2426 bool add_to_table;
2427
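/* Installing refs invalidates any previously sorted order, so the
   table's ref_order is downgraded to the corresponding UNORDERED
   kind; note refs are added to the table only when the order is one
   of the WITH_NOTES kinds. */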
2428 switch (ref_info->ref_order)
2429 {
2430 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
2431 case DF_REF_ORDER_BY_REG_WITH_NOTES:
2432 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
2433 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
2434 add_to_table = true;
2435 break;
2436 case DF_REF_ORDER_UNORDERED:
2437 case DF_REF_ORDER_BY_REG:
2438 case DF_REF_ORDER_BY_INSN:
2439 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
2440 add_to_table = !is_notes;
2441 break;
2442 default:
2443 add_to_table = false;
2444 break;
2445 }
2446
2447 /* Do not add if ref is not in the right blocks. */
2448 if (add_to_table && df->analyze_subset)
2449 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
2450
2451 for (i = 0; i < count; i++)
2452 {
2453 struct df_ref *this_ref = old_vec[i];
2454 new_vec[i] = this_ref;
2455 df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
2456 ref_info, add_to_table);
2457 }
2458
2459 new_vec[count] = NULL;
2460 return new_vec;
2461 }
2462 else
2463 return df_null_ref_rec;
2464 }
2465
2466
2467 /* This function takes the mws and installs the entire group into
2468 the insn. */
2469
2470 static struct df_mw_hardreg **
2471 df_install_mws (struct df_mw_hardreg **old_vec, unsigned int count)
2472 {
2473 if (count)
2474 {
2475 struct df_mw_hardreg **new_vec
2476 = XNEWVEC (struct df_mw_hardreg*, count + 1);
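/* Copy COUNT entries plus the trailing NULL sentinel. */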
2477 memcpy (new_vec, old_vec,
2478 sizeof (struct df_mw_hardreg*) * (count + 1));
2479 return new_vec;
2480 }
2481 else
2482 return df_null_mw_rec;
2483 }
2484
2485
2486 /* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
2487 chains and update other necessary information. */
2488
2489 static void
2490 df_refs_add_to_chains (struct df_collection_rec *collection_rec,
2491 basic_block bb, rtx insn)
2492 {
2493 if (insn)
2494 {
2495 struct df_insn_info *insn_rec = DF_INSN_GET (insn);
2496 /* If there is a vector in the collection rec, add it to the
2497 insn. A null rec is a signal that the caller will handle the
2498 chain specially. */
2499 if (collection_rec->def_vec)
2500 {
2501 if (insn_rec->defs && *insn_rec->defs)
2502 free (insn_rec->defs);
2503 insn_rec->defs
2504 = df_install_refs (bb, collection_rec->def_vec,
2505 collection_rec->next_def,
2506 df->def_regs,
2507 &df->def_info, false);
2508 }
2509 if (collection_rec->use_vec)
2510 {
2511 if (insn_rec->uses && *insn_rec->uses)
2512 free (insn_rec->uses);
2513 insn_rec->uses
2514 = df_install_refs (bb, collection_rec->use_vec,
2515 collection_rec->next_use,
2516 df->use_regs,
2517 &df->use_info, false);
2518 }
2519 if (collection_rec->eq_use_vec)
2520 {
2521 if (insn_rec->eq_uses && *insn_rec->eq_uses)
2522 free (insn_rec->eq_uses);
2523 insn_rec->eq_uses
2524 = df_install_refs (bb, collection_rec->eq_use_vec,
2525 collection_rec->next_eq_use,
2526 df->eq_use_regs,
2527 &df->use_info, true);
2528 }
2529 if (collection_rec->mw_vec)
2530 {
2531 if (insn_rec->mw_hardregs && *insn_rec->mw_hardregs)
2532 free (insn_rec->mw_hardregs);
2533 insn_rec->mw_hardregs
2534 = df_install_mws (collection_rec->mw_vec,
2535 collection_rec->next_mw);
2536 }
2537 }
2538 else
2539 {
2540 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
2541
2542 if (bb_info->artificial_defs && *bb_info->artificial_defs)
2543 free (bb_info->artificial_defs);
2544 bb_info->artificial_defs
2545 = df_install_refs (bb, collection_rec->def_vec,
2546 collection_rec->next_def,
2547 df->def_regs,
2548 &df->def_info, false);
2549 if (bb_info->artificial_uses && *bb_info->artificial_uses)
2550 free (bb_info->artificial_uses);
2551 bb_info->artificial_uses
2552 = df_install_refs (bb, collection_rec->use_vec,
2553 collection_rec->next_use,
2554 df->use_regs,
2555 &df->use_info, false);
2556 }
2557 }
2558
2559
2560 /* Allocate a ref and initialize its fields. */
2561
2562 static struct df_ref *
2563 df_ref_create_structure (struct df_collection_rec *collection_rec,
2564 rtx reg, rtx *loc,
2565 basic_block bb, rtx insn,
2566 enum df_ref_type ref_type,
2567 enum df_ref_flags ref_flags)
2568 {
2569 struct df_ref *this_ref;
2570 int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2571 struct df_scan_problem_data *problem_data
2572 = (struct df_scan_problem_data *) df_scan->problem_data;
2573
2574 this_ref = pool_alloc (problem_data->ref_pool);
2575 DF_REF_ID (this_ref) = -1;
2576 DF_REF_REG (this_ref) = reg;
2577 DF_REF_REGNO (this_ref) = regno;
2578 DF_REF_LOC (this_ref) = loc;
2579 DF_REF_INSN (this_ref) = insn;
2580 DF_REF_CHAIN (this_ref) = NULL;
2581 DF_REF_TYPE (this_ref) = ref_type;
2582 DF_REF_FLAGS (this_ref) = ref_flags;
2583 DF_REF_BB (this_ref) = bb;
2584 DF_REF_NEXT_REG (this_ref) = NULL;
2585 DF_REF_PREV_REG (this_ref) = NULL;
2586 DF_REF_ORDER (this_ref) = df->ref_order++;
2587
2588 /* We need to clear this bit because fwprop, and in the future
2589 possibly other optimizations, sometimes create new refs using old
2590 refs as the model. */
2591 DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);
2592
2593 /* See if this ref needs to have DF_HARD_REG_LIVE bit set. */
2594 if ((regno < FIRST_PSEUDO_REGISTER)
2595 && (!DF_REF_IS_ARTIFICIAL (this_ref)))
2596 {
2597 if (DF_REF_TYPE (this_ref) == DF_REF_REG_DEF)
2598 {
2599 if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
2600 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2601 }
2602 else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
2603 && (regno == FRAME_POINTER_REGNUM
2604 || regno == ARG_POINTER_REGNUM)))
2605 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2606 }
2607
2608 if (collection_rec)
2609 {
2610 if (DF_REF_TYPE (this_ref) == DF_REF_REG_DEF)
2611 collection_rec->def_vec[collection_rec->next_def++] = this_ref;
2612 else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
2613 collection_rec->eq_use_vec[collection_rec->next_eq_use++] = this_ref;
2614 else
2615 collection_rec->use_vec[collection_rec->next_use++] = this_ref;
2616 }
2617
2618 return this_ref;
2619 }
2620
2621
2622 /* Create new references of type DF_REF_TYPE for each part of register REG
2623 at address LOC within INSN of BB. */
2624
2625 static void
2626 df_ref_record (struct df_collection_rec *collection_rec,
2627 rtx reg, rtx *loc,
2628 basic_block bb, rtx insn,
2629 enum df_ref_type ref_type,
2630 enum df_ref_flags ref_flags)
2631 {
2632 unsigned int regno;
2633
2634 gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
2635
2636 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2637 if (regno < FIRST_PSEUDO_REGISTER)
2638 {
2639 struct df_mw_hardreg *hardreg = NULL;
2640 struct df_scan_problem_data *problem_data
2641 = (struct df_scan_problem_data *) df_scan->problem_data;
2642 unsigned int i;
2643 unsigned int endregno;
2644 struct df_ref *ref;
2645
2646 if (GET_CODE (reg) == SUBREG)
2647 {
2648 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
2649 SUBREG_BYTE (reg), GET_MODE (reg));
2650 endregno = regno + subreg_nregs (reg);
2651 }
2652 else
2653 endregno = END_HARD_REGNO (reg);
2654
2655 /* If this is a multiword hardreg, we create some extra
2656 datastructures that will enable us to easily build REG_DEAD
2657 and REG_UNUSED notes. */
2658 if ((endregno != regno + 1) && insn)
2659 {
2660 /* Sets to a subreg of a multiword register are partial.
2661 Sets to a non-subreg of a multiword register are not. */
2662 if (GET_CODE (reg) == SUBREG)
2663 ref_flags |= DF_REF_PARTIAL;
2664 ref_flags |= DF_REF_MW_HARDREG;
2665
2666 hardreg = pool_alloc (problem_data->mw_reg_pool);
2667 hardreg->type = ref_type;
2668 hardreg->flags = ref_flags;
2669 hardreg->mw_reg = reg;
2670 hardreg->start_regno = regno;
2671 hardreg->end_regno = endregno - 1;
2672 hardreg->mw_order = df->ref_order++;
2673 collection_rec->mw_vec[collection_rec->next_mw++] = hardreg;
2674 }
2675
2676 for (i = regno; i < endregno; i++)
2677 {
2678 ref = df_ref_create_structure (collection_rec, regno_reg_rtx[i], loc,
2679 bb, insn, ref_type, ref_flags);
2680
2681 gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
2682 }
2683 }
2684 else
2685 df_ref_create_structure (collection_rec, reg, loc, bb, insn,
2686 ref_type, ref_flags);
2690 }
2691
2692
2693 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
2694 covered by the outer mode is smaller than that covered by the inner mode,
2695 is a read-modify-write operation.
2696 This function returns true iff the SUBREG X is such a SUBREG. */
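/* For example, assuming a target whose REGMODE_NATURAL_SIZE is a
   32-bit word, a set of (subreg:SI (reg:DI x) 0) overwrites only
   half of X, and so is a read-modify-write of X. */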
2697
2698 bool
2699 df_read_modify_subreg_p (rtx x)
2700 {
2701 unsigned int isize, osize;
2702 if (GET_CODE (x) != SUBREG)
2703 return false;
2704 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
2705 osize = GET_MODE_SIZE (GET_MODE (x));
2706 return isize > osize
2707 && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
2708 }
2709
2710
2711 /* Process all the registers defined in the rtx, X.
2712 Autoincrement/decrement definitions will be picked up by
2713 df_uses_record. */
2714
2715 static void
2716 df_def_record_1 (struct df_collection_rec *collection_rec,
2717 rtx x, basic_block bb, rtx insn,
2718 enum df_ref_flags flags)
2719 {
2720 rtx *loc;
2721 rtx dst;
2722
2723 /* We may recursively call ourselves on EXPR_LIST when dealing with PARALLEL
2724 construct. */
2725 if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
2726 loc = &XEXP (x, 0);
2727 else
2728 loc = &SET_DEST (x);
2729 dst = *loc;
2730
2731 /* It is legal to have a set destination be a parallel. */
2732 if (GET_CODE (dst) == PARALLEL)
2733 {
2734 int i;
2735
2736 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
2737 {
2738 rtx temp = XVECEXP (dst, 0, i);
2739 if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
2740 || GET_CODE (temp) == SET)
2741 df_def_record_1 (collection_rec,
2742 temp, bb, insn,
2743 GET_CODE (temp) == CLOBBER
2744 ? flags | DF_REF_MUST_CLOBBER : flags);
2745 }
2746 return;
2747 }
2748
2749 /* Maybe, we should flag the use of STRICT_LOW_PART somehow. It might
2750 be handy for the reg allocator. */
2751 while (GET_CODE (dst) == STRICT_LOW_PART
2752 || GET_CODE (dst) == ZERO_EXTRACT)
2753 {
2754 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
2755 if (GET_CODE (dst) == ZERO_EXTRACT)
2756 flags |= DF_REF_EXTRACT;
2757 else
2758 flags |= DF_REF_STRICT_LOWER_PART;
2759
2760 loc = &XEXP (dst, 0);
2761 dst = *loc;
2762 }
2763
2764 /* At this point if we do not have a reg or a subreg, just return. */
2765 if (REG_P (dst))
2766 {
2767 df_ref_record (collection_rec,
2768 dst, loc, bb, insn, DF_REF_REG_DEF, flags);
2769
2770 /* We want to keep sp alive everywhere - by making all
2771 writes to sp also use of sp. */
2772 if (REGNO (dst) == STACK_POINTER_REGNUM)
2773 df_ref_record (collection_rec,
2774 dst, NULL, bb, insn, DF_REF_REG_USE, flags);
2775 }
2776 else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
2777 {
2778 if (df_read_modify_subreg_p (dst))
2779 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
2780
2781 flags |= DF_REF_SUBREG;
2782
2783 df_ref_record (collection_rec,
2784 dst, loc, bb, insn, DF_REF_REG_DEF, flags);
2785 }
2786 }
2787
2788
2789 /* Process all the registers defined in the pattern rtx, X. */
2790
2791 static void
2792 df_defs_record (struct df_collection_rec *collection_rec,
2793 rtx x, basic_block bb, rtx insn, enum df_ref_flags flags)
2794 {
2795 RTX_CODE code = GET_CODE (x);
2796
2797 if (code == SET || code == CLOBBER)
2798 {
2799 /* Mark the single def within the pattern. */
2800 enum df_ref_flags clobber_flags = flags;
2801 clobber_flags |= (code == CLOBBER) ? DF_REF_MUST_CLOBBER : 0;
2802 df_def_record_1 (collection_rec, x, bb, insn, clobber_flags);
2803 }
2804 else if (code == COND_EXEC)
2805 {
2806 df_defs_record (collection_rec, COND_EXEC_CODE (x),
2807 bb, insn, DF_REF_CONDITIONAL);
2808 }
2809 else if (code == PARALLEL)
2810 {
2811 int i;
2812
2813 /* Mark the multiple defs within the pattern. */
2814 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2815 df_defs_record (collection_rec, XVECEXP (x, 0, i), bb, insn, flags);
2816 }
2817 }
2818
2819
2820 /* Process all the registers used in the rtx at address LOC. */
2821
2822 static void
2823 df_uses_record (struct df_collection_rec *collection_rec,
2824 rtx *loc, enum df_ref_type ref_type,
2825 basic_block bb, rtx insn, enum df_ref_flags flags)
2826 {
2827 RTX_CODE code;
2828 rtx x;
2829
2830 retry:
2831 x = *loc;
2832 if (!x)
2833 return;
2834 code = GET_CODE (x);
2835 switch (code)
2836 {
2837 case LABEL_REF:
2838 case SYMBOL_REF:
2839 case CONST_INT:
2840 case CONST:
2841 case CONST_DOUBLE:
2842 case CONST_FIXED:
2843 case CONST_VECTOR:
2844 case PC:
2845 case CC0:
2846 case ADDR_VEC:
2847 case ADDR_DIFF_VEC:
2848 return;
2849
2850 case CLOBBER:
2851 /* If we are clobbering a MEM, mark any registers inside the address
2852 as being used. */
2853 if (MEM_P (XEXP (x, 0)))
2854 df_uses_record (collection_rec,
2855 &XEXP (XEXP (x, 0), 0),
2856 DF_REF_REG_MEM_STORE, bb, insn, flags);
2857
2858 /* If we're clobbering a REG then we have a def so ignore. */
2859 return;
2860
2861 case MEM:
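/* The address is a plain use; only the IN_NOTE flag is propagated
   into scanning it. */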
2862 df_uses_record (collection_rec,
2863 &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
2864 bb, insn, flags & DF_REF_IN_NOTE);
2865 return;
2866
2867 case SUBREG:
2868 /* While we're here, optimize this case. */
2869 flags |= DF_REF_PARTIAL;
2870 /* In case the SUBREG is not of a REG, do not optimize. */
2871 if (!REG_P (SUBREG_REG (x)))
2872 {
2873 loc = &SUBREG_REG (x);
2874 df_uses_record (collection_rec, loc, ref_type, bb, insn, flags);
2875 return;
2876 }
2877 /* ... Fall through ... */
2878
2879 case REG:
2880 df_ref_record (collection_rec,
2881 x, loc, bb, insn, ref_type, flags);
2882 return;
2883
2884 case SET:
2885 {
2886 rtx dst = SET_DEST (x);
2887 gcc_assert (!(flags & DF_REF_IN_NOTE));
2888 df_uses_record (collection_rec,
2889 &SET_SRC (x), DF_REF_REG_USE, bb, insn, flags);
2890
2891 switch (GET_CODE (dst))
2892 {
2893 case SUBREG:
2894 if (df_read_modify_subreg_p (dst))
2895 {
2896 df_uses_record (collection_rec, &SUBREG_REG (dst),
2897 DF_REF_REG_USE, bb, insn,
2898 flags | DF_REF_READ_WRITE | DF_REF_SUBREG);
2899 break;
2900 }
2901 /* Fall through. */
2902 case REG:
2903 case PARALLEL:
2904 case SCRATCH:
2905 case PC:
2906 case CC0:
2907 break;
2908 case MEM:
2909 df_uses_record (collection_rec, &XEXP (dst, 0),
2910 DF_REF_REG_MEM_STORE, bb, insn, flags);
2911 break;
2912 case STRICT_LOW_PART:
2913 {
2914 rtx *temp = &XEXP (dst, 0);
2915 /* A strict_low_part uses the whole REG and not just the
2916 SUBREG. */
2917 dst = XEXP (dst, 0);
2918 df_uses_record (collection_rec,
2919 (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
2920 DF_REF_REG_USE, bb, insn,
2921 DF_REF_READ_WRITE | DF_REF_STRICT_LOWER_PART);
2922 }
2923 break;
2924 case ZERO_EXTRACT:
2925 case SIGN_EXTRACT:
2926 df_uses_record (collection_rec, &XEXP (dst, 0),
2927 DF_REF_REG_USE, bb, insn,
2928 DF_REF_READ_WRITE | DF_REF_EXTRACT);
2929 df_uses_record (collection_rec, &XEXP (dst, 1),
2930 DF_REF_REG_USE, bb, insn, flags);
2931 df_uses_record (collection_rec, &XEXP (dst, 2),
2932 DF_REF_REG_USE, bb, insn, flags);
2933 dst = XEXP (dst, 0);
2934 break;
2935 default:
2936 gcc_unreachable ();
2937 }
2938 return;
2939 }
2940
2941 case RETURN:
2942 break;
2943
2944 case ASM_OPERANDS:
2945 case UNSPEC_VOLATILE:
2946 case TRAP_IF:
2947 case ASM_INPUT:
2948 {
2949 /* Traditional and volatile asm instructions must be
2950 considered to use and clobber all hard registers, all
2951 pseudo-registers and all of memory. So must TRAP_IF and
2952 UNSPEC_VOLATILE operations.
2953
2954 Consider for instance a volatile asm that changes the fpu
2955 rounding mode. An insn should not be moved across this
2956 even if it only uses pseudo-regs because it might give an
2957 incorrectly rounded result.
2958
2959 However, flow.c's liveness computation did *not* do this,
2960 giving the reasoning as " ?!? Unfortunately, marking all
2961 hard registers as live causes massive problems for the
2962 register allocator and marking all pseudos as live creates
2963 mountains of uninitialized variable warnings."
2964
2965 In order to maintain the status quo with regard to liveness
2966 and uses, we do what flow.c did and just mark any regs we
2967 can find in ASM_OPERANDS as used. In global.c, asm insns are
2968 scanned and regs_asm_clobbered is filled out.
2969
2970 For all ASM_OPERANDS, we must traverse the vector of input
2971 operands. We cannot just fall through here since then we
2972 would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
2973 which does not indicate a traditional asm unlike its normal
2974 usage. */
2975 if (code == ASM_OPERANDS)
2976 {
2977 int j;
2978
2979 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
2980 df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
2981 DF_REF_REG_USE, bb, insn, flags);
2982 return;
2983 }
2984 break;
2985 }
2986
2987 case PRE_DEC:
2988 case POST_DEC:
2989 case PRE_INC:
2990 case POST_INC:
2991 case PRE_MODIFY:
2992 case POST_MODIFY:
2993 /* Catch the def of the register being modified. */
2994 df_ref_record (collection_rec, XEXP (x, 0), &XEXP (x, 0), bb, insn,
2995 DF_REF_REG_DEF,
2996 flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY);
2997
2998 /* ... Fall through to handle uses ... */
2999
3000 default:
3001 break;
3002 }
3003
3004 /* Recursively scan the operands of this expression. */
3005 {
3006 const char *fmt = GET_RTX_FORMAT (code);
3007 int i;
3008
3009 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3010 {
3011 if (fmt[i] == 'e')
3012 {
3013 /* Tail recursive case: save a function call level. */
3014 if (i == 0)
3015 {
3016 loc = &XEXP (x, 0);
3017 goto retry;
3018 }
3019 df_uses_record (collection_rec, &XEXP (x, i), ref_type, bb, insn, flags);
3020 }
3021 else if (fmt[i] == 'E')
3022 {
3023 int j;
3024 for (j = 0; j < XVECLEN (x, i); j++)
3025 df_uses_record (collection_rec,
3026 &XVECEXP (x, i, j), ref_type, bb, insn, flags);
3027 }
3028 }
3029 }
3030
3031 return;
3032 }
3033
3034
3035 /* For all DF_REF_CONDITIONAL defs, add corresponding uses; a conditional def may leave the old register value live. */
3036
3037 static void
3038 df_get_conditional_uses (struct df_collection_rec *collection_rec)
3039 {
3040 unsigned int i;
3041 for (i = 0; i < collection_rec->next_def; i++)
3042 {
3043 struct df_ref *ref = collection_rec->def_vec[i];
3044 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
3045 {
3046 struct df_ref *use
3047 = df_ref_create_structure (collection_rec, DF_REF_REG (ref),
3048 DF_REF_LOC (ref), DF_REF_BB (ref),
3049 DF_REF_INSN (ref), DF_REF_REG_USE,
3050 DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL);
3051 DF_REF_REGNO (use) = DF_REF_REGNO (ref);
3052 }
3053 }
3054 }
3055
3056
3057 /* Get call's extra defs and uses. */
3058
3059 static void
3060 df_get_call_refs (struct df_collection_rec * collection_rec,
3061 basic_block bb,
3062 rtx insn,
3063 enum df_ref_flags flags)
3064 {
3065 rtx note;
3066 bitmap_iterator bi;
3067 unsigned int ui;
3068 bool is_sibling_call;
3069 unsigned int i;
3070 bitmap defs_generated = BITMAP_ALLOC (&df_bitmap_obstack);
3071
3072 /* Do not generate clobbers for registers that are the result of the
3073 call, since that causes ordering problems in the chain building code
3074 depending on which def is seen first. */
3075 for (i = 0; i < collection_rec->next_def; i++)
3076 {
3077 struct df_ref *def = collection_rec->def_vec[i];
3078 bitmap_set_bit (defs_generated, DF_REF_REGNO (def));
3079 }
3080
3081 /* Record the registers used to pass arguments, and those
3082 explicitly noted as clobbered. */
3083 for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
3084 note = XEXP (note, 1))
3085 {
3086 if (GET_CODE (XEXP (note, 0)) == USE)
3087 df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
3088 DF_REF_REG_USE, bb, insn, flags);
3089 else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3090 {
3091 if (REG_P (XEXP (XEXP (note, 0), 0)))
3092 {
3093 unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
3094 if (!bitmap_bit_p (defs_generated, regno))
3095 df_defs_record (collection_rec, XEXP (note, 0), bb,
3096 insn, flags);
3097 }
3098 else
3099 df_uses_record (collection_rec, &XEXP (note, 0),
3100 DF_REF_REG_USE, bb, insn, flags);
3101 }
3102 }
3103
3104 /* The stack ptr is used (honorarily) by a CALL insn. */
3105 df_ref_record (collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
3106 NULL, bb, insn, DF_REF_REG_USE, DF_REF_CALL_STACK_USAGE | flags);
3107
3108 /* Calls may also reference any of the global registers,
3109 so they are recorded as used. */
3110 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3111 if (global_regs[i])
3112 {
3113 df_ref_record (collection_rec, regno_reg_rtx[i],
3114 NULL, bb, insn, DF_REF_REG_USE, flags);
3115 df_ref_record (collection_rec, regno_reg_rtx[i],
3116 NULL, bb, insn, DF_REF_REG_DEF, flags);
3117 }
3118
3119 is_sibling_call = SIBLING_CALL_P (insn);
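/* Every register invalidated by the call gets a may-clobber def,
   except the global regs (already recorded above), regs the call
   pattern itself defines, and, for a sibling call, regs that are
   live into the exit block and not mentioned in the return value. */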
3120 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
3121 {
3122 if (!global_regs[ui]
3123 && (!bitmap_bit_p (defs_generated, ui))
3124 && (!is_sibling_call
3125 || !bitmap_bit_p (df->exit_block_uses, ui)
3126 || refers_to_regno_p (ui, ui+1,
3127 current_function_return_rtx, NULL)))
3128 df_ref_record (collection_rec, regno_reg_rtx[ui],
3129 NULL, bb, insn, DF_REF_REG_DEF, DF_REF_MAY_CLOBBER | flags);
3130 }
3131
3132 BITMAP_FREE (defs_generated);
3133 return;
3134 }
3135
3136 /* Collect all refs in the INSN. This function is free of any
3137 side-effects - it creates and returns lists of df_ref's in the
3138 COLLECTION_REC without putting those refs into the existing ref
3139 chains or reg chains. */
3140
3141 static void
3142 df_insn_refs_collect (struct df_collection_rec* collection_rec,
3143 basic_block bb, rtx insn)
3144 {
3145 rtx note;
3146 bool is_cond_exec = (GET_CODE (PATTERN (insn)) == COND_EXEC);
3147
3148 /* Clear out the collection record. */
3149 collection_rec->next_def = 0;
3150 collection_rec->next_use = 0;
3151 collection_rec->next_eq_use = 0;
3152 collection_rec->next_mw = 0;
3153
3154 /* Record register defs. */
3155 df_defs_record (collection_rec, PATTERN (insn), bb, insn, 0);
3156
3157 /* Process REG_EQUIV/REG_EQUAL notes */
3158 for (note = REG_NOTES (insn); note;
3159 note = XEXP (note, 1))
3160 {
3161 switch (REG_NOTE_KIND (note))
3162 {
3163 case REG_EQUIV:
3164 case REG_EQUAL:
3165 df_uses_record (collection_rec,
3166 &XEXP (note, 0), DF_REF_REG_USE,
3167 bb, insn, DF_REF_IN_NOTE);
3168 break;
3169 case REG_NON_LOCAL_GOTO:
3170 /* The frame ptr is used by a non-local goto. */
3171 df_ref_record (collection_rec,
3172 regno_reg_rtx[FRAME_POINTER_REGNUM],
3173 NULL,
3174 bb, insn,
3175 DF_REF_REG_USE, 0);
3176 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3177 df_ref_record (collection_rec,
3178 regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
3179 NULL,
3180 bb, insn,
3181 DF_REF_REG_USE, 0);
3182 #endif
3183 break;
3184 default:
3185 break;
3186 }
3187 }
3188
3189 if (CALL_P (insn))
3190 df_get_call_refs (collection_rec, bb, insn,
3191 (is_cond_exec) ? DF_REF_CONDITIONAL : 0);
3192
3193 /* Record the register uses. */
3194 df_uses_record (collection_rec,
3195 &PATTERN (insn), DF_REF_REG_USE, bb, insn, 0);
3196
3197 /* DF_REF_CONDITIONAL needs corresponding USES. */
3198 if (is_cond_exec)
3199 df_get_conditional_uses (collection_rec);
3200
3201 df_canonize_collection_rec (collection_rec);
3202 }
3203
3204 /* Recompute the luids for the insns in BB. */
3205
3206 void
3207 df_recompute_luids (basic_block bb)
3208 {
3209 rtx insn;
3210 int luid = 0;
3211
3212 df_grow_insn_info ();
3213
3214 /* Scan the block an insn at a time from beginning to end. */
3215 FOR_BB_INSNS (bb, insn)
3216 {
3217 struct df_insn_info *insn_info = DF_INSN_GET (insn);
3218 /* Inserting labels does not always trigger the incremental
3219 rescanning. */
3220 if (!insn_info)
3221 {
3222 gcc_assert (!INSN_P (insn));
3223 df_insn_create_insn_record (insn);
3224 }
3225
3226 DF_INSN_LUID (insn) = luid;
3227 if (INSN_P (insn))
3228 luid++;
3229 }
3230 }
3231
3232
3233 /* Returns true if the function entry needs to
3234 define the static chain register. */
3235
3236 static bool
3237 df_need_static_chain_reg (struct function *fun)
3238 {
3239 tree fun_context = decl_function_context (fun->decl);
3240 return fun_context
3241 && !DECL_NO_STATIC_CHAIN (fun_context);
3242 }
3243
3244
3245 /* Collect all artificial refs at the block level for BB and add them
3246 to COLLECTION_REC. */
3247
3248 static void
3249 df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
3250 {
3251 collection_rec->next_def = 0;
3252 collection_rec->next_use = 0;
3253 collection_rec->next_eq_use = 0;
3254 collection_rec->next_mw = 0;
3255
3256 if (bb->index == ENTRY_BLOCK)
3257 {
3258 df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
3259 return;
3260 }
3261 else if (bb->index == EXIT_BLOCK)
3262 {
3263 df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
3264 return;
3265 }
3266
3267 #ifdef EH_RETURN_DATA_REGNO
3268 if (bb_has_eh_pred (bb))
3269 {
3270 unsigned int i;
3271 /* Mark the registers that will contain data for the handler. */
3272 for (i = 0; ; ++i)
3273 {
3274 unsigned regno = EH_RETURN_DATA_REGNO (i);
3275 if (regno == INVALID_REGNUM)
3276 break;
3277 df_ref_record (collection_rec, regno_reg_rtx[regno], NULL,
3278 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3279 }
3280 }
3281 #endif
3282
3283
3284 #ifdef EH_USES
3285 if (bb_has_eh_pred (bb))
3286 {
3287 unsigned int i;
3288 /* This code is putting in an artificial ref for the use at the
3289 TOP of the block that receives the exception. It is too
3290 cumbersome to actually put the ref on the edge. We could
3291 either model this at the top of the receiver block or the
3292 bottom of the sender block.
3293
3294 The bottom of the sender block is problematic because not all
3295 out-edges of a block are eh-edges. However, it is true
3296 that all edges into a block are either eh-edges or none of
3297 them are eh-edges. Thus, we can model this at the top of the
3298 eh-receiver for all of the edges at once. */
3299 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3300 if (EH_USES (i))
3301 df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
3302 bb, NULL, DF_REF_REG_USE, DF_REF_AT_TOP);
3303 }
3304 #endif
3305
3306 /* Add the hard_frame_pointer if this block is the target of a
3307 non-local goto. */
3308 if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
3309 df_ref_record (collection_rec, hard_frame_pointer_rtx, NULL,
3310 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3311
3312 /* Add the artificial uses. */
3313 if (bb->index >= NUM_FIXED_BLOCKS)
3314 {
3315 bitmap_iterator bi;
3316 unsigned int regno;
3317 bitmap au = bb_has_eh_pred (bb)
3318 ? df->eh_block_artificial_uses
3319 : df->regular_block_artificial_uses;
3320
3321 EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
3322 {
3323 df_ref_record (collection_rec, regno_reg_rtx[regno], NULL,
3324 bb, NULL, DF_REF_REG_USE, 0);
3325 }
3326 }
3327
3328 df_canonize_collection_rec (collection_rec);
3329 }
3330
3331
3332 /* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS. */
3333
3334 void
3335 df_bb_refs_record (int bb_index, bool scan_insns)
3336 {
3337 basic_block bb = BASIC_BLOCK (bb_index);
3338 rtx insn;
3339 int luid = 0;
3340 struct df_scan_bb_info *bb_info;
3341 struct df_collection_rec collection_rec;
3342 collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
3343 collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
3344 collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
3345 collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
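/* Scratch collection vectors on the stack; the sizes are assumed to
   bound the refs that a single insn, or the block's artificial refs,
   can produce. */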
3346
3347 if (!df)
3348 return;
3349
3350 bb_info = df_scan_get_bb_info (bb_index);
3351
3352 /* Need to make sure that there is a record in the basic block info. */
3353 if (!bb_info)
3354 {
3355 bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
3356 df_scan_set_bb_info (bb_index, bb_info);
3357 bb_info->artificial_defs = NULL;
3358 bb_info->artificial_uses = NULL;
3359 }
3360
3361 if (scan_insns)
3362 /* Scan the block an insn at a time from beginning to end. */
3363 FOR_BB_INSNS (bb, insn)
3364 {
3365 struct df_insn_info *insn_info = DF_INSN_GET (insn);
3366 gcc_assert (!insn_info);
3367
3368 df_insn_create_insn_record (insn);
3369 if (INSN_P (insn))
3370 {
3371 /* Record refs within INSN. */
3372 DF_INSN_LUID (insn) = luid++;
3373 df_insn_refs_collect (&collection_rec, bb, insn);
3374 df_refs_add_to_chains (&collection_rec, bb, insn);
3375 }
3376 DF_INSN_LUID (insn) = luid;
3377 }
3378
3379 /* Other block level artificial refs */
3380 df_bb_refs_collect (&collection_rec, bb);
3381 df_refs_add_to_chains (&collection_rec, bb, NULL);
3382
3383 /* Now that the block has been processed, set the block as dirty so
3384 LR and LIVE will get it processed. */
3385 df_set_bb_dirty (bb);
3386 }
3387
3388
3389 /* Get the artificial use set for a regular (i.e. non-exit/non-entry)
3390 block. */
3391
3392 static void
3393 df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
3394 {
3395 bitmap_clear (regular_block_artificial_uses);
3396
3397 if (reload_completed)
3398 {
3399 if (frame_pointer_needed)
3400 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3401 }
3402 else
3403 /* Before reload, there are a few registers that must be forced
3404 live everywhere -- which might not already be the case for
3405 blocks within infinite loops. */
3406 {
3407 /* Any reference to any pseudo before reload is a potential
3408 reference of the frame pointer. */
3409 bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);
3410
3411 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3412 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3413 #endif
3414
3415 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3416 /* Pseudos with argument area equivalences may require
3417 reloading via the argument pointer. */
3418 if (fixed_regs[ARG_POINTER_REGNUM])
3419 bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
3420 #endif
3421
3422 /* Any constant, or pseudo with constant equivalences, may
3423 require reloading from memory using the pic register. */
3424 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3425 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3426 bitmap_set_bit (regular_block_artificial_uses, PIC_OFFSET_TABLE_REGNUM);
3427 }
3428 /* The all-important stack pointer must always be live. */
3429 bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);
3430 }
3431
3432
3433 /* Get the artificial use set for an eh block. */
3434
3435 static void
3436 df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
3437 {
3438 bitmap_clear (eh_block_artificial_uses);
3439
3440 /* The following code (down thru the arg_pointer setting) APPEARS
3441 to be necessary because there is nothing that actually
3442 describes what the exception handling code may actually need
3443 to keep alive. */
3444 if (reload_completed)
3445 {
3446 if (frame_pointer_needed)
3447 {
3448 bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
3449 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3450 bitmap_set_bit (eh_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3451 #endif
3452 }
3453 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3454 if (fixed_regs[ARG_POINTER_REGNUM])
3455 bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
3456 #endif
3457 }
3458 }
3459
3460
3461 \f
3462 /*----------------------------------------------------------------------------
3463 Specialized hard register scanning functions.
3464 ----------------------------------------------------------------------------*/
3465
3466
3467 /* Mark a register in SET. Hard registers in large modes get all
3468 of their component registers set as well. */
3469
3470 static void
3471 df_mark_reg (rtx reg, void *vset)
3472 {
3473 bitmap set = (bitmap) vset;
3474 int regno = REGNO (reg);
3475
3476 gcc_assert (GET_MODE (reg) != BLKmode);
3477
3478 bitmap_set_bit (set, regno);
3479 if (regno < FIRST_PSEUDO_REGISTER)
3480 {
3481 int n = hard_regno_nregs[regno][GET_MODE (reg)];
3482 while (--n > 0)
3483 bitmap_set_bit (set, regno + n);
3484 }
3485 }
3486
3487
3488 /* Set the bits for regs that are considered to be defined at the entry. */
3489
3490 static void
3491 df_get_entry_block_def_set (bitmap entry_block_defs)
3492 {
3493 rtx r;
3494 int i;
3495
3496 bitmap_clear (entry_block_defs);
3497
3498 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3499 {
3500 if (FUNCTION_ARG_REGNO_P (i))
3501 #ifdef INCOMING_REGNO
3502 bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
3503 #else
3504 bitmap_set_bit (entry_block_defs, i);
3505 #endif
3506 }
3507
3508 /* Once the prologue has been generated, all of these registers
3509 should just show up in the first regular block. */
3510 if (HAVE_prologue && epilogue_completed)
3511 {
3512 /* Defs for the callee saved registers are inserted so that the
3513 pushes have some defining location. */
3514 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3515 if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
3516 bitmap_set_bit (entry_block_defs, i);
3517 }
3518 else
3519 {
3520 /* The always important stack pointer. */
3521 bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
3522
3523 /* If STATIC_CHAIN_INCOMING_REGNUM == STATIC_CHAIN_REGNUM
3524 only STATIC_CHAIN_REGNUM is defined. If they are different,
3525 we only care about the STATIC_CHAIN_INCOMING_REGNUM. */
3526 #ifdef STATIC_CHAIN_INCOMING_REGNUM
3527 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
3528 #else
3529 #ifdef STATIC_CHAIN_REGNUM
3530 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
3531 #endif
3532 #endif
3533 }
3534
3535 r = targetm.calls.struct_value_rtx (current_function_decl, true);
3536 if (r && REG_P (r))
3537 bitmap_set_bit (entry_block_defs, REGNO (r));
3538
3539 if ((!reload_completed) || frame_pointer_needed)
3540 {
3541 /* Any reference to any pseudo before reload is a potential
3542 reference of the frame pointer. */
3543 bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
3544 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3545 /* If they are different, also mark the hard frame pointer as live. */
3546 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3547 bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
3548 #endif
3549 }
3550
3551 /* These registers are live everywhere. */
3552 if (!reload_completed)
3553 {
3554 #ifdef EH_USES
3555 /* The ia-64, the only machine that uses this, does not define these
3556 until after reload. */
3557 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3558 if (EH_USES (i))
3559 {
3560 bitmap_set_bit (entry_block_defs, i);
3561 }
3562 #endif
3563
3564 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3565 /* Pseudos with argument area equivalences may require
3566 reloading via the argument pointer. */
3567 if (fixed_regs[ARG_POINTER_REGNUM])
3568 bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
3569 #endif
3570
3571 #ifdef PIC_OFFSET_TABLE_REGNUM
3572 /* Any constant, or pseudo with constant equivalences, may
3573 require reloading from memory using the pic register. */
3574 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3575 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3576 bitmap_set_bit (entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
3577 #endif
3578 }
3579
3580 #ifdef INCOMING_RETURN_ADDR_RTX
3581 if (REG_P (INCOMING_RETURN_ADDR_RTX))
3582 bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
3583 #endif
3584
3585 targetm.live_on_entry (entry_block_defs);
3586
3587 /* If the function has an incoming STATIC_CHAIN,
3588 it has to show up in the entry def set. */
3589 if (df_need_static_chain_reg (cfun))
3590 {
3591 #ifdef STATIC_CHAIN_INCOMING_REGNUM
3592 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
3593 #else
3594 #ifdef STATIC_CHAIN_REGNUM
3595 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
3596 #endif
3597 #endif
3598 }
3599 }
3600
3601
3602 /* Collect the (conservative) set of hard registers that are defined
3603 on entry to the function.
3604 It uses ENTRY_BLOCK_DEFS to determine which register
3605 references to include. */
3606
3607 static void
3608 df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
3609 bitmap entry_block_defs)
3610 {
3611 unsigned int i;
3612 bitmap_iterator bi;
3613
3614 EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3615 {
3616 df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
3617 ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0);
3618 }
3619
3620 df_canonize_collection_rec (collection_rec);
3621 }
3622
3623
3624 /* Record the (conservative) set of hard registers that are defined on
3625 entry to the function. */
3626
3627 static void
3628 df_record_entry_block_defs (bitmap entry_block_defs)
3629 {
3630 struct df_collection_rec collection_rec;
3631 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3632 collection_rec.def_vec = alloca (sizeof (struct df_ref*) * FIRST_PSEUDO_REGISTER);
3633
3634 df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3635
3636 /* Process bb_refs chain */
3637 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL);
3638 }
3639
3640
3641 /* Update the defs in the entry block. */
3642
3643 void
3644 df_update_entry_block_defs (void)
3645 {
3646 bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
3647 bool changed = false;
3648
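/* Recompute the entry def set; the artificial defs are rebuilt only
   if it differs from the cached df->entry_block_defs. */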
3649 df_get_entry_block_def_set (refs);
3650 if (df->entry_block_defs)
3651 {
3652 if (!bitmap_equal_p (df->entry_block_defs, refs))
3653 {
3654 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
3655 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
3656 df_ref_chain_delete (bb_info->artificial_defs);
3657 bb_info->artificial_defs = NULL;
3658 changed = true;
3659 }
3660 }
3661 else
3662 {
3663 struct df_scan_problem_data *problem_data
3664 = (struct df_scan_problem_data *) df_scan->problem_data;
3665 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3666 changed = true;
3667 }
3668
3669 if (changed)
3670 {
3671 df_record_entry_block_defs (refs);
3672 bitmap_copy (df->entry_block_defs, refs);
3673 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
3674 }
3675 BITMAP_FREE (refs);
3676 }
3677
3678
3679 /* Set the bits for regs that are considered to be used at the exit. */
3680
3681 static void
3682 df_get_exit_block_use_set (bitmap exit_block_uses)
3683 {
3684 unsigned int i;
3685
3686 bitmap_clear (exit_block_uses);
3687
3688 /* Stack pointer is always live at the exit. */
3689 bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);
3690
3691 /* Mark the frame pointer if needed at the end of the function.
3692 If we end up eliminating it, it will be removed from the live
3693 list of each basic block by reload. */
3694
3695 if ((!reload_completed) || frame_pointer_needed)
3696 {
3697 bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
3698 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3699 /* If they are different, also mark the hard frame pointer as live. */
3700 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3701 bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
3702 #endif
3703 }
3704
3705 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3706 /* Many architectures have a GP register even without flag_pic.
3707 Assume the pic register is not in use, or will be handled by
3708 other means, if it is not fixed. */
3709 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3710 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3711 bitmap_set_bit (exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
3712 #endif
3713
3714 /* Mark all global registers, and all registers used by the
3715 epilogue as being live at the end of the function since they
3716 may be referenced by our caller. */
3717 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3718 if (global_regs[i] || EPILOGUE_USES (i))
3719 bitmap_set_bit (exit_block_uses, i);
3720
3721 if (HAVE_epilogue && epilogue_completed)
3722 {
3723 /* Mark all call-saved registers that we actually used. */
3724 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3725 if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
3726 && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
3727 bitmap_set_bit (exit_block_uses, i);
3728 }
3729
3730 #ifdef EH_RETURN_DATA_REGNO
3731 /* Mark the registers that will contain data for the handler. */
3732 if (reload_completed && current_function_calls_eh_return)
3733 for (i = 0; ; ++i)
3734 {
3735 unsigned regno = EH_RETURN_DATA_REGNO (i);
3736 if (regno == INVALID_REGNUM)
3737 break;
3738 bitmap_set_bit (exit_block_uses, regno);
3739 }
3740 #endif
3741
3742 #ifdef EH_RETURN_STACKADJ_RTX
3743 if ((!HAVE_epilogue || ! epilogue_completed)
3744 && current_function_calls_eh_return)
3745 {
3746 rtx tmp = EH_RETURN_STACKADJ_RTX;
3747 if (tmp && REG_P (tmp))
3748 df_mark_reg (tmp, exit_block_uses);
3749 }
3750 #endif
3751
3752 #ifdef EH_RETURN_HANDLER_RTX
3753 if ((!HAVE_epilogue || ! epilogue_completed)
3754 && current_function_calls_eh_return)
3755 {
3756 rtx tmp = EH_RETURN_HANDLER_RTX;
3757 if (tmp && REG_P (tmp))
3758 df_mark_reg (tmp, exit_block_uses);
3759 }
3760 #endif
3761
3762 /* Mark function return value. */
3763 diddle_return_value (df_mark_reg, (void*) exit_block_uses);
3764 }
3765
3766
3767 /* Collect the refs of hard registers that are used in the exit block.
3768 It uses EXIT_BLOCK_USES to determine which registers to include. */
3769
3770 static void
3771 df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
3772 {
3773 unsigned int i;
3774 bitmap_iterator bi;
3775
3776 EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
3777 df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
3778 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
3779
3780 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3781 /* It is deliberate that this is not put in the exit block uses but
3782 I do not know why. */
3783 if (reload_completed
3784 && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
3785 && bb_has_eh_pred (EXIT_BLOCK_PTR)
3786 && fixed_regs[ARG_POINTER_REGNUM])
3787 df_ref_record (collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
3788 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
3789 #endif
3790
3791 df_canonize_collection_rec (collection_rec);
3792 }
3793
3794
3795 /* Record the set of hard registers that are used in the exit block.
3796 It uses df->exit_block_uses to determine which bit to include. */
3797
3798 static void
3799 df_record_exit_block_uses (bitmap exit_block_uses)
3800 {
3801 struct df_collection_rec collection_rec;
3802 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3803 collection_rec.use_vec = alloca (sizeof (struct df_ref*) * FIRST_PSEUDO_REGISTER);
3804
3805 df_exit_block_uses_collect (&collection_rec, exit_block_uses);
3806
3807 /* Process bb_refs chain */
3808 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL);
3809 }
3810
3811
3812 /* Update the uses in the exit block. */
3813
3814 void
3815 df_update_exit_block_uses (void)
3816 {
3817 bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
3818 bool changed = false;
3819
3820 df_get_exit_block_use_set (refs);
3821 if (df->exit_block_uses)
3822 {
3823 if (!bitmap_equal_p (df->exit_block_uses, refs))
3824 {
3825 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
3826 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
3827 df_ref_chain_delete (bb_info->artificial_uses);
3828 bb_info->artificial_uses = NULL;
3829 changed = true;
3830 }
3831 }
3832 else
3833 {
3834 struct df_scan_problem_data *problem_data
3835 = (struct df_scan_problem_data *) df_scan->problem_data;
3836 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3837 changed = true;
3838 }
3839
3840 if (changed)
3841 {
3842 df_record_exit_block_uses (refs);
3843 bitmap_copy (df->exit_block_uses, refs);
3844 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
3845 }
3846 BITMAP_FREE (refs);
3847 }
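
/* Sketch of the update pattern used above (a restatement for
   orientation, not new behavior): recompute the desired use set,
   compare it against the cached df->exit_block_uses, and only if
   the two differ throw away the old artificial uses and rebuild
   them. This keeps exit-block rescanning cheap when nothing has
   changed. */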
3848
3849 static bool initialized = false;
3850
3851
3852 /* Initialize some platform-specific structures. */
3853
3854 void
3855 df_hard_reg_init (void)
3856 {
3857 int i;
3858 #ifdef ELIMINABLE_REGS
3859 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
3860 #endif
3861 if (initialized)
3862 return;
3863
3864 bitmap_obstack_initialize (&persistent_obstack);
3865
3866 /* Record which registers will be eliminated. We use this while
3867 scanning refs. */
3868 CLEAR_HARD_REG_SET (elim_reg_set);
3869
3870 #ifdef ELIMINABLE_REGS
3871 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
3872 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
3873 #else
3874 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
3875 #endif
3876
3877 df_invalidated_by_call = BITMAP_ALLOC (&persistent_obstack);
3878
3879 /* Inconveniently, this is only readily available in hard reg set
3880 form. */
3881 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
3882 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
3883 bitmap_set_bit (df_invalidated_by_call, i);
3884
3885 initialized = true;
3886 }
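
/* Note, illustrative only: because of the INITIALIZED guard above,
   df_hard_reg_init is idempotent, so any early client may call it
   defensively:

     df_hard_reg_init ();   -- first call builds elim_reg_set et al.
     df_hard_reg_init ();   -- subsequent calls return immediately  */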
3887
3888
3889 /* Recompute the parts of scanning that are based on regs_ever_live
3890 because something changed in that array. */
3891
3892 void
3893 df_update_entry_exit_and_calls (void)
3894 {
3895 basic_block bb;
3896
3897 df_update_entry_block_defs ();
3898 df_update_exit_block_uses ();
3899
3900 /* The call insns need to be rescanned because there may be changes
3901 in the set of registers clobbered across the call. */
3902 FOR_EACH_BB (bb)
3903 {
3904 rtx insn;
3905 FOR_BB_INSNS (bb, insn)
3906 {
3907 if (INSN_P (insn) && CALL_P (insn))
3908 df_insn_rescan (insn);
3909 }
3910 }
3911 }
3912
3913
3914 /* Return true if hard REG is actually used in some instruction.
3915 There are a fair number of conditions that affect this count.
3916 See the comment in df.h for df->hard_regs_live_count for the
3917 conditions under which it is set. */
3918
3919 bool
3920 df_hard_reg_used_p (unsigned int reg)
3921 {
3922 gcc_assert (df);
3923 return df->hard_regs_live_count[reg] != 0;
3924 }
3925
3926
3927 /* Return the number of times hard REG is actually used in some
3928 instruction. There are a fair number of conditions that affect
3929 this count. See the comment in df.h for
3930 df->hard_regs_live_count for the conditions under which it is
3931 set. */
3932
3933
3934 unsigned int
3935 df_hard_reg_used_count (unsigned int reg)
3936 {
3937 gcc_assert (df);
3938 return df->hard_regs_live_count[reg];
3939 }
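
/* Illustrative usage sketch for the two accessors above; the
   surrounding pass logic is an assumption for exposition, not code
   from this file. A pass that wants a scratch hard register might
   prefer one with no recorded uses:

     if (df && !df_hard_reg_used_p (regno))
       -- REGNO is not referenced by any scanned insn --

   Note that both accessors gcc_assert that df is initialized, so a
   guard on df being live is needed where scanning may be off. */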
3940
3941
3942 /* Get the value of regs_ever_live[REGNO]. */
3943
3944 bool
3945 df_regs_ever_live_p (unsigned int regno)
3946 {
3947 return regs_ever_live[regno];
3948 }
3949
3950
3951 /* Set regs_ever_live[REGNO] to VALUE. If this causes regs_ever_live
3952 to change, schedule that change for the next update. */
3953
3954 void
3955 df_set_regs_ever_live (unsigned int regno, bool value)
3956 {
3957 if (regs_ever_live[regno] == value)
3958 return;
3959
3960 regs_ever_live[regno] = value;
3961 if (df)
3962 df->redo_entry_and_exit = true;
3963 }
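
/* A minimal sketch of the deferred-update contract, assuming a
   caller that forces the frame pointer live (the scenario is an
   assumption for illustration):

     df_set_regs_ever_live (HARD_FRAME_POINTER_REGNUM, true);
     ...
     df_compute_regs_ever_live (false);

   The first call records the change and sets
   df->redo_entry_and_exit; the second notices that flag and reruns
   df_update_entry_exit_and_calls (see below). */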
3964
3965
3966 /* Recompute the "regs_ever_live" vector from the underlying df
3967 information. If RESET, clear the vector first. */
3968
3969 void
3970 df_compute_regs_ever_live (bool reset)
3971 {
3972 unsigned int i;
3973 bool changed = df->redo_entry_and_exit;
3974
3975 if (reset)
3976 memset (regs_ever_live, 0, sizeof (regs_ever_live));
3977
3978 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3979 if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
3980 {
3981 regs_ever_live[i] = true;
3982 changed = true;
3983 }
3984 if (changed)
3985 df_update_entry_exit_and_calls ();
3986 df->redo_entry_and_exit = false;
3987 }
3988
3989 \f
3990 /*----------------------------------------------------------------------------
3991 Dataflow ref information verification functions.
3992
3993 df_reg_chain_mark (refs, regno, is_def, is_eq_use)
3994 df_reg_chain_verify_unmarked (refs)
3995 df_refs_verify (ref*, ref*, bool)
3996 df_mws_verify (mw*, mw*, bool)
3997 df_insn_refs_verify (collection_rec, bb, insn, bool)
3998 df_bb_refs_verify (bb, refs, bool)
3999 df_bb_verify (bb)
4000 df_exit_block_bitmap_verify (bool)
4001 df_entry_block_bitmap_verify (bool)
4002 df_scan_verify ()
4003 ----------------------------------------------------------------------------*/
4004
4005
4006 /* Mark all refs in the reg chain REFS for register REGNO. Verify
4007 that every ref is in the correct chain. */
4008
4009 static unsigned int
4010 df_reg_chain_mark (struct df_ref *refs, unsigned int regno,
4011 bool is_def, bool is_eq_use)
4012 {
4013 unsigned int count = 0;
4014 struct df_ref *ref;
4015 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4016 {
4017 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4018
4019 /* If there are no def-use or use-def chains, make sure that all
4020 of the chains are clear. */
4021 if (!df_chain)
4022 gcc_assert (!DF_REF_CHAIN (ref));
4023
4024 /* Check to make sure the ref is in the correct chain. */
4025 gcc_assert (DF_REF_REGNO (ref) == regno);
4026 if (is_def)
4027 gcc_assert (DF_REF_TYPE(ref) == DF_REF_REG_DEF);
4028 else
4029 gcc_assert (DF_REF_TYPE(ref) != DF_REF_REG_DEF);
4030
4031 if (is_eq_use)
4032 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
4033 else
4034 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);
4035
4036 if (ref->next_reg)
4037 gcc_assert (ref->next_reg->prev_reg == ref);
4038 count++;
4039 DF_REF_REG_MARK (ref);
4040 }
4041 return count;
4042 }
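
/* Orientation sketch, restating the mark/sweep protocol that the
   verifier below is built on (no new behavior is introduced here):
   df_reg_chain_mark sets DF_REF_MARKED on every ref reachable from
   the reg chains; df_refs_verify, in function-level mode, clears the
   mark on each ref it matches against an insn or block; and
   df_reg_chain_verify_unmarked then checks that no mark survived,
   i.e. that every ref in a reg chain also appears in some insn or
   basic block. */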
4043
4044
4045 /* Verify that all of the registers in the chain are unmarked. */
4046
4047 static void
4048 df_reg_chain_verify_unmarked (struct df_ref *refs)
4049 {
4050 struct df_ref *ref;
4051 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4052 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4053 }
4054
4055
4056 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4057
4058 static bool
4059 df_refs_verify (struct df_ref **new_rec, struct df_ref **old_rec,
4060 bool abort_if_fail)
4061 {
4062 while ((*new_rec) && (*old_rec))
4063 {
4064 if (!df_ref_equal_p (*new_rec, *old_rec))
4065 {
4066 if (abort_if_fail)
4067 gcc_assert (0);
4068 else
4069 return false;
4070 }
4071
4072 /* ABORT_IF_FAIL is only set by the function-level verifier. In
4073 that context, unmark this ref to record that it has been seen. */
4074 if (abort_if_fail)
4075 {
4076 gcc_assert (DF_REF_IS_REG_MARKED (*old_rec));
4077 DF_REF_REG_UNMARK (*old_rec);
4078 }
4079
4080 new_rec++;
4081 old_rec++;
4082 }
4083
4084 if (abort_if_fail)
4085 gcc_assert ((*new_rec == NULL) && (*old_rec == NULL));
4086 else
4087 return ((*new_rec == NULL) && (*old_rec == NULL));
4088 return false;
4089 }
4090
4091
4092 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4093
4094 static bool
4095 df_mws_verify (struct df_mw_hardreg **new_rec, struct df_mw_hardreg **old_rec,
4096 bool abort_if_fail)
4097 {
4098 while ((*new_rec) && (*old_rec))
4099 {
4100 if (!df_mw_equal_p (*new_rec, *old_rec))
4101 {
4102 if (abort_if_fail)
4103 gcc_assert (0);
4104 else
4105 return false;
4106 }
4107 new_rec++;
4108 old_rec++;
4109 }
4110
4111 if (abort_if_fail)
4112 gcc_assert ((*new_rec == NULL) && (*old_rec == NULL));
4113 else
4114 return ((*new_rec == NULL) && (*old_rec == NULL));
4115 return false;
4116 }
4117
4118
4119 /* Return true if the existing insn refs information for INSN is
4120 complete and correct. Otherwise (i.e. if there are any missing
4121 or extra refs), leave the correct refs in COLLECTION_REC.
4122
4123 If ABORT_IF_FAIL, we are called from the function-level verifier:
4124 each ref that is verified has its DF_REF_MARKED() bit cleared so
4125 that df_scan_verify can later check that no marks remain. If it
4126 is false, this is a per-insn check and marks are left alone.
4127
4128 If ABORT_IF_FAIL is set, this function never returns false. */
4129
4130 static bool
4131 df_insn_refs_verify (struct df_collection_rec *collection_rec,
4132 basic_block bb,
4133 rtx insn,
4134 bool abort_if_fail)
4135 {
4136 bool ret1, ret2, ret3, ret4;
4137 unsigned int uid = INSN_UID (insn);
4138
4139 df_insn_refs_collect (collection_rec, bb, insn);
4140
4141 if (!DF_INSN_UID_DEFS (uid))
4142 {
4143 /* The insn_rec was created but it was never filled out. */
4144 if (abort_if_fail)
4145 gcc_assert (0);
4146 else
4147 return false;
4148 }
4149
4150 /* Unfortunately we cannot return early when one of these fails,
4151 because then the remaining marks would not get cleared. */
4152 ret1 = df_refs_verify (collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
4153 abort_if_fail);
4154 ret2 = df_refs_verify (collection_rec->use_vec, DF_INSN_UID_USES (uid),
4155 abort_if_fail);
4156 ret3 = df_refs_verify (collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
4157 abort_if_fail);
4158 ret4 = df_mws_verify (collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
4159 abort_if_fail);
4160 return (ret1 && ret2 && ret3 && ret4);
4161 }
4162
4163
4164 /* Return true if all refs in the basic block are correct and complete.
4165 As a side effect of df_refs_verify, every ref that is verified
4166 has its DF_REF_MARK bit cleared. */
4167
4168 static bool
4169 df_bb_verify (basic_block bb)
4170 {
4171 rtx insn;
4172 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
4173 struct df_collection_rec collection_rec;
4174
4175 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
4176 collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
4177 collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
4178 collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
4179 collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
4180
4181 gcc_assert (bb_info);
4182
4183 /* Scan the block one insn at a time, from end to beginning. */
4184 FOR_BB_INSNS_REVERSE (bb, insn)
4185 {
4186 if (!INSN_P (insn))
4187 continue;
4188 df_insn_refs_verify (&collection_rec, bb, insn, true);
4189 df_free_collection_rec (&collection_rec);
4190 }
4191
4192 /* Do the artificial defs and uses. */
4193 df_bb_refs_collect (&collection_rec, bb);
4194 df_refs_verify (collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
4195 df_refs_verify (collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
4196 df_free_collection_rec (&collection_rec);
4197
4198 return true;
4199 }
4200
4201
4202 /* Returns true if the entry block has correct and complete df_ref set.
4203 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4204
4205 static bool
4206 df_entry_block_bitmap_verify (bool abort_if_fail)
4207 {
4208 bitmap entry_block_defs = BITMAP_ALLOC (&df_bitmap_obstack);
4209 bool is_eq;
4210
4211 df_get_entry_block_def_set (entry_block_defs);
4212
4213 is_eq = bitmap_equal_p (entry_block_defs, df->entry_block_defs);
4214
4215 if (!is_eq && abort_if_fail)
4216 {
4217 print_current_pass (stderr);
4218 fprintf (stderr, "entry_block_defs = ");
4219 df_print_regset (stderr, entry_block_defs);
4220 fprintf (stderr, "df->entry_block_defs = ");
4221 df_print_regset (stderr, df->entry_block_defs);
4222 gcc_assert (0);
4223 }
4224
4225 BITMAP_FREE (entry_block_defs);
4226
4227 return is_eq;
4228 }
4229
4230
4231 /* Returns true if the exit block has correct and complete df_ref set.
4232 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4233
4234 static bool
4235 df_exit_block_bitmap_verify (bool abort_if_fail)
4236 {
4237 bitmap exit_block_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4238 bool is_eq;
4239
4240 df_get_exit_block_use_set (exit_block_uses);
4241
4242 is_eq = bitmap_equal_p (exit_block_uses, df->exit_block_uses);
4243
4244 if (!is_eq && abort_if_fail)
4245 {
4246 print_current_pass (stderr);
4247 fprintf (stderr, "exit_block_uses = ");
4248 df_print_regset (stderr, exit_block_uses);
4249 fprintf (stderr, "df->exit_block_uses = ");
4250 df_print_regset (stderr, df->exit_block_uses);
4251 gcc_assert (0);
4252 }
4253
4254 BITMAP_FREE (exit_block_uses);
4255
4256 return is_eq;
4257 }
4258
4259
4260 /* Verify that the df_ref information for all insns in all blocks
4261 is correct and complete, aborting on the first inconsistency
4262 found. */
4263
4264 void
4265 df_scan_verify (void)
4266 {
4267 unsigned int i;
4268 basic_block bb;
4269 bitmap regular_block_artificial_uses;
4270 bitmap eh_block_artificial_uses;
4271
4272 if (!df)
4273 return;
4274
4275 /* Verification is a four-step process. */
4276
4277 /* (1) All of the refs are marked by walking the reg chains. */
4278 for (i = 0; i < DF_REG_SIZE (df); i++)
4279 {
4280 gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
4281 == DF_REG_DEF_COUNT(i));
4282 gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
4283 == DF_REG_USE_COUNT(i));
4284 gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
4285 == DF_REG_EQ_USE_COUNT(i));
4286 }
4287
4288 /* (2) There are various bitmaps whose value may change over the
4289 course of the compilation. This step recomputes them to make
4290 sure that they have not slipped out of date. */
4291 regular_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4292 eh_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4293
4294 df_get_regular_block_artificial_uses (regular_block_artificial_uses);
4295 df_get_eh_block_artificial_uses (eh_block_artificial_uses);
4296
4297 bitmap_ior_into (eh_block_artificial_uses,
4298 regular_block_artificial_uses);
4299
4300 /* Check artificial_uses bitmaps didn't change. */
4301 gcc_assert (bitmap_equal_p (regular_block_artificial_uses,
4302 df->regular_block_artificial_uses));
4303 gcc_assert (bitmap_equal_p (eh_block_artificial_uses,
4304 df->eh_block_artificial_uses));
4305
4306 BITMAP_FREE (regular_block_artificial_uses);
4307 BITMAP_FREE (eh_block_artificial_uses);
4308
4309 /* Verify the entry and exit blocks. This only checks the bitmaps;
4310 the refs themselves are verified in df_bb_verify. */
4311 df_entry_block_bitmap_verify (true);
4312 df_exit_block_bitmap_verify (true);
4313
4314 /* (3) All of the insns in all of the blocks are traversed and the
4315 marks are cleared both in the artificial refs attached to the
4316 blocks and the real refs inside the insns. It is a failure to
4317 clear a mark that has not been set as this means that the ref in
4318 the block or insn was not in the reg chain. */
4319
4320 FOR_ALL_BB (bb)
4321 df_bb_verify (bb);
4322
4323 /* (4) All of the reg chains are traversed a second time, this time
4324 checking that the marks are clear. A mark that is still set must
4325 come from a ref that is not in any insn or basic block. */
4326
4327 for (i = 0; i < DF_REG_SIZE (df); i++)
4328 {
4329 df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
4330 df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
4331 df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));
4332 }
4333 }
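
/* Illustrative only: a checking build would normally reach this
   verifier through the df-core entry point rather than calling it
   directly; a sketch, assuming an ENABLE_CHECKING-style guard:

     #ifdef ENABLE_CHECKING
     df_verify ();
     #endif

   df_verify (in df-core.c) runs df_scan_verify together with the
   dataflow-problem consistency checks. */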