e011eba9 1/* Scanning of rtl for dataflow analysis.
a2bdd643 2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
0ac758f7 3 2008, 2009 Free Software Foundation, Inc.
e011eba9 4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
8
9This file is part of GCC.
10
11GCC is free software; you can redistribute it and/or modify it under
12the terms of the GNU General Public License as published by the Free
8c4c00c1 13Software Foundation; either version 3, or (at your option) any later
e011eba9 14version.
15
16GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17WARRANTY; without even the implied warranty of MERCHANTABILITY or
18FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19for more details.
20
21You should have received a copy of the GNU General Public License
8c4c00c1 22along with GCC; see the file COPYING3. If not see
23<http://www.gnu.org/licenses/>. */
e011eba9 24
25#include "config.h"
26#include "system.h"
27#include "coretypes.h"
28#include "tm.h"
29#include "rtl.h"
30#include "tm_p.h"
31#include "insn-config.h"
32#include "recog.h"
33#include "function.h"
34#include "regs.h"
35#include "output.h"
36#include "alloc-pool.h"
37#include "flags.h"
38#include "hard-reg-set.h"
39#include "basic-block.h"
40#include "sbitmap.h"
41#include "bitmap.h"
42#include "timevar.h"
fcf2ad9f 43#include "tree.h"
44#include "target.h"
45#include "target-def.h"
e011eba9 46#include "df.h"
3072d30e 47#include "tree-pass.h"
e011eba9 48
f7f05a07 49DEF_VEC_P(df_ref);
50DEF_VEC_ALLOC_P_STACK(df_ref);
51
52#define VEC_df_ref_stack_alloc(alloc) VEC_stack_alloc (df_ref, alloc)
53
54typedef struct df_mw_hardreg *df_mw_hardreg_ptr;
55
56DEF_VEC_P(df_mw_hardreg_ptr);
57DEF_VEC_ALLOC_P_STACK(df_mw_hardreg_ptr);
58
59#define VEC_df_mw_hardreg_ptr_stack_alloc(alloc) \
60 VEC_stack_alloc (df_mw_hardreg_ptr, alloc)
61
e011eba9 62#ifndef HAVE_epilogue
63#define HAVE_epilogue 0
64#endif
65#ifndef HAVE_prologue
66#define HAVE_prologue 0
67#endif
68#ifndef HAVE_sibcall_epilogue
69#define HAVE_sibcall_epilogue 0
70#endif
71
72#ifndef EPILOGUE_USES
73#define EPILOGUE_USES(REGNO) 0
74#endif
75
ce299759 76/* The following two macros free the vecs that hold either the refs or
 77 the mw refs. They are a little tricky because a vec that has 0
 78 elements is special and is not to be freed. */
79#define df_scan_free_ref_vec(V) \
80 do { \
81 if (V && *V) \
82 free (V); \
83 } while (0)
84
85#define df_scan_free_mws_vec(V) \
86 do { \
87 if (V && *V) \
88 free (V); \
89 } while (0)
90
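/* A hedged illustration of the convention the two macros above rely on:
   an insn or block whose ref vector is empty does not own a private
   array; it points at the shared df_null_ref_rec / df_null_mw_rec
   defined below, whose only element is NULL, and that shared record
   must never be freed.  The sketch (kept under #if 0, like the other
   disabled fragments in this file) shows the shape of the test.  */
#if 0
  if (insn_info->defs && *insn_info->defs)
    free (insn_info->defs);	/* Privately allocated, safe to free.  */
  /* else: the shared empty record -- leave it alone.  */
#endif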
e011eba9 91/* The set of hard registers in eliminables[i].from. */
92
93static HARD_REG_SET elim_reg_set;
94
e011eba9 95/* Temporary holder for the refs and multiword hardregs gathered while
 96 scanning, before they are installed in the permanent df chains. */
97
3072d30e 98struct df_collection_rec
99{
f7f05a07 100 VEC(df_ref,stack) *def_vec;
101 VEC(df_ref,stack) *use_vec;
102 VEC(df_ref,stack) *eq_use_vec;
103 VEC(df_mw_hardreg_ptr,stack) *mw_vec;
3072d30e 104};
105
ed6e85ae 106static df_ref df_null_ref_rec[1];
3072d30e 107static struct df_mw_hardreg * df_null_mw_rec[1];
108
ed6e85ae 109static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
3072d30e 110 rtx, rtx *,
158b6cc9 111 basic_block, struct df_insn_info *,
b9c74b4d 112 enum df_ref_type, int ref_flags,
158b6cc9 113 int, int, enum machine_mode);
114static void df_def_record_1 (struct df_collection_rec *, rtx,
115 basic_block, struct df_insn_info *,
b9c74b4d 116 int ref_flags);
158b6cc9 117static void df_defs_record (struct df_collection_rec *, rtx,
118 basic_block, struct df_insn_info *,
b9c74b4d 119 int ref_flags);
ed6e85ae 120static void df_uses_record (enum df_ref_class, struct df_collection_rec *,
3072d30e 121 rtx *, enum df_ref_type,
158b6cc9 122 basic_block, struct df_insn_info *,
b9c74b4d 123 int ref_flags,
bf1f8fbc 124 int, int, enum machine_mode);
e011eba9 125
ed6e85ae 126static df_ref df_ref_create_structure (enum df_ref_class,
127 struct df_collection_rec *, rtx, rtx *,
128 basic_block, struct df_insn_info *,
b9c74b4d 129 enum df_ref_type, int ref_flags,
ed6e85ae 130 int, int, enum machine_mode);
3072d30e 131
132static void df_insn_refs_collect (struct df_collection_rec*,
158b6cc9 133 basic_block, struct df_insn_info *);
3072d30e 134static void df_canonize_collection_rec (struct df_collection_rec *);
135
136static void df_get_regular_block_artificial_uses (bitmap);
137static void df_get_eh_block_artificial_uses (bitmap);
138
139static void df_record_entry_block_defs (bitmap);
140static void df_record_exit_block_uses (bitmap);
141static void df_get_exit_block_use_set (bitmap);
142static void df_get_entry_block_def_set (bitmap);
e011eba9 143static void df_grow_ref_info (struct df_ref_info *, unsigned int);
ed6e85ae 144static void df_ref_chain_delete_du_chain (df_ref *);
145static void df_ref_chain_delete (df_ref *);
e011eba9 146
3072d30e 147static void df_refs_add_to_chains (struct df_collection_rec *,
148 basic_block, rtx);
149
150static bool df_insn_refs_verify (struct df_collection_rec *, basic_block, rtx, bool);
151static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
152static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
ed6e85ae 153static void df_install_ref (df_ref, struct df_reg_info *,
3072d30e 154 struct df_ref_info *, bool);
155
156static int df_ref_compare (const void *, const void *);
157static int df_mw_compare (const void *, const void *);
158
159/* Indexed by hardware reg number, is true if that register is ever
160 used in the current function.
161
162 In df-scan.c, this is set up to record the hard regs used
163 explicitly. Reload adds in the hard regs used for holding pseudo
164 regs. Final uses it to generate the code in the function prologue
165 and epilogue to save and restore registers as needed. */
166
167static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
e011eba9 168\f
169/*----------------------------------------------------------------------------
170 SCANNING DATAFLOW PROBLEM
171
172 There are several ways in which scanning looks just like the other
 173 dataflow problems. It shares all the mechanisms for local info
174 as well as basic block info. Where it differs is when and how often
175 it gets run. It also has no need for the iterative solver.
176----------------------------------------------------------------------------*/
177
178/* Problem data for the scanning dataflow function. */
179struct df_scan_problem_data
180{
ed6e85ae 181 alloc_pool ref_base_pool;
182 alloc_pool ref_artificial_pool;
183 alloc_pool ref_regular_pool;
30de5b55 184 alloc_pool ref_extract_pool;
e011eba9 185 alloc_pool insn_pool;
186 alloc_pool reg_pool;
3e6933a8 187 alloc_pool mw_reg_pool;
3072d30e 188 bitmap_obstack reg_bitmaps;
189 bitmap_obstack insn_bitmaps;
e011eba9 190};
191
192typedef struct df_scan_bb_info *df_scan_bb_info_t;
193
ce299759 194
195/* Internal function to shut down the scanning problem. */
e011eba9 196static void
3072d30e 197df_scan_free_internal (void)
e011eba9 198{
3e6933a8 199 struct df_scan_problem_data *problem_data
3072d30e 200 = (struct df_scan_problem_data *) df_scan->problem_data;
ce299759 201 unsigned int i;
202 basic_block bb;
203
204 /* The vectors that hold the refs are not pool allocated because
205 they come in many sizes. This makes them impossible to delete
206 all at once. */
207 for (i = 0; i < DF_INSN_SIZE(); i++)
208 {
209 struct df_insn_info *insn_info = DF_INSN_UID_GET(i);
210 /* Skip the insns that have no insn_info or have been
211 deleted. */
212 if (insn_info)
213 {
214 df_scan_free_ref_vec (insn_info->defs);
215 df_scan_free_ref_vec (insn_info->uses);
216 df_scan_free_ref_vec (insn_info->eq_uses);
217 df_scan_free_mws_vec (insn_info->mw_hardregs);
218 }
219 }
220
221 FOR_ALL_BB (bb)
222 {
223 unsigned int bb_index = bb->index;
224 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
225 if (bb_info)
226 {
227 df_scan_free_ref_vec (bb_info->artificial_defs);
228 df_scan_free_ref_vec (bb_info->artificial_uses);
229 }
230 }
e011eba9 231
e011eba9 232 free (df->def_info.refs);
3072d30e 233 free (df->def_info.begin);
234 free (df->def_info.count);
e011eba9 235 memset (&df->def_info, 0, (sizeof (struct df_ref_info)));
236
e011eba9 237 free (df->use_info.refs);
3072d30e 238 free (df->use_info.begin);
239 free (df->use_info.count);
e011eba9 240 memset (&df->use_info, 0, (sizeof (struct df_ref_info)));
241
3072d30e 242 free (df->def_regs);
243 df->def_regs = NULL;
244 free (df->use_regs);
245 df->use_regs = NULL;
246 free (df->eq_use_regs);
247 df->eq_use_regs = NULL;
248 df->regs_size = 0;
249 DF_REG_SIZE(df) = 0;
250
e011eba9 251 free (df->insns);
252 df->insns = NULL;
3072d30e 253 DF_INSN_SIZE () = 0;
e011eba9 254
3072d30e 255 free (df_scan->block_info);
256 df_scan->block_info = NULL;
257 df_scan->block_info_size = 0;
e011eba9 258
259 BITMAP_FREE (df->hardware_regs_used);
3072d30e 260 BITMAP_FREE (df->regular_block_artificial_uses);
261 BITMAP_FREE (df->eh_block_artificial_uses);
fcf2ad9f 262 BITMAP_FREE (df->entry_block_defs);
e011eba9 263 BITMAP_FREE (df->exit_block_uses);
3072d30e 264 BITMAP_FREE (df->insns_to_delete);
265 BITMAP_FREE (df->insns_to_rescan);
266 BITMAP_FREE (df->insns_to_notes_rescan);
e011eba9 267
3072d30e 268 free_alloc_pool (df_scan->block_pool);
ed6e85ae 269 free_alloc_pool (problem_data->ref_base_pool);
270 free_alloc_pool (problem_data->ref_artificial_pool);
271 free_alloc_pool (problem_data->ref_regular_pool);
30de5b55 272 free_alloc_pool (problem_data->ref_extract_pool);
e011eba9 273 free_alloc_pool (problem_data->insn_pool);
274 free_alloc_pool (problem_data->reg_pool);
3e6933a8 275 free_alloc_pool (problem_data->mw_reg_pool);
3072d30e 276 bitmap_obstack_release (&problem_data->reg_bitmaps);
277 bitmap_obstack_release (&problem_data->insn_bitmaps);
278 free (df_scan->problem_data);
e011eba9 279}
280
281
282/* Set basic block info. */
283
284static void
3072d30e 285df_scan_set_bb_info (unsigned int index,
e011eba9 286 struct df_scan_bb_info *bb_info)
287{
3072d30e 288 gcc_assert (df_scan);
289 df_grow_bb_info (df_scan);
290 df_scan->block_info[index] = (void *) bb_info;
e011eba9 291}
292
293
294/* Free basic block info. */
295
296static void
3072d30e 297df_scan_free_bb_info (basic_block bb, void *vbb_info)
e011eba9 298{
299 struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
3072d30e 300 unsigned int bb_index = bb->index;
e011eba9 301 if (bb_info)
d0802b39 302 {
3072d30e 303 rtx insn;
304 FOR_BB_INSNS (bb, insn)
305 {
306 if (INSN_P (insn))
 307 /* Delete the scanning info recorded for INSN. */
308 df_insn_delete (bb, INSN_UID (insn));
309 }
310
311 if (bb_index < df_scan->block_info_size)
312 bb_info = df_scan_get_bb_info (bb_index);
313
314 /* Get rid of any artificial uses or defs. */
315 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
316 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
317 df_ref_chain_delete (bb_info->artificial_defs);
318 df_ref_chain_delete (bb_info->artificial_uses);
319 bb_info->artificial_defs = NULL;
320 bb_info->artificial_uses = NULL;
321 pool_free (df_scan->block_pool, bb_info);
d0802b39 322 }
e011eba9 323}
324
325
326/* Allocate the problem data for the scanning problem. This should be
327 called when the problem is created or when the entire function is to
328 be rescanned. */
3072d30e 329void
330df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
e011eba9 331{
e011eba9 332 struct df_scan_problem_data *problem_data;
333 unsigned int insn_num = get_max_uid () + 1;
3072d30e 334 unsigned int block_size = 400;
335 basic_block bb;
e011eba9 336
337 /* Given the number of pools, this is really faster than tearing
338 everything apart. */
3072d30e 339 if (df_scan->problem_data)
340 df_scan_free_internal ();
e011eba9 341
3072d30e 342 df_scan->block_pool
e011eba9 343 = create_alloc_pool ("df_scan_block pool",
344 sizeof (struct df_scan_bb_info),
345 block_size);
346
4c36ffe6 347 problem_data = XNEW (struct df_scan_problem_data);
3072d30e 348 df_scan->problem_data = problem_data;
349 df_scan->computed = true;
e011eba9 350
ed6e85ae 351 problem_data->ref_base_pool
352 = create_alloc_pool ("df_scan ref base",
353 sizeof (struct df_base_ref), block_size);
354 problem_data->ref_artificial_pool
355 = create_alloc_pool ("df_scan ref artificial",
356 sizeof (struct df_artificial_ref), block_size);
357 problem_data->ref_regular_pool
358 = create_alloc_pool ("df_scan ref regular",
359 sizeof (struct df_regular_ref), block_size);
30de5b55 360 problem_data->ref_extract_pool
ed6e85ae 361 = create_alloc_pool ("df_scan ref extract",
362 sizeof (struct df_extract_ref), block_size);
e011eba9 363 problem_data->insn_pool
ed6e85ae 364 = create_alloc_pool ("df_scan insn",
e011eba9 365 sizeof (struct df_insn_info), block_size);
e011eba9 366 problem_data->reg_pool
ed6e85ae 367 = create_alloc_pool ("df_scan reg",
e011eba9 368 sizeof (struct df_reg_info), block_size);
3e6933a8 369 problem_data->mw_reg_pool
ed6e85ae 370 = create_alloc_pool ("df_scan mw_reg",
3e6933a8 371 sizeof (struct df_mw_hardreg), block_size);
e011eba9 372
3072d30e 373 bitmap_obstack_initialize (&problem_data->reg_bitmaps);
374 bitmap_obstack_initialize (&problem_data->insn_bitmaps);
e011eba9 375
3072d30e 376 insn_num += insn_num / 4;
377 df_grow_reg_info ();
e011eba9 378
3072d30e 379 df_grow_insn_info ();
380 df_grow_bb_info (df_scan);
e011eba9 381
3072d30e 382 FOR_ALL_BB (bb)
e011eba9 383 {
3072d30e 384 unsigned int bb_index = bb->index;
385 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
e011eba9 386 if (!bb_info)
387 {
3072d30e 388 bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
389 df_scan_set_bb_info (bb_index, bb_info);
e011eba9 390 }
391 bb_info->artificial_defs = NULL;
392 bb_info->artificial_uses = NULL;
393 }
394
3072d30e 395 df->hardware_regs_used = BITMAP_ALLOC (&problem_data->reg_bitmaps);
396 df->regular_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
397 df->eh_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
398 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
399 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
400 df->insns_to_delete = BITMAP_ALLOC (&problem_data->insn_bitmaps);
401 df->insns_to_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
402 df->insns_to_notes_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
deb2741b 403 df_scan->optional_p = false;
e011eba9 404}
405
406
407/* Free all of the data associated with the scan problem. */
408
409static void
3072d30e 410df_scan_free (void)
e011eba9 411{
3072d30e 412 if (df_scan->problem_data)
413 df_scan_free_internal ();
d0802b39 414
e011eba9 415 if (df->blocks_to_analyze)
3072d30e 416 {
417 BITMAP_FREE (df->blocks_to_analyze);
418 df->blocks_to_analyze = NULL;
419 }
e011eba9 420
3072d30e 421 free (df_scan);
e011eba9 422}
423
3072d30e 424/* Dump the preamble for DF_SCAN dump. */
e011eba9 425static void
3072d30e 426df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
e011eba9 427{
e011eba9 428 int i;
ed6e85ae 429 int dcount = 0;
430 int ucount = 0;
431 int ecount = 0;
432 int icount = 0;
433 int ccount = 0;
434 basic_block bb;
435 rtx insn;
e011eba9 436
3072d30e 437 fprintf (file, ";; invalidated by call \t");
b888113c 438 df_print_regset (file, regs_invalidated_by_call_regset);
3072d30e 439 fprintf (file, ";; hardware regs used \t");
440 df_print_regset (file, df->hardware_regs_used);
441 fprintf (file, ";; regular block artificial uses \t");
442 df_print_regset (file, df->regular_block_artificial_uses);
443 fprintf (file, ";; eh block artificial uses \t");
444 df_print_regset (file, df->eh_block_artificial_uses);
445 fprintf (file, ";; entry block defs \t");
446 df_print_regset (file, df->entry_block_defs);
447 fprintf (file, ";; exit block uses \t");
448 df_print_regset (file, df->exit_block_uses);
449 fprintf (file, ";; regs ever live \t");
e011eba9 450 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3072d30e 451 if (df_regs_ever_live_p (i))
452 fprintf (file, " %d[%s]", i, reg_names[i]);
ed6e85ae 453 fprintf (file, "\n;; ref usage \t");
454
455 for (i = 0; i < (int)df->regs_inited; i++)
456 if (DF_REG_DEF_COUNT (i) || DF_REG_USE_COUNT (i) || DF_REG_EQ_USE_COUNT (i))
457 {
458 const char * sep = "";
459
460 fprintf (file, "r%d={", i);
461 if (DF_REG_DEF_COUNT (i))
462 {
463 fprintf (file, "%dd", DF_REG_DEF_COUNT (i));
464 sep = ",";
465 dcount += DF_REG_DEF_COUNT (i);
466 }
467 if (DF_REG_USE_COUNT (i))
468 {
469 fprintf (file, "%s%du", sep, DF_REG_USE_COUNT (i));
470 sep = ",";
471 ucount += DF_REG_USE_COUNT (i);
472 }
473 if (DF_REG_EQ_USE_COUNT (i))
474 {
 475 fprintf (file, "%s%de", sep, DF_REG_EQ_USE_COUNT (i));
476 ecount += DF_REG_EQ_USE_COUNT (i);
477 }
478 fprintf (file, "} ");
479 }
3072d30e 480
ed6e85ae 481 FOR_EACH_BB (bb)
482 FOR_BB_INSNS (bb, insn)
483 if (INSN_P (insn))
484 {
485 if (CALL_P (insn))
486 ccount++;
487 else
488 icount++;
489 }
490
491 fprintf (file, "\n;; total ref usage %d{%dd,%du,%de} in %d{%d regular + %d call} insns.\n",
492 dcount + ucount + ecount, dcount, ucount, ecount, icount + ccount, icount, ccount);
e011eba9 493}
494
3072d30e 495/* Dump the bb_info for a given basic block. */
496static void
497df_scan_start_block (basic_block bb, FILE *file)
498{
499 struct df_scan_bb_info *bb_info
500 = df_scan_get_bb_info (bb->index);
501
502 if (bb_info)
503 {
504 fprintf (file, ";; bb %d artificial_defs: ", bb->index);
505 df_refs_chain_dump (bb_info->artificial_defs, true, file);
506 fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
507 df_refs_chain_dump (bb_info->artificial_uses, true, file);
508 fprintf (file, "\n");
509 }
510#if 0
511 {
512 rtx insn;
513 FOR_BB_INSNS (bb, insn)
514 if (INSN_P (insn))
515 df_insn_debug (insn, false, file);
516 }
517#endif
518}
519
e011eba9 520static struct df_problem problem_SCAN =
521{
522 DF_SCAN, /* Problem id. */
523 DF_NONE, /* Direction. */
524 df_scan_alloc, /* Allocate the problem specific data. */
f64e6a69 525 NULL, /* Reset global information. */
e011eba9 526 df_scan_free_bb_info, /* Free basic block info. */
527 NULL, /* Local compute function. */
528 NULL, /* Init the solution specific data. */
529 NULL, /* Iterative solver. */
530 NULL, /* Confluence operator 0. */
531 NULL, /* Confluence operator n. */
532 NULL, /* Transfer function. */
533 NULL, /* Finalize function. */
534 df_scan_free, /* Free all of the problem information. */
3072d30e 535 NULL, /* Remove this problem from the stack of dataflow problems. */
536 df_scan_start_dump, /* Debugging. */
537 df_scan_start_block, /* Debugging start block. */
538 NULL, /* Debugging end block. */
539 NULL, /* Incremental solution verify start. */
6dfdc153 540 NULL, /* Incremental solution verify end. */
3e6933a8 541 NULL, /* Dependent problem. */
deb2741b 542 TV_DF_SCAN, /* Timing variable. */
543 false /* Reset blocks on dropping out of blocks_to_analyze. */
e011eba9 544};
545
546
 547/* Create a new DATAFLOW instance for the scanning problem and add it
 548 to the existing instance of DF. */
550
3072d30e 551void
552df_scan_add_problem (void)
e011eba9 553{
3072d30e 554 df_add_problem (&problem_SCAN);
e011eba9 555}
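/* A minimal sketch of how the scanning problem is normally brought up,
   mirroring the ordering used by the initialization code in df-core.c.
   It is illustrative only (hence #if 0): these entry points are usually
   invoked through the pass manager rather than called directly, and the
   function name below is a placeholder.  */
#if 0
static void
example_init_df_scan (void)
{
  df_scan_add_problem ();	/* Register the DF_SCAN problem.  */
  df_scan_alloc (NULL);		/* Allocate pools and per-block info.  */
  df_scan_blocks ();		/* Record refs for every basic block.  */
}
#endif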
556
3072d30e 557\f
e011eba9 558/*----------------------------------------------------------------------------
559 Storage Allocation Utilities
560----------------------------------------------------------------------------*/
561
562
563/* First, grow the reg_info information. If the current size is less than
f0b5f617 564 the number of pseudos, grow to 25% more than the number of
e011eba9 565 pseudos.
566
 567 Second, ensure that all of the slots up to max_reg_num have been
568 filled with reg_info structures. */
569
3072d30e 570void
571df_grow_reg_info (void)
e011eba9 572{
573 unsigned int max_reg = max_reg_num ();
574 unsigned int new_size = max_reg;
3e6933a8 575 struct df_scan_problem_data *problem_data
3072d30e 576 = (struct df_scan_problem_data *) df_scan->problem_data;
e011eba9 577 unsigned int i;
578
3072d30e 579 if (df->regs_size < new_size)
e011eba9 580 {
581 new_size += new_size / 4;
364c0c59 582 df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
583 df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
584 df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
585 new_size);
586 df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
587 df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
588 df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
589 df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
3072d30e 590 df->regs_size = new_size;
e011eba9 591 }
592
3072d30e 593 for (i = df->regs_inited; i < max_reg; i++)
e011eba9 594 {
3072d30e 595 struct df_reg_info *reg_info;
596
364c0c59 597 reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
3072d30e 598 memset (reg_info, 0, sizeof (struct df_reg_info));
599 df->def_regs[i] = reg_info;
364c0c59 600 reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
3072d30e 601 memset (reg_info, 0, sizeof (struct df_reg_info));
602 df->use_regs[i] = reg_info;
364c0c59 603 reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
e011eba9 604 memset (reg_info, 0, sizeof (struct df_reg_info));
3072d30e 605 df->eq_use_regs[i] = reg_info;
606 df->def_info.begin[i] = 0;
607 df->def_info.count[i] = 0;
608 df->use_info.begin[i] = 0;
609 df->use_info.count[i] = 0;
e011eba9 610 }
611
3072d30e 612 df->regs_inited = max_reg;
e011eba9 613}
614
615
616/* Grow the ref information. */
617
618static void
619df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
620{
621 if (ref_info->refs_size < new_size)
622 {
ed6e85ae 623 ref_info->refs = XRESIZEVEC (df_ref, ref_info->refs, new_size);
e011eba9 624 memset (ref_info->refs + ref_info->refs_size, 0,
ed6e85ae 625 (new_size - ref_info->refs_size) *sizeof (df_ref));
e011eba9 626 ref_info->refs_size = new_size;
627 }
628}
629
630
3072d30e 631/* Check and grow the ref information if necessary. This routine
 632 guarantees at least total_size + BITMAP_ADDEND entries in the refs
 633 array. It updates ref_info->refs_size only and does not change
634 ref_info->total_size. */
635
636static void
637df_check_and_grow_ref_info (struct df_ref_info *ref_info,
638 unsigned bitmap_addend)
639{
640 if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
641 {
642 int new_size = ref_info->total_size + bitmap_addend;
643 new_size += ref_info->total_size / 4;
644 df_grow_ref_info (ref_info, new_size);
645 }
646}
647
648
e011eba9 649/* Grow the insn info. If the current size is less than the
650 number of instructions, grow to 25% more than the number of
651 instructions. */
652
3072d30e 653void
654df_grow_insn_info (void)
e011eba9 655{
656 unsigned int new_size = get_max_uid () + 1;
3072d30e 657 if (DF_INSN_SIZE () < new_size)
e011eba9 658 {
659 new_size += new_size / 4;
364c0c59 660 df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
e011eba9 661 memset (df->insns + df->insns_size, 0,
3072d30e 662 (new_size - DF_INSN_SIZE ()) *sizeof (struct df_insn_info *));
663 DF_INSN_SIZE () = new_size;
e011eba9 664 }
665}
666
667
668
669\f
670/*----------------------------------------------------------------------------
671 PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
672----------------------------------------------------------------------------*/
673
3072d30e 674/* Rescan all of the blocks_to_analyze or, if blocks_to_analyze is NULL
 675 (i.e. df_set_blocks was not called), all of the blocks in the function. */
e011eba9 676
677void
3072d30e 678df_scan_blocks (void)
e011eba9 679{
e011eba9 680 basic_block bb;
681
3072d30e 682 df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
683 df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;
3e6933a8 684
3072d30e 685 df_get_regular_block_artificial_uses (df->regular_block_artificial_uses);
686 df_get_eh_block_artificial_uses (df->eh_block_artificial_uses);
e011eba9 687
3072d30e 688 bitmap_ior_into (df->eh_block_artificial_uses,
689 df->regular_block_artificial_uses);
f64e6a69 690
3072d30e 691 /* ENTRY and EXIT blocks have special defs/uses. */
692 df_get_entry_block_def_set (df->entry_block_defs);
693 df_record_entry_block_defs (df->entry_block_defs);
694 df_get_exit_block_use_set (df->exit_block_uses);
695 df_record_exit_block_uses (df->exit_block_uses);
696 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
697 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
e011eba9 698
3072d30e 699 /* Regular blocks */
700 FOR_EACH_BB (bb)
e011eba9 701 {
3072d30e 702 unsigned int bb_index = bb->index;
703 df_bb_refs_record (bb_index, true);
e011eba9 704 }
e011eba9 705}
706
3e6933a8 707
e011eba9 708/* Create a new ref of type DF_REF_TYPE for register REG at address
30de5b55 709 LOC within INSN of BB. This function is only used externally.
710
 711 If the REF_FLAGS field contains DF_REF_SIGN_EXTRACT or
bf1f8fbc 712 DF_REF_ZERO_EXTRACT, WIDTH, OFFSET and MODE are used to access the
 713 extracted fields if they were constants. Otherwise they should be -1
 714 (even if those flags were set). */
e011eba9 715
ed6e85ae 716df_ref
3072d30e 717df_ref_create (rtx reg, rtx *loc, rtx insn,
e011eba9 718 basic_block bb,
719 enum df_ref_type ref_type,
b9c74b4d 720 int ref_flags,
bf1f8fbc 721 int width, int offset, enum machine_mode mode)
e011eba9 722{
ed6e85ae 723 df_ref ref;
3072d30e 724 struct df_reg_info **reg_info;
725 struct df_ref_info *ref_info;
ed6e85ae 726 df_ref *ref_rec;
727 df_ref **ref_rec_ptr;
3072d30e 728 unsigned int count = 0;
729 bool add_to_table;
ed6e85ae 730 enum df_ref_class cl;
e011eba9 731
3072d30e 732 df_grow_reg_info ();
e011eba9 733
3072d30e 734 /* You cannot hack artificial refs. */
735 gcc_assert (insn);
ed6e85ae 736
737 if (width != -1 || offset != -1)
738 cl = DF_REF_EXTRACT;
739 else if (loc)
740 cl = DF_REF_REGULAR;
741 else
742 cl = DF_REF_BASE;
743 ref = df_ref_create_structure (cl, NULL, reg, loc, bb, DF_INSN_INFO_GET (insn),
bf1f8fbc 744 ref_type, ref_flags,
745 width, offset, mode);
e011eba9 746
ed6e85ae 747 if (DF_REF_REG_DEF_P (ref))
3072d30e 748 {
749 reg_info = df->def_regs;
750 ref_info = &df->def_info;
751 ref_rec_ptr = &DF_INSN_DEFS (insn);
752 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
753 }
754 else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
755 {
756 reg_info = df->eq_use_regs;
757 ref_info = &df->use_info;
758 ref_rec_ptr = &DF_INSN_EQ_USES (insn);
759 switch (ref_info->ref_order)
760 {
761 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
762 case DF_REF_ORDER_BY_REG_WITH_NOTES:
763 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
764 add_to_table = true;
765 break;
766 default:
767 add_to_table = false;
768 break;
769 }
770 }
771 else
772 {
773 reg_info = df->use_regs;
774 ref_info = &df->use_info;
775 ref_rec_ptr = &DF_INSN_USES (insn);
776 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
777 }
e011eba9 778
3072d30e 779 /* Do not add if ref is not in the right blocks. */
780 if (add_to_table && df->analyze_subset)
781 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
e011eba9 782
3072d30e 783 df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);
784
785 if (add_to_table)
786 switch (ref_info->ref_order)
787 {
788 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
789 case DF_REF_ORDER_BY_REG_WITH_NOTES:
790 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
791 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
792 break;
793 default:
794 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
795 break;
796 }
e011eba9 797
3072d30e 798 ref_rec = *ref_rec_ptr;
799 while (*ref_rec)
800 {
801 count++;
802 ref_rec++;
803 }
e011eba9 804
3072d30e 805 ref_rec = *ref_rec_ptr;
806 if (count)
e011eba9 807 {
ed6e85ae 808 ref_rec = XRESIZEVEC (df_ref, ref_rec, count+2);
3072d30e 809 *ref_rec_ptr = ref_rec;
810 ref_rec[count] = ref;
811 ref_rec[count+1] = NULL;
ed6e85ae 812 qsort (ref_rec, count + 1, sizeof (df_ref), df_ref_compare);
3072d30e 813 }
814 else
815 {
ed6e85ae 816 df_ref *ref_rec = XNEWVEC (df_ref, 2);
3072d30e 817 ref_rec[0] = ref;
818 ref_rec[1] = NULL;
819 *ref_rec_ptr = ref_rec;
820 }
e011eba9 821
3072d30e 822#if 0
823 if (dump_file)
824 {
825 fprintf (dump_file, "adding ref ");
826 df_ref_debug (ref, dump_file);
e011eba9 827 }
3072d30e 828#endif
829 /* By adding the ref directly, df_insn_rescan my not find any
830 differences even though the block will have changed. So we need
831 to mark the block dirty ourselves. */
832 df_set_bb_dirty (bb);
e011eba9 833
3072d30e 834 return ref;
e011eba9 835}
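/* An illustrative (not compiled) call to df_ref_create for the common
   case described in the comment above: no sign/zero extract, so WIDTH
   and OFFSET are -1 and MODE is VOIDmode.  The variables `reg', `loc',
   `insn' and `bb' are placeholders for values a caller would supply.  */
#if 0
  df_ref use = df_ref_create (reg, loc, insn, bb,
			      DF_REF_REG_USE, 0 /* ref_flags */,
			      -1, -1, VOIDmode);
#endif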
836
837
3072d30e 838\f
839/*----------------------------------------------------------------------------
840 UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
841----------------------------------------------------------------------------*/
842
30de5b55 843static void
ed6e85ae 844df_free_ref (df_ref ref)
30de5b55 845{
846 struct df_scan_problem_data *problem_data
847 = (struct df_scan_problem_data *) df_scan->problem_data;
848
ed6e85ae 849 switch (DF_REF_CLASS (ref))
850 {
851 case DF_REF_BASE:
852 pool_free (problem_data->ref_base_pool, ref);
853 break;
854
855 case DF_REF_ARTIFICIAL:
856 pool_free (problem_data->ref_artificial_pool, ref);
857 break;
858
859 case DF_REF_REGULAR:
860 pool_free (problem_data->ref_regular_pool, ref);
861 break;
862
863 case DF_REF_EXTRACT:
864 pool_free (problem_data->ref_extract_pool, ref);
865 break;
866 }
30de5b55 867}
868
e011eba9 869
3072d30e 870/* Unlink and delete REF at the reg_use, reg_eq_use or reg_def chain.
871 Also delete the def-use or use-def chain if it exists. */
872
873static void
ed6e85ae 874df_reg_chain_unlink (df_ref ref)
e011eba9 875{
ed6e85ae 876 df_ref next = DF_REF_NEXT_REG (ref);
877 df_ref prev = DF_REF_PREV_REG (ref);
3072d30e 878 int id = DF_REF_ID (ref);
e011eba9 879 struct df_reg_info *reg_info;
ed6e85ae 880 df_ref *refs = NULL;
e011eba9 881
ed6e85ae 882 if (DF_REF_REG_DEF_P (ref))
e011eba9 883 {
ed6e85ae 884 int regno = DF_REF_REGNO (ref);
885 reg_info = DF_REG_DEF_GET (regno);
3072d30e 886 refs = df->def_info.refs;
e011eba9 887 }
888 else
889 {
3072d30e 890 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
891 {
892 reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
893 switch (df->use_info.ref_order)
894 {
895 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
896 case DF_REF_ORDER_BY_REG_WITH_NOTES:
897 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
898 refs = df->use_info.refs;
899 break;
900 default:
901 break;
902 }
903 }
904 else
905 {
906 reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
907 refs = df->use_info.refs;
908 }
909 }
910
911 if (refs)
912 {
913 if (df->analyze_subset)
914 {
ed6e85ae 915 if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (ref)))
3072d30e 916 refs[id] = NULL;
917 }
918 else
919 refs[id] = NULL;
e011eba9 920 }
921
3072d30e 922 /* Delete any def-use or use-def chains that start here. It is
923 possible that there is trash in this field. This happens for
924 insns that have been deleted when rescanning has been deferred
925 and the chain problem has also been deleted. The chain tear down
926 code skips deleted insns. */
927 if (df_chain && DF_REF_CHAIN (ref))
928 df_chain_unlink (ref);
929
e011eba9 930 reg_info->n_refs--;
3072d30e 931 if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
932 {
933 gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
934 df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
935 }
e011eba9 936
937 /* Unlink from the reg chain. If there is no prev, this is the
938 first of the list. If not, just join the next and prev. */
939 if (prev)
3072d30e 940 DF_REF_NEXT_REG (prev) = next;
e011eba9 941 else
942 {
3072d30e 943 gcc_assert (reg_info->reg_chain == ref);
e011eba9 944 reg_info->reg_chain = next;
e011eba9 945 }
3072d30e 946 if (next)
947 DF_REF_PREV_REG (next) = prev;
e011eba9 948
30de5b55 949 df_free_ref (ref);
3072d30e 950}
951
952
953/* Remove REF from VEC. */
954
955static void
ed6e85ae 956df_ref_compress_rec (df_ref **vec_ptr, df_ref ref)
3072d30e 957{
ed6e85ae 958 df_ref *vec = *vec_ptr;
3072d30e 959
960 if (vec[1])
961 {
962 while (*vec && *vec != ref)
963 vec++;
964
965 while (*vec)
966 {
967 *vec = *(vec+1);
968 vec++;
969 }
970 }
971 else
972 {
973 free (vec);
974 *vec_ptr = df_null_ref_rec;
975 }
e011eba9 976}
977
978
979/* Unlink REF from all def-use/use-def chains, etc. */
980
981void
ed6e85ae 982df_ref_remove (df_ref ref)
e011eba9 983{
3072d30e 984#if 0
985 if (dump_file)
986 {
987 fprintf (dump_file, "removing ref ");
988 df_ref_debug (ref, dump_file);
989 }
990#endif
991
e011eba9 992 if (DF_REF_REG_DEF_P (ref))
993 {
3072d30e 994 if (DF_REF_IS_ARTIFICIAL (ref))
e011eba9 995 {
996 struct df_scan_bb_info *bb_info
ed6e85ae 997 = df_scan_get_bb_info (DF_REF_BBNO (ref));
3072d30e 998 df_ref_compress_rec (&bb_info->artificial_defs, ref);
e011eba9 999 }
1000 else
3072d30e 1001 {
1002 unsigned int uid = DF_REF_INSN_UID (ref);
1003 struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
1004 df_ref_compress_rec (&insn_rec->defs, ref);
1005 }
e011eba9 1006 }
1007 else
1008 {
3072d30e 1009 if (DF_REF_IS_ARTIFICIAL (ref))
e011eba9 1010 {
1011 struct df_scan_bb_info *bb_info
ed6e85ae 1012 = df_scan_get_bb_info (DF_REF_BBNO (ref));
3072d30e 1013 df_ref_compress_rec (&bb_info->artificial_uses, ref);
1014 }
1015 else
1016 {
1017 unsigned int uid = DF_REF_INSN_UID (ref);
1018 struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
1019
1020 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
1021 df_ref_compress_rec (&insn_rec->eq_uses, ref);
1022 else
1023 df_ref_compress_rec (&insn_rec->uses, ref);
e011eba9 1024 }
e011eba9 1025 }
1026
3072d30e 1027 /* By deleting the ref directly, df_insn_rescan may not find any
1028 differences even though the block will have changed. So we need
1029 to mark the block dirty ourselves. */
1030 df_set_bb_dirty (DF_REF_BB (ref));
1031 df_reg_chain_unlink (ref);
e011eba9 1032}
1033
1034
3072d30e 1035/* Create the insn record for INSN. If there was one there, zero it
1036 out. */
e011eba9 1037
3072d30e 1038struct df_insn_info *
1039df_insn_create_insn_record (rtx insn)
e011eba9 1040{
3e6933a8 1041 struct df_scan_problem_data *problem_data
3072d30e 1042 = (struct df_scan_problem_data *) df_scan->problem_data;
1043 struct df_insn_info *insn_rec;
e011eba9 1044
3072d30e 1045 df_grow_insn_info ();
158b6cc9 1046 insn_rec = DF_INSN_INFO_GET (insn);
e011eba9 1047 if (!insn_rec)
1048 {
364c0c59 1049 insn_rec = (struct df_insn_info *) pool_alloc (problem_data->insn_pool);
158b6cc9 1050 DF_INSN_INFO_SET (insn, insn_rec);
e011eba9 1051 }
1052 memset (insn_rec, 0, sizeof (struct df_insn_info));
3072d30e 1053 insn_rec->insn = insn;
e011eba9 1054 return insn_rec;
1055}
1056
d0802b39 1057
3072d30e 1058/* Delete the du chain (DF_REF_CHAIN ()) of each of the refs in the ref chain. */
e011eba9 1059
3072d30e 1060static void
ed6e85ae 1061df_ref_chain_delete_du_chain (df_ref *ref_rec)
e011eba9 1062{
3072d30e 1063 while (*ref_rec)
e011eba9 1064 {
ed6e85ae 1065 df_ref ref = *ref_rec;
3072d30e 1066 /* CHAIN is allocated by DF_CHAIN. So make sure to
1067 pass df_scan instance for the problem. */
1068 if (DF_REF_CHAIN (ref))
1069 df_chain_unlink (ref);
1070 ref_rec++;
1071 }
1072}
3e6933a8 1073
e011eba9 1074
3072d30e 1075/* Delete all refs in the ref chain. */
1076
1077static void
ed6e85ae 1078df_ref_chain_delete (df_ref *ref_rec)
3072d30e 1079{
ed6e85ae 1080 df_ref *start = ref_rec;
3072d30e 1081 while (*ref_rec)
1082 {
1083 df_reg_chain_unlink (*ref_rec);
1084 ref_rec++;
1085 }
1086
1087 /* If the list is empty, it has a special shared element that is not
1088 to be deleted. */
1089 if (*start)
1090 free (start);
1091}
1092
1093
1094/* Delete the hardreg chain. */
1095
1096static void
1097df_mw_hardreg_chain_delete (struct df_mw_hardreg **hardregs)
1098{
1099 struct df_scan_problem_data *problem_data;
1100
1101 if (!hardregs)
1102 return;
1103
1104 problem_data = (struct df_scan_problem_data *) df_scan->problem_data;
1105
1106 while (*hardregs)
1107 {
1108 pool_free (problem_data->mw_reg_pool, *hardregs);
1109 hardregs++;
1110 }
1111}
1112
1113
 1114/* Delete all of the refs information from the insn with uid UID. BB must
 1115 be passed in so that the block can be marked dirty, except when this is
 1116 called from df_process_deferred_rescans. */
1117
1118void
1119df_insn_delete (basic_block bb, unsigned int uid)
1120{
1121 struct df_insn_info *insn_info = NULL;
1122 if (!df)
1123 return;
1124
1125 df_grow_bb_info (df_scan);
1126 df_grow_reg_info ();
1127
1128 /* The block must be marked as dirty now, rather than later as in
1129 df_insn_rescan and df_notes_rescan because it may not be there at
1130 rescanning time and the mark would blow up. */
1131 if (bb)
1132 df_set_bb_dirty (bb);
1133
1134 insn_info = DF_INSN_UID_SAFE_GET (uid);
1135
a3de79f9 1136 /* The client has deferred rescanning. */
3072d30e 1137 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1138 {
1139 if (insn_info)
1140 {
1141 bitmap_clear_bit (df->insns_to_rescan, uid);
1142 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1143 bitmap_set_bit (df->insns_to_delete, uid);
1144 }
1145 if (dump_file)
bff9eb26 1146 fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
3072d30e 1147 return;
1148 }
1149
1150 if (dump_file)
1151 fprintf (dump_file, "deleting insn with uid = %d.\n", uid);
1152
1153 bitmap_clear_bit (df->insns_to_delete, uid);
1154 bitmap_clear_bit (df->insns_to_rescan, uid);
1155 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1156 if (insn_info)
1157 {
1158 struct df_scan_problem_data *problem_data
1159 = (struct df_scan_problem_data *) df_scan->problem_data;
1160
1161 /* In general, notes do not have the insn_info fields
1162 initialized. However, combine deletes insns by changing them
1163 to notes. How clever. So we cannot just check if it is a
1164 valid insn before short circuiting this code, we need to see
1165 if we actually initialized it. */
1166 if (insn_info->defs)
1167 {
1168 df_mw_hardreg_chain_delete (insn_info->mw_hardregs);
1169
1170 if (df_chain)
1171 {
1172 df_ref_chain_delete_du_chain (insn_info->defs);
1173 df_ref_chain_delete_du_chain (insn_info->uses);
1174 df_ref_chain_delete_du_chain (insn_info->eq_uses);
1175 }
1176
1177 df_ref_chain_delete (insn_info->defs);
1178 df_ref_chain_delete (insn_info->uses);
1179 df_ref_chain_delete (insn_info->eq_uses);
1180 }
e011eba9 1181 pool_free (problem_data->insn_pool, insn_info);
3072d30e 1182 DF_INSN_UID_SET (uid, NULL);
e011eba9 1183 }
1184}
1185
1186
3072d30e 1187/* Free all of the refs and the mw_hardregs in COLLECTION_REC. */
d0802b39 1188
3072d30e 1189static void
1190df_free_collection_rec (struct df_collection_rec *collection_rec)
d0802b39 1191{
f7f05a07 1192 unsigned int ix;
3072d30e 1193 struct df_scan_problem_data *problem_data
1194 = (struct df_scan_problem_data *) df_scan->problem_data;
f7f05a07 1195 df_ref ref;
1196 struct df_mw_hardreg *mw;
1197
1198 for (ix = 0; VEC_iterate (df_ref, collection_rec->def_vec, ix, ref); ++ix)
1199 df_free_ref (ref);
1200 for (ix = 0; VEC_iterate (df_ref, collection_rec->use_vec, ix, ref); ++ix)
1201 df_free_ref (ref);
1202 for (ix = 0; VEC_iterate (df_ref, collection_rec->eq_use_vec, ix, ref); ++ix)
1203 df_free_ref (ref);
1204 for (ix = 0;
1205 VEC_iterate (df_mw_hardreg_ptr, collection_rec->mw_vec, ix, mw);
1206 ++ix)
1207 pool_free (problem_data->mw_reg_pool, mw);
1208
1209 VEC_free (df_ref, stack, collection_rec->def_vec);
1210 VEC_free (df_ref, stack, collection_rec->use_vec);
1211 VEC_free (df_ref, stack, collection_rec->eq_use_vec);
1212 VEC_free (df_mw_hardreg_ptr, stack, collection_rec->mw_vec);
3072d30e 1213}
d0802b39 1214
3072d30e 1215/* Rescan INSN. Return TRUE if the rescanning produced any changes. */
1216
1217bool
1218df_insn_rescan (rtx insn)
1219{
1220 unsigned int uid = INSN_UID (insn);
1221 struct df_insn_info *insn_info = NULL;
1222 basic_block bb = BLOCK_FOR_INSN (insn);
1223 struct df_collection_rec collection_rec;
3072d30e 1224
1225 if ((!df) || (!INSN_P (insn)))
1226 return false;
1227
1228 if (!bb)
d0802b39 1229 {
3072d30e 1230 if (dump_file)
1231 fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
1232 return false;
1233 }
1234
1235 /* The client has disabled rescanning and plans to do it itself. */
1236 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1237 return false;
1238
1239 df_grow_bb_info (df_scan);
1240 df_grow_reg_info ();
1241
1242 insn_info = DF_INSN_UID_SAFE_GET (uid);
1243
a3de79f9 1244 /* The client has deferred rescanning. */
3072d30e 1245 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1246 {
1247 if (!insn_info)
1248 {
1249 insn_info = df_insn_create_insn_record (insn);
1250 insn_info->defs = df_null_ref_rec;
1251 insn_info->uses = df_null_ref_rec;
1252 insn_info->eq_uses = df_null_ref_rec;
1253 insn_info->mw_hardregs = df_null_mw_rec;
1254 }
1255 if (dump_file)
bff9eb26 1256 fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);
3072d30e 1257
1258 bitmap_clear_bit (df->insns_to_delete, uid);
1259 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1260 bitmap_set_bit (df->insns_to_rescan, INSN_UID (insn));
1261 return false;
1262 }
1263
f7f05a07 1264 collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
1265 collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
1266 collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
1267 collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
1268
3072d30e 1269 bitmap_clear_bit (df->insns_to_delete, uid);
1270 bitmap_clear_bit (df->insns_to_rescan, uid);
1271 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1272 if (insn_info)
1273 {
2e81afe5 1274 int luid;
3072d30e 1275 bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
1276 /* If there's no change, return false. */
1277 if (the_same)
d0802b39 1278 {
3072d30e 1279 df_free_collection_rec (&collection_rec);
1280 if (dump_file)
1281 fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
1282 return false;
d0802b39 1283 }
3072d30e 1284 if (dump_file)
1285 fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);
1286
2e81afe5 1287 /* There's change - we need to delete the existing info.
1288 Since the insn isn't moved, we can salvage its LUID. */
1289 luid = DF_INSN_LUID (insn);
3072d30e 1290 df_insn_delete (NULL, uid);
1291 df_insn_create_insn_record (insn);
2e81afe5 1292 DF_INSN_LUID (insn) = luid;
3072d30e 1293 }
1294 else
1295 {
158b6cc9 1296 struct df_insn_info *insn_info = df_insn_create_insn_record (insn);
1297 df_insn_refs_collect (&collection_rec, bb, insn_info);
3072d30e 1298 if (dump_file)
1299 fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
1300 }
1301
1302 df_refs_add_to_chains (&collection_rec, bb, insn);
1303 df_set_bb_dirty (bb);
f7f05a07 1304
1305 VEC_free (df_ref, stack, collection_rec.def_vec);
1306 VEC_free (df_ref, stack, collection_rec.use_vec);
1307 VEC_free (df_ref, stack, collection_rec.eq_use_vec);
1308 VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
1309
3072d30e 1310 return true;
1311}
1312
1313
1314/* Rescan all of the insns in the function. Note that the artificial
 1315 uses and defs are not touched. This function will destroy def-use
1316 or use-def chains. */
1317
1318void
1319df_insn_rescan_all (void)
1320{
1321 bool no_insn_rescan = false;
1322 bool defer_insn_rescan = false;
1323 basic_block bb;
1324 bitmap_iterator bi;
1325 unsigned int uid;
1326 bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
1327
1328 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1329 {
1330 df_clear_flags (DF_NO_INSN_RESCAN);
1331 no_insn_rescan = true;
d0802b39 1332 }
1333
3072d30e 1334 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
d0802b39 1335 {
3072d30e 1336 df_clear_flags (DF_DEFER_INSN_RESCAN);
1337 defer_insn_rescan = true;
1338 }
1339
1340 bitmap_copy (tmp, df->insns_to_delete);
1341 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1342 {
1343 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1344 if (insn_info)
1345 df_insn_delete (NULL, uid);
d0802b39 1346 }
3072d30e 1347
1348 BITMAP_FREE (tmp);
1349 bitmap_clear (df->insns_to_delete);
1350 bitmap_clear (df->insns_to_rescan);
1351 bitmap_clear (df->insns_to_notes_rescan);
1352
1353 FOR_EACH_BB (bb)
1354 {
1355 rtx insn;
1356 FOR_BB_INSNS (bb, insn)
1357 {
1358 df_insn_rescan (insn);
1359 }
1360 }
1361
1362 if (no_insn_rescan)
1363 df_set_flags (DF_NO_INSN_RESCAN);
1364 if (defer_insn_rescan)
1365 df_set_flags (DF_DEFER_INSN_RESCAN);
d0802b39 1366}
1367
1368
a3de79f9 1369/* Process all of the deferred rescans or deletions. */
e011eba9 1370
3072d30e 1371void
1372df_process_deferred_rescans (void)
e011eba9 1373{
3072d30e 1374 bool no_insn_rescan = false;
1375 bool defer_insn_rescan = false;
e011eba9 1376 bitmap_iterator bi;
3072d30e 1377 unsigned int uid;
1378 bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
1379
1380 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1381 {
1382 df_clear_flags (DF_NO_INSN_RESCAN);
1383 no_insn_rescan = true;
1384 }
1385
1386 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1387 {
1388 df_clear_flags (DF_DEFER_INSN_RESCAN);
1389 defer_insn_rescan = true;
1390 }
1391
1392 if (dump_file)
a3de79f9 1393 fprintf (dump_file, "starting the processing of deferred insns\n");
3072d30e 1394
1395 bitmap_copy (tmp, df->insns_to_delete);
1396 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1397 {
1398 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1399 if (insn_info)
1400 df_insn_delete (NULL, uid);
1401 }
1402
1403 bitmap_copy (tmp, df->insns_to_rescan);
1404 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1405 {
1406 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1407 if (insn_info)
1408 df_insn_rescan (insn_info->insn);
1409 }
1410
1411 bitmap_copy (tmp, df->insns_to_notes_rescan);
1412 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1413 {
1414 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1415 if (insn_info)
1416 df_notes_rescan (insn_info->insn);
1417 }
1418
1419 if (dump_file)
a3de79f9 1420 fprintf (dump_file, "ending the processing of deferred insns\n");
3072d30e 1421
1422 BITMAP_FREE (tmp);
1423 bitmap_clear (df->insns_to_delete);
1424 bitmap_clear (df->insns_to_rescan);
1425 bitmap_clear (df->insns_to_notes_rescan);
1426
1427 if (no_insn_rescan)
1428 df_set_flags (DF_NO_INSN_RESCAN);
1429 if (defer_insn_rescan)
1430 df_set_flags (DF_DEFER_INSN_RESCAN);
1431
1432 /* If someone changed regs_ever_live during this pass, fix up the
1433 entry and exit blocks. */
1434 if (df->redo_entry_and_exit)
1435 {
1436 df_update_entry_exit_and_calls ();
1437 df->redo_entry_and_exit = false;
1438 }
1439}
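/* A sketch (illustrative only) of the deferred-rescan protocol that
   df_insn_rescan, df_insn_delete and df_process_deferred_rescans
   implement.  A pass that edits many insns can queue the work and
   flush it in one batch; `insn' below is a placeholder.  */
#if 0
  df_set_flags (DF_DEFER_INSN_RESCAN);
  /* ... create, modify or delete insns; with the flag set, the calls
     below only queue work in df->insns_to_rescan / df->insns_to_delete ... */
  df_insn_rescan (insn);
  df_process_deferred_rescans ();	/* Flush the queued work.  */
  df_clear_flags (DF_DEFER_INSN_RESCAN);
#endif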
1440
e011eba9 1441
3072d30e 1442/* Count the number of refs. Include the defs if INCLUDE_DEFS. Include
1443 the uses if INCLUDE_USES. Include the eq_uses if
1444 INCLUDE_EQ_USES. */
1445
1446static unsigned int
1447df_count_refs (bool include_defs, bool include_uses,
1448 bool include_eq_uses)
1449{
1450 unsigned int regno;
1451 int size = 0;
1452 unsigned int m = df->regs_inited;
1453
1454 for (regno = 0; regno < m; regno++)
e011eba9 1455 {
3072d30e 1456 if (include_defs)
1457 size += DF_REG_DEF_COUNT (regno);
1458 if (include_uses)
1459 size += DF_REG_USE_COUNT (regno);
1460 if (include_eq_uses)
1461 size += DF_REG_EQ_USE_COUNT (regno);
e011eba9 1462 }
3072d30e 1463 return size;
e011eba9 1464}
1465
1466
 1467/* Build the ref table for either the uses or defs from the reg-use
3072d30e 1468 or reg-def chains. This version processes the refs in reg order
1469 which is likely to be best if processing the whole function. */
e011eba9 1470
3072d30e 1471static void
1472df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
1473 bool include_defs,
1474 bool include_uses,
1475 bool include_eq_uses)
e011eba9 1476{
3072d30e 1477 unsigned int m = df->regs_inited;
e011eba9 1478 unsigned int regno;
1479 unsigned int offset = 0;
3072d30e 1480 unsigned int start;
e011eba9 1481
3072d30e 1482 if (df->changeable_flags & DF_NO_HARD_REGS)
1483 {
1484 start = FIRST_PSEUDO_REGISTER;
1485 memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1486 memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
e011eba9 1487 }
3072d30e 1488 else
1489 start = 0;
e011eba9 1490
3072d30e 1491 ref_info->total_size
1492 = df_count_refs (include_defs, include_uses, include_eq_uses);
1493
1494 df_check_and_grow_ref_info (ref_info, 1);
1495
1496 for (regno = start; regno < m; regno++)
e011eba9 1497 {
e011eba9 1498 int count = 0;
3072d30e 1499 ref_info->begin[regno] = offset;
1500 if (include_defs)
1501 {
ed6e85ae 1502 df_ref ref = DF_REG_DEF_CHAIN (regno);
3072d30e 1503 while (ref)
1504 {
1505 ref_info->refs[offset] = ref;
1506 DF_REF_ID (ref) = offset++;
1507 count++;
1508 ref = DF_REF_NEXT_REG (ref);
1509 gcc_assert (offset < ref_info->refs_size);
1510 }
1511 }
1512 if (include_uses)
e011eba9 1513 {
ed6e85ae 1514 df_ref ref = DF_REG_USE_CHAIN (regno);
e011eba9 1515 while (ref)
1516 {
1517 ref_info->refs[offset] = ref;
1518 DF_REF_ID (ref) = offset++;
3072d30e 1519 count++;
e011eba9 1520 ref = DF_REF_NEXT_REG (ref);
3072d30e 1521 gcc_assert (offset < ref_info->refs_size);
1522 }
1523 }
1524 if (include_eq_uses)
1525 {
ed6e85ae 1526 df_ref ref = DF_REG_EQ_USE_CHAIN (regno);
3072d30e 1527 while (ref)
1528 {
1529 ref_info->refs[offset] = ref;
1530 DF_REF_ID (ref) = offset++;
e011eba9 1531 count++;
3072d30e 1532 ref = DF_REF_NEXT_REG (ref);
1533 gcc_assert (offset < ref_info->refs_size);
e011eba9 1534 }
e011eba9 1535 }
3072d30e 1536 ref_info->count[regno] = count;
e011eba9 1537 }
3072d30e 1538
e011eba9 1539 /* The bitmap size is not decremented when refs are deleted. So
1540 reset it now that we have squished out all of the empty
1541 slots. */
3072d30e 1542 ref_info->table_size = offset;
e011eba9 1543}
1544
e011eba9 1545
3072d30e 1546/* Build the ref table for either the uses or defs from the reg-use
1547 or reg-def chains. This version processes the refs in insn order
1548 which is likely to be best if processing some segment of the
1549 function. */
e011eba9 1550
3072d30e 1551static void
1552df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
1553 bool include_defs,
1554 bool include_uses,
1555 bool include_eq_uses)
e011eba9 1556{
3072d30e 1557 bitmap_iterator bi;
1558 unsigned int bb_index;
1559 unsigned int m = df->regs_inited;
1560 unsigned int offset = 0;
1561 unsigned int r;
1562 unsigned int start
1563 = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;
e011eba9 1564
3072d30e 1565 memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
1566 memset (ref_info->count, 0, sizeof (int) * df->regs_inited);
1567
1568 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1569 df_check_and_grow_ref_info (ref_info, 1);
e011eba9 1570
3072d30e 1571 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
e011eba9 1572 {
3072d30e 1573 basic_block bb = BASIC_BLOCK (bb_index);
1574 rtx insn;
ed6e85ae 1575 df_ref *ref_rec;
3072d30e 1576
1577 if (include_defs)
1578 for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
3e6933a8 1579 {
3072d30e 1580 unsigned int regno = DF_REF_REGNO (*ref_rec);
1581 ref_info->count[regno]++;
3e6933a8 1582 }
3072d30e 1583 if (include_uses)
1584 for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
3e6933a8 1585 {
3072d30e 1586 unsigned int regno = DF_REF_REGNO (*ref_rec);
1587 ref_info->count[regno]++;
3e6933a8 1588 }
e011eba9 1589
3072d30e 1590 FOR_BB_INSNS (bb, insn)
1591 {
1592 if (INSN_P (insn))
1593 {
1594 unsigned int uid = INSN_UID (insn);
1595
1596 if (include_defs)
1597 for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
1598 {
1599 unsigned int regno = DF_REF_REGNO (*ref_rec);
1600 ref_info->count[regno]++;
1601 }
1602 if (include_uses)
1603 for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
1604 {
1605 unsigned int regno = DF_REF_REGNO (*ref_rec);
1606 ref_info->count[regno]++;
1607 }
1608 if (include_eq_uses)
1609 for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
1610 {
1611 unsigned int regno = DF_REF_REGNO (*ref_rec);
1612 ref_info->count[regno]++;
1613 }
1614 }
1615 }
1616 }
1617
1618 for (r = start; r < m; r++)
1619 {
1620 ref_info->begin[r] = offset;
1621 offset += ref_info->count[r];
1622 ref_info->count[r] = 0;
1623 }
1624
1625 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1626 {
1627 basic_block bb = BASIC_BLOCK (bb_index);
1628 rtx insn;
ed6e85ae 1629 df_ref *ref_rec;
3072d30e 1630
1631 if (include_defs)
1632 for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
3e6933a8 1633 {
ed6e85ae 1634 df_ref ref = *ref_rec;
3072d30e 1635 unsigned int regno = DF_REF_REGNO (ref);
1636 if (regno >= start)
3e6933a8 1637 {
3072d30e 1638 unsigned int id
1639 = ref_info->begin[regno] + ref_info->count[regno]++;
1640 DF_REF_ID (ref) = id;
1641 ref_info->refs[id] = ref;
3e6933a8 1642 }
3e6933a8 1643 }
3072d30e 1644 if (include_uses)
1645 for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
3e6933a8 1646 {
ed6e85ae 1647 df_ref ref = *ref_rec;
3072d30e 1648 unsigned int regno = DF_REF_REGNO (ref);
1649 if (regno >= start)
1650 {
1651 unsigned int id
1652 = ref_info->begin[regno] + ref_info->count[regno]++;
1653 DF_REF_ID (ref) = id;
1654 ref_info->refs[id] = ref;
1655 }
3e6933a8 1656 }
3072d30e 1657
1658 FOR_BB_INSNS (bb, insn)
1659 {
1660 if (INSN_P (insn))
1661 {
1662 unsigned int uid = INSN_UID (insn);
1663
1664 if (include_defs)
1665 for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
1666 {
ed6e85ae 1667 df_ref ref = *ref_rec;
3072d30e 1668 unsigned int regno = DF_REF_REGNO (ref);
1669 if (regno >= start)
1670 {
1671 unsigned int id
1672 = ref_info->begin[regno] + ref_info->count[regno]++;
1673 DF_REF_ID (ref) = id;
1674 ref_info->refs[id] = ref;
1675 }
1676 }
1677 if (include_uses)
1678 for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
1679 {
ed6e85ae 1680 df_ref ref = *ref_rec;
3072d30e 1681 unsigned int regno = DF_REF_REGNO (ref);
1682 if (regno >= start)
1683 {
1684 unsigned int id
1685 = ref_info->begin[regno] + ref_info->count[regno]++;
1686 DF_REF_ID (ref) = id;
1687 ref_info->refs[id] = ref;
1688 }
1689 }
1690 if (include_eq_uses)
1691 for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
1692 {
ed6e85ae 1693 df_ref ref = *ref_rec;
3072d30e 1694 unsigned int regno = DF_REF_REGNO (ref);
1695 if (regno >= start)
1696 {
1697 unsigned int id
1698 = ref_info->begin[regno] + ref_info->count[regno]++;
1699 DF_REF_ID (ref) = id;
1700 ref_info->refs[id] = ref;
1701 }
1702 }
1703 }
1704 }
1705 }
1706
1707 /* The bitmap size is not decremented when refs are deleted. So
1708 reset it now that we have squished out all of the empty
1709 slots. */
1710
1711 ref_info->table_size = offset;
1712}
1713
 1714/* Build the ref table for either the uses or defs from the reg-use
1715 or reg-def chains. */
1716
1717static void
1718df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
1719 bool include_defs,
1720 bool include_uses,
1721 bool include_eq_uses)
1722{
1723 if (df->analyze_subset)
1724 df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
1725 include_uses, include_eq_uses);
1726 else
1727 df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
1728 include_uses, include_eq_uses);
1729}
1730
1731
1732/* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET. */
1733static unsigned int
1734df_add_refs_to_table (unsigned int offset,
1735 struct df_ref_info *ref_info,
ed6e85ae 1736 df_ref *ref_vec)
3072d30e 1737{
1738 while (*ref_vec)
1739 {
ed6e85ae 1740 df_ref ref = *ref_vec;
3072d30e 1741 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
1742 || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
1743 {
1744 ref_info->refs[offset] = ref;
1745 DF_REF_ID (*ref_vec) = offset++;
1746 }
1747 ref_vec++;
1748 }
1749 return offset;
1750}
1751
1752
 1753/* Add the refs in all of the insns of BB to the table in REF_INFO
 1754 starting at OFFSET. Include the defs if INCLUDE_DEFS, the uses if
 1755 INCLUDE_USES, and the eq_uses if INCLUDE_EQ_USES. Return the next offset. */
1756
1757static unsigned int
1758df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
1759 struct df_ref_info *ref_info,
1760 bool include_defs, bool include_uses,
1761 bool include_eq_uses)
1762{
1763 rtx insn;
1764
1765 if (include_defs)
1766 offset = df_add_refs_to_table (offset, ref_info,
1767 df_get_artificial_defs (bb->index));
1768 if (include_uses)
1769 offset = df_add_refs_to_table (offset, ref_info,
1770 df_get_artificial_uses (bb->index));
1771
1772 FOR_BB_INSNS (bb, insn)
1773 if (INSN_P (insn))
1774 {
1775 unsigned int uid = INSN_UID (insn);
1776 if (include_defs)
1777 offset = df_add_refs_to_table (offset, ref_info,
1778 DF_INSN_UID_DEFS (uid));
1779 if (include_uses)
1780 offset = df_add_refs_to_table (offset, ref_info,
1781 DF_INSN_UID_USES (uid));
1782 if (include_eq_uses)
1783 offset = df_add_refs_to_table (offset, ref_info,
1784 DF_INSN_UID_EQ_USES (uid));
3e6933a8 1785 }
3072d30e 1786 return offset;
1787}
1788
1789
bef304b8 1790/* Organize the refs by insn into the table in REF_INFO. If
3072d30e 1791 blocks_to_analyze is defined, use that set, otherwise the entire
1792 program. Include the defs if INCLUDE_DEFS. Include the uses if
1793 INCLUDE_USES. Include the eq_uses if INCLUDE_EQ_USES. */
1794
1795static void
1796df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
1797 bool include_defs, bool include_uses,
1798 bool include_eq_uses)
1799{
1800 basic_block bb;
1801 unsigned int offset = 0;
1802
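  /* Size the table for all of the requested refs up front, then walk the
     blocks (either the analysis subset or every block) appending refs in
     insn order.  */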
1803 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1804 df_check_and_grow_ref_info (ref_info, 1);
1805 if (df->blocks_to_analyze)
1806 {
1807 bitmap_iterator bi;
1808 unsigned int index;
1809
1810 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
1811 {
1812 offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK (index), offset, ref_info,
1813 include_defs, include_uses,
1814 include_eq_uses);
1815 }
1816
1817 ref_info->table_size = offset;
1818 }
1819 else
1820 {
1821 FOR_ALL_BB (bb)
1822 offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
1823 include_defs, include_uses,
1824 include_eq_uses);
1825 ref_info->table_size = offset;
1826 }
1827}
1828
1829
1830/* If the use refs in DF are not organized, reorganize them. */
1831
1832void
1833df_maybe_reorganize_use_refs (enum df_ref_order order)
1834{
1835 if (order == df->use_info.ref_order)
1836 return;
1837
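  /* Either rebuild the use ref table in the requested order, or free the
     table entirely for DF_REF_ORDER_NO_TABLE.  */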
1838 switch (order)
1839 {
1840 case DF_REF_ORDER_BY_REG:
1841 df_reorganize_refs_by_reg (&df->use_info, false, true, false);
1842 break;
1843
1844 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1845 df_reorganize_refs_by_reg (&df->use_info, false, true, true);
1846 break;
1847
1848 case DF_REF_ORDER_BY_INSN:
1849 df_reorganize_refs_by_insn (&df->use_info, false, true, false);
1850 break;
1851
1852 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1853 df_reorganize_refs_by_insn (&df->use_info, false, true, true);
1854 break;
1855
1856 case DF_REF_ORDER_NO_TABLE:
1857 free (df->use_info.refs);
1858 df->use_info.refs = NULL;
1859 df->use_info.refs_size = 0;
1860 break;
1861
1862 case DF_REF_ORDER_UNORDERED:
1863 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1864 gcc_unreachable ();
1865 break;
1866 }
1867
1868 df->use_info.ref_order = order;
1869}
1870
1871
1872/* If the def refs in DF are not organized as ORDER, reorganize them. */
1873
1874void
1875df_maybe_reorganize_def_refs (enum df_ref_order order)
1876{
1877 if (order == df->def_info.ref_order)
1878 return;
1879
1880 switch (order)
1881 {
1882 case DF_REF_ORDER_BY_REG:
1883 df_reorganize_refs_by_reg (&df->def_info, true, false, false);
1884 break;
1885
1886 case DF_REF_ORDER_BY_INSN:
1887 df_reorganize_refs_by_insn (&df->def_info, true, false, false);
1888 break;
1889
1890 case DF_REF_ORDER_NO_TABLE:
1891 free (df->def_info.refs);
1892 df->def_info.refs = NULL;
1893 df->def_info.refs_size = 0;
1894 break;
1895
1896 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1897 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1898 case DF_REF_ORDER_UNORDERED:
1899 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1900 gcc_unreachable ();
1901 break;
1902 }
1903
1904 df->def_info.ref_order = order;
1905}
1906
1907
3072d30e 1908/* Change all of the basic block references in INSN to use the insn's
1909 current basic block. This function is called from routines that move
1910 instructions from one block to another. */
1911
1912void
a2bdd643 1913df_insn_change_bb (rtx insn, basic_block new_bb)
3072d30e 1914{
a2bdd643 1915 basic_block old_bb = BLOCK_FOR_INSN (insn);
3072d30e 1916 struct df_insn_info *insn_info;
1917 unsigned int uid = INSN_UID (insn);
1918
a2bdd643 1919 if (old_bb == new_bb)
1920 return;
1921
1922 set_block_for_insn (insn, new_bb);
1923
3072d30e 1924 if (!df)
1925 return;
1926
1927 if (dump_file)
1928 fprintf (dump_file, "changing bb of uid %d\n", uid);
1929
1930 insn_info = DF_INSN_UID_SAFE_GET (uid);
1931 if (insn_info == NULL)
1932 {
1933 if (dump_file)
1934 fprintf (dump_file, " unscanned insn\n");
1935 df_insn_rescan (insn);
1936 return;
1937 }
1938
1939 if (!INSN_P (insn))
1940 return;
1941
3072d30e 1942 df_set_bb_dirty (new_bb);
1943 if (old_bb)
1944 {
1945 if (dump_file)
1946 fprintf (dump_file, " from %d to %d\n",
1947 old_bb->index, new_bb->index);
1948 df_set_bb_dirty (old_bb);
1949 }
1950 else
1951 if (dump_file)
1952 fprintf (dump_file, " to %d\n", new_bb->index);
1953}
1954
1955
1956/* Helper function for df_ref_change_reg_with_loc. */
1957
1958static void
ed6e85ae 1959df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
1960 struct df_reg_info *new_df,
3072d30e 1961 int new_regno, rtx loc)
1962{
ed6e85ae 1963 df_ref the_ref = old_df->reg_chain;
3072d30e 1964
1965 while (the_ref)
1966 {
ed6e85ae 1967 if ((!DF_REF_IS_ARTIFICIAL (the_ref))
1968 && (DF_REF_LOC (the_ref))
1969 && (*DF_REF_LOC (the_ref) == loc))
3072d30e 1970 {
ed6e85ae 1971 df_ref next_ref = DF_REF_NEXT_REG (the_ref);
1972 df_ref prev_ref = DF_REF_PREV_REG (the_ref);
1973 df_ref *ref_vec, *ref_vec_t;
1974 struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);
3072d30e 1975 unsigned int count = 0;
1976
1977 DF_REF_REGNO (the_ref) = new_regno;
1978 DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];
1979
1980 /* Pull the_ref out of the old regno chain. */
1981 if (prev_ref)
ed6e85ae 1982 DF_REF_NEXT_REG (prev_ref) = next_ref;
3072d30e 1983 else
9ce37fa7 1984 old_df->reg_chain = next_ref;
3072d30e 1985 if (next_ref)
ed6e85ae 1986 DF_REF_PREV_REG (next_ref) = prev_ref;
9ce37fa7 1987 old_df->n_refs--;
3072d30e 1988
1989 /* Put the ref into the new regno chain. */
ed6e85ae 1990 DF_REF_PREV_REG (the_ref) = NULL;
1991 DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
9ce37fa7 1992 if (new_df->reg_chain)
ed6e85ae 1993 DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
9ce37fa7 1994 new_df->reg_chain = the_ref;
1995 new_df->n_refs++;
a43c254c 1996 if (DF_REF_BB (the_ref))
1997 df_set_bb_dirty (DF_REF_BB (the_ref));
3072d30e 1998
ed6e85ae 1999 /* The ref vector that the ref was in needs to be sorted again
 2000 because the regno is one of the sorting keys. First, find the
 2001 right vector. */
2002 if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
2003 ref_vec = insn_info->eq_uses;
3072d30e 2004 else
ed6e85ae 2005 ref_vec = insn_info->uses;
2006 if (dump_file)
2007 fprintf (dump_file, "changing reg in insn %d\n",
2008 DF_REF_INSN_UID (the_ref));
2009
3072d30e 2010 ref_vec_t = ref_vec;
ed6e85ae 2011
3072d30e 2012 /* Find the length. */
2013 while (*ref_vec_t)
2014 {
2015 count++;
2016 ref_vec_t++;
2017 }
ed6e85ae 2018 qsort (ref_vec, count, sizeof (df_ref), df_ref_compare);
3072d30e 2019
2020 the_ref = next_ref;
2021 }
2022 else
ed6e85ae 2023 the_ref = DF_REF_NEXT_REG (the_ref);
3072d30e 2024 }
2025}
2026
2027
2028/* Change the regno of all refs that contained LOC from OLD_REGNO to
ed6e85ae 2029 NEW_REGNO. Refs that do not match LOC are not changed, which means
 2030 that artificial refs are not changed since they have no loc. This
 2031 call is to support the SET_REGNO macro. */
3072d30e 2032
2033void
2034df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
2035{
2036 if ((!df) || (old_regno == -1) || (old_regno == new_regno))
2037 return;
2038
2039 df_grow_reg_info ();
2040
2041 df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
2042 DF_REG_DEF_GET (new_regno), new_regno, loc);
2043 df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
2044 DF_REG_USE_GET (new_regno), new_regno, loc);
2045 df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
2046 DF_REG_EQ_USE_GET (new_regno), new_regno, loc);
2047}
2048
2049
2050/* Delete the mw_hardregs that point into the eq_notes. */
2051
2052static unsigned int
2053df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
2054{
2055 struct df_mw_hardreg **mw_vec = insn_info->mw_hardregs;
2056 unsigned int deleted = 0;
2057 unsigned int count = 0;
2058 struct df_scan_problem_data *problem_data
2059 = (struct df_scan_problem_data *) df_scan->problem_data;
2060
2061 if (!*mw_vec)
2062 return 0;
2063
2064 while (*mw_vec)
2065 {
2066 if ((*mw_vec)->flags & DF_REF_IN_NOTE)
2067 {
2068 struct df_mw_hardreg **temp_vec = mw_vec;
2069
2070 pool_free (problem_data->mw_reg_pool, *mw_vec);
2071 temp_vec = mw_vec;
2072 /* Shove the remaining ones down one to fill the gap. While
 2073 this looks O(n**2), it is highly unusual to have any mw regs
 2074 in eq_notes, and the chances of more than one are almost
 2075 nonexistent. */
2076 while (*temp_vec)
2077 {
2078 *temp_vec = *(temp_vec + 1);
2079 temp_vec++;
2080 }
2081 deleted++;
2082 }
2083 else
2084 {
2085 mw_vec++;
2086 count++;
2087 }
2088 }
2089
2090 if (count == 0)
2091 {
ce299759 2092 df_scan_free_mws_vec (insn_info->mw_hardregs);
3072d30e 2093 insn_info->mw_hardregs = df_null_mw_rec;
2094 return 0;
2095 }
2096 return deleted;
2097}
2098
2099
2100/* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN. */
2101
2102void
2103df_notes_rescan (rtx insn)
2104{
2105 struct df_insn_info *insn_info;
2106 unsigned int uid = INSN_UID (insn);
2107
2108 if (!df)
2109 return;
2110
2111 /* The client has disabled rescanning and plans to do it itself. */
2112 if (df->changeable_flags & DF_NO_INSN_RESCAN)
2113 return;
2114
3e3659e7 2115 /* Do nothing if the insn hasn't been emitted yet. */
2116 if (!BLOCK_FOR_INSN (insn))
2117 return;
2118
3072d30e 2119 df_grow_bb_info (df_scan);
2120 df_grow_reg_info ();
2121
2122 insn_info = DF_INSN_UID_SAFE_GET (INSN_UID(insn));
2123
a3de79f9 2124 /* The client has deferred rescanning. */
3072d30e 2125 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
2126 {
2127 if (!insn_info)
2128 {
2129 insn_info = df_insn_create_insn_record (insn);
2130 insn_info->defs = df_null_ref_rec;
2131 insn_info->uses = df_null_ref_rec;
2132 insn_info->eq_uses = df_null_ref_rec;
2133 insn_info->mw_hardregs = df_null_mw_rec;
2134 }
2135
2136 bitmap_clear_bit (df->insns_to_delete, uid);
2137 /* If the insn is set to be rescanned, it does not need to also
2138 be notes rescanned. */
2139 if (!bitmap_bit_p (df->insns_to_rescan, uid))
2140 bitmap_set_bit (df->insns_to_notes_rescan, INSN_UID (insn));
2141 return;
2142 }
2143
2144 bitmap_clear_bit (df->insns_to_delete, uid);
2145 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
2146
2147 if (insn_info)
2148 {
2149 basic_block bb = BLOCK_FOR_INSN (insn);
2150 rtx note;
2151 struct df_collection_rec collection_rec;
2152 unsigned int num_deleted;
f7f05a07 2153 unsigned int mw_len;
3072d30e 2154
2155 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
f7f05a07 2156 collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
2157 collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
3072d30e 2158
2159 num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
2160 df_ref_chain_delete (insn_info->eq_uses);
2161 insn_info->eq_uses = NULL;
2162
2163 /* Process REG_EQUIV/REG_EQUAL notes */
2164 for (note = REG_NOTES (insn); note;
2165 note = XEXP (note, 1))
2166 {
2167 switch (REG_NOTE_KIND (note))
2168 {
2169 case REG_EQUIV:
2170 case REG_EQUAL:
ed6e85ae 2171 df_uses_record (DF_REF_REGULAR, &collection_rec,
3072d30e 2172 &XEXP (note, 0), DF_REF_REG_USE,
b9c74b4d 2173 bb, insn_info, DF_REF_IN_NOTE, -1, -1, VOIDmode);
3072d30e 2174 default:
2175 break;
2176 }
2177 }
2178
2179 /* Find some place to put any new mw_hardregs. */
2180 df_canonize_collection_rec (&collection_rec);
f7f05a07 2181 mw_len = VEC_length (df_mw_hardreg_ptr, collection_rec.mw_vec);
2182 if (mw_len)
3072d30e 2183 {
2184 unsigned int count = 0;
2185 struct df_mw_hardreg **mw_rec = insn_info->mw_hardregs;
2186 while (*mw_rec)
2187 {
2188 count++;
2189 mw_rec++;
2190 }
2191
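	  /* COUNT is the number of mw_hardregs already on the insn.  If
	     there are any, merge the new ones into the existing
	     NULL-terminated vector; otherwise allocate a fresh one.  */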
2192 if (count)
2193 {
2194 /* Append to the end of the existing record after
2195 expanding it if necessary. */
f7f05a07 2196 if (mw_len > num_deleted)
3072d30e 2197 {
2198 insn_info->mw_hardregs =
c768b48b 2199 XRESIZEVEC (struct df_mw_hardreg *,
f7f05a07 2200 insn_info->mw_hardregs,
2201 count + 1 + mw_len);
3072d30e 2202 }
f7f05a07 2203 memcpy (&insn_info->mw_hardregs[count],
2204 VEC_address (df_mw_hardreg_ptr, collection_rec.mw_vec),
2205 mw_len * sizeof (struct df_mw_hardreg *));
2206 insn_info->mw_hardregs[count + mw_len] = NULL;
2207 qsort (insn_info->mw_hardregs, count + mw_len,
3072d30e 2208 sizeof (struct df_mw_hardreg *), df_mw_compare);
2209 }
2210 else
2211 {
2212 /* No vector there. */
2213 insn_info->mw_hardregs
f7f05a07 2214 = XNEWVEC (struct df_mw_hardreg*, 1 + mw_len);
2215 memcpy (insn_info->mw_hardregs,
2216 VEC_address (df_mw_hardreg_ptr, collection_rec.mw_vec),
2217 mw_len * sizeof (struct df_mw_hardreg *));
2218 insn_info->mw_hardregs[mw_len] = NULL;
3072d30e 2219 }
2220 }
2221 /* Get rid of the collection's mw_vec so that df_refs_add_to_chains
 2222 will ignore it. */
f7f05a07 2223 VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
3072d30e 2224 df_refs_add_to_chains (&collection_rec, bb, insn);
f7f05a07 2225 VEC_free (df_ref, stack, collection_rec.eq_use_vec);
3072d30e 2226 }
2227 else
2228 df_insn_rescan (insn);
2229
2230}
2231
2232\f
2233/*----------------------------------------------------------------------------
2234 Hard core instruction scanning code. No external interfaces here,
2235 just a lot of routines that look inside insns.
2236----------------------------------------------------------------------------*/
2237
2238
2239/* Return true if the contents of two df_ref's are identical.
2240 It ignores the DF_REF_REG_MARKER and DF_REF_MW_HARDREG flags. */
2241
2242static bool
ed6e85ae 2243df_ref_equal_p (df_ref ref1, df_ref ref2)
3072d30e 2244{
2245 if (!ref2)
2246 return false;
ed6e85ae 2247
2248 if (ref1 == ref2)
2249 return true;
2250
2251 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2)
2252 || DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2)
2253 || DF_REF_REG (ref1) != DF_REF_REG (ref2)
2254 || DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2)
2255 || ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
2256 != (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
2257 || DF_REF_BB (ref1) != DF_REF_BB (ref2)
2258 || DF_REF_INSN_INFO (ref1) != DF_REF_INSN_INFO (ref2))
2259 return false;
2260
2261 switch (DF_REF_CLASS (ref1))
2262 {
2263 case DF_REF_ARTIFICIAL:
2264 case DF_REF_BASE:
2265 return true;
30de5b55 2266
ed6e85ae 2267 case DF_REF_EXTRACT:
2268 if ((DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
bf1f8fbc 2269 || (DF_REF_EXTRACT_WIDTH (ref1) != DF_REF_EXTRACT_WIDTH (ref2))
ed6e85ae 2270 || (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2)))
2271 return false;
2272 /* fallthru. */
2273
2274 case DF_REF_REGULAR:
2275 return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);
30de5b55 2276
ed6e85ae 2277 default:
2278 gcc_unreachable ();
2279 }
2280 return false;
3072d30e 2281}
2282
2283
2284/* Compare REF1 and REF2 for sorting. This is only called from places
2285 where all of the refs are of the same type, in the same insn, and
2286 have the same bb. So these fields are not checked. */
2287
2288static int
2289df_ref_compare (const void *r1, const void *r2)
2290{
ed6e85ae 2291 const df_ref ref1 = *(const df_ref *)r1;
2292 const df_ref ref2 = *(const df_ref *)r2;
3072d30e 2293
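  /* The sort keys, in priority order: ref class, regno, ref type, then
     DF_REF_ORDER as a tie-breaker when the regs or locs differ, then the
     flags (with multiword-hardreg refs first), and finally the extract
     fields.  */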
2294 if (ref1 == ref2)
2295 return 0;
2296
ed6e85ae 2297 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2))
2298 return (int)DF_REF_CLASS (ref1) - (int)DF_REF_CLASS (ref2);
2299
3072d30e 2300 if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
2301 return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);
2302
2303 if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
2304 return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);
2305
ed6e85ae 2306 if (DF_REF_REG (ref1) != DF_REF_REG (ref2))
2307 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2308
2309 /* Cannot look at the LOC field on artificial refs. */
2310 if (DF_REF_CLASS (ref1) != DF_REF_ARTIFICIAL
2311 && DF_REF_LOC (ref1) != DF_REF_LOC (ref2))
3072d30e 2312 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2313
2314 if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
2315 {
2316 /* If two refs are identical except that one of them is from
 2317 a mw reg and one is not, we need to have the one with the mw
 2318 first. */
2319 if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
2320 DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
2321 return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
2322 else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
2323 return -1;
2324 else
2325 return 1;
2326 }
30de5b55 2327
ed6e85ae 2328 /* The classes are the same at this point so it is safe to only look
30de5b55 2329 at ref1. */
ed6e85ae 2330 if (DF_REF_CLASS (ref1) == DF_REF_EXTRACT)
30de5b55 2331 {
ed6e85ae 2332 if (DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
2333 return DF_REF_EXTRACT_OFFSET (ref1) - DF_REF_EXTRACT_OFFSET (ref2);
2334 if (DF_REF_EXTRACT_WIDTH (ref1) != DF_REF_EXTRACT_WIDTH (ref2))
2335 return DF_REF_EXTRACT_WIDTH (ref1) - DF_REF_EXTRACT_WIDTH (ref2);
2336 if (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2))
2337 return DF_REF_EXTRACT_MODE (ref1) - DF_REF_EXTRACT_MODE (ref2);
30de5b55 2338 }
3072d30e 2339 return 0;
2340}
2341
2342static void
f7f05a07 2343df_swap_refs (VEC(df_ref,stack) **ref_vec, int i, int j)
3072d30e 2344{
f7f05a07 2345 df_ref tmp = VEC_index (df_ref, *ref_vec, i);
2346 VEC_replace (df_ref, *ref_vec, i, VEC_index (df_ref, *ref_vec, j));
2347 VEC_replace (df_ref, *ref_vec, j, tmp);
3072d30e 2348}
2349
2350/* Sort and compress a set of refs. */
2351
f7f05a07 2352static void
2353df_sort_and_compress_refs (VEC(df_ref,stack) **ref_vec)
3072d30e 2354{
f7f05a07 2355 unsigned int count;
3072d30e 2356 unsigned int i;
2357 unsigned int dist = 0;
2358
f7f05a07 2359 count = VEC_length (df_ref, *ref_vec);
2360
3072d30e 2361 /* If there are 1 or 0 elements, there is nothing to do. */
2362 if (count < 2)
f7f05a07 2363 return;
3072d30e 2364 else if (count == 2)
2365 {
f7f05a07 2366 df_ref r0 = VEC_index (df_ref, *ref_vec, 0);
2367 df_ref r1 = VEC_index (df_ref, *ref_vec, 1);
2368 if (df_ref_compare (&r0, &r1) > 0)
3072d30e 2369 df_swap_refs (ref_vec, 0, 1);
2370 }
2371 else
2372 {
2373 for (i = 0; i < count - 1; i++)
f7f05a07 2374 {
2375 df_ref r0 = VEC_index (df_ref, *ref_vec, i);
2376 df_ref r1 = VEC_index (df_ref, *ref_vec, i + 1);
2377 if (df_ref_compare (&r0, &r1) >= 0)
2378 break;
2379 }
3072d30e 2380 /* If the array is already strictly ordered,
 2381 which is the most common case when COUNT is large
 2382 (as it is for CALL insns),
 2383 there is no need to sort and filter out duplicates;
 2384 simply return.
 2385 Make sure DF_GET_ADD_REFS adds refs in the increasing order
 2386 of DF_REF_COMPARE. */
2387 if (i == count - 1)
f7f05a07 2388 return;
2389 qsort (VEC_address (df_ref, *ref_vec), count, sizeof (df_ref),
2390 df_ref_compare);
3072d30e 2391 }
2392
2393 for (i=0; i<count-dist; i++)
2394 {
2395 /* Find the next ref that is not equal to the current ref. */
f7f05a07 2396 while (i + dist + 1 < count
2397 && df_ref_equal_p (VEC_index (df_ref, *ref_vec, i),
2398 VEC_index (df_ref, *ref_vec, i + dist + 1)))
3072d30e 2399 {
f7f05a07 2400 df_free_ref (VEC_index (df_ref, *ref_vec, i + dist + 1));
3072d30e 2401 dist++;
2402 }
2403 /* Copy it down to the next position. */
f7f05a07 2404 if (dist && i + dist + 1 < count)
2405 VEC_replace (df_ref, *ref_vec, i + 1,
2406 VEC_index (df_ref, *ref_vec, i + dist + 1));
3072d30e 2407 }
2408
2409 count -= dist;
f7f05a07 2410 VEC_truncate (df_ref, *ref_vec, count);
3072d30e 2411}
2412
2413
2414/* Return true if the contents of two df_mw_hardreg records are identical. */
2416
2417static bool
2418df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2419{
2420 if (!mw2)
2421 return false;
2422 return (mw1 == mw2) ||
2423 (mw1->mw_reg == mw2->mw_reg
2424 && mw1->type == mw2->type
2425 && mw1->flags == mw2->flags
2426 && mw1->start_regno == mw2->start_regno
2427 && mw1->end_regno == mw2->end_regno);
2428}
2429
2430
2431/* Compare MW1 and MW2 for sorting. */
2432
2433static int
2434df_mw_compare (const void *m1, const void *m2)
2435{
c1fdef8e 2436 const struct df_mw_hardreg *const mw1 = *(const struct df_mw_hardreg *const*)m1;
2437 const struct df_mw_hardreg *const mw2 = *(const struct df_mw_hardreg *const*)m2;
3072d30e 2438
2439 if (mw1 == mw2)
2440 return 0;
2441
2442 if (mw1->type != mw2->type)
2443 return mw1->type - mw2->type;
2444
2445 if (mw1->flags != mw2->flags)
2446 return mw1->flags - mw2->flags;
2447
2448 if (mw1->start_regno != mw2->start_regno)
2449 return mw1->start_regno - mw2->start_regno;
2450
2451 if (mw1->end_regno != mw2->end_regno)
2452 return mw1->end_regno - mw2->end_regno;
2453
2454 if (mw1->mw_reg != mw2->mw_reg)
2455 return mw1->mw_order - mw2->mw_order;
2456
2457 return 0;
2458}
2459
2460
2461/* Sort and compress a set of mws (multiword hardreg records). */
2462
f7f05a07 2463static void
2464df_sort_and_compress_mws (VEC(df_mw_hardreg_ptr,stack) **mw_vec)
3072d30e 2465{
f7f05a07 2466 unsigned int count;
3072d30e 2467 struct df_scan_problem_data *problem_data
2468 = (struct df_scan_problem_data *) df_scan->problem_data;
2469 unsigned int i;
2470 unsigned int dist = 0;
3072d30e 2471
f7f05a07 2472 count = VEC_length (df_mw_hardreg_ptr, *mw_vec);
3072d30e 2473 if (count < 2)
f7f05a07 2474 return;
3072d30e 2475 else if (count == 2)
2476 {
f7f05a07 2477 struct df_mw_hardreg *m0 = VEC_index (df_mw_hardreg_ptr, *mw_vec, 0);
2478 struct df_mw_hardreg *m1 = VEC_index (df_mw_hardreg_ptr, *mw_vec, 1);
2479 if (df_mw_compare (&m0, &m1) > 0)
3072d30e 2480 {
f7f05a07 2481 struct df_mw_hardreg *tmp = VEC_index (df_mw_hardreg_ptr,
2482 *mw_vec, 0);
2483 VEC_replace (df_mw_hardreg_ptr, *mw_vec, 0,
2484 VEC_index (df_mw_hardreg_ptr, *mw_vec, 1));
2485 VEC_replace (df_mw_hardreg_ptr, *mw_vec, 1, tmp);
3072d30e 2486 }
2487 }
2488 else
f7f05a07 2489 qsort (VEC_address (df_mw_hardreg_ptr, *mw_vec), count,
2490 sizeof (struct df_mw_hardreg *), df_mw_compare);
3072d30e 2491
2492 for (i=0; i<count-dist; i++)
2493 {
2494 /* Find the next ref that is not equal to the current ref. */
f7f05a07 2495 while (i + dist + 1 < count
2496 && df_mw_equal_p (VEC_index (df_mw_hardreg_ptr, *mw_vec, i),
2497 VEC_index (df_mw_hardreg_ptr, *mw_vec,
2498 i + dist + 1)))
3072d30e 2499 {
f7f05a07 2500 pool_free (problem_data->mw_reg_pool,
2501 VEC_index (df_mw_hardreg_ptr, *mw_vec, i + dist + 1));
3072d30e 2502 dist++;
2503 }
2504 /* Copy it down to the next position. */
f7f05a07 2505 if (dist && i + dist + 1 < count)
2506 VEC_replace (df_mw_hardreg_ptr, *mw_vec, i + 1,
2507 VEC_index (df_mw_hardreg_ptr, *mw_vec, i + dist + 1));
3072d30e 2508 }
2509
2510 count -= dist;
f7f05a07 2511 VEC_truncate (df_mw_hardreg_ptr, *mw_vec, count);
3072d30e 2512}
2513
2514
2515/* Sort and remove duplicates from the COLLECTION_REC. */
2516
2517static void
2518df_canonize_collection_rec (struct df_collection_rec *collection_rec)
2519{
f7f05a07 2520 df_sort_and_compress_refs (&collection_rec->def_vec);
2521 df_sort_and_compress_refs (&collection_rec->use_vec);
2522 df_sort_and_compress_refs (&collection_rec->eq_use_vec);
2523 df_sort_and_compress_mws (&collection_rec->mw_vec);
3072d30e 2524}
2525
2526
2527/* Add the new df_ref to appropriate reg_info/ref_info chains. */
2528
2529static void
ed6e85ae 2530df_install_ref (df_ref this_ref,
3072d30e 2531 struct df_reg_info *reg_info,
2532 struct df_ref_info *ref_info,
2533 bool add_to_table)
2534{
2535 unsigned int regno = DF_REF_REGNO (this_ref);
2536 /* Add the ref to the reg_{def,use,eq_use} chain. */
ed6e85ae 2537 df_ref head = reg_info->reg_chain;
3072d30e 2538
2539 reg_info->reg_chain = this_ref;
2540 reg_info->n_refs++;
2541
2542 if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
2543 {
2544 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
2545 df->hard_regs_live_count[regno]++;
2546 }
2547
2548 gcc_assert (DF_REF_NEXT_REG (this_ref) == NULL);
2549 gcc_assert (DF_REF_PREV_REG (this_ref) == NULL);
2550
2551 DF_REF_NEXT_REG (this_ref) = head;
2552
2553 /* We cannot actually link to the head of the chain. */
2554 DF_REF_PREV_REG (this_ref) = NULL;
2555
2556 if (head)
2557 DF_REF_PREV_REG (head) = this_ref;
2558
2559 if (add_to_table)
2560 {
2561 gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
2562 df_check_and_grow_ref_info (ref_info, 1);
2563 DF_REF_ID (this_ref) = ref_info->table_size;
2564 /* Add the ref to the big array of defs. */
2565 ref_info->refs[ref_info->table_size] = this_ref;
2566 ref_info->table_size++;
2567 }
2568 else
2569 DF_REF_ID (this_ref) = -1;
2570
2571 ref_info->total_size++;
2572}
2573
2574
2575/* This function takes one of the groups of refs (defs, uses or
2576 eq_uses) and installs the entire group into the insn. It also adds
2577 each of these refs into the appropriate chains. */
2578
ed6e85ae 2579static df_ref *
3072d30e 2580df_install_refs (basic_block bb,
f7f05a07 2581 VEC(df_ref,stack)* old_vec,
3072d30e 2582 struct df_reg_info **reg_info,
2583 struct df_ref_info *ref_info,
2584 bool is_notes)
2585{
f7f05a07 2586 unsigned int count;
2587
2588 count = VEC_length (df_ref, old_vec);
3072d30e 2589 if (count)
2590 {
ed6e85ae 2591 df_ref *new_vec = XNEWVEC (df_ref, count + 1);
3072d30e 2592 bool add_to_table;
f7f05a07 2593 df_ref this_ref;
2594 unsigned int ix;
3072d30e 2595
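      /* Installing new refs invalidates any existing by-reg or by-insn
	 ordering of the ref table, so downgrade REF_INFO to the matching
	 unordered kind.  Note refs only go into the table when the order
	 includes notes.  */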
2596 switch (ref_info->ref_order)
2597 {
2598 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
2599 case DF_REF_ORDER_BY_REG_WITH_NOTES:
2600 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
2601 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
2602 add_to_table = true;
2603 break;
2604 case DF_REF_ORDER_UNORDERED:
2605 case DF_REF_ORDER_BY_REG:
2606 case DF_REF_ORDER_BY_INSN:
2607 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
2608 add_to_table = !is_notes;
2609 break;
2610 default:
2611 add_to_table = false;
2612 break;
2613 }
2614
2615 /* Do not add if ref is not in the right blocks. */
2616 if (add_to_table && df->analyze_subset)
2617 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
2618
f7f05a07 2619 for (ix = 0; VEC_iterate (df_ref, old_vec, ix, this_ref); ++ix)
3072d30e 2620 {
f7f05a07 2621 new_vec[ix] = this_ref;
3072d30e 2622 df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
2623 ref_info, add_to_table);
2624 }
2625
2626 new_vec[count] = NULL;
2627 return new_vec;
2628 }
2629 else
2630 return df_null_ref_rec;
2631}
2632
2633
2634/* This function takes the mws and installs the entire group into the
2635 insn. */
2636
2637static struct df_mw_hardreg **
f7f05a07 2638df_install_mws (VEC(df_mw_hardreg_ptr,stack) *old_vec)
3072d30e 2639{
f7f05a07 2640 unsigned int count;
2641
2642 count = VEC_length (df_mw_hardreg_ptr, old_vec);
3072d30e 2643 if (count)
2644 {
2645 struct df_mw_hardreg **new_vec
2646 = XNEWVEC (struct df_mw_hardreg*, count + 1);
f7f05a07 2647 memcpy (new_vec, VEC_address (df_mw_hardreg_ptr, old_vec),
2648 sizeof (struct df_mw_hardreg*) * count);
2649 new_vec[count] = NULL;
3072d30e 2650 return new_vec;
2651 }
2652 else
2653 return df_null_mw_rec;
2654}
2655
2656
2657/* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
2658 chains and update other necessary information. */
2659
2660static void
2661df_refs_add_to_chains (struct df_collection_rec *collection_rec,
2662 basic_block bb, rtx insn)
2663{
2664 if (insn)
2665 {
158b6cc9 2666 struct df_insn_info *insn_rec = DF_INSN_INFO_GET (insn);
3072d30e 2667 /* If there is a vector in the collection rec, add it to the
2668 insn. A null rec is a signal that the caller will handle the
2669 chain specially. */
2670 if (collection_rec->def_vec)
2671 {
ce299759 2672 df_scan_free_ref_vec (insn_rec->defs);
3072d30e 2673 insn_rec->defs
f7f05a07 2674 = df_install_refs (bb, collection_rec->def_vec,
3072d30e 2675 df->def_regs,
2676 &df->def_info, false);
2677 }
2678 if (collection_rec->use_vec)
2679 {
ce299759 2680 df_scan_free_ref_vec (insn_rec->uses);
3072d30e 2681 insn_rec->uses
2682 = df_install_refs (bb, collection_rec->use_vec,
3072d30e 2683 df->use_regs,
2684 &df->use_info, false);
2685 }
2686 if (collection_rec->eq_use_vec)
2687 {
ce299759 2688 df_scan_free_ref_vec (insn_rec->eq_uses);
3072d30e 2689 insn_rec->eq_uses
2690 = df_install_refs (bb, collection_rec->eq_use_vec,
3072d30e 2691 df->eq_use_regs,
2692 &df->use_info, true);
2693 }
2694 if (collection_rec->mw_vec)
2695 {
ce299759 2696 df_scan_free_mws_vec (insn_rec->mw_hardregs);
3072d30e 2697 insn_rec->mw_hardregs
f7f05a07 2698 = df_install_mws (collection_rec->mw_vec);
3072d30e 2699 }
2700 }
2701 else
2702 {
2703 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
2704
ce299759 2705 df_scan_free_ref_vec (bb_info->artificial_defs);
3072d30e 2706 bb_info->artificial_defs
f7f05a07 2707 = df_install_refs (bb, collection_rec->def_vec,
3072d30e 2708 df->def_regs,
2709 &df->def_info, false);
ce299759 2710 df_scan_free_ref_vec (bb_info->artificial_uses);
3072d30e 2711 bb_info->artificial_uses
2712 = df_install_refs (bb, collection_rec->use_vec,
3072d30e 2713 df->use_regs,
2714 &df->use_info, false);
2715 }
2716}
e011eba9 2717
e011eba9 2718
30de5b55 2719/* Allocate a ref and initialize its fields.
2720
2721 If the REF_FLAGS field contains DF_REF_SIGN_EXTRACT or
bf1f8fbc 2722 DF_REF_ZERO_EXTRACT, WIDTH, OFFSET and MODE carry the extract
30de5b55 2723 parameters when they were constants; otherwise WIDTH and OFFSET
 2724 should be -1 even if those flags were set. */
3072d30e 2725
ed6e85ae 2726static df_ref
2727df_ref_create_structure (enum df_ref_class cl,
2728 struct df_collection_rec *collection_rec,
3072d30e 2729 rtx reg, rtx *loc,
158b6cc9 2730 basic_block bb, struct df_insn_info *info,
3072d30e 2731 enum df_ref_type ref_type,
b9c74b4d 2732 int ref_flags,
bf1f8fbc 2733 int width, int offset, enum machine_mode mode)
3072d30e 2734{
ed6e85ae 2735 df_ref this_ref = NULL;
3072d30e 2736 int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2737 struct df_scan_problem_data *problem_data
2738 = (struct df_scan_problem_data *) df_scan->problem_data;
2739
ed6e85ae 2740 switch (cl)
30de5b55 2741 {
ed6e85ae 2742 case DF_REF_BASE:
2743 this_ref = (df_ref) pool_alloc (problem_data->ref_base_pool);
2744 gcc_assert (loc == NULL);
2745 break;
2746
2747 case DF_REF_ARTIFICIAL:
2748 this_ref = (df_ref) pool_alloc (problem_data->ref_artificial_pool);
2749 this_ref->artificial_ref.bb = bb;
2750 gcc_assert (loc == NULL);
2751 break;
2752
2753 case DF_REF_REGULAR:
2754 this_ref = (df_ref) pool_alloc (problem_data->ref_regular_pool);
2755 this_ref->regular_ref.loc = loc;
2756 gcc_assert (loc);
2757 break;
2758
2759 case DF_REF_EXTRACT:
2760 this_ref = (df_ref) pool_alloc (problem_data->ref_extract_pool);
bf1f8fbc 2761 DF_REF_EXTRACT_WIDTH (this_ref) = width;
2762 DF_REF_EXTRACT_OFFSET (this_ref) = offset;
2763 DF_REF_EXTRACT_MODE (this_ref) = mode;
ed6e85ae 2764 this_ref->regular_ref.loc = loc;
2765 gcc_assert (loc);
2766 break;
30de5b55 2767 }
ed6e85ae 2768
2769 DF_REF_CLASS (this_ref) = cl;
3072d30e 2770 DF_REF_ID (this_ref) = -1;
2771 DF_REF_REG (this_ref) = reg;
2772 DF_REF_REGNO (this_ref) = regno;
ed6e85ae 2773 DF_REF_TYPE (this_ref) = ref_type;
158b6cc9 2774 DF_REF_INSN_INFO (this_ref) = info;
3072d30e 2775 DF_REF_CHAIN (this_ref) = NULL;
3072d30e 2776 DF_REF_FLAGS (this_ref) = ref_flags;
3072d30e 2777 DF_REF_NEXT_REG (this_ref) = NULL;
2778 DF_REF_PREV_REG (this_ref) = NULL;
2779 DF_REF_ORDER (this_ref) = df->ref_order++;
2780
2781 /* We need to clear this bit because fwprop (and in the future
 2782 possibly other optimizations) sometimes creates new refs using old
 2783 refs as the model. */
2784 DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);
2785
2786 /* See if this ref needs to have DF_HARD_REG_LIVE bit set. */
2787 if ((regno < FIRST_PSEUDO_REGISTER)
2788 && (!DF_REF_IS_ARTIFICIAL (this_ref)))
2789 {
ed6e85ae 2790 if (DF_REF_REG_DEF_P (this_ref))
3072d30e 2791 {
2792 if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
2793 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2794 }
2795 else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
2796 && (regno == FRAME_POINTER_REGNUM
2797 || regno == ARG_POINTER_REGNUM)))
2798 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2799 }
2800
2801 if (collection_rec)
2802 {
ed6e85ae 2803 if (DF_REF_REG_DEF_P (this_ref))
f7f05a07 2804 VEC_safe_push (df_ref, stack, collection_rec->def_vec, this_ref);
3072d30e 2805 else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
f7f05a07 2806 VEC_safe_push (df_ref, stack, collection_rec->eq_use_vec, this_ref);
3072d30e 2807 else
f7f05a07 2808 VEC_safe_push (df_ref, stack, collection_rec->use_vec, this_ref);
e011eba9 2809 }
3072d30e 2810
e011eba9 2811 return this_ref;
2812}
2813
2814
2815/* Create new references of type DF_REF_TYPE for each part of register REG
30de5b55 2816 at address LOC within INSN of BB.
2817
2818 If the REF_FLAGS field contains DF_REF_SIGN_EXTRACT or
bf1f8fbc 2819 DF_REF_ZERO_EXTRACT, WIDTH, OFFSET and MODE carry the extract
 2820 parameters when they were constants; otherwise WIDTH and OFFSET
 2821 should be -1 even if those flags were set. */
30de5b55 2822
e011eba9 2823
2824static void
ed6e85ae 2825df_ref_record (enum df_ref_class cl,
2826 struct df_collection_rec *collection_rec,
3072d30e 2827 rtx reg, rtx *loc,
158b6cc9 2828 basic_block bb, struct df_insn_info *insn_info,
e011eba9 2829 enum df_ref_type ref_type,
b9c74b4d 2830 int ref_flags,
bf1f8fbc 2831 int width, int offset, enum machine_mode mode)
e011eba9 2832{
3e6933a8 2833 unsigned int regno;
e011eba9 2834
2835 gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
2836
e011eba9 2837 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2838 if (regno < FIRST_PSEUDO_REGISTER)
2839 {
3e6933a8 2840 struct df_mw_hardreg *hardreg = NULL;
2841 struct df_scan_problem_data *problem_data
3072d30e 2842 = (struct df_scan_problem_data *) df_scan->problem_data;
2843 unsigned int i;
2844 unsigned int endregno;
ed6e85ae 2845 df_ref ref;
e011eba9 2846
e011eba9 2847 if (GET_CODE (reg) == SUBREG)
fe2ebfc8 2848 {
2849 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
2850 SUBREG_BYTE (reg), GET_MODE (reg));
a2c6f0b7 2851 endregno = regno + subreg_nregs (reg);
fe2ebfc8 2852 }
2853 else
a2c6f0b7 2854 endregno = END_HARD_REGNO (reg);
e011eba9 2855
3072d30e 2856 /* If this is a multiword hardreg, we create some extra
 2857 data structures that will enable us to easily build REG_DEAD
 2858 and REG_UNUSED notes. */
158b6cc9 2859 if ((endregno != regno + 1) && insn_info)
3e6933a8 2860 {
3e6933a8 2861 /* Sets to a subreg of a multiword register are partial.
2862 Sets to a non-subreg of a multiword register are not. */
05beda33 2863 if (GET_CODE (reg) == SUBREG)
3e6933a8 2864 ref_flags |= DF_REF_PARTIAL;
2865 ref_flags |= DF_REF_MW_HARDREG;
3072d30e 2866
364c0c59 2867 hardreg = (struct df_mw_hardreg *) pool_alloc (problem_data->mw_reg_pool);
3e6933a8 2868 hardreg->type = ref_type;
2869 hardreg->flags = ref_flags;
2870 hardreg->mw_reg = reg;
3072d30e 2871 hardreg->start_regno = regno;
2872 hardreg->end_regno = endregno - 1;
2873 hardreg->mw_order = df->ref_order++;
f7f05a07 2874 VEC_safe_push (df_mw_hardreg_ptr, stack, collection_rec->mw_vec,
2875 hardreg);
3e6933a8 2876 }
2877
e011eba9 2878 for (i = regno; i < endregno; i++)
2879 {
ed6e85ae 2880 ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
158b6cc9 2881 bb, insn_info, ref_type, ref_flags,
bf1f8fbc 2882 width, offset, mode);
3e6933a8 2883
3072d30e 2884 gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
e011eba9 2885 }
2886 }
2887 else
2888 {
ed6e85ae 2889 df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
2890 ref_type, ref_flags, width, offset, mode);
e011eba9 2891 }
2892}
2893
2894
2895/* A set to a non-paradoxical SUBREG for which the number of word_mode units
2896 covered by the outer mode is smaller than that covered by the inner mode,
2897 is a read-modify-write operation.
2898 This function returns true iff the SUBREG X is such a SUBREG. */
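/* For example, on most targets with 32-bit words, a store to
   (subreg:SI (reg:DI X) 0) writes only one word of X, so the untouched
   word must still be treated as read as well as written.  */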
2899
2900bool
2901df_read_modify_subreg_p (rtx x)
2902{
2903 unsigned int isize, osize;
2904 if (GET_CODE (x) != SUBREG)
2905 return false;
2906 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
2907 osize = GET_MODE_SIZE (GET_MODE (x));
c9a4672f 2908 return isize > osize
2909 && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
e011eba9 2910}
2911
2912
2913/* Process all the registers defined in the rtx, X.
2914 Autoincrement/decrement definitions will be picked up by
2915 df_uses_record. */
2916
2917static void
3072d30e 2918df_def_record_1 (struct df_collection_rec *collection_rec,
158b6cc9 2919 rtx x, basic_block bb, struct df_insn_info *insn_info,
b9c74b4d 2920 int flags)
e011eba9 2921{
2922 rtx *loc;
2923 rtx dst;
30de5b55 2924 int offset = -1;
2925 int width = -1;
bc620c5c 2926 enum machine_mode mode = VOIDmode;
ed6e85ae 2927 enum df_ref_class cl = DF_REF_REGULAR;
e011eba9 2928
2929 /* We may recursively call ourselves on EXPR_LIST when dealing with PARALLEL
2930 construct. */
2931 if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
2932 loc = &XEXP (x, 0);
2933 else
2934 loc = &SET_DEST (x);
2935 dst = *loc;
2936
35792caf 2937 /* It is legal to have a set destination be a parallel. */
2938 if (GET_CODE (dst) == PARALLEL)
e011eba9 2939 {
2940 int i;
2941
2942 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
2943 {
2944 rtx temp = XVECEXP (dst, 0, i);
2945 if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
2946 || GET_CODE (temp) == SET)
3072d30e 2947 df_def_record_1 (collection_rec,
158b6cc9 2948 temp, bb, insn_info,
3e6933a8 2949 GET_CODE (temp) == CLOBBER
3072d30e 2950 ? flags | DF_REF_MUST_CLOBBER : flags);
e011eba9 2951 }
2952 return;
2953 }
2954
30de5b55 2955 if (GET_CODE (dst) == STRICT_LOW_PART)
e011eba9 2956 {
30de5b55 2957 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;
2958
2959 loc = &XEXP (dst, 0);
2960 dst = *loc;
2961 }
2962
2963 if (GET_CODE (dst) == ZERO_EXTRACT)
2964 {
2965 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;
2966
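      /* If the position and width are compile-time constants, record them
	 so that an EXTRACT ref (which knows exactly which bits are
	 written) can be created below.  */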
971ba038 2967 if (CONST_INT_P (XEXP (dst, 1))
2968 && CONST_INT_P (XEXP (dst, 2)))
30de5b55 2969 {
2970 width = INTVAL (XEXP (dst, 1));
2971 offset = INTVAL (XEXP (dst, 2));
bf1f8fbc 2972 mode = GET_MODE (dst);
ed6e85ae 2973 cl = DF_REF_EXTRACT;
30de5b55 2974 }
dea7b504 2975
e011eba9 2976 loc = &XEXP (dst, 0);
2977 dst = *loc;
e011eba9 2978 }
2979
dea7b504 2980 /* At this point if we do not have a reg or a subreg, just return. */
2981 if (REG_P (dst))
2982 {
ed6e85ae 2983 df_ref_record (cl, collection_rec,
158b6cc9 2984 dst, loc, bb, insn_info, DF_REF_REG_DEF, flags,
bf1f8fbc 2985 width, offset, mode);
dea7b504 2986
2987 /* We want to keep sp alive everywhere - by making all
 2988 writes to sp also uses of sp. */
2989 if (REGNO (dst) == STACK_POINTER_REGNUM)
ed6e85ae 2990 df_ref_record (DF_REF_BASE, collection_rec,
158b6cc9 2991 dst, NULL, bb, insn_info, DF_REF_REG_USE, flags,
bf1f8fbc 2992 width, offset, mode);
dea7b504 2993 }
2994 else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
2995 {
2996 if (df_read_modify_subreg_p (dst))
2997 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
c9a4672f 2998
dea7b504 2999 flags |= DF_REF_SUBREG;
37ad3318 3000
ed6e85ae 3001 df_ref_record (cl, collection_rec,
158b6cc9 3002 dst, loc, bb, insn_info, DF_REF_REG_DEF, flags,
bf1f8fbc 3003 width, offset, mode);
dea7b504 3004 }
e011eba9 3005}
3006
3007
3008/* Process all the registers defined in the pattern rtx, X. */
3009
3010static void
3072d30e 3011df_defs_record (struct df_collection_rec *collection_rec,
158b6cc9 3012 rtx x, basic_block bb, struct df_insn_info *insn_info,
b9c74b4d 3013 int flags)
e011eba9 3014{
3015 RTX_CODE code = GET_CODE (x);
3016
3017 if (code == SET || code == CLOBBER)
3018 {
3019 /* Mark the single def within the pattern. */
8458f4ca 3020 int clobber_flags = flags;
3072d30e 3021 clobber_flags |= (code == CLOBBER) ? DF_REF_MUST_CLOBBER : 0;
158b6cc9 3022 df_def_record_1 (collection_rec, x, bb, insn_info, clobber_flags);
e011eba9 3023 }
3024 else if (code == COND_EXEC)
3025 {
3072d30e 3026 df_defs_record (collection_rec, COND_EXEC_CODE (x),
158b6cc9 3027 bb, insn_info, DF_REF_CONDITIONAL);
e011eba9 3028 }
3029 else if (code == PARALLEL)
3030 {
3031 int i;
3032
3033 /* Mark the multiple defs within the pattern. */
3034 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
158b6cc9 3035 df_defs_record (collection_rec, XVECEXP (x, 0, i), bb, insn_info, flags);
e011eba9 3036 }
3037}
3038
3039
30de5b55 3040/* Process all the registers used in the rtx at address LOC.
3041
3042 If the REF_FLAGS field contains DF_REF_SIGN_EXTRACT or
bf1f8fbc 3043 DF_REF_ZERO_EXTRACT, WIDTH, OFFSET and MODE carry the extract
 3044 parameters when they were constants; otherwise WIDTH and OFFSET
 3045 should be -1 even if those flags were set. */
e011eba9 3046
3047static void
ed6e85ae 3048df_uses_record (enum df_ref_class cl, struct df_collection_rec *collection_rec,
3072d30e 3049 rtx *loc, enum df_ref_type ref_type,
158b6cc9 3050 basic_block bb, struct df_insn_info *insn_info,
b9c74b4d 3051 int flags,
bf1f8fbc 3052 int width, int offset, enum machine_mode mode)
e011eba9 3053{
3054 RTX_CODE code;
3055 rtx x;
3072d30e 3056
e011eba9 3057 retry:
3058 x = *loc;
3059 if (!x)
3060 return;
3061 code = GET_CODE (x);
3062 switch (code)
3063 {
3064 case LABEL_REF:
3065 case SYMBOL_REF:
3066 case CONST_INT:
3067 case CONST:
3068 case CONST_DOUBLE:
e397ad8e 3069 case CONST_FIXED:
e011eba9 3070 case CONST_VECTOR:
3071 case PC:
3072 case CC0:
3073 case ADDR_VEC:
3074 case ADDR_DIFF_VEC:
3075 return;
3076
3077 case CLOBBER:
3078 /* If we are clobbering a MEM, mark any registers inside the address
3079 as being used. */
3080 if (MEM_P (XEXP (x, 0)))
ed6e85ae 3081 df_uses_record (cl, collection_rec,
3072d30e 3082 &XEXP (XEXP (x, 0), 0),
158b6cc9 3083 DF_REF_REG_MEM_STORE,
3084 bb, insn_info,
3085 flags, width, offset, mode);
e011eba9 3086
3087 /* If we're clobbering a REG then we have a def so ignore. */
3088 return;
3089
3090 case MEM:
ed6e85ae 3091 df_uses_record (cl, collection_rec,
3072d30e 3092 &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
158b6cc9 3093 bb, insn_info, flags & DF_REF_IN_NOTE,
bf1f8fbc 3094 width, offset, mode);
e011eba9 3095 return;
3096
3097 case SUBREG:
3098 /* While we're here, optimize this case. */
3e6933a8 3099 flags |= DF_REF_PARTIAL;
e011eba9 3100 /* In case the SUBREG is not of a REG, do not optimize. */
3101 if (!REG_P (SUBREG_REG (x)))
3102 {
3103 loc = &SUBREG_REG (x);
ed6e85ae 3104 df_uses_record (cl, collection_rec, loc, ref_type, bb, insn_info, flags,
bf1f8fbc 3105 width, offset, mode);
e011eba9 3106 return;
3107 }
3108 /* ... Fall through ... */
3109
3110 case REG:
ed6e85ae 3111 df_ref_record (cl, collection_rec,
158b6cc9 3112 x, loc, bb, insn_info,
3113 ref_type, flags,
bf1f8fbc 3114 width, offset, mode);
e011eba9 3115 return;
3116
30de5b55 3117 case SIGN_EXTRACT:
3118 case ZERO_EXTRACT:
3119 {
3120 /* If the parameters to the zero or sign extract are
3121 constants, strip them off and recurse, otherwise there is
3122 no information that we can gain from this operation. */
971ba038 3123 if (CONST_INT_P (XEXP (x, 1))
3124 && CONST_INT_P (XEXP (x, 2)))
30de5b55 3125 {
3126 width = INTVAL (XEXP (x, 1));
3127 offset = INTVAL (XEXP (x, 2));
bf1f8fbc 3128 mode = GET_MODE (x);
30de5b55 3129
3130 if (code == ZERO_EXTRACT)
3131 flags |= DF_REF_ZERO_EXTRACT;
3132 else
3133 flags |= DF_REF_SIGN_EXTRACT;
3134
ed6e85ae 3135 df_uses_record (DF_REF_EXTRACT, collection_rec,
158b6cc9 3136 &XEXP (x, 0), ref_type, bb, insn_info, flags,
bf1f8fbc 3137 width, offset, mode);
30de5b55 3138 return;
3139 }
3140 }
3141 break;
3142
e011eba9 3143 case SET:
3144 {
3145 rtx dst = SET_DEST (x);
3146 gcc_assert (!(flags & DF_REF_IN_NOTE));
ed6e85ae 3147 df_uses_record (cl, collection_rec,
158b6cc9 3148 &SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags,
bf1f8fbc 3149 width, offset, mode);
e011eba9 3150
3151 switch (GET_CODE (dst))
3152 {
3153 case SUBREG:
3154 if (df_read_modify_subreg_p (dst))
3155 {
ed6e85ae 3156 df_uses_record (cl, collection_rec, &SUBREG_REG (dst),
158b6cc9 3157 DF_REF_REG_USE, bb, insn_info,
bf1f8fbc 3158 flags | DF_REF_READ_WRITE | DF_REF_SUBREG,
3159 width, offset, mode);
e011eba9 3160 break;
3161 }
3162 /* Fall through. */
3163 case REG:
3164 case PARALLEL:
3165 case SCRATCH:
3166 case PC:
3167 case CC0:
3168 break;
3169 case MEM:
ed6e85ae 3170 df_uses_record (cl, collection_rec, &XEXP (dst, 0),
158b6cc9 3171 DF_REF_REG_MEM_STORE, bb, insn_info, flags,
bf1f8fbc 3172 width, offset, mode);
e011eba9 3173 break;
3174 case STRICT_LOW_PART:
3175 {
3176 rtx *temp = &XEXP (dst, 0);
3177 /* A strict_low_part uses the whole REG and not just the
3178 SUBREG. */
3179 dst = XEXP (dst, 0);
ed6e85ae 3180 df_uses_record (cl, collection_rec,
3072d30e 3181 (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
158b6cc9 3182 DF_REF_REG_USE, bb, insn_info,
bf1f8fbc 3183 DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART,
3184 width, offset, mode);
e011eba9 3185 }
3186 break;
3187 case ZERO_EXTRACT:
30de5b55 3188 {
971ba038 3189 if (CONST_INT_P (XEXP (dst, 1))
3190 && CONST_INT_P (XEXP (dst, 2)))
30de5b55 3191 {
3192 width = INTVAL (XEXP (dst, 1));
3193 offset = INTVAL (XEXP (dst, 2));
bf1f8fbc 3194 mode = GET_MODE (dst);
ed6e85ae 3195 df_uses_record (DF_REF_EXTRACT, collection_rec, &XEXP (dst, 0),
3196 DF_REF_REG_USE, bb, insn_info,
3197 DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT,
3198 width, offset, mode);
30de5b55 3199 }
3200 else
3201 {
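		  /* The position and/or width are not constants: record
		     both expressions as plain uses, and record the
		     destination register as both read and written.  */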
ed6e85ae 3202 df_uses_record (cl, collection_rec, &XEXP (dst, 1),
158b6cc9 3203 DF_REF_REG_USE, bb, insn_info, flags,
bf1f8fbc 3204 width, offset, mode);
ed6e85ae 3205 df_uses_record (cl, collection_rec, &XEXP (dst, 2),
158b6cc9 3206 DF_REF_REG_USE, bb, insn_info, flags,
bf1f8fbc 3207 width, offset, mode);
ed6e85ae 3208 df_uses_record (cl, collection_rec, &XEXP (dst, 0),
3209 DF_REF_REG_USE, bb, insn_info,
3210 DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT,
3211 width, offset, mode);
30de5b55 3212 }
3213
30de5b55 3214 }
e011eba9 3215 break;
30de5b55 3216
e011eba9 3217 default:
3218 gcc_unreachable ();
3219 }
3220 return;
3221 }
3222
3223 case RETURN:
3224 break;
3225
3226 case ASM_OPERANDS:
3227 case UNSPEC_VOLATILE:
3228 case TRAP_IF:
3229 case ASM_INPUT:
3230 {
3231 /* Traditional and volatile asm instructions must be
3232 considered to use and clobber all hard registers, all
3233 pseudo-registers and all of memory. So must TRAP_IF and
3234 UNSPEC_VOLATILE operations.
3235
3236 Consider for instance a volatile asm that changes the fpu
3237 rounding mode. An insn should not be moved across this
3238 even if it only uses pseudo-regs because it might give an
3239 incorrectly rounded result.
3240
3241 However, flow.c's liveness computation did *not* do this,
3242 giving the reasoning as " ?!? Unfortunately, marking all
3243 hard registers as live causes massive problems for the
3244 register allocator and marking all pseudos as live creates
3245 mountains of uninitialized variable warnings."
3246
3247 In order to maintain the status quo with regard to liveness
3248 and uses, we do what flow.c did and just mark any regs we
3072d30e 3249 can find in ASM_OPERANDS as used. During global scanning, asm
 3250 insns are scanned and regs_asm_clobbered is filled out.
e011eba9 3251
3252 For all ASM_OPERANDS, we must traverse the vector of input
3253 operands. We can not just fall through here since then we
3254 would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
3255 which do not indicate traditional asms unlike their normal
3256 usage. */
3257 if (code == ASM_OPERANDS)
3258 {
3259 int j;
3260
3261 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
ed6e85ae 3262 df_uses_record (cl, collection_rec, &ASM_OPERANDS_INPUT (x, j),
158b6cc9 3263 DF_REF_REG_USE, bb, insn_info, flags,
bf1f8fbc 3264 width, offset, mode);
e011eba9 3265 return;
3266 }
3267 break;
3268 }
3269
3270 case PRE_DEC:
3271 case POST_DEC:
3272 case PRE_INC:
3273 case POST_INC:
3274 case PRE_MODIFY:
3275 case POST_MODIFY:
3276 /* Catch the def of the register being modified. */
ed6e85ae 3277 df_ref_record (cl, collection_rec, XEXP (x, 0), &XEXP (x, 0),
158b6cc9 3278 bb, insn_info,
7577c7f7 3279 DF_REF_REG_DEF,
bf1f8fbc 3280 flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY,
3281 width, offset, mode);
e011eba9 3282
3283 /* ... Fall through to handle uses ... */
3284
3285 default:
3286 break;
3287 }
3288
3289 /* Recursively scan the operands of this expression. */
3290 {
3291 const char *fmt = GET_RTX_FORMAT (code);
3292 int i;
3293
3294 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3295 {
3296 if (fmt[i] == 'e')
3297 {
3298 /* Tail recursive case: save a function call level. */
3299 if (i == 0)
3300 {
3301 loc = &XEXP (x, 0);
3302 goto retry;
3303 }
ed6e85ae 3304 df_uses_record (cl, collection_rec, &XEXP (x, i), ref_type,
158b6cc9 3305 bb, insn_info, flags,
bf1f8fbc 3306 width, offset, mode);
e011eba9 3307 }
3308 else if (fmt[i] == 'E')
3309 {
3310 int j;
3311 for (j = 0; j < XVECLEN (x, i); j++)
ed6e85ae 3312 df_uses_record (cl, collection_rec,
30de5b55 3313 &XVECEXP (x, i, j), ref_type,
158b6cc9 3314 bb, insn_info, flags,
bf1f8fbc 3315 width, offset, mode);
e011eba9 3316 }
3317 }
3318 }
e011eba9 3319
3072d30e 3320 return;
e011eba9 3321}
3322
3323
3072d30e 3324/* For all DF_REF_CONDITIONAL defs, add a corresponding use; a conditional
   def may leave the register unchanged, so the old value is still live. */
e011eba9 3325
3072d30e 3326static void
3327df_get_conditional_uses (struct df_collection_rec *collection_rec)
e011eba9 3328{
f7f05a07 3329 unsigned int ix;
3330 df_ref ref;
3331
3332 for (ix = 0; VEC_iterate (df_ref, collection_rec->def_vec, ix, ref); ++ix)
3072d30e 3333 {
3072d30e 3334 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
3335 {
30de5b55 3336 int width = -1;
3337 int offset = -1;
bc620c5c 3338 enum machine_mode mode = VOIDmode;
ed6e85ae 3339 df_ref use;
30de5b55 3340
3341 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
3342 {
bf1f8fbc 3343 width = DF_REF_EXTRACT_WIDTH (ref);
3344 offset = DF_REF_EXTRACT_OFFSET (ref);
3345 mode = DF_REF_EXTRACT_MODE (ref);
30de5b55 3346 }
3347
ed6e85ae 3348 use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec, DF_REF_REG (ref),
30de5b55 3349 DF_REF_LOC (ref), DF_REF_BB (ref),
158b6cc9 3350 DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
30de5b55 3351 DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL,
bf1f8fbc 3352 width, offset, mode);
3072d30e 3353 DF_REF_REGNO (use) = DF_REF_REGNO (ref);
3354 }
3355 }
e011eba9 3356}
3357
3358
3072d30e 3359/* Get call's extra defs and uses. */
e011eba9 3360
3361static void
3072d30e 3362df_get_call_refs (struct df_collection_rec * collection_rec,
3363 basic_block bb,
158b6cc9 3364 struct df_insn_info *insn_info,
b9c74b4d 3365 int flags)
e011eba9 3366{
3072d30e 3367 rtx note;
3368 bitmap_iterator bi;
3369 unsigned int ui;
3370 bool is_sibling_call;
3371 unsigned int i;
f7f05a07 3372 df_ref def;
3072d30e 3373 bitmap defs_generated = BITMAP_ALLOC (&df_bitmap_obstack);
e011eba9 3374
3072d30e 3375 /* Do not generate clobbers for registers that are the result of the
 3376 call, since doing so would cause ordering problems in the chain
 3377 building code, depending on which def is seen first. */
f7f05a07 3378 for (i = 0; VEC_iterate (df_ref, collection_rec->def_vec, i, def); ++i)
3379 bitmap_set_bit (defs_generated, DF_REF_REGNO (def));
e011eba9 3380
3072d30e 3381 /* Record the registers used to pass arguments, and those explicitly
 3382 noted as clobbered. */
158b6cc9 3383 for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
3072d30e 3384 note = XEXP (note, 1))
3385 {
3386 if (GET_CODE (XEXP (note, 0)) == USE)
ed6e85ae 3387 df_uses_record (DF_REF_REGULAR, collection_rec, &XEXP (XEXP (note, 0), 0),
b9c74b4d 3388 DF_REF_REG_USE, bb, insn_info, flags, -1, -1,
3389 VOIDmode);
3072d30e 3390 else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3391 {
5736c998 3392 if (REG_P (XEXP (XEXP (note, 0), 0)))
3393 {
3394 unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
3395 if (!bitmap_bit_p (defs_generated, regno))
3396 df_defs_record (collection_rec, XEXP (note, 0), bb,
158b6cc9 3397 insn_info, flags);
5736c998 3398 }
3399 else
ed6e85ae 3400 df_uses_record (DF_REF_REGULAR, collection_rec, &XEXP (note, 0),
b9c74b4d 3401 DF_REF_REG_USE, bb, insn_info, flags, -1, -1,
3402 VOIDmode);
3072d30e 3403 }
3404 }
e011eba9 3405
3072d30e 3406 /* The stack ptr is used (honorarily) by a CALL insn. */
ed6e85ae 3407 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
158b6cc9 3408 NULL, bb, insn_info, DF_REF_REG_USE,
3409 DF_REF_CALL_STACK_USAGE | flags,
b9c74b4d 3410 -1, -1, VOIDmode);
e011eba9 3411
3072d30e 3412 /* Calls may also reference any of the global registers,
3413 so they are recorded as used. */
3414 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3415 if (global_regs[i])
fbc08f2b 3416 {
ed6e85ae 3417 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
b9c74b4d 3418 NULL, bb, insn_info, DF_REF_REG_USE, flags, -1, -1,
3419 VOIDmode);
ed6e85ae 3420 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
b9c74b4d 3421 NULL, bb, insn_info, DF_REF_REG_DEF, flags, -1, -1,
3422 VOIDmode);
fbc08f2b 3423 }
e011eba9 3424
158b6cc9 3425 is_sibling_call = SIBLING_CALL_P (insn_info->insn);
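  /* Every register invalidated by the call gets a DF_REF_MAY_CLOBBER def,
     except global registers, registers the call pattern itself already
     defines, and, for sibling calls, registers that are live on exit
     unless they appear in the return value.  */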
b888113c 3426 EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, ui, bi)
3072d30e 3427 {
fbc08f2b 3428 if (!global_regs[ui]
3429 && (!bitmap_bit_p (defs_generated, ui))
3072d30e 3430 && (!is_sibling_call
3431 || !bitmap_bit_p (df->exit_block_uses, ui)
3432 || refers_to_regno_p (ui, ui+1,
abe32cce 3433 crtl->return_rtx, NULL)))
ed6e85ae 3434 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[ui],
158b6cc9 3435 NULL, bb, insn_info, DF_REF_REG_DEF,
3436 DF_REF_MAY_CLOBBER | flags,
b9c74b4d 3437 -1, -1, VOIDmode);
3072d30e 3438 }
3439
3440 BITMAP_FREE (defs_generated);
3441 return;
3442}
e011eba9 3443
3072d30e 3444/* Collect all refs in the INSN. This function is free of side
 3445 effects - it creates and returns lists of df_ref's in the
 3446 COLLECTION_REC without putting those refs into the existing ref
 3447 chains and reg chains. */
e011eba9 3448
3072d30e 3449static void
3450df_insn_refs_collect (struct df_collection_rec* collection_rec,
158b6cc9 3451 basic_block bb, struct df_insn_info *insn_info)
3072d30e 3452{
3453 rtx note;
158b6cc9 3454 bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);
e011eba9 3455
3072d30e 3456 /* Clear out the collection record. */
f7f05a07 3457 VEC_truncate (df_ref, collection_rec->def_vec, 0);
3458 VEC_truncate (df_ref, collection_rec->use_vec, 0);
3459 VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
3460 VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);
e011eba9 3461
3072d30e 3462 /* Record register defs. */
158b6cc9 3463 df_defs_record (collection_rec, PATTERN (insn_info->insn), bb, insn_info, 0);
3072d30e 3464
158b6cc9 3465 /* Process REG_EQUIV/REG_EQUAL notes. */
3466 for (note = REG_NOTES (insn_info->insn); note;
3072d30e 3467 note = XEXP (note, 1))
3468 {
3469 switch (REG_NOTE_KIND (note))
3470 {
3471 case REG_EQUIV:
3472 case REG_EQUAL:
ed6e85ae 3473 df_uses_record (DF_REF_REGULAR, collection_rec,
3072d30e 3474 &XEXP (note, 0), DF_REF_REG_USE,
b9c74b4d 3475 bb, insn_info, DF_REF_IN_NOTE, -1, -1, VOIDmode);
3072d30e 3476 break;
3477 case REG_NON_LOCAL_GOTO:
3478 /* The frame ptr is used by a non-local goto. */
ed6e85ae 3479 df_ref_record (DF_REF_BASE, collection_rec,
3072d30e 3480 regno_reg_rtx[FRAME_POINTER_REGNUM],
158b6cc9 3481 NULL, bb, insn_info,
b9c74b4d 3482 DF_REF_REG_USE, 0, -1, -1, VOIDmode);
3072d30e 3483#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
ed6e85ae 3484 df_ref_record (DF_REF_BASE, collection_rec,
3072d30e 3485 regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
158b6cc9 3486 NULL, bb, insn_info,
b9c74b4d 3487 DF_REF_REG_USE, 0, -1, -1, VOIDmode);
3072d30e 3488#endif
3489 break;
3490 default:
3491 break;
3492 }
e011eba9 3493 }
3072d30e 3494
158b6cc9 3495 if (CALL_P (insn_info->insn))
3496 df_get_call_refs (collection_rec, bb, insn_info,
3072d30e 3497 (is_cond_exec) ? DF_REF_CONDITIONAL : 0);
3498
3499 /* Record the register uses. */
ed6e85ae 3500 df_uses_record (DF_REF_REGULAR, collection_rec,
158b6cc9 3501 &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0,
b9c74b4d 3502 -1, -1, VOIDmode);
3072d30e 3503
3504 /* DF_REF_CONDITIONAL needs corresponding USES. */
3505 if (is_cond_exec)
3506 df_get_conditional_uses (collection_rec);
3507
3508 df_canonize_collection_rec (collection_rec);
e011eba9 3509}
3510
3072d30e 3511/* Recompute the luids for the insns in BB. */
3512
3513void
3514df_recompute_luids (basic_block bb)
e011eba9 3515{
e011eba9 3516 rtx insn;
3517 int luid = 0;
3e6933a8 3518
3072d30e 3519 df_grow_insn_info ();
e011eba9 3520
3521 /* Scan the block an insn at a time from beginning to end. */
3522 FOR_BB_INSNS (bb, insn)
3523 {
158b6cc9 3524 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3072d30e 3525 /* Inserting labels does not always trigger the incremental
3526 rescanning. */
3527 if (!insn_info)
e011eba9 3528 {
3072d30e 3529 gcc_assert (!INSN_P (insn));
158b6cc9 3530 insn_info = df_insn_create_insn_record (insn);
e011eba9 3531 }
3072d30e 3532
158b6cc9 3533 DF_INSN_INFO_LUID (insn_info) = luid;
3072d30e 3534 if (INSN_P (insn))
3535 luid++;
3536 }
3537}
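
/* Illustrative note (an observation about how the luids are computed
   here, not an interface defined by this file): luids only order insns
   within a single basic block, so a comparison such as

       DF_INSN_LUID (a) < DF_INSN_LUID (b)

   is meaningful only when A and B are insns in the same block and that
   block's luids are current, i.e. after df_recompute_luids or a full
   rescan of the block.  */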
3538
3539
3540/* Returns true if the function entry needs to
3541 define the static chain register. */
3542
3543static bool
3544df_need_static_chain_reg (struct function *fun)
3545{
3546 tree fun_context = decl_function_context (fun->decl);
3547 return fun_context
3548 && DECL_NO_STATIC_CHAIN (fun_context) == false;
3549}
3550
3551
3552/* Collect all artificial refs at the block level for BB and add them
3553 to COLLECTION_REC. */
3554
3555static void
3556df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
3557{
f7f05a07 3558 VEC_truncate (df_ref, collection_rec->def_vec, 0);
3559 VEC_truncate (df_ref, collection_rec->use_vec, 0);
3560 VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
3561 VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);
3072d30e 3562
3563 if (bb->index == ENTRY_BLOCK)
3564 {
3565 df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
3566 return;
3567 }
3568 else if (bb->index == EXIT_BLOCK)
3569 {
3570 df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
3571 return;
e011eba9 3572 }
3573
3574#ifdef EH_RETURN_DATA_REGNO
dea7b504 3575 if (bb_has_eh_pred (bb))
e011eba9 3576 {
3577 unsigned int i;
3578 /* Mark the registers that will contain data for the handler. */
fcf2ad9f 3579 for (i = 0; ; ++i)
3580 {
3581 unsigned regno = EH_RETURN_DATA_REGNO (i);
3582 if (regno == INVALID_REGNUM)
3583 break;
ed6e85ae 3584 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
b9c74b4d 3585 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1,
3586 VOIDmode);
fcf2ad9f 3587 }
e011eba9 3588 }
3589#endif
3590
3072d30e 3591 /* Add the hard_frame_pointer if this block is the target of a
3592 non-local goto. */
3593 if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
ed6e85ae 3594 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, hard_frame_pointer_rtx, NULL,
b9c74b4d 3595 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1, VOIDmode);
3072d30e 3596
3597 /* Add the artificial uses. */
3598 if (bb->index >= NUM_FIXED_BLOCKS)
3e6933a8 3599 {
3600 bitmap_iterator bi;
3601 unsigned int regno;
dea7b504 3602 bitmap au = bb_has_eh_pred (bb)
3072d30e 3603 ? df->eh_block_artificial_uses
3604 : df->regular_block_artificial_uses;
3e6933a8 3605
3072d30e 3606 EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
3e6933a8 3607 {
ed6e85ae 3608 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
b9c74b4d 3609 bb, NULL, DF_REF_REG_USE, 0, -1, -1, VOIDmode);
3e6933a8 3610 }
e011eba9 3611 }
3072d30e 3612
3613 df_canonize_collection_rec (collection_rec);
e011eba9 3614}
3615
05268a5f 3616
3072d30e 3617/* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS. */
3618
3619void
3620df_bb_refs_record (int bb_index, bool scan_insns)
05268a5f 3621{
3072d30e 3622 basic_block bb = BASIC_BLOCK (bb_index);
3623 rtx insn;
3624 int luid = 0;
3625 struct df_scan_bb_info *bb_info;
3626 struct df_collection_rec collection_rec;
05268a5f 3627
3072d30e 3628 if (!df)
05268a5f 3629 return;
3630
3072d30e 3631 bb_info = df_scan_get_bb_info (bb_index);
05268a5f 3632
3072d30e 3633 /* Need to make sure that there is a record in the basic block info. */
3634 if (!bb_info)
3635 {
3636 bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
3637 df_scan_set_bb_info (bb_index, bb_info);
3638 bb_info->artificial_defs = NULL;
3639 bb_info->artificial_uses = NULL;
05268a5f 3640 }
3072d30e 3641
f7f05a07 3642 collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
3643 collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
3644 collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
3645 collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
3646
3072d30e 3647 if (scan_insns)
3648 /* Scan the block an insn at a time from beginning to end. */
3649 FOR_BB_INSNS (bb, insn)
3650 {
158b6cc9 3651 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3072d30e 3652 gcc_assert (!insn_info);
3653
158b6cc9 3654 insn_info = df_insn_create_insn_record (insn);
3072d30e 3655 if (INSN_P (insn))
3656 {
3657 /* Record refs within INSN. */
158b6cc9 3658 DF_INSN_INFO_LUID (insn_info) = luid++;
3659 df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
3072d30e 3660 df_refs_add_to_chains (&collection_rec, bb, insn);
3661 }
158b6cc9 3662 DF_INSN_INFO_LUID (insn_info) = luid;
3072d30e 3663 }
3664
3665 /* Other block level artificial refs.  */
3666 df_bb_refs_collect (&collection_rec, bb);
3667 df_refs_add_to_chains (&collection_rec, bb, NULL);
3668
f7f05a07 3669 VEC_free (df_ref, stack, collection_rec.def_vec);
3670 VEC_free (df_ref, stack, collection_rec.use_vec);
3671 VEC_free (df_ref, stack, collection_rec.eq_use_vec);
3672 VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
3673
3072d30e 3674 /* Now that the block has been processed, set the block as dirty so
84da8954 3675 LR and LIVE will get it processed. */
3072d30e 3676 df_set_bb_dirty (bb);
05268a5f 3677}
e011eba9 3678
3072d30e 3679
3680/* Get the artificial use set for a regular (i.e. non-exit/non-entry)
3681 block. */
e011eba9 3682
3683static void
3072d30e 3684df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
e011eba9 3685{
f7039b33 3686#ifdef EH_USES
3687 unsigned int i;
3688#endif
3689
3072d30e 3690 bitmap_clear (regular_block_artificial_uses);
e011eba9 3691
3072d30e 3692 if (reload_completed)
e011eba9 3693 {
3072d30e 3694 if (frame_pointer_needed)
3695 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3696 }
3697 else
3698 /* Before reload, there are a few registers that must be forced
3699 live everywhere -- which might not already be the case for
3700 blocks within infinite loops. */
3701 {
3702 /* Any reference to any pseudo before reload is a potential
3703 reference of the frame pointer. */
3704 bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);
3705
3706#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3707 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3708#endif
3709
3710#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3711 /* Pseudos with argument area equivalences may require
3712 reloading via the argument pointer. */
3713 if (fixed_regs[ARG_POINTER_REGNUM])
3714 bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
3715#endif
3716
3717 /* Any constant, or pseudo with constant equivalences, may
3718 require reloading from memory using the pic register. */
3719 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3720 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3721 bitmap_set_bit (regular_block_artificial_uses, PIC_OFFSET_TABLE_REGNUM);
e011eba9 3722 }
3072d30e 3723 /* The all-important stack pointer must always be live. */
3724 bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);
f7039b33 3725
3726#ifdef EH_USES
3727 /* EH_USES registers are used:
3728 1) at all insns that might throw (calls or with -fnon-call-exceptions
3729 trapping insns)
3730 2) in all EH edges
3731 3) to support backtraces and/or debugging, anywhere between their
3732 initialization and where the saved registers are restored
3733 from them, including the cases where we don't reach the epilogue
3734 (noreturn call or infinite loop). */
3735 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3736 if (EH_USES (i))
3737 bitmap_set_bit (regular_block_artificial_uses, i);
3738#endif
3072d30e 3739}
3740
e011eba9 3741
3072d30e 3742/* Get the artificial use set for an eh block. */
fcf2ad9f 3743
3072d30e 3744static void
3745df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
3746{
3747 bitmap_clear (eh_block_artificial_uses);
05268a5f 3748
6dfdc153 3749 /* The following code (down through the arg_pointer setting) APPEARS
3072d30e 3750 to be necessary because there is nothing that actually
3751 describes what the exception handling code may actually need
3752 to keep alive. */
3753 if (reload_completed)
3754 {
3755 if (frame_pointer_needed)
3756 {
3757 bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
3758#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3759 bitmap_set_bit (eh_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3760#endif
3761 }
3762#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3763 if (fixed_regs[ARG_POINTER_REGNUM])
3764 bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
3765#endif
3766 }
e011eba9 3767}
3768
3769
3072d30e 3770\f
e011eba9 3771/*----------------------------------------------------------------------------
3772 Specialized hard register scanning functions.
3773----------------------------------------------------------------------------*/
3774
3072d30e 3775
e011eba9 3776/* Mark a register in SET. Hard registers in large modes get all
3777 of their component registers set as well. */
3778
3779static void
3780df_mark_reg (rtx reg, void *vset)
3781{
3782 bitmap set = (bitmap) vset;
3783 int regno = REGNO (reg);
3784
3785 gcc_assert (GET_MODE (reg) != BLKmode);
3786
3787 bitmap_set_bit (set, regno);
3788 if (regno < FIRST_PSEUDO_REGISTER)
3789 {
3790 int n = hard_regno_nregs[regno][GET_MODE (reg)];
3791 while (--n > 0)
3792 bitmap_set_bit (set, regno + n);
3793 }
3794}
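
/* Worked example (a sketch; the exact register number and count are
   target-dependent): on a target where hard_regno_nregs[2][DImode] is 2,

       df_mark_reg (gen_rtx_REG (DImode, 2), (void *) set);

   sets bits 2 and 3 in SET, whereas a pseudo register (REGNO >=
   FIRST_PSEUDO_REGISTER) only ever sets its own bit.  */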
3795
fcf2ad9f 3796
3072d30e 3797/* Set the bit for regs that are considered being defined at the entry. */
fcf2ad9f 3798
3799static void
3072d30e 3800df_get_entry_block_def_set (bitmap entry_block_defs)
fcf2ad9f 3801{
fcf2ad9f 3802 rtx r;
3072d30e 3803 int i;
fcf2ad9f 3804
3072d30e 3805 bitmap_clear (entry_block_defs);
fcf2ad9f 3806
3807 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3808 {
3809 if (FUNCTION_ARG_REGNO_P (i))
3810#ifdef INCOMING_REGNO
3072d30e 3811 bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
fcf2ad9f 3812#else
3072d30e 3813 bitmap_set_bit (entry_block_defs, i);
fcf2ad9f 3814#endif
3815 }
3816
df4fadd5 3817 /* The always-important stack pointer.  */
3818 bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
3819
fcf2ad9f 3820 /* Once the prologue has been generated, all of these registers
3821 should just show up in the first regular block. */
3822 if (HAVE_prologue && epilogue_completed)
3823 {
3824 /* Defs for the callee saved registers are inserted so that the
3825 pushes have some defining location. */
3826 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3072d30e 3827 if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
3828 bitmap_set_bit (entry_block_defs, i);
fcf2ad9f 3829 }
3830 else
3831 {
fcf2ad9f 3832 /* If STATIC_CHAIN_INCOMING_REGNUM == STATIC_CHAIN_REGNUM
3833 only STATIC_CHAIN_REGNUM is defined. If they are different,
3834 we only care about the STATIC_CHAIN_INCOMING_REGNUM. */
3835#ifdef STATIC_CHAIN_INCOMING_REGNUM
3072d30e 3836 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
fcf2ad9f 3837#else
3838#ifdef STATIC_CHAIN_REGNUM
3072d30e 3839 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
fcf2ad9f 3840#endif
3841#endif
fcf2ad9f 3842 }
3843
652ff291 3844 r = targetm.calls.struct_value_rtx (current_function_decl, true);
3845 if (r && REG_P (r))
3846 bitmap_set_bit (entry_block_defs, REGNO (r));
3847
3e6933a8 3848 if ((!reload_completed) || frame_pointer_needed)
fcf2ad9f 3849 {
3850 /* Any reference to any pseudo before reload is a potential
3851 reference of the frame pointer. */
3072d30e 3852 bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
3e6933a8 3853#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3854 /* If they are different, also mark the hard frame pointer as live. */
3855 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3072d30e 3856 bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
3e6933a8 3857#endif
3858 }
fcf2ad9f 3859
3e6933a8 3860 /* These registers are live everywhere. */
3861 if (!reload_completed)
3862 {
fcf2ad9f 3863#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3864 /* Pseudos with argument area equivalences may require
3865 reloading via the argument pointer. */
3866 if (fixed_regs[ARG_POINTER_REGNUM])
3072d30e 3867 bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
fcf2ad9f 3868#endif
3869
3870#ifdef PIC_OFFSET_TABLE_REGNUM
3871 /* Any constant, or pseudo with constant equivalences, may
3872 require reloading from memory using the pic register. */
3873 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3874 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3072d30e 3875 bitmap_set_bit (entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
3876#endif
3877 }
3878
3879#ifdef INCOMING_RETURN_ADDR_RTX
3880 if (REG_P (INCOMING_RETURN_ADDR_RTX))
3881 bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
3882#endif
3883
3884 targetm.live_on_entry (entry_block_defs);
3885
3886 /* If the function has an incoming STATIC_CHAIN,
3887 it has to show up in the entry def set. */
3888 if (df_need_static_chain_reg (cfun))
3889 {
fdfd360b 3890#ifdef STATIC_CHAIN_INCOMING_REGNUM
3072d30e 3891 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
fdfd360b 3892#else
3893#ifdef STATIC_CHAIN_REGNUM
3894 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
3895#endif
fcf2ad9f 3896#endif
3897 }
3072d30e 3898}
3899
3900
3901/* Collect the refs for the (conservative) set of hard registers
3902 that are defined on entry to the function.
bef304b8 3903 It uses ENTRY_BLOCK_DEFS to determine which register
3072d30e 3904 references to include. */
3905
3906static void
3907df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
3908 bitmap entry_block_defs)
3909{
3910 unsigned int i;
3911 bitmap_iterator bi;
3912
3913 EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3914 {
ed6e85ae 3915 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
b9c74b4d 3916 ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0, -1, -1,
3917 VOIDmode);
3072d30e 3918 }
3919
3920 df_canonize_collection_rec (collection_rec);
3921}
3922
3923
3924/* Record the (conservative) set of hard registers that are defined on
3925 entry to the function. */
3926
3927static void
3928df_record_entry_block_defs (bitmap entry_block_defs)
3929{
3930 struct df_collection_rec collection_rec;
3931 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
f7f05a07 3932 collection_rec.def_vec = VEC_alloc (df_ref, stack, FIRST_PSEUDO_REGISTER);
3072d30e 3933 df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3934
3935 /* Process bb_refs chain */
3936 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL);
f7f05a07 3937 VEC_free (df_ref, stack, collection_rec.def_vec);
3072d30e 3938}
3939
3940
becfaa62 3941/* Update the defs in the entry block. */
3072d30e 3942
3943void
3944df_update_entry_block_defs (void)
3945{
3946 bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
3947 bool changed = false;
fcf2ad9f 3948
3072d30e 3949 df_get_entry_block_def_set (refs);
3950 if (df->entry_block_defs)
3951 {
3952 if (!bitmap_equal_p (df->entry_block_defs, refs))
3953 {
3954 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
3955 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
3956 df_ref_chain_delete (bb_info->artificial_defs);
3957 bb_info->artificial_defs = NULL;
3958 changed = true;
3959 }
3960 }
3961 else
3962 {
3963 struct df_scan_problem_data *problem_data
3964 = (struct df_scan_problem_data *) df_scan->problem_data;
3965 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3966 changed = true;
3967 }
fcf2ad9f 3968
3072d30e 3969 if (changed)
fcf2ad9f 3970 {
3072d30e 3971 df_record_entry_block_defs (refs);
3972 bitmap_copy (df->entry_block_defs, refs);
3973 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
fcf2ad9f 3974 }
3072d30e 3975 BITMAP_FREE (refs);
fcf2ad9f 3976}
3977
3978
3072d30e 3979/* Set the bit for regs that are considered being used at the exit. */
e011eba9 3980
3981static void
3072d30e 3982df_get_exit_block_use_set (bitmap exit_block_uses)
e011eba9 3983{
3984 unsigned int i;
e011eba9 3985
3072d30e 3986 bitmap_clear (exit_block_uses);
bff9eb26 3987
3988 /* Stack pointer is always live at the exit. */
3989 bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);
e011eba9 3990
3991 /* Mark the frame pointer if needed at the end of the function.
3992 If we end up eliminating it, it will be removed from the live
3993 list of each basic block by reload. */
3994
3e6933a8 3995 if ((!reload_completed) || frame_pointer_needed)
e011eba9 3996 {
3072d30e 3997 bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
e011eba9 3998#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3999 /* If they are different, also mark the hard frame pointer as live. */
3e6933a8 4000 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3072d30e 4001 bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
e011eba9 4002#endif
4003 }
4004
4005#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
4006 /* Many architectures have a GP register even without flag_pic.
4007 Assume the pic register is not in use, or will be handled by
4008 other means, if it is not fixed. */
4009 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4010 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3072d30e 4011 bitmap_set_bit (exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
e011eba9 4012#endif
4013
4014 /* Mark all global registers, and all registers used by the
4015 epilogue as being live at the end of the function since they
4016 may be referenced by our caller. */
4017 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4018 if (global_regs[i] || EPILOGUE_USES (i))
3072d30e 4019 bitmap_set_bit (exit_block_uses, i);
e011eba9 4020
4021 if (HAVE_epilogue && epilogue_completed)
4022 {
4023 /* Mark all call-saved registers that we actually used. */
4024 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3072d30e 4025 if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
3e6933a8 4026 && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
3072d30e 4027 bitmap_set_bit (exit_block_uses, i);
e011eba9 4028 }
4029
4030#ifdef EH_RETURN_DATA_REGNO
4031 /* Mark the registers that will contain data for the handler. */
18d50ae6 4032 if (reload_completed && crtl->calls_eh_return)
e011eba9 4033 for (i = 0; ; ++i)
4034 {
4035 unsigned regno = EH_RETURN_DATA_REGNO (i);
4036 if (regno == INVALID_REGNUM)
4037 break;
3072d30e 4038 bitmap_set_bit (exit_block_uses, regno);
e011eba9 4039 }
4040#endif
4041
4042#ifdef EH_RETURN_STACKADJ_RTX
3e6933a8 4043 if ((!HAVE_epilogue || ! epilogue_completed)
18d50ae6 4044 && crtl->calls_eh_return)
e011eba9 4045 {
4046 rtx tmp = EH_RETURN_STACKADJ_RTX;
4047 if (tmp && REG_P (tmp))
3072d30e 4048 df_mark_reg (tmp, exit_block_uses);
e011eba9 4049 }
4050#endif
4051
4052#ifdef EH_RETURN_HANDLER_RTX
3e6933a8 4053 if ((!HAVE_epilogue || ! epilogue_completed)
18d50ae6 4054 && crtl->calls_eh_return)
e011eba9 4055 {
4056 rtx tmp = EH_RETURN_HANDLER_RTX;
4057 if (tmp && REG_P (tmp))
3072d30e 4058 df_mark_reg (tmp, exit_block_uses);
e011eba9 4059 }
4060#endif
3072d30e 4061
e011eba9 4062 /* Mark function return value. */
3072d30e 4063 diddle_return_value (df_mark_reg, (void*) exit_block_uses);
4064}
4065
4066
4067/* Collect the refs of hard registers that are used in the exit block.
4068 It uses EXIT_BLOCK_USES to determine which registers to include. */
4069
4070static void
4071df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
4072{
4073 unsigned int i;
4074 bitmap_iterator bi;
4075
4076 EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
ed6e85ae 4077 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
b9c74b4d 4078 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1, VOIDmode);
e011eba9 4079
3072d30e 4080#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4081 /* It is deliberate that this is not put in the exit block uses but
4082 I do not know why. */
4083 if (reload_completed
4084 && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
dea7b504 4085 && bb_has_eh_pred (EXIT_BLOCK_PTR)
3072d30e 4086 && fixed_regs[ARG_POINTER_REGNUM])
ed6e85ae 4087 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
b9c74b4d 4088 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1, VOIDmode);
3072d30e 4089#endif
4090
4091 df_canonize_collection_rec (collection_rec);
4092}
4093
4094
4095/* Record the set of hard registers that are used in the exit block.
4096 It uses df->exit_block_uses to determine which bit to include. */
4097
4098static void
4099df_record_exit_block_uses (bitmap exit_block_uses)
4100{
4101 struct df_collection_rec collection_rec;
4102 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
f7f05a07 4103 collection_rec.use_vec = VEC_alloc (df_ref, stack, FIRST_PSEUDO_REGISTER);
3072d30e 4104
4105 df_exit_block_uses_collect (&collection_rec, exit_block_uses);
4106
4107 /* Process bb_refs chain */
4108 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL);
f7f05a07 4109 VEC_free (df_ref, stack, collection_rec.use_vec);
3072d30e 4110}
4111
4112
4113/* Update the uses in the exit block. */
4114
4115void
4116df_update_exit_block_uses (void)
4117{
4118 bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
4119 bool changed = false;
4120
4121 df_get_exit_block_use_set (refs);
4122 if (df->exit_block_uses)
4123 {
4124 if (!bitmap_equal_p (df->exit_block_uses, refs))
4125 {
4126 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
4127 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
4128 df_ref_chain_delete (bb_info->artificial_uses);
4129 bb_info->artificial_uses = NULL;
4130 changed = true;
4131 }
4132 }
4133 else
4134 {
4135 struct df_scan_problem_data *problem_data
4136 = (struct df_scan_problem_data *) df_scan->problem_data;
4137 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
4138 changed = true;
4139 }
4140
4141 if (changed)
4142 {
4143 df_record_exit_block_uses (refs);
4144 bitmap_copy (df->exit_block_uses, refs);
4145 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
4146 }
4147 BITMAP_FREE (refs);
e011eba9 4148}
4149
4150static bool initialized = false;
4151
3072d30e 4152
e011eba9 4153/* Initialize some platform specific structures. */
4154
4155void
4156df_hard_reg_init (void)
4157{
bebf8106 4158#ifdef ELIMINABLE_REGS
f9a42947 4159 int i;
e011eba9 4160 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
4161#endif
e011eba9 4162 if (initialized)
4163 return;
4164
e011eba9 4165 /* Record which registers will be eliminated. We use this in
4166 mark_used_regs. */
4167 CLEAR_HARD_REG_SET (elim_reg_set);
4168
4169#ifdef ELIMINABLE_REGS
4170 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
4171 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
4172#else
4173 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
4174#endif
4175
e011eba9 4176 initialized = true;
4177}
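
/* For reference, a typical target definition of ELIMINABLE_REGS looks
   like the following (an example of the usual shape, not something
   defined in this file):

       #define ELIMINABLE_REGS                                   \
        {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },         \
         { ARG_POINTER_REGNUM,   HARD_FRAME_POINTER_REGNUM },    \
         { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM },         \
         { FRAME_POINTER_REGNUM, HARD_FRAME_POINTER_REGNUM }}

   With such a definition elim_reg_set ends up holding the "from"
   register of every possible elimination; without ELIMINABLE_REGS only
   FRAME_POINTER_REGNUM is recorded.  */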
3072d30e 4178
4179
4180/* Recompute the parts of scanning that are based on regs_ever_live
4181 because something changed in that array. */
4182
4183void
4184df_update_entry_exit_and_calls (void)
4185{
4186 basic_block bb;
4187
4188 df_update_entry_block_defs ();
4189 df_update_exit_block_uses ();
4190
4191 /* The call insns need to be rescanned because there may be changes
4192 in the set of registers clobbered across the call. */
4193 FOR_EACH_BB (bb)
4194 {
4195 rtx insn;
4196 FOR_BB_INSNS (bb, insn)
4197 {
4198 if (INSN_P (insn) && CALL_P (insn))
4199 df_insn_rescan (insn);
4200 }
4201 }
4202}
4203
4204
4205/* Return true if hard REG is actually used in some instruction.
4206 There are a fair number of conditions that affect the setting of
4207 this array. See the comment in df.h for df->hard_regs_live_count
4208 for the conditions under which this array is set. */
4209
4210bool
4211df_hard_reg_used_p (unsigned int reg)
4212{
4213 gcc_assert (df);
4214 return df->hard_regs_live_count[reg] != 0;
4215}
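
/* Example of intended use (a sketch, not a call site in this file): a
   pass deciding whether a call-saved hard register really needs a
   save/restore can test

       df_hard_reg_used_p (regno) && !call_used_regs[regno]

   relying only on df->hard_regs_live_count being kept up to date.  */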
4216
4217
4218/* A count of the number of times REG is actually used in some
4219 instruction. There are a fair number of conditions that affect the
4220 setting of this array. See the comment in df.h for
4221 df->hard_regs_live_count for the conditions under which this
4222 array is set. */
4223
4224
4225unsigned int
4226df_hard_reg_used_count (unsigned int reg)
4227{
4228 gcc_assert (df);
4229 return df->hard_regs_live_count[reg];
4230}
4231
4232
4233/* Get the value of regs_ever_live[REGNO]. */
4234
4235bool
4236df_regs_ever_live_p (unsigned int regno)
4237{
4238 return regs_ever_live[regno];
4239}
4240
4241
4242/* Set regs_ever_live[REGNO] to VALUE. If this causes regs_ever_live
4243 to change, schedule that change for the next update. */
4244
4245void
4246df_set_regs_ever_live (unsigned int regno, bool value)
4247{
4248 if (regs_ever_live[regno] == value)
4249 return;
4250
4251 regs_ever_live[regno] = value;
4252 if (df)
4253 df->redo_entry_and_exit = true;
4254}
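
/* A minimal sketch of the deferred-update pattern implied by this
   function and df_compute_regs_ever_live below (REGNO is a
   placeholder): callers may batch several changes and flush them once,
   e.g.

       df_set_regs_ever_live (regno, true);
       ...
       df_compute_regs_ever_live (false);

   The entry/exit defs and the call insns are only rescanned when
   df_compute_regs_ever_live notices that something changed.  */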
4255
4256
4257/* Compute "regs_ever_live" information from the underlying df
4258 information. Set the vector to all false if RESET. */
4259
4260void
4261df_compute_regs_ever_live (bool reset)
4262{
4263 unsigned int i;
4264 bool changed = df->redo_entry_and_exit;
4265
4266 if (reset)
4267 memset (regs_ever_live, 0, sizeof (regs_ever_live));
4268
4269 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4270 if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
4271 {
4272 regs_ever_live[i] = true;
4273 changed = true;
4274 }
4275 if (changed)
4276 df_update_entry_exit_and_calls ();
4277 df->redo_entry_and_exit = false;
4278}
4279
4280\f
4281/*----------------------------------------------------------------------------
4282 Dataflow ref information verification functions.
4283
4284 df_reg_chain_mark (refs, regno, is_def, is_eq_use)
4285 df_reg_chain_verify_unmarked (refs)
f7f05a07 4286 df_refs_verify (VEC(df_ref,stack)*, ref*, bool)
3072d30e 4287 df_mws_verify (mw*, mw*, bool)
4288 df_insn_refs_verify (collection_rec, bb, insn, bool)
4289 df_bb_refs_verify (bb, refs, bool)
4290 df_bb_verify (bb)
4291 df_exit_block_bitmap_verify (bool)
4292 df_entry_block_bitmap_verify (bool)
4293 df_scan_verify ()
4294----------------------------------------------------------------------------*/
4295
4296
4297/* Mark all refs in the reg chain. Verify that all of the registers
4298 are in the correct chain. */
4299
4300static unsigned int
ed6e85ae 4301df_reg_chain_mark (df_ref refs, unsigned int regno,
3072d30e 4302 bool is_def, bool is_eq_use)
4303{
4304 unsigned int count = 0;
ed6e85ae 4305 df_ref ref;
3072d30e 4306 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4307 {
4308 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4309
4310 /* If there are no def-use or use-def chains, make sure that all
4311 of the chains are clear. */
4312 if (!df_chain)
4313 gcc_assert (!DF_REF_CHAIN (ref));
4314
4315 /* Check to make sure the ref is in the correct chain. */
4316 gcc_assert (DF_REF_REGNO (ref) == regno);
4317 if (is_def)
ed6e85ae 4318 gcc_assert (DF_REF_REG_DEF_P (ref));
3072d30e 4319 else
ed6e85ae 4320 gcc_assert (!DF_REF_REG_DEF_P (ref));
3072d30e 4321
4322 if (is_eq_use)
4323 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
4324 else
4325 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);
4326
ed6e85ae 4327 if (DF_REF_NEXT_REG (ref))
4328 gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref)) == ref);
3072d30e 4329 count++;
4330 DF_REF_REG_MARK (ref);
4331 }
4332 return count;
4333}
4334
4335
4336/* Verify that all of the registers in the chain are unmarked. */
4337
4338static void
ed6e85ae 4339df_reg_chain_verify_unmarked (df_ref refs)
3072d30e 4340{
ed6e85ae 4341 df_ref ref;
3072d30e 4342 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4343 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4344}
4345
4346
4347/* Verify that NEW_REC and OLD_REC have exactly the same members. */
4348
4349static bool
f7f05a07 4350df_refs_verify (VEC(df_ref,stack) *new_rec, df_ref *old_rec,
3072d30e 4351 bool abort_if_fail)
4352{
f7f05a07 4353 unsigned int ix;
4354 df_ref new_ref;
4355
4356 for (ix = 0; VEC_iterate (df_ref, new_rec, ix, new_ref); ++ix)
3072d30e 4357 {
f7f05a07 4358 if (*old_rec == NULL || !df_ref_equal_p (new_ref, *old_rec))
3072d30e 4359 {
4360 if (abort_if_fail)
4361 gcc_assert (0);
4362 else
4363 return false;
4364 }
4365
4366 /* ABORT_IF_FAIL is set when called from the function level
4367 verifier. If that is the context, mark this reg as being seen. */
4368 if (abort_if_fail)
4369 {
4370 gcc_assert (DF_REF_IS_REG_MARKED (*old_rec));
4371 DF_REF_REG_UNMARK (*old_rec);
4372 }
4373
3072d30e 4374 old_rec++;
4375 }
4376
4377 if (abort_if_fail)
f7f05a07 4378 gcc_assert (*old_rec == NULL);
3072d30e 4379 else
f7f05a07 4380 return *old_rec == NULL;
3072d30e 4381 return false;
4382}
4383
4384
4385/* Verify that NEW_REC and OLD_REC have exactly the same members. */
4386
4387static bool
f7f05a07 4388df_mws_verify (VEC(df_mw_hardreg_ptr,stack) *new_rec,
4389 struct df_mw_hardreg **old_rec,
3072d30e 4390 bool abort_if_fail)
4391{
f7f05a07 4392 unsigned int ix;
4393 struct df_mw_hardreg *new_reg;
4394
4395 for (ix = 0; VEC_iterate (df_mw_hardreg_ptr, new_rec, ix, new_reg); ++ix)
3072d30e 4396 {
f7f05a07 4397 if (*old_rec == NULL || !df_mw_equal_p (new_reg, *old_rec))
3072d30e 4398 {
4399 if (abort_if_fail)
4400 gcc_assert (0);
4401 else
4402 return false;
4403 }
3072d30e 4404 old_rec++;
4405 }
4406
4407 if (abort_if_fail)
f7f05a07 4408 gcc_assert (*old_rec == NULL);
3072d30e 4409 else
f7f05a07 4410 return *old_rec == NULL;
3072d30e 4411 return false;
4412}
4413
4414
4415/* Return true if the existing insn refs information is complete and
4416 correct. Otherwise (i.e. if there are any missing or extra refs),
4417 the correct refs are left in COLLECTION_REC.
4418
4419 If ABORT_IF_FAIL, leave the refs that are verified (already in the
4420 ref chain) as DF_REF_MARKED(). If it's false, then it's a per-insn
4421 verification mode instead of the whole function, so unmark
4422 everything.
4423
4424 If ABORT_IF_FAIL is set, this function never returns false. */
4425
4426static bool
4427df_insn_refs_verify (struct df_collection_rec *collection_rec,
4428 basic_block bb,
4429 rtx insn,
4430 bool abort_if_fail)
4431{
4432 bool ret1, ret2, ret3, ret4;
4433 unsigned int uid = INSN_UID (insn);
158b6cc9 4434 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3072d30e 4435
158b6cc9 4436 df_insn_refs_collect (collection_rec, bb, insn_info);
3072d30e 4437
4438 if (!DF_INSN_UID_DEFS (uid))
4439 {
4440 /* The insn_rec was created but it was never filled out. */
4441 if (abort_if_fail)
4442 gcc_assert (0);
4443 else
4444 return false;
4445 }
4446
4447 /* Unfortunately we cannot opt out early if one of these is not
4448 right because the marks will not get cleared. */
4449 ret1 = df_refs_verify (collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
4450 abort_if_fail);
4451 ret2 = df_refs_verify (collection_rec->use_vec, DF_INSN_UID_USES (uid),
4452 abort_if_fail);
4453 ret3 = df_refs_verify (collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
4454 abort_if_fail);
4455 ret4 = df_mws_verify (collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
4456 abort_if_fail);
4457 return (ret1 && ret2 && ret3 && ret4);
4458}
4459
4460
4461/* Return true if all refs in the basic block are correct and complete.
4462 As a side effect of df_refs_verify, every ref that is verified
4463 has its DF_REF_MARK bit cleared. */
4464
4465static bool
4466df_bb_verify (basic_block bb)
4467{
4468 rtx insn;
4469 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
4470 struct df_collection_rec collection_rec;
4471
4472 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
f7f05a07 4473 collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
4474 collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
4475 collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
4476 collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
3072d30e 4477
4478 gcc_assert (bb_info);
4479
ed6e85ae 4480 /* Scan the block, one insn at a time, from the end to the beginning. */
3072d30e 4481 FOR_BB_INSNS_REVERSE (bb, insn)
4482 {
4483 if (!INSN_P (insn))
4484 continue;
4485 df_insn_refs_verify (&collection_rec, bb, insn, true);
4486 df_free_collection_rec (&collection_rec);
4487 }
4488
4489 /* Do the artificial defs and uses. */
4490 df_bb_refs_collect (&collection_rec, bb);
4491 df_refs_verify (collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
4492 df_refs_verify (collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
4493 df_free_collection_rec (&collection_rec);
4494
4495 return true;
4496}
4497
4498
4499/* Returns true if the entry block has correct and complete df_ref set.
4500 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4501
4502static bool
4503df_entry_block_bitmap_verify (bool abort_if_fail)
4504{
4505 bitmap entry_block_defs = BITMAP_ALLOC (&df_bitmap_obstack);
4506 bool is_eq;
4507
4508 df_get_entry_block_def_set (entry_block_defs);
4509
4510 is_eq = bitmap_equal_p (entry_block_defs, df->entry_block_defs);
4511
4512 if (!is_eq && abort_if_fail)
4513 {
4514 print_current_pass (stderr);
4515 fprintf (stderr, "entry_block_defs = ");
4516 df_print_regset (stderr, entry_block_defs);
4517 fprintf (stderr, "df->entry_block_defs = ");
4518 df_print_regset (stderr, df->entry_block_defs);
4519 gcc_assert (0);
4520 }
4521
4522 BITMAP_FREE (entry_block_defs);
4523
4524 return is_eq;
4525}
4526
4527
4528/* Returns true if the exit block has correct and complete df_ref set.
4529 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4530
4531static bool
4532df_exit_block_bitmap_verify (bool abort_if_fail)
4533{
4534 bitmap exit_block_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4535 bool is_eq;
4536
4537 df_get_exit_block_use_set (exit_block_uses);
4538
4539 is_eq = bitmap_equal_p (exit_block_uses, df->exit_block_uses);
4540
4541 if (!is_eq && abort_if_fail)
4542 {
4543 print_current_pass (stderr);
4544 fprintf (stderr, "exit_block_uses = ");
4545 df_print_regset (stderr, exit_block_uses);
4546 fprintf (stderr, "df->exit_block_uses = ");
4547 df_print_regset (stderr, df->exit_block_uses);
4548 gcc_assert (0);
4549 }
4550
4551 BITMAP_FREE (exit_block_uses);
4552
4553 return is_eq;
4554}
4555
4556
bf1f8fbc 4557/* Verify that the df_ref information for all insns in all blocks is
4558 correct and complete. */
3072d30e 4559
4560void
4561df_scan_verify (void)
4562{
4563 unsigned int i;
4564 basic_block bb;
4565 bitmap regular_block_artificial_uses;
4566 bitmap eh_block_artificial_uses;
4567
4568 if (!df)
4569 return;
4570
3072d30e 4571 /* Verification is a 4 step process. */
4572
4573 /* (1) All of the refs are marked by going thru the reg chains. */
4574 for (i = 0; i < DF_REG_SIZE (df); i++)
4575 {
4576 gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
4577 == DF_REG_DEF_COUNT(i));
4578 gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
4579 == DF_REG_USE_COUNT(i));
4580 gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
4581 == DF_REG_EQ_USE_COUNT(i));
4582 }
4583
4584 /* (2) There are various bitmaps whose value may change over the
4585 course of the compilation. This step recomputes them to make
4586 sure that they have not slipped out of date. */
4587 regular_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4588 eh_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4589
4590 df_get_regular_block_artificial_uses (regular_block_artificial_uses);
4591 df_get_eh_block_artificial_uses (eh_block_artificial_uses);
4592
4593 bitmap_ior_into (eh_block_artificial_uses,
4594 regular_block_artificial_uses);
4595
4596 /* Check artificial_uses bitmaps didn't change. */
4597 gcc_assert (bitmap_equal_p (regular_block_artificial_uses,
4598 df->regular_block_artificial_uses));
4599 gcc_assert (bitmap_equal_p (eh_block_artificial_uses,
4600 df->eh_block_artificial_uses));
4601
4602 BITMAP_FREE (regular_block_artificial_uses);
4603 BITMAP_FREE (eh_block_artificial_uses);
4604
4605 /* Verify entry block and exit block. These only verify the bitmaps,
4606 the refs are verified in df_bb_verify. */
4607 df_entry_block_bitmap_verify (true);
4608 df_exit_block_bitmap_verify (true);
4609
4610 /* (3) All of the insns in all of the blocks are traversed and the
4611 marks are cleared both in the artificial refs attached to the
4612 blocks and the real refs inside the insns. It is a failure to
4613 clear a mark that has not been set as this means that the ref in
4614 the block or insn was not in the reg chain. */
4615
4616 FOR_ALL_BB (bb)
4617 df_bb_verify (bb);
4618
4619 /* (4) All of the reg chains are traversed a second time. This time
4620 a check is made that the marks are clear. A set mark would be
4621 from a ref that is not in any insn or basic block. */
4622
4623 for (i = 0; i < DF_REG_SIZE (df); i++)
4624 {
4625 df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
4626 df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
4627 df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));
4628 }
4629}