]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/df-scan.c
* gcc.c (compare_files): Cast munmap argument to caddr_t.
[thirdparty/gcc.git] / gcc / df-scan.c
CommitLineData
e011eba9 1/* Scanning of rtl for dataflow analysis.
a2bdd643 2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
0ac758f7 3 2008, 2009 Free Software Foundation, Inc.
e011eba9 4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
8
9This file is part of GCC.
10
11GCC is free software; you can redistribute it and/or modify it under
12the terms of the GNU General Public License as published by the Free
8c4c00c1 13Software Foundation; either version 3, or (at your option) any later
e011eba9 14version.
15
16GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17WARRANTY; without even the implied warranty of MERCHANTABILITY or
18FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19for more details.
20
21You should have received a copy of the GNU General Public License
8c4c00c1 22along with GCC; see the file COPYING3. If not see
23<http://www.gnu.org/licenses/>. */
e011eba9 24
25#include "config.h"
26#include "system.h"
27#include "coretypes.h"
28#include "tm.h"
29#include "rtl.h"
30#include "tm_p.h"
31#include "insn-config.h"
32#include "recog.h"
33#include "function.h"
34#include "regs.h"
35#include "output.h"
36#include "alloc-pool.h"
37#include "flags.h"
38#include "hard-reg-set.h"
39#include "basic-block.h"
40#include "sbitmap.h"
41#include "bitmap.h"
42#include "timevar.h"
fcf2ad9f 43#include "tree.h"
44#include "target.h"
45#include "target-def.h"
e011eba9 46#include "df.h"
3072d30e 47#include "tree-pass.h"
e011eba9 48
49#ifndef HAVE_epilogue
50#define HAVE_epilogue 0
51#endif
52#ifndef HAVE_prologue
53#define HAVE_prologue 0
54#endif
55#ifndef HAVE_sibcall_epilogue
56#define HAVE_sibcall_epilogue 0
57#endif
58
59#ifndef EPILOGUE_USES
60#define EPILOGUE_USES(REGNO) 0
61#endif
62
ce299759 63/* The following two macros free the vecs that hold either the refs or
64 the mw refs. They are a little tricky because a vec with 0
65 elements is special and is not to be freed. */
/* Free the vector V of refs unless it is the shared empty record:
   a vec whose first element is null is the sentinel for "no refs"
   and must not be freed.  The argument is parenthesized so the
   macro stays safe for arbitrary expressions.  */
#define df_scan_free_ref_vec(V) \
  do { \
    if ((V) && *(V)) \
      free (V); \
  } while (0)
71
/* Free the vector V of multiword-hardreg refs unless it is the shared
   empty record (first element null).  Argument parenthesized for
   macro hygiene, mirroring df_scan_free_ref_vec.  */
#define df_scan_free_mws_vec(V) \
  do { \
    if ((V) && *(V)) \
      free (V); \
  } while (0)
77
e011eba9 78/* The set of hard registers in eliminables[i].from. */
79
80static HARD_REG_SET elim_reg_set;
81
e011eba9 82/* Initialize ur_in and ur_out as if all hard registers were partially
83 available. */
84
3072d30e 85struct df_collection_rec
86{
ed6e85ae 87 df_ref * def_vec;
ed6e85ae 88 df_ref * use_vec;
0ac758f7 89 unsigned int next_def;
3072d30e 90 unsigned int next_use;
ed6e85ae 91 df_ref * eq_use_vec;
3072d30e 92 struct df_mw_hardreg **mw_vec;
0ac758f7 93 unsigned int next_eq_use;
3072d30e 94 unsigned int next_mw;
95};
96
ed6e85ae 97static df_ref df_null_ref_rec[1];
3072d30e 98static struct df_mw_hardreg * df_null_mw_rec[1];
99
ed6e85ae 100static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
3072d30e 101 rtx, rtx *,
158b6cc9 102 basic_block, struct df_insn_info *,
b9c74b4d 103 enum df_ref_type, int ref_flags,
158b6cc9 104 int, int, enum machine_mode);
105static void df_def_record_1 (struct df_collection_rec *, rtx,
106 basic_block, struct df_insn_info *,
b9c74b4d 107 int ref_flags);
158b6cc9 108static void df_defs_record (struct df_collection_rec *, rtx,
109 basic_block, struct df_insn_info *,
b9c74b4d 110 int ref_flags);
ed6e85ae 111static void df_uses_record (enum df_ref_class, struct df_collection_rec *,
3072d30e 112 rtx *, enum df_ref_type,
158b6cc9 113 basic_block, struct df_insn_info *,
b9c74b4d 114 int ref_flags,
bf1f8fbc 115 int, int, enum machine_mode);
e011eba9 116
ed6e85ae 117static df_ref df_ref_create_structure (enum df_ref_class,
118 struct df_collection_rec *, rtx, rtx *,
119 basic_block, struct df_insn_info *,
b9c74b4d 120 enum df_ref_type, int ref_flags,
ed6e85ae 121 int, int, enum machine_mode);
3072d30e 122
123static void df_insn_refs_collect (struct df_collection_rec*,
158b6cc9 124 basic_block, struct df_insn_info *);
3072d30e 125static void df_canonize_collection_rec (struct df_collection_rec *);
126
127static void df_get_regular_block_artificial_uses (bitmap);
128static void df_get_eh_block_artificial_uses (bitmap);
129
130static void df_record_entry_block_defs (bitmap);
131static void df_record_exit_block_uses (bitmap);
132static void df_get_exit_block_use_set (bitmap);
133static void df_get_entry_block_def_set (bitmap);
e011eba9 134static void df_grow_ref_info (struct df_ref_info *, unsigned int);
ed6e85ae 135static void df_ref_chain_delete_du_chain (df_ref *);
136static void df_ref_chain_delete (df_ref *);
e011eba9 137
3072d30e 138static void df_refs_add_to_chains (struct df_collection_rec *,
139 basic_block, rtx);
140
141static bool df_insn_refs_verify (struct df_collection_rec *, basic_block, rtx, bool);
142static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
143static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
ed6e85ae 144static void df_install_ref (df_ref, struct df_reg_info *,
3072d30e 145 struct df_ref_info *, bool);
146
147static int df_ref_compare (const void *, const void *);
148static int df_mw_compare (const void *, const void *);
149
150/* Indexed by hardware reg number, is true if that register is ever
151 used in the current function.
152
153 In df-scan.c, this is set up to record the hard regs used
154 explicitly. Reload adds in the hard regs used for holding pseudo
155 regs. Final uses it to generate the code in the function prologue
156 and epilogue to save and restore registers as needed. */
157
158static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
e011eba9 159\f
160/*----------------------------------------------------------------------------
161 SCANNING DATAFLOW PROBLEM
162
163 There are several ways in which scanning looks just like the other
164 dataflow problems. It shares the all the mechanisms for local info
165 as well as basic block info. Where it differs is when and how often
166 it gets run. It also has no need for the iterative solver.
167----------------------------------------------------------------------------*/
168
169/* Problem data for the scanning dataflow function. */
170struct df_scan_problem_data
171{
ed6e85ae 172 alloc_pool ref_base_pool;
173 alloc_pool ref_artificial_pool;
174 alloc_pool ref_regular_pool;
30de5b55 175 alloc_pool ref_extract_pool;
e011eba9 176 alloc_pool insn_pool;
177 alloc_pool reg_pool;
3e6933a8 178 alloc_pool mw_reg_pool;
3072d30e 179 bitmap_obstack reg_bitmaps;
180 bitmap_obstack insn_bitmaps;
e011eba9 181};
182
183typedef struct df_scan_bb_info *df_scan_bb_info_t;
184
ce299759 185
186/* Internal function to shut down the scanning problem. */
e011eba9 187static void
3072d30e 188df_scan_free_internal (void)
e011eba9 189{
3e6933a8 190 struct df_scan_problem_data *problem_data
3072d30e 191 = (struct df_scan_problem_data *) df_scan->problem_data;
ce299759 192 unsigned int i;
193 basic_block bb;
194
195 /* The vectors that hold the refs are not pool allocated because
196 they come in many sizes. This makes them impossible to delete
197 all at once. */
198 for (i = 0; i < DF_INSN_SIZE(); i++)
199 {
200 struct df_insn_info *insn_info = DF_INSN_UID_GET(i);
201 /* Skip the insns that have no insn_info or have been
202 deleted. */
203 if (insn_info)
204 {
205 df_scan_free_ref_vec (insn_info->defs);
206 df_scan_free_ref_vec (insn_info->uses);
207 df_scan_free_ref_vec (insn_info->eq_uses);
208 df_scan_free_mws_vec (insn_info->mw_hardregs);
209 }
210 }
211
212 FOR_ALL_BB (bb)
213 {
214 unsigned int bb_index = bb->index;
215 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
216 if (bb_info)
217 {
218 df_scan_free_ref_vec (bb_info->artificial_defs);
219 df_scan_free_ref_vec (bb_info->artificial_uses);
220 }
221 }
e011eba9 222
e011eba9 223 free (df->def_info.refs);
3072d30e 224 free (df->def_info.begin);
225 free (df->def_info.count);
e011eba9 226 memset (&df->def_info, 0, (sizeof (struct df_ref_info)));
227
e011eba9 228 free (df->use_info.refs);
3072d30e 229 free (df->use_info.begin);
230 free (df->use_info.count);
e011eba9 231 memset (&df->use_info, 0, (sizeof (struct df_ref_info)));
232
3072d30e 233 free (df->def_regs);
234 df->def_regs = NULL;
235 free (df->use_regs);
236 df->use_regs = NULL;
237 free (df->eq_use_regs);
238 df->eq_use_regs = NULL;
239 df->regs_size = 0;
240 DF_REG_SIZE(df) = 0;
241
e011eba9 242 free (df->insns);
243 df->insns = NULL;
3072d30e 244 DF_INSN_SIZE () = 0;
e011eba9 245
3072d30e 246 free (df_scan->block_info);
247 df_scan->block_info = NULL;
248 df_scan->block_info_size = 0;
e011eba9 249
250 BITMAP_FREE (df->hardware_regs_used);
3072d30e 251 BITMAP_FREE (df->regular_block_artificial_uses);
252 BITMAP_FREE (df->eh_block_artificial_uses);
fcf2ad9f 253 BITMAP_FREE (df->entry_block_defs);
e011eba9 254 BITMAP_FREE (df->exit_block_uses);
3072d30e 255 BITMAP_FREE (df->insns_to_delete);
256 BITMAP_FREE (df->insns_to_rescan);
257 BITMAP_FREE (df->insns_to_notes_rescan);
e011eba9 258
3072d30e 259 free_alloc_pool (df_scan->block_pool);
ed6e85ae 260 free_alloc_pool (problem_data->ref_base_pool);
261 free_alloc_pool (problem_data->ref_artificial_pool);
262 free_alloc_pool (problem_data->ref_regular_pool);
30de5b55 263 free_alloc_pool (problem_data->ref_extract_pool);
e011eba9 264 free_alloc_pool (problem_data->insn_pool);
265 free_alloc_pool (problem_data->reg_pool);
3e6933a8 266 free_alloc_pool (problem_data->mw_reg_pool);
3072d30e 267 bitmap_obstack_release (&problem_data->reg_bitmaps);
268 bitmap_obstack_release (&problem_data->insn_bitmaps);
269 free (df_scan->problem_data);
e011eba9 270}
271
272
273/* Set basic block info. */
274
275static void
3072d30e 276df_scan_set_bb_info (unsigned int index,
e011eba9 277 struct df_scan_bb_info *bb_info)
278{
3072d30e 279 gcc_assert (df_scan);
280 df_grow_bb_info (df_scan);
281 df_scan->block_info[index] = (void *) bb_info;
e011eba9 282}
283
284
285/* Free basic block info. */
286
287static void
3072d30e 288df_scan_free_bb_info (basic_block bb, void *vbb_info)
e011eba9 289{
290 struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
3072d30e 291 unsigned int bb_index = bb->index;
e011eba9 292 if (bb_info)
d0802b39 293 {
3072d30e 294 rtx insn;
295 FOR_BB_INSNS (bb, insn)
296 {
297 if (INSN_P (insn))
298 /* Record defs within INSN. */
299 df_insn_delete (bb, INSN_UID (insn));
300 }
301
302 if (bb_index < df_scan->block_info_size)
303 bb_info = df_scan_get_bb_info (bb_index);
304
305 /* Get rid of any artificial uses or defs. */
306 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
307 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
308 df_ref_chain_delete (bb_info->artificial_defs);
309 df_ref_chain_delete (bb_info->artificial_uses);
310 bb_info->artificial_defs = NULL;
311 bb_info->artificial_uses = NULL;
312 pool_free (df_scan->block_pool, bb_info);
d0802b39 313 }
e011eba9 314}
315
316
317/* Allocate the problem data for the scanning problem. This should be
318 called when the problem is created or when the entire function is to
319 be rescanned. */
3072d30e 320void
321df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
e011eba9 322{
e011eba9 323 struct df_scan_problem_data *problem_data;
324 unsigned int insn_num = get_max_uid () + 1;
3072d30e 325 unsigned int block_size = 400;
326 basic_block bb;
e011eba9 327
328 /* Given the number of pools, this is really faster than tearing
329 everything apart. */
3072d30e 330 if (df_scan->problem_data)
331 df_scan_free_internal ();
e011eba9 332
3072d30e 333 df_scan->block_pool
e011eba9 334 = create_alloc_pool ("df_scan_block pool",
335 sizeof (struct df_scan_bb_info),
336 block_size);
337
4c36ffe6 338 problem_data = XNEW (struct df_scan_problem_data);
3072d30e 339 df_scan->problem_data = problem_data;
340 df_scan->computed = true;
e011eba9 341
ed6e85ae 342 problem_data->ref_base_pool
343 = create_alloc_pool ("df_scan ref base",
344 sizeof (struct df_base_ref), block_size);
345 problem_data->ref_artificial_pool
346 = create_alloc_pool ("df_scan ref artificial",
347 sizeof (struct df_artificial_ref), block_size);
348 problem_data->ref_regular_pool
349 = create_alloc_pool ("df_scan ref regular",
350 sizeof (struct df_regular_ref), block_size);
30de5b55 351 problem_data->ref_extract_pool
ed6e85ae 352 = create_alloc_pool ("df_scan ref extract",
353 sizeof (struct df_extract_ref), block_size);
e011eba9 354 problem_data->insn_pool
ed6e85ae 355 = create_alloc_pool ("df_scan insn",
e011eba9 356 sizeof (struct df_insn_info), block_size);
e011eba9 357 problem_data->reg_pool
ed6e85ae 358 = create_alloc_pool ("df_scan reg",
e011eba9 359 sizeof (struct df_reg_info), block_size);
3e6933a8 360 problem_data->mw_reg_pool
ed6e85ae 361 = create_alloc_pool ("df_scan mw_reg",
3e6933a8 362 sizeof (struct df_mw_hardreg), block_size);
e011eba9 363
3072d30e 364 bitmap_obstack_initialize (&problem_data->reg_bitmaps);
365 bitmap_obstack_initialize (&problem_data->insn_bitmaps);
e011eba9 366
3072d30e 367 insn_num += insn_num / 4;
368 df_grow_reg_info ();
e011eba9 369
3072d30e 370 df_grow_insn_info ();
371 df_grow_bb_info (df_scan);
e011eba9 372
3072d30e 373 FOR_ALL_BB (bb)
e011eba9 374 {
3072d30e 375 unsigned int bb_index = bb->index;
376 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
e011eba9 377 if (!bb_info)
378 {
3072d30e 379 bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
380 df_scan_set_bb_info (bb_index, bb_info);
e011eba9 381 }
382 bb_info->artificial_defs = NULL;
383 bb_info->artificial_uses = NULL;
384 }
385
3072d30e 386 df->hardware_regs_used = BITMAP_ALLOC (&problem_data->reg_bitmaps);
387 df->regular_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
388 df->eh_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
389 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
390 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
391 df->insns_to_delete = BITMAP_ALLOC (&problem_data->insn_bitmaps);
392 df->insns_to_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
393 df->insns_to_notes_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
deb2741b 394 df_scan->optional_p = false;
e011eba9 395}
396
397
398/* Free all of the data associated with the scan problem. */
399
400static void
3072d30e 401df_scan_free (void)
e011eba9 402{
3072d30e 403 if (df_scan->problem_data)
404 df_scan_free_internal ();
d0802b39 405
e011eba9 406 if (df->blocks_to_analyze)
3072d30e 407 {
408 BITMAP_FREE (df->blocks_to_analyze);
409 df->blocks_to_analyze = NULL;
410 }
e011eba9 411
3072d30e 412 free (df_scan);
e011eba9 413}
414
3072d30e 415/* Dump the preamble for DF_SCAN dump. */
e011eba9 416static void
3072d30e 417df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
e011eba9 418{
e011eba9 419 int i;
ed6e85ae 420 int dcount = 0;
421 int ucount = 0;
422 int ecount = 0;
423 int icount = 0;
424 int ccount = 0;
425 basic_block bb;
426 rtx insn;
e011eba9 427
3072d30e 428 fprintf (file, ";; invalidated by call \t");
b888113c 429 df_print_regset (file, regs_invalidated_by_call_regset);
3072d30e 430 fprintf (file, ";; hardware regs used \t");
431 df_print_regset (file, df->hardware_regs_used);
432 fprintf (file, ";; regular block artificial uses \t");
433 df_print_regset (file, df->regular_block_artificial_uses);
434 fprintf (file, ";; eh block artificial uses \t");
435 df_print_regset (file, df->eh_block_artificial_uses);
436 fprintf (file, ";; entry block defs \t");
437 df_print_regset (file, df->entry_block_defs);
438 fprintf (file, ";; exit block uses \t");
439 df_print_regset (file, df->exit_block_uses);
440 fprintf (file, ";; regs ever live \t");
e011eba9 441 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3072d30e 442 if (df_regs_ever_live_p (i))
443 fprintf (file, " %d[%s]", i, reg_names[i]);
ed6e85ae 444 fprintf (file, "\n;; ref usage \t");
445
446 for (i = 0; i < (int)df->regs_inited; i++)
447 if (DF_REG_DEF_COUNT (i) || DF_REG_USE_COUNT (i) || DF_REG_EQ_USE_COUNT (i))
448 {
449 const char * sep = "";
450
451 fprintf (file, "r%d={", i);
452 if (DF_REG_DEF_COUNT (i))
453 {
454 fprintf (file, "%dd", DF_REG_DEF_COUNT (i));
455 sep = ",";
456 dcount += DF_REG_DEF_COUNT (i);
457 }
458 if (DF_REG_USE_COUNT (i))
459 {
460 fprintf (file, "%s%du", sep, DF_REG_USE_COUNT (i));
461 sep = ",";
462 ucount += DF_REG_USE_COUNT (i);
463 }
464 if (DF_REG_EQ_USE_COUNT (i))
465 {
466 fprintf (file, "%s%dd", sep, DF_REG_EQ_USE_COUNT (i));
467 ecount += DF_REG_EQ_USE_COUNT (i);
468 }
469 fprintf (file, "} ");
470 }
3072d30e 471
ed6e85ae 472 FOR_EACH_BB (bb)
473 FOR_BB_INSNS (bb, insn)
474 if (INSN_P (insn))
475 {
476 if (CALL_P (insn))
477 ccount++;
478 else
479 icount++;
480 }
481
482 fprintf (file, "\n;; total ref usage %d{%dd,%du,%de} in %d{%d regular + %d call} insns.\n",
483 dcount + ucount + ecount, dcount, ucount, ecount, icount + ccount, icount, ccount);
e011eba9 484}
485
3072d30e 486/* Dump the bb_info for a given basic block. */
487static void
488df_scan_start_block (basic_block bb, FILE *file)
489{
490 struct df_scan_bb_info *bb_info
491 = df_scan_get_bb_info (bb->index);
492
493 if (bb_info)
494 {
495 fprintf (file, ";; bb %d artificial_defs: ", bb->index);
496 df_refs_chain_dump (bb_info->artificial_defs, true, file);
497 fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
498 df_refs_chain_dump (bb_info->artificial_uses, true, file);
499 fprintf (file, "\n");
500 }
501#if 0
502 {
503 rtx insn;
504 FOR_BB_INSNS (bb, insn)
505 if (INSN_P (insn))
506 df_insn_debug (insn, false, file);
507 }
508#endif
509}
510
e011eba9 511static struct df_problem problem_SCAN =
512{
513 DF_SCAN, /* Problem id. */
514 DF_NONE, /* Direction. */
515 df_scan_alloc, /* Allocate the problem specific data. */
f64e6a69 516 NULL, /* Reset global information. */
e011eba9 517 df_scan_free_bb_info, /* Free basic block info. */
518 NULL, /* Local compute function. */
519 NULL, /* Init the solution specific data. */
520 NULL, /* Iterative solver. */
521 NULL, /* Confluence operator 0. */
522 NULL, /* Confluence operator n. */
523 NULL, /* Transfer function. */
524 NULL, /* Finalize function. */
525 df_scan_free, /* Free all of the problem information. */
3072d30e 526 NULL, /* Remove this problem from the stack of dataflow problems. */
527 df_scan_start_dump, /* Debugging. */
528 df_scan_start_block, /* Debugging start block. */
529 NULL, /* Debugging end block. */
530 NULL, /* Incremental solution verify start. */
6dfdc153 531 NULL, /* Incremental solution verify end. */
3e6933a8 532 NULL, /* Dependent problem. */
deb2741b 533 TV_DF_SCAN, /* Timing variable. */
534 false /* Reset blocks on dropping out of blocks_to_analyze. */
e011eba9 535};
536
537
538/* Create a new DATAFLOW instance and add it to an existing instance
539 of DF. The returned structure is what is used to get at the
540 solution. */
541
3072d30e 542void
543df_scan_add_problem (void)
e011eba9 544{
3072d30e 545 df_add_problem (&problem_SCAN);
e011eba9 546}
547
3072d30e 548\f
e011eba9 549/*----------------------------------------------------------------------------
550 Storage Allocation Utilities
551----------------------------------------------------------------------------*/
552
553
554/* First, grow the reg_info information. If the current size is less than
f0b5f617 555 the number of pseudos, grow to 25% more than the number of
e011eba9 556 pseudos.
557
558 Second, assure that all of the slots up to max_reg_num have been
559 filled with reg_info structures. */
560
3072d30e 561void
562df_grow_reg_info (void)
e011eba9 563{
564 unsigned int max_reg = max_reg_num ();
565 unsigned int new_size = max_reg;
3e6933a8 566 struct df_scan_problem_data *problem_data
3072d30e 567 = (struct df_scan_problem_data *) df_scan->problem_data;
e011eba9 568 unsigned int i;
569
3072d30e 570 if (df->regs_size < new_size)
e011eba9 571 {
572 new_size += new_size / 4;
364c0c59 573 df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
574 df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
575 df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
576 new_size);
577 df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
578 df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
579 df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
580 df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
3072d30e 581 df->regs_size = new_size;
e011eba9 582 }
583
3072d30e 584 for (i = df->regs_inited; i < max_reg; i++)
e011eba9 585 {
3072d30e 586 struct df_reg_info *reg_info;
587
364c0c59 588 reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
3072d30e 589 memset (reg_info, 0, sizeof (struct df_reg_info));
590 df->def_regs[i] = reg_info;
364c0c59 591 reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
3072d30e 592 memset (reg_info, 0, sizeof (struct df_reg_info));
593 df->use_regs[i] = reg_info;
364c0c59 594 reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
e011eba9 595 memset (reg_info, 0, sizeof (struct df_reg_info));
3072d30e 596 df->eq_use_regs[i] = reg_info;
597 df->def_info.begin[i] = 0;
598 df->def_info.count[i] = 0;
599 df->use_info.begin[i] = 0;
600 df->use_info.count[i] = 0;
e011eba9 601 }
602
3072d30e 603 df->regs_inited = max_reg;
e011eba9 604}
605
606
607/* Grow the ref information. */
608
609static void
610df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
611{
612 if (ref_info->refs_size < new_size)
613 {
ed6e85ae 614 ref_info->refs = XRESIZEVEC (df_ref, ref_info->refs, new_size);
e011eba9 615 memset (ref_info->refs + ref_info->refs_size, 0,
ed6e85ae 616 (new_size - ref_info->refs_size) *sizeof (df_ref));
e011eba9 617 ref_info->refs_size = new_size;
618 }
619}
620
621
3072d30e 622/* Check and grow the ref information if necessary. This routine
623 guarantees total_size + BITMAP_ADDEND amount of entries in refs
624 array. It updates ref_info->refs_size only and does not change
625 ref_info->total_size. */
626
627static void
628df_check_and_grow_ref_info (struct df_ref_info *ref_info,
629 unsigned bitmap_addend)
630{
631 if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
632 {
633 int new_size = ref_info->total_size + bitmap_addend;
634 new_size += ref_info->total_size / 4;
635 df_grow_ref_info (ref_info, new_size);
636 }
637}
638
639
e011eba9 640/* Grow the ref information. If the current size is less than the
641 number of instructions, grow to 25% more than the number of
642 instructions. */
643
3072d30e 644void
645df_grow_insn_info (void)
e011eba9 646{
647 unsigned int new_size = get_max_uid () + 1;
3072d30e 648 if (DF_INSN_SIZE () < new_size)
e011eba9 649 {
650 new_size += new_size / 4;
364c0c59 651 df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
e011eba9 652 memset (df->insns + df->insns_size, 0,
3072d30e 653 (new_size - DF_INSN_SIZE ()) *sizeof (struct df_insn_info *));
654 DF_INSN_SIZE () = new_size;
e011eba9 655 }
656}
657
658
659
660\f
661/*----------------------------------------------------------------------------
662 PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
663----------------------------------------------------------------------------*/
664
3072d30e 665/* Rescan all of the block_to_analyze or all of the blocks in the
666 function if df_set_blocks if blocks_to_analyze is NULL; */
e011eba9 667
668void
3072d30e 669df_scan_blocks (void)
e011eba9 670{
e011eba9 671 basic_block bb;
672
3072d30e 673 df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
674 df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;
3e6933a8 675
3072d30e 676 df_get_regular_block_artificial_uses (df->regular_block_artificial_uses);
677 df_get_eh_block_artificial_uses (df->eh_block_artificial_uses);
e011eba9 678
3072d30e 679 bitmap_ior_into (df->eh_block_artificial_uses,
680 df->regular_block_artificial_uses);
f64e6a69 681
3072d30e 682 /* ENTRY and EXIT blocks have special defs/uses. */
683 df_get_entry_block_def_set (df->entry_block_defs);
684 df_record_entry_block_defs (df->entry_block_defs);
685 df_get_exit_block_use_set (df->exit_block_uses);
686 df_record_exit_block_uses (df->exit_block_uses);
687 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
688 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
e011eba9 689
3072d30e 690 /* Regular blocks */
691 FOR_EACH_BB (bb)
e011eba9 692 {
3072d30e 693 unsigned int bb_index = bb->index;
694 df_bb_refs_record (bb_index, true);
e011eba9 695 }
e011eba9 696}
697
3e6933a8 698
e011eba9 699/* Create a new ref of type DF_REF_TYPE for register REG at address
30de5b55 700 LOC within INSN of BB. This function is only used externally.
701
702 If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
bf1f8fbc 703 DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the
704 fields if they were constants. Otherwise they should be -1 if
705 those flags were set. */
e011eba9 706
ed6e85ae 707df_ref
3072d30e 708df_ref_create (rtx reg, rtx *loc, rtx insn,
e011eba9 709 basic_block bb,
710 enum df_ref_type ref_type,
b9c74b4d 711 int ref_flags,
bf1f8fbc 712 int width, int offset, enum machine_mode mode)
e011eba9 713{
ed6e85ae 714 df_ref ref;
3072d30e 715 struct df_reg_info **reg_info;
716 struct df_ref_info *ref_info;
ed6e85ae 717 df_ref *ref_rec;
718 df_ref **ref_rec_ptr;
3072d30e 719 unsigned int count = 0;
720 bool add_to_table;
ed6e85ae 721 enum df_ref_class cl;
e011eba9 722
3072d30e 723 df_grow_reg_info ();
e011eba9 724
3072d30e 725 /* You cannot hack artificial refs. */
726 gcc_assert (insn);
ed6e85ae 727
728 if (width != -1 || offset != -1)
729 cl = DF_REF_EXTRACT;
730 else if (loc)
731 cl = DF_REF_REGULAR;
732 else
733 cl = DF_REF_BASE;
734 ref = df_ref_create_structure (cl, NULL, reg, loc, bb, DF_INSN_INFO_GET (insn),
bf1f8fbc 735 ref_type, ref_flags,
736 width, offset, mode);
e011eba9 737
ed6e85ae 738 if (DF_REF_REG_DEF_P (ref))
3072d30e 739 {
740 reg_info = df->def_regs;
741 ref_info = &df->def_info;
742 ref_rec_ptr = &DF_INSN_DEFS (insn);
743 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
744 }
745 else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
746 {
747 reg_info = df->eq_use_regs;
748 ref_info = &df->use_info;
749 ref_rec_ptr = &DF_INSN_EQ_USES (insn);
750 switch (ref_info->ref_order)
751 {
752 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
753 case DF_REF_ORDER_BY_REG_WITH_NOTES:
754 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
755 add_to_table = true;
756 break;
757 default:
758 add_to_table = false;
759 break;
760 }
761 }
762 else
763 {
764 reg_info = df->use_regs;
765 ref_info = &df->use_info;
766 ref_rec_ptr = &DF_INSN_USES (insn);
767 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
768 }
e011eba9 769
3072d30e 770 /* Do not add if ref is not in the right blocks. */
771 if (add_to_table && df->analyze_subset)
772 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
e011eba9 773
3072d30e 774 df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);
775
776 if (add_to_table)
777 switch (ref_info->ref_order)
778 {
779 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
780 case DF_REF_ORDER_BY_REG_WITH_NOTES:
781 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
782 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
783 break;
784 default:
785 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
786 break;
787 }
e011eba9 788
3072d30e 789 ref_rec = *ref_rec_ptr;
790 while (*ref_rec)
791 {
792 count++;
793 ref_rec++;
794 }
e011eba9 795
3072d30e 796 ref_rec = *ref_rec_ptr;
797 if (count)
e011eba9 798 {
ed6e85ae 799 ref_rec = XRESIZEVEC (df_ref, ref_rec, count+2);
3072d30e 800 *ref_rec_ptr = ref_rec;
801 ref_rec[count] = ref;
802 ref_rec[count+1] = NULL;
ed6e85ae 803 qsort (ref_rec, count + 1, sizeof (df_ref), df_ref_compare);
3072d30e 804 }
805 else
806 {
ed6e85ae 807 df_ref *ref_rec = XNEWVEC (df_ref, 2);
3072d30e 808 ref_rec[0] = ref;
809 ref_rec[1] = NULL;
810 *ref_rec_ptr = ref_rec;
811 }
e011eba9 812
3072d30e 813#if 0
814 if (dump_file)
815 {
816 fprintf (dump_file, "adding ref ");
817 df_ref_debug (ref, dump_file);
e011eba9 818 }
3072d30e 819#endif
820 /* By adding the ref directly, df_insn_rescan my not find any
821 differences even though the block will have changed. So we need
822 to mark the block dirty ourselves. */
823 df_set_bb_dirty (bb);
e011eba9 824
3072d30e 825 return ref;
e011eba9 826}
827
828
3072d30e 829\f
830/*----------------------------------------------------------------------------
831 UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
832----------------------------------------------------------------------------*/
833
30de5b55 834static void
ed6e85ae 835df_free_ref (df_ref ref)
30de5b55 836{
837 struct df_scan_problem_data *problem_data
838 = (struct df_scan_problem_data *) df_scan->problem_data;
839
ed6e85ae 840 switch (DF_REF_CLASS (ref))
841 {
842 case DF_REF_BASE:
843 pool_free (problem_data->ref_base_pool, ref);
844 break;
845
846 case DF_REF_ARTIFICIAL:
847 pool_free (problem_data->ref_artificial_pool, ref);
848 break;
849
850 case DF_REF_REGULAR:
851 pool_free (problem_data->ref_regular_pool, ref);
852 break;
853
854 case DF_REF_EXTRACT:
855 pool_free (problem_data->ref_extract_pool, ref);
856 break;
857 }
30de5b55 858}
859
e011eba9 860
3072d30e 861/* Unlink and delete REF at the reg_use, reg_eq_use or reg_def chain.
862 Also delete the def-use or use-def chain if it exists. */
863
864static void
ed6e85ae 865df_reg_chain_unlink (df_ref ref)
e011eba9 866{
ed6e85ae 867 df_ref next = DF_REF_NEXT_REG (ref);
868 df_ref prev = DF_REF_PREV_REG (ref);
3072d30e 869 int id = DF_REF_ID (ref);
e011eba9 870 struct df_reg_info *reg_info;
ed6e85ae 871 df_ref *refs = NULL;
e011eba9 872
ed6e85ae 873 if (DF_REF_REG_DEF_P (ref))
e011eba9 874 {
ed6e85ae 875 int regno = DF_REF_REGNO (ref);
876 reg_info = DF_REG_DEF_GET (regno);
3072d30e 877 refs = df->def_info.refs;
e011eba9 878 }
879 else
880 {
3072d30e 881 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
882 {
883 reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
884 switch (df->use_info.ref_order)
885 {
886 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
887 case DF_REF_ORDER_BY_REG_WITH_NOTES:
888 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
889 refs = df->use_info.refs;
890 break;
891 default:
892 break;
893 }
894 }
895 else
896 {
897 reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
898 refs = df->use_info.refs;
899 }
900 }
901
902 if (refs)
903 {
904 if (df->analyze_subset)
905 {
ed6e85ae 906 if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (ref)))
3072d30e 907 refs[id] = NULL;
908 }
909 else
910 refs[id] = NULL;
e011eba9 911 }
912
3072d30e 913 /* Delete any def-use or use-def chains that start here. It is
914 possible that there is trash in this field. This happens for
915 insns that have been deleted when rescanning has been deferred
916 and the chain problem has also been deleted. The chain tear down
917 code skips deleted insns. */
918 if (df_chain && DF_REF_CHAIN (ref))
919 df_chain_unlink (ref);
920
e011eba9 921 reg_info->n_refs--;
3072d30e 922 if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
923 {
924 gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
925 df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
926 }
e011eba9 927
928 /* Unlink from the reg chain. If there is no prev, this is the
929 first of the list. If not, just join the next and prev. */
930 if (prev)
3072d30e 931 DF_REF_NEXT_REG (prev) = next;
e011eba9 932 else
933 {
3072d30e 934 gcc_assert (reg_info->reg_chain == ref);
e011eba9 935 reg_info->reg_chain = next;
e011eba9 936 }
3072d30e 937 if (next)
938 DF_REF_PREV_REG (next) = prev;
e011eba9 939
30de5b55 940 df_free_ref (ref);
3072d30e 941}
942
943
944/* Remove REF from VEC. */
945
946static void
ed6e85ae 947df_ref_compress_rec (df_ref **vec_ptr, df_ref ref)
3072d30e 948{
ed6e85ae 949 df_ref *vec = *vec_ptr;
3072d30e 950
951 if (vec[1])
952 {
953 while (*vec && *vec != ref)
954 vec++;
955
956 while (*vec)
957 {
958 *vec = *(vec+1);
959 vec++;
960 }
961 }
962 else
963 {
964 free (vec);
965 *vec_ptr = df_null_ref_rec;
966 }
e011eba9 967}
968
969
970/* Unlink REF from all def-use/use-def chains, etc. */
971
972void
ed6e85ae 973df_ref_remove (df_ref ref)
e011eba9 974{
3072d30e 975#if 0
976 if (dump_file)
977 {
978 fprintf (dump_file, "removing ref ");
979 df_ref_debug (ref, dump_file);
980 }
981#endif
982
e011eba9 983 if (DF_REF_REG_DEF_P (ref))
984 {
3072d30e 985 if (DF_REF_IS_ARTIFICIAL (ref))
e011eba9 986 {
987 struct df_scan_bb_info *bb_info
ed6e85ae 988 = df_scan_get_bb_info (DF_REF_BBNO (ref));
3072d30e 989 df_ref_compress_rec (&bb_info->artificial_defs, ref);
e011eba9 990 }
991 else
3072d30e 992 {
993 unsigned int uid = DF_REF_INSN_UID (ref);
994 struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
995 df_ref_compress_rec (&insn_rec->defs, ref);
996 }
e011eba9 997 }
998 else
999 {
3072d30e 1000 if (DF_REF_IS_ARTIFICIAL (ref))
e011eba9 1001 {
1002 struct df_scan_bb_info *bb_info
ed6e85ae 1003 = df_scan_get_bb_info (DF_REF_BBNO (ref));
3072d30e 1004 df_ref_compress_rec (&bb_info->artificial_uses, ref);
1005 }
1006 else
1007 {
1008 unsigned int uid = DF_REF_INSN_UID (ref);
1009 struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
1010
1011 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
1012 df_ref_compress_rec (&insn_rec->eq_uses, ref);
1013 else
1014 df_ref_compress_rec (&insn_rec->uses, ref);
e011eba9 1015 }
e011eba9 1016 }
1017
3072d30e 1018 /* By deleting the ref directly, df_insn_rescan my not find any
1019 differences even though the block will have changed. So we need
1020 to mark the block dirty ourselves. */
1021 df_set_bb_dirty (DF_REF_BB (ref));
1022 df_reg_chain_unlink (ref);
e011eba9 1023}
1024
1025
3072d30e 1026/* Create the insn record for INSN. If there was one there, zero it
1027 out. */
e011eba9 1028
3072d30e 1029struct df_insn_info *
1030df_insn_create_insn_record (rtx insn)
e011eba9 1031{
3e6933a8 1032 struct df_scan_problem_data *problem_data
3072d30e 1033 = (struct df_scan_problem_data *) df_scan->problem_data;
1034 struct df_insn_info *insn_rec;
e011eba9 1035
3072d30e 1036 df_grow_insn_info ();
158b6cc9 1037 insn_rec = DF_INSN_INFO_GET (insn);
e011eba9 1038 if (!insn_rec)
1039 {
364c0c59 1040 insn_rec = (struct df_insn_info *) pool_alloc (problem_data->insn_pool);
158b6cc9 1041 DF_INSN_INFO_SET (insn, insn_rec);
e011eba9 1042 }
1043 memset (insn_rec, 0, sizeof (struct df_insn_info));
3072d30e 1044 insn_rec->insn = insn;
e011eba9 1045 return insn_rec;
1046}
1047
d0802b39 1048
3072d30e 1049/* Delete all du chain (DF_REF_CHAIN()) of all refs in the ref chain. */
e011eba9 1050
3072d30e 1051static void
ed6e85ae 1052df_ref_chain_delete_du_chain (df_ref *ref_rec)
e011eba9 1053{
3072d30e 1054 while (*ref_rec)
e011eba9 1055 {
ed6e85ae 1056 df_ref ref = *ref_rec;
3072d30e 1057 /* CHAIN is allocated by DF_CHAIN. So make sure to
1058 pass df_scan instance for the problem. */
1059 if (DF_REF_CHAIN (ref))
1060 df_chain_unlink (ref);
1061 ref_rec++;
1062 }
1063}
3e6933a8 1064
e011eba9 1065
3072d30e 1066/* Delete all refs in the ref chain. */
1067
1068static void
ed6e85ae 1069df_ref_chain_delete (df_ref *ref_rec)
3072d30e 1070{
ed6e85ae 1071 df_ref *start = ref_rec;
3072d30e 1072 while (*ref_rec)
1073 {
1074 df_reg_chain_unlink (*ref_rec);
1075 ref_rec++;
1076 }
1077
1078 /* If the list is empty, it has a special shared element that is not
1079 to be deleted. */
1080 if (*start)
1081 free (start);
1082}
1083
1084
1085/* Delete the hardreg chain. */
1086
1087static void
1088df_mw_hardreg_chain_delete (struct df_mw_hardreg **hardregs)
1089{
1090 struct df_scan_problem_data *problem_data;
1091
1092 if (!hardregs)
1093 return;
1094
1095 problem_data = (struct df_scan_problem_data *) df_scan->problem_data;
1096
1097 while (*hardregs)
1098 {
1099 pool_free (problem_data->mw_reg_pool, *hardregs);
1100 hardregs++;
1101 }
1102}
1103
1104
1105/* Delete all of the refs information from INSN. BB must be passed in
1106 except when called from df_process_deferred_rescans to mark the block
1107 as dirty. */
1108
1109void
1110df_insn_delete (basic_block bb, unsigned int uid)
1111{
1112 struct df_insn_info *insn_info = NULL;
1113 if (!df)
1114 return;
1115
1116 df_grow_bb_info (df_scan);
1117 df_grow_reg_info ();
1118
1119 /* The block must be marked as dirty now, rather than later as in
1120 df_insn_rescan and df_notes_rescan because it may not be there at
1121 rescanning time and the mark would blow up. */
1122 if (bb)
1123 df_set_bb_dirty (bb);
1124
1125 insn_info = DF_INSN_UID_SAFE_GET (uid);
1126
a3de79f9 1127 /* The client has deferred rescanning. */
3072d30e 1128 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1129 {
1130 if (insn_info)
1131 {
1132 bitmap_clear_bit (df->insns_to_rescan, uid);
1133 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1134 bitmap_set_bit (df->insns_to_delete, uid);
1135 }
1136 if (dump_file)
bff9eb26 1137 fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
3072d30e 1138 return;
1139 }
1140
1141 if (dump_file)
1142 fprintf (dump_file, "deleting insn with uid = %d.\n", uid);
1143
1144 bitmap_clear_bit (df->insns_to_delete, uid);
1145 bitmap_clear_bit (df->insns_to_rescan, uid);
1146 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1147 if (insn_info)
1148 {
1149 struct df_scan_problem_data *problem_data
1150 = (struct df_scan_problem_data *) df_scan->problem_data;
1151
1152 /* In general, notes do not have the insn_info fields
1153 initialized. However, combine deletes insns by changing them
1154 to notes. How clever. So we cannot just check if it is a
1155 valid insn before short circuiting this code, we need to see
1156 if we actually initialized it. */
1157 if (insn_info->defs)
1158 {
1159 df_mw_hardreg_chain_delete (insn_info->mw_hardregs);
1160
1161 if (df_chain)
1162 {
1163 df_ref_chain_delete_du_chain (insn_info->defs);
1164 df_ref_chain_delete_du_chain (insn_info->uses);
1165 df_ref_chain_delete_du_chain (insn_info->eq_uses);
1166 }
1167
1168 df_ref_chain_delete (insn_info->defs);
1169 df_ref_chain_delete (insn_info->uses);
1170 df_ref_chain_delete (insn_info->eq_uses);
1171 }
e011eba9 1172 pool_free (problem_data->insn_pool, insn_info);
3072d30e 1173 DF_INSN_UID_SET (uid, NULL);
e011eba9 1174 }
1175}
1176
1177
3072d30e 1178/* Free all of the refs and the mw_hardregs in COLLECTION_REC. */
d0802b39 1179
3072d30e 1180static void
1181df_free_collection_rec (struct df_collection_rec *collection_rec)
d0802b39 1182{
3072d30e 1183 struct df_scan_problem_data *problem_data
1184 = (struct df_scan_problem_data *) df_scan->problem_data;
ed6e85ae 1185 df_ref *ref;
3072d30e 1186 struct df_mw_hardreg **mw;
1187
1188 if (collection_rec->def_vec)
1189 for (ref = collection_rec->def_vec; *ref; ref++)
30de5b55 1190 df_free_ref (*ref);
3072d30e 1191 if (collection_rec->use_vec)
1192 for (ref = collection_rec->use_vec; *ref; ref++)
30de5b55 1193 df_free_ref (*ref);
3072d30e 1194 if (collection_rec->eq_use_vec)
1195 for (ref = collection_rec->eq_use_vec; *ref; ref++)
30de5b55 1196 df_free_ref (*ref);
3072d30e 1197 if (collection_rec->mw_vec)
1198 for (mw = collection_rec->mw_vec; *mw; mw++)
1199 pool_free (problem_data->mw_reg_pool, *mw);
1200}
d0802b39 1201
3072d30e 1202
1203/* Rescan INSN. Return TRUE if the rescanning produced any changes. */
1204
1205bool
1206df_insn_rescan (rtx insn)
1207{
1208 unsigned int uid = INSN_UID (insn);
1209 struct df_insn_info *insn_info = NULL;
1210 basic_block bb = BLOCK_FOR_INSN (insn);
1211 struct df_collection_rec collection_rec;
ed6e85ae 1212 collection_rec.def_vec = XALLOCAVEC (df_ref, 1000);
1213 collection_rec.use_vec = XALLOCAVEC (df_ref, 1000);
1214 collection_rec.eq_use_vec = XALLOCAVEC (df_ref, 1000);
364c0c59 1215 collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 100);
3072d30e 1216
1217 if ((!df) || (!INSN_P (insn)))
1218 return false;
1219
1220 if (!bb)
d0802b39 1221 {
3072d30e 1222 if (dump_file)
1223 fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
1224 return false;
1225 }
1226
1227 /* The client has disabled rescanning and plans to do it itself. */
1228 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1229 return false;
1230
1231 df_grow_bb_info (df_scan);
1232 df_grow_reg_info ();
1233
1234 insn_info = DF_INSN_UID_SAFE_GET (uid);
1235
a3de79f9 1236 /* The client has deferred rescanning. */
3072d30e 1237 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1238 {
1239 if (!insn_info)
1240 {
1241 insn_info = df_insn_create_insn_record (insn);
1242 insn_info->defs = df_null_ref_rec;
1243 insn_info->uses = df_null_ref_rec;
1244 insn_info->eq_uses = df_null_ref_rec;
1245 insn_info->mw_hardregs = df_null_mw_rec;
1246 }
1247 if (dump_file)
bff9eb26 1248 fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);
3072d30e 1249
1250 bitmap_clear_bit (df->insns_to_delete, uid);
1251 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1252 bitmap_set_bit (df->insns_to_rescan, INSN_UID (insn));
1253 return false;
1254 }
1255
1256 bitmap_clear_bit (df->insns_to_delete, uid);
1257 bitmap_clear_bit (df->insns_to_rescan, uid);
1258 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1259 if (insn_info)
1260 {
2e81afe5 1261 int luid;
3072d30e 1262 bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
1263 /* If there's no change, return false. */
1264 if (the_same)
d0802b39 1265 {
3072d30e 1266 df_free_collection_rec (&collection_rec);
1267 if (dump_file)
1268 fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
1269 return false;
d0802b39 1270 }
3072d30e 1271 if (dump_file)
1272 fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);
1273
2e81afe5 1274 /* There's change - we need to delete the existing info.
1275 Since the insn isn't moved, we can salvage its LUID. */
1276 luid = DF_INSN_LUID (insn);
3072d30e 1277 df_insn_delete (NULL, uid);
1278 df_insn_create_insn_record (insn);
2e81afe5 1279 DF_INSN_LUID (insn) = luid;
3072d30e 1280 }
1281 else
1282 {
158b6cc9 1283 struct df_insn_info *insn_info = df_insn_create_insn_record (insn);
1284 df_insn_refs_collect (&collection_rec, bb, insn_info);
3072d30e 1285 if (dump_file)
1286 fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
1287 }
1288
1289 df_refs_add_to_chains (&collection_rec, bb, insn);
1290 df_set_bb_dirty (bb);
1291 return true;
1292}
1293
1294
1295/* Rescan all of the insns in the function. Note that the artificial
1296 uses and defs are not touched. This function will destroy def-se
1297 or use-def chains. */
1298
1299void
1300df_insn_rescan_all (void)
1301{
1302 bool no_insn_rescan = false;
1303 bool defer_insn_rescan = false;
1304 basic_block bb;
1305 bitmap_iterator bi;
1306 unsigned int uid;
1307 bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
1308
1309 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1310 {
1311 df_clear_flags (DF_NO_INSN_RESCAN);
1312 no_insn_rescan = true;
d0802b39 1313 }
1314
3072d30e 1315 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
d0802b39 1316 {
3072d30e 1317 df_clear_flags (DF_DEFER_INSN_RESCAN);
1318 defer_insn_rescan = true;
1319 }
1320
1321 bitmap_copy (tmp, df->insns_to_delete);
1322 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1323 {
1324 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1325 if (insn_info)
1326 df_insn_delete (NULL, uid);
d0802b39 1327 }
3072d30e 1328
1329 BITMAP_FREE (tmp);
1330 bitmap_clear (df->insns_to_delete);
1331 bitmap_clear (df->insns_to_rescan);
1332 bitmap_clear (df->insns_to_notes_rescan);
1333
1334 FOR_EACH_BB (bb)
1335 {
1336 rtx insn;
1337 FOR_BB_INSNS (bb, insn)
1338 {
1339 df_insn_rescan (insn);
1340 }
1341 }
1342
1343 if (no_insn_rescan)
1344 df_set_flags (DF_NO_INSN_RESCAN);
1345 if (defer_insn_rescan)
1346 df_set_flags (DF_DEFER_INSN_RESCAN);
d0802b39 1347}
1348
1349
a3de79f9 1350/* Process all of the deferred rescans or deletions. */
e011eba9 1351
3072d30e 1352void
1353df_process_deferred_rescans (void)
e011eba9 1354{
3072d30e 1355 bool no_insn_rescan = false;
1356 bool defer_insn_rescan = false;
e011eba9 1357 bitmap_iterator bi;
3072d30e 1358 unsigned int uid;
1359 bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
1360
1361 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1362 {
1363 df_clear_flags (DF_NO_INSN_RESCAN);
1364 no_insn_rescan = true;
1365 }
1366
1367 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1368 {
1369 df_clear_flags (DF_DEFER_INSN_RESCAN);
1370 defer_insn_rescan = true;
1371 }
1372
1373 if (dump_file)
a3de79f9 1374 fprintf (dump_file, "starting the processing of deferred insns\n");
3072d30e 1375
1376 bitmap_copy (tmp, df->insns_to_delete);
1377 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1378 {
1379 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1380 if (insn_info)
1381 df_insn_delete (NULL, uid);
1382 }
1383
1384 bitmap_copy (tmp, df->insns_to_rescan);
1385 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1386 {
1387 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1388 if (insn_info)
1389 df_insn_rescan (insn_info->insn);
1390 }
1391
1392 bitmap_copy (tmp, df->insns_to_notes_rescan);
1393 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1394 {
1395 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1396 if (insn_info)
1397 df_notes_rescan (insn_info->insn);
1398 }
1399
1400 if (dump_file)
a3de79f9 1401 fprintf (dump_file, "ending the processing of deferred insns\n");
3072d30e 1402
1403 BITMAP_FREE (tmp);
1404 bitmap_clear (df->insns_to_delete);
1405 bitmap_clear (df->insns_to_rescan);
1406 bitmap_clear (df->insns_to_notes_rescan);
1407
1408 if (no_insn_rescan)
1409 df_set_flags (DF_NO_INSN_RESCAN);
1410 if (defer_insn_rescan)
1411 df_set_flags (DF_DEFER_INSN_RESCAN);
1412
1413 /* If someone changed regs_ever_live during this pass, fix up the
1414 entry and exit blocks. */
1415 if (df->redo_entry_and_exit)
1416 {
1417 df_update_entry_exit_and_calls ();
1418 df->redo_entry_and_exit = false;
1419 }
1420}
1421
e011eba9 1422
3072d30e 1423/* Count the number of refs. Include the defs if INCLUDE_DEFS. Include
1424 the uses if INCLUDE_USES. Include the eq_uses if
1425 INCLUDE_EQ_USES. */
1426
1427static unsigned int
1428df_count_refs (bool include_defs, bool include_uses,
1429 bool include_eq_uses)
1430{
1431 unsigned int regno;
1432 int size = 0;
1433 unsigned int m = df->regs_inited;
1434
1435 for (regno = 0; regno < m; regno++)
e011eba9 1436 {
3072d30e 1437 if (include_defs)
1438 size += DF_REG_DEF_COUNT (regno);
1439 if (include_uses)
1440 size += DF_REG_USE_COUNT (regno);
1441 if (include_eq_uses)
1442 size += DF_REG_EQ_USE_COUNT (regno);
e011eba9 1443 }
3072d30e 1444 return size;
e011eba9 1445}
1446
1447
1448/* Take build ref table for either the uses or defs from the reg-use
3072d30e 1449 or reg-def chains. This version processes the refs in reg order
1450 which is likely to be best if processing the whole function. */
e011eba9 1451
3072d30e 1452static void
1453df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
1454 bool include_defs,
1455 bool include_uses,
1456 bool include_eq_uses)
e011eba9 1457{
3072d30e 1458 unsigned int m = df->regs_inited;
e011eba9 1459 unsigned int regno;
1460 unsigned int offset = 0;
3072d30e 1461 unsigned int start;
e011eba9 1462
3072d30e 1463 if (df->changeable_flags & DF_NO_HARD_REGS)
1464 {
1465 start = FIRST_PSEUDO_REGISTER;
1466 memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1467 memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
e011eba9 1468 }
3072d30e 1469 else
1470 start = 0;
e011eba9 1471
3072d30e 1472 ref_info->total_size
1473 = df_count_refs (include_defs, include_uses, include_eq_uses);
1474
1475 df_check_and_grow_ref_info (ref_info, 1);
1476
1477 for (regno = start; regno < m; regno++)
e011eba9 1478 {
e011eba9 1479 int count = 0;
3072d30e 1480 ref_info->begin[regno] = offset;
1481 if (include_defs)
1482 {
ed6e85ae 1483 df_ref ref = DF_REG_DEF_CHAIN (regno);
3072d30e 1484 while (ref)
1485 {
1486 ref_info->refs[offset] = ref;
1487 DF_REF_ID (ref) = offset++;
1488 count++;
1489 ref = DF_REF_NEXT_REG (ref);
1490 gcc_assert (offset < ref_info->refs_size);
1491 }
1492 }
1493 if (include_uses)
e011eba9 1494 {
ed6e85ae 1495 df_ref ref = DF_REG_USE_CHAIN (regno);
e011eba9 1496 while (ref)
1497 {
1498 ref_info->refs[offset] = ref;
1499 DF_REF_ID (ref) = offset++;
3072d30e 1500 count++;
e011eba9 1501 ref = DF_REF_NEXT_REG (ref);
3072d30e 1502 gcc_assert (offset < ref_info->refs_size);
1503 }
1504 }
1505 if (include_eq_uses)
1506 {
ed6e85ae 1507 df_ref ref = DF_REG_EQ_USE_CHAIN (regno);
3072d30e 1508 while (ref)
1509 {
1510 ref_info->refs[offset] = ref;
1511 DF_REF_ID (ref) = offset++;
e011eba9 1512 count++;
3072d30e 1513 ref = DF_REF_NEXT_REG (ref);
1514 gcc_assert (offset < ref_info->refs_size);
e011eba9 1515 }
e011eba9 1516 }
3072d30e 1517 ref_info->count[regno] = count;
e011eba9 1518 }
3072d30e 1519
e011eba9 1520 /* The bitmap size is not decremented when refs are deleted. So
1521 reset it now that we have squished out all of the empty
1522 slots. */
3072d30e 1523 ref_info->table_size = offset;
e011eba9 1524}
1525
e011eba9 1526
3072d30e 1527/* Take build ref table for either the uses or defs from the reg-use
1528 or reg-def chains. This version processes the refs in insn order
1529 which is likely to be best if processing some segment of the
1530 function. */
e011eba9 1531
3072d30e 1532static void
1533df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
1534 bool include_defs,
1535 bool include_uses,
1536 bool include_eq_uses)
e011eba9 1537{
3072d30e 1538 bitmap_iterator bi;
1539 unsigned int bb_index;
1540 unsigned int m = df->regs_inited;
1541 unsigned int offset = 0;
1542 unsigned int r;
1543 unsigned int start
1544 = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;
e011eba9 1545
3072d30e 1546 memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
1547 memset (ref_info->count, 0, sizeof (int) * df->regs_inited);
1548
1549 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1550 df_check_and_grow_ref_info (ref_info, 1);
e011eba9 1551
3072d30e 1552 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
e011eba9 1553 {
3072d30e 1554 basic_block bb = BASIC_BLOCK (bb_index);
1555 rtx insn;
ed6e85ae 1556 df_ref *ref_rec;
3072d30e 1557
1558 if (include_defs)
1559 for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
3e6933a8 1560 {
3072d30e 1561 unsigned int regno = DF_REF_REGNO (*ref_rec);
1562 ref_info->count[regno]++;
3e6933a8 1563 }
3072d30e 1564 if (include_uses)
1565 for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
3e6933a8 1566 {
3072d30e 1567 unsigned int regno = DF_REF_REGNO (*ref_rec);
1568 ref_info->count[regno]++;
3e6933a8 1569 }
e011eba9 1570
3072d30e 1571 FOR_BB_INSNS (bb, insn)
1572 {
1573 if (INSN_P (insn))
1574 {
1575 unsigned int uid = INSN_UID (insn);
1576
1577 if (include_defs)
1578 for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
1579 {
1580 unsigned int regno = DF_REF_REGNO (*ref_rec);
1581 ref_info->count[regno]++;
1582 }
1583 if (include_uses)
1584 for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
1585 {
1586 unsigned int regno = DF_REF_REGNO (*ref_rec);
1587 ref_info->count[regno]++;
1588 }
1589 if (include_eq_uses)
1590 for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
1591 {
1592 unsigned int regno = DF_REF_REGNO (*ref_rec);
1593 ref_info->count[regno]++;
1594 }
1595 }
1596 }
1597 }
1598
1599 for (r = start; r < m; r++)
1600 {
1601 ref_info->begin[r] = offset;
1602 offset += ref_info->count[r];
1603 ref_info->count[r] = 0;
1604 }
1605
1606 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1607 {
1608 basic_block bb = BASIC_BLOCK (bb_index);
1609 rtx insn;
ed6e85ae 1610 df_ref *ref_rec;
3072d30e 1611
1612 if (include_defs)
1613 for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
3e6933a8 1614 {
ed6e85ae 1615 df_ref ref = *ref_rec;
3072d30e 1616 unsigned int regno = DF_REF_REGNO (ref);
1617 if (regno >= start)
3e6933a8 1618 {
3072d30e 1619 unsigned int id
1620 = ref_info->begin[regno] + ref_info->count[regno]++;
1621 DF_REF_ID (ref) = id;
1622 ref_info->refs[id] = ref;
3e6933a8 1623 }
3e6933a8 1624 }
3072d30e 1625 if (include_uses)
1626 for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
3e6933a8 1627 {
ed6e85ae 1628 df_ref ref = *ref_rec;
3072d30e 1629 unsigned int regno = DF_REF_REGNO (ref);
1630 if (regno >= start)
1631 {
1632 unsigned int id
1633 = ref_info->begin[regno] + ref_info->count[regno]++;
1634 DF_REF_ID (ref) = id;
1635 ref_info->refs[id] = ref;
1636 }
3e6933a8 1637 }
3072d30e 1638
1639 FOR_BB_INSNS (bb, insn)
1640 {
1641 if (INSN_P (insn))
1642 {
1643 unsigned int uid = INSN_UID (insn);
1644
1645 if (include_defs)
1646 for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
1647 {
ed6e85ae 1648 df_ref ref = *ref_rec;
3072d30e 1649 unsigned int regno = DF_REF_REGNO (ref);
1650 if (regno >= start)
1651 {
1652 unsigned int id
1653 = ref_info->begin[regno] + ref_info->count[regno]++;
1654 DF_REF_ID (ref) = id;
1655 ref_info->refs[id] = ref;
1656 }
1657 }
1658 if (include_uses)
1659 for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
1660 {
ed6e85ae 1661 df_ref ref = *ref_rec;
3072d30e 1662 unsigned int regno = DF_REF_REGNO (ref);
1663 if (regno >= start)
1664 {
1665 unsigned int id
1666 = ref_info->begin[regno] + ref_info->count[regno]++;
1667 DF_REF_ID (ref) = id;
1668 ref_info->refs[id] = ref;
1669 }
1670 }
1671 if (include_eq_uses)
1672 for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
1673 {
ed6e85ae 1674 df_ref ref = *ref_rec;
3072d30e 1675 unsigned int regno = DF_REF_REGNO (ref);
1676 if (regno >= start)
1677 {
1678 unsigned int id
1679 = ref_info->begin[regno] + ref_info->count[regno]++;
1680 DF_REF_ID (ref) = id;
1681 ref_info->refs[id] = ref;
1682 }
1683 }
1684 }
1685 }
1686 }
1687
1688 /* The bitmap size is not decremented when refs are deleted. So
1689 reset it now that we have squished out all of the empty
1690 slots. */
1691
1692 ref_info->table_size = offset;
1693}
1694
1695/* Take build ref table for either the uses or defs from the reg-use
1696 or reg-def chains. */
1697
1698static void
1699df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
1700 bool include_defs,
1701 bool include_uses,
1702 bool include_eq_uses)
1703{
1704 if (df->analyze_subset)
1705 df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
1706 include_uses, include_eq_uses);
1707 else
1708 df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
1709 include_uses, include_eq_uses);
1710}
1711
1712
1713/* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET. */
1714static unsigned int
1715df_add_refs_to_table (unsigned int offset,
1716 struct df_ref_info *ref_info,
ed6e85ae 1717 df_ref *ref_vec)
3072d30e 1718{
1719 while (*ref_vec)
1720 {
ed6e85ae 1721 df_ref ref = *ref_vec;
3072d30e 1722 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
1723 || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
1724 {
1725 ref_info->refs[offset] = ref;
1726 DF_REF_ID (*ref_vec) = offset++;
1727 }
1728 ref_vec++;
1729 }
1730 return offset;
1731}
1732
1733
1734/* Count the number of refs in all of the insns of BB. Include the
1735 defs if INCLUDE_DEFS. Include the uses if INCLUDE_USES. Include the
1736 eq_uses if INCLUDE_EQ_USES. */
1737
1738static unsigned int
1739df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
1740 struct df_ref_info *ref_info,
1741 bool include_defs, bool include_uses,
1742 bool include_eq_uses)
1743{
1744 rtx insn;
1745
1746 if (include_defs)
1747 offset = df_add_refs_to_table (offset, ref_info,
1748 df_get_artificial_defs (bb->index));
1749 if (include_uses)
1750 offset = df_add_refs_to_table (offset, ref_info,
1751 df_get_artificial_uses (bb->index));
1752
1753 FOR_BB_INSNS (bb, insn)
1754 if (INSN_P (insn))
1755 {
1756 unsigned int uid = INSN_UID (insn);
1757 if (include_defs)
1758 offset = df_add_refs_to_table (offset, ref_info,
1759 DF_INSN_UID_DEFS (uid));
1760 if (include_uses)
1761 offset = df_add_refs_to_table (offset, ref_info,
1762 DF_INSN_UID_USES (uid));
1763 if (include_eq_uses)
1764 offset = df_add_refs_to_table (offset, ref_info,
1765 DF_INSN_UID_EQ_USES (uid));
3e6933a8 1766 }
3072d30e 1767 return offset;
1768}
1769
1770
bef304b8 1771/* Organize the refs by insn into the table in REF_INFO. If
3072d30e 1772 blocks_to_analyze is defined, use that set, otherwise the entire
1773 program. Include the defs if INCLUDE_DEFS. Include the uses if
1774 INCLUDE_USES. Include the eq_uses if INCLUDE_EQ_USES. */
1775
1776static void
1777df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
1778 bool include_defs, bool include_uses,
1779 bool include_eq_uses)
1780{
1781 basic_block bb;
1782 unsigned int offset = 0;
1783
1784 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1785 df_check_and_grow_ref_info (ref_info, 1);
1786 if (df->blocks_to_analyze)
1787 {
1788 bitmap_iterator bi;
1789 unsigned int index;
1790
1791 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
1792 {
1793 offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK (index), offset, ref_info,
1794 include_defs, include_uses,
1795 include_eq_uses);
1796 }
1797
1798 ref_info->table_size = offset;
1799 }
1800 else
1801 {
1802 FOR_ALL_BB (bb)
1803 offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
1804 include_defs, include_uses,
1805 include_eq_uses);
1806 ref_info->table_size = offset;
1807 }
1808}
1809
1810
1811/* If the use refs in DF are not organized, reorganize them. */
1812
1813void
1814df_maybe_reorganize_use_refs (enum df_ref_order order)
1815{
1816 if (order == df->use_info.ref_order)
1817 return;
1818
1819 switch (order)
1820 {
1821 case DF_REF_ORDER_BY_REG:
1822 df_reorganize_refs_by_reg (&df->use_info, false, true, false);
1823 break;
1824
1825 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1826 df_reorganize_refs_by_reg (&df->use_info, false, true, true);
1827 break;
1828
1829 case DF_REF_ORDER_BY_INSN:
1830 df_reorganize_refs_by_insn (&df->use_info, false, true, false);
1831 break;
1832
1833 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1834 df_reorganize_refs_by_insn (&df->use_info, false, true, true);
1835 break;
1836
1837 case DF_REF_ORDER_NO_TABLE:
1838 free (df->use_info.refs);
1839 df->use_info.refs = NULL;
1840 df->use_info.refs_size = 0;
1841 break;
1842
1843 case DF_REF_ORDER_UNORDERED:
1844 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1845 gcc_unreachable ();
1846 break;
1847 }
1848
1849 df->use_info.ref_order = order;
1850}
1851
1852
1853/* If the def refs in DF are not organized, reorganize them. */
1854
1855void
1856df_maybe_reorganize_def_refs (enum df_ref_order order)
1857{
1858 if (order == df->def_info.ref_order)
1859 return;
1860
1861 switch (order)
1862 {
1863 case DF_REF_ORDER_BY_REG:
1864 df_reorganize_refs_by_reg (&df->def_info, true, false, false);
1865 break;
1866
1867 case DF_REF_ORDER_BY_INSN:
1868 df_reorganize_refs_by_insn (&df->def_info, true, false, false);
1869 break;
1870
1871 case DF_REF_ORDER_NO_TABLE:
1872 free (df->def_info.refs);
1873 df->def_info.refs = NULL;
1874 df->def_info.refs_size = 0;
1875 break;
1876
1877 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1878 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1879 case DF_REF_ORDER_UNORDERED:
1880 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1881 gcc_unreachable ();
1882 break;
1883 }
1884
1885 df->def_info.ref_order = order;
1886}
1887
1888
3072d30e 1889/* Change all of the basic block references in INSN to use the insn's
1890 current basic block. This function is called from routines that move
1891 instructions from one block to another. */
1892
1893void
a2bdd643 1894df_insn_change_bb (rtx insn, basic_block new_bb)
3072d30e 1895{
a2bdd643 1896 basic_block old_bb = BLOCK_FOR_INSN (insn);
3072d30e 1897 struct df_insn_info *insn_info;
1898 unsigned int uid = INSN_UID (insn);
1899
a2bdd643 1900 if (old_bb == new_bb)
1901 return;
1902
1903 set_block_for_insn (insn, new_bb);
1904
3072d30e 1905 if (!df)
1906 return;
1907
1908 if (dump_file)
1909 fprintf (dump_file, "changing bb of uid %d\n", uid);
1910
1911 insn_info = DF_INSN_UID_SAFE_GET (uid);
1912 if (insn_info == NULL)
1913 {
1914 if (dump_file)
1915 fprintf (dump_file, " unscanned insn\n");
1916 df_insn_rescan (insn);
1917 return;
1918 }
1919
1920 if (!INSN_P (insn))
1921 return;
1922
3072d30e 1923 df_set_bb_dirty (new_bb);
1924 if (old_bb)
1925 {
1926 if (dump_file)
1927 fprintf (dump_file, " from %d to %d\n",
1928 old_bb->index, new_bb->index);
1929 df_set_bb_dirty (old_bb);
1930 }
1931 else
1932 if (dump_file)
1933 fprintf (dump_file, " to %d\n", new_bb->index);
1934}
1935
1936
1937/* Helper function for df_ref_change_reg_with_loc. */
1938
1939static void
ed6e85ae 1940df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
1941 struct df_reg_info *new_df,
3072d30e 1942 int new_regno, rtx loc)
1943{
ed6e85ae 1944 df_ref the_ref = old_df->reg_chain;
3072d30e 1945
1946 while (the_ref)
1947 {
ed6e85ae 1948 if ((!DF_REF_IS_ARTIFICIAL (the_ref))
1949 && (DF_REF_LOC (the_ref))
1950 && (*DF_REF_LOC (the_ref) == loc))
3072d30e 1951 {
ed6e85ae 1952 df_ref next_ref = DF_REF_NEXT_REG (the_ref);
1953 df_ref prev_ref = DF_REF_PREV_REG (the_ref);
1954 df_ref *ref_vec, *ref_vec_t;
1955 struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);
3072d30e 1956 unsigned int count = 0;
1957
1958 DF_REF_REGNO (the_ref) = new_regno;
1959 DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];
1960
1961 /* Pull the_ref out of the old regno chain. */
1962 if (prev_ref)
ed6e85ae 1963 DF_REF_NEXT_REG (prev_ref) = next_ref;
3072d30e 1964 else
9ce37fa7 1965 old_df->reg_chain = next_ref;
3072d30e 1966 if (next_ref)
ed6e85ae 1967 DF_REF_PREV_REG (next_ref) = prev_ref;
9ce37fa7 1968 old_df->n_refs--;
3072d30e 1969
1970 /* Put the ref into the new regno chain. */
ed6e85ae 1971 DF_REF_PREV_REG (the_ref) = NULL;
1972 DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
9ce37fa7 1973 if (new_df->reg_chain)
ed6e85ae 1974 DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
9ce37fa7 1975 new_df->reg_chain = the_ref;
1976 new_df->n_refs++;
a43c254c 1977 if (DF_REF_BB (the_ref))
1978 df_set_bb_dirty (DF_REF_BB (the_ref));
3072d30e 1979
ed6e85ae 1980 /* Need to sort the record again that the ref was in because
1981 the regno is a sorting key. First, find the right
1982 record. */
1983 if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
1984 ref_vec = insn_info->eq_uses;
3072d30e 1985 else
ed6e85ae 1986 ref_vec = insn_info->uses;
1987 if (dump_file)
1988 fprintf (dump_file, "changing reg in insn %d\n",
1989 DF_REF_INSN_UID (the_ref));
1990
3072d30e 1991 ref_vec_t = ref_vec;
ed6e85ae 1992
3072d30e 1993 /* Find the length. */
1994 while (*ref_vec_t)
1995 {
1996 count++;
1997 ref_vec_t++;
1998 }
ed6e85ae 1999 qsort (ref_vec, count, sizeof (df_ref ), df_ref_compare);
3072d30e 2000
2001 the_ref = next_ref;
2002 }
2003 else
ed6e85ae 2004 the_ref = DF_REF_NEXT_REG (the_ref);
3072d30e 2005 }
2006}
2007
2008
2009/* Change the regno of all refs that contained LOC from OLD_REGNO to
ed6e85ae 2010 NEW_REGNO. Refs that do not match LOC are not changed which means
2011 that artificial refs are not changed since they have no loc. This
2012 call is to support the SET_REGNO macro. */
3072d30e 2013
2014void
2015df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
2016{
2017 if ((!df) || (old_regno == -1) || (old_regno == new_regno))
2018 return;
2019
2020 df_grow_reg_info ();
2021
2022 df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
2023 DF_REG_DEF_GET (new_regno), new_regno, loc);
2024 df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
2025 DF_REG_USE_GET (new_regno), new_regno, loc);
2026 df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
2027 DF_REG_EQ_USE_GET (new_regno), new_regno, loc);
2028}
2029
2030
 2031/* Delete the mw_hardregs that point into the eq_notes. */
 2032
/* Walk INSN_INFO's NULL-terminated mw_hardregs vector, returning every
   record flagged DF_REF_IN_NOTE to the allocation pool and compacting
   the vector in place.  Returns the number of records deleted.  When
   nothing survives, the whole vector is freed and replaced by the
   shared empty record df_null_mw_rec (and 0 is returned).  */
 2033static unsigned int
 2034df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
 2035{
 2036 struct df_mw_hardreg **mw_vec = insn_info->mw_hardregs;
 2037 unsigned int deleted = 0;
 2038 unsigned int count = 0;
 2039 struct df_scan_problem_data *problem_data
 2040 = (struct df_scan_problem_data *) df_scan->problem_data;
 2041
 2042 if (!*mw_vec)
 2043 return 0;
 2044
 2045 while (*mw_vec)
 2046 {
 2047 if ((*mw_vec)->flags & DF_REF_IN_NOTE)
 2048 {
 2049 struct df_mw_hardreg **temp_vec = mw_vec;
 2050
 2051 pool_free (problem_data->mw_reg_pool, *mw_vec);
 2052 temp_vec = mw_vec;
 2053 /* Shove the remaining ones down one to fill the gap. While
 2054 this looks n**2, it is highly unusual to have any mw regs
 2055 in eq_notes and the chances of more than one are almost
 2056 non existent. */
 2057 while (*temp_vec)
 2058 {
 2059 *temp_vec = *(temp_vec + 1);
 2060 temp_vec++;
 2061 }
 2062 deleted++;
/* MW_VEC is deliberately not advanced here: the shift above moved the
   next unexamined record into the current slot.  */
 2063 }
 2064 else
 2065 {
 2066 mw_vec++;
 2067 count++;
 2068 }
 2069 }
 2070
/* COUNT is the number of surviving (non-note) records.  */
 2071 if (count == 0)
 2072 {
ce299759 2073 df_scan_free_mws_vec (insn_info->mw_hardregs);
3072d30e 2074 insn_info->mw_hardregs = df_null_mw_rec;
 2075 return 0;
 2076 }
 2077 return deleted;
 2078}
2079
2080
 2081/* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN. */
 2082
/* Rebuild only the eq_uses (and any multiword hardreg records that
   live in the notes) of INSN, leaving its defs and plain uses alone.
   Honors the DF_NO_INSN_RESCAN and DF_DEFER_INSN_RESCAN client flags;
   an insn with no insn_info yet falls back to a full df_insn_rescan.  */
 2083void
 2084df_notes_rescan (rtx insn)
 2085{
 2086 struct df_insn_info *insn_info;
 2087 unsigned int uid = INSN_UID (insn);
 2088
 2089 if (!df)
 2090 return;
 2091
 2092 /* The client has disabled rescanning and plans to do it itself. */
 2093 if (df->changeable_flags & DF_NO_INSN_RESCAN)
 2094 return;
 2095
3e3659e7 2096 /* Do nothing if the insn hasn't been emitted yet. */
 2097 if (!BLOCK_FOR_INSN (insn))
 2098 return;
 2099
3072d30e 2100 df_grow_bb_info (df_scan);
 2101 df_grow_reg_info ();
 2102
 2103 insn_info = DF_INSN_UID_SAFE_GET (INSN_UID(insn));
 2104
a3de79f9 2105 /* The client has deferred rescanning. */
3072d30e 2106 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
 2107 {
 2108 if (!insn_info)
 2109 {
/* Create a placeholder record with the shared empty vectors so the
   deferred pass has something to update.  */
 2110 insn_info = df_insn_create_insn_record (insn);
 2111 insn_info->defs = df_null_ref_rec;
 2112 insn_info->uses = df_null_ref_rec;
 2113 insn_info->eq_uses = df_null_ref_rec;
 2114 insn_info->mw_hardregs = df_null_mw_rec;
 2115 }
 2116
 2117 bitmap_clear_bit (df->insns_to_delete, uid);
 2118 /* If the insn is set to be rescanned, it does not need to also
 2119 be notes rescanned. */
 2120 if (!bitmap_bit_p (df->insns_to_rescan, uid))
 2121 bitmap_set_bit (df->insns_to_notes_rescan, INSN_UID (insn));
 2122 return;
 2123 }
 2124
 2125 bitmap_clear_bit (df->insns_to_delete, uid);
 2126 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
 2127
 2128 if (insn_info)
 2129 {
 2130 basic_block bb = BLOCK_FOR_INSN (insn);
 2131 rtx note;
 2132 struct df_collection_rec collection_rec;
 2133 unsigned int num_deleted;
 2134
 2135 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
/* NOTE(review): fixed-size 1000-entry alloca'd scratch vectors; this
   assumes no insn has more eq-note uses/mw records than that —
   matches the convention used by the other collection sites here.  */
ed6e85ae 2136 collection_rec.eq_use_vec = XALLOCAVEC (df_ref, 1000);
364c0c59 2137 collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 1000);
3072d30e 2138
/* Throw away the old note-derived records before collecting anew.  */
 2139 num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
 2140 df_ref_chain_delete (insn_info->eq_uses);
 2141 insn_info->eq_uses = NULL;
 2142
 2143 /* Process REG_EQUIV/REG_EQUAL notes */
 2144 for (note = REG_NOTES (insn); note;
 2145 note = XEXP (note, 1))
 2146 {
 2147 switch (REG_NOTE_KIND (note))
 2148 {
 2149 case REG_EQUIV:
 2150 case REG_EQUAL:
 2151 df_uses_record (DF_REF_REGULAR, &collection_rec,
 2152 &XEXP (note, 0), DF_REF_REG_USE,
 2153 bb, insn_info, DF_REF_IN_NOTE, -1, -1, VOIDmode);
/* Falls into the default, which just breaks.  */
 2154 default:
 2155 break;
 2156 }
 2157 }
 2158
 2159 /* Find some place to put any new mw_hardregs. */
 2160 df_canonize_collection_rec (&collection_rec);
 2161 if (collection_rec.next_mw)
 2162 {
 2163 unsigned int count = 0;
 2164 struct df_mw_hardreg **mw_rec = insn_info->mw_hardregs;
 2165 while (*mw_rec)
 2166 {
 2167 count++;
 2168 mw_rec++;
 2169 }
 2170
 2171 if (count)
 2172 {
 2173 /* Append to the end of the existing record after
 2174 expanding it if necessary. */
/* The old vector had room for COUNT survivors + NUM_DELETED removed
   records + the terminator, so growth is only needed when more
   records are being added than were deleted.  */
 2175 if (collection_rec.next_mw > num_deleted)
 2176 {
 2177 insn_info->mw_hardregs =
c768b48b 2178 XRESIZEVEC (struct df_mw_hardreg *,
 2179 insn_info->mw_hardregs,
 2180 count + 1 + collection_rec.next_mw);
3072d30e 2181 }
/* Copy the new records plus their NULL terminator, then restore the
   sorted order df_mw_compare defines.  */
 2182 memcpy (&insn_info->mw_hardregs[count], collection_rec.mw_vec,
 2183 (collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
 2184 qsort (insn_info->mw_hardregs, count + collection_rec.next_mw,
 2185 sizeof (struct df_mw_hardreg *), df_mw_compare);
 2186 }
 2187 else
 2188 {
 2189 /* No vector there. */
 2190 insn_info->mw_hardregs
 2191 = XNEWVEC (struct df_mw_hardreg*,
 2192 count + 1 + collection_rec.next_mw);
 2193 memcpy (insn_info->mw_hardregs, collection_rec.mw_vec,
 2194 (collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
 2195 }
 2196 }
 2197 /* Get rid of the mw_rec so that df_refs_add_to_chains will
 2198 ignore it. */
 2199 collection_rec.mw_vec = NULL;
 2200 collection_rec.next_mw = 0;
 2201 df_refs_add_to_chains (&collection_rec, bb, insn);
 2202 }
 2203 else
 2204 df_insn_rescan (insn);
 2205
 2206}
2207
2208\f
2209/*----------------------------------------------------------------------------
2210 Hard core instruction scanning code. No external interfaces here,
2211 just a lot of routines that look inside insns.
2212----------------------------------------------------------------------------*/
2213
2214
2215/* Return true if the contents of two df_ref's are identical.
2216 It ignores DF_REF_MARKER. */
2217
2218static bool
ed6e85ae 2219df_ref_equal_p (df_ref ref1, df_ref ref2)
3072d30e 2220{
2221 if (!ref2)
2222 return false;
ed6e85ae 2223
2224 if (ref1 == ref2)
2225 return true;
2226
2227 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2)
2228 || DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2)
2229 || DF_REF_REG (ref1) != DF_REF_REG (ref2)
2230 || DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2)
2231 || ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
2232 != (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
2233 || DF_REF_BB (ref1) != DF_REF_BB (ref2)
2234 || DF_REF_INSN_INFO (ref1) != DF_REF_INSN_INFO (ref2))
2235 return false;
2236
2237 switch (DF_REF_CLASS (ref1))
2238 {
2239 case DF_REF_ARTIFICIAL:
2240 case DF_REF_BASE:
2241 return true;
30de5b55 2242
ed6e85ae 2243 case DF_REF_EXTRACT:
2244 if ((DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
bf1f8fbc 2245 || (DF_REF_EXTRACT_WIDTH (ref1) != DF_REF_EXTRACT_WIDTH (ref2))
ed6e85ae 2246 || (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2)))
2247 return false;
2248 /* fallthru. */
2249
2250 case DF_REF_REGULAR:
2251 return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);
30de5b55 2252
ed6e85ae 2253 default:
2254 gcc_unreachable ();
2255 }
2256 return false;
3072d30e 2257}
2258
2259
 2260/* Compare REF1 and REF2 for sorting. This is only called from places
 2261 where all of the refs are of the same type, in the same insn, and
 2262 have the same bb. So these fields are not checked. */
 2263
/* qsort comparator for df_ref vectors.  The comparison order (class,
   regno, type, reg, loc, flags, extract fields) is significant:
   duplicate detection and the mw-first ordering below depend on it.  */
 2264static int
 2265df_ref_compare (const void *r1, const void *r2)
 2266{
ed6e85ae 2267 const df_ref ref1 = *(const df_ref *)r1;
 2268 const df_ref ref2 = *(const df_ref *)r2;
3072d30e 2269
 2270 if (ref1 == ref2)
 2271 return 0;
 2272
ed6e85ae 2273 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2))
 2274 return (int)DF_REF_CLASS (ref1) - (int)DF_REF_CLASS (ref2);
 2275
3072d30e 2276 if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
 2277 return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);
 2278
 2279 if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
 2280 return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);
 2281
/* Different underlying REG rtxes: fall back on creation order to keep
   the sort deterministic.  */
ed6e85ae 2282 if (DF_REF_REG (ref1) != DF_REF_REG (ref2))
 2283 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
 2284
 2285 /* Cannot look at the LOC field on artificial refs. */
 2286 if (DF_REF_CLASS (ref1) != DF_REF_ARTIFICIAL
 2287 && DF_REF_LOC (ref1) != DF_REF_LOC (ref2))
3072d30e 2288 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
 2289
 2290 if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
 2291 {
 2292 /* If two refs are identical except that one of them has is from
 2293 a mw and one is not, we need to have the one with the mw
 2294 first. */
 2295 if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
 2296 DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
 2297 return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
 2298 else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
 2299 return -1;
 2300 else
 2301 return 1;
 2302 }
30de5b55 2303
ed6e85ae 2304 /* The classes are the same at this point so it is safe to only look
30de5b55 2305 at ref1. */
ed6e85ae 2306 if (DF_REF_CLASS (ref1) == DF_REF_EXTRACT)
30de5b55 2307 {
ed6e85ae 2308 if (DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
 2309 return DF_REF_EXTRACT_OFFSET (ref1) - DF_REF_EXTRACT_OFFSET (ref2);
 2310 if (DF_REF_EXTRACT_WIDTH (ref1) != DF_REF_EXTRACT_WIDTH (ref2))
 2311 return DF_REF_EXTRACT_WIDTH (ref1) - DF_REF_EXTRACT_WIDTH (ref2);
 2312 if (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2))
 2313 return DF_REF_EXTRACT_MODE (ref1) - DF_REF_EXTRACT_MODE (ref2);
30de5b55 2314 }
/* Fully equal as far as sorting is concerned.  */
3072d30e 2315 return 0;
 2316}
2317
2318static void
ed6e85ae 2319df_swap_refs (df_ref *ref_vec, int i, int j)
3072d30e 2320{
ed6e85ae 2321 df_ref tmp = ref_vec[i];
3072d30e 2322 ref_vec[i] = ref_vec[j];
2323 ref_vec[j] = tmp;
2324}
2325
 2326/* Sort and compress a set of refs. */
 2327
/* Sort REF_VEC (COUNT live entries) by df_ref_compare, free duplicate
   refs (per df_ref_equal_p), and NULL-terminate the result.  Returns
   the number of refs that remain.  REF_VEC must have room for at
   least COUNT + 1 entries.  */
 2328static unsigned int
ed6e85ae 2329df_sort_and_compress_refs (df_ref *ref_vec, unsigned int count)
3072d30e 2330{
3072d30e 2331 unsigned int i;
 2332 unsigned int dist = 0;
 2333
 2334 ref_vec[count] = NULL;
 2335 /* If there are 1 or 0 elements, there is nothing to do. */
 2336 if (count < 2)
 2337 return count;
 2338 else if (count == 2)
 2339 {
 2340 if (df_ref_compare (&ref_vec[0], &ref_vec[1]) > 0)
 2341 df_swap_refs (ref_vec, 0, 1);
 2342 }
 2343 else
 2344 {
 2345 for (i = 0; i < count - 1; i++)
 2346 if (df_ref_compare (&ref_vec[i], &ref_vec[i+1]) >= 0)
 2347 break;
 2348 /* If the array is already strictly ordered,
 2349 which is the most common case for large COUNT case
 2350 (which happens for CALL INSNs),
 2351 no need to sort and filter out duplicate.
 2352 Simply return the count.
 2353 Make sure DF_GET_ADD_REFS adds refs in the increasing order
 2354 of DF_REF_COMPARE. */
 2355 if (i == count - 1)
 2356 return count;
ed6e85ae 2357 qsort (ref_vec, count, sizeof (df_ref), df_ref_compare);
3072d30e 2358 }
 2359
/* Compact in place: DIST counts the duplicates freed so far, so the
   next candidate for slot i+1 always lives at i + dist + 1.  The scan
   terminates safely because df_ref_equal_p returns false for the NULL
   sentinel stored at ref_vec[count].  */
 2360 for (i=0; i<count-dist; i++)
 2361 {
 2362 /* Find the next ref that is not equal to the current ref. */
 2363 while (df_ref_equal_p (ref_vec[i], ref_vec[i + dist + 1]))
 2364 {
30de5b55 2365 df_free_ref (ref_vec[i + dist + 1]);
3072d30e 2366 dist++;
 2367 }
 2368 /* Copy it down to the next position. */
 2369 if (dist)
 2370 ref_vec[i+1] = ref_vec[i + dist + 1];
 2371 }
 2372
 2373 count -= dist;
 2374 ref_vec[count] = NULL;
 2375 return count;
 2376}
2377
2378
2379/* Return true if the contents of two df_ref's are identical.
2380 It ignores DF_REF_MARKER. */
2381
2382static bool
2383df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2384{
2385 if (!mw2)
2386 return false;
2387 return (mw1 == mw2) ||
2388 (mw1->mw_reg == mw2->mw_reg
2389 && mw1->type == mw2->type
2390 && mw1->flags == mw2->flags
2391 && mw1->start_regno == mw2->start_regno
2392 && mw1->end_regno == mw2->end_regno);
2393}
2394
2395
2396/* Compare MW1 and MW2 for sorting. */
2397
2398static int
2399df_mw_compare (const void *m1, const void *m2)
2400{
c1fdef8e 2401 const struct df_mw_hardreg *const mw1 = *(const struct df_mw_hardreg *const*)m1;
2402 const struct df_mw_hardreg *const mw2 = *(const struct df_mw_hardreg *const*)m2;
3072d30e 2403
2404 if (mw1 == mw2)
2405 return 0;
2406
2407 if (mw1->type != mw2->type)
2408 return mw1->type - mw2->type;
2409
2410 if (mw1->flags != mw2->flags)
2411 return mw1->flags - mw2->flags;
2412
2413 if (mw1->start_regno != mw2->start_regno)
2414 return mw1->start_regno - mw2->start_regno;
2415
2416 if (mw1->end_regno != mw2->end_regno)
2417 return mw1->end_regno - mw2->end_regno;
2418
2419 if (mw1->mw_reg != mw2->mw_reg)
2420 return mw1->mw_order - mw2->mw_order;
2421
2422 return 0;
2423}
2424
2425
 2426/* Sort and compress a set of refs. */
 2427
/* Like df_sort_and_compress_refs, but for multiword hardreg records:
   sort MW_VEC (COUNT entries) by df_mw_compare, free duplicates back
   to the pool, NULL-terminate, and return the surviving count.
   MW_VEC must have room for COUNT + 1 entries.  */
 2428static unsigned int
 2429df_sort_and_compress_mws (struct df_mw_hardreg **mw_vec, unsigned int count)
 2430{
 2431 struct df_scan_problem_data *problem_data
 2432 = (struct df_scan_problem_data *) df_scan->problem_data;
 2433 unsigned int i;
 2434 unsigned int dist = 0;
 2435 mw_vec[count] = NULL;
 2436
 2437 if (count < 2)
 2438 return count;
 2439 else if (count == 2)
 2440 {
 2441 if (df_mw_compare (&mw_vec[0], &mw_vec[1]) > 0)
 2442 {
 2443 struct df_mw_hardreg *tmp = mw_vec[0];
 2444 mw_vec[0] = mw_vec[1];
 2445 mw_vec[1] = tmp;
 2446 }
 2447 }
 2448 else
 2449 qsort (mw_vec, count, sizeof (struct df_mw_hardreg *), df_mw_compare);
 2450
/* In-place compaction; the NULL sentinel at mw_vec[count] stops the
   inner loop because df_mw_equal_p returns false for NULL.  */
 2451 for (i=0; i<count-dist; i++)
 2452 {
 2453 /* Find the next ref that is not equal to the current ref. */
 2454 while (df_mw_equal_p (mw_vec[i], mw_vec[i + dist + 1]))
 2455 {
 2456 pool_free (problem_data->mw_reg_pool, mw_vec[i + dist + 1]);
 2457 dist++;
 2458 }
 2459 /* Copy it down to the next position. */
 2460 if (dist)
 2461 mw_vec[i+1] = mw_vec[i + dist + 1];
 2462 }
 2463
 2464 count -= dist;
 2465 mw_vec[count] = NULL;
 2466 return count;
 2467}
2468
2469
2470/* Sort and remove duplicates from the COLLECTION_REC. */
2471
2472static void
2473df_canonize_collection_rec (struct df_collection_rec *collection_rec)
2474{
2475 if (collection_rec->def_vec)
2476 collection_rec->next_def
2477 = df_sort_and_compress_refs (collection_rec->def_vec,
2478 collection_rec->next_def);
2479 if (collection_rec->use_vec)
2480 collection_rec->next_use
2481 = df_sort_and_compress_refs (collection_rec->use_vec,
2482 collection_rec->next_use);
2483 if (collection_rec->eq_use_vec)
2484 collection_rec->next_eq_use
2485 = df_sort_and_compress_refs (collection_rec->eq_use_vec,
2486 collection_rec->next_eq_use);
2487 if (collection_rec->mw_vec)
2488 collection_rec->next_mw
2489 = df_sort_and_compress_mws (collection_rec->mw_vec,
2490 collection_rec->next_mw);
2491}
2492
2493
 2494/* Add the new df_ref to appropriate reg_info/ref_info chains. */
 2495
/* Push THIS_REF onto the front of REG_INFO's doubly-linked reg chain
   and, when ADD_TO_TABLE, append it to REF_INFO's flat ref table
   (assigning its table index as DF_REF_ID; otherwise the id is -1).
   Also maintains the hard-regs-live counters.  */
 2496static void
ed6e85ae 2497df_install_ref (df_ref this_ref,
3072d30e 2498 struct df_reg_info *reg_info,
 2499 struct df_ref_info *ref_info,
 2500 bool add_to_table)
 2501{
 2502 unsigned int regno = DF_REF_REGNO (this_ref);
 2503 /* Add the ref to the reg_{def,use,eq_use} chain. */
ed6e85ae 2504 df_ref head = reg_info->reg_chain;
3072d30e 2505
 2506 reg_info->reg_chain = this_ref;
 2507 reg_info->n_refs++;
 2508
 2509 if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
 2510 {
 2511 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
 2512 df->hard_regs_live_count[regno]++;
 2513 }
 2514
/* A freshly created ref must not already be on a chain.  */
 2515 gcc_assert (DF_REF_NEXT_REG (this_ref) == NULL);
 2516 gcc_assert (DF_REF_PREV_REG (this_ref) == NULL);
 2517
 2518 DF_REF_NEXT_REG (this_ref) = head;
 2519
 2520 /* We cannot actually link to the head of the chain. */
 2521 DF_REF_PREV_REG (this_ref) = NULL;
 2522
 2523 if (head)
 2524 DF_REF_PREV_REG (head) = this_ref;
 2525
 2526 if (add_to_table)
 2527 {
 2528 gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
 2529 df_check_and_grow_ref_info (ref_info, 1);
 2530 DF_REF_ID (this_ref) = ref_info->table_size;
 2531 /* Add the ref to the big array of defs. */
 2532 ref_info->refs[ref_info->table_size] = this_ref;
 2533 ref_info->table_size++;
 2534 }
 2535 else
 2536 DF_REF_ID (this_ref) = -1;
 2537
/* total_size counts every installed ref, tabled or not.  */
 2538 ref_info->total_size++;
 2539}
2540
2541
 2542/* This function takes one of the groups of refs (defs, uses or
 2543 eq_uses) and installs the entire group into the insn. It also adds
 2544 each of these refs into the appropriate chains. */
 2545
/* Returns a freshly allocated NULL-terminated copy of OLD_VEC's COUNT
   refs after installing each one (see df_install_ref); returns the
   shared df_null_ref_rec when COUNT is zero.  Installing refs demotes
   the table's ordering to one of the UNORDERED states, since the new
   entries are simply appended.  */
ed6e85ae 2546static df_ref *
3072d30e 2547df_install_refs (basic_block bb,
ed6e85ae 2548 df_ref *old_vec, unsigned int count,
3072d30e 2549 struct df_reg_info **reg_info,
 2550 struct df_ref_info *ref_info,
 2551 bool is_notes)
 2552{
 2553 if (count)
 2554 {
 2555 unsigned int i;
ed6e85ae 2556 df_ref *new_vec = XNEWVEC (df_ref, count + 1);
3072d30e 2557 bool add_to_table;
 2558
/* Decide whether these refs belong in the flat table: note refs only
   go in when the table tracks notes; DF_REF_ORDER_NO_TABLE (default
   case) keeps everything out.  */
 2559 switch (ref_info->ref_order)
 2560 {
 2561 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
 2562 case DF_REF_ORDER_BY_REG_WITH_NOTES:
 2563 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
 2564 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
 2565 add_to_table = true;
 2566 break;
 2567 case DF_REF_ORDER_UNORDERED:
 2568 case DF_REF_ORDER_BY_REG:
 2569 case DF_REF_ORDER_BY_INSN:
 2570 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
 2571 add_to_table = !is_notes;
 2572 break;
 2573 default:
 2574 add_to_table = false;
 2575 break;
 2576 }
 2577
 2578 /* Do not add if ref is not in the right blocks. */
 2579 if (add_to_table && df->analyze_subset)
 2580 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
 2581
 2582 for (i = 0; i < count; i++)
 2583 {
ed6e85ae 2584 df_ref this_ref = old_vec[i];
3072d30e 2585 new_vec[i] = this_ref;
 2586 df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
 2587 ref_info, add_to_table);
 2588 }
 2589
 2590 new_vec[count] = NULL;
 2591 return new_vec;
 2592 }
 2593 else
 2594 return df_null_ref_rec;
 2595}
2596
2597
2598/* This function takes the mws installs the entire group into the
2599 insn. */
2600
2601static struct df_mw_hardreg **
2602df_install_mws (struct df_mw_hardreg **old_vec, unsigned int count)
2603{
2604 if (count)
2605 {
2606 struct df_mw_hardreg **new_vec
2607 = XNEWVEC (struct df_mw_hardreg*, count + 1);
2608 memcpy (new_vec, old_vec,
2609 sizeof (struct df_mw_hardreg*) * (count + 1));
2610 return new_vec;
2611 }
2612 else
2613 return df_null_mw_rec;
2614}
2615
2616
 2617/* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
 2618 chains and update other necessary information. */
 2619
/* Install the vectors collected in COLLECTION_REC.  With a non-null
   INSN they replace that insn's defs/uses/eq_uses/mw_hardregs (each
   old vector is freed first); with INSN null they become BB's
   artificial defs and uses.  A NULL vector in the record means the
   caller handles that chain itself and it is left untouched.  */
 2620static void
 2621df_refs_add_to_chains (struct df_collection_rec *collection_rec,
 2622 basic_block bb, rtx insn)
 2623{
 2624 if (insn)
 2625 {
158b6cc9 2626 struct df_insn_info *insn_rec = DF_INSN_INFO_GET (insn);
3072d30e 2627 /* If there is a vector in the collection rec, add it to the
 2628 insn. A null rec is a signal that the caller will handle the
 2629 chain specially. */
 2630 if (collection_rec->def_vec)
 2631 {
ce299759 2632 df_scan_free_ref_vec (insn_rec->defs);
3072d30e 2633 insn_rec->defs
 2634 = df_install_refs (bb, collection_rec->def_vec,
 2635 collection_rec->next_def,
 2636 df->def_regs,
 2637 &df->def_info, false);
 2638 }
 2639 if (collection_rec->use_vec)
 2640 {
ce299759 2641 df_scan_free_ref_vec (insn_rec->uses);
3072d30e 2642 insn_rec->uses
 2643 = df_install_refs (bb, collection_rec->use_vec,
 2644 collection_rec->next_use,
 2645 df->use_regs,
 2646 &df->use_info, false);
 2647 }
 2648 if (collection_rec->eq_use_vec)
 2649 {
ce299759 2650 df_scan_free_ref_vec (insn_rec->eq_uses);
3072d30e 2651 insn_rec->eq_uses
 2652 = df_install_refs (bb, collection_rec->eq_use_vec,
 2653 collection_rec->next_eq_use,
 2654 df->eq_use_regs,
 2655 &df->use_info, true);
 2656 }
 2657 if (collection_rec->mw_vec)
 2658 {
ce299759 2659 df_scan_free_mws_vec (insn_rec->mw_hardregs);
3072d30e 2660 insn_rec->mw_hardregs
 2661 = df_install_mws (collection_rec->mw_vec,
 2662 collection_rec->next_mw);
 2663 }
 2664 }
 2665 else
 2666 {
/* No insn: these are the basic block's artificial refs.  */
 2667 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
 2668
ce299759 2669 df_scan_free_ref_vec (bb_info->artificial_defs);
3072d30e 2670 bb_info->artificial_defs
 2671 = df_install_refs (bb, collection_rec->def_vec,
 2672 collection_rec->next_def,
 2673 df->def_regs,
 2674 &df->def_info, false);
ce299759 2675 df_scan_free_ref_vec (bb_info->artificial_uses);
3072d30e 2676 bb_info->artificial_uses
 2677 = df_install_refs (bb, collection_rec->use_vec,
 2678 collection_rec->next_use,
 2679 df->use_regs,
 2680 &df->use_info, false);
 2681 }
 2682}
e011eba9 2683
e011eba9 2684
30de5b55 2685/* Allocate a ref and initialize its fields.
2686
2687 If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
bf1f8fbc 2688 DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the fields
30de5b55 2689 if they were constants. Otherwise they should be -1 if those flags
2690 were set. */
3072d30e 2691
ed6e85ae 2692static df_ref
2693df_ref_create_structure (enum df_ref_class cl,
2694 struct df_collection_rec *collection_rec,
3072d30e 2695 rtx reg, rtx *loc,
158b6cc9 2696 basic_block bb, struct df_insn_info *info,
3072d30e 2697 enum df_ref_type ref_type,
b9c74b4d 2698 int ref_flags,
bf1f8fbc 2699 int width, int offset, enum machine_mode mode)
3072d30e 2700{
ed6e85ae 2701 df_ref this_ref = NULL;
3072d30e 2702 int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2703 struct df_scan_problem_data *problem_data
2704 = (struct df_scan_problem_data *) df_scan->problem_data;
2705
ed6e85ae 2706 switch (cl)
30de5b55 2707 {
ed6e85ae 2708 case DF_REF_BASE:
2709 this_ref = (df_ref) pool_alloc (problem_data->ref_base_pool);
2710 gcc_assert (loc == NULL);
2711 break;
2712
2713 case DF_REF_ARTIFICIAL:
2714 this_ref = (df_ref) pool_alloc (problem_data->ref_artificial_pool);
2715 this_ref->artificial_ref.bb = bb;
2716 gcc_assert (loc == NULL);
2717 break;
2718
2719 case DF_REF_REGULAR:
2720 this_ref = (df_ref) pool_alloc (problem_data->ref_regular_pool);
2721 this_ref->regular_ref.loc = loc;
2722 gcc_assert (loc);
2723 break;
2724
2725 case DF_REF_EXTRACT:
2726 this_ref = (df_ref) pool_alloc (problem_data->ref_extract_pool);
bf1f8fbc 2727 DF_REF_EXTRACT_WIDTH (this_ref) = width;
2728 DF_REF_EXTRACT_OFFSET (this_ref) = offset;
2729 DF_REF_EXTRACT_MODE (this_ref) = mode;
ed6e85ae 2730 this_ref->regular_ref.loc = loc;
2731 gcc_assert (loc);
2732 break;
30de5b55 2733 }
ed6e85ae 2734
2735 DF_REF_CLASS (this_ref) = cl;
3072d30e 2736 DF_REF_ID (this_ref) = -1;
2737 DF_REF_REG (this_ref) = reg;
2738 DF_REF_REGNO (this_ref) = regno;
ed6e85ae 2739 DF_REF_TYPE (this_ref) = ref_type;
158b6cc9 2740 DF_REF_INSN_INFO (this_ref) = info;
3072d30e 2741 DF_REF_CHAIN (this_ref) = NULL;
3072d30e 2742 DF_REF_FLAGS (this_ref) = ref_flags;
3072d30e 2743 DF_REF_NEXT_REG (this_ref) = NULL;
2744 DF_REF_PREV_REG (this_ref) = NULL;
2745 DF_REF_ORDER (this_ref) = df->ref_order++;
2746
2747 /* We need to clear this bit because fwprop, and in the future
2748 possibly other optimizations sometimes create new refs using ond
2749 refs as the model. */
2750 DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);
2751
2752 /* See if this ref needs to have DF_HARD_REG_LIVE bit set. */
2753 if ((regno < FIRST_PSEUDO_REGISTER)
2754 && (!DF_REF_IS_ARTIFICIAL (this_ref)))
2755 {
ed6e85ae 2756 if (DF_REF_REG_DEF_P (this_ref))
3072d30e 2757 {
2758 if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
2759 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2760 }
2761 else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
2762 && (regno == FRAME_POINTER_REGNUM
2763 || regno == ARG_POINTER_REGNUM)))
2764 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2765 }
2766
2767 if (collection_rec)
2768 {
ed6e85ae 2769 if (DF_REF_REG_DEF_P (this_ref))
3072d30e 2770 collection_rec->def_vec[collection_rec->next_def++] = this_ref;
2771 else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
2772 collection_rec->eq_use_vec[collection_rec->next_eq_use++] = this_ref;
2773 else
2774 collection_rec->use_vec[collection_rec->next_use++] = this_ref;
e011eba9 2775 }
3072d30e 2776
e011eba9 2777 return this_ref;
2778}
2779
2780
 2781/* Create new references of type DF_REF_TYPE for each part of register REG
 2782 at address LOC within INSN of BB.
 2783
 2784 If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
bf1f8fbc 2785 DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the
 2786 fields if they were constants. Otherwise they should be -1 if
 2787 those flags were set. */
 2788
/* A hard register spanning several registers gets one ref per covered
   regno plus one df_mw_hardreg record; a pseudo gets a single ref.  */
 2789
 2790static void
ed6e85ae 2791df_ref_record (enum df_ref_class cl,
 2792 struct df_collection_rec *collection_rec,
3072d30e 2793 rtx reg, rtx *loc,
158b6cc9 2794 basic_block bb, struct df_insn_info *insn_info,
e011eba9 2795 enum df_ref_type ref_type,
b9c74b4d 2796 int ref_flags,
bf1f8fbc 2797 int width, int offset, enum machine_mode mode)
e011eba9 2798{
3e6933a8 2799 unsigned int regno;
e011eba9 2800
 2801 gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
 2802
e011eba9 2803 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
 2804 if (regno < FIRST_PSEUDO_REGISTER)
 2805 {
 2806 struct df_mw_hardreg *hardreg = NULL;
 2807 struct df_scan_problem_data *problem_data
3072d30e 2808 = (struct df_scan_problem_data *) df_scan->problem_data;
 2809 unsigned int i;
 2810 unsigned int endregno;
ed6e85ae 2811 df_ref ref;
e011eba9 2812
/* Compute the [regno, endregno) hard register range actually touched,
   accounting for a SUBREG's byte offset.  */
e011eba9 2813 if (GET_CODE (reg) == SUBREG)
fe2ebfc8 2814 {
 2815 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
 2816 SUBREG_BYTE (reg), GET_MODE (reg));
a2c6f0b7 2817 endregno = regno + subreg_nregs (reg);
fe2ebfc8 2818 }
 2819 else
a2c6f0b7 2820 endregno = END_HARD_REGNO (reg);
e011eba9 2821
3072d30e 2822 /* If this is a multiword hardreg, we create some extra
 2823 datastructures that will enable us to easily build REG_DEAD
 2824 and REG_UNUSED notes. */
158b6cc9 2825 if ((endregno != regno + 1) && insn_info)
3e6933a8 2826 {
3e6933a8 2827 /* Sets to a subreg of a multiword register are partial.
 2828 Sets to a non-subreg of a multiword register are not. */
05beda33 2829 if (GET_CODE (reg) == SUBREG)
3e6933a8 2830 ref_flags |= DF_REF_PARTIAL;
 2831 ref_flags |= DF_REF_MW_HARDREG;
3072d30e 2832
364c0c59 2833 hardreg = (struct df_mw_hardreg *) pool_alloc (problem_data->mw_reg_pool);
3e6933a8 2834 hardreg->type = ref_type;
 2835 hardreg->flags = ref_flags;
 2836 hardreg->mw_reg = reg;
3072d30e 2837 hardreg->start_regno = regno;
 2838 hardreg->end_regno = endregno - 1;
 2839 hardreg->mw_order = df->ref_order++;
 2840 collection_rec->mw_vec[collection_rec->next_mw++] = hardreg;
3e6933a8 2841 }
 2842
/* One ref for each hard register in the range.  */
e011eba9 2843 for (i = regno; i < endregno; i++)
 2844 {
ed6e85ae 2845 ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
158b6cc9 2846 bb, insn_info, ref_type, ref_flags,
bf1f8fbc 2847 width, offset, mode);
3e6933a8 2848
3072d30e 2849 gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
e011eba9 2850 }
 2851 }
 2852 else
 2853 {
/* A pseudo register yields exactly one ref.  */
ed6e85ae 2854 df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
 2855 ref_type, ref_flags, width, offset, mode);
e011eba9 2856 }
 2857}
2858
2859
2860/* A set to a non-paradoxical SUBREG for which the number of word_mode units
2861 covered by the outer mode is smaller than that covered by the inner mode,
2862 is a read-modify-write operation.
2863 This function returns true iff the SUBREG X is such a SUBREG. */
2864
2865bool
2866df_read_modify_subreg_p (rtx x)
2867{
2868 unsigned int isize, osize;
2869 if (GET_CODE (x) != SUBREG)
2870 return false;
2871 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
2872 osize = GET_MODE_SIZE (GET_MODE (x));
c9a4672f 2873 return isize > osize
2874 && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
e011eba9 2875}
2876
2877
 2878/* Process all the registers defined in the rtx, X.
 2879 Autoincrement/decrement definitions will be picked up by
 2880 df_uses_record. */
 2881
/* X is a SET, CLOBBER or EXPR_LIST; the destination is unwrapped
   through PARALLEL / STRICT_LOW_PART / ZERO_EXTRACT and a def ref is
   recorded when a REG or SUBREG-of-REG is finally reached.  FLAGS
   accumulates DF_REF_* bits describing the wrappers seen.  */
 2882static void
3072d30e 2883df_def_record_1 (struct df_collection_rec *collection_rec,
158b6cc9 2884 rtx x, basic_block bb, struct df_insn_info *insn_info,
b9c74b4d 2885 int flags)
e011eba9 2886{
 2887 rtx *loc;
 2888 rtx dst;
30de5b55 2889 int offset = -1;
 2890 int width = -1;
bc620c5c 2891 enum machine_mode mode = VOIDmode;
ed6e85ae 2892 enum df_ref_class cl = DF_REF_REGULAR;
e011eba9 2893
 2894 /* We may recursively call ourselves on EXPR_LIST when dealing with PARALLEL
 2895 construct. */
 2896 if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
 2897 loc = &XEXP (x, 0);
 2898 else
 2899 loc = &SET_DEST (x);
 2900 dst = *loc;
 2901
35792caf 2902 /* It is legal to have a set destination be a parallel. */
 2903 if (GET_CODE (dst) == PARALLEL)
e011eba9 2904 {
 2905 int i;
 2906
 2907 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
 2908 {
 2909 rtx temp = XVECEXP (dst, 0, i);
 2910 if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
 2911 || GET_CODE (temp) == SET)
3072d30e 2912 df_def_record_1 (collection_rec,
158b6cc9 2913 temp, bb, insn_info,
3e6933a8 2914 GET_CODE (temp) == CLOBBER
3072d30e 2915 ? flags | DF_REF_MUST_CLOBBER : flags);
e011eba9 2916 }
 2917 return;
 2918 }
 2919
/* Strip a STRICT_LOW_PART wrapper; such a def is a partial
   read-modify-write of the register.  */
30de5b55 2920 if (GET_CODE (dst) == STRICT_LOW_PART)
e011eba9 2921 {
30de5b55 2922 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;
 2923
 2924 loc = &XEXP (dst, 0);
 2925 dst = *loc;
 2926 }
 2927
/* Strip a ZERO_EXTRACT wrapper; with constant position/size operands
   the exact extracted field is recorded via an EXTRACT-class ref.  */
 2928 if (GET_CODE (dst) == ZERO_EXTRACT)
 2929 {
 2930 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;
 2931
 2932 if (GET_CODE (XEXP (dst, 1)) == CONST_INT
 2933 && GET_CODE (XEXP (dst, 2)) == CONST_INT)
 2934 {
 2935 width = INTVAL (XEXP (dst, 1));
 2936 offset = INTVAL (XEXP (dst, 2));
bf1f8fbc 2937 mode = GET_MODE (dst);
ed6e85ae 2938 cl = DF_REF_EXTRACT;
30de5b55 2939 }
dea7b504 2940
e011eba9 2941 loc = &XEXP (dst, 0);
 2942 dst = *loc;
e011eba9 2943 }
 2944
dea7b504 2945 /* At this point if we do not have a reg or a subreg, just return. */
 2946 if (REG_P (dst))
 2947 {
ed6e85ae 2948 df_ref_record (cl, collection_rec,
158b6cc9 2949 dst, loc, bb, insn_info, DF_REF_REG_DEF, flags,
bf1f8fbc 2950 width, offset, mode);
dea7b504 2951
 2952 /* We want to keep sp alive everywhere - by making all
 2953 writes to sp also use of sp. */
 2954 if (REGNO (dst) == STACK_POINTER_REGNUM)
ed6e85ae 2955 df_ref_record (DF_REF_BASE, collection_rec,
158b6cc9 2956 dst, NULL, bb, insn_info, DF_REF_REG_USE, flags,
bf1f8fbc 2957 width, offset, mode);
dea7b504 2958 }
 2959 else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
 2960 {
 2961 if (df_read_modify_subreg_p (dst))
 2962 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
c9a4672f 2963
dea7b504 2964 flags |= DF_REF_SUBREG;
37ad3318 2965
ed6e85ae 2966 df_ref_record (cl, collection_rec,
158b6cc9 2967 dst, loc, bb, insn_info, DF_REF_REG_DEF, flags,
bf1f8fbc 2968 width, offset, mode);
dea7b504 2969 }
e011eba9 2970}
2971
2972
2973/* Process all the registers defined in the pattern rtx, X. */
2974
2975static void
3072d30e 2976df_defs_record (struct df_collection_rec *collection_rec,
158b6cc9 2977 rtx x, basic_block bb, struct df_insn_info *insn_info,
b9c74b4d 2978 int flags)
e011eba9 2979{
2980 RTX_CODE code = GET_CODE (x);
2981
2982 if (code == SET || code == CLOBBER)
2983 {
2984 /* Mark the single def within the pattern. */
8458f4ca 2985 int clobber_flags = flags;
3072d30e 2986 clobber_flags |= (code == CLOBBER) ? DF_REF_MUST_CLOBBER : 0;
158b6cc9 2987 df_def_record_1 (collection_rec, x, bb, insn_info, clobber_flags);
e011eba9 2988 }
2989 else if (code == COND_EXEC)
2990 {
3072d30e 2991 df_defs_record (collection_rec, COND_EXEC_CODE (x),
158b6cc9 2992 bb, insn_info, DF_REF_CONDITIONAL);
e011eba9 2993 }
2994 else if (code == PARALLEL)
2995 {
2996 int i;
2997
2998 /* Mark the multiple defs within the pattern. */
2999 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
158b6cc9 3000 df_defs_record (collection_rec, XVECEXP (x, 0, i), bb, insn_info, flags);
e011eba9 3001 }
3002}
3003
3004
/* Process all the registers used in the rtx at address LOC and add
   use refs for them to COLLECTION_REC.

   CL is the class of df_ref to create.  If the incoming FLAGS contain
   DF_REF_SIGN_EXTRACT or DF_REF_ZERO_EXTRACT, then WIDTH, OFFSET and
   MODE describe the extraction when its operands were constants;
   otherwise they should be -1, -1 and VOIDmode even when those flags
   are set.  */

static void
df_uses_record (enum df_ref_class cl, struct df_collection_rec *collection_rec,
                rtx *loc, enum df_ref_type ref_type,
                basic_block bb, struct df_insn_info *insn_info,
                int flags,
                int width, int offset, enum machine_mode mode)
{
  RTX_CODE code;
  rtx x;

  /* The last operand of an expression is handled by looping back here
     instead of recursing (see the bottom of the function).  */
 retry:
  x = *loc;
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    /* Constants and other leaf rtxes reference no registers.  */
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
         as being used.  */
      if (MEM_P (XEXP (x, 0)))
        df_uses_record (cl, collection_rec,
                        &XEXP (XEXP (x, 0), 0),
                        DF_REF_REG_MEM_STORE,
                        bb, insn_info,
                        flags, width, offset, mode);

      /* If we're clobbering a REG then we have a def so ignore.  */
      return;

    case MEM:
      /* The address computation is a use; only DF_REF_IN_NOTE survives
         from the incoming flags here.  */
      df_uses_record (cl, collection_rec,
                      &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
                      bb, insn_info, flags & DF_REF_IN_NOTE,
                      width, offset, mode);
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */
      flags |= DF_REF_PARTIAL;
      /* In case the SUBREG is not of a REG, do not optimize.  */
      if (!REG_P (SUBREG_REG (x)))
        {
          loc = &SUBREG_REG (x);
          df_uses_record (cl, collection_rec, loc, ref_type, bb, insn_info, flags,
                          width, offset, mode);
          return;
        }
      /* ... Fall through ...  */

    case REG:
      df_ref_record (cl, collection_rec,
                     x, loc, bb, insn_info,
                     ref_type, flags,
                     width, offset, mode);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      {
        /* If the parameters to the zero or sign extract are
           constants, strip them off and recurse, otherwise there is
           no information that we can gain from this operation.  */
        if (GET_CODE (XEXP (x, 1)) == CONST_INT
            && GET_CODE (XEXP (x, 2)) == CONST_INT)
          {
            width = INTVAL (XEXP (x, 1));
            offset = INTVAL (XEXP (x, 2));
            mode = GET_MODE (x);

            if (code == ZERO_EXTRACT)
              flags |= DF_REF_ZERO_EXTRACT;
            else
              flags |= DF_REF_SIGN_EXTRACT;

            /* The inner reference is recorded with the EXTRACT class so
               the width/offset/mode fields are kept.  */
            df_uses_record (DF_REF_EXTRACT, collection_rec,
                            &XEXP (x, 0), ref_type, bb, insn_info, flags,
                            width, offset, mode);
            return;
          }
      }
      /* Non-constant operands: fall out to the generic operand scan.  */
      break;

    case SET:
      {
        rtx dst = SET_DEST (x);
        gcc_assert (!(flags & DF_REF_IN_NOTE));
        df_uses_record (cl, collection_rec,
                        &SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags,
                        width, offset, mode);

        /* The destination is a def, but parts of it may also be read
           (read-modify-write subregs, memory addresses, extracts).  */
        switch (GET_CODE (dst))
          {
            case SUBREG:
              if (df_read_modify_subreg_p (dst))
                {
                  df_uses_record (cl, collection_rec, &SUBREG_REG (dst),
                                  DF_REF_REG_USE, bb, insn_info,
                                  flags | DF_REF_READ_WRITE | DF_REF_SUBREG,
                                  width, offset, mode);
                  break;
                }
              /* Fall through.  */
            case REG:
            case PARALLEL:
            case SCRATCH:
            case PC:
            case CC0:
                break;
            case MEM:
              /* Storing to memory uses the registers in the address.  */
              df_uses_record (cl, collection_rec, &XEXP (dst, 0),
                              DF_REF_REG_MEM_STORE, bb, insn_info, flags,
                              width, offset, mode);
              break;
            case STRICT_LOW_PART:
              {
                rtx *temp = &XEXP (dst, 0);
                /* A strict_low_part uses the whole REG and not just the
                   SUBREG.  Note that the incoming FLAGS are not
                   propagated here; only READ_WRITE/STRICT_LOW_PART are
                   recorded.  */
                dst = XEXP (dst, 0);
                df_uses_record (cl, collection_rec,
                                (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
                                DF_REF_REG_USE, bb, insn_info,
                                DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART,
                                width, offset, mode);
              }
              break;
            case ZERO_EXTRACT:
              {
                if (GET_CODE (XEXP (dst, 1)) == CONST_INT
                    && GET_CODE (XEXP (dst, 2)) == CONST_INT)
                  {
                    /* Constant position/width: record a use of the
                       extracted piece only.  */
                    width = INTVAL (XEXP (dst, 1));
                    offset = INTVAL (XEXP (dst, 2));
                    mode = GET_MODE (dst);
                    df_uses_record (DF_REF_EXTRACT, collection_rec, &XEXP (dst, 0),
                                    DF_REF_REG_USE, bb, insn_info,
                                    DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT,
                                    width, offset, mode);
                  }
                else
                  {
                    /* Variable position/width: they are uses themselves,
                       and the whole destination is read and written.  */
                    df_uses_record (cl, collection_rec, &XEXP (dst, 1),
                                    DF_REF_REG_USE, bb, insn_info, flags,
                                    width, offset, mode);
                    df_uses_record (cl, collection_rec, &XEXP (dst, 2),
                                    DF_REF_REG_USE, bb, insn_info, flags,
                                    width, offset, mode);
                    df_uses_record (cl, collection_rec, &XEXP (dst, 0),
                                    DF_REF_REG_USE, bb, insn_info,
                                    DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT,
                                    width, offset, mode);
                  }

              }
              break;

            default:
              gcc_unreachable ();
          }
        return;
      }

    case RETURN:
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
        /* Traditional and volatile asm instructions must be
           considered to use and clobber all hard registers, all
           pseudo-registers and all of memory.  So must TRAP_IF and
           UNSPEC_VOLATILE operations.

           Consider for instance a volatile asm that changes the fpu
           rounding mode.  An insn should not be moved across this
           even if it only uses pseudo-regs because it might give an
           incorrectly rounded result.

           However, flow.c's liveness computation did *not* do this,
           giving the reasoning as " ?!? Unfortunately, marking all
           hard registers as live causes massive problems for the
           register allocator and marking all pseudos as live creates
           mountains of uninitialized variable warnings."

           In order to maintain the status quo with regard to liveness
           and uses, we do what flow.c did and just mark any regs we
           can find in ASM_OPERANDS as used.  In global asm insns are
           scanned and regs_asm_clobbered is filled out.

           For all ASM_OPERANDS, we must traverse the vector of input
           operands.  We can not just fall through here since then we
           would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
           which do not indicate traditional asms unlike their normal
           usage.  */
        if (code == ASM_OPERANDS)
          {
            int j;

            for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
              df_uses_record (cl, collection_rec, &ASM_OPERANDS_INPUT (x, j),
                              DF_REF_REG_USE, bb, insn_info, flags,
                              width, offset, mode);
            return;
          }
        break;
      }

    case PRE_DEC:
    case POST_DEC:
    case PRE_INC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* Catch the def of the register being modified.  */
      df_ref_record (cl, collection_rec, XEXP (x, 0), &XEXP (x, 0),
                     bb, insn_info,
                     DF_REF_REG_DEF,
                     flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY,
                     width, offset, mode);

      /* ... Fall through to handle uses ...  */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */
  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            /* Tail recursive case: save a function call level.  */
            if (i == 0)
              {
                loc = &XEXP (x, 0);
                goto retry;
              }
            df_uses_record (cl, collection_rec, &XEXP (x, i), ref_type,
                            bb, insn_info, flags,
                            width, offset, mode);
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              df_uses_record (cl, collection_rec,
                              &XVECEXP (x, i, j), ref_type,
                              bb, insn_info, flags,
                              width, offset, mode);
          }
      }
  }

  return;
}
3287
3288
/* For each DF_REF_CONDITIONAL def in COLLECTION_REC, add a
   corresponding use: a conditional def only overwrites the register
   when the condition holds, so the old value is (possibly) read.  */

static void
df_get_conditional_uses (struct df_collection_rec *collection_rec)
{
  unsigned int i;
  for (i = 0; i < collection_rec->next_def; i++)
    {
      df_ref ref = collection_rec->def_vec[i];
      if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
        {
          int width = -1;
          int offset = -1;
          enum machine_mode mode = VOIDmode;
          df_ref use;

          /* Carry the extraction geometry over to the new use, if the
             def had one.  */
          if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
            {
              width = DF_REF_EXTRACT_WIDTH (ref);
              offset = DF_REF_EXTRACT_OFFSET (ref);
              mode = DF_REF_EXTRACT_MODE (ref);
            }

          /* The use mirrors the def except for its type and the
             CONDITIONAL flag, which is stripped.  */
          use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec, DF_REF_REG (ref),
                                         DF_REF_LOC (ref), DF_REF_BB (ref),
                                         DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
                                         DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL,
                                         width, offset, mode);
          DF_REF_REGNO (use) = DF_REF_REGNO (ref);
        }
    }
}
3321
3322
/* Get a CALL insn's extra defs and uses (argument registers, stack
   pointer, global registers and call-clobbered registers) and add
   them to COLLECTION_REC.  INSN_INFO is the call insn's record; FLAGS
   is ORed into every ref created here.  */

static void
df_get_call_refs (struct df_collection_rec * collection_rec,
                  basic_block bb,
                  struct df_insn_info *insn_info,
                  int flags)
{
  rtx note;
  bitmap_iterator bi;
  unsigned int ui;
  bool is_sibling_call;
  unsigned int i;
  bitmap defs_generated = BITMAP_ALLOC (&df_bitmap_obstack);

  /* Do not generate clobbers for registers that are the result of the
     call.  This causes ordering problems in the chain building code
     depending on which def is seen first.  */
  for (i=0; i<collection_rec->next_def; i++)
    {
      df_ref def = collection_rec->def_vec[i];
      bitmap_set_bit (defs_generated, DF_REF_REGNO (def));
    }

  /* Record the registers used to pass arguments, and explicitly
     noted as clobbered.  */
  for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
       note = XEXP (note, 1))
    {
      if (GET_CODE (XEXP (note, 0)) == USE)
        df_uses_record (DF_REF_REGULAR, collection_rec, &XEXP (XEXP (note, 0), 0),
                        DF_REF_REG_USE, bb, insn_info, flags, -1, -1,
                        VOIDmode);
      else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
        {
          if (REG_P (XEXP (XEXP (note, 0), 0)))
            {
              unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
              /* Skip registers already recorded as defs above.  */
              if (!bitmap_bit_p (defs_generated, regno))
                df_defs_record (collection_rec, XEXP (note, 0), bb,
                                insn_info, flags);
            }
          else
            df_uses_record (DF_REF_REGULAR, collection_rec, &XEXP (note, 0),
                            DF_REF_REG_USE, bb, insn_info, flags, -1, -1,
                            VOIDmode);
        }
    }

  /* The stack ptr is used (honorarily) by a CALL insn.  */
  df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
                 NULL, bb, insn_info, DF_REF_REG_USE,
                 DF_REF_CALL_STACK_USAGE | flags,
                 -1, -1, VOIDmode);

  /* Calls may also reference any of the global registers,
     so they are recorded as used.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i])
      {
        df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
                       NULL, bb, insn_info, DF_REF_REG_USE, flags, -1, -1,
                       VOIDmode);
        df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
                       NULL, bb, insn_info, DF_REF_REG_DEF, flags, -1, -1,
                       VOIDmode);
      }

  /* Record may-clobbers for call-invalidated registers; a sibling
     call does not clobber registers that are live on exit and
     mentioned in the return value.  */
  is_sibling_call = SIBLING_CALL_P (insn_info->insn);
  EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, ui, bi)
    {
      if (!global_regs[ui]
          && (!bitmap_bit_p (defs_generated, ui))
          && (!is_sibling_call
              || !bitmap_bit_p (df->exit_block_uses, ui)
              || refers_to_regno_p (ui, ui+1,
                                    crtl->return_rtx, NULL)))
        df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[ui],
                       NULL, bb, insn_info, DF_REF_REG_DEF,
                       DF_REF_MAY_CLOBBER | flags,
                       -1, -1, VOIDmode);
    }

  BITMAP_FREE (defs_generated);
  return;
}
e011eba9 3409
/* Collect all refs in the INSN.  This function is free of any
   side-effect - it will create and return a list of df_ref's in the
   COLLECTION_REC without putting those refs into existing ref chains
   and reg chains.  */

static void
df_insn_refs_collect (struct df_collection_rec* collection_rec,
                      basic_block bb, struct df_insn_info *insn_info)
{
  rtx note;
  bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);

  /* Clear out the collection record.  */
  collection_rec->next_def = 0;
  collection_rec->next_use = 0;
  collection_rec->next_eq_use = 0;
  collection_rec->next_mw = 0;

  /* Record register defs.  */
  df_defs_record (collection_rec, PATTERN (insn_info->insn), bb, insn_info, 0);

  /* Process REG_EQUIV/REG_EQUAL notes; their uses are tagged
     DF_REF_IN_NOTE so clients can distinguish them from real uses.  */
  for (note = REG_NOTES (insn_info->insn); note;
       note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
        {
        case REG_EQUIV:
        case REG_EQUAL:
          df_uses_record (DF_REF_REGULAR, collection_rec,
                          &XEXP (note, 0), DF_REF_REG_USE,
                          bb, insn_info, DF_REF_IN_NOTE, -1, -1, VOIDmode);
          break;
        case REG_NON_LOCAL_GOTO:
          /* The frame ptr is used by a non-local goto.  */
          df_ref_record (DF_REF_BASE, collection_rec,
                         regno_reg_rtx[FRAME_POINTER_REGNUM],
                         NULL, bb, insn_info,
                         DF_REF_REG_USE, 0, -1, -1, VOIDmode);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
          df_ref_record (DF_REF_BASE, collection_rec,
                         regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
                         NULL, bb, insn_info,
                         DF_REF_REG_USE, 0, -1, -1, VOIDmode);
#endif
          break;
        default:
          break;
        }
    }

  /* A call has extra implicit defs and uses.  */
  if (CALL_P (insn_info->insn))
    df_get_call_refs (collection_rec, bb, insn_info,
                      (is_cond_exec) ? DF_REF_CONDITIONAL : 0);

  /* Record the register uses.  */
  df_uses_record (DF_REF_REGULAR, collection_rec,
                  &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0,
                  -1, -1, VOIDmode);

  /* DF_REF_CONDITIONAL defs need corresponding USES.  */
  if (is_cond_exec)
    df_get_conditional_uses (collection_rec);

  df_canonize_collection_rec (collection_rec);
}
3476
3072d30e 3477/* Recompute the luids for the insns in BB. */
3478
3479void
3480df_recompute_luids (basic_block bb)
e011eba9 3481{
e011eba9 3482 rtx insn;
3483 int luid = 0;
3e6933a8 3484
3072d30e 3485 df_grow_insn_info ();
e011eba9 3486
3487 /* Scan the block an insn at a time from beginning to end. */
3488 FOR_BB_INSNS (bb, insn)
3489 {
158b6cc9 3490 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3072d30e 3491 /* Inserting labels does not always trigger the incremental
3492 rescanning. */
3493 if (!insn_info)
e011eba9 3494 {
3072d30e 3495 gcc_assert (!INSN_P (insn));
158b6cc9 3496 insn_info = df_insn_create_insn_record (insn);
e011eba9 3497 }
3072d30e 3498
158b6cc9 3499 DF_INSN_INFO_LUID (insn_info) = luid;
3072d30e 3500 if (INSN_P (insn))
3501 luid++;
3502 }
3503}
3504
3505
3506/* Returns true if the function entry needs to
3507 define the static chain register. */
3508
3509static bool
3510df_need_static_chain_reg (struct function *fun)
3511{
3512 tree fun_context = decl_function_context (fun->decl);
3513 return fun_context
3514 && DECL_NO_STATIC_CHAIN (fun_context) == false;
3515}
3516
3517
/* Collect all artificial refs at the block level for BB and add them
   to COLLECTION_REC.  These are the refs not attached to any insn:
   entry-block defs, exit-block uses, EH landing pad defs, and the
   per-block artificial uses.  */

static void
df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
{
  collection_rec->next_def = 0;
  collection_rec->next_use = 0;
  collection_rec->next_eq_use = 0;
  collection_rec->next_mw = 0;

  /* The entry and exit blocks are handled entirely by their own
     precomputed bitmaps.  */
  if (bb->index == ENTRY_BLOCK)
    {
      df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
      return;
    }
  else if (bb->index == EXIT_BLOCK)
    {
      df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
      return;
    }

#ifdef EH_RETURN_DATA_REGNO
  if (bb_has_eh_pred (bb))
    {
      unsigned int i;
      /* Mark the registers that will contain data for the handler.  */
      for (i = 0; ; ++i)
        {
          unsigned regno = EH_RETURN_DATA_REGNO (i);
          if (regno == INVALID_REGNUM)
            break;
          df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
                         bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1,
                         VOIDmode);
        }
    }
#endif

  /* Add the hard_frame_pointer if this block is the target of a
     non-local goto.  */
  if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, hard_frame_pointer_rtx, NULL,
                   bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1, VOIDmode);

  /* Add the artificial uses.  */
  if (bb->index >= NUM_FIXED_BLOCKS)
    {
      bitmap_iterator bi;
      unsigned int regno;
      /* EH landing pads get the larger eh artificial-use set.  */
      bitmap au = bb_has_eh_pred (bb)
        ? df->eh_block_artificial_uses
        : df->regular_block_artificial_uses;

      EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
        {
          df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
                         bb, NULL, DF_REF_REG_USE, 0, -1, -1, VOIDmode);
        }
    }

  df_canonize_collection_rec (collection_rec);
}
3581
05268a5f 3582
/* Record all the refs within the basic block BB_INDEX and scan the
   instructions if SCAN_INSNS.  Creates scan records for the block and
   its insns as needed and marks the block dirty.  */

void
df_bb_refs_record (int bb_index, bool scan_insns)
{
  basic_block bb = BASIC_BLOCK (bb_index);
  rtx insn;
  int luid = 0;
  struct df_scan_bb_info *bb_info;
  struct df_collection_rec collection_rec;
  /* Scratch vectors for collecting refs; sized generously for one
     insn's worth of refs.  */
  collection_rec.def_vec = XALLOCAVEC (df_ref, 1000);
  collection_rec.use_vec = XALLOCAVEC (df_ref, 1000);
  collection_rec.eq_use_vec = XALLOCAVEC (df_ref, 1000);
  collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 100);

  if (!df)
    return;

  bb_info = df_scan_get_bb_info (bb_index);

  /* Need to make sure that there is a record in the basic block info. */
  if (!bb_info)
    {
      bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
      df_scan_set_bb_info (bb_index, bb_info);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  if (scan_insns)
    /* Scan the block an insn at a time from beginning to end.  */
    FOR_BB_INSNS (bb, insn)
      {
        struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
        gcc_assert (!insn_info);

        insn_info = df_insn_create_insn_record (insn);
        if (INSN_P (insn))
          {
            /* Record refs within INSN.  */
            DF_INSN_INFO_LUID (insn_info) = luid++;
            df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
            df_refs_add_to_chains (&collection_rec, bb, insn);
          }
        /* NOTE(review): for real insns this overwrites the luid just
           assigned above with the post-increment value; luids are
           recomputed by df_recompute_luids, so this appears benign --
           confirm before changing.  */
        DF_INSN_INFO_LUID (insn_info) = luid;
      }

  /* Other block level artificial refs */
  df_bb_refs_collect (&collection_rec, bb);
  df_refs_add_to_chains (&collection_rec, bb, NULL);

  /* Now that the block has been processed, set the block as dirty so
     LR and LIVE will get it processed.  */
  df_set_bb_dirty (bb);
}
e011eba9 3638
3072d30e 3639
/* Get the artificial use set for a regular (i.e. non-exit/non-entry)
   block.  The result in REGULAR_BLOCK_ARTIFICIAL_USES is the set of
   hard registers that must be considered used in every such block.  */

static void
df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
{
#ifdef EH_USES
  unsigned int i;
#endif

  bitmap_clear (regular_block_artificial_uses);

  if (reload_completed)
    {
      if (frame_pointer_needed)
        bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
    }
  else
    /* Before reload, there are a few registers that must be forced
       live everywhere -- which might not already be the case for
       blocks within infinite loops.  */
    {
      /* Any reference to any pseudo before reload is a potential
         reference of the frame pointer.  */
      bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);

#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
#endif

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      /* Pseudos with argument area equivalences may require
         reloading via the argument pointer.  */
      if (fixed_regs[ARG_POINTER_REGNUM])
        bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
#endif

      /* Any constant, or pseudo with constant equivalences, may
         require reloading from memory using the pic register.  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        bitmap_set_bit (regular_block_artificial_uses, PIC_OFFSET_TABLE_REGNUM);
    }
  /* The all-important stack pointer must always be live.  */
  bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);

#ifdef EH_USES
  /* EH_USES registers are used:
     1) at all insns that might throw (calls or with -fnon-call-exceptions
        trapping insns)
     2) in all EH edges
     3) to support backtraces and/or debugging, anywhere between their
        initialization and where the saved registers are restored
        from them, including the cases where we don't reach the epilogue
        (noreturn call or infinite loop).  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (EH_USES (i))
      bitmap_set_bit (regular_block_artificial_uses, i);
#endif
}
3700
e011eba9 3701
/* Get the artificial use set for an eh block (a block with an EH
   landing-pad predecessor) into EH_BLOCK_ARTIFICIAL_USES.  */

static void
df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
{
  bitmap_clear (eh_block_artificial_uses);

  /* The following code (down thru the arg_pointer setting) APPEARS
     to be necessary because there is nothing that actually
     describes what the exception handling code may actually need
     to keep alive.  */
  if (reload_completed)
    {
      if (frame_pointer_needed)
        {
          bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
          bitmap_set_bit (eh_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
#endif
        }
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (fixed_regs[ARG_POINTER_REGNUM])
        bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
#endif
    }
}
3728
3729
3072d30e 3730\f
e011eba9 3731/*----------------------------------------------------------------------------
3732 Specialized hard register scanning functions.
3733----------------------------------------------------------------------------*/
3734
3072d30e 3735
e011eba9 3736/* Mark a register in SET. Hard registers in large modes get all
3737 of their component registers set as well. */
3738
3739static void
3740df_mark_reg (rtx reg, void *vset)
3741{
3742 bitmap set = (bitmap) vset;
3743 int regno = REGNO (reg);
3744
3745 gcc_assert (GET_MODE (reg) != BLKmode);
3746
3747 bitmap_set_bit (set, regno);
3748 if (regno < FIRST_PSEUDO_REGISTER)
3749 {
3750 int n = hard_regno_nregs[regno][GET_MODE (reg)];
3751 while (--n > 0)
3752 bitmap_set_bit (set, regno + n);
3753 }
3754}
3755
fcf2ad9f 3756
/* Set the bit in ENTRY_BLOCK_DEFS for each reg that is considered
   being defined at the entry of the function: incoming argument
   registers, the stack pointer, the static chain, the frame/arg/pic
   pointers before reload, plus target-specific additions.  */

static void
df_get_entry_block_def_set (bitmap entry_block_defs)
{
  rtx r;
  int i;

  bitmap_clear (entry_block_defs);

  /* Registers that may carry incoming arguments.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (FUNCTION_ARG_REGNO_P (i))
#ifdef INCOMING_REGNO
        bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
#else
        bitmap_set_bit (entry_block_defs, i);
#endif
    }

  /* The always important stack pointer.  */
  bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);

  /* Once the prologue has been generated, all of these registers
     should just show up in the first regular block.  */
  if (HAVE_prologue && epilogue_completed)
    {
      /* Defs for the callee saved registers are inserted so that the
         pushes have some defining location.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
          bitmap_set_bit (entry_block_defs, i);
    }
  else
    {
      /* If STATIC_CHAIN_INCOMING_REGNUM == STATIC_CHAIN_REGNUM
         only STATIC_CHAIN_REGNUM is defined.  If they are different,
         we only care about the STATIC_CHAIN_INCOMING_REGNUM.  */
#ifdef STATIC_CHAIN_INCOMING_REGNUM
      bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
#else
#ifdef STATIC_CHAIN_REGNUM
      bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
#endif
#endif
    }

  /* The register that holds the returned-structure address, if any.  */
  r = targetm.calls.struct_value_rtx (current_function_decl, true);
  if (r && REG_P (r))
    bitmap_set_bit (entry_block_defs, REGNO (r));

  if ((!reload_completed) || frame_pointer_needed)
    {
      /* Any reference to any pseudo before reload is a potential
         reference of the frame pointer.  */
      bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
        bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
#endif
    }

  /* These registers are live everywhere.  */
  if (!reload_completed)
    {
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      /* Pseudos with argument area equivalences may require
         reloading via the argument pointer.  */
      if (fixed_regs[ARG_POINTER_REGNUM])
        bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
#endif

#ifdef PIC_OFFSET_TABLE_REGNUM
      /* Any constant, or pseudo with constant equivalences, may
         require reloading from memory using the pic register.  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        bitmap_set_bit (entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
#endif
    }

#ifdef INCOMING_RETURN_ADDR_RTX
  if (REG_P (INCOMING_RETURN_ADDR_RTX))
    bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
#endif

  /* Let the target add anything else it considers defined on entry.  */
  targetm.live_on_entry (entry_block_defs);

  /* If the function has an incoming STATIC_CHAIN,
     it has to show up in the entry def set.  */
  if (df_need_static_chain_reg (cfun))
    {
#ifdef STATIC_CHAIN_INCOMING_REGNUM
      bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
#else
#ifdef STATIC_CHAIN_REGNUM
      bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
#endif
#endif
    }
}
3859
3860
/* Collect into COLLECTION_REC the (conservative) set of hard
   registers that are defined on entry to the function.
   ENTRY_BLOCK_DEFS (normally df->entry_block_defs) determines which
   register references to include.  */

static void
df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
                             bitmap entry_block_defs)
{
  unsigned int i;
  bitmap_iterator bi;

  /* One artificial def in the entry block per register in the set.  */
  EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
    {
      df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
                     ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0, -1, -1,
                     VOIDmode);
    }

  df_canonize_collection_rec (collection_rec);
}
3882
3883
/* Record the (conservative) set of hard registers that are defined on
   entry to the function, attaching the refs to the entry block's
   chains.  */

static void
df_record_entry_block_defs (bitmap entry_block_defs)
{
  struct df_collection_rec collection_rec;
  memset (&collection_rec, 0, sizeof (struct df_collection_rec));
  /* At most one def per hard register can be collected.  */
  collection_rec.def_vec = XALLOCAVEC (df_ref, FIRST_PSEUDO_REGISTER);

  df_entry_block_defs_collect (&collection_rec, entry_block_defs);

  /* Process bb_refs chain */
  df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL);
}
3899
3900
/* Update the defs in the entry block: recompute the def set and, if
   it changed (or was never recorded), rebuild the entry block's
   artificial defs and mark the block dirty.  */

void
df_update_entry_block_defs (void)
{
  bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
  bool changed = false;

  df_get_entry_block_def_set (refs);
  if (df->entry_block_defs)
    {
      if (!bitmap_equal_p (df->entry_block_defs, refs))
        {
          /* The set changed: throw away the old artificial defs.  */
          struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
          df_ref_chain_delete_du_chain (bb_info->artificial_defs);
          df_ref_chain_delete (bb_info->artificial_defs);
          bb_info->artificial_defs = NULL;
          changed = true;
        }
    }
  else
    {
      /* First time through: allocate the cached set.  */
      struct df_scan_problem_data *problem_data
        = (struct df_scan_problem_data *) df_scan->problem_data;
      df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
      changed = true;
    }

  if (changed)
    {
      df_record_entry_block_defs (refs);
      bitmap_copy (df->entry_block_defs, refs);
      df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
    }
  BITMAP_FREE (refs);
}
3937
3938
/* Set the bit in EXIT_BLOCK_USES for each reg that is considered
   being used at the exit of the function: the stack/frame pointers,
   global and epilogue-used registers, EH return data registers, and
   the function return value.  */

static void
df_get_exit_block_use_set (bitmap exit_block_uses)
{
  unsigned int i;

  bitmap_clear (exit_block_uses);

  /* Stack pointer is always live at the exit.  */
  bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);

  /* Mark the frame pointer if needed at the end of the function.
     If we end up eliminating it, it will be removed from the live
     list of each basic block by reload.  */

  if ((!reload_completed) || frame_pointer_needed)
    {
      bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
        bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
#endif
    }

#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
    bitmap_set_bit (exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
#endif

  /* Mark all global registers, and all registers used by the
     epilogue as being live at the end of the function since they
     may be referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      bitmap_set_bit (exit_block_uses, i);

  if (HAVE_epilogue && epilogue_completed)
    {
      /* Mark all call-saved registers that we actually used.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
            && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
          bitmap_set_bit (exit_block_uses, i);
    }

#ifdef EH_RETURN_DATA_REGNO
  /* Mark the registers that will contain data for the handler.  */
  if (reload_completed && crtl->calls_eh_return)
    for (i = 0; ; ++i)
      {
        unsigned regno = EH_RETURN_DATA_REGNO (i);
        if (regno == INVALID_REGNUM)
          break;
        bitmap_set_bit (exit_block_uses, regno);
      }
#endif

#ifdef EH_RETURN_STACKADJ_RTX
  /* The EH stack adjustment is still needed until the epilogue has
     been emitted.  */
  if ((!HAVE_epilogue || ! epilogue_completed)
      && crtl->calls_eh_return)
    {
      rtx tmp = EH_RETURN_STACKADJ_RTX;
      if (tmp && REG_P (tmp))
        df_mark_reg (tmp, exit_block_uses);
    }
#endif

#ifdef EH_RETURN_HANDLER_RTX
  if ((!HAVE_epilogue || ! epilogue_completed)
      && crtl->calls_eh_return)
    {
      rtx tmp = EH_RETURN_HANDLER_RTX;
      if (tmp && REG_P (tmp))
        df_mark_reg (tmp, exit_block_uses);
    }
#endif

  /* Mark function return value.  */
  diddle_return_value (df_mark_reg, (void*) exit_block_uses);
}
4025
4026
/* Return the refs of hard registers that are used in the exit block.
   It uses df->exit_block_uses to determine register to include.  */

static void
df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
{
  unsigned int i;
  bitmap_iterator bi;

  /* Build one artificial DF_REF_REG_USE in the exit block for each
     register recorded in EXIT_BLOCK_USES.  */
  EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
		   EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1, VOIDmode);

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
  /* It is deliberate that this is not put in the exit block uses but
     I do not know why.  */
  if (reload_completed
      && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
      && bb_has_eh_pred (EXIT_BLOCK_PTR)
      && fixed_regs[ARG_POINTER_REGNUM])
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
		   EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1, VOIDmode);
#endif

  /* Sort and de-duplicate the collected refs.  */
  df_canonize_collection_rec (collection_rec);
}
4053
4054
4055/* Record the set of hard registers that are used in the exit block.
4056 It uses df->exit_block_uses to determine which bit to include. */
4057
4058static void
4059df_record_exit_block_uses (bitmap exit_block_uses)
4060{
4061 struct df_collection_rec collection_rec;
4062 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
ed6e85ae 4063 collection_rec.use_vec = XALLOCAVEC (df_ref, FIRST_PSEUDO_REGISTER);
3072d30e 4064
4065 df_exit_block_uses_collect (&collection_rec, exit_block_uses);
4066
4067 /* Process bb_refs chain */
4068 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL);
4069}
4070
4071
4072/* Update the uses in the exit block. */
4073
4074void
4075df_update_exit_block_uses (void)
4076{
4077 bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
4078 bool changed = false;
4079
4080 df_get_exit_block_use_set (refs);
4081 if (df->exit_block_uses)
4082 {
4083 if (!bitmap_equal_p (df->exit_block_uses, refs))
4084 {
4085 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
4086 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
4087 df_ref_chain_delete (bb_info->artificial_uses);
4088 bb_info->artificial_uses = NULL;
4089 changed = true;
4090 }
4091 }
4092 else
4093 {
4094 struct df_scan_problem_data *problem_data
4095 = (struct df_scan_problem_data *) df_scan->problem_data;
4096 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
4097 changed = true;
4098 }
4099
4100 if (changed)
4101 {
4102 df_record_exit_block_uses (refs);
4103 bitmap_copy (df->exit_block_uses, refs);
4104 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
4105 }
4106 BITMAP_FREE (refs);
e011eba9 4107}
4108
/* True once df_hard_reg_init has done its one-time setup.  */
static bool initialized = false;
4110
3072d30e 4111
/* Initialize some platform specific structures.  Idempotent: the
   work is done only on the first call (guarded by INITIALIZED).  */

void
df_hard_reg_init (void)
{
#ifdef ELIMINABLE_REGS
  int i;
  /* The register pairs the target may eliminate, as (from, to).  */
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
  if (initialized)
    return;

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */
  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  /* Without ELIMINABLE_REGS only the frame pointer is eliminable.  */
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif

  initialized = true;
}
3072d30e 4137
4138
4139/* Recompute the parts of scanning that are based on regs_ever_live
4140 because something changed in that array. */
4141
4142void
4143df_update_entry_exit_and_calls (void)
4144{
4145 basic_block bb;
4146
4147 df_update_entry_block_defs ();
4148 df_update_exit_block_uses ();
4149
4150 /* The call insns need to be rescanned because there may be changes
4151 in the set of registers clobbered across the call. */
4152 FOR_EACH_BB (bb)
4153 {
4154 rtx insn;
4155 FOR_BB_INSNS (bb, insn)
4156 {
4157 if (INSN_P (insn) && CALL_P (insn))
4158 df_insn_rescan (insn);
4159 }
4160 }
4161}
4162
4163
4164/* Return true if hard REG is actually used in the some instruction.
4165 There are a fair number of conditions that affect the setting of
4166 this array. See the comment in df.h for df->hard_regs_live_count
4167 for the conditions that this array is set. */
4168
4169bool
4170df_hard_reg_used_p (unsigned int reg)
4171{
4172 gcc_assert (df);
4173 return df->hard_regs_live_count[reg] != 0;
4174}
4175
4176
4177/* A count of the number of times REG is actually used in the some
4178 instruction. There are a fair number of conditions that affect the
4179 setting of this array. See the comment in df.h for
4180 df->hard_regs_live_count for the conditions that this array is
4181 set. */
4182
4183
4184unsigned int
4185df_hard_reg_used_count (unsigned int reg)
4186{
4187 gcc_assert (df);
4188 return df->hard_regs_live_count[reg];
4189}
4190
4191
4192/* Get the value of regs_ever_live[REGNO]. */
4193
4194bool
4195df_regs_ever_live_p (unsigned int regno)
4196{
4197 return regs_ever_live[regno];
4198}
4199
4200
4201/* Set regs_ever_live[REGNO] to VALUE. If this cause regs_ever_live
4202 to change, schedule that change for the next update. */
4203
4204void
4205df_set_regs_ever_live (unsigned int regno, bool value)
4206{
4207 if (regs_ever_live[regno] == value)
4208 return;
4209
4210 regs_ever_live[regno] = value;
4211 if (df)
4212 df->redo_entry_and_exit = true;
4213}
4214
4215
4216/* Compute "regs_ever_live" information from the underlying df
4217 information. Set the vector to all false if RESET. */
4218
4219void
4220df_compute_regs_ever_live (bool reset)
4221{
4222 unsigned int i;
4223 bool changed = df->redo_entry_and_exit;
4224
4225 if (reset)
4226 memset (regs_ever_live, 0, sizeof (regs_ever_live));
4227
4228 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4229 if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
4230 {
4231 regs_ever_live[i] = true;
4232 changed = true;
4233 }
4234 if (changed)
4235 df_update_entry_exit_and_calls ();
4236 df->redo_entry_and_exit = false;
4237}
4238
4239\f
4240/*----------------------------------------------------------------------------
4241 Dataflow ref information verification functions.
4242
4243 df_reg_chain_mark (refs, regno, is_def, is_eq_use)
4244 df_reg_chain_verify_unmarked (refs)
4245 df_refs_verify (ref*, ref*, bool)
4246 df_mws_verify (mw*, mw*, bool)
4247 df_insn_refs_verify (collection_rec, bb, insn, bool)
4248 df_bb_refs_verify (bb, refs, bool)
4249 df_bb_verify (bb)
4250 df_exit_block_bitmap_verify (bool)
4251 df_entry_block_bitmap_verify (bool)
4252 df_scan_verify ()
4253----------------------------------------------------------------------------*/
4254
4255
/* Walk the reg chain REFS for register REGNO, marking every ref with
   DF_REF_REG_MARK and checking the chain invariants along the way.
   IS_DEF says whether this is a def chain; IS_EQ_USE whether it is
   the note-use (DF_REF_IN_NOTE) chain.  Returns the number of refs
   on the chain.  */

static unsigned int
df_reg_chain_mark (df_ref refs, unsigned int regno,
		   bool is_def, bool is_eq_use)
{
  unsigned int count = 0;
  df_ref ref;
  for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
    {
      /* A ref must appear on exactly one chain, so it cannot already
	 be marked.  */
      gcc_assert (!DF_REF_IS_REG_MARKED (ref));

      /* If there are no def-use or use-def chains, make sure that all
	 of the chains are clear.  */
      if (!df_chain)
	gcc_assert (!DF_REF_CHAIN (ref));

      /* Check to make sure the ref is in the correct chain.  */
      gcc_assert (DF_REF_REGNO (ref) == regno);
      if (is_def)
	gcc_assert (DF_REF_REG_DEF_P (ref));
      else
	gcc_assert (!DF_REF_REG_DEF_P (ref));

      /* Only note-use chains carry DF_REF_IN_NOTE.  */
      if (is_eq_use)
	gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
      else
	gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);

      /* The chain is doubly linked; verify the back pointer.  */
      if (DF_REF_NEXT_REG (ref))
	gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref)) == ref);
      count++;
      DF_REF_REG_MARK (ref);
    }
  return count;
}
4293
4294
4295/* Verify that all of the registers in the chain are unmarked. */
4296
4297static void
ed6e85ae 4298df_reg_chain_verify_unmarked (df_ref refs)
3072d30e 4299{
ed6e85ae 4300 df_ref ref;
3072d30e 4301 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4302 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4303}
4304
4305
4306/* Verify that NEW_REC and OLD_REC have exactly the same members. */
4307
4308static bool
ed6e85ae 4309df_refs_verify (df_ref *new_rec, df_ref *old_rec,
3072d30e 4310 bool abort_if_fail)
4311{
4312 while ((*new_rec) && (*old_rec))
4313 {
4314 if (!df_ref_equal_p (*new_rec, *old_rec))
4315 {
4316 if (abort_if_fail)
4317 gcc_assert (0);
4318 else
4319 return false;
4320 }
4321
4322 /* Abort if fail is called from the function level verifier. If
4323 that is the context, mark this reg as being seem. */
4324 if (abort_if_fail)
4325 {
4326 gcc_assert (DF_REF_IS_REG_MARKED (*old_rec));
4327 DF_REF_REG_UNMARK (*old_rec);
4328 }
4329
4330 new_rec++;
4331 old_rec++;
4332 }
4333
4334 if (abort_if_fail)
4335 gcc_assert ((*new_rec == NULL) && (*old_rec == NULL));
4336 else
4337 return ((*new_rec == NULL) && (*old_rec == NULL));
4338 return false;
4339}
4340
4341
4342/* Verify that NEW_REC and OLD_REC have exactly the same members. */
4343
4344static bool
4345df_mws_verify (struct df_mw_hardreg **new_rec, struct df_mw_hardreg **old_rec,
4346 bool abort_if_fail)
4347{
4348 while ((*new_rec) && (*old_rec))
4349 {
4350 if (!df_mw_equal_p (*new_rec, *old_rec))
4351 {
4352 if (abort_if_fail)
4353 gcc_assert (0);
4354 else
4355 return false;
4356 }
4357 new_rec++;
4358 old_rec++;
4359 }
4360
4361 if (abort_if_fail)
4362 gcc_assert ((*new_rec == NULL) && (*old_rec == NULL));
4363 else
4364 return ((*new_rec == NULL) && (*old_rec == NULL));
4365 return false;
4366}
4367
4368
/* Return true if the existing insn refs information is complete and
   correct.  Otherwise (i.e. if there's any missing or extra refs),
   return the correct df_ref chain in REFS_RETURN.

   If ABORT_IF_FAIL, leave the refs that are verified (already in the
   ref chain) as DF_REF_MARKED().  If it's false, then it's a per-insn
   verification mode instead of the whole function, so unmark
   everything.

   If ABORT_IF_FAIL is set, this function never returns false.  */

static bool
df_insn_refs_verify (struct df_collection_rec *collection_rec,
		     basic_block bb,
		     rtx insn,
		     bool abort_if_fail)
{
  bool ret1, ret2, ret3, ret4;
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);

  /* Recompute the refs for INSN from scratch into COLLECTION_REC so
     they can be compared against the cached per-uid vectors.  */
  df_insn_refs_collect (collection_rec, bb, insn_info);

  if (!DF_INSN_UID_DEFS (uid))
    {
      /* The insn_rec was created but it was never filled out.  */
      if (abort_if_fail)
	gcc_assert (0);
      else
	return false;
    }

  /* Unfortunately we cannot opt out early if one of these is not
     right because the marks will not get cleared.  */
  ret1 = df_refs_verify (collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
			 abort_if_fail);
  ret2 = df_refs_verify (collection_rec->use_vec, DF_INSN_UID_USES (uid),
			 abort_if_fail);
  ret3 = df_refs_verify (collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
			 abort_if_fail);
  ret4 = df_mws_verify (collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
			abort_if_fail);
  return (ret1 && ret2 && ret3 && ret4);
}
4413
4414
/* Return true if all refs in the basic block are correct and complete.
   Due to df_ref_chain_verify, it will cause all refs
   that are verified to have DF_REF_MARK bit set.  */

static bool
df_bb_verify (basic_block bb)
{
  rtx insn;
  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
  struct df_collection_rec collection_rec;

  /* Fixed-size scratch vectors to receive the recomputed refs.  */
  memset (&collection_rec, 0, sizeof (struct df_collection_rec));
  collection_rec.def_vec = XALLOCAVEC (df_ref, 1000);
  collection_rec.use_vec = XALLOCAVEC (df_ref, 1000);
  collection_rec.eq_use_vec = XALLOCAVEC (df_ref, 1000);
  collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 100);

  gcc_assert (bb_info);

  /* Scan the block, one insn at a time.  (The walk is actually from
     end to beginning; each insn is verified independently, so the
     direction does not matter here.)  */
  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      if (!INSN_P (insn))
	continue;
      df_insn_refs_verify (&collection_rec, bb, insn, true);
      df_free_collection_rec (&collection_rec);
    }

  /* Do the artificial defs and uses.  */
  df_bb_refs_collect (&collection_rec, bb);
  df_refs_verify (collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
  df_refs_verify (collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
  df_free_collection_rec (&collection_rec);

  return true;
}
4451
4452
4453/* Returns true if the entry block has correct and complete df_ref set.
4454 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4455
4456static bool
4457df_entry_block_bitmap_verify (bool abort_if_fail)
4458{
4459 bitmap entry_block_defs = BITMAP_ALLOC (&df_bitmap_obstack);
4460 bool is_eq;
4461
4462 df_get_entry_block_def_set (entry_block_defs);
4463
4464 is_eq = bitmap_equal_p (entry_block_defs, df->entry_block_defs);
4465
4466 if (!is_eq && abort_if_fail)
4467 {
4468 print_current_pass (stderr);
4469 fprintf (stderr, "entry_block_defs = ");
4470 df_print_regset (stderr, entry_block_defs);
4471 fprintf (stderr, "df->entry_block_defs = ");
4472 df_print_regset (stderr, df->entry_block_defs);
4473 gcc_assert (0);
4474 }
4475
4476 BITMAP_FREE (entry_block_defs);
4477
4478 return is_eq;
4479}
4480
4481
4482/* Returns true if the exit block has correct and complete df_ref set.
4483 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4484
4485static bool
4486df_exit_block_bitmap_verify (bool abort_if_fail)
4487{
4488 bitmap exit_block_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4489 bool is_eq;
4490
4491 df_get_exit_block_use_set (exit_block_uses);
4492
4493 is_eq = bitmap_equal_p (exit_block_uses, df->exit_block_uses);
4494
4495 if (!is_eq && abort_if_fail)
4496 {
4497 print_current_pass (stderr);
4498 fprintf (stderr, "exit_block_uses = ");
4499 df_print_regset (stderr, exit_block_uses);
4500 fprintf (stderr, "df->exit_block_uses = ");
4501 df_print_regset (stderr, df->exit_block_uses);
4502 gcc_assert (0);
4503 }
4504
4505 BITMAP_FREE (exit_block_uses);
4506
4507 return is_eq;
4508}
4509
4510
/* Verify that the df_ref information for all insns in all blocks is
   correct and complete.  Aborts (via gcc_assert) on any
   inconsistency; returns nothing on success.  */

void
df_scan_verify (void)
{
  unsigned int i;
  basic_block bb;
  bitmap regular_block_artificial_uses;
  bitmap eh_block_artificial_uses;

  /* Nothing to verify if scanning has not been set up.  */
  if (!df)
    return;

  /* Verification is a 4 step process.  */

  /* (1) All of the refs are marked by going thru the reg chains.  */
  for (i = 0; i < DF_REG_SIZE (df); i++)
    {
      gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
		  == DF_REG_DEF_COUNT(i));
      gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
		  == DF_REG_USE_COUNT(i));
      gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
		  == DF_REG_EQ_USE_COUNT(i));
    }

  /* (2) There are various bitmaps whose value may change over the
     course of the compilation.  This step recomputes them to make
     sure that they have not slipped out of date.  */
  regular_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
  eh_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);

  df_get_regular_block_artificial_uses (regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (eh_block_artificial_uses);

  bitmap_ior_into (eh_block_artificial_uses,
		   regular_block_artificial_uses);

  /* Check artificial_uses bitmaps didn't change.  */
  gcc_assert (bitmap_equal_p (regular_block_artificial_uses,
			      df->regular_block_artificial_uses));
  gcc_assert (bitmap_equal_p (eh_block_artificial_uses,
			      df->eh_block_artificial_uses));

  BITMAP_FREE (regular_block_artificial_uses);
  BITMAP_FREE (eh_block_artificial_uses);

  /* Verify entry block and exit block.  These only verify the bitmaps,
     the refs are verified in df_bb_verify.  */
  df_entry_block_bitmap_verify (true);
  df_exit_block_bitmap_verify (true);

  /* (3) All of the insns in all of the blocks are traversed and the
     marks are cleared both in the artificial refs attached to the
     blocks and the real refs inside the insns.  It is a failure to
     clear a mark that has not been set as this means that the ref in
     the block or insn was not in the reg chain.  */

  FOR_ALL_BB (bb)
    df_bb_verify (bb);

  /* (4) All reg chains are traversed a second time.  This time a
     check is made that the marks are clear.  A mark still set would
     come from a ref that is not in any insn or basic block.  */

  for (i = 0; i < DF_REG_SIZE (df); i++)
    {
      df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
      df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
      df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));
    }
}